@mdvp/cli 1.10.0 → 1.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +87 -3
- package/package.json +1 -1
package/cli.mjs
CHANGED
|
@@ -99,9 +99,86 @@ function toTextFormat(site, bd) {
|
|
|
99
99
|
].join('\n')
|
|
100
100
|
}
|
|
101
101
|
|
|
102
|
-
async function
|
|
102
|
+
// Crawl <domain> locally with a downloaded crawler worker, score the
// collected metrics via the API, and print the result in the requested format.
//
// opts: { json, text, ... } — output-format flags (`raw` is accepted by the
// caller but has no effect on the local-crawl path).
//
// NOTE(security): this downloads executable code (crawler-worker.mjs /
// extract.js) from ${API} and runs it with `node`; there is no integrity
// check on the downloaded files, so the API origin must be trusted.
async function cmdAuditLocal(domain, opts) {
  const { json, text } = opts
  const { spawn } = await import("child_process")
  const { existsSync } = await import("fs")
  const dir = `${homedir()}/.mdvp/crawler`

  // First run: fetch the worker + extractor and scaffold a package dir.
  if (!existsSync(`${dir}/crawler-worker.mjs`)) {
    process.stderr.write(`${DIM}downloading crawler...${R}\n`)
    mkdirSync(dir, { recursive: true })
    const download = (url, dest) => new Promise((res, rej) => {
      const { get: g } = pickModule(url)
      g(url, { headers: { Accept: "text/plain" } }, (r) => {
        // Reject non-200 responses instead of saving an error page as code.
        if (r.statusCode !== 200) {
          r.resume() // drain so the socket is released
          rej(new Error(`download failed (HTTP ${r.statusCode}) for ${url}`))
          return
        }
        let body = ""
        r.on("data", (c) => (body += c))
        r.on("end", () => { try { writeFileSync(dest, body); res() } catch { rej(new Error("write failed")) } })
      }).on("error", rej)
    })
    // The worker is required — fail fast with a clear message rather than
    // hitting a confusing `node` spawn error further down.
    try {
      await download(`${API}/crawler-worker.mjs`, `${dir}/crawler-worker.mjs`)
    } catch (err) {
      console.error(`${RED}Could not download crawler: ${err.message}${R}`)
      process.exit(1)
    }
    // extract.js is best-effort; the worker can run without it.
    await download(`${API}/extract.js`, `${dir}/extract.js`).catch(() => {})
    writeFileSync(`${dir}/package.json`, '{"type":"module","dependencies":{"puppeteer":"*"}}')
  }

  // Puppeteer is installed into the crawler dir on first use.
  if (!existsSync(`${dir}/node_modules/puppeteer`)) {
    process.stderr.write(`${DIM}installing puppeteer (first run ~30s)...${R}\n`)
    await new Promise((res, rej) => {
      const child = spawn("npm", ["install", "--prefer-offline"], { cwd: dir, stdio: "inherit" })
      child.on("exit", (code) => code === 0 ? res() : rej(new Error(`npm install failed`)))
    })
  }

  process.stderr.write(`${DIM}crawling https://${domain} locally...${R}\n`)

  // Run the worker once against the target; it emits JSON metrics on stdout
  // (stderr is inherited so crawl progress stays visible).
  const result = await new Promise((resolve, reject) => {
    const env = { ...process.env, CRAWL_ONCE: `https://${domain}`, CRAWL_ONCE_STDOUT: "1", TABS: "1", API_URL: API }
    const child = spawn("node", [`${dir}/crawler-worker.mjs`], { env, cwd: dir, stdio: ["ignore", "pipe", "inherit"] })
    let out = ""
    child.stdout.on("data", (d) => (out += d))
    child.on("exit", (code) => {
      // Include the exit code in the failure so crashes are distinguishable
      // from a clean run that produced no output.
      try { resolve(JSON.parse(out)) } catch { reject(new Error(`crawler returned no data (exit ${code})`)) }
    })
  })

  if (!result || !result.metrics) {
    console.error(`${RED}Crawl failed — no metrics returned${R}`)
    process.exit(1)
  }

  // Score the raw metrics server-side.
  const scoreRes = await apiPost("/features", result.metrics, null).catch(() => null)
  if (!scoreRes || !scoreRes.score) {
    console.error(`${RED}Scoring failed${R}`)
    process.exit(1)
  }

  const { score } = scoreRes
  const bd = score.breakdown.map((b) => ({ c: b.category, s: b.score }))
  const sorted = [...bd].sort((a, b) => a.s - b.s) // ascending: worst categories first

  const site = { id: domain, url: `https://${domain}`, overall_score: score.overall, grade: score.grade, label: null, scores: { breakdown: bd } }

  if (json) {
    console.log(JSON.stringify({ id: site.id, url: site.url, grade: score.grade, overall_score: score.overall, scores: { overall: score.overall, grade: score.grade, breakdown: Object.fromEntries(bd.map((b) => [b.c, b.s])), worst: sorted.slice(0, 3).map((b) => ({ key: b.c, score: b.s })) } }, null, 2))
    return
  }

  if (text) { console.log(toTextFormat(site, bd)); return }

  // Default human-readable output: headline score plus a per-category bar.
  console.log(`\n${BOLD}${domain}${R} ${scoreColor(score.overall)}${score.grade} ${score.overall}/100${R} ${DIM}local crawl${R}\n`)
  for (const cat of Object.keys(CATS)) {
    const s = bd.find((b) => b.c === cat)?.s ?? 0
    console.log(` ${CATS[cat].padEnd(16)} ${scoreColor(s)}${bar(s)}${R} ${s}`)
  }
  console.log(`\n${DIM}Lowest: ${sorted.slice(0, 3).map((i) => `${CATS[i.c] ?? i.c} (${i.s})`).join(" · ")}${R}\n`)
}
|
|
175
|
+
|
|
176
|
+
async function cmdAudit(domain, opts) {
|
|
177
|
+
const { json, raw, text, apiKey, local } = opts
|
|
103
178
|
domain = parseDomain(domain)
|
|
104
179
|
|
|
180
|
+
if (local) return cmdAuditLocal(domain, opts)
|
|
181
|
+
|
|
105
182
|
if ((json || raw || text) && !apiKey) {
|
|
106
183
|
console.error(`${RED}--json and --raw require an API key (costs 1 credit).${R}`)
|
|
107
184
|
console.error(`${DIM}Run: npx @mdvp/cli login or npx @mdvp/cli balance${R}`)
|
|
@@ -360,8 +437,15 @@ async function cmdHire(opts) {
|
|
|
360
437
|
await download(extractUrl, `${dir}/extract.js`)
|
|
361
438
|
.catch(() => {})
|
|
362
439
|
|
|
363
|
-
|
|
364
|
-
|
|
440
|
+
writeFileSync(`${dir}/package.json`, '{"type":"module","dependencies":{"puppeteer":"*"}}')
|
|
441
|
+
|
|
442
|
+
const needsInstall = !existsSync(`${dir}/node_modules/puppeteer`)
|
|
443
|
+
if (needsInstall) {
|
|
444
|
+
process.stderr.write(`${DIM}Installing puppeteer (first run, ~30s)...${R}\n`)
|
|
445
|
+
await new Promise((res, rej) => {
|
|
446
|
+
const child = spawn("npm", ["install", "--prefer-offline"], { cwd: dir, stdio: "inherit" })
|
|
447
|
+
child.on("exit", (code) => code === 0 ? res() : rej(new Error(`npm install failed (${code})`)))
|
|
448
|
+
})
|
|
365
449
|
}
|
|
366
450
|
|
|
367
451
|
const apiUrl = local ? "http://localhost:7227" : API
|