@mdvp/cli 1.9.0 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +21 -1
- package/package.json +1 -1
package/cli.mjs
CHANGED
|
@@ -262,7 +262,7 @@ async function cmdLogin() {
|
|
|
262
262
|
async function cmdBalance({ json, apiKey }) {
|
|
263
263
|
if (!apiKey) { console.error(`${RED}No API key. Run: npx mdvp login${R}`); process.exit(1) }
|
|
264
264
|
const d = await new Promise((resolve, reject) => {
|
|
265
|
-
|
|
265
|
+
httpsGet(`${API}/token/balance`, { headers: { Accept: "application/json", "x-api-key": apiKey } }, (res) => {
|
|
266
266
|
let body = ""
|
|
267
267
|
res.on("data", (c) => (body += c))
|
|
268
268
|
res.on("end", () => resolve(JSON.parse(body)))
|
|
@@ -299,6 +299,9 @@ ${BOLD}Account${R}
|
|
|
299
299
|
submit <domain> --local Submit to local crawler node instead
|
|
300
300
|
|
|
301
301
|
${BOLD}Crawler${R}
|
|
302
|
+
recrawl Re-queue existing sites for recrawl (updates data)
|
|
303
|
+
recrawl linear.app Re-queue specific sites
|
|
304
|
+
recrawl --limit=100 Re-queue oldest N sites
|
|
302
305
|
hire Become a crawler node (downloads + runs worker)
|
|
303
306
|
hire --daemon Run crawler in background
|
|
304
307
|
hire --tabs=4 Run with 4 parallel tabs
|
|
@@ -391,6 +394,20 @@ async function cmdHire(opts) {
|
|
|
391
394
|
}
|
|
392
395
|
}
|
|
393
396
|
|
|
397
|
+
/**
 * Re-queue sites for recrawl via the API.
 *
 * @param {{ apiKey?: string, limit?: number }} opts - CLI options. `apiKey` is
 *   required (exits with code 1 when absent); `limit` caps how many of the
 *   oldest sites the server re-queues when no explicit domains are given.
 * @param {string[]} [domains] - Optional explicit domains to re-queue. Defaults
 *   to [] so a call without the second argument no longer throws on
 *   `domains.length`; when empty, the server picks up to `limit` oldest sites.
 */
async function cmdRecrawl(opts, domains = []) {
  const { apiKey } = opts
  if (!apiKey) { console.error(`${RED}No API key. Run: npx @mdvp/cli login${R}`); process.exit(1) }

  // ?? instead of || so an explicit numeric limit is never silently replaced
  // by the default (|| would also coerce 0 to 50).
  const limit = opts.limit ?? 50
  // Explicit domains take precedence over the limit-based "oldest N" mode.
  const body = domains.length > 0 ? { domains } : { limit }
  // Progress note goes to stderr so stdout stays clean for machine consumers.
  process.stderr.write(`${DIM}queuing ${domains.length > 0 ? domains.join(', ') : `up to ${limit} oldest`} for recrawl...${R}\n`)

  const d = await apiPost("/crawl/recrawl", body, apiKey)
  if (d.error) { console.error(`${RED}Error: ${d.error}${R}`); process.exit(1) }
  console.log(`\n ${BOLD}Queued!${R} ${d.queued} sites scheduled for recrawl.`)
  console.log(` ${DIM}Run: npx @mdvp/cli hire --tabs=4 to start crawling${R}\n`)
}
|
|
410
|
+
|
|
394
411
|
async function cmdSubmit(domain, opts) {
|
|
395
412
|
const { apiKey, local } = opts
|
|
396
413
|
domain = parseDomain(domain)
|
|
@@ -452,6 +469,9 @@ async function main() {
|
|
|
452
469
|
const d = await apiGet("/dataset/stats")
|
|
453
470
|
if (opts.json) { console.log(JSON.stringify(d, null, 2)); return }
|
|
454
471
|
console.log(`\n Total sites: ${d.totalSites}\n Average score: ${d.averageScore}\n`)
|
|
472
|
+
} else if (cmd === "recrawl") {
|
|
473
|
+
const domains = positional.slice(1) // optional: npx @mdvp/cli recrawl linear.app vercel.com
|
|
474
|
+
await cmdRecrawl({ ...opts, limit: parseInt(flagValues.limit || "50") || 50 }, domains)
|
|
455
475
|
} else if (cmd === "hire" || cmd === "apply" || cmd === "serve") {
|
|
456
476
|
await cmdHire(opts)
|
|
457
477
|
} else if (cmd === "label" && arg1) {
|