@mdvp/cli 1.4.0 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/cli.mjs +39 -35
  2. package/package.json +3 -3
package/cli.mjs CHANGED
@@ -221,13 +221,15 @@ ${BOLD}Docs${R} https://mdvp.tixo-digital.workers.dev/docs
221
221
  `
222
222
 
223
223
  async function cmdHire(opts) {
224
- const { daemon, tabs, local } = opts
225
- const { execSync, spawn } = await import("child_process")
224
+ const { daemon, tabs, local, _url, _once } = opts
225
+ const { spawn } = await import("child_process")
226
226
  const { existsSync } = await import("fs")
227
227
  const dir = `${homedir()}/.mdvp/crawler`
228
228
 
229
- console.log(`${DIM}${ASCII}${R}\n`)
230
- console.log(`${BOLD}Hiring as crawler node...${R}`)
229
+ if (!_once) {
230
+ console.log(`${DIM}${ASCII}${R}\n`)
231
+ console.log(`${BOLD}Hiring as crawler node...${R}`)
232
+ }
231
233
 
232
234
  process.stderr.write(`${DIM}Downloading worker from ${API}...${R}\n`)
233
235
  mkdirSync(dir, { recursive: true })
@@ -235,44 +237,42 @@ async function cmdHire(opts) {
235
237
  const workerUrl = `${API}/crawler-worker.mjs`
236
238
  const extractUrl = `${API}/extract.js`
237
239
 
238
- await new Promise((res, rej) => {
239
- const { writeFileSync } = { writeFileSync }
240
- get(workerUrl, { headers: { Accept: "text/plain" } }, (r) => {
240
+ const download = (url, dest) => new Promise((res, rej) => {
241
+ const { get: g } = pickModule(url)
242
+ g(url, { headers: { Accept: "text/plain" } }, (r) => {
241
243
  let body = ""
242
244
  r.on("data", (c) => (body += c))
243
- r.on("end", () => {
244
- writeFileSync(`${dir}/crawler-worker.mjs`, body)
245
- res()
246
- })
245
+ r.on("end", () => { try { writeFileSync(dest, body); res() } catch { rej(new Error("write failed")) } })
247
246
  }).on("error", rej)
248
- }).catch(() => process.stderr.write(`${DIM}Could not download worker, using local copy${R}\n`))
249
-
250
- await new Promise((res) => {
251
- get(extractUrl, { headers: { Accept: "text/plain" } }, (r) => {
252
- let body = ""
253
- r.on("data", (c) => (body += c))
254
- r.on("end", () => {
255
- try { writeFileSync(`${dir}/extract.js`, body) } catch {}
256
- res()
257
- })
258
- }).on("error", () => res())
259
247
  })
260
248
 
249
+ await download(workerUrl, `${dir}/crawler-worker.mjs`)
250
+ .catch(() => process.stderr.write(`${DIM}Could not download worker${R}\n`))
251
+
252
+ await download(extractUrl, `${dir}/extract.js`)
253
+ .catch(() => {})
254
+
261
255
  if (!existsSync(`${dir}/package.json`)) {
262
256
  writeFileSync(`${dir}/package.json`, '{"type":"module"}')
263
257
  }
264
258
 
265
259
  const apiUrl = local ? "http://localhost:7227" : API
266
260
  const nodeId = `mdvp-${Math.random().toString(36).slice(2, 8)}`
267
- const env = { ...process.env, NODE_ID: nodeId, TABS: String(tabs || 2), API_URL: apiUrl }
261
+ const env = { ...process.env, NODE_ID: nodeId, TABS: String(tabs || 2), API_URL: apiUrl, ...(_url ? { CRAWL_ONCE: _url } : {}) }
262
+
263
+ if (_once && _url) {
264
+ process.stderr.write(`${DIM}running local crawl for ${_url}...${R}\n`)
265
+ const child = spawn("node", [`${dir}/crawler-worker.mjs`], { env, cwd: dir, stdio: "inherit" })
266
+ await new Promise((res) => child.on("exit", res))
267
+ console.log(`\n ${BOLD}Done!${R} Check: npx @mdvp/cli audit ${_url.replace(/^https?:\/\//, "")}`)
268
+ return
269
+ }
268
270
 
269
271
  if (daemon) {
270
272
  const { openSync } = await import("fs")
271
273
  const log = `${dir}/worker-${process.pid}.log`
272
274
  const out = openSync(log, "a")
273
- const child = spawn("node", [`${dir}/crawler-worker.mjs`], {
274
- env, cwd: dir, detached: true, stdio: ["ignore", out, out],
275
- })
275
+ const child = spawn("node", [`${dir}/crawler-worker.mjs`], { env, cwd: dir, detached: true, stdio: ["ignore", out, out] })
276
276
  child.unref()
277
277
  console.log(`${GREEN}Worker started in background${R}`)
278
278
  console.log(` PID: ${child.pid}`)
@@ -281,23 +281,27 @@ async function cmdHire(opts) {
281
281
  console.log(` Node: ${nodeId}`)
282
282
  } else {
283
283
  console.log(`${GREEN}Starting crawler node ${nodeId}${R} (Ctrl+C to stop)\n`)
284
- const child = spawn("node", [`${dir}/crawler-worker.mjs`], {
285
- env, cwd: dir, stdio: "inherit",
286
- })
284
+ const child = spawn("node", [`${dir}/crawler-worker.mjs`], { env, cwd: dir, stdio: "inherit" })
287
285
  await new Promise((res) => child.on("exit", res))
288
286
  }
289
287
  }
290
288
 
291
289
  async function cmdSubmit(domain, opts) {
292
290
  const { apiKey, local } = opts
293
- if (!apiKey) { console.error(`${RED}No API key. Run: npx mdvp login${R}`); process.exit(1) }
294
291
  domain = parseDomain(domain)
295
- const targetApi = local ? "http://localhost:7227" : API
296
- process.stderr.write(`${DIM}submitting ${domain} → ${local ? "local" : "global"} crawler...${R}\n`)
297
- const d = await apiPost("/crawl/submit", { domain, url: `https://${domain}` }, apiKey, targetApi)
292
+
293
+ if (local) {
294
+ process.stderr.write(`${DIM}crawling ${domain} locally...${R}\n`)
295
+ await cmdHire({ ...opts, domain, daemon: false, _url: `https://${domain}`, _once: true })
296
+ return
297
+ }
298
+
299
+ if (!apiKey) { console.error(`${RED}No API key. Run: npx @mdvp/cli login${R}`); process.exit(1) }
300
+ process.stderr.write(`${DIM}submitting ${domain} → global crawler queue...${R}\n`)
301
+ const d = await apiPost("/crawl/submit", { domain, url: `https://${domain}` }, apiKey)
298
302
  if (d.error) { console.error(`${RED}Error: ${d.error}${R}`); process.exit(1) }
299
- console.log(`\n ${BOLD}Submitted!${R} ${domain} added to crawl queue.`)
300
- console.log(` ${DIM}Results in ~60s. Check: npx mdvp audit ${domain}${R}\n`)
303
+ console.log(`\n ${BOLD}Submitted!${R} ${domain} queued for crawl.`)
304
+ console.log(` ${DIM}Results in ~60s. Check: npx @mdvp/cli audit ${domain}${R}\n`)
301
305
  }
302
306
 
303
307
  async function main() {
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@mdvp/cli",
3
- "version": "1.4.0",
4
- "description": "Machine Design Vision Protocol \u2014 gives agents eyes to understand design quality",
3
+ "version": "1.6.0",
4
+ "description": "Machine Design Vision Protocol gives agents eyes to understand design quality",
5
5
  "bin": {
6
6
  "mdvp": "./cli.mjs"
7
7
  },
@@ -23,4 +23,4 @@
23
23
  },
24
24
  "homepage": "https://mdvp.tixo-digital.workers.dev",
25
25
  "license": "MIT"
26
- }
26
+ }