@mdvp/cli 1.5.0 → 1.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/cli.mjs +25 -21
  2. package/package.json +3 -3
package/cli.mjs CHANGED
@@ -84,11 +84,23 @@ function apiPost(path, data, apiKey, baseUrl = API) {
84
84
  async function cmdAudit(domain, { json }) {
85
85
  domain = parseDomain(domain)
86
86
  process.stderr.write(`${DIM}fetching ${domain}...${R}\n`)
87
- const data = await apiGet(`/dataset?limit=800`)
88
- const site = (data.sites ?? []).find((s) => s.id === domain)
87
+
88
+ // Try direct lookup first, fall back to full scan
89
+ let site = null
90
+ try {
91
+ const direct = await apiGet(`/dataset/${domain}`)
92
+ if (direct && direct.id) site = direct
93
+ } catch (_) {}
94
+
95
+ if (!site) {
96
+ const data = await apiGet(`/dataset?limit=1000`)
97
+ site = (data.sites ?? []).find((s) => s.id === domain) ?? null
98
+ }
99
+
89
100
  if (!site) {
90
101
  if (json) { console.log(JSON.stringify({ error: "not_in_dataset", domain }, null, 2)); process.exit(1) }
91
- console.error(`${RED}not in dataset: ${domain}${R}\n${DIM}run: npx mdvp submit ${domain}${R}`)
102
+ console.error(`${RED}not in dataset: ${domain}${R}`)
103
+ console.error(`${DIM}submit for crawl: npx @mdvp/cli submit ${domain}${R}`)
92
104
  process.exit(1)
93
105
  }
94
106
  const bd = site.scores?.breakdown ?? []
@@ -237,29 +249,21 @@ async function cmdHire(opts) {
237
249
  const workerUrl = `${API}/crawler-worker.mjs`
238
250
  const extractUrl = `${API}/extract.js`
239
251
 
240
- await new Promise((res, rej) => {
241
- const { writeFileSync } = { writeFileSync }
242
- get(workerUrl, { headers: { Accept: "text/plain" } }, (r) => {
252
+ const download = (url, dest) => new Promise((res, rej) => {
253
+ const { get: g } = pickModule(url)
254
+ g(url, { headers: { Accept: "text/plain" } }, (r) => {
243
255
  let body = ""
244
256
  r.on("data", (c) => (body += c))
245
- r.on("end", () => {
246
- writeFileSync(`${dir}/crawler-worker.mjs`, body)
247
- res()
248
- })
257
+ r.on("end", () => { try { writeFileSync(dest, body); res() } catch { rej(new Error("write failed")) } })
249
258
  }).on("error", rej)
250
- }).catch(() => process.stderr.write(`${DIM}Could not download worker, using local copy${R}\n`))
251
-
252
- await new Promise((res) => {
253
- get(extractUrl, { headers: { Accept: "text/plain" } }, (r) => {
254
- let body = ""
255
- r.on("data", (c) => (body += c))
256
- r.on("end", () => {
257
- try { writeFileSync(`${dir}/extract.js`, body) } catch {}
258
- res()
259
- })
260
- }).on("error", () => res())
261
259
  })
262
260
 
261
+ await download(workerUrl, `${dir}/crawler-worker.mjs`)
262
+ .catch(() => process.stderr.write(`${DIM}Could not download worker${R}\n`))
263
+
264
+ await download(extractUrl, `${dir}/extract.js`)
265
+ .catch(() => {})
266
+
263
267
  if (!existsSync(`${dir}/package.json`)) {
264
268
  writeFileSync(`${dir}/package.json`, '{"type":"module"}')
265
269
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@mdvp/cli",
3
- "version": "1.5.0",
4
- "description": "Machine Design Vision Protocol \u2014 gives agents eyes to understand design quality",
3
+ "version": "1.7.0",
4
+ "description": "Machine Design Vision Protocol gives agents eyes to understand design quality",
5
5
  "bin": {
6
6
  "mdvp": "./cli.mjs"
7
7
  },
@@ -23,4 +23,4 @@
23
23
  },
24
24
  "homepage": "https://mdvp.tixo-digital.workers.dev",
25
25
  "license": "MIT"
26
- }
26
+ }