@mdvp/cli 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +349 -0
- package/package.json +26 -0
package/cli.mjs
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { homedir } from "os"
|
|
3
|
+
import { readFileSync, writeFileSync, mkdirSync } from "fs"
|
|
4
|
+
import { get, request } from "https"
|
|
5
|
+
import { createInterface } from "readline"
|
|
6
|
+
|
|
7
|
+
// Remote scoring API plus on-disk config locations.
const API = "https://designsense.tixo-digital.workers.dev"
const CONFIG_DIR = `${homedir()}/.mdvp`
const CONFIG_FILE = `${CONFIG_DIR}/config.json`
// Displayed in the help banner. Keep in sync with package.json "version"
// (it had drifted: package.json said 1.3.0 while this said 1.2.0).
const VERSION = "1.3.0"
|
|
11
|
+
|
|
12
|
+
// Display names for the scoring categories returned by the API.
// Keys mirror the API's category codes verbatim ("contentDepth" is
// camelCase upstream while the rest are snake_case — do not "fix" it,
// it must match the `c` field in score breakdowns). Insertion order
// determines the order categories are printed in audit/compare output.
const CATS = {
  spacing: "Spacing",
  typography: "Typography",
  color: "Color",
  components: "Components",
  modernity: "Modernity",
  originality: "Originality",
  html_quality: "HTML Quality",
  visual_polish: "Visual Polish",
  sophistication: "Sophistication",
  readability: "Readability",
  ux_patterns: "UX Patterns",
  contentDepth: "Content Depth",
}
|
|
19
|
+
|
|
20
|
+
// ANSI terminal styling codes.
const R = "\x1b[0m" // reset
const DIM = "\x1b[2m"
const BOLD = "\x1b[1m"
const RED = "\x1b[31m"
const GREEN = "\x1b[32m"
const YELLOW = "\x1b[33m"

// Color for a 0-100 score: green >= 80, yellow >= 60, red below.
const scoreColor = (s) => s >= 80 ? GREEN : s >= 60 ? YELLOW : RED

// Ten-character bar chart for a 0-100 score. The fill count is clamped to
// [0, 10] so an out-of-range score cannot produce a negative repeat count
// ("░".repeat(-1) throws a RangeError in the unclamped version).
const bar = (s) => {
  const filled = Math.min(10, Math.max(0, Math.round(s / 10)))
  return "█".repeat(filled) + "░".repeat(10 - filled)
}

// Normalize user input ("https://www.foo.com/bar") to a bare domain ("foo.com").
const parseDomain = (s) => s.replace(/^https?:\/\//, "").replace(/^www\./, "").replace(/\/.*$/, "")
|
|
30
|
+
|
|
31
|
+
// Read the persisted CLI config from ~/.mdvp/config.json.
// A missing or unparsable file deliberately yields an empty config
// (first run, or a corrupted file, is not an error).
function loadConfig() {
  try {
    const raw = readFileSync(CONFIG_FILE, "utf8")
    return JSON.parse(raw)
  } catch {
    return {}
  }
}
|
|
34
|
+
|
|
35
|
+
// Persist CLI config as pretty-printed JSON, creating ~/.mdvp if needed.
function saveConfig(data) {
  mkdirSync(CONFIG_DIR, { recursive: true })
  const serialized = JSON.stringify(data, null, 2)
  writeFileSync(CONFIG_FILE, serialized)
}
|
|
39
|
+
|
|
40
|
+
// GET `${API}${path}` and resolve with the parsed JSON body.
// Rejects on network errors or when the response body is not valid JSON.
function apiGet(path) {
  return new Promise((resolve, reject) => {
    const handleResponse = (res) => {
      const chunks = []
      res.on("data", (chunk) => chunks.push(chunk))
      res.on("end", () => {
        const body = chunks.join("")
        try {
          resolve(JSON.parse(body))
        } catch {
          reject(new Error(`Invalid JSON: ${body.slice(0, 200)}`))
        }
      })
    }
    get(`${API}${path}`, { headers: { Accept: "application/json" } }, handleResponse)
      .on("error", reject)
  })
}
|
|
52
|
+
|
|
53
|
+
// POST JSON `data` to `${baseUrl}${path}`, optionally authenticated via
// an x-api-key header. Resolves with the parsed JSON response; rejects on
// network errors or a non-JSON body. `baseUrl` defaults to the global API
// but can point at a local crawler node.
function apiPost(path, data, apiKey, baseUrl = API) {
  return new Promise((resolve, reject) => {
    const payload = JSON.stringify(data)
    const headers = {
      "Content-Type": "application/json",
      Accept: "application/json",
    }
    if (apiKey) headers["x-api-key"] = apiKey

    const handleResponse = (res) => {
      const chunks = []
      res.on("data", (chunk) => chunks.push(chunk))
      res.on("end", () => {
        const text = chunks.join("")
        try {
          resolve(JSON.parse(text))
        } catch {
          reject(new Error(`Invalid JSON: ${text.slice(0, 200)}`))
        }
      })
    }

    const req = request(`${baseUrl}${path}`, { method: "POST", headers }, handleResponse)
    req.on("error", reject)
    req.write(payload)
    req.end()
  })
}
|
|
76
|
+
|
|
77
|
+
// `mdvp audit <domain>`: look the domain up in the public dataset and
// print its grade plus per-category score bars (or a JSON blob with --json).
// Exits 1 when the domain has not been crawled into the dataset yet.
async function cmdAudit(domain, { json }) {
  domain = parseDomain(domain)
  process.stderr.write(`${DIM}fetching ${domain}...${R}\n`)

  const data = await apiGet(`/dataset?limit=800`)
  const site = (data.sites ?? []).find((s) => s.id === domain)

  if (!site) {
    if (json) {
      console.log(JSON.stringify({ error: "not_in_dataset", domain }, null, 2))
      process.exit(1)
    }
    console.error(`${RED}not in dataset: ${domain}${R}\n${DIM}run: npx mdvp submit ${domain}${R}`)
    process.exit(1)
  }

  const breakdown = site.scores?.breakdown ?? []

  if (json) {
    const payload = {
      id: site.id,
      url: site.url,
      grade: site.grade,
      overall_score: site.overall_score,
      label: site.label,
      scores: {
        overall: site.overall_score,
        grade: site.grade,
        breakdown: Object.fromEntries(breakdown.map((b) => [b.c, b.s])),
      },
    }
    console.log(JSON.stringify(payload, null, 2))
    return
  }

  console.log(`\n${BOLD}${site.id}${R} ${scoreColor(site.overall_score)}${site.grade} ${site.overall_score}/100${R} ${DIM}${site.label}${R}\n`)
  for (const cat of Object.keys(CATS)) {
    const score = breakdown.find((b) => b.c === cat)?.s ?? 0
    console.log(` ${CATS[cat].padEnd(16)} ${scoreColor(score)}${bar(score)}${R} ${score}`)
  }

  // Call out the three weakest categories.
  const weakest = [...breakdown].sort((a, b) => a.s - b.s).slice(0, 3)
  console.log(`\n${DIM}Lowest: ${weakest.map((i) => `${CATS[i.c] ?? i.c} (${i.s})`).join(" · ")}${R}\n`)
}
|
|
104
|
+
|
|
105
|
+
// `mdvp compare <a> <b>`: print both sites' overall and per-category
// scores side by side with the delta (b minus a). Exits 1 when either
// domain is missing from the dataset.
async function cmdCompare(da, db) {
  // Normalize both inputs to bare domains.
  ;[da, db] = [da, db].map(parseDomain)
  const sites = (await apiGet(`/dataset?limit=800`)).sites ?? []
  const a = sites.find((s) => s.id === da)
  const b = sites.find((s) => s.id === db)
  if (!a) { console.error(`${RED}not found: ${da}${R}`); process.exit(1) }
  if (!b) { console.error(`${RED}not found: ${db}${R}`); process.exit(1) }
  // Re-key each breakdown array ([{c, s}, ...]) as { category: score }.
  const bda = Object.fromEntries((a.scores?.breakdown ?? []).map((x) => [x.c, x.s]))
  const bdb = Object.fromEntries((b.scores?.breakdown ?? []).map((x) => [x.c, x.s]))
  console.log(`\n ${"Category".padEnd(16)} ${da.slice(0, 14).padEnd(14)} ${db.slice(0, 14).padEnd(14)} Δ`)
  console.log(` ${"─".repeat(16)} ${"─".repeat(14)} ${"─".repeat(14)} ─────`)
  console.log(` ${"Overall".padEnd(16)} ${String(a.overall_score).padEnd(14)} ${String(b.overall_score).padEnd(14)} ${b.overall_score - a.overall_score > 0 ? "+" : ""}${b.overall_score - a.overall_score}`)
  for (const cat of Object.keys(CATS)) {
    const va = bda[cat] ?? 0, vb = bdb[cat] ?? 0, diff = vb - va
    // Color the delta: green when b is >5 better, red when >5 worse, dim otherwise.
    const c = diff > 5 ? GREEN : diff < -5 ? RED : DIM
    console.log(` ${(CATS[cat] ?? cat).padEnd(16)} ${String(va).padEnd(14)} ${String(vb).padEnd(14)} ${c}${diff > 0 ? "+" : ""}${diff}${R}`)
  }
  console.log()
}
|
|
124
|
+
|
|
125
|
+
// `mdvp top [n]` / `mdvp worst [n]`: print the n highest- (or, with
// `worst`, lowest-) scored sites in the dataset as a table.
async function cmdTop(n, worst) {
  const sites = (await apiGet(`/dataset?limit=800`)).sites ?? []
  const byScore = worst
    ? (a, b) => a.overall_score - b.overall_score
    : (a, b) => b.overall_score - a.overall_score
  const ranked = sites.sort(byScore).slice(0, n)
  console.log(`\n ${"#".padEnd(4)} ${"Domain".padEnd(28)} ${"Score".padEnd(6)} Grade Label`)
  console.log(` ${"─".repeat(4)} ${"─".repeat(28)} ${"─".repeat(6)} ───── ─────────`)
  for (const [rank, site] of ranked.entries()) {
    console.log(` ${String(rank + 1).padEnd(4)} ${site.id.padEnd(28)} ${scoreColor(site.overall_score)}${String(site.overall_score).padEnd(6)}${R} ${site.grade.padEnd(5)} ${site.label}`)
  }
  console.log()
}
|
|
135
|
+
|
|
136
|
+
// `mdvp login`: prompt for an API key on stdin and persist it to
// ~/.mdvp/config.json. Valid keys start with "ds_"; anything else exits 1.
async function cmdLogin() {
  const rl = createInterface({ input: process.stdin, output: process.stdout })
  return new Promise((resolve) => {
    const prompt = `${BOLD}MDVP API Key${R} (from mdvp.tixo-digital.workers.dev): `
    rl.question(prompt, (answer) => {
      rl.close()
      const key = answer.trim()
      if (!key.startsWith("ds_")) {
        console.error(`${RED}Invalid key — must start with ds_${R}`)
        process.exit(1)
      }
      saveConfig({ apiKey: key })
      console.log(`${DIM}Saved to ~/.mdvp/config.json${R}\n${BOLD}Logged in.${R} Try: npx mdvp audit stripe.com`)
      resolve()
    })
  })
}
|
|
149
|
+
|
|
150
|
+
// `mdvp balance`: show the credit balance for the saved API key.
// Requires a prior `mdvp login`; exits 1 when no key is configured.
async function cmdBalance({ json, apiKey }) {
  if (!apiKey) { console.error(`${RED}No API key. Run: npx mdvp login${R}`); process.exit(1) }
  const d = await new Promise((resolve, reject) => {
    get(`${API}/token/balance`, { headers: { Accept: "application/json", "x-api-key": apiKey } }, (res) => {
      let body = ""
      res.on("data", (c) => (body += c))
      // Parse inside try/catch: a throw inside the "end" listener escapes
      // the Promise executor and becomes an uncaught exception, crashing
      // the CLI instead of surfacing cleanly through main().catch.
      res.on("end", () => {
        try { resolve(JSON.parse(body)) }
        catch { reject(new Error(`Invalid JSON: ${body.slice(0, 200)}`)) }
      })
    }).on("error", reject)
  })
  if (json) { console.log(JSON.stringify(d, null, 2)); return }
  console.log(`\n Key: ${d.token}\n Balance: $${d.balance_usd}\n Credits: ${d.credits_remaining} audits remaining\n`)
}
|
|
162
|
+
|
|
163
|
+
// "MDVP" banner printed by the help screen and the hire command.
const ASCII = `
███╗ ███╗██████╗ ██╗ ██╗██████╗
████╗ ████║██╔══██╗██║ ██║██╔══██╗
██╔████╔██║██║ ██║██║ ██║██████╔╝
██║╚██╔╝██║██║ ██║╚██╗ ██╔╝██╔═══╝
██║ ╚═╝ ██║██████╔╝ ╚████╔╝ ██║
╚═╝ ╚═╝╚═════╝ ╚═══╝ ╚═╝`

// Usage text, shown for `mdvp help` / `-h` / no command.
const HELP = `${DIM}${ASCII}${R}
${DIM} Machine Design Vision Protocol v${VERSION}${R}

${BOLD}Audit${R}
 audit <domain> Score a website
 audit <domain> --json Output as JSON
 compare <a> <b> Compare two sites side by side
 top [n] Top-scored sites (default 10)
 worst [n] Lowest-scored sites
 label <label> Filter by label (premium/good/vibecoded/bad)
 stats Dataset statistics

${BOLD}Account${R}
 login Save your API key
 balance Check credit balance
 submit <domain> Submit URL for crawl (1 credit)
 submit <domain> --local Submit to local crawler node instead

${BOLD}Crawler${R}
 hire Become a crawler node (downloads + runs worker)
 hire --daemon Run crawler in background
 hire --tabs=4 Run with 4 parallel tabs
 apply Same as hire
 serve Same as hire (global by default)
 serve --local Run local-only crawler node

${BOLD}Flags${R}
 --json Output as JSON
 --local Use local crawler (http://localhost:7227)
 --daemon -d Run in background (hire/serve only)
 --tabs=N Parallel tabs for crawler

${BOLD}Examples${R}
 npx mdvp audit stripe.com
 npx mdvp audit stripe.com --json | jq .overall_score
 npx mdvp compare figma.com linear.app
 npx mdvp submit myapp.com
 npx mdvp submit myapp.com --local
 npx mdvp hire --daemon --tabs=4
 curl -fsSL ${API}/hire.sh | bash -s -- -d

${BOLD}Web${R} https://mdvp.tixo-digital.workers.dev
${BOLD}Docs${R} https://mdvp.tixo-digital.workers.dev/docs
`
|
|
215
|
+
|
|
216
|
+
// `mdvp hire` / `apply` / `serve`: download the crawler worker script from
// the API into ~/.mdvp/crawler and run it as a node, either in the
// foreground or (with --daemon) detached in the background.
//
// SECURITY NOTE(review): this command downloads and executes remote code
// from the API host — that is its stated purpose, but review accordingly.
async function cmdHire(opts) {
  const { daemon, tabs, local } = opts
  const { spawn } = await import("child_process") // execSync was imported but never used
  const { existsSync } = await import("fs")
  const dir = `${homedir()}/.mdvp/crawler`

  console.log(`${DIM}${ASCII}${R}\n`)
  console.log(`${BOLD}Hiring as crawler node...${R}`)

  process.stderr.write(`${DIM}Downloading worker from ${API}...${R}\n`)
  mkdirSync(dir, { recursive: true })

  const workerUrl = `${API}/crawler-worker.mjs`
  const extractUrl = `${API}/extract.js`

  // Fetch the worker script. BUG FIX: a previous version opened with
  // `const { writeFileSync } = { writeFileSync }` — a self-referencing
  // temporal-dead-zone ReferenceError that rejected this promise on every
  // run, so the worker was never actually downloaded (the failure was
  // swallowed by the .catch below). The top-level fs import is used instead.
  await new Promise((res, rej) => {
    get(workerUrl, { headers: { Accept: "text/plain" } }, (r) => {
      let body = ""
      r.on("data", (c) => (body += c))
      r.on("end", () => {
        writeFileSync(`${dir}/crawler-worker.mjs`, body)
        res()
      })
    }).on("error", rej)
  }).catch(() => process.stderr.write(`${DIM}Could not download worker, using local copy${R}\n`))

  // Best-effort fetch of the extraction helper; failures are ignored.
  await new Promise((res) => {
    get(extractUrl, { headers: { Accept: "text/plain" } }, (r) => {
      let body = ""
      r.on("data", (c) => (body += c))
      r.on("end", () => {
        try { writeFileSync(`${dir}/extract.js`, body) } catch {}
        res()
      })
    }).on("error", () => res())
  })

  // The worker is an ES module; mark the directory as ESM for node.
  if (!existsSync(`${dir}/package.json`)) {
    writeFileSync(`${dir}/package.json`, '{"type":"module"}')
  }

  const apiUrl = local ? "http://localhost:7227" : API
  const nodeId = `mdvp-${Math.random().toString(36).slice(2, 8)}`
  const env = { ...process.env, NODE_ID: nodeId, TABS: String(tabs || 2), API_URL: apiUrl }

  if (daemon) {
    // Detached background mode: log to a file and let the parent exit.
    const { openSync } = await import("fs")
    const log = `${dir}/worker-${process.pid}.log`
    const out = openSync(log, "a")
    const child = spawn("node", [`${dir}/crawler-worker.mjs`], {
      env, cwd: dir, detached: true, stdio: ["ignore", out, out],
    })
    child.unref()
    console.log(`${GREEN}Worker started in background${R}`)
    console.log(` PID: ${child.pid}`)
    console.log(` Log: ${log}`)
    console.log(` Stop: kill ${child.pid}`)
    console.log(` Node: ${nodeId}`)
  } else {
    // Foreground mode: inherit stdio and wait until the worker exits.
    console.log(`${GREEN}Starting crawler node ${nodeId}${R} (Ctrl+C to stop)\n`)
    const child = spawn("node", [`${dir}/crawler-worker.mjs`], {
      env, cwd: dir, stdio: "inherit",
    })
    await new Promise((res) => child.on("exit", res))
  }
}
|
|
283
|
+
|
|
284
|
+
// `mdvp submit <domain>`: queue a domain for crawling (costs one credit).
// With --local the request is sent to a crawler node on localhost instead
// of the global API. Requires a saved API key.
async function cmdSubmit(domain, opts) {
  const { apiKey, local } = opts
  if (!apiKey) { console.error(`${RED}No API key. Run: npx mdvp login${R}`); process.exit(1) }
  domain = parseDomain(domain)
  const targetApi = local ? "http://localhost:7227" : API
  process.stderr.write(`${DIM}submitting ${domain} → ${local ? "local" : "global"} crawler...${R}\n`)
  const result = await apiPost("/crawl/submit", { domain, url: `https://${domain}` }, apiKey, targetApi)
  if (result.error) {
    console.error(`${RED}Error: ${result.error}${R}`)
    process.exit(1)
  }
  console.log(`\n ${BOLD}Submitted!${R} ${domain} added to crawl queue.`)
  console.log(` ${DIM}Results in ~60s. Check: npx mdvp audit ${domain}${R}\n`)
}
|
|
295
|
+
|
|
296
|
+
// CLI entry point: split argv into flags, --key=value pairs, and
// positional args; build the shared options object; dispatch to a command.
async function main() {
  const argv = process.argv.slice(2)
  // Treat anything starting with "-" as a flag. BUG FIX: the previous
  // filter only matched "--", so documented short flags (-d, -h) were
  // never detected and leaked into the positional argument list.
  const flags = new Set(argv.filter((a) => a.startsWith("-") && !a.includes("=")))
  const flagValues = Object.fromEntries(argv.filter((a) => a.includes("=")).map((a) => a.replace("--", "").split("=")))
  const positional = argv.filter((a) => !a.startsWith("-"))
  const [cmd, arg1, arg2] = positional
  const cfg = loadConfig()
  const opts = {
    json: flags.has("--json") || flags.has("--raw"),
    apiKey: cfg.apiKey ?? null, // set via `mdvp login`
    local: flags.has("--local"),
    daemon: flags.has("--daemon") || flags.has("-d"),
    tabs: parseInt(flagValues.tabs || "2", 10) || 2,
  }

  // Help flags now live in `flags`, not `positional`.
  if (!cmd || cmd === "help" || flags.has("--help") || flags.has("-h")) {
    console.log(HELP)
  } else if (cmd === "login") {
    await cmdLogin()
  } else if (cmd === "audit" && arg1) {
    await cmdAudit(arg1, opts)
  } else if (cmd === "submit" && arg1) {
    await cmdSubmit(arg1, opts)
  } else if (cmd === "balance") {
    await cmdBalance(opts)
  } else if (cmd === "compare" && arg1 && arg2) {
    await cmdCompare(arg1, arg2)
  } else if (cmd === "top") {
    await cmdTop(parseInt(arg1 || "10", 10) || 10, false)
  } else if (cmd === "worst") {
    await cmdTop(parseInt(arg1 || "10", 10) || 10, true)
  } else if (cmd === "stats") {
    const d = await apiGet("/dataset/stats")
    if (opts.json) { console.log(JSON.stringify(d, null, 2)); return }
    console.log(`\n Total sites: ${d.totalSites}\n Average score: ${d.averageScore}\n`)
  } else if (cmd === "hire" || cmd === "apply" || cmd === "serve") {
    await cmdHire(opts)
  } else if (cmd === "label" && arg1) {
    // Inline: list up to 20 sites carrying the given label, best first.
    const sites = (await apiGet(`/dataset?limit=800`)).sites ?? []
    const filtered = sites.filter((s) => s.label === arg1).sort((a, b) => b.overall_score - a.overall_score).slice(0, 20)
    if (!filtered.length) { console.error(`No sites with label: ${arg1}`); process.exit(1) }
    console.log(`\n ${filtered.length} sites · label=${arg1}\n`)
    for (const s of filtered) console.log(` ${s.id.padEnd(28)} ${scoreColor(s.overall_score)}${s.overall_score}${R} ${s.grade}`)
    console.log()
  } else {
    console.error(`Unknown command: ${cmd}\nRun 'npx mdvp help' for usage.`)
    process.exit(1)
  }
}
|
|
345
|
+
|
|
346
|
+
// Top-level runner: any rejection bubbling out of a command (network
// failure, bad JSON, etc.) is reported as a one-line error, exiting 1.
main().catch((err) => {
  console.error(`${RED}Error: ${err.message}${R}`)
  process.exit(1)
})
|
package/package.json
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@mdvp/cli",
|
|
3
|
+
"version": "1.3.0",
|
|
4
|
+
"description": "Machine Design Vision Protocol \u2014 gives agents eyes to understand design quality",
|
|
5
|
+
"bin": {
|
|
6
|
+
"mdvp": "./cli.mjs"
|
|
7
|
+
},
|
|
8
|
+
"type": "module",
|
|
9
|
+
"engines": {
|
|
10
|
+
"node": ">=18"
|
|
11
|
+
},
|
|
12
|
+
"keywords": [
|
|
13
|
+
"design",
|
|
14
|
+
"audit",
|
|
15
|
+
"cli",
|
|
16
|
+
"ux",
|
|
17
|
+
"mdvp",
|
|
18
|
+
"machine-design"
|
|
19
|
+
],
|
|
20
|
+
"repository": {
|
|
21
|
+
"type": "git",
|
|
22
|
+
"url": "git+https://github.com/tixo/mdvp.git"
|
|
23
|
+
},
|
|
24
|
+
"homepage": "https://mdvp.tixo-digital.workers.dev",
|
|
25
|
+
"license": "MIT"
|
|
26
|
+
}
|