start-vibing 4.3.0 → 4.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. package/package.json +2 -2
  2. package/template/.claude/agents/sd-audit.md +121 -2
  3. package/template/.claude/agents/sd-fix.md +11 -0
  4. package/template/.claude/agents/sd-research.md +49 -2
  5. package/template/.claude/skills/super-design/.schema-version +1 -0
  6. package/template/.claude/skills/super-design/SKILL.md +94 -2
  7. package/template/.claude/skills/super-design/audit-state.schema.json +226 -0
  8. package/template/.claude/skills/super-design/references/audit-methodology.md +118 -0
  9. package/template/.claude/skills/super-design/references/design-intelligence-rubric.md +92 -11
  10. package/template/.claude/skills/super-design/references/design-skills-catalog.md +31 -0
  11. package/template/.claude/skills/super-design/scripts/build-import-graph.sh +208 -0
  12. package/template/.claude/skills/super-design/scripts/detect-apps.sh +180 -0
  13. package/template/.claude/skills/super-design/scripts/detect-changes.sh +177 -21
  14. package/template/.claude/skills/super-design/scripts/discover-routes.sh +120 -6
  15. package/template/.claude/skills/super-design/scripts/extract-tokens.mjs +165 -4
  16. package/template/.claude/skills/super-design/scripts/hash-pages.sh +209 -23
  17. package/template/.claude/skills/super-design/scripts/setup-git-notes.sh +21 -0
  18. package/template/.claude/skills/super-design/scripts/validate-state.sh +74 -11
  19. package/template/.claude/skills/super-design/scripts/verify-audit.sh +62 -9
  20. package/template/.claude/skills/super-design/scripts/visual-regression.sh +275 -0
  21. package/template/.claude/skills/super-design/scripts/write-state.sh +53 -0
  22. package/template/.claude/skills/super-design/templates/audit-state.schema.json +0 -57

package/template/.claude/skills/super-design/scripts/extract-tokens.mjs
@@ -1,10 +1,18 @@
 #!/usr/bin/env node
+// Extract + canonicalize + hash design tokens from Tailwind configs, CSS custom
+// properties, and DTCG / Tokens Studio JSON (artifact §9.1 lines 707-709, §9.2
+// lines 720-761). Output is deterministic regardless of insertion order
+// (artifact §9.2 line 722: "canonical = JSON.stringify(theme,
+// Object.keys(theme).sort())").
 import fs from "node:fs";
 import path from "node:path";
+import { createHash } from "node:crypto";
 import { pathToFileURL } from "node:url";
 
 const out = {};
+const sources = [];
 const tailwindConfigs = ["tailwind.config.ts", "tailwind.config.js", "tailwind.config.mjs", "tailwind.config.cjs"];
+let tailwindFound = false;
 for (const candidate of tailwindConfigs) {
   if (fs.existsSync(candidate)) {
     try {
@@ -12,10 +20,14 @@ for (const candidate of tailwindConfigs) {
       const cfg = (await import(pathToFileURL(path.resolve(candidate)).href)).default;
       const resolved = resolveConfig(cfg);
       flatten(resolved.theme, "tw", out);
+      tailwindFound = true;
     } catch (e) { out[`_error_${candidate}`] = String(e.message || e); }
     break;
   }
 }
+if (tailwindFound) sources.push("tailwind");
+
+let cssFound = false;
 try {
   const postcss = (await import("postcss")).default;
   const cssCandidates = ["styles/globals.css","src/styles/globals.css","app/globals.css","styles/theme.css","src/app/globals.css"];
@@ -23,19 +35,168 @@ try {
     if (!fs.existsSync(css)) continue;
     const source = fs.readFileSync(css, "utf8");
     const root = postcss.parse(source);
-    root.walkAtRules("theme", at => { at.walkDecls(/^--/, d => { out[`css:${d.prop}`] = d.value.trim(); }); });
-    root.walkRules(":root", rule => { rule.walkDecls(/^--/, d => { out[`css:${d.prop}`] = d.value.trim(); }); });
+    root.walkAtRules("theme", at => { at.walkDecls(/^--/, d => { out[`css:${d.prop}`] = d.value.trim(); cssFound = true; }); });
+    root.walkRules(":root", rule => { rule.walkDecls(/^--/, d => { out[`css:${d.prop}`] = d.value.trim(); cssFound = true; }); });
   }
 } catch (e) { out._postcss_error = String(e.message || e); }
+if (cssFound) sources.push("css-vars");
+
+// DTCG + Tokens Studio support (artifact §9.1 line 707-709, §9.2 line 747).
+const dtcgFiles = discoverDtcgFiles(".");
+let dtcgFound = false;
+for (const file of dtcgFiles) {
+  try {
+    const raw = fs.readFileSync(file, "utf8");
+    const parsed = JSON.parse(raw);
+    // Tokens Studio $metadata/$themes are not token trees — skip.
+    if (path.basename(file) === "$metadata.json" || path.basename(file) === "$themes.json") continue;
+    const resolved = resolveDtcgAliases(parsed, file);
+    const relFile = path.relative(".", file).replaceAll("\\", "/");
+    flattenDtcg(resolved, `dtcg:${relFile}`, out);
+    dtcgFound = true;
+  } catch (e) {
+    out[`_dtcg_error_${file}`] = String(e.message || e);
+  }
+}
+if (dtcgFound) sources.push("dtcg");
+
+// Deterministic canonical form: keys sorted top-to-bottom.
+const sorted = Object.keys(out).sort().reduce((acc, k) => { acc[k] = out[k]; return acc; }, {});
+const canonical = JSON.stringify(sorted);
+const tokens_hash = "sha256:" + createHash("sha256").update(canonical).digest("hex");
 
-console.log(JSON.stringify(out, null, 2));
+console.log(JSON.stringify({ tokens: sorted, tokens_hash, sources: sources.sort() }, null, 2));
 
 function flatten(obj, prefix, acc) {
   if (obj == null) return;
   if (typeof obj !== "object") { acc[prefix] = String(obj); return; }
-  for (const [k, v] of Object.entries(obj)) {
+  // Sort keys so hash is stable across JS engines / config formatters.
+  // Artifact §9.2 line 722 calls this out explicitly.
+  for (const k of Object.keys(obj).sort()) {
+    const v = obj[k];
     const key = `${prefix}.${k}`;
     if (v && typeof v === "object" && !Array.isArray(v)) flatten(v, key, acc);
     else acc[key] = Array.isArray(v) ? v.join(",") : String(v);
   }
 }
+
+// Walk cwd, returning **/*.tokens.json (excluding build/vendor dirs) plus the
+// two conventional Tokens Studio single-file exports.
+function discoverDtcgFiles(rootDir) {
+  const skipDirs = new Set(["node_modules", "dist", ".next", "build", ".git", "coverage", ".turbo", ".cache"]);
+  const found = [];
+  const walk = (dir) => {
+    let entries;
+    try { entries = fs.readdirSync(dir, { withFileTypes: true }); } catch { return; }
+    for (const entry of entries) {
+      if (entry.isDirectory()) {
+        if (skipDirs.has(entry.name) || entry.name.startsWith(".")) {
+          // Allow explicit `.tokens` dir used by some Tokens Studio setups.
+          if (entry.name !== ".tokens") continue;
+        }
+        walk(path.join(dir, entry.name));
+      } else if (entry.isFile()) {
+        const full = path.join(dir, entry.name);
+        if (entry.name.endsWith(".tokens.json")) found.push(full);
+        else if (entry.name === "tokens.json" && (dir === rootDir || path.basename(dir) === ".tokens")) found.push(full);
+      }
+    }
+  };
+  walk(rootDir);
+  return found.sort();
+}
+
+// Resolve DTCG aliases transitively. Supports both `{color.brand.primary}` and
+// Tokens Studio legacy `$color.brand.primary`. Cycle detection throws loudly.
+function resolveDtcgAliases(tree, filename) {
+  const resolving = new Set();
+  const lookup = (pathStr) => {
+    const segs = pathStr.split(".");
+    let node = tree;
+    for (const s of segs) {
+      if (node == null || typeof node !== "object") return undefined;
+      node = node[s];
+    }
+    return node;
+  };
+  const resolveValue = (value, trail) => {
+    if (typeof value !== "string") return value;
+    const braceMatch = value.match(/^\{([^}]+)\}$/);
+    const dollarMatch = value.match(/^\$([A-Za-z_][A-Za-z0-9_.-]*)$/);
+    const aliasPath = braceMatch ? braceMatch[1] : dollarMatch ? dollarMatch[1] : null;
+    if (!aliasPath) return value;
+    if (trail.has(aliasPath)) {
+      throw new Error(`DTCG alias cycle in ${filename}: ${[...trail, aliasPath].join(" -> ")}`);
+    }
+    const target = lookup(aliasPath);
+    if (target == null) return value; // dangling alias: pass through raw
+    const next = typeof target === "object" && target !== null && "$value" in target ? target.$value : target;
+    return resolveValue(next, new Set([...trail, aliasPath]));
+  };
+  const walk = (node) => {
+    if (node == null || typeof node !== "object") return node;
+    if (Array.isArray(node)) return node.map(walk);
+    if ("$value" in node) {
+      const copy = { ...node };
+      try {
+        copy.$value = resolveValue(copy.$value, new Set());
+      } catch (err) {
+        // Cycles must fail loudly per requirements.
+        throw err;
+      }
+      copy.$value = applyTokensStudioTransforms(copy.$value, node.$extensions, filename);
+      return copy;
+    }
+    const result = {};
+    for (const k of Object.keys(node)) result[k] = walk(node[k]);
+    return result;
+  };
+  return walk(tree);
+}
+
+// Apply Tokens Studio `modify` transforms (lighten/darken/alpha). Anything
+// unparseable emits a warning to stderr and passes through the raw $value.
+function applyTokensStudioTransforms(value, extensions, filename) {
+  const modify = extensions?.["studio.tokens"]?.modify;
+  if (!modify || typeof value !== "string") return value;
+  const { type, amount, space } = modify;
+  const num = Number(amount);
+  try {
+    if (type === "alpha" && /^#([0-9a-f]{6})$/i.test(value) && Number.isFinite(num)) {
+      const a = Math.round(Math.max(0, Math.min(1, num)) * 255).toString(16).padStart(2, "0");
+      return value + a;
+    }
+    if ((type === "lighten" || type === "darken") && /^#([0-9a-f]{6})$/i.test(value) && Number.isFinite(num)) {
+      const hex = value.slice(1);
+      const rgb = [0, 2, 4].map(i => parseInt(hex.slice(i, i + 2), 16));
+      const factor = type === "lighten" ? num : -num;
+      const adjusted = rgb.map(c => Math.max(0, Math.min(255, Math.round(c + (factor * 255)))));
+      return "#" + adjusted.map(c => c.toString(16).padStart(2, "0")).join("");
+    }
+    process.stderr.write(`[extract-tokens] warn: unsupported Tokens Studio modify(${type}, space=${space}) in ${filename}; passing through raw value\n`);
+    return value;
+  } catch {
+    process.stderr.write(`[extract-tokens] warn: failed to apply Tokens Studio modify(${type}) in ${filename}; passing through raw value\n`);
+    return value;
+  }
+}
+
+// Flatten a resolved DTCG tree into `prefix.path = stringified $value` leaves.
+function flattenDtcg(node, prefix, acc) {
+  if (node == null) return;
+  if (typeof node !== "object" || Array.isArray(node)) {
+    acc[prefix] = Array.isArray(node) ? node.join(",") : String(node);
+    return;
+  }
+  if ("$value" in node) {
+    const v = node.$value;
+    const t = node.$type;
+    const key = t ? `${prefix}[${t}]` : prefix;
+    acc[key] = typeof v === "object" ? JSON.stringify(v, Object.keys(v).sort()) : String(v);
    return;
+  }
+  for (const k of Object.keys(node).sort()) {
+    if (k.startsWith("$")) continue; // $description / $extensions / $metadata
+    flattenDtcg(node[k], `${prefix}.${k}`, acc);
+  }
+}
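
The determinism claim above is easy to spot-check from the repo root: run the extractor twice and compare `tokens_hash`. A minimal sketch (script path per the template layout; `jq` assumed on PATH):

  SCRIPT=.claude/skills/super-design/scripts/extract-tokens.mjs
  h1="$(node "$SCRIPT" | jq -r '.tokens_hash')"
  h2="$(node "$SCRIPT" | jq -r '.tokens_hash')"
  [ "$h1" = "$h2" ] && echo "deterministic: $h1"
  node "$SCRIPT" | jq -r '.sources[]'   # subset of: css-vars, dtcg, tailwind (sorted)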

package/template/.claude/skills/super-design/scripts/hash-pages.sh
@@ -1,42 +1,228 @@
 #!/usr/bin/env bash
 # Usage: hash-pages.sh <urls_file>
+#
+# Captures three viewports per URL (mobile_375, tablet_768, desktop_1280),
+# computes sha256 (exact) + phash (perceptual) for each, and emits a single
+# JSON file with one record per URL (artifact §3.4, §10 line 492, §16 lines
+# 1367-1384).
+#
+# Env vars:
+#   OUT_DIR          — where to write hashes.json and the per-viewport PNGs
+#                      (default: docs/super-design/.cache/hashes).
+#   MASK_SELECTORS   — comma-separated CSS selectors masked on every
+#                      screenshot via Playwright's `mask:` option with
+#                      maskColor "#000". Artifact §3.4 defaults are always
+#                      merged in; user-supplied selectors are additive.
+#   MASK_COLOR       — overrides the mask fill color (default #000).
+#   HASH_URL_TIMEOUT — per-URL navigation timeout in ms (default 30000).
+#
+# Output shape (artifact §10 line 492):
+#   {
+#     "url": "...",
+#     "html_hash": "sha256:...",
+#     "dom_structure_hash": "sha256:...",
+#     "screenshot_hash": "sha256:...",           (desktop, back-compat)
+#     "viewport_hashes": {
+#       "mobile_375":   { "sha256": "...", "phash": "..." },
+#       "tablet_768":   { "sha256": "...", "phash": "..." },
+#       "desktop_1280": { "sha256": "...", "phash": "..." }
+#     }
+#   }
+#
+# Perceptual hash (phash):
+#   We prefer `sharp` if installed (a true average-hash on an 8x8 greyscale
+#   downsample, engine "sharp-ahash-8x8"). If sharp is unavailable we fall
+#   back to a deterministic PNG-structural fingerprint — 64 bits derived
+#   from 8 evenly-sliced sha256 windows over the PNG buffer. This is NOT a
+#   true perceptual hash; it survives identical re-renders but is sensitive
+#   to any pixel change. Compare with Hamming distance only when sharp was
+#   used (the emitted record includes a "phash_engine" field so consumers
+#   can pick the right distance metric).
 set -euo pipefail
+
 URLS="${1:?usage: hash-pages.sh <urls_file>}"
 OUT_DIR="${OUT_DIR:-docs/super-design/.cache/hashes}"
+MASK_SELECTORS="${MASK_SELECTORS:-}"
+MASK_COLOR="${MASK_COLOR:-#000}"
+HASH_URL_TIMEOUT="${HASH_URL_TIMEOUT:-30000}"
 mkdir -p "$OUT_DIR"
 
-URLS="$URLS" OUT_DIR="$OUT_DIR" node --experimental-vm-modules <<'JS'
+URLS="$URLS" OUT_DIR="$OUT_DIR" \
+MASK_SELECTORS="$MASK_SELECTORS" MASK_COLOR="$MASK_COLOR" \
+HASH_URL_TIMEOUT="$HASH_URL_TIMEOUT" node --experimental-vm-modules <<'JS'
 import { chromium } from "playwright";
 import { createHash } from "node:crypto";
-import { readFileSync, writeFileSync } from "node:fs";
+import { readFileSync, writeFileSync, mkdirSync } from "node:fs";
+import { join } from "node:path";
 
 const urlsFile = process.env.URLS;
 const outDir = process.env.OUT_DIR;
-const urls = readFileSync(urlsFile, "utf8").split("\n").map(s => s.trim()).filter(Boolean);
+const maskColor = process.env.MASK_COLOR || "#000";
+const navTimeout = Number(process.env.HASH_URL_TIMEOUT || 30000);
+
+// Artifact §3.4 defaults — always masked.
+const DEFAULT_MASKS = [
+  "[data-timestamp]",
+  ".relative-time",
+  "[data-react-hydration]",
+  "video",
+  "canvas",
+];
+const userMasks = (process.env.MASK_SELECTORS || "")
+  .split(",").map(s => s.trim()).filter(Boolean);
+const maskSelectors = [...new Set([...DEFAULT_MASKS, ...userMasks])];
+
+// Volatile attributes stripped from the DOM structure hash (artifact §3.4
+// — added data-react-hydration to match the mask defaults).
+const VOLATILE_ATTRS = new Set([
+  "nonce",
+  "data-timestamp",
+  "data-reactid",
+  "data-next-hydrate",
+  "data-react-hydration",
+]);
+
+const urls = readFileSync(urlsFile, "utf8")
+  .split("\n").map(s => s.trim()).filter(Boolean);
+
+const VIEWPORTS = [
+  { label: "mobile_375",   width: 375,  height: 667 },
+  { label: "tablet_768",   width: 768,  height: 1024 },
+  { label: "desktop_1280", width: 1280, height: 800 },
+];
+
+const sha = (s) => createHash("sha256").update(s).digest("hex");
+
+// Optional sharp — gives us a real perceptual hash. Falls back to a
+// PNG-structural fingerprint (documented in the header) when absent.
+let sharp = null;
+let phashEngine = "fallback-png-fingerprint";
+try {
+  sharp = (await import("sharp")).default;
+  phashEngine = "sharp-ahash-8x8";
+} catch { /* no sharp installed; use fallback */ }
+
+async function aHash(buf) {
+  if (!sharp) return fallbackFingerprint(buf);
+  // Average-hash: downscale to 8x8 greyscale, threshold at mean.
+  const raw = await sharp(buf)
+    .greyscale()
+    .resize(8, 8, { fit: "fill", kernel: "cubic" })
+    .raw()
+    .toBuffer();
+  let sum = 0;
+  for (let i = 0; i < raw.length; i++) sum += raw[i];
+  const mean = sum / raw.length;
+  let bits = 0n;
+  for (let i = 0; i < 64; i++) {
+    bits = (bits << 1n) | (raw[i] >= mean ? 1n : 0n);
+  }
+  return bits.toString(16).padStart(16, "0");
+}
+
+function fallbackFingerprint(buf) {
+  // Zero-dep deterministic fingerprint: slice PNG into 8 windows, take the
+  // first byte of each window's sha256, concat to 16 hex chars. Order of
+  // magnitude cheaper than a real pHash and only collision-safe for
+  // identical-buffer comparisons, but it gives us a stable 64-bit slot in
+  // viewport_hashes so downstream tooling can still key on it.
+  if (buf.length === 0) return "0".repeat(16);
+  const windows = 8;
+  const step = Math.max(1, Math.floor(buf.length / windows));
+  let out = "";
+  for (let i = 0; i < windows; i++) {
+    const start = i * step;
+    const end = i === windows - 1 ? buf.length : start + step;
+    const slice = buf.subarray(start, end);
+    const h = createHash("sha256").update(slice).digest();
+    out += h.subarray(0, 1).toString("hex");
+  }
+  return out;
+}
+
 const browser = await chromium.launch();
-const ctx = await browser.newContext({
-  viewport: { width: 1280, height: 800 }, reducedMotion: "reduce", deviceScaleFactor: 1,
-});
-const page = await ctx.newPage();
-const sha = s => createHash("sha256").update(s).digest("hex");
 const results = [];
 for (const url of urls) {
+  const entry = { url, viewport_hashes: {} };
   try {
-    await page.goto(url, { waitUntil: "networkidle", timeout: 30000 });
-    const html = (await page.content()).replace(/\s+/g, " ").trim();
-    const dom = await page.evaluate(() => {
-      const V = new Set(["nonce","data-timestamp","data-reactid","data-next-hydrate"]);
-      const walk = n => n.nodeType !== 1 ? "" :
-        `<${n.tagName.toLowerCase()}[${[...n.attributes].filter(a=>!V.has(a.name))
-          .map(a=>`${a.name}=${a.value}`).sort().join(",")}]${[...n.childNodes].map(walk).join("")}>`;
-      return walk(document.documentElement);
-    });
-    const buf = await page.screenshot({ fullPage: true, animations: "disabled", caret: "hide" });
-    results.push({ url, html_hash: "sha256:" + sha(html),
-      dom_structure_hash: "sha256:" + sha(dom), screenshot_hash: "sha256:" + sha(buf) });
-  } catch (e) { results.push({ url, error: String(e.message || e) }); }
+    // Capture html/dom work once, on the first viewport visited.
+    let capturedHtml = null;
+    let capturedDom = null;
+
+    for (const vp of VIEWPORTS) {
+      const ctx = await browser.newContext({
+        viewport: { width: vp.width, height: vp.height },
+        reducedMotion: "reduce",
+        deviceScaleFactor: 1,
+      });
+      const page = await ctx.newPage();
+      await page.goto(url, { waitUntil: "networkidle", timeout: navTimeout });
+
+      if (capturedHtml === null) {
+        capturedHtml = (await page.content()).replace(/\s+/g, " ").trim();
+        capturedDom = await page.evaluate((volatileList) => {
+          const V = new Set(volatileList);
+          const walk = (n) => n.nodeType !== 1 ? "" :
+            `<${n.tagName.toLowerCase()}[${[...n.attributes]
+              .filter(a => !V.has(a.name))
+              .map(a => `${a.name}=${a.value}`)
+              .sort().join(",")}]${[...n.childNodes].map(walk).join("")}>`;
+          return walk(document.documentElement);
+        }, [...VOLATILE_ATTRS]);
+      }
+
+      // Build mask locator list (skip selectors that error — they're OK
+      // if nothing matches; only invalid syntax throws).
+      const masks = [];
+      for (const sel of maskSelectors) {
+        try { masks.push(page.locator(sel)); } catch { /* bad selector */ }
+      }
+
+      const buf = await page.screenshot({
+        fullPage: true,
+        animations: "disabled",
+        caret: "hide",
+        mask: masks,
+        maskColor,
+      });
+
+      // Persist PNG for visual-regression.sh to consume.
+      const pngDir = join(outDir, "screenshots", encodeURIComponent(url));
+      mkdirSync(pngDir, { recursive: true });
+      const pngPath = join(pngDir, `${vp.label}.png`);
+      writeFileSync(pngPath, buf);
+
+      const ph = await aHash(buf);
+      entry.viewport_hashes[vp.label] = {
+        sha256: "sha256:" + sha(buf),
+        phash: `${phashEngine === "sharp-ahash-8x8" ? "phash" : "fpr"}:${ph}`,
+        png_path: pngPath,
+      };
+
+      if (vp.label === "desktop_1280") {
+        entry.screenshot_hash = entry.viewport_hashes[vp.label].sha256;
+      }
+
+      await ctx.close();
+    }
+
+    entry.html_hash = "sha256:" + sha(capturedHtml);
+    entry.dom_structure_hash = "sha256:" + sha(capturedDom);
+    entry.mask_selectors = maskSelectors;
+    entry.phash_engine = phashEngine;
+  } catch (e) {
+    entry.error = String(e.message || e);
+  }
+  results.push(entry);
 }
-writeFileSync(outDir + "/hashes.json", JSON.stringify(results, null, 2));
+
+writeFileSync(join(outDir, "hashes.json"), JSON.stringify(results, null, 2));
 await browser.close();
-console.log(JSON.stringify({ count: results.length, path: outDir + "/hashes.json" }));
+console.log(JSON.stringify({
+  count: results.length,
+  path: join(outDir, "hashes.json"),
+  phash_engine: phashEngine,
+  viewports: VIEWPORTS.map(v => v.label),
+  mask_selectors: maskSelectors,
+}));
 JS
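
As the header notes, Hamming distance only makes sense between two sharp-generated values; check `phash_engine` before comparing. A minimal bash sketch for the 64-bit hex values stored under `viewport_hashes.*.phash` (strings like "phash:d1c0..."):

  # Count differing bits between two 64-bit phash hex values.
  phash_distance() {
    local a="${1#*:}" b="${2#*:}" d=0 i x
    x=$(( 0x$a ^ 0x$b ))
    for (( i = 0; i < 64; i++ )); do
      d=$(( d + ( (x >> i) & 1 ) ))
    done
    echo "$d"
  }
  phash_distance "phash:d1c0ffee00000000" "phash:d1c0ffee0000000f"   # prints 4

A small distance (the threshold is a judgment call) suggests near-identical renders; `fpr:` fallback values should only ever be compared for equality.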
@@ -0,0 +1,21 @@
1
+ #!/usr/bin/env bash
2
+ # Usage: setup-git-notes.sh
3
+ #
4
+ # One-shot setup so `git notes --ref=super-design` round-trips across
5
+ # clones. Without the remote refspec, git fetch ignores notes by default
6
+ # (artifact §7 line 570-573).
7
+ set -euo pipefail
8
+
9
+ if ! git rev-parse --git-dir >/dev/null 2>&1; then
10
+ echo '{"error":"not-a-git-repo"}' >&2; exit 3
11
+ fi
12
+
13
+ # Idempotent: only add if absent.
14
+ if git config --get-all remote.origin.fetch 2>/dev/null |
15
+ grep -q 'refs/notes/super-design'; then
16
+ echo '{"status":"already-configured"}'
17
+ else
18
+ git config --add remote.origin.fetch \
19
+ '+refs/notes/super-design:refs/notes/super-design'
20
+ echo '{"status":"added","ref":"refs/notes/super-design"}'
21
+ fi
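
The script configures only the fetch side; notes still have to be pushed by naming the ref explicitly. A hedged usage sketch (the note payload is illustrative):

  bash .claude/skills/super-design/scripts/setup-git-notes.sh
  git notes --ref=super-design add -m '{"audited":true}' HEAD
  git push origin 'refs/notes/super-design'
  # in a fresh clone, after running the same setup script:
  git fetch origin
  git notes --ref=super-design show HEAD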
@@ -1,15 +1,78 @@
1
1
  #!/usr/bin/env bash
2
+ # Usage: validate-state.sh [<app_path_or_state_path>]
3
+ #
4
+ # Validates the super-design audit state file. On schema/parse errors,
5
+ # moves the broken file aside (artifact §3 "Graceful corruption handling"
6
+ # line 74) and emits a JSON verdict. Also enforces schema_version major
7
+ # compatibility (artifact §12 line 934).
8
+ #
9
+ # Monorepo support (artifact §11 line 902): the positional arg is the
10
+ # app root (e.g. `apps/web`); state is looked up at
11
+ # `<app_path>/docs/super-design/.audit-state.json`. For single-app
12
+ # repos pass "." or omit (default behavior preserved). Back-compat: if
13
+ # the arg ends in `.audit-state.json` it is used verbatim.
14
+ #
15
+ # Validation strategy:
16
+ # 1. If `ajv` is on PATH, validate against audit-state.schema.json
17
+ # (draft-07 canonical schema, task #18).
18
+ # 2. Otherwise fall back to the inline jq shape check that existed
19
+ # pre-schema. Same corrupt-rename behavior in both paths.
2
20
  set -euo pipefail
3
- STATE="${1:-docs/super-design/.audit-state.json}"
4
- if [[ ! -f "$STATE" ]]; then echo '{"status":"missing"}'; exit 2; fi
5
- jq -e '
6
- (.schema_version | type == "string") and
7
- (.last_audit_at | fromdateiso8601 | . > 0) and
8
- (.git_sha_at_audit | test("^[0-9a-f]{7,64}$")) and
9
- (.skill_version | type == "string") and
10
- (.tools | type == "object")
11
- ' "$STATE" >/dev/null 2>&1 || { echo '{"status":"corrupt"}'; exit 2; }
21
+ ARG="${1:-.}"
22
+ case "$ARG" in
23
+ *.audit-state.json) STATE="$ARG" ;;
24
+ .|"") STATE="docs/super-design/.audit-state.json" ;;
25
+ *) STATE="${ARG%/}/docs/super-design/.audit-state.json" ;;
26
+ esac
27
+ SKILL_DIR="$(cd "$(dirname "$0")/.." && pwd)"
28
+ SCHEMA="$SKILL_DIR/audit-state.schema.json"
29
+
30
+ # Current schema major is either read from a sibling .schema-version file
31
+ # (so the number can be bumped without editing shell) or falls back to 1.
32
+ SCHEMA_VERSION_FILE="$SKILL_DIR/.schema-version"
33
+ if [ -f "$SCHEMA_VERSION_FILE" ]; then
34
+ CURRENT_SCHEMA_MAJOR="$(cut -d. -f1 <"$SCHEMA_VERSION_FILE" | tr -d '[:space:]')"
35
+ else
36
+ CURRENT_SCHEMA_MAJOR=1
37
+ fi
38
+
39
+ if [ ! -f "$STATE" ]; then echo '{"status":"missing"}'; exit 2; fi
40
+
41
+ # Parse + shape check. On failure, rename so the user can inspect and we
42
+ # fall through to first-audit (SKILL.md Step 1 treats "corrupt" that way).
43
+ schema_ok=1
44
+ if command -v ajv >/dev/null 2>&1 && [ -f "$SCHEMA" ]; then
45
+ if ! ajv validate -s "$SCHEMA" -d "$STATE" --errors=text >/dev/null 2>&1; then
46
+ schema_ok=0
47
+ fi
48
+ else
49
+ # Fallback: inline jq shape check (pre-schema behavior).
50
+ if ! jq -e '
51
+ (.schema_version | type == "string") and
52
+ (.last_audit_at | fromdateiso8601 | . > 0) and
53
+ (.git_sha_at_audit | test("^[0-9a-f]{7,64}$")) and
54
+ (.skill_version | type == "string") and
55
+ (.tools | type == "object")
56
+ ' "$STATE" >/dev/null 2>&1; then
57
+ schema_ok=0
58
+ fi
59
+ fi
60
+
61
+ if [ "$schema_ok" -eq 0 ]; then
62
+ mv "$STATE" "$STATE.corrupt-$(date +%s)" 2>/dev/null || true
63
+ echo '{"status":"corrupt"}'; exit 2
64
+ fi
65
+
66
+ # schema_version major-bump check — if state was written by a newer OR
67
+ # incompatible-older skill, force a full re-audit rather than silently
68
+ # trusting the shape.
69
+ STATE_MAJOR="$(jq -r '.schema_version' "$STATE" | cut -d. -f1)"
70
+ if [ -z "$STATE_MAJOR" ] || [ "$STATE_MAJOR" != "$CURRENT_SCHEMA_MAJOR" ]; then
71
+ echo "{\"status\":\"schema-incompatible\",\"action\":\"force-full\",\"state_major\":\"${STATE_MAJOR:-unknown}\",\"current_major\":\"${CURRENT_SCHEMA_MAJOR}\"}"
72
+ exit 1
73
+ fi
74
+
12
75
  AGE_DAYS=$(( ( $(date -u +%s) - $(jq -r '.last_audit_at | fromdateiso8601' "$STATE") ) / 86400 ))
13
- if (( AGE_DAYS > 180 )); then echo "{\"status\":\"stale-force-full\",\"age_days\":$AGE_DAYS}"; exit 1
14
- elif (( AGE_DAYS > 90 )); then echo "{\"status\":\"stale-refresh-research\",\"age_days\":$AGE_DAYS}"; exit 1
76
+ if [ "$AGE_DAYS" -gt 180 ]; then echo "{\"status\":\"stale-force-full\",\"age_days\":$AGE_DAYS}"; exit 1
77
+ elif [ "$AGE_DAYS" -gt 90 ]; then echo "{\"status\":\"stale-refresh-research\",\"age_days\":$AGE_DAYS}"; exit 1
15
78
  else echo "{\"status\":\"fresh\",\"age_days\":$AGE_DAYS}"; exit 0; fi
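
Callers are expected to branch on the emitted `status` (the exit code alone conflates several verdicts at 1 and 2). A minimal dispatch sketch for a monorepo app; the `apps/web` path is illustrative:

  out="$(bash .claude/skills/super-design/scripts/validate-state.sh apps/web)" || true
  case "$(jq -r '.status' <<<"$out")" in
    fresh)                  echo "state fresh: incremental audit" ;;
    missing|corrupt)        echo "no usable state: run a first/full audit" ;;
    schema-incompatible)    echo "schema major mismatch: force full re-audit" ;;
    stale-force-full)       echo "state older than 180 days: force full re-audit" ;;
    stale-refresh-research) echo "state older than 90 days: refresh research first" ;;
  esac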
@@ -1,19 +1,72 @@
1
1
  #!/usr/bin/env bash
2
+ # Usage: verify-audit.sh [--strict] <session_dir>
3
+ #
4
+ # Verifies the artifacts produced by an sd-audit session:
5
+ # 1. .audit-state.json exists and passes validate-state.sh
6
+ # (so the schema task #18 schema is actually enforced end-to-end).
7
+ # 2. findings.json exists and parses as a JSON array.
8
+ # 3. Every finding has a SHOT (screenshot_path) + SNAP (snapshot_path)
9
+ # that resolves to a non-empty file on disk.
10
+ # 4. Every finding's snapshot_quote appears verbatim in its snapshot.
11
+ #
12
+ # Exit codes:
13
+ # 0 OK (no errors, no warnings — or warnings present without --strict)
14
+ # 1 non-fatal warning in --strict mode, or verification failure
15
+ # 2 missing prerequisites (session dir / findings.json)
2
16
  set -euo pipefail
3
- SESSION_DIR="${1:?usage: verify-audit.sh <session_dir>}"
17
+
18
+ STRICT=0
19
+ if [ "${1:-}" = "--strict" ]; then STRICT=1; shift; fi
20
+
21
+ SESSION_DIR="${1:?usage: verify-audit.sh [--strict] <session_dir>}"
4
22
  FINDINGS="$SESSION_DIR/findings.json"
5
- if [[ ! -f "$FINDINGS" ]]; then echo "FATAL: no findings.json at $FINDINGS" >&2; exit 2; fi
23
+ SKILL_DIR="$(cd "$(dirname "$0")/.." && pwd)"
24
+ VALIDATE="$SKILL_DIR/scripts/validate-state.sh"
25
+ STATE="${AUDIT_STATE:-docs/super-design/.audit-state.json}"
26
+
27
+ WARNINGS=0
28
+ warn() { echo "WARN: $*" >&2; WARNINGS=$((WARNINGS + 1)); }
6
29
 
7
- jq -r '.[] | [.id, .screenshot_path, .snapshot_path] | @tsv' "$FINDINGS" | while IFS=$'\t' read -r id shot snap; do
8
- if [[ ! -s "$shot" ]]; then echo "FAIL $id: missing/empty screenshot $shot" >&2; exit 1; fi
9
- if [[ ! -s "$snap" ]]; then echo "FAIL $id: missing/empty snapshot $snap" >&2; exit 1; fi
30
+ # 1. State file schema check non-fatal (state may live outside session
31
+ # dir, or may legitimately be absent on first run). In --strict mode
32
+ # any anomaly counts as a warning.
33
+ if [ ! -f "$STATE" ]; then
34
+ warn "no audit state at $STATE (first audit? expected on CI)"
35
+ else
36
+ if ! bash "$VALIDATE" "$STATE" >/dev/null 2>&1; then
37
+ warn "validate-state.sh rejected $STATE (schema drift or stale)"
38
+ fi
39
+ fi
40
+
41
+ # 2. findings.json must exist and parse.
42
+ if [ ! -f "$FINDINGS" ]; then
43
+ echo "FATAL: no findings.json at $FINDINGS" >&2; exit 2
44
+ fi
45
+ if ! jq -e 'type == "array"' "$FINDINGS" >/dev/null 2>&1; then
46
+ echo "FATAL: $FINDINGS is not a JSON array" >&2; exit 1
47
+ fi
48
+
49
+ # 3. Every referenced screenshot/snapshot resolves to a non-empty file.
50
+ jq -r '.[] | [.id, .screenshot_path, .snapshot_path] | @tsv' "$FINDINGS" | \
51
+ while IFS=$'\t' read -r id shot snap; do
52
+ if [ ! -s "$shot" ]; then echo "FAIL $id: missing/empty screenshot $shot" >&2; exit 1; fi
53
+ if [ ! -s "$snap" ]; then echo "FAIL $id: missing/empty snapshot $snap" >&2; exit 1; fi
10
54
  done
11
55
 
56
+ # 4. snapshot_quote must appear verbatim in the snapshot.
12
57
  jq -c '.[]' "$FINDINGS" | while read -r f; do
13
58
  id=$(echo "$f" | jq -r .id)
14
- q=$(echo "$f" | jq -r .snapshot_quote)
15
- s=$(echo "$f" | jq -r .snapshot_path)
16
- if ! grep -qF "$q" "$s"; then echo "FAIL $id: quote not found verbatim in $s" >&2; exit 1; fi
59
+ q=$(echo "$f" | jq -r .snapshot_quote)
60
+ s=$(echo "$f" | jq -r .snapshot_path)
61
+ if ! grep -qF "$q" "$s"; then
62
+ echo "FAIL $id: quote not found verbatim in $s" >&2; exit 1
63
+ fi
17
64
  done
18
65
 
19
- echo "OK: $(jq 'length' "$FINDINGS") findings verified"
66
+ COUNT=$(jq 'length' "$FINDINGS")
67
+ if [ "$STRICT" -eq 1 ] && [ "$WARNINGS" -gt 0 ]; then
68
+ echo "STRICT: $COUNT findings verified, $WARNINGS warning(s)" >&2
69
+ exit 1
70
+ fi
71
+
72
+ echo "OK: $COUNT findings verified${WARNINGS:+ ($WARNINGS warning(s))}"