delimit-cli 3.3.0 → 3.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +34 -62
- package/gateway/ai/backends/tools_data.py +830 -0
- package/gateway/ai/backends/tools_design.py +921 -0
- package/gateway/ai/backends/tools_infra.py +866 -0
- package/gateway/ai/backends/tools_real.py +766 -0
- package/gateway/ai/backends/ui_bridge.py +26 -49
- package/gateway/ai/deliberation.py +387 -0
- package/gateway/ai/ledger_manager.py +207 -0
- package/gateway/ai/server.py +630 -216
- package/glama.json +1 -0
- package/package.json +2 -2
|
@@ -0,0 +1,921 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Real implementations for design/* and story/* tools.
|
|
3
|
+
Works WITHOUT Figma or Storybook by default — scans local project files.
|
|
4
|
+
Optional Figma API integration when FIGMA_TOKEN env var is set.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import ast
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import os
|
|
11
|
+
import re
|
|
12
|
+
import shutil
|
|
13
|
+
import subprocess
|
|
14
|
+
import tempfile
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Any, Dict, List, Optional
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger("delimit.ai.tools_design")
|
|
19
|
+
|
|
20
|
+
# ---------------------------------------------------------------------------
|
|
21
|
+
# Helpers
|
|
22
|
+
# ---------------------------------------------------------------------------
|
|
23
|
+
|
|
24
|
+
def _has_playwright() -> bool:
    """Check whether the Playwright Python package is importable.

    NOTE: only the package import is checked — this does NOT confirm that
    browser binaries have been installed via ``python -m playwright install``.
    """
    try:
        import playwright  # noqa: F401
        return True
    except ImportError:
        return False
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _find_files(root: Path, extensions: List[str], max_depth: int = 6) -> List[Path]:
|
|
34
|
+
"""Recursively find files by extension, skipping node_modules/dist/.next."""
|
|
35
|
+
skip = {"node_modules", "dist", ".next", ".git", "__pycache__", "build", ".cache"}
|
|
36
|
+
results: List[Path] = []
|
|
37
|
+
if not root.is_dir():
|
|
38
|
+
return results
|
|
39
|
+
|
|
40
|
+
def _walk(p: Path, depth: int):
|
|
41
|
+
if depth > max_depth:
|
|
42
|
+
return
|
|
43
|
+
try:
|
|
44
|
+
for child in sorted(p.iterdir()):
|
|
45
|
+
if child.name in skip:
|
|
46
|
+
continue
|
|
47
|
+
if child.is_dir():
|
|
48
|
+
_walk(child, depth + 1)
|
|
49
|
+
elif child.suffix in extensions:
|
|
50
|
+
results.append(child)
|
|
51
|
+
except PermissionError:
|
|
52
|
+
pass
|
|
53
|
+
|
|
54
|
+
_walk(root, 0)
|
|
55
|
+
return results
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _read_text(path: Path, limit: int = 200_000) -> str:
|
|
59
|
+
"""Read file text, capped at *limit* chars."""
|
|
60
|
+
try:
|
|
61
|
+
return path.read_text(errors="replace")[:limit]
|
|
62
|
+
except Exception:
|
|
63
|
+
return ""
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
# ---------------------------------------------------------------------------
|
|
67
|
+
# CSS / Tailwind token extraction helpers
|
|
68
|
+
# ---------------------------------------------------------------------------
|
|
69
|
+
|
|
70
|
+
_CSS_VAR_RE = re.compile(r"--([a-zA-Z0-9_-]+)\s*:\s*([^;]+);")
|
|
71
|
+
_MEDIA_QUERY_RE = re.compile(r"@media[^{]*\(\s*(?:min|max)-width\s*:\s*([^)]+)\)")
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _extract_css_variables(text: str) -> Dict[str, List[Dict[str, str]]]:
|
|
75
|
+
"""Extract CSS custom properties grouped by category."""
|
|
76
|
+
colors: List[Dict[str, str]] = []
|
|
77
|
+
spacing: List[Dict[str, str]] = []
|
|
78
|
+
typography: List[Dict[str, str]] = []
|
|
79
|
+
other: List[Dict[str, str]] = []
|
|
80
|
+
|
|
81
|
+
for name, value in _CSS_VAR_RE.findall(text):
|
|
82
|
+
value = value.strip()
|
|
83
|
+
entry = {"name": f"--{name}", "value": value}
|
|
84
|
+
lower = name.lower()
|
|
85
|
+
if any(k in lower for k in ("color", "bg", "text", "border", "fill", "stroke", "accent", "primary", "secondary")):
|
|
86
|
+
colors.append(entry)
|
|
87
|
+
elif any(k in lower for k in ("space", "gap", "margin", "padding", "size", "width", "height", "radius")):
|
|
88
|
+
spacing.append(entry)
|
|
89
|
+
elif any(k in lower for k in ("font", "line", "letter", "text", "heading")):
|
|
90
|
+
typography.append(entry)
|
|
91
|
+
elif _is_color_value(value):
|
|
92
|
+
colors.append(entry)
|
|
93
|
+
else:
|
|
94
|
+
other.append(entry)
|
|
95
|
+
|
|
96
|
+
return {"colors": colors, "spacing": spacing, "typography": typography, "other": other}
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _is_color_value(v: str) -> bool:
|
|
100
|
+
v = v.lower().strip()
|
|
101
|
+
if v.startswith("#") and len(v) in (4, 7, 9):
|
|
102
|
+
return True
|
|
103
|
+
if v.startswith(("rgb", "hsl", "oklch", "lab(", "lch(")):
|
|
104
|
+
return True
|
|
105
|
+
return False
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def _parse_tailwind_config(text: str) -> Dict[str, Any]:
|
|
109
|
+
"""Best-effort parse of tailwind.config.{js,ts,mjs} into token counts."""
|
|
110
|
+
colors_count = 0
|
|
111
|
+
spacing_count = 0
|
|
112
|
+
breakpoints: List[str] = []
|
|
113
|
+
|
|
114
|
+
# Extract theme.extend or theme sections
|
|
115
|
+
for match in re.finditer(r"colors\s*:\s*\{([^}]*(?:\{[^}]*\}[^}]*)*)\}", text):
|
|
116
|
+
block = match.group(1)
|
|
117
|
+
colors_count += block.count(":")
|
|
118
|
+
|
|
119
|
+
for match in re.finditer(r"spacing\s*:\s*\{([^}]*)\}", text):
|
|
120
|
+
block = match.group(1)
|
|
121
|
+
spacing_count += block.count(":")
|
|
122
|
+
|
|
123
|
+
for match in re.finditer(r"screens\s*:\s*\{([^}]*)\}", text):
|
|
124
|
+
block = match.group(1)
|
|
125
|
+
for bp_match in re.finditer(r"['\"]?(\w+)['\"]?\s*:", block):
|
|
126
|
+
breakpoints.append(bp_match.group(1))
|
|
127
|
+
|
|
128
|
+
return {"colors_count": colors_count, "spacing_count": spacing_count, "breakpoints": breakpoints}
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
# ---------------------------------------------------------------------------
|
|
132
|
+
# Component scanning helpers
|
|
133
|
+
# ---------------------------------------------------------------------------
|
|
134
|
+
|
|
135
|
+
_REACT_COMPONENT_RE = re.compile(
|
|
136
|
+
r"(?:export\s+(?:default\s+)?)?(?:function|const)\s+([A-Z][A-Za-z0-9]*)"
|
|
137
|
+
)
|
|
138
|
+
_PROPS_INTERFACE_RE = re.compile(
|
|
139
|
+
r"(?:interface|type)\s+(\w+Props)\s*(?:=\s*)?\{([^}]*)\}", re.DOTALL
|
|
140
|
+
)
|
|
141
|
+
_EXPORT_RE = re.compile(r"export\s+(?:default\s+)?(?:function|const|class)\s+(\w+)")
|
|
142
|
+
_VUE_NAME_RE = re.compile(r"name\s*:\s*['\"]([^'\"]+)['\"]")
|
|
143
|
+
_SVELTE_EXPORT_RE = re.compile(r"export\s+let\s+(\w+)")
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _scan_react_component(path: Path, text: str) -> Optional[Dict[str, Any]]:
|
|
147
|
+
"""Extract component metadata from a React/TSX/JSX file."""
|
|
148
|
+
components = _REACT_COMPONENT_RE.findall(text)
|
|
149
|
+
if not components:
|
|
150
|
+
return None
|
|
151
|
+
exports = _EXPORT_RE.findall(text)
|
|
152
|
+
props_raw = _PROPS_INTERFACE_RE.findall(text)
|
|
153
|
+
props: List[str] = []
|
|
154
|
+
for _name, body in props_raw:
|
|
155
|
+
for line in body.strip().split("\n"):
|
|
156
|
+
line = line.strip().rstrip(";").rstrip(",")
|
|
157
|
+
if line and not line.startswith("//"):
|
|
158
|
+
props.append(line)
|
|
159
|
+
return {
|
|
160
|
+
"name": components[0],
|
|
161
|
+
"path": str(path),
|
|
162
|
+
"props": props,
|
|
163
|
+
"exports": exports,
|
|
164
|
+
"framework": "react",
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def _scan_vue_component(path: Path, text: str) -> Optional[Dict[str, Any]]:
|
|
169
|
+
m = _VUE_NAME_RE.search(text)
|
|
170
|
+
name = m.group(1) if m else path.stem
|
|
171
|
+
props = re.findall(r"(?:defineProps|props)\s*(?:<[^>]+>)?\s*\(\s*\{([^}]*)\}", text, re.DOTALL)
|
|
172
|
+
prop_list = []
|
|
173
|
+
for block in props:
|
|
174
|
+
for line in block.strip().split("\n"):
|
|
175
|
+
line = line.strip().rstrip(",")
|
|
176
|
+
if line and not line.startswith("//"):
|
|
177
|
+
prop_list.append(line)
|
|
178
|
+
return {"name": name, "path": str(path), "props": prop_list, "exports": [name], "framework": "vue"}
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def _scan_svelte_component(path: Path, text: str) -> Optional[Dict[str, Any]]:
|
|
182
|
+
props = _SVELTE_EXPORT_RE.findall(text)
|
|
183
|
+
return {"name": path.stem, "path": str(path), "props": props, "exports": [path.stem], "framework": "svelte"}
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
# ---------------------------------------------------------------------------
|
|
187
|
+
# 19. design_extract_tokens
|
|
188
|
+
# ---------------------------------------------------------------------------
|
|
189
|
+
|
|
190
|
+
def design_extract_tokens(
    figma_file_key: Optional[str] = None,
    token_types: Optional[List[str]] = None,
    project_path: Optional[str] = None,
) -> Dict[str, Any]:
    """Extract design tokens from project CSS/SCSS/Tailwind config.

    If the FIGMA_TOKEN env var is set and *figma_file_key* is provided, the
    tokens are fetched via _figma_extract_tokens instead.  Otherwise the
    first tailwind.config.* found (breakpoints only) plus every
    .css/.scss/.sass file containing custom properties or media queries is
    scanned locally.

    Args:
        figma_file_key: Figma file key; only used together with FIGMA_TOKEN.
        token_types: If given, the returned "tokens" dict is filtered to
            these category keys (e.g. ["colors", "spacing"]).
        project_path: Project root to scan; defaults to the current directory.

    Returns:
        Dict with "tokens" (categorized lists), "total_tokens",
        "source_files" and "figma_used"; or a dict with an "error" key when
        the directory does not exist.
    """
    figma_token = os.environ.get("FIGMA_TOKEN", "")
    if figma_token and figma_file_key:
        return _figma_extract_tokens(figma_file_key, figma_token, token_types)

    # Local extraction
    root = Path(project_path) if project_path else Path.cwd()
    if not root.is_dir():
        return {"tool": "design.extract_tokens", "error": f"Directory not found: {root}"}

    all_tokens: Dict[str, List] = {"colors": [], "spacing": [], "typography": [], "breakpoints": [], "other": []}
    source_files: List[str] = []

    # 1. Tailwind config — only the first config variant found is used
    #    (the loop breaks after processing it).
    for tw_name in ("tailwind.config.js", "tailwind.config.ts", "tailwind.config.mjs", "tailwind.config.cjs"):
        tw_path = root / tw_name
        if tw_path.exists():
            text = _read_text(tw_path)
            parsed = _parse_tailwind_config(text)
            source_files.append(str(tw_path))
            if parsed["breakpoints"]:
                all_tokens["breakpoints"].extend(
                    [{"name": bp, "source": str(tw_path)} for bp in parsed["breakpoints"]]
                )
            break

    # 2. CSS / SCSS files
    css_files = _find_files(root, [".css", ".scss", ".sass"])
    for cf in css_files:
        text = _read_text(cf)
        # Cheap pre-filter: skip stylesheets with neither custom properties
        # nor media queries.
        if "--" not in text and "@media" not in text:
            continue
        source_files.append(str(cf))
        vars_found = _extract_css_variables(text)
        for cat in ("colors", "spacing", "typography", "other"):
            for entry in vars_found[cat]:
                entry["source"] = str(cf)
                all_tokens[cat].append(entry)

        # breakpoints from media queries
        for bp_val in _MEDIA_QUERY_RE.findall(text):
            all_tokens["breakpoints"].append({"value": bp_val.strip(), "source": str(cf)})

    # 3. Filter by token_types if specified
    if token_types:
        all_tokens = {k: v for k, v in all_tokens.items() if k in token_types}

    # Deduplicate breakpoints — Tailwind entries carry "name", CSS entries
    # carry "value"; the first occurrence wins, preserving discovery order.
    seen_bp = set()
    unique_bp = []
    for bp in all_tokens.get("breakpoints", []):
        key = bp.get("name", bp.get("value", ""))
        if key not in seen_bp:
            seen_bp.add(key)
            unique_bp.append(bp)
    if "breakpoints" in all_tokens:
        all_tokens["breakpoints"] = unique_bp

    total = sum(len(v) for v in all_tokens.values())
    return {
        "tool": "design.extract_tokens",
        "status": "ok",
        "tokens": all_tokens,
        "total_tokens": total,
        "source_files": sorted(set(source_files)),
        "figma_used": False,
    }
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def _figma_extract_tokens(file_key: str, token: str, token_types: Optional[List[str]]) -> Dict[str, Any]:
    """Fetch design tokens from the Figma API.

    Calls GET /v1/files/{file_key}/styles with the personal-access token and
    buckets the returned published styles by style_type: FILL -> colors,
    TEXT -> typography, anything else -> other.  "spacing" always stays
    empty here — the styles endpoint has no spacing concept.  Any failure
    (network, auth, JSON decoding) is returned as an "error" dict rather
    than raised.
    """
    try:
        import urllib.request
        url = f"https://api.figma.com/v1/files/{file_key}/styles"
        req = urllib.request.Request(url, headers={"X-Figma-Token": token})
        with urllib.request.urlopen(req, timeout=15) as resp:
            data = json.loads(resp.read())
        styles = data.get("meta", {}).get("styles", [])
        tokens: Dict[str, List] = {"colors": [], "typography": [], "spacing": [], "other": []}
        for s in styles:
            entry = {"name": s.get("name", ""), "key": s.get("key", ""), "style_type": s.get("style_type", "")}
            stype = s.get("style_type", "").upper()
            if stype == "FILL":
                tokens["colors"].append(entry)
            elif stype == "TEXT":
                tokens["typography"].append(entry)
            else:
                tokens["other"].append(entry)
        # Optional category filter, same semantics as the local path.
        if token_types:
            tokens = {k: v for k, v in tokens.items() if k in token_types}
        return {"tool": "design.extract_tokens", "status": "ok", "tokens": tokens,
                "total_tokens": sum(len(v) for v in tokens.values()),
                "source_files": [f"figma:{file_key}"], "figma_used": True}
    except Exception as e:
        return {"tool": "design.extract_tokens", "error": f"Figma API error: {e}", "figma_used": True}
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
# ---------------------------------------------------------------------------
|
|
297
|
+
# 20. design_generate_component
|
|
298
|
+
# ---------------------------------------------------------------------------
|
|
299
|
+
|
|
300
|
+
def design_generate_component(
    component_name: str,
    figma_node_id: Optional[str] = None,
    output_path: Optional[str] = None,
    project_path: Optional[str] = None,
) -> Dict[str, Any]:
    """Generate a React/Next.js component skeleton.

    Uses a Tailwind className if a tailwind.config.* is detected in the
    project, otherwise an inline style attribute.

    Args:
        component_name: Component name; also used for the default output
            directory and the data-testid attribute (lowercased).
        figma_node_id: Accepted but unused in this function.  # TODO confirm intent
        output_path: Explicit output file; defaults to
            components/<Name>/<Name>.tsx under the project root.
        project_path: Project root; defaults to the current directory.

    Returns:
        Dict with the written "component_path", the skeleton's props list,
        which template was used, and whether TypeScript was emitted; or a
        dict with an "error" key if the file could not be written.

    NOTE(review): the generated skeleton accepts a ``className`` prop but
    the template never applies it to the rendered <div> — confirm whether
    that is intended.
    """
    root = Path(project_path) if project_path else Path.cwd()
    use_tailwind = any((root / n).exists() for n in
                       ("tailwind.config.js", "tailwind.config.ts", "tailwind.config.mjs", "tailwind.config.cjs"))

    # Determine output path
    if output_path:
        out = Path(output_path)
    else:
        # Default: components/<Name>/<Name>.tsx
        comp_dir = root / "components" / component_name
        out = comp_dir / f"{component_name}.tsx"

    # Determine file extension for template
    is_tsx = out.suffix in (".tsx", ".ts")

    # Build component content
    props_type = f"{component_name}Props"
    if use_tailwind:
        style_attr = 'className="p-4"'
    else:
        style_attr = 'style={{ padding: "1rem" }}'

    if is_tsx:
        content = f"""import React from 'react';

export interface {props_type} {{
  /** Primary content */
  children?: React.ReactNode;
  /** Additional CSS class names */
  className?: string;
}}

export default function {component_name}({{ children, className }}: {props_type}) {{
  return (
    <div {style_attr} data-testid="{component_name.lower()}">
      {{children}}
    </div>
  );
}}
"""
    else:
        content = f"""import React from 'react';

/**
 * @param {{{{ children?: React.ReactNode, className?: string }}}} props
 */
export default function {component_name}({{ children, className }}) {{
  return (
    <div {style_attr} data-testid="{component_name.lower()}">
      {{children}}
    </div>
  );
}}
"""

    # Write file
    try:
        out.parent.mkdir(parents=True, exist_ok=True)
        out.write_text(content)
    except Exception as e:
        return {"tool": "design.generate_component", "error": f"Failed to write: {e}"}

    return {
        "tool": "design.generate_component",
        "status": "ok",
        "component_path": str(out),
        "props": ["children?: React.ReactNode", "className?: string"],
        "template_used": "tailwind" if use_tailwind else "inline-style",
        "typescript": is_tsx,
    }
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
# ---------------------------------------------------------------------------
|
|
383
|
+
# 21. design_generate_tailwind
|
|
384
|
+
# ---------------------------------------------------------------------------
|
|
385
|
+
|
|
386
|
+
def design_generate_tailwind(
    figma_file_key: Optional[str] = None,
    output_path: Optional[str] = None,
    project_path: Optional[str] = None,
) -> Dict[str, Any]:
    """Read existing tailwind.config or generate a default one from detected CSS tokens.

    If any tailwind.config.* exists under the project root it is summarized
    and left untouched ("generated": False).  Otherwise the project's CSS
    custom properties (via design_extract_tokens) are turned into a new
    config written to *output_path* (default: <root>/tailwind.config.js).

    Args:
        figma_file_key: Accepted but unused in this function.  # TODO confirm intent
        output_path: Where to write a generated config.
        project_path: Project root; defaults to the current directory.
    """
    root = Path(project_path) if project_path else Path.cwd()
    out = Path(output_path) if output_path else root / "tailwind.config.js"

    # Check for existing config
    for tw_name in ("tailwind.config.js", "tailwind.config.ts", "tailwind.config.mjs", "tailwind.config.cjs"):
        existing = root / tw_name
        if existing.exists():
            text = _read_text(existing)
            parsed = _parse_tailwind_config(text)
            return {
                "tool": "design.generate_tailwind",
                "status": "ok",
                "config_path": str(existing),
                "colors_count": parsed["colors_count"],
                "spacing_values": parsed["spacing_count"],
                "breakpoints": parsed["breakpoints"],
                "generated": False,
            }

    # Generate from CSS tokens
    tokens_result = design_extract_tokens(project_path=str(root))
    tokens = tokens_result.get("tokens", {})
    colors = tokens.get("colors", [])
    spacing = tokens.get("spacing", [])

    # Build color entries — names are sanitized to JS-safe identifiers and
    # capped (50 colors / 30 spacing values) to keep the config readable.
    color_entries = []
    for c in colors[:50]:
        name = c.get("name", "").lstrip("-").replace("-", "_")
        val = c.get("value", "")
        if name and val:
            safe_name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
            color_entries.append(f"        '{safe_name}': '{val}',")

    spacing_entries = []
    for s in spacing[:30]:
        name = s.get("name", "").lstrip("-").replace("-", "_")
        val = s.get("value", "")
        if name and val:
            safe_name = re.sub(r"[^a-zA-Z0-9_]", "_", name)
            spacing_entries.append(f"        '{safe_name}': '{val}',")

    colors_block = "\n".join(color_entries) if color_entries else "        // No CSS color tokens detected"
    spacing_block = "\n".join(spacing_entries) if spacing_entries else "        // No CSS spacing tokens detected"

    config_content = f"""/** @type {{import('tailwindcss').Config}} */
module.exports = {{
  content: [
    './src/**/*.{{js,ts,jsx,tsx,mdx}}',
    './app/**/*.{{js,ts,jsx,tsx,mdx}}',
    './components/**/*.{{js,ts,jsx,tsx,mdx}}',
    './pages/**/*.{{js,ts,jsx,tsx,mdx}}',
  ],
  theme: {{
    extend: {{
      colors: {{
{colors_block}
      }},
      spacing: {{
{spacing_block}
      }},
    }},
  }},
  plugins: [],
}};
"""

    try:
        out.parent.mkdir(parents=True, exist_ok=True)
        out.write_text(config_content)
    except Exception as e:
        return {"tool": "design.generate_tailwind", "error": f"Failed to write: {e}"}

    return {
        "tool": "design.generate_tailwind",
        "status": "ok",
        "config_path": str(out),
        "colors_count": len(color_entries),
        "spacing_values": len(spacing_entries),
        "breakpoints": [],
        "generated": True,
    }
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
# ---------------------------------------------------------------------------
|
|
477
|
+
# 22. design_validate_responsive
|
|
478
|
+
# ---------------------------------------------------------------------------
|
|
479
|
+
|
|
480
|
+
# Detects an HTML viewport meta tag anywhere in a document.
_VIEWPORT_META_RE = re.compile(r'<meta[^>]*name=["\']viewport["\'][^>]*>', re.IGNORECASE)
# Counts occurrences of relative/fluid CSS sizing units and functions.
_RESPONSIVE_UNITS_RE = re.compile(r"(?:vw|vh|vmin|vmax|%|rem|em|clamp|min\(|max\()")
|
|
482
|
+
|
|
483
|
+
|
|
484
|
+
def design_validate_responsive(
    project_path: str,
    check_types: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Validate responsive design patterns via static analysis (Playwright optional).

    Checks performed:
      * presence of a viewport meta tag (HTML files, then Next.js
        layout/document files),
      * CSS breakpoints / Tailwind config presence,
      * desktop-first vs mobile-first media-query balance,
      * very wide fixed-width declarations (>= 1000px).

    Args:
        project_path: Root directory of the project to scan.
        check_types: Accepted for API compatibility but currently unused —
            all checks always run.  # TODO confirm intended semantics

    Returns:
        Dict with "breakpoints_found", "responsive_issues" (list of
        severity/message/fix dicts), "viewport_meta",
        "responsive_units_count" and "mobile_first"; or an "error" dict
        when the directory does not exist.
    """
    root = Path(project_path)
    if not root.is_dir():
        return {"tool": "design.validate_responsive", "error": f"Directory not found: {root}"}

    issues: List[Dict[str, str]] = []
    breakpoints_found: List[str] = []
    viewport_meta = False
    responsive_units_count = 0

    # Scan plain HTML files for a viewport meta tag first.
    for hf in _find_files(root, [".html", ".htm"]):
        if _VIEWPORT_META_RE.search(_read_text(hf)):
            viewport_meta = True
            break

    # Fall back to Next.js layout/document files, where the viewport is
    # usually configured instead of a raw <meta> tag.
    if not viewport_meta:
        for layout_name in ("layout.tsx", "layout.jsx", "layout.js", "_document.tsx", "_document.jsx", "_app.tsx"):
            for c in list(root.rglob(layout_name))[:5]:
                if "viewport" in _read_text(c).lower():
                    viewport_meta = True
                    break
            if viewport_meta:
                break

    if not viewport_meta:
        issues.append({"severity": "warning", "message": "No viewport meta tag detected", "fix": "Add <meta name='viewport' content='width=device-width, initial-scale=1'>"})

    # Single pass over stylesheets (previously each file was read three
    # times in separate loops).  Fixed-width issues are buffered so the
    # final issue ordering matches the original report order.
    min_width_count = 0
    max_width_count = 0
    fixed_width_issues: List[Dict[str, str]] = []
    for cf in _find_files(root, [".css", ".scss", ".sass"]):
        text = _read_text(cf)
        for bp_val in _MEDIA_QUERY_RE.findall(text):
            bp_val = bp_val.strip()
            if bp_val not in breakpoints_found:
                breakpoints_found.append(bp_val)
        responsive_units_count += len(_RESPONSIVE_UNITS_RE.findall(text))
        min_width_count += len(re.findall(r"min-width\s*:", text))
        max_width_count += len(re.findall(r"max-width\s*:", text))
        # Fixed widths of 4+ digits (>= 1000px) likely overflow small screens.
        for fw in re.findall(r"width\s*:\s*(\d{4,}px)", text):
            fixed_width_issues.append({
                "severity": "warning",
                "message": f"Fixed width {fw} in {cf.name} may cause horizontal scroll on mobile",
                "fix": "Use max-width or responsive units instead",
            })

    # Mobile-first heuristic: a strong excess of max-width queries suggests
    # a desktop-first stylesheet.
    if max_width_count > min_width_count * 2 and max_width_count > 3:
        issues.append({
            "severity": "info",
            "message": f"Desktop-first pattern detected ({max_width_count} max-width vs {min_width_count} min-width)",
            "fix": "Consider mobile-first approach using min-width media queries",
        })

    if not breakpoints_found and not any(
        (root / n).exists() for n in ("tailwind.config.js", "tailwind.config.ts", "tailwind.config.mjs")
    ):
        issues.append({
            "severity": "warning",
            "message": "No CSS breakpoints or Tailwind config detected",
            "fix": "Add responsive breakpoints via media queries or a CSS framework",
        })

    # Appended last to preserve the original report ordering.
    issues.extend(fixed_width_issues)

    return {
        "tool": "design.validate_responsive",
        "status": "ok",
        "breakpoints_found": breakpoints_found,
        "responsive_issues": issues,
        "viewport_meta": viewport_meta,
        "responsive_units_count": responsive_units_count,
        "mobile_first": min_width_count >= max_width_count,
    }
|
|
575
|
+
|
|
576
|
+
|
|
577
|
+
# ---------------------------------------------------------------------------
|
|
578
|
+
# 23. design_component_library
|
|
579
|
+
# ---------------------------------------------------------------------------
|
|
580
|
+
|
|
581
|
+
def design_component_library(
    project_path: str,
    output_format: str = "json",
) -> Dict[str, Any]:
    """Scan for React/Vue/Svelte components and build a catalog.

    Components are discovered by file extension and parsed with the
    per-framework scanners; the catalog is sorted by component name.
    When output_format == "markdown" a rendered summary is also included
    under the "markdown" key.
    """
    root = Path(project_path)
    if not root.is_dir():
        return {"tool": "design.component_library", "error": f"Directory not found: {root}"}

    # Extension -> scanner dispatch; order matches the original scan order.
    scanners = (
        ([".tsx", ".jsx"], _scan_react_component),
        ([".vue"], _scan_vue_component),
        ([".svelte"], _scan_svelte_component),
    )

    catalog: List[Dict[str, Any]] = []
    for extensions, scanner in scanners:
        for source_file in _find_files(root, extensions):
            meta = scanner(source_file, _read_text(source_file))
            if meta:
                catalog.append(meta)

    # Sort by name
    catalog.sort(key=lambda c: c["name"])

    result: Dict[str, Any] = {
        "tool": "design.component_library",
        "status": "ok",
        "components": catalog,
        "total_count": len(catalog),
    }

    if output_format == "markdown":
        lines = [f"# Component Library ({len(catalog)} components)\n"]
        for c in catalog:
            lines.append(f"## {c['name']}")
            lines.append(f"- **Path**: `{c['path']}`")
            lines.append(f"- **Framework**: {c['framework']}")
            if c.get("props"):
                lines.append(f"- **Props**: {', '.join(c['props'][:10])}")
            if c.get("exports"):
                lines.append(f"- **Exports**: {', '.join(c['exports'][:10])}")
            lines.append("")
        result["markdown"] = "\n".join(lines)

    return result
|
|
637
|
+
|
|
638
|
+
|
|
639
|
+
# ---------------------------------------------------------------------------
|
|
640
|
+
# 24. story_generate
|
|
641
|
+
# ---------------------------------------------------------------------------
|
|
642
|
+
|
|
643
|
+
def story_generate(
    component_path: str,
    story_name: Optional[str] = None,
    variants: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Generate a .stories.tsx file for a component (no Storybook required).

    The component file is scanned with _scan_react_component; if that
    fails, the file stem is used as the component name.  A CSF3-style
    story file is written next to the component (same stem with a
    ".stories.tsx" suffix).

    Args:
        component_path: Path to the component source file.
        story_name: Storybook title; defaults to the component name.
        variants: Story export names; defaults to ["Default", "WithChildren"].
            A variant whose name contains "children" gets sample children
            args; every other variant gets empty args.

    Returns:
        Dict with "story_path", "component_name" and "variants_generated";
        or an "error" dict when the component is missing or the story file
        cannot be written.
    """
    comp = Path(component_path)
    if not comp.exists():
        return {"tool": "story.generate", "error": f"Component file not found: {comp}"}

    text = _read_text(comp)
    info = _scan_react_component(comp, text)
    if not info:
        # Try to use filename as component name
        info = {"name": comp.stem, "props": [], "exports": [comp.stem]}

    comp_name = info["name"]
    name = story_name or comp_name
    variant_list = variants or ["Default", "WithChildren"]

    # Determine import path (relative from story file location).
    # with_suffix replaces only the final suffix, so "Button.tsx" becomes
    # "Button.stories.tsx".
    story_path = comp.with_suffix(".stories.tsx")
    import_name = f"./{comp.stem}"

    # Build story content — one exported Story object per variant.
    stories = []
    for v in variant_list:
        # Spaces are stripped so the variant name is a valid JS identifier.
        variant_fn = v.replace(" ", "")
        if v.lower() == "default":
            stories.append(f"""
export const {variant_fn}: Story = {{
  args: {{}},
}};""")
        elif "children" in v.lower() or v.lower() == "withchildren":
            stories.append(f"""
export const {variant_fn}: Story = {{
  args: {{
    children: 'Sample content',
  }},
}};""")
        else:
            stories.append(f"""
export const {variant_fn}: Story = {{
  args: {{}},
}};""")

    content = f"""import type {{ Meta, StoryObj }} from '@storybook/react';
import {comp_name} from '{import_name}';

const meta: Meta<typeof {comp_name}> = {{
  title: '{name}',
  component: {comp_name},
  tags: ['autodocs'],
}};

export default meta;
type Story = StoryObj<typeof {comp_name}>;
{"".join(stories)}
"""

    try:
        story_path.write_text(content)
    except Exception as e:
        return {"tool": "story.generate", "error": f"Failed to write: {e}"}

    return {
        "tool": "story.generate",
        "status": "ok",
        "story_path": str(story_path),
        "component_name": comp_name,
        "variants_generated": variant_list,
    }
|
|
715
|
+
|
|
716
|
+
|
|
717
|
+
# ---------------------------------------------------------------------------
|
|
718
|
+
# 25. story_visual_test
|
|
719
|
+
# ---------------------------------------------------------------------------
|
|
720
|
+
|
|
721
|
+
def story_visual_test(
    url: str,
    project_path: Optional[str] = None,
    threshold: float = 0.05,
) -> Dict[str, Any]:
    """Capture a screenshot of *url* with Playwright and compare it to a baseline.

    Screenshots and baselines are stored under ``<project>/.delimit/visual-baselines``.
    On the first run for a URL the screenshot is saved as the baseline and
    ``passed`` is ``None``; on subsequent runs a rough byte-level diff percentage
    is computed and compared against ``threshold``.

    Args:
        url: Page to screenshot (e.g. a Storybook story URL).
        project_path: Project root; defaults to the current working directory.
        threshold: Maximum allowed diff as a fraction (0.05 == 5%).

    Returns:
        A result dict. When Playwright is unavailable, a ``status: no_playwright``
        payload; on any other failure, a dict with an ``error`` key.
    """
    root = Path(project_path) if project_path else Path.cwd()
    baselines_dir = root / ".delimit" / "visual-baselines"

    if not _has_playwright():
        return {
            "tool": "story.visual_test",
            "status": "no_playwright",
            "message": "Playwright is not installed. Install with: pip install playwright && python -m playwright install chromium",
            "screenshot_path": None,
            "baseline_exists": False,
            "diff_percent": None,
            "next_steps_hint": "Install Playwright for visual regression testing, or use static accessibility checks instead.",
        }

    try:
        from playwright.sync_api import sync_playwright

        baselines_dir.mkdir(parents=True, exist_ok=True)
        # Sanitize URL into a filesystem-safe name (capped to avoid over-long paths).
        safe_name = re.sub(r"[^a-zA-Z0-9]", "_", url)[:100]
        screenshot_path = baselines_dir / f"{safe_name}.png"
        baseline_path = baselines_dir / f"{safe_name}.baseline.png"

        with sync_playwright() as p:
            browser = p.chromium.launch(headless=True)
            page = browser.new_page(viewport={"width": 1280, "height": 720})
            page.goto(url, wait_until="networkidle", timeout=15000)
            page.screenshot(path=str(screenshot_path))
            browser.close()

        baseline_exists = baseline_path.exists()
        diff_percent = None

        if baseline_exists:
            try:
                cur = screenshot_path.read_bytes()
                base = baseline_path.read_bytes()
                if cur == base:
                    diff_percent = 0.0
                else:
                    # Rough byte-level diff over 4-byte chunks of the shared prefix.
                    # NOTE(review): PNG compression makes this a coarse heuristic,
                    # not a true pixel diff — small visual changes can shift many
                    # bytes. Denominator is the actual number of chunks sampled.
                    min_len = min(len(cur), len(base))
                    if min_len > 0:
                        chunk_starts = range(0, min_len, 4)
                        diffs = sum(1 for i in chunk_starts if cur[i:i + 4] != base[i:i + 4])
                        diff_percent = round(diffs / len(chunk_starts) * 100, 2)
                    else:
                        diff_percent = 100.0
            except Exception:
                # Comparison is best-effort; report "unknown" rather than fail.
                diff_percent = None
        else:
            # First run for this URL: current screenshot becomes the baseline.
            # Module-level shutil is used; no local re-import needed.
            shutil.copy2(str(screenshot_path), str(baseline_path))

        passed = diff_percent is not None and diff_percent <= (threshold * 100)

        return {
            "tool": "story.visual_test",
            "status": "ok",
            "screenshot_path": str(screenshot_path),
            "baseline_exists": baseline_exists,
            "baseline_path": str(baseline_path),
            "diff_percent": diff_percent,
            "threshold_percent": threshold * 100,
            "passed": passed if baseline_exists else None,
        }

    except Exception as e:
        return {"tool": "story.visual_test", "error": str(e)}
|
|
799
|
+
|
|
800
|
+
|
|
801
|
+
# ---------------------------------------------------------------------------
|
|
802
|
+
# 26. story_accessibility
|
|
803
|
+
# ---------------------------------------------------------------------------
|
|
804
|
+
|
|
805
|
+
_IMG_NO_ALT_RE = re.compile(r"<img(?![^>]*alt=)[^>]*>", re.IGNORECASE)
|
|
806
|
+
_INPUT_NO_LABEL_RE = re.compile(r"<input(?![^>]*(?:aria-label|aria-labelledby|id=)[^>]*>)[^>]*>", re.IGNORECASE)
|
|
807
|
+
_BUTTON_EMPTY_RE = re.compile(r"<button[^>]*>\s*</button>", re.IGNORECASE)
|
|
808
|
+
_A_NO_HREF_RE = re.compile(r"<a(?![^>]*href=)[^>]*>", re.IGNORECASE)
|
|
809
|
+
_HEADING_SKIP_RE = re.compile(r"<h([1-6])")
|
|
810
|
+
_ARIA_HIDDEN_FOCUSABLE_RE = re.compile(r'aria-hidden=["\']true["\'][^>]*(?:tabindex=["\']0["\']|<button|<a\s)', re.IGNORECASE)
|
|
811
|
+
|
|
812
|
+
|
|
813
|
+
def _scan_markup_issues(text: str, rel: str) -> List[Dict[str, Any]]:
    """Run every heuristic accessibility check over one file's markup text.

    Args:
        text: Raw file contents (HTML/JSX/TSX/Vue/Svelte markup).
        rel: Path string (relative to the project root) recorded on each issue.

    Returns:
        A list of issue dicts with rule, severity, message, file, standard,
        and (for element-level rules) a truncated snippet.
    """
    issues: List[Dict[str, Any]] = []

    # Missing alt on images.
    for m in _IMG_NO_ALT_RE.finditer(text):
        issues.append({
            "rule": "img-alt",
            "severity": "error",
            "message": "Image missing alt attribute",
            "file": rel,
            "standard": "WCAG2A",
            "snippet": m.group()[:120],
        })

    # Inputs without labels; hidden inputs never need one.
    for m in _INPUT_NO_LABEL_RE.finditer(text):
        snippet = m.group()
        if 'type="hidden"' in snippet or "type='hidden'" in snippet:
            continue
        issues.append({
            "rule": "input-label",
            "severity": "error",
            "message": "Input missing associated label or aria-label",
            "file": rel,
            "standard": "WCAG2A",
            "snippet": snippet[:120],
        })

    # Buttons with no text content.
    for m in _BUTTON_EMPTY_RE.finditer(text):
        issues.append({
            "rule": "button-content",
            "severity": "error",
            "message": "Button has no text content or aria-label",
            "file": rel,
            "standard": "WCAG2A",
            "snippet": m.group()[:120],
        })

    # Anchor elements without href.
    for m in _A_NO_HREF_RE.finditer(text):
        issues.append({
            "rule": "link-href",
            "severity": "warning",
            "message": "Anchor element missing href attribute",
            "file": rel,
            "standard": "WCAG2A",
            "snippet": m.group()[:120],
        })

    # Heading level skips (e.g., h1 -> h3 without h2), in document order.
    headings = [int(h) for h in _HEADING_SKIP_RE.findall(text)]
    for prev, cur in zip(headings, headings[1:]):
        if cur > prev + 1:
            issues.append({
                "rule": "heading-order",
                "severity": "warning",
                "message": f"Heading level skipped: h{prev} to h{cur}",
                "file": rel,
                "standard": "WCAG2A",
            })

    # aria-hidden on focusable elements.
    for m in _ARIA_HIDDEN_FOCUSABLE_RE.finditer(text):
        issues.append({
            "rule": "aria-hidden-focusable",
            "severity": "error",
            "message": "Focusable element has aria-hidden='true'",
            "file": rel,
            "standard": "WCAG2AA",
            "snippet": m.group()[:120],
        })

    return issues


def story_accessibility(
    project_path: str,
    standards: str = "WCAG2AA",
) -> Dict[str, Any]:
    """Run accessibility checks by scanning HTML/JSX/TSX for common issues.

    Args:
        project_path: Root directory of the project to scan.
        standards: Conformance level to report ("WCAG2A", "WCAG2AA", "WCAG2AAA").
            Issues tagged at a stricter level than requested are filtered out.

    Returns:
        A result dict with the filtered issue list plus per-file pass/fail and
        error/warning counts, or a dict with an ``error`` key when
        ``project_path`` is not a directory.
    """
    root = Path(project_path)
    if not root.is_dir():
        return {"tool": "story.accessibility", "error": f"Directory not found: {root}"}

    issues: List[Dict[str, Any]] = []
    files_checked = 0

    scan_files = _find_files(root, [".html", ".htm", ".tsx", ".jsx", ".vue", ".svelte"])

    for f in scan_files:
        text = _read_text(f)
        files_checked += 1
        # Prefer a root-relative path for readable reports; fall back to absolute.
        rel = str(f.relative_to(root)) if f.is_relative_to(root) else str(f)
        issues.extend(_scan_markup_issues(text, rel))

    # Keep only issues at or below the requested conformance level
    # (checking WCAG2A skips AA-only rules; AAA includes everything).
    standard_levels = {"WCAG2A": 1, "WCAG2AA": 2, "WCAG2AAA": 3}
    requested_level = standard_levels.get(standards, 2)
    filtered = [i for i in issues if standard_levels.get(i.get("standard", "WCAG2A"), 1) <= requested_level]

    errors = [i for i in filtered if i["severity"] == "error"]
    warnings = [i for i in filtered if i["severity"] == "warning"]
    # A file "fails" if it produced at least one error-severity issue.
    failed_files = {i["file"] for i in errors}

    return {
        "tool": "story.accessibility",
        "status": "ok",
        "standard": standards,
        "issues": filtered,
        "passed_count": files_checked - len(failed_files),
        "failed_count": len(failed_files),
        "error_count": len(errors),
        "warning_count": len(warnings),
        "files_checked": files_checked,
    }
|