@voodocs/cli 0.4.2 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +312 -0
- package/lib/cli/__init__.py +53 -0
- package/lib/cli/benchmark.py +311 -0
- package/lib/cli/fix.py +244 -0
- package/lib/cli/generate.py +310 -0
- package/lib/cli/test_cli.py +215 -0
- package/lib/cli/validate.py +364 -0
- package/lib/darkarts/__init__.py +11 -5
- package/lib/darkarts/annotations/__init__.py +11 -3
- package/lib/darkarts/annotations/darkarts_parser.py +1 -1
- package/lib/darkarts/annotations/types.py +16 -3
- package/lib/darkarts/cli_darkarts.py +1 -1
- package/lib/darkarts/context/__init__.py +11 -3
- package/lib/darkarts/context/ai_integrations.py +7 -21
- package/lib/darkarts/context/commands.py +1 -1
- package/lib/darkarts/context/diagram.py +8 -22
- package/lib/darkarts/context/models.py +7 -22
- package/lib/darkarts/context/module_utils.py +1 -1
- package/lib/darkarts/context/ui.py +1 -1
- package/lib/darkarts/context/validation.py +1 -1
- package/lib/darkarts/context/yaml_utils.py +8 -23
- package/lib/darkarts/core/__init__.py +12 -2
- package/lib/darkarts/core/interface.py +16 -2
- package/lib/darkarts/core/loader.py +17 -2
- package/lib/darkarts/core/plugin.py +16 -3
- package/lib/darkarts/core/registry.py +17 -2
- package/lib/darkarts/exceptions.py +17 -3
- package/lib/darkarts/plugins/voodocs/__init__.py +12 -2
- package/lib/darkarts/plugins/voodocs/ai_native_plugin.py +16 -5
- package/lib/darkarts/plugins/voodocs/annotation_validator.py +16 -3
- package/lib/darkarts/plugins/voodocs/api_spec_generator.py +16 -3
- package/lib/darkarts/plugins/voodocs/documentation_generator.py +16 -3
- package/lib/darkarts/plugins/voodocs/html_exporter.py +16 -3
- package/lib/darkarts/plugins/voodocs/instruction_generator.py +1 -1
- package/lib/darkarts/plugins/voodocs/pdf_exporter.py +16 -3
- package/lib/darkarts/plugins/voodocs/test_generator.py +16 -3
- package/lib/darkarts/telemetry.py +16 -3
- package/lib/darkarts/validation/README.md +147 -0
- package/lib/darkarts/validation/__init__.py +91 -0
- package/lib/darkarts/validation/autofix.py +297 -0
- package/lib/darkarts/validation/benchmark.py +426 -0
- package/lib/darkarts/validation/benchmark_wrapper.py +22 -0
- package/lib/darkarts/validation/config.py +257 -0
- package/lib/darkarts/validation/performance.py +412 -0
- package/lib/darkarts/validation/performance_wrapper.py +37 -0
- package/lib/darkarts/validation/semantic.py +461 -0
- package/lib/darkarts/validation/semantic_wrapper.py +77 -0
- package/lib/darkarts/validation/test_validation.py +160 -0
- package/lib/darkarts/validation/types.py +97 -0
- package/lib/darkarts/validation/watch.py +239 -0
- package/package.json +19 -6
- package/voodocs_cli.py +28 -0
- package/cli.py +0 -1646
- package/lib/darkarts/cli.py +0 -128
|
@@ -0,0 +1,461 @@
|
|
|
1
|
+
"""@darkarts
|
|
2
|
+
⊢semantic-validator:validation.layer2
|
|
3
|
+
∂{ast,re,pathlib,typing,dataclasses}
|
|
4
|
+
⚠{python≥3.7,file:readable,annotation:parseable}
|
|
5
|
+
⊨{∀validate→ValidationResult,∀import→detected,∀dependency→compared,¬modify-files,accurate:ast-based}
|
|
6
|
+
🔒{read-only:files,¬write,¬network,¬exec}
|
|
7
|
+
⚡{O(n)|n=file-size,ast-parsing:linear}
|
|
8
|
+
|
|
9
|
+
Layer 2 Semantic Validation for @darkarts Annotations
|
|
10
|
+
|
|
11
|
+
Validates that annotations accurately reflect the actual code:
|
|
12
|
+
- Dependency validation (∂{} matches actual imports)
|
|
13
|
+
- Import extraction (AST-based, handles all import styles)
|
|
14
|
+
- Dependency parsing (extracts from annotation string)
|
|
15
|
+
- Diff reporting (missing, extra, mismatched dependencies)
|
|
16
|
+
- Batch validation (entire codebase at once)
|
|
17
|
+
- Auto-fix suggestions (how to correct mismatches)
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
import ast
|
|
21
|
+
import re
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
from typing import Set, List, Dict, Optional, Tuple
|
|
24
|
+
from dataclasses import dataclass
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class ValidationResult:
    """Outcome of validating one file's @darkarts annotation.

    Captures the dependency diff between the annotation's ∂{} section and
    the imports actually present in the code, plus any diagnostics collected
    along the way.
    """
    file_path: str
    is_valid: bool
    missing_deps: Set[str]  # imported by the code but absent from ∂{}
    extra_deps: Set[str]  # declared in ∂{} but never imported
    errors: List[str]
    warnings: List[str]
    suggestions: List[str]

    def __str__(self) -> str:
        """Render a human-readable report of this result."""
        if self.is_valid:
            return f"✅ {self.file_path}: Valid"

        report = [f"❌ {self.file_path}: Invalid"]

        if self.missing_deps:
            report.append(f"  Missing from ∂{{}}: {', '.join(sorted(self.missing_deps))}")

        if self.extra_deps:
            report.append(f"  Extra in ∂{{}}: {', '.join(sorted(self.extra_deps))}")

        report.extend(f"  ERROR: {msg}" for msg in self.errors)
        report.extend(f"  WARNING: {msg}" for msg in self.warnings)

        if self.suggestions:
            report.append("  Suggestions:")
            report.extend(f"    • {tip}" for tip in self.suggestions)

        return "\n".join(report)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class ImportExtractor(ast.NodeVisitor):
    """AST visitor to extract all imports from a Python file.

    Records the top-level module name of every import statement. Relative
    imports are mapped back to the enclosing package's name when that name
    can be detected from the file path.
    """

    def __init__(self, file_path: Optional[Path] = None):
        # Top-level module names seen in any import statement.
        self.imports: Set[str] = set()
        # Maps a module's first path segment -> names imported from it.
        self.from_imports: Dict[str, Set[str]] = {}
        self.file_path = file_path
        # Needed to resolve relative imports; None when undetectable.
        self.package_name = self._detect_package_name(file_path) if file_path else None

    def _detect_package_name(self, file_path: Path) -> Optional[str]:
        """Detect the top-level package name from file path.

        Heuristic: tries `lib/<pkg>` and `src/<pkg>` layouts first, then
        falls back to the nearest ancestor containing setup.py/pyproject.toml.
        Returns None when no indicator is found.
        """
        # Look for common package indicators
        parts = file_path.parts

        # Check for lib/darkarts pattern
        if 'lib' in parts:
            lib_idx = parts.index('lib')
            if lib_idx + 1 < len(parts):
                return parts[lib_idx + 1]

        # Check for src/package pattern
        if 'src' in parts:
            src_idx = parts.index('src')
            if src_idx + 1 < len(parts):
                return parts[src_idx + 1]

        # Look for setup.py or pyproject.toml in parent directories
        current = file_path.parent
        while current != current.parent:
            if (current / 'setup.py').exists() or (current / 'pyproject.toml').exists():
                # The directory name is likely the package
                return current.name
            current = current.parent

        return None

    def visit_Import(self, node: ast.Import):
        """Handle 'import module' statements."""
        for alias in node.names:
            # Get the top-level module name (e.g. 'os.path' -> 'os')
            module = alias.name.split('.')[0]
            self.imports.add(module)
        self.generic_visit(node)

    def visit_ImportFrom(self, node: ast.ImportFrom):
        """Handle 'from module import name' statements.

        Relative imports (level > 0) are attributed to the detected package
        name rather than to the relative module path itself.
        """
        if node.level > 0:
            # Relative import (e.g., from ..parser.types import X)
            # Convert to absolute import using package name
            if self.package_name and node.module:
                # Relative import with module (e.g., from ..parser.types)
                module = node.module.split('.')[0]
                self.imports.add(self.package_name)

                # Also track the submodule
                # NOTE(review): keyed on the relative module's first segment,
                # not on package_name — confirm this asymmetry is intended.
                if module not in self.from_imports:
                    self.from_imports[module] = set()
                for alias in node.names:
                    self.from_imports[module].add(alias.name)
            elif self.package_name:
                # Relative import without module (e.g., from . import X)
                self.imports.add(self.package_name)
            # NOTE(review): when the package name could not be detected,
            # relative imports are silently dropped — confirm acceptable.
        elif node.module:
            # Absolute import
            # Get the top-level module name
            module = node.module.split('.')[0]
            self.imports.add(module)

            # Track what was imported from this module
            if module not in self.from_imports:
                self.from_imports[module] = set()

            for alias in node.names:
                self.from_imports[module].add(alias.name)

        self.generic_visit(node)

    def get_all_imports(self) -> Set[str]:
        """Get all unique top-level module names."""
        return self.imports
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def extract_imports(file_path: Path) -> Set[str]:
    """
    Extract all imports from a Python file using AST parsing.

    Handles both absolute and relative imports.

    Args:
        file_path: Path to the Python file

    Returns:
        Set of top-level module names imported in the file

    Raises:
        ValueError: if the file cannot be read or parsed

    Examples:
        >>> extract_imports(Path("myfile.py"))
        {'os', 'sys', 'pathlib', 'typing', 'darkarts'}
    """
    # Keep the try body minimal: only reading and parsing can realistically
    # fail; the original version also wrapped visiting and filtering, which
    # would mask genuine bugs in those steps as "Error parsing".
    try:
        content = file_path.read_text(encoding='utf-8')
        tree = ast.parse(content, filename=str(file_path))
    except SyntaxError as e:
        raise ValueError(f"Syntax error in {file_path}: {e}") from e
    except Exception as e:
        # e.g. OSError (unreadable file) or UnicodeDecodeError
        raise ValueError(f"Error parsing {file_path}: {e}") from e

    extractor = ImportExtractor(file_path)
    extractor.visit(tree)
    imports = extractor.get_all_imports()

    # Filter out standard library modules that are commonly omitted
    # from annotations (optional - can be configured)
    stdlib_to_keep = {
        'typing', 'dataclasses', 'enum', 'abc', 'pathlib',
        'datetime', 'json', 'yaml', 're', 'os', 'sys',
        'time', 'uuid', 'collections', 'itertools', 'functools'
    }

    # Keep all non-stdlib and explicitly listed stdlib modules
    return {
        imp for imp in imports
        if not _is_stdlib_module(imp) or imp in stdlib_to_keep
    }
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def _is_stdlib_module(module_name: str) -> bool:
|
|
197
|
+
"""
|
|
198
|
+
Check if a module is part of Python's standard library.
|
|
199
|
+
|
|
200
|
+
This is a heuristic - not 100% accurate but good enough.
|
|
201
|
+
"""
|
|
202
|
+
# Common stdlib modules
|
|
203
|
+
stdlib = {
|
|
204
|
+
'abc', 'argparse', 'ast', 'asyncio', 'base64', 'bisect',
|
|
205
|
+
'collections', 'copy', 'csv', 'datetime', 'decimal', 'enum',
|
|
206
|
+
'functools', 'glob', 'hashlib', 'heapq', 'html', 'http',
|
|
207
|
+
'io', 'itertools', 'json', 'logging', 'math', 'operator',
|
|
208
|
+
'os', 'pathlib', 'pickle', 'platform', 'queue', 'random',
|
|
209
|
+
're', 'shutil', 'socket', 'sqlite3', 'string', 'struct',
|
|
210
|
+
'subprocess', 'sys', 'tempfile', 'threading', 'time',
|
|
211
|
+
'traceback', 'typing', 'unittest', 'urllib', 'uuid',
|
|
212
|
+
'warnings', 'weakref', 'xml', 'zipfile'
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
return module_name in stdlib
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def parse_dependencies(annotation: str) -> Set[str]:
    """
    Parse the ∂{} section from a @darkarts annotation.

    Args:
        annotation: The full @darkarts annotation string

    Returns:
        Set of dependency module names (reduced to top level)

    Examples:
        >>> annotation = '∂{os,sys,pathlib,typing,darkarts.core}'
        >>> parse_dependencies(annotation)
        {'os', 'sys', 'pathlib', 'typing', 'darkarts'}
    """
    # Locate the ∂{...} section; absence means no declared dependencies.
    match = re.search(r'∂\{([^}]*)\}', annotation)
    if match is None:
        return set()

    # Each comma-separated entry is trimmed and reduced to its top-level
    # module name, e.g. 'darkarts.core.plugin' -> 'darkarts'. Empty entries
    # (including a fully empty section) are dropped.
    return {
        entry.strip().split('.')[0]
        for entry in match.group(1).split(',')
        if entry.strip()
    }
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def extract_annotation(file_path: Path) -> Optional[str]:
    """
    Extract the @darkarts annotation from a Python file.

    The annotation must be the module docstring, i.e. the file's very first
    bytes must be '\"\"\"@darkarts'.

    Args:
        file_path: Path to the Python file

    Returns:
        The annotation string, or None if not found

    Raises:
        ValueError: if the file cannot be read
    """
    try:
        source = file_path.read_text(encoding='utf-8')

        # Only files that open directly with the annotation docstring count.
        if not source.startswith('"""@darkarts'):
            return None

        # Annotation body runs from just past the opening quotes up to the
        # closing triple quote; an unterminated docstring yields None.
        closing = source.find('"""', 3)
        if closing == -1:
            return None

        return source[3:closing]
    except Exception as e:
        raise ValueError(f"Error reading {file_path}: {e}")
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def validate_dependencies(file_path: Path, annotation: str) -> ValidationResult:
    """
    Validate that the ∂{} section matches actual imports in the file.

    Args:
        file_path: Path to the Python file
        annotation: The @darkarts annotation string

    Returns:
        ValidationResult with detailed comparison

    Examples:
        >>> result = validate_dependencies(Path("myfile.py"), annotation)
        >>> if not result.is_valid:
        ...     print(result)
    """
    def _failure(message: str) -> ValidationResult:
        # Shared shape for early-exit error results.
        return ValidationResult(
            file_path=str(file_path),
            is_valid=False,
            missing_deps=set(),
            extra_deps=set(),
            errors=[message],
            warnings=[],
            suggestions=[],
        )

    # Extract actual imports from code
    try:
        actual_imports = extract_imports(file_path)
    except ValueError as e:
        return _failure(str(e))

    # Parse declared dependencies from annotation
    try:
        declared_deps = parse_dependencies(annotation)
    except Exception as e:
        return _failure(f"Failed to parse dependencies: {e}")

    # Diff the two sets in both directions.
    missing_deps = actual_imports - declared_deps
    extra_deps = declared_deps - actual_imports

    errors: List[str] = []
    warnings: List[str] = []
    suggestions: List[str] = []

    if missing_deps:
        suggestions.append(
            f"Add to ∂{{}}: {', '.join(sorted(missing_deps))}"
        )

    if extra_deps:
        suggestions.append(
            f"Remove from ∂{{}}: {', '.join(sorted(extra_deps))}"
        )
        warnings.append(
            f"Declared dependencies not found in imports: {', '.join(sorted(extra_deps))}"
        )

    # Check for common issues
    if 'darkarts' in actual_imports and 'darkarts' not in declared_deps:
        warnings.append(
            "File imports from 'darkarts' package but doesn't declare it"
        )

    return ValidationResult(
        file_path=str(file_path),
        is_valid=not missing_deps and not extra_deps and not errors,
        missing_deps=missing_deps,
        extra_deps=extra_deps,
        errors=errors,
        warnings=warnings,
        suggestions=suggestions,
    )
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
def validate_file(file_path: Path) -> ValidationResult:
    """
    Validate a single file's @darkarts annotation.

    Args:
        file_path: Path to the Python file

    Returns:
        ValidationResult (an error result when no annotation is present)
    """
    annotation = extract_annotation(file_path)

    # Happy path: an annotation exists, so compare it against the code.
    if annotation is not None:
        return validate_dependencies(file_path, annotation)

    # No annotation at all — report that as its own failure mode.
    return ValidationResult(
        file_path=str(file_path),
        is_valid=False,
        missing_deps=set(),
        extra_deps=set(),
        errors=["No @darkarts annotation found"],
        warnings=[],
        suggestions=["Add @darkarts annotation to the file"],
    )
|
|
399
|
+
|
|
400
|
+
|
|
401
|
+
def validate_directory(directory: Path, recursive: bool = True) -> List[ValidationResult]:
    """
    Validate all Python files in a directory.

    Hidden files AND files nested under hidden directories (e.g. .venv/,
    .git/, .tox/) are skipped. Results are returned in sorted path order
    so repeated runs are deterministic.

    Args:
        directory: Path to the directory
        recursive: Whether to search recursively

    Returns:
        List of ValidationResult objects
    """
    pattern = "**/*.py" if recursive else "*.py"
    results: List[ValidationResult] = []

    # sorted() gives a stable, OS-independent order (glob order is arbitrary).
    for py_file in sorted(directory.glob(pattern)):
        # The previous check only looked at the file's own name, so files
        # inside hidden directories (.venv/lib/.../foo.py) were still
        # validated. Inspect every path component below `directory`.
        rel_parts = py_file.relative_to(directory).parts
        if any(part.startswith('.') for part in rel_parts):
            continue  # Skip hidden files and hidden directories

        results.append(validate_file(py_file))

    return results
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def generate_fix_script(result: ValidationResult) -> str:
    """
    Generate a script to automatically fix dependency mismatches.

    Args:
        result: ValidationResult with mismatches

    Returns:
        Python code (as comments) describing how to fix the annotation
    """
    if result.is_valid:
        return "# No fixes needed"

    output = [
        f"# Fix for {result.file_path}",
        "",
        "# Current issues:",
    ]

    if result.missing_deps:
        output.append(f"# Missing: {', '.join(sorted(result.missing_deps))}")

    if result.extra_deps:
        output.append(f"# Extra: {', '.join(sorted(result.extra_deps))}")

    # Suggested ∂{} contents = (currently declared deps minus the extras)
    # plus the imports the annotation was missing. Re-reads the file to
    # recover what is currently declared.
    declared = parse_dependencies(extract_annotation(Path(result.file_path)) or '')
    corrected = result.missing_deps | (declared - result.extra_deps)

    output.append("")
    output.append("# To fix, update the ∂{} section to:")
    output.append(f"# ∂{{{', '.join(sorted(corrected))}}}")

    return "\n".join(output)
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
|
|
461
|
+
# CLI interface removed - use darkarts.validation API or voodocs CLI instead
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""@darkarts
|
|
2
|
+
⊢validation:semantic.wrapper
|
|
3
|
+
∂{pathlib,typing}
|
|
4
|
+
⚠{python≥3.7}
|
|
5
|
+
⊨{∀method→delegates,¬state}
|
|
6
|
+
🔒{read-only}
|
|
7
|
+
⚡{O(1):creation,O(n):directory|n=files}
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
"""
|
|
11
|
+
Wrapper class for semantic validation functions.
|
|
12
|
+
Provides object-oriented interface to functional validation code.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import List
|
|
17
|
+
|
|
18
|
+
from . import semantic
|
|
19
|
+
from .types import ValidationResult
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SemanticValidator:
    """
    Semantic validator for @darkarts annotations.

    Validates that dependencies in ∂{} match actual imports.

    Stateless: every method simply delegates to the functional
    `semantic` module.

    Usage:
        validator = SemanticValidator()
        result = validator.validate_file("myfile.py")
        if not result.is_valid:
            print(f"Validation failed: {result.errors}")
    """

    # NOTE: annotations below are written as strings so the PEP 604 union
    # `str | Path` is never evaluated at class-definition time. Bare
    # `str | Path` raises TypeError on Python < 3.10, which contradicted
    # this file's declared python>=3.7 requirement.

    def validate_file(self, file_path: "str | Path") -> "ValidationResult":
        """
        Validate a single file.

        Args:
            file_path: Path to Python file to validate

        Returns:
            ValidationResult with validation details
        """
        return semantic.validate_file(Path(file_path))

    def validate_directory(self, directory: "str | Path", recursive: bool = True) -> "List[ValidationResult]":
        """
        Validate all Python files in a directory.

        Args:
            directory: Path to directory to validate
            recursive: Whether to recurse into subdirectories

        Returns:
            List of ValidationResult for each file
        """
        return semantic.validate_directory(Path(directory), recursive=recursive)

    def validate_path(self, path: "str | Path", recursive: bool = False) -> "List[ValidationResult]":
        """
        Validate a file or directory.

        Args:
            path: Path to file or directory
            recursive: Whether to recurse into subdirectories (for directories)

        Returns:
            List of ValidationResult (single item for files)

        Raises:
            ValueError: if *path* is neither an existing file nor a directory
        """
        path = Path(path)
        if path.is_file():
            return [self.validate_file(path)]
        elif path.is_dir():
            return self.validate_directory(path, recursive=recursive)
        else:
            raise ValueError(f"Path does not exist: {path}")
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
"""@darkarts
|
|
2
|
+
⊢validation:tests
|
|
3
|
+
∂{unittest,pathlib,tempfile,typing}
|
|
4
|
+
⚠{python≥3.7,pytest:optional}
|
|
5
|
+
⊨{∀test→isolated,∀test→repeatable}
|
|
6
|
+
🔒{read-only:tests}
|
|
7
|
+
⚡{O(n)|n=test-count}
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
"""
|
|
11
|
+
Unit tests for validation module.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import unittest
|
|
15
|
+
import tempfile
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from typing import List
|
|
18
|
+
|
|
19
|
+
# Import validation modules
try:
    # NOTE(review): SemanticValidator appears to be defined in
    # semantic_wrapper.py rather than semantic.py — confirm this import
    # target (and likewise PerformanceTracker vs performance_wrapper.py).
    from .semantic import SemanticValidator
    from .performance import PerformanceTracker
    from .types import ValidationResult, PerformanceResult
except ImportError:
    # Fallback for direct execution (python test_validation.py)
    import sys
    sys.path.insert(0, str(Path(__file__).parent))
    from semantic import SemanticValidator
    from performance import PerformanceTracker
    # The local types.py collides with the stdlib 'types' module, which is
    # already in sys.modules at interpreter startup, so `from types import
    # ValidationResult` would hit the stdlib module and raise ImportError.
    # Load the local file explicitly by path instead.
    import importlib.util
    _types_spec = importlib.util.spec_from_file_location(
        "_validation_types", Path(__file__).parent / "types.py"
    )
    _types_mod = importlib.util.module_from_spec(_types_spec)
    _types_spec.loader.exec_module(_types_mod)
    ValidationResult = _types_mod.ValidationResult
    PerformanceResult = _types_mod.PerformanceResult
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TestSemanticValidator(unittest.TestCase):
    """Test semantic validation."""

    def setUp(self):
        """Set up test fixtures."""
        self.validator = SemanticValidator()
        # TemporaryDirectory + addCleanup removes the directory after each
        # test; the previous tempfile.mkdtemp() leaked one directory per
        # test run.
        self._tmp = tempfile.TemporaryDirectory()
        self.addCleanup(self._tmp.cleanup)
        self.temp_dir = self._tmp.name

    def test_validator_creation(self):
        """Test validator can be created."""
        self.assertIsNotNone(self.validator)

    def test_validate_simple_file(self):
        """Test validation of a simple file."""
        # Annotation declares exactly the modules the code imports.
        test_file = Path(self.temp_dir) / "test.py"
        test_file.write_text('''"""@darkarts
⊢test:module
∂{os,sys}
⚠{}
⊨{}
🔒{}
⚡{O(1)}
"""
import os
import sys

def main():
    print("Hello")
''')

        # Validate
        result = self.validator.validate_file(test_file)

        # Check result
        self.assertIsInstance(result, ValidationResult)
        self.assertTrue(result.is_valid)
        self.assertEqual(len(result.missing_deps), 0)
        self.assertEqual(len(result.extra_deps), 0)

    def test_validate_missing_dependency(self):
        """Test detection of missing dependency."""
        # 'sys' is imported but absent from ∂{} -> must be reported missing.
        test_file = Path(self.temp_dir) / "test_missing.py"
        test_file.write_text('''"""@darkarts
⊢test:module
∂{os}
⚠{}
⊨{}
🔒{}
⚡{O(1)}
"""
import os
import sys

def main():
    print("Hello")
''')

        # Validate
        result = self.validator.validate_file(test_file)

        # Check result
        self.assertFalse(result.is_valid)
        self.assertIn('sys', result.missing_deps)
|
|
98
|
+
|
|
99
|
+
class TestPerformanceTracker(unittest.TestCase):
    """Test performance tracking."""

    def setUp(self):
        """Set up test fixtures."""
        self.tracker = PerformanceTracker()
        # TemporaryDirectory + addCleanup removes the directory after each
        # test; the previous tempfile.mkdtemp() leaked one directory per
        # test run.
        self._tmp = tempfile.TemporaryDirectory()
        self.addCleanup(self._tmp.cleanup)
        self.temp_dir = self._tmp.name

    def test_tracker_creation(self):
        """Test tracker can be created."""
        self.assertIsNotNone(self.tracker)

    def test_analyze_simple_file(self):
        """Test analysis of a simple file."""
        test_file = Path(self.temp_dir) / "test.py"
        test_file.write_text('''"""@darkarts
⊢test:module
∂{}
⚠{}
⊨{}
🔒{}
⚡{O(1)}
"""

def simple_function():
    return 42
''')

        # Analyze
        result = self.tracker.analyze_file(test_file)

        # Check result
        self.assertIsInstance(result, PerformanceResult)
        self.assertTrue(result.is_valid)
|
|
134
|
+
|
|
135
|
+
class TestValidationTypes(unittest.TestCase):
    """Test validation type definitions."""

    def _make_result(self):
        # Minimal valid result; relies on the dataclass defaults for the
        # remaining fields.
        return ValidationResult(
            file_path="test.py",
            is_valid=True,
        )

    def test_validation_result_creation(self):
        """Test ValidationResult can be created."""
        outcome = self._make_result()
        self.assertEqual(outcome.file_path, "test.py")
        self.assertTrue(outcome.is_valid)

    def test_validation_result_to_dict(self):
        """Test ValidationResult can be serialized."""
        payload = self._make_result().to_dict()
        self.assertIsInstance(payload, dict)
        self.assertEqual(payload["file_path"], "test.py")
        self.assertTrue(payload["is_valid"])
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
# Allow running this test module directly: `python test_validation.py`.
if __name__ == "__main__":
    unittest.main()
|