archunitpython 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- archunitpython/__init__.py +45 -0
- archunitpython/common/__init__.py +18 -0
- archunitpython/common/assertion/__init__.py +3 -0
- archunitpython/common/assertion/violation.py +21 -0
- archunitpython/common/error/__init__.py +3 -0
- archunitpython/common/error/errors.py +13 -0
- archunitpython/common/extraction/__init__.py +13 -0
- archunitpython/common/extraction/extract_graph.py +345 -0
- archunitpython/common/extraction/graph.py +39 -0
- archunitpython/common/fluentapi/__init__.py +3 -0
- archunitpython/common/fluentapi/checkable.py +28 -0
- archunitpython/common/logging/__init__.py +3 -0
- archunitpython/common/logging/types.py +18 -0
- archunitpython/common/pattern_matching.py +80 -0
- archunitpython/common/projection/__init__.py +30 -0
- archunitpython/common/projection/cycles/__init__.py +4 -0
- archunitpython/common/projection/cycles/cycle_utils.py +49 -0
- archunitpython/common/projection/cycles/cycles.py +26 -0
- archunitpython/common/projection/cycles/johnsons_apsp.py +110 -0
- archunitpython/common/projection/cycles/model.py +22 -0
- archunitpython/common/projection/cycles/tarjan_scc.py +86 -0
- archunitpython/common/projection/edge_projections.py +36 -0
- archunitpython/common/projection/project_cycles.py +85 -0
- archunitpython/common/projection/project_edges.py +43 -0
- archunitpython/common/projection/project_nodes.py +49 -0
- archunitpython/common/projection/types.py +40 -0
- archunitpython/common/regex_factory.py +76 -0
- archunitpython/common/types.py +29 -0
- archunitpython/common/util/__init__.py +3 -0
- archunitpython/common/util/declaration_detector.py +115 -0
- archunitpython/common/util/logger.py +100 -0
- archunitpython/files/__init__.py +3 -0
- archunitpython/files/assertion/__init__.py +28 -0
- archunitpython/files/assertion/custom_file_logic.py +107 -0
- archunitpython/files/assertion/cycle_free.py +29 -0
- archunitpython/files/assertion/depend_on_files.py +67 -0
- archunitpython/files/assertion/matching_files.py +64 -0
- archunitpython/files/fluentapi/__init__.py +3 -0
- archunitpython/files/fluentapi/files.py +403 -0
- archunitpython/metrics/__init__.py +3 -0
- archunitpython/metrics/assertion/__init__.py +0 -0
- archunitpython/metrics/assertion/metric_thresholds.py +51 -0
- archunitpython/metrics/calculation/__init__.py +0 -0
- archunitpython/metrics/calculation/count.py +148 -0
- archunitpython/metrics/calculation/distance.py +110 -0
- archunitpython/metrics/calculation/lcom.py +177 -0
- archunitpython/metrics/common/__init__.py +19 -0
- archunitpython/metrics/common/types.py +67 -0
- archunitpython/metrics/extraction/__init__.py +0 -0
- archunitpython/metrics/extraction/extract_class_info.py +246 -0
- archunitpython/metrics/fluentapi/__init__.py +3 -0
- archunitpython/metrics/fluentapi/export_utils.py +89 -0
- archunitpython/metrics/fluentapi/metrics.py +589 -0
- archunitpython/metrics/projection/__init__.py +0 -0
- archunitpython/py.typed +0 -0
- archunitpython/slices/__init__.py +3 -0
- archunitpython/slices/assertion/__init__.py +13 -0
- archunitpython/slices/assertion/admissible_edges.py +108 -0
- archunitpython/slices/fluentapi/__init__.py +3 -0
- archunitpython/slices/fluentapi/slices.py +220 -0
- archunitpython/slices/projection/__init__.py +8 -0
- archunitpython/slices/projection/slicing_projections.py +128 -0
- archunitpython/slices/uml/__init__.py +4 -0
- archunitpython/slices/uml/export_diagram.py +31 -0
- archunitpython/slices/uml/generate_rules.py +71 -0
- archunitpython/testing/__init__.py +3 -0
- archunitpython/testing/assertion.py +47 -0
- archunitpython/testing/common/__init__.py +4 -0
- archunitpython/testing/common/color_utils.py +57 -0
- archunitpython/testing/common/violation_factory.py +97 -0
- archunitpython/testing/pytest_plugin/__init__.py +0 -0
- archunitpython-1.0.0.dist-info/METADATA +660 -0
- archunitpython-1.0.0.dist-info/RECORD +75 -0
- archunitpython-1.0.0.dist-info/WHEEL +4 -0
- archunitpython-1.0.0.dist-info/licenses/LICENSE +7 -0
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"""ArchUnitPython - Architecture testing library for Python projects."""
|
|
2
|
+
|
|
3
|
+
__version__ = "0.1.0"
|
|
4
|
+
|
|
5
|
+
# Files API
|
|
6
|
+
# Common
|
|
7
|
+
from archunitpython.common import (
|
|
8
|
+
CheckOptions,
|
|
9
|
+
EmptyTestViolation,
|
|
10
|
+
TechnicalError,
|
|
11
|
+
UserError,
|
|
12
|
+
Violation,
|
|
13
|
+
)
|
|
14
|
+
from archunitpython.common.extraction import clear_graph_cache, extract_graph
|
|
15
|
+
from archunitpython.files import files, project_files
|
|
16
|
+
|
|
17
|
+
# Metrics API
|
|
18
|
+
from archunitpython.metrics import metrics
|
|
19
|
+
|
|
20
|
+
# Slices API
|
|
21
|
+
from archunitpython.slices import project_slices
|
|
22
|
+
|
|
23
|
+
# Testing
|
|
24
|
+
from archunitpython.testing import assert_passes, format_violations
|
|
25
|
+
|
|
26
|
+
__all__ = [
|
|
27
|
+
# Files
|
|
28
|
+
"project_files",
|
|
29
|
+
"files",
|
|
30
|
+
# Slices
|
|
31
|
+
"project_slices",
|
|
32
|
+
# Metrics
|
|
33
|
+
"metrics",
|
|
34
|
+
# Testing
|
|
35
|
+
"assert_passes",
|
|
36
|
+
"format_violations",
|
|
37
|
+
# Common
|
|
38
|
+
"Violation",
|
|
39
|
+
"EmptyTestViolation",
|
|
40
|
+
"CheckOptions",
|
|
41
|
+
"TechnicalError",
|
|
42
|
+
"UserError",
|
|
43
|
+
"extract_graph",
|
|
44
|
+
"clear_graph_cache",
|
|
45
|
+
]
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
from archunitpython.common.assertion.violation import EmptyTestViolation, Violation
|
|
2
|
+
from archunitpython.common.error.errors import TechnicalError, UserError
|
|
3
|
+
from archunitpython.common.fluentapi.checkable import Checkable, CheckOptions
|
|
4
|
+
from archunitpython.common.logging.types import LoggingOptions
|
|
5
|
+
from archunitpython.common.types import Filter, Pattern, PatternMatchingOptions
|
|
6
|
+
|
|
7
|
+
__all__ = [
|
|
8
|
+
"Violation",
|
|
9
|
+
"EmptyTestViolation",
|
|
10
|
+
"TechnicalError",
|
|
11
|
+
"UserError",
|
|
12
|
+
"Checkable",
|
|
13
|
+
"CheckOptions",
|
|
14
|
+
"LoggingOptions",
|
|
15
|
+
"Pattern",
|
|
16
|
+
"Filter",
|
|
17
|
+
"PatternMatchingOptions",
|
|
18
|
+
]
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""Base violation types for architecture rule checks."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Violation:
    """Common base type for all architecture rule violations.

    Carries no state of its own; concrete violation kinds subclass it
    and add their own fields.
    """
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@dataclass
class EmptyTestViolation(Violation):
    """Violation raised when no files match the specified filter patterns.

    Emitted instead of silently passing, so an over-narrow filter cannot make
    a rule vacuously succeed (presumably suppressed by
    CheckOptions.allow_empty_tests — confirm against the assertion layer).
    """

    # The filter objects that matched nothing.
    filters: list[Any]
    # Human-readable explanation of the empty match.
    message: str
    # True when the failing rule was a negated ("should not") check.
    is_negated: bool = False
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
from archunitpython.common.extraction.extract_graph import (
|
|
2
|
+
clear_graph_cache,
|
|
3
|
+
extract_graph,
|
|
4
|
+
)
|
|
5
|
+
from archunitpython.common.extraction.graph import Edge, Graph, ImportKind
|
|
6
|
+
|
|
7
|
+
__all__ = [
|
|
8
|
+
"Edge",
|
|
9
|
+
"Graph",
|
|
10
|
+
"ImportKind",
|
|
11
|
+
"extract_graph",
|
|
12
|
+
"clear_graph_cache",
|
|
13
|
+
]
|
|
@@ -0,0 +1,345 @@
|
|
|
1
|
+
"""Extract dependency graph from Python projects using AST analysis."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import ast
|
|
6
|
+
import os
|
|
7
|
+
|
|
8
|
+
from archunitpython.common.extraction.graph import Edge, Graph, ImportKind
|
|
9
|
+
from archunitpython.common.fluentapi.checkable import CheckOptions
|
|
10
|
+
|
|
11
|
+
# Cache of extracted graphs, keyed by absolute project path.
# Populated by extract_graph() and emptied by clear_graph_cache().
_graph_cache: dict[str, Graph] = {}

# Default fnmatch-style name patterns skipped during file discovery:
# virtualenvs, VCS/tool caches, and build artifacts.
_DEFAULT_EXCLUDE = [
    "__pycache__",
    ".venv",
    "venv",
    ".env",
    "node_modules",
    ".git",
    ".mypy_cache",
    ".pytest_cache",
    ".ruff_cache",
    "dist",
    "build",
    "*.egg-info",
]
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def clear_graph_cache(options: CheckOptions | None = None) -> None:
    """Clear the cached dependency graphs.

    Args:
        options: Accepted for API symmetry with extract_graph; currently
            unused by this function.
    """
    _graph_cache.clear()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def extract_graph(
    project_path: str | None = None,
    *,
    exclude_patterns: list[str] | None = None,
    options: CheckOptions | None = None,
) -> Graph:
    """Extract a dependency graph from a Python project.

    Scans every .py file under the project directory, parses its imports,
    and resolves them into Edge objects. Results are cached per absolute
    project path.

    Args:
        project_path: Root directory of the project to analyze.
            Defaults to the current working directory.
        exclude_patterns: Directory/file names to exclude.
            Defaults to common non-source directories.
        options: Check options; options.clear_cache forces re-extraction.

    Returns:
        List of Edge objects representing import relationships.
    """
    root = os.path.abspath(project_path if project_path is not None else os.getcwd())

    if options is not None and options.clear_cache:
        _graph_cache.pop(root, None)

    cached = _graph_cache.get(root)
    if cached is not None:
        return cached

    graph = _extract_graph_uncached(root, exclude_patterns)
    _graph_cache[root] = graph
    return graph
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _extract_graph_uncached(
    project_path: str,
    exclude_patterns: list[str] | None = None,
) -> Graph:
    """Extract graph without caching.

    Walks the project, emits one self-edge per file (so every file appears
    as a node even when it imports nothing) plus one edge per resolved
    import, then merges duplicate edges.

    Args:
        project_path: Absolute root directory of the project.
        exclude_patterns: Name patterns to skip; defaults to _DEFAULT_EXCLUDE.

    Returns:
        Merged list of Edge objects.
    """
    excludes = exclude_patterns if exclude_patterns is not None else _DEFAULT_EXCLUDE
    py_files = _find_python_files(project_path, excludes)

    # PERF FIX: the original rebuilt this normalized set inside the
    # per-import loop, making extraction O(files * imports). Build it once.
    normalized_files = {_normalize(f) for f in py_files}

    edges: list[Edge] = []
    for file_path in py_files:
        source = _normalize(file_path)

        # Self-referencing edge ensures the file shows up as a node.
        edges.append(Edge(source=source, target=source, external=False))

        # Extract and resolve imports.
        for module_name, import_kind in _extract_imports(file_path):
            resolved, is_external = _resolve_import(
                module_name, file_path, project_path, import_kind
            )
            if not resolved or resolved == source:
                continue
            # A target that resolved to a path outside the scanned file set
            # is effectively external, whatever resolution thought.
            if not is_external and resolved not in normalized_files:
                is_external = True
            edges.append(
                Edge(
                    source=source,
                    target=resolved,
                    external=is_external,
                    import_kinds=(import_kind,),
                )
            )

    return _merge_edges(edges)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def _normalize(path: str) -> str:
|
|
119
|
+
"""Normalize a file path to use forward slashes."""
|
|
120
|
+
return path.replace("\\", "/")
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def _find_python_files(root: str, exclude: list[str]) -> list[str]:
    """Recursively collect absolute paths of all .py files under *root*.

    Directories whose name matches an exclude pattern are pruned from the
    walk; matching filenames are skipped as well.
    """
    found: list[str] = []
    for dirpath, dirnames, filenames in os.walk(root):
        # Prune excluded directories in place so os.walk never descends
        # into them.
        dirnames[:] = [name for name in dirnames if not _should_exclude(name, exclude)]

        found.extend(
            os.path.abspath(os.path.join(dirpath, name))
            for name in filenames
            if name.endswith(".py") and not _should_exclude(name, exclude)
        )
    return found
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def _should_exclude(name: str, patterns: list[str]) -> bool:
|
|
143
|
+
"""Check if a name matches any exclude pattern."""
|
|
144
|
+
import fnmatch
|
|
145
|
+
|
|
146
|
+
for pattern in patterns:
|
|
147
|
+
if fnmatch.fnmatch(name, pattern):
|
|
148
|
+
return True
|
|
149
|
+
return False
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def _extract_imports(file_path: str) -> list[tuple[str, ImportKind]]:
    """Parse a Python file and extract all import statements.

    Returns list of (module_name, import_kind) tuples. Unreadable or
    syntactically invalid files yield an empty list.
    """
    try:
        with open(file_path, "r", encoding="utf-8", errors="replace") as handle:
            source = handle.read()
    except OSError:
        return []

    try:
        tree = ast.parse(source, filename=file_path)
    except SyntaxError:
        return []

    type_checking_ranges = _find_type_checking_ranges(tree)
    found: list[tuple[str, ImportKind]] = []

    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            is_type = _in_type_checking(node, type_checking_ranges)
            kind = ImportKind.TYPE_IMPORT if is_type else ImportKind.IMPORT
            found.extend((alias.name, kind) for alias in node.names)

        elif isinstance(node, ast.ImportFrom):
            is_type = _in_type_checking(node, type_checking_ranges)
            if node.level:
                # Relative import: encode the level as leading dots.
                kind = ImportKind.TYPE_IMPORT if is_type else ImportKind.RELATIVE_IMPORT
                found.append(("." * node.level + (node.module or ""), kind))
            elif node.module:
                kind = ImportKind.TYPE_IMPORT if is_type else ImportKind.FROM_IMPORT
                found.append((node.module, kind))

    return found
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def _find_type_checking_ranges(tree: ast.Module) -> list[tuple[int, int]]:
|
|
195
|
+
"""Find line ranges of TYPE_CHECKING blocks."""
|
|
196
|
+
ranges: list[tuple[int, int]] = []
|
|
197
|
+
|
|
198
|
+
for node in ast.walk(tree):
|
|
199
|
+
if isinstance(node, ast.If):
|
|
200
|
+
# Check for `if TYPE_CHECKING:` pattern
|
|
201
|
+
test = node.test
|
|
202
|
+
is_type_checking = False
|
|
203
|
+
|
|
204
|
+
if isinstance(test, ast.Name) and test.id == "TYPE_CHECKING":
|
|
205
|
+
is_type_checking = True
|
|
206
|
+
elif isinstance(test, ast.Attribute) and test.attr == "TYPE_CHECKING":
|
|
207
|
+
is_type_checking = True
|
|
208
|
+
|
|
209
|
+
if is_type_checking and node.body:
|
|
210
|
+
start = node.body[0].lineno
|
|
211
|
+
end = max(
|
|
212
|
+
getattr(n, "end_lineno", n.lineno)
|
|
213
|
+
for n in node.body
|
|
214
|
+
if hasattr(n, "lineno")
|
|
215
|
+
)
|
|
216
|
+
ranges.append((start, end))
|
|
217
|
+
|
|
218
|
+
return ranges
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def _in_type_checking(
|
|
222
|
+
node: ast.AST, ranges: list[tuple[int, int]]
|
|
223
|
+
) -> bool:
|
|
224
|
+
"""Check if a node is inside a TYPE_CHECKING block."""
|
|
225
|
+
if not hasattr(node, "lineno"):
|
|
226
|
+
return False
|
|
227
|
+
lineno = node.lineno
|
|
228
|
+
return any(start <= lineno <= end for start, end in ranges)
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
def _resolve_import(
    import_name: str,
    source_file: str,
    project_root: str,
    kind: ImportKind,
) -> tuple[str, bool]:
    """Resolve an import name to an absolute file path.

    Returns (resolved_path, is_external), with the path normalized to
    forward slashes.
    """
    is_relative = import_name.startswith(".") and kind in (
        ImportKind.RELATIVE_IMPORT,
        ImportKind.TYPE_IMPORT,
    )
    if is_relative:
        return _resolve_relative_import(import_name, source_file, project_root)

    # Absolute import: attempt to locate it inside the project.
    return _resolve_absolute_import(import_name, project_root)
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def _resolve_relative_import(
    import_name: str,
    source_file: str,
    project_root: str,
) -> tuple[str, bool]:
    """Resolve a relative import (leading dots) against its source file.

    Returns (resolved_path, is_external); targets that cannot be found on
    disk are reported as external with a best-guess .py path.
    """
    # Separate the leading dots from the trailing module path.
    module_part = import_name.lstrip(".")
    dots = len(import_name) - len(module_part)

    # One dot means "current package"; each extra dot climbs one directory.
    base_dir = os.path.dirname(os.path.abspath(source_file))
    for _ in range(dots - 1):
        base_dir = os.path.dirname(base_dir)

    if module_part:
        candidate_base = os.path.join(base_dir, *module_part.split("."))
    else:
        candidate_base = base_dir

    # Prefer a plain module file, then a package __init__.py.
    candidates = (
        candidate_base + ".py",
        os.path.join(candidate_base, "__init__.py"),
    )
    for candidate in candidates:
        if os.path.isfile(candidate):
            return _normalize(os.path.abspath(candidate)), False

    return _normalize(candidate_base + ".py"), True
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def _resolve_absolute_import(
    import_name: str,
    project_root: str,
) -> tuple[str, bool]:
    """Resolve an absolute import within the project.

    Tries progressively shorter prefixes of the dotted name, first under
    project_root and then under its parent (the parent handles the case
    where project_root is itself a package and imports use the package
    name as prefix).

    Returns (resolved_path, is_external); names not found in the project
    are returned verbatim and flagged external.
    """
    parts = import_name.split(".")

    search_roots = [project_root]
    parent = os.path.dirname(project_root)
    if parent and parent != project_root:
        search_roots.append(parent)

    # Hoisted loop invariant: the original recomputed this normalized
    # prefix on every candidate hit.
    normalized_root = _normalize(os.path.abspath(project_root))

    for root in search_roots:
        for i in range(len(parts), 0, -1):
            candidate_base = os.path.join(root, *parts[:i])
            # Dedup of the original's copy-pasted branches: try as a
            # module file first, then as a package __init__.py.
            for candidate in (
                candidate_base + ".py",
                os.path.join(candidate_base, "__init__.py"),
            ):
                if os.path.isfile(candidate):
                    resolved = _normalize(os.path.abspath(candidate))
                    # Only count as internal if it lies inside project_root.
                    return resolved, not resolved.startswith(normalized_root)

    # Not found in project → external.
    return import_name, True
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def _merge_edges(edges: list[Edge]) -> list[Edge]:
    """Merge edges sharing (source, target), unioning their import kinds."""
    merged: dict[tuple[str, str], Edge] = {}

    for edge in edges:
        key = (edge.source, edge.target)
        previous = merged.get(key)
        if previous is None:
            merged[key] = edge
        else:
            kinds = set(previous.import_kinds) | set(edge.import_kinds)
            merged[key] = Edge(
                source=edge.source,
                target=edge.target,
                external=edge.external,
                import_kinds=tuple(sorted(kinds, key=lambda kind: kind.value)),
            )

    return list(merged.values())
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""Graph model for dependency analysis."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from enum import Enum
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class ImportKind(Enum):
    """Types of Python import statements.

    TYPE_IMPORT overrides the syntactic kind when the statement sits
    inside an `if TYPE_CHECKING:` block.
    """

    IMPORT = "import"  # import foo
    FROM_IMPORT = "from_import"  # from foo import bar
    RELATIVE_IMPORT = "relative"  # from . import bar / from ..foo import bar
    DYNAMIC_IMPORT = "dynamic"  # __import__('foo') / importlib.import_module()
    TYPE_IMPORT = "type"  # inside TYPE_CHECKING block
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass(frozen=True)
class Edge:
    """A dependency edge between two files.

    Represents an import relationship: source imports target. Frozen, so
    instances are hashable and safe to deduplicate.
    """

    source: str
    """Absolute path of the importing file."""

    target: str
    """Absolute path of the imported file (or module name if external)."""

    external: bool
    """True if the target is an external dependency (not part of the project)."""

    import_kinds: tuple[ImportKind, ...] = ()
    """The types of import statements creating this edge."""


# A dependency graph is a flat list of edges; files with no imports are
# represented by self-referencing edges added during extraction.
Graph = list[Edge]
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"""Checkable protocol and CheckOptions for architecture rule execution."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Protocol
|
|
7
|
+
|
|
8
|
+
from archunitpython.common.assertion.violation import Violation
|
|
9
|
+
from archunitpython.common.logging.types import LoggingOptions
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass(frozen=True)
class CheckOptions:
    """Options for controlling rule check execution."""

    # When True, a rule that matches zero files passes instead of
    # reporting an EmptyTestViolation (presumably — confirm against the
    # assertion layer).
    allow_empty_tests: bool = False
    # Optional logging configuration; None leaves logging unconfigured.
    logging: LoggingOptions | None = None
    # When True, extract_graph drops its cached graph before extracting.
    clear_cache: bool = False
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class Checkable(Protocol):
    """Protocol for any architecture rule that can be checked.

    All fluent API chains ultimately produce a Checkable whose check()
    method executes the rule and returns a list of violations (empty = pass).
    """

    # Executes the rule; an empty list means the rule passed.
    def check(self, options: CheckOptions | None = None) -> list[Violation]: ...
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Logging configuration types."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Literal
|
|
7
|
+
|
|
8
|
+
# Severity levels accepted by LoggingOptions.level.
LogLevel = Literal["debug", "info", "warn", "error"]


@dataclass(frozen=True)
class LoggingOptions:
    """Options for controlling logging during architecture checks."""

    # NOTE(review): field semantics inferred from names — confirm against
    # the logger implementation (common/util/logger.py).
    enabled: bool = False
    level: LogLevel = "info"
    log_file: bool = False
    append_to_log_file: bool = False
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
"""Pattern matching functions for file paths and class names."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from archunitpython.common.types import Filter
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def normalize_path(path: str) -> str:
    """Normalize a file path by converting backslashes to forward slashes."""
    return "/".join(path.split("\\"))
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def extract_filename(file_path: str) -> str:
    """Extract the filename (final path component) from a file path."""
    # str.split never yields an empty list, so the final component always
    # exists; rsplit keeps only the part after the last separator.
    return file_path.replace("\\", "/").rsplit("/", 1)[-1]
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def path_without_filename(file_path: str) -> str:
    """Extract the directory portion of a file path (without the filename).

    Returns "" when the path has no directory component.
    """
    head, _, _ = file_path.replace("\\", "/").rpartition("/")
    return head
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def matches_pattern(file_path: str, filter_: Filter) -> bool:
    """Check if a file path matches a single filter.

    The filter's target option selects which part of the path is tested:
    - 'filename': only the filename
    - 'path': the full normalized path
    - 'path-no-filename': the directory portion only
    - anything else (e.g. 'classname'): falls back to the full path
    """
    extractors = {
        "filename": extract_filename,
        "path": normalize_path,
        "path-no-filename": path_without_filename,
    }
    extractor = extractors.get(filter_.options.target, normalize_path)
    return filter_.regexp.search(extractor(file_path)) is not None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def matches_pattern_classname(
    class_name: str, file_path: str, filter_: Filter
) -> bool:
    """Check if a class/file matches a filter, supporting classname target."""
    target = filter_.options.target

    if target == "classname":
        subject = class_name
    elif target == "filename":
        subject = extract_filename(file_path)
    elif target == "path-no-filename":
        subject = path_without_filename(file_path)
    else:
        # 'path' and any unrecognized target both test the full
        # normalized path.
        subject = normalize_path(file_path)

    return filter_.regexp.search(subject) is not None
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def matches_all_patterns(file_path: str, filters: list[Filter]) -> bool:
    """Check if a file path matches ALL filters (AND logic).

    Vacuously True for an empty filter list.
    """
    for current in filters:
        if not matches_pattern(file_path, current):
            return False
    return True
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def matches_any_pattern(file_path: str, filters: list[Filter]) -> bool:
    """Check if a file path matches ANY filter (OR logic).

    False for an empty filter list.
    """
    for current in filters:
        if matches_pattern(file_path, current):
            return True
    return False
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
from archunitpython.common.projection.edge_projections import per_edge, per_internal_edge
|
|
2
|
+
from archunitpython.common.projection.project_cycles import (
|
|
3
|
+
ProjectedCycles,
|
|
4
|
+
project_cycles,
|
|
5
|
+
project_internal_cycles,
|
|
6
|
+
)
|
|
7
|
+
from archunitpython.common.projection.project_edges import project_edges
|
|
8
|
+
from archunitpython.common.projection.project_nodes import project_to_nodes
|
|
9
|
+
from archunitpython.common.projection.types import (
|
|
10
|
+
MapFunction,
|
|
11
|
+
MappedEdge,
|
|
12
|
+
ProjectedEdge,
|
|
13
|
+
ProjectedGraph,
|
|
14
|
+
ProjectedNode,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
__all__ = [
|
|
18
|
+
"MapFunction",
|
|
19
|
+
"MappedEdge",
|
|
20
|
+
"ProjectedCycles",
|
|
21
|
+
"ProjectedEdge",
|
|
22
|
+
"ProjectedGraph",
|
|
23
|
+
"ProjectedNode",
|
|
24
|
+
"per_edge",
|
|
25
|
+
"per_internal_edge",
|
|
26
|
+
"project_cycles",
|
|
27
|
+
"project_edges",
|
|
28
|
+
"project_internal_cycles",
|
|
29
|
+
"project_to_nodes",
|
|
30
|
+
]
|