archunitpython 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. archunitpython/__init__.py +45 -0
  2. archunitpython/common/__init__.py +18 -0
  3. archunitpython/common/assertion/__init__.py +3 -0
  4. archunitpython/common/assertion/violation.py +21 -0
  5. archunitpython/common/error/__init__.py +3 -0
  6. archunitpython/common/error/errors.py +13 -0
  7. archunitpython/common/extraction/__init__.py +13 -0
  8. archunitpython/common/extraction/extract_graph.py +345 -0
  9. archunitpython/common/extraction/graph.py +39 -0
  10. archunitpython/common/fluentapi/__init__.py +3 -0
  11. archunitpython/common/fluentapi/checkable.py +28 -0
  12. archunitpython/common/logging/__init__.py +3 -0
  13. archunitpython/common/logging/types.py +18 -0
  14. archunitpython/common/pattern_matching.py +80 -0
  15. archunitpython/common/projection/__init__.py +30 -0
  16. archunitpython/common/projection/cycles/__init__.py +4 -0
  17. archunitpython/common/projection/cycles/cycle_utils.py +49 -0
  18. archunitpython/common/projection/cycles/cycles.py +26 -0
  19. archunitpython/common/projection/cycles/johnsons_apsp.py +110 -0
  20. archunitpython/common/projection/cycles/model.py +22 -0
  21. archunitpython/common/projection/cycles/tarjan_scc.py +86 -0
  22. archunitpython/common/projection/edge_projections.py +36 -0
  23. archunitpython/common/projection/project_cycles.py +85 -0
  24. archunitpython/common/projection/project_edges.py +43 -0
  25. archunitpython/common/projection/project_nodes.py +49 -0
  26. archunitpython/common/projection/types.py +40 -0
  27. archunitpython/common/regex_factory.py +76 -0
  28. archunitpython/common/types.py +29 -0
  29. archunitpython/common/util/__init__.py +3 -0
  30. archunitpython/common/util/declaration_detector.py +115 -0
  31. archunitpython/common/util/logger.py +100 -0
  32. archunitpython/files/__init__.py +3 -0
  33. archunitpython/files/assertion/__init__.py +28 -0
  34. archunitpython/files/assertion/custom_file_logic.py +107 -0
  35. archunitpython/files/assertion/cycle_free.py +29 -0
  36. archunitpython/files/assertion/depend_on_files.py +67 -0
  37. archunitpython/files/assertion/matching_files.py +64 -0
  38. archunitpython/files/fluentapi/__init__.py +3 -0
  39. archunitpython/files/fluentapi/files.py +403 -0
  40. archunitpython/metrics/__init__.py +3 -0
  41. archunitpython/metrics/assertion/__init__.py +0 -0
  42. archunitpython/metrics/assertion/metric_thresholds.py +51 -0
  43. archunitpython/metrics/calculation/__init__.py +0 -0
  44. archunitpython/metrics/calculation/count.py +148 -0
  45. archunitpython/metrics/calculation/distance.py +110 -0
  46. archunitpython/metrics/calculation/lcom.py +177 -0
  47. archunitpython/metrics/common/__init__.py +19 -0
  48. archunitpython/metrics/common/types.py +67 -0
  49. archunitpython/metrics/extraction/__init__.py +0 -0
  50. archunitpython/metrics/extraction/extract_class_info.py +246 -0
  51. archunitpython/metrics/fluentapi/__init__.py +3 -0
  52. archunitpython/metrics/fluentapi/export_utils.py +89 -0
  53. archunitpython/metrics/fluentapi/metrics.py +589 -0
  54. archunitpython/metrics/projection/__init__.py +0 -0
  55. archunitpython/py.typed +0 -0
  56. archunitpython/slices/__init__.py +3 -0
  57. archunitpython/slices/assertion/__init__.py +13 -0
  58. archunitpython/slices/assertion/admissible_edges.py +108 -0
  59. archunitpython/slices/fluentapi/__init__.py +3 -0
  60. archunitpython/slices/fluentapi/slices.py +220 -0
  61. archunitpython/slices/projection/__init__.py +8 -0
  62. archunitpython/slices/projection/slicing_projections.py +128 -0
  63. archunitpython/slices/uml/__init__.py +4 -0
  64. archunitpython/slices/uml/export_diagram.py +31 -0
  65. archunitpython/slices/uml/generate_rules.py +71 -0
  66. archunitpython/testing/__init__.py +3 -0
  67. archunitpython/testing/assertion.py +47 -0
  68. archunitpython/testing/common/__init__.py +4 -0
  69. archunitpython/testing/common/color_utils.py +57 -0
  70. archunitpython/testing/common/violation_factory.py +97 -0
  71. archunitpython/testing/pytest_plugin/__init__.py +0 -0
  72. archunitpython-1.0.0.dist-info/METADATA +660 -0
  73. archunitpython-1.0.0.dist-info/RECORD +75 -0
  74. archunitpython-1.0.0.dist-info/WHEEL +4 -0
  75. archunitpython-1.0.0.dist-info/licenses/LICENSE +7 -0
@@ -0,0 +1,110 @@
1
+ """Robert C. Martin's distance metrics: Abstractness, Instability, Distance."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+
7
+ from archunitpython.metrics.common.types import FileAnalysisResult
8
+
9
+
10
@dataclass
class DistanceMetrics:
    """Robert C. Martin distance metrics for a single file/module."""

    abstractness: float  # A = Na / N (abstract classes + protocols over total types)
    instability: float  # I = Ce / (Ca + Ce); 0.5 when the file has no coupling
    distance: float  # D = |A + I - 1|, distance from the main sequence
    coupling_factor: float  # CF = (Ca + Ce) / max possible coupling for project size
    normalized_distance: float  # currently identical to `distance`
    in_zone_of_pain: bool  # concrete and stable: A < 0.25 and I < 0.25
    in_zone_of_uselessness: bool  # abstract and unstable: A > 0.75 and I > 0.75
21
+
22
+
23
@dataclass
class ProjectDistanceMetrics:
    """Project-wide distance metric summary."""

    average_abstractness: float  # mean A over all analyzed files
    average_instability: float  # mean I over all analyzed files
    average_distance: float  # mean D over all analyzed files
    files_in_zone_of_pain: int  # count of files with A < 0.25 and I < 0.25
    files_in_zone_of_uselessness: int  # count of files with A > 0.75 and I > 0.75
    total_files: int  # number of files analyzed
33
+
34
+
35
def calculate_file_distance_metrics(
    file_analysis: FileAnalysisResult,
    all_files: list[FileAnalysisResult],
) -> DistanceMetrics:
    """Calculate Robert C. Martin distance metrics for a single file.

    Args:
        file_analysis: Analysis result for the target file.
        all_files: All file analysis results (used to scale the coupling
            factor by project size).

    Returns:
        DistanceMetrics with abstractness (A), instability (I), distance
        from the main sequence (D = |A + I - 1|), and zone flags.
    """
    # Abstractness: ratio of abstract types (ABCs + Protocols) to total types.
    total_types = file_analysis.total_types
    abstract_types = file_analysis.abstract_classes + file_analysis.protocols
    abstractness = abstract_types / total_types if total_types > 0 else 0.0

    # Coupling: efferent (Ce, outgoing) and afferent (Ca, incoming),
    # aggregated over the classes declared in this file.
    ce = sum(c.efferent_coupling for c in file_analysis.classes)
    ca = sum(c.afferent_coupling for c in file_analysis.classes)
    total_coupling = ca + ce

    # Instability I = Ce / (Ca + Ce); neutral 0.5 when there is no coupling.
    instability = ce / total_coupling if total_coupling > 0 else 0.5

    # Distance from the main sequence.
    distance = abs(abstractness + instability - 1)

    # Coupling factor, scaled by the maximum possible coupling for the
    # project size. max(1, ...) * 2 is always >= 2, so the former
    # `if max_coupling > 0` guard was dead code and has been removed.
    max_coupling = max(1, len(all_files) - 1) * 2
    coupling_factor = total_coupling / max_coupling

    # NOTE(review): normalized_distance is currently identical to distance;
    # kept as a separate field for interface compatibility.
    normalized_distance = distance

    # Zone detection: "pain" = concrete & stable, "uselessness" = abstract & unstable.
    in_zone_of_pain = abstractness < 0.25 and instability < 0.25
    in_zone_of_uselessness = abstractness > 0.75 and instability > 0.75

    return DistanceMetrics(
        abstractness=abstractness,
        instability=instability,
        distance=distance,
        coupling_factor=coupling_factor,
        normalized_distance=normalized_distance,
        in_zone_of_pain=in_zone_of_pain,
        in_zone_of_uselessness=in_zone_of_uselessness,
    )
83
+
84
+
85
def calculate_distance_metrics_for_project(
    files: list[FileAnalysisResult],
) -> ProjectDistanceMetrics:
    """Calculate project-wide distance metric summary."""
    if not files:
        # Empty project: report an all-zero summary.
        return ProjectDistanceMetrics(
            average_abstractness=0,
            average_instability=0,
            average_distance=0,
            files_in_zone_of_pain=0,
            files_in_zone_of_uselessness=0,
            total_files=0,
        )

    per_file = [calculate_file_distance_metrics(item, files) for item in files]
    count = len(per_file)

    # Booleans sum as ints, so the zone counters are plain counts.
    return ProjectDistanceMetrics(
        average_abstractness=sum(m.abstractness for m in per_file) / count,
        average_instability=sum(m.instability for m in per_file) / count,
        average_distance=sum(m.distance for m in per_file) / count,
        files_in_zone_of_pain=sum(m.in_zone_of_pain for m in per_file),
        files_in_zone_of_uselessness=sum(
            m.in_zone_of_uselessness for m in per_file
        ),
        total_files=len(files),
    )
@@ -0,0 +1,177 @@
1
+ """LCOM (Lack of Cohesion of Methods) metrics - 8 variants."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from archunitpython.metrics.common.types import ClassInfo
6
+
7
+
8
class LCOM96a:
    """LCOM96a: (1/(1-m)) * ((1/a) * sum(mu(Ai)) - m)."""

    name = "LCOM96a"
    description = "Lack of Cohesion of Methods (96a variant)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return the LCOM96a value; 0.0 for degenerate classes."""
        method_count = len(class_info.methods)
        attr_count = len(class_info.fields)
        if method_count <= 1 or attr_count == 0:
            # Formula is undefined for m <= 1 (division by 1-m) or no fields.
            return 0.0

        mu_sum = sum(len(attr.accessed_by) for attr in class_info.fields)
        return (1 / (1 - method_count)) * ((1 / attr_count) * mu_sum - method_count)
22
+
23
+
24
class LCOM96b:
    """LCOM96b: (1/a) * sum((1/m) * (m - mu(Ai))). Normalized to [0,1]."""

    name = "LCOM96b"
    description = "Lack of Cohesion of Methods (96b variant, 0-1 normalized)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return LCOM96b in [0, 1]; 0.0 when there are no methods or fields."""
        method_count = len(class_info.methods)
        attr_count = len(class_info.fields)
        if method_count == 0 or attr_count == 0:
            return 0.0

        # Per-field non-access ratio, accumulated then averaged over fields.
        non_access = sum(
            (1 / method_count) * (method_count - len(attr.accessed_by))
            for attr in class_info.fields
        )
        return non_access / attr_count
42
+
43
+
44
class LCOM1:
    """LCOM1: |P| - |Q| where P = non-sharing pairs, Q = sharing pairs."""

    name = "LCOM1"
    description = "Lack of Cohesion of Methods (LCOM1)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return max(0, |P| - |Q|) over all unordered method pairs."""
        methods = class_info.methods
        if len(methods) <= 1:
            return 0.0

        non_sharing = 0  # pairs with disjoint field sets
        sharing = 0      # pairs that access at least one common field
        for idx, first in enumerate(methods):
            first_fields = set(first.accessed_fields)
            for second in methods[idx + 1:]:
                if first_fields & set(second.accessed_fields):
                    sharing += 1
                else:
                    non_sharing += 1

        # Clamped at zero by convention.
        return float(max(0, non_sharing - sharing))
69
+
70
+
71
class LCOM2:
    """LCOM2: 1 - (sum(MF) / (M * F)) where MF = methods accessing field f."""

    name = "LCOM2"
    description = "Lack of Cohesion of Methods (LCOM2)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return LCOM2; 0.0 when there are no methods or no fields."""
        method_count = len(class_info.methods)
        field_count = len(class_info.fields)
        if not (method_count and field_count):
            return 0.0

        access_total = sum(len(attr.accessed_by) for attr in class_info.fields)
        return 1 - access_total / (method_count * field_count)
85
+
86
+
87
class LCOM3:
    """LCOM3: (M - sum(MF)/F) / (M - 1)."""

    name = "LCOM3"
    description = "Lack of Cohesion of Methods (LCOM3)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return LCOM3; 0.0 for degenerate classes (m <= 1 or no fields)."""
        method_count = len(class_info.methods)
        field_count = len(class_info.fields)
        if method_count <= 1 or field_count == 0:
            return 0.0

        access_total = sum(len(attr.accessed_by) for attr in class_info.fields)
        return (method_count - access_total / field_count) / (method_count - 1)
101
+
102
+
103
class LCOM4:
    """LCOM4: Number of connected components in method-field access graph."""

    name = "LCOM4"
    description = "Lack of Cohesion of Methods (LCOM4, connected components)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Count connected components; two methods connect iff they share a field."""
        methods = class_info.methods
        if not methods:
            return 0.0

        count = len(methods)
        # Precompute field sets so the edge test is a cheap intersection.
        field_sets = [set(m.accessed_fields) for m in methods]

        # Iterative DFS over the implicit "shares a field" graph.
        visited = [False] * count
        components = 0
        for start in range(count):
            if visited[start]:
                continue
            components += 1
            visited[start] = True
            stack = [start]
            while stack:
                current = stack.pop()
                for other in range(count):
                    if not visited[other] and field_sets[current] & field_sets[other]:
                        visited[other] = True
                        stack.append(other)

        return float(components)
138
+
139
+
140
class LCOM5:
    """LCOM5: (m - avg(mu(Ai))) / (m - 1), the average-field-access variant.

    NOTE(review): the original docstring stated the formula as
    "(a - mu * sum(mA)) / (a - mu)", which did not match the implemented
    computation; the docstring now reflects what the code actually does.
    """

    name = "LCOM5"
    description = "Lack of Cohesion of Methods (LCOM5)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return LCOM5; 0.0 for degenerate classes (m <= 1 or no fields).

        Behavior is unchanged: two dead guards were removed — `a > 0` was
        always true past the early return, and `m - 1 == 0` was impossible
        under the `m <= 1` guard.
        """
        m = len(class_info.methods)
        a = len(class_info.fields)
        if m <= 1 or a == 0:
            return 0.0

        total_mf = sum(len(field.accessed_by) for field in class_info.fields)
        avg_mf = total_mf / a
        return (m - avg_mf) / (m - 1)
160
+
161
+
162
class LCOMStar:
    """LCOM* (Henderson-Sellers variant): based on method pair sharing ratio."""

    name = "LCOMStar"
    description = "Lack of Cohesion of Methods (Henderson-Sellers variant)"

    def calculate(self, class_info: ClassInfo) -> float:
        """Return LCOM* = (m - avg mu) / (m - 1); 0.0 for degenerate classes."""
        method_count = len(class_info.methods)
        attr_count = len(class_info.fields)
        if method_count <= 1 or attr_count == 0:
            return 0.0

        # Average number of methods touching each field.
        avg_access = (
            sum(len(attr.accessed_by) for attr in class_info.fields) / attr_count
        )
        return (method_count - avg_access) / (method_count - 1)
@@ -0,0 +1,19 @@
1
# Re-export the shared metric datatypes so callers can import them
# directly from `archunitpython.metrics.common`.
from archunitpython.metrics.common.types import (
    ClassInfo,
    EnhancedClassInfo,
    FieldInfo,
    FileAnalysisResult,
    MethodInfo,
    Metric,
    MetricComparison,
)

__all__ = [
    "ClassInfo",
    "EnhancedClassInfo",
    "FieldInfo",
    "FileAnalysisResult",
    "Metric",
    "MetricComparison",
    "MethodInfo",
]
@@ -0,0 +1,67 @@
1
+ """Common types for the metrics module."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field
6
+ from typing import Literal, Protocol
7
+
8
+
9
@dataclass
class MethodInfo:
    """Information about a class method, as used by the LCOM metrics."""

    name: str
    # Names of fields this method touches via `self.<name>` (names only,
    # not FieldInfo objects); first-seen order, no duplicates.
    accessed_fields: list[str] = field(default_factory=list)
15
+
16
+
17
@dataclass
class FieldInfo:
    """Information about a class field (inverse view of MethodInfo)."""

    name: str
    # Names of methods that access this field via `self.<name>`; no duplicates.
    accessed_by: list[str] = field(default_factory=list)
23
+
24
+
25
@dataclass
class ClassInfo:
    """Information about a class extracted from source code."""

    name: str
    # Path of the defining file, normalized with forward slashes.
    file_path: str
    methods: list[MethodInfo] = field(default_factory=list)
    fields: list[FieldInfo] = field(default_factory=list)
33
+
34
+
35
@dataclass
class EnhancedClassInfo(ClassInfo):
    """Extended class info with abstractness and dependency data."""

    is_abstract: bool = False  # ABC/ABCMeta base, ABCMeta metaclass, or @abstractmethod
    is_protocol: bool = False  # a base class is named `Protocol`
    abstract_methods: list[str] = field(default_factory=list)  # @abstractmethod names
    efferent_coupling: int = 0  # Ce: outgoing dependencies
    afferent_coupling: int = 0  # Ca: incoming dependencies
44
+
45
+
46
@dataclass
class FileAnalysisResult:
    """Analysis result for a single Python file."""

    file_path: str  # normalized with forward slashes
    classes: list[EnhancedClassInfo] = field(default_factory=list)
    protocols: int = 0  # classes whose base is named Protocol
    abstract_classes: int = 0  # abstract classes that are not Protocols
    concrete_classes: int = 0  # everything else
    total_types: int = 0  # protocols + abstract_classes + concrete_classes
56
+
57
+
58
# Comparison modes supported when checking a metric value against a
# threshold (presumably consumed by metric_thresholds.py — confirm there).
MetricComparison = Literal["below", "above", "equal", "above_equal", "below_equal"]
59
+
60
+
61
class Metric(Protocol):
    """Structural protocol for class-level metrics.

    Any object exposing `name`, `description`, and `calculate` conforms
    (e.g. the LCOM variant classes).
    """

    # Short identifier, e.g. "LCOM4".
    name: str
    # Human-readable description of the metric.
    description: str

    def calculate(self, class_info: ClassInfo) -> float: ...
File without changes
@@ -0,0 +1,246 @@
1
+ """Extract class information from Python source files using AST."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import ast
6
+ import os
7
+
8
+ from archunitpython.common.extraction.extract_graph import _DEFAULT_EXCLUDE, _find_python_files
9
+ from archunitpython.metrics.common.types import (
10
+ ClassInfo,
11
+ EnhancedClassInfo,
12
+ FieldInfo,
13
+ FileAnalysisResult,
14
+ MethodInfo,
15
+ )
16
+
17
+
18
def extract_class_info(
    project_path: str | None = None,
    *,
    exclude_patterns: list[str] | None = None,
) -> list[ClassInfo]:
    """Extract class metadata from all Python files in a project.

    Args:
        project_path: Root directory to scan; defaults to the current
            working directory.
        exclude_patterns: Directory/file names to exclude; defaults to
            the shared `_DEFAULT_EXCLUDE` set.

    Returns:
        List of ClassInfo for every class found.
    """
    root = os.path.abspath(project_path if project_path is not None else os.getcwd())
    excludes = _DEFAULT_EXCLUDE if exclude_patterns is None else exclude_patterns

    collected: list[ClassInfo] = []
    for source_file in _find_python_files(root, excludes):
        collected.extend(_process_source_file(source_file))
    return collected
44
+
45
+
46
def extract_enhanced_class_info(
    project_path: str | None = None,
    *,
    exclude_patterns: list[str] | None = None,
) -> list[FileAnalysisResult]:
    """Enhanced extraction with abstractness and dependency information.

    Files that define no classes are omitted from the result.
    """
    root = os.path.abspath(project_path if project_path is not None else os.getcwd())
    excludes = _DEFAULT_EXCLUDE if exclude_patterns is None else exclude_patterns

    return [
        analysis
        for analysis in (
            _process_source_file_enhanced(path)
            for path in _find_python_files(root, excludes)
        )
        if analysis.classes
    ]
66
+
67
+
68
def _process_source_file(file_path: str) -> list[ClassInfo]:
    """Extract ClassInfo from a single Python file.

    Returns [] when the file cannot be read or does not parse.
    """
    try:
        with open(file_path, "r", encoding="utf-8", errors="replace") as handle:
            source = handle.read()
        tree = ast.parse(source, filename=file_path)
    except (OSError, SyntaxError):
        # Unreadable or unparsable files simply contribute nothing.
        return []

    norm_path = file_path.replace("\\", "/")
    return [
        _extract_class(node, norm_path)
        for node in ast.walk(tree)
        if isinstance(node, ast.ClassDef)
    ]
90
+
91
+
92
def _process_source_file_enhanced(file_path: str) -> FileAnalysisResult:
    """Extract enhanced class info from a single Python file.

    Returns an empty FileAnalysisResult when the file cannot be read
    or does not parse.
    """
    norm_path = file_path.replace("\\", "/")
    result = FileAnalysisResult(file_path=norm_path)

    try:
        with open(file_path, "r", encoding="utf-8", errors="replace") as handle:
            source = handle.read()
        tree = ast.parse(source, filename=file_path)
    except (OSError, SyntaxError):
        return result

    for node in ast.walk(tree):
        if not isinstance(node, ast.ClassDef):
            continue
        info = _extract_enhanced_class(node, norm_path)
        result.classes.append(info)
        result.total_types += 1
        # A Protocol counts only as a protocol, even if also abstract.
        if info.is_protocol:
            result.protocols += 1
        elif info.is_abstract:
            result.abstract_classes += 1
        else:
            result.concrete_classes += 1

    return result
121
+
122
+
123
def _extract_class(node: ast.ClassDef, file_path: str) -> ClassInfo:
    """Extract ClassInfo from a ClassDef AST node.

    Collects `self.<field>` assignments as fields and records which methods
    access which fields (the data the LCOM metrics consume).
    """
    methods: list[MethodInfo] = []
    fields: dict[str, FieldInfo] = {}

    # Find fields from plain `self.x = ...` assignments AND annotated
    # `self.x: T = ...` assignments (ast.AnnAssign was previously missed,
    # making annotated fields invisible to the cohesion metrics).
    # NOTE(review): ast.walk also descends into nested classes, so their
    # `self.*` assignments are attributed to this class — confirm intended.
    for item in ast.walk(node):
        if isinstance(item, ast.Assign):
            targets = item.targets
        elif isinstance(item, ast.AnnAssign):
            targets = [item.target]
        else:
            continue
        for target in targets:
            if (
                isinstance(target, ast.Attribute)
                and isinstance(target.value, ast.Name)
                and target.value.id == "self"
            ):
                fields.setdefault(target.attr, FieldInfo(name=target.attr))

    # Extract methods (direct children of the class body only) and
    # track which methods access which fields.
    for item in node.body:
        if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
            method_name = item.name
            accessed = _find_field_accesses(item, set(fields.keys()))
            methods.append(MethodInfo(name=method_name, accessed_fields=accessed))
            # accessed is already filtered to known fields, so every name
            # is a valid key in `fields`.
            for field_name in accessed:
                accessed_by = fields[field_name].accessed_by
                if method_name not in accessed_by:
                    accessed_by.append(method_name)

    return ClassInfo(
        name=node.name,
        file_path=file_path,
        methods=methods,
        fields=list(fields.values()),
    )
160
+
161
+
162
def _extract_enhanced_class(
    node: ast.ClassDef, file_path: str
) -> EnhancedClassInfo:
    """Extract EnhancedClassInfo from a ClassDef AST node.

    Reuses the basic extraction and layers abstractness/protocol data
    on top; coupling counters keep their dataclass defaults here.
    """
    basic = _extract_class(node, file_path)
    return EnhancedClassInfo(
        name=basic.name,
        file_path=basic.file_path,
        methods=basic.methods,
        fields=basic.fields,
        is_abstract=_is_abstract_class(node),
        is_protocol=_is_protocol_class(node),
        abstract_methods=_find_abstract_methods(node),
    )
181
+
182
+
183
+ def _find_field_accesses(
184
+ func_node: ast.FunctionDef | ast.AsyncFunctionDef,
185
+ known_fields: set[str],
186
+ ) -> list[str]:
187
+ """Find all self.field accesses within a method."""
188
+ accessed: list[str] = []
189
+ for node in ast.walk(func_node):
190
+ if isinstance(node, ast.Attribute) and isinstance(node.value, ast.Name):
191
+ if node.value.id == "self" and node.attr in known_fields:
192
+ if node.attr not in accessed:
193
+ accessed.append(node.attr)
194
+ return accessed
195
+
196
+
197
def _is_abstract_class(node: ast.ClassDef) -> bool:
    """Check whether a class is abstract.

    True when it has a base named ABC/ABCMeta, an ABCMeta metaclass
    keyword, or any directly-declared method decorated `@abstractmethod`.
    """
    if any(_get_name(base) in ("ABC", "ABCMeta") for base in node.bases):
        return True
    if any(
        kw.arg == "metaclass" and _get_name(kw.value) == "ABCMeta"
        for kw in node.keywords
    ):
        return True
    # Fall back to scanning direct methods for @abstractmethod decorators.
    for item in node.body:
        if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)) and any(
            _get_name(dec) == "abstractmethod" for dec in item.decorator_list
        ):
            return True
    return False
216
+
217
+
218
def _is_protocol_class(node: ast.ClassDef) -> bool:
    """Check whether any base class of `node` is named `Protocol`."""
    return any(_get_name(base) == "Protocol" for base in node.bases)
225
+
226
+
227
def _find_abstract_methods(node: ast.ClassDef) -> list[str]:
    """Return names of directly-declared methods decorated `@abstractmethod`.

    Each method is listed at most once, in declaration order.
    """
    return [
        item.name
        for item in node.body
        if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef))
        and any(_get_name(dec) == "abstractmethod" for dec in item.decorator_list)
    ]
238
+
239
+
240
+ def _get_name(node: ast.expr) -> str:
241
+ """Extract a name from an AST node (handles Name and Attribute)."""
242
+ if isinstance(node, ast.Name):
243
+ return node.id
244
+ if isinstance(node, ast.Attribute):
245
+ return node.attr
246
+ return ""
@@ -0,0 +1,3 @@
1
# Public fluent-API entry point for the metrics module.
from archunitpython.metrics.fluentapi.metrics import metrics

__all__ = ["metrics"]