cortex-solver 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cortex/__init__.py +5 -0
- cortex/__main__.py +5 -0
- cortex/core/__init__.py +1 -0
- cortex/core/decomposer.py +792 -0
- cortex/core/scene_solver.py +586 -0
- cortex/core/solver.py +304 -0
- cortex/core/validator.py +360 -0
- cortex/core/verifier.py +307 -0
- cortex/server.py +226 -0
- cortex/tools/__init__.py +1 -0
- cortex/tools/decompose.py +24 -0
- cortex/tools/research.py +17 -0
- cortex/tools/solve.py +16 -0
- cortex/tools/solve_scene.py +57 -0
- cortex/tools/validate.py +75 -0
- cortex/tools/verify.py +22 -0
- cortex/types.py +298 -0
- cortex_solver-3.0.0.dist-info/METADATA +151 -0
- cortex_solver-3.0.0.dist-info/RECORD +21 -0
- cortex_solver-3.0.0.dist-info/WHEEL +4 -0
- cortex_solver-3.0.0.dist-info/entry_points.txt +2 -0
cortex/core/solver.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
"""Constraint solver for part recipes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections import deque
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from typing import Iterable
|
|
8
|
+
|
|
9
|
+
from cortex.types import (
|
|
10
|
+
Constraint,
|
|
11
|
+
Dimensions,
|
|
12
|
+
Mirror,
|
|
13
|
+
PartRecipe,
|
|
14
|
+
PartSpec,
|
|
15
|
+
SolveResult,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
# Maps an axis letter (Constraint.axis / Mirror.axis) to its index into an
# [x, y, z] position list.
AXIS_INDEX = {"X": 0, "Y": 1, "Z": 2}
# Maps an axis index to the Dimensions attribute measured along that axis.
DIMENSION_INDEX = {0: "width", 1: "depth", 2: "height"}
# Absolute tolerance when comparing two solved values on the same axis.
TOLERANCE = 0.0001
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass(slots=True)
class _AxisAssignment:
    """A single solved value for one axis, plus the constraint that produced it."""

    axis: int  # 0=X, 1=Y, 2=Z (see AXIS_INDEX)
    value: float  # solved center coordinate along that axis
    constraint: Constraint  # originating constraint, kept for conflict reports
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass(slots=True)
class _AxisSetResult:
    """Result of solving one part: its [x, y, z] center plus any conflicts."""

    position: list[float]  # solved center coordinates for the part
    conflicts: list[dict[str, object]]  # records of contradictory assignments
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def solve(recipe: PartRecipe) -> SolveResult:
    """Solve all part positions for a recipe.

    Topologically sorts the parts (part_a depends on part_b for every
    constraint), places the anchor at ``recipe.anchor_position``, solves each
    remaining part in dependency order, then materializes mirrored copies.
    Fails for a missing anchor or a dependency cycle; contradictory
    constraints are reported via ``conflicts`` without failing the solve.
    """

    if recipe.anchor not in recipe.parts:
        return SolveResult(
            success=False,
            error="Anchor part is missing from recipe parts.",
        )

    part_names = set(recipe.parts.keys())
    constraints = list(recipe.constraints or [])
    # part -> parts it must be placed after; and the reverse edges.
    dependencies: dict[str, set[str]] = {name: set() for name in part_names}
    dependents: dict[str, set[str]] = {name: set() for name in part_names}

    for constraint in constraints:
        if constraint.part_a in part_names and constraint.part_b in part_names:
            dependencies[constraint.part_a].add(constraint.part_b)
            dependents[constraint.part_b].add(constraint.part_a)

    # Kahn's algorithm; `sorted` keeps the build order deterministic.
    in_degree = {name: len(dependencies[name]) for name in part_names}
    zero_degree = sorted(
        name for name, degree in in_degree.items() if degree == 0 and name != recipe.anchor
    )
    # NOTE(review): if the anchor itself is constrained (in-degree > 0) the
    # queue starts empty and the recipe is reported as circular — presumably
    # anchors are expected to be unconstrained; confirm against callers.
    queue = deque(
        [recipe.anchor, *zero_degree] if in_degree.get(recipe.anchor, 0) == 0 else []
    )
    build_order: list[str] = []

    while queue:
        current = queue.popleft()
        build_order.append(current)
        for dependent in sorted(dependents.get(current, [])):
            in_degree[dependent] -= 1
            if in_degree[dependent] == 0:
                queue.append(dependent)

    # Any part left unplaced implies a cycle among the constraints.
    if len(build_order) < len(part_names):
        return SolveResult(
            success=False,
            error="Circular dependency detected in recipe constraints.",
            build_order=build_order,
        )

    positions: dict[str, dict[str, list[float]]] = {}
    conflicts: list[dict[str, object]] = []

    anchor_position = list(recipe.anchor_position)
    anchor_spec = recipe.parts[recipe.anchor]
    positions[recipe.anchor] = _make_position(anchor_position, anchor_spec)

    # Single-part recipes are done once the anchor is placed.
    if len(part_names) == 1:
        return SolveResult(
            success=True,
            positions=positions,
            build_order=build_order,
            conflicts=conflicts,
        )

    # Group constraints by the part they position (part_a).
    constraints_by_target: dict[str, list[Constraint]] = {
        name: [] for name in part_names
    }
    for constraint in constraints:
        if constraint.part_a in part_names:
            constraints_by_target[constraint.part_a].append(constraint)

    for part_name in build_order:
        if part_name == recipe.anchor:
            continue
        part_spec = recipe.parts[part_name]
        axis_set = _solve_part_axes(
            part_name,
            part_spec,
            constraints_by_target.get(part_name, []),
            recipe.parts,
            positions,
            anchor_position,
        )
        positions[part_name] = _make_position(axis_set.position, part_spec)
        conflicts.extend(axis_set.conflicts)

    # Mirrors copy an already-solved part, reflected across the anchor.
    for mirror in recipe.mirrors:
        if mirror.source not in positions:
            continue
        mirrored = _apply_mirror(
            mirror, positions[mirror.source]["location"], anchor_position
        )
        positions[mirror.target] = {
            "location": list(mirrored),
            "dimensions": list(positions[mirror.source]["dimensions"]),
        }

    return SolveResult(
        success=True,
        positions=positions,
        build_order=build_order,
        conflicts=conflicts,
    )
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def _solve_part_axes(
    part_name: str,
    part_spec: PartSpec,
    constraints: Iterable[Constraint],
    parts: dict[str, PartSpec],
    positions: dict[str, dict[str, list[float]]],
    anchor_position: list[float],
) -> _AxisSetResult:
    """Solve the [x, y, z] center for one part from its constraints.

    Axes start at the anchor position and are overwritten (then locked) as
    constraints against already-placed reference parts are applied;
    contradictory assignments are collected as conflict records.
    """
    solved = list(anchor_position)
    locked = [False] * 3
    found_conflicts: list[dict[str, object]] = []

    for constraint in constraints:
        reference = constraint.part_b
        if reference not in positions:
            # Referenced part has not been placed yet; nothing to apply.
            continue
        resolved = _resolve_constraint(
            constraint,
            part_spec,
            parts[reference],
            positions[reference]["location"],
            anchor_position,
        )
        for assignment in resolved:
            found_conflicts = _apply_axis_assignment(
                part_name, solved, locked, assignment, found_conflicts
            )

    return _AxisSetResult(position=solved, conflicts=found_conflicts)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _resolve_constraint(
    constraint: Constraint,
    part_a: PartSpec,
    part_b: PartSpec,
    b_position: list[float],
    anchor_position: list[float],
) -> list[_AxisAssignment]:
    """Translate one constraint into concrete axis assignments for part_a.

    Args:
        constraint: the constraint relating part_a to part_b.
        part_a: spec of the part being placed.
        part_b: spec of the already-placed reference part.
        b_position: solved [x, y, z] center of part_b.
        anchor_position: [x, y, z] of the recipe anchor (used by SYMMETRIC).

    Returns:
        A list of ``_AxisAssignment``s; empty for unknown types or axes
        (the validator reports those separately).
    """
    constraint_type = constraint.type
    axis_index = AXIS_INDEX.get(constraint.axis)
    if axis_index is None:
        return []

    # Half-extents of both parts along the constrained axis.
    a_half = _dimension_for_axis(part_a.dimensions, axis_index) / 2.0
    b_half = _dimension_for_axis(part_b.dimensions, axis_index) / 2.0

    if constraint_type == "STACKED":
        # Fix: the validator accepts 'top'/'bottom' in any case (it lowercases
        # before checking), so the solver must match case-insensitively too;
        # previously 'Top' silently fell through to the 'bottom' branch.
        if constraint.reference.lower() == "top":
            value = b_position[axis_index] + b_half + a_half
        else:
            value = b_position[axis_index] - b_half - a_half
        return [_AxisAssignment(axis=axis_index, value=value, constraint=constraint)]

    if constraint_type in {"CENTERED", "INSIDE", "ALIGNED"}:
        # All three align part_a's center with part_b's center on this axis
        # (previously three identical branches).
        return [
            _AxisAssignment(
                axis=axis_index, value=b_position[axis_index], constraint=constraint
            )
        ]

    if constraint_type in {"FLUSH", "OFFSET"}:
        # Place part_a so face_a coincides with part_b's face_b; OFFSET then
        # shifts the result by the constraint's offset.
        b_face = _face_position(b_position[axis_index], b_half, constraint.face_b)
        a_center = _face_center(b_face, a_half, constraint.face_a)
        if constraint_type == "OFFSET":
            a_center += constraint.offset
        return [_AxisAssignment(axis=axis_index, value=a_center, constraint=constraint)]

    if constraint_type == "COAXIAL":
        # Match part_b's center on both axes perpendicular to the given one.
        return [
            _AxisAssignment(
                axis=other_axis,
                value=b_position[other_axis],
                constraint=constraint,
            )
            for other_axis in range(3)
            if other_axis != axis_index
        ]

    if constraint_type == "SYMMETRIC":
        # Reflect part_b across the anchor on the given axis; the other two
        # axes copy part_b directly.
        mirrored = list(b_position)
        mirrored[axis_index] = 2 * anchor_position[axis_index] - b_position[axis_index]
        return [
            _AxisAssignment(axis=axis, value=mirrored[axis], constraint=constraint)
            for axis in range(3)
        ]

    # Unknown constraint types yield no assignments.
    return []
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def _apply_axis_assignment(
|
|
251
|
+
part_name: str,
|
|
252
|
+
axis_values: list[float],
|
|
253
|
+
axis_locked: list[bool],
|
|
254
|
+
assignment: _AxisAssignment,
|
|
255
|
+
conflicts: list[dict[str, object]],
|
|
256
|
+
) -> list[dict[str, object]]:
|
|
257
|
+
axis = assignment.axis
|
|
258
|
+
if axis_locked[axis]:
|
|
259
|
+
if abs(axis_values[axis] - assignment.value) > TOLERANCE:
|
|
260
|
+
conflicts.append(
|
|
261
|
+
{
|
|
262
|
+
"part": part_name,
|
|
263
|
+
"axis": axis,
|
|
264
|
+
"value": axis_values[axis],
|
|
265
|
+
"conflict": assignment.value,
|
|
266
|
+
"constraint": assignment.constraint,
|
|
267
|
+
}
|
|
268
|
+
)
|
|
269
|
+
return conflicts
|
|
270
|
+
|
|
271
|
+
axis_values[axis] = assignment.value
|
|
272
|
+
axis_locked[axis] = True
|
|
273
|
+
return conflicts
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
def _dimension_for_axis(dimensions: Dimensions, axis_index: int) -> float:
    """Return the extent of *dimensions* measured along the given axis index."""
    attribute = DIMENSION_INDEX[axis_index]
    return getattr(dimensions, attribute)
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
def _face_position(center_value: float, half: float, face: str) -> float:
|
|
281
|
+
if face == "+":
|
|
282
|
+
return center_value + half
|
|
283
|
+
return center_value - half
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def _face_center(face_value: float, half: float, face: str) -> float:
|
|
287
|
+
if face == "+":
|
|
288
|
+
return face_value - half
|
|
289
|
+
return face_value + half
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
def _apply_mirror(
    mirror: Mirror, source_position: list[float], anchor_position: list[float]
) -> list[float]:
    """Reflect *source_position* across the anchor along the mirror's axis.

    An unknown axis letter yields an unchanged copy of the source position.
    """
    reflected = list(source_position)
    axis_index = AXIS_INDEX.get(mirror.axis)
    if axis_index is not None:
        reflected[axis_index] = (
            2 * anchor_position[axis_index] - source_position[axis_index]
        )
    return reflected
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def _make_position(position: list[float], spec: PartSpec) -> dict[str, list[float]]:
|
|
304
|
+
return {"location": list(position), "dimensions": spec.dimensions.as_list()}
|
cortex/core/validator.py
ADDED
|
@@ -0,0 +1,360 @@
|
|
|
1
|
+
"""Recipe validator for part constraints before solving."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from cortex.types import (
|
|
6
|
+
ConstraintType,
|
|
7
|
+
PartRecipe,
|
|
8
|
+
ValidationError,
|
|
9
|
+
ValidationResult,
|
|
10
|
+
ValidationWarning,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
# Axis letters accepted on constraints and mirrors.
_ALLOWED_AXES = {"X", "Y", "Z"}
# String values of every supported constraint type.
_ALLOWED_CONSTRAINTS = {
    ConstraintType.STACKED.value,
    ConstraintType.CENTERED.value,
    ConstraintType.FLUSH.value,
    ConstraintType.OFFSET.value,
    ConstraintType.COAXIAL.value,
    ConstraintType.INSIDE.value,
    ConstraintType.ALIGNED.value,
    ConstraintType.SYMMETRIC.value,
}
# Valid 'reference' values for STACKED constraints (compared lowercased).
_STACKED_REFERENCES = {"top", "bottom"}
# Valid face selectors for FLUSH/OFFSET constraints.
_FACE_VALUES = {"+", "-"}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def validate(recipe: PartRecipe) -> ValidationResult:
    """Run every structural check against *recipe* and collect the results.

    The recipe is valid when no check produced an error; warnings never
    affect validity.
    """
    errors: list[ValidationError] = []
    warnings: list[ValidationWarning] = []

    error_checks = (
        _check_schema_completeness,
        _check_part_existence,
        _check_anchor_exists,
        _check_constraint_validity,
        _check_no_orphans,
        _check_circular_dependencies,
        _check_dimensions,
        _check_mirror_validity,
    )
    for check in error_checks:
        check(recipe, errors)
    _check_constraint_conflicts(recipe, warnings)
    _check_type_validation(recipe, errors)

    return ValidationResult(valid=not errors, errors=errors, warnings=warnings)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _check_schema_completeness(
    recipe: PartRecipe, errors: list[ValidationError]
) -> None:
    """Verify the recipe's top-level fields are present and well-shaped."""
    if not getattr(recipe, "name", ""):
        errors.append(
            ValidationError("missing_field", "Recipe name is required.", "name")
        )
    if not getattr(recipe, "anchor", ""):
        errors.append(
            ValidationError("missing_field", "Recipe anchor is required.", "anchor")
        )

    anchor_position = getattr(recipe, "anchor_position", None)
    position_ok = isinstance(anchor_position, list) and len(anchor_position) == 3
    if not position_ok:
        errors.append(
            ValidationError(
                "missing_field",
                "Anchor position must be a list of three values.",
                "anchor_position",
            )
        )

    parts = getattr(recipe, "parts", None)
    if not (isinstance(parts, dict) and parts):
        errors.append(
            ValidationError(
                "missing_field",
                "Recipe parts must be a non-empty dictionary.",
                "parts",
            )
        )

    if not isinstance(getattr(recipe, "constraints", None), list):
        errors.append(
            ValidationError(
                "missing_field", "Recipe constraints must be a list.", "constraints"
            )
        )
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _check_part_existence(recipe: PartRecipe, errors: list[ValidationError]) -> None:
    """Ensure every constraint references parts that exist in the recipe."""
    parts = recipe.parts if isinstance(recipe.parts, dict) else {}
    constraints = recipe.constraints if isinstance(recipe.constraints, list) else []
    for index, constraint in enumerate(constraints):
        # Check both ends of the constraint the same way (part_a first).
        for side in ("part_a", "part_b"):
            referenced = getattr(constraint, side)
            if referenced in parts:
                continue
            errors.append(
                ValidationError(
                    "missing_part",
                    f"Constraint {side} '{referenced}' is missing.",
                    f"constraints[{index}].{side}",
                )
            )
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _check_anchor_exists(recipe: PartRecipe, errors: list[ValidationError]) -> None:
    """Ensure the declared anchor is one of the recipe's parts."""
    parts = recipe.parts if isinstance(recipe.parts, dict) else {}
    anchor = recipe.anchor
    if not anchor or anchor in parts:
        return
    errors.append(
        ValidationError(
            "missing_part",
            f"Anchor part '{anchor}' is missing from parts.",
            "anchor",
        )
    )
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def _check_constraint_validity(
    recipe: PartRecipe, errors: list[ValidationError]
) -> None:
    """Check each constraint's type, axis, and type-specific fields."""
    constraints = recipe.constraints if isinstance(recipe.constraints, list) else []
    for index, constraint in enumerate(constraints):
        location = f"constraints[{index}]"
        if constraint.type not in _ALLOWED_CONSTRAINTS:
            errors.append(
                ValidationError(
                    "invalid_constraint",
                    f"Constraint type '{constraint.type}' is not supported.",
                    f"{location}.type",
                )
            )
        if constraint.axis not in _ALLOWED_AXES:
            errors.append(
                ValidationError(
                    "invalid_constraint",
                    f"Constraint axis '{constraint.axis}' is invalid.",
                    f"{location}.axis",
                )
            )
        if constraint.type == ConstraintType.STACKED.value:
            # Reference is matched case-insensitively.
            if constraint.reference.lower() not in _STACKED_REFERENCES:
                errors.append(
                    ValidationError(
                        "invalid_constraint",
                        "STACKED constraints require reference 'top' or 'bottom'.",
                        f"{location}.reference",
                    )
                )
        if constraint.type in {ConstraintType.FLUSH.value, ConstraintType.OFFSET.value}:
            faces_ok = (
                constraint.face_a in _FACE_VALUES
                and constraint.face_b in _FACE_VALUES
            )
            if not faces_ok:
                errors.append(
                    ValidationError(
                        "invalid_constraint",
                        "FLUSH/OFFSET constraints require face_a and face_b of '+' or '-'.",
                        location,
                    )
                )
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def _check_no_orphans(recipe: PartRecipe, errors: list[ValidationError]) -> None:
    """Every non-anchor part must appear in at least one constraint or mirror."""
    parts = recipe.parts if isinstance(recipe.parts, dict) else {}
    if not parts:
        return
    constraints = recipe.constraints if isinstance(recipe.constraints, list) else []
    mirrors = recipe.mirrors if isinstance(recipe.mirrors, list) else []

    referenced_parts = {c.part_a for c in constraints}
    referenced_parts.update(c.part_b for c in constraints)
    referenced_parts.update(m.source for m in mirrors)
    referenced_parts.update(m.target for m in mirrors)

    for part_name in parts:
        if part_name != recipe.anchor and part_name not in referenced_parts:
            errors.append(
                ValidationError(
                    "orphan_part",
                    f"Part '{part_name}' is not referenced by constraints or mirrors.",
                    f"parts.{part_name}",
                )
            )
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def _check_circular_dependencies(
    recipe: PartRecipe, errors: list[ValidationError]
) -> None:
    """Detect cycles in the constraint graph via depth-first search.

    Builds a directed graph (part_a -> part_b for every constraint) and runs
    a recursive DFS with a gray set (``visiting``) to catch back-edges; each
    distinct cycle is reported once.
    """
    parts = recipe.parts if isinstance(recipe.parts, dict) else {}
    constraints = recipe.constraints if isinstance(recipe.constraints, list) else []
    if not parts:
        return
    # part -> set of parts it depends on.
    adjacency: dict[str, set[str]] = {name: set() for name in parts}
    for constraint in constraints:
        if constraint.part_a in parts and constraint.part_b in parts:
            adjacency[constraint.part_a].add(constraint.part_b)
    visited: set[str] = set()  # fully explored nodes
    visiting: set[str] = set()  # nodes on the current DFS stack
    path: list[str] = []  # current DFS path, for cycle extraction
    reported: set[tuple[str, ...]] = set()  # de-duplicates identical cycles

    def dfs(node: str) -> None:
        visiting.add(node)
        path.append(node)
        for neighbor in adjacency.get(node, ()):  # pragma: no branch
            if neighbor in visiting:
                # Back-edge: the path from `neighbor` to here is a cycle.
                cycle = _extract_cycle(path, neighbor)
                cycle_key = tuple(cycle)
                if cycle_key not in reported:
                    reported.add(cycle_key)
                    errors.append(
                        ValidationError(
                            "circular_dependency",
                            f"Circular dependency detected: {' -> '.join(cycle)}.",
                            "constraints",
                        )
                    )
                continue
            if neighbor in visited:
                continue
            dfs(neighbor)
        visiting.remove(node)
        visited.add(node)
        path.pop()

    # Cover every component of the (possibly disconnected) graph.
    for part in parts:
        if part not in visited:
            dfs(part)
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def _check_dimensions(recipe: PartRecipe, errors: list[ValidationError]) -> None:
    """Each part needs a dimensions object with three positive extents."""
    parts = recipe.parts if isinstance(recipe.parts, dict) else {}
    for part_name, part in parts.items():
        field = f"parts.{part_name}.dimensions"
        dims = getattr(part, "dimensions", None)
        if dims is None:
            errors.append(
                ValidationError(
                    "missing_field",
                    f"Part '{part_name}' is missing dimensions.",
                    field,
                )
            )
        elif not _dimensions_positive(dims):
            errors.append(
                ValidationError(
                    "missing_field",
                    f"Part '{part_name}' dimensions must be positive.",
                    field,
                )
            )
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def _check_mirror_validity(recipe: PartRecipe, errors: list[ValidationError]) -> None:
    """Check mirror sources, axes, and source/target distinctness."""
    parts = recipe.parts if isinstance(recipe.parts, dict) else {}
    mirrors = recipe.mirrors if isinstance(recipe.mirrors, list) else []
    for index, mirror in enumerate(mirrors):
        if mirror.source not in parts:
            errors.append(
                ValidationError(
                    "missing_part",
                    f"Mirror source '{mirror.source}' is missing.",
                    f"mirrors[{index}].source",
                )
            )
        # Mirror targets need not exist in parts — the solver auto-generates
        # them from the source part.
        if mirror.axis not in _ALLOWED_AXES:
            errors.append(
                ValidationError(
                    "invalid_constraint",
                    f"Mirror axis '{mirror.axis}' is invalid.",
                    f"mirrors[{index}].axis",
                )
            )
        if mirror.source and mirror.source == mirror.target:
            errors.append(
                ValidationError(
                    "invalid_constraint",
                    "Mirror source and target must be different.",
                    f"mirrors[{index}]",
                )
            )
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def _check_constraint_conflicts(
    recipe: PartRecipe, warnings: list[ValidationWarning]
) -> None:
    """Warn when a part participates in multiple constraints on one axis.

    This is only advisory: repeated constraints may agree, so the solver
    decides whether they actually conflict.
    """
    constraints = recipe.constraints if isinstance(recipe.constraints, list) else []
    usage_counts: dict[tuple[str, str], int] = {}
    for constraint in constraints:
        for part in (constraint.part_a, constraint.part_b):
            key = (part, constraint.axis)
            usage_counts[key] = usage_counts.get(key, 0) + 1

    for (part, axis), count in usage_counts.items():
        if count <= 1:
            continue
        warnings.append(
            ValidationWarning(
                "constraint_conflict",
                f"Part '{part}' has multiple constraints on axis '{axis}'.",
            )
        )
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def _check_type_validation(recipe: PartRecipe, errors: list[ValidationError]) -> None:
    """Check numeric typing of the anchor position and constraint offsets."""
    anchor_position = recipe.anchor_position
    position_is_valid = (
        isinstance(anchor_position, list)
        and len(anchor_position) == 3
        and all(_is_number(value) for value in anchor_position)
    )
    if not position_is_valid:
        errors.append(
            ValidationError(
                "type_error",
                "Anchor position must be a list of three numbers.",
                "anchor_position",
            )
        )

    constraints = recipe.constraints if isinstance(recipe.constraints, list) else []
    for index, constraint in enumerate(constraints):
        if _is_number(constraint.offset):
            continue
        errors.append(
            ValidationError(
                "type_error",
                "Constraint offset must be numeric.",
                f"constraints[{index}].offset",
            )
        )
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
def _is_number(value: object) -> bool:
|
|
334
|
+
return isinstance(value, (int, float)) and not isinstance(value, bool)
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
def _dimensions_positive(dimensions: object) -> bool:
    """True when *dimensions* exposes numeric, strictly positive width,
    depth, and height attributes; False for anything else (including
    objects missing one of the attributes)."""
    try:
        extents = (dimensions.width, dimensions.depth, dimensions.height)
    except AttributeError:
        return False
    return all(_is_number(extent) and extent > 0 for extent in extents)
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def _extract_cycle(path: list[str], start: str) -> list[str]:
|
|
352
|
+
cycle: list[str] = []
|
|
353
|
+
in_cycle = False
|
|
354
|
+
for node in path:
|
|
355
|
+
if node == start:
|
|
356
|
+
in_cycle = True
|
|
357
|
+
if in_cycle:
|
|
358
|
+
cycle.append(node)
|
|
359
|
+
cycle.append(start)
|
|
360
|
+
return cycle
|