cortex-solver 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cortex/__init__.py +5 -0
- cortex/__main__.py +5 -0
- cortex/core/__init__.py +1 -0
- cortex/core/decomposer.py +792 -0
- cortex/core/scene_solver.py +586 -0
- cortex/core/solver.py +304 -0
- cortex/core/validator.py +360 -0
- cortex/core/verifier.py +307 -0
- cortex/server.py +226 -0
- cortex/tools/__init__.py +1 -0
- cortex/tools/decompose.py +24 -0
- cortex/tools/research.py +17 -0
- cortex/tools/solve.py +16 -0
- cortex/tools/solve_scene.py +57 -0
- cortex/tools/validate.py +75 -0
- cortex/tools/verify.py +22 -0
- cortex/types.py +298 -0
- cortex_solver-3.0.0.dist-info/METADATA +151 -0
- cortex_solver-3.0.0.dist-info/RECORD +21 -0
- cortex_solver-3.0.0.dist-info/WHEEL +4 -0
- cortex_solver-3.0.0.dist-info/entry_points.txt +2 -0
cortex/tools/validate.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""Tool handler for the ``validate`` MCP tool."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from cortex.core.validator import validate
|
|
8
|
+
from cortex.types import (
|
|
9
|
+
Constraint,
|
|
10
|
+
Dimensions,
|
|
11
|
+
Mirror,
|
|
12
|
+
PartRecipe,
|
|
13
|
+
PartSpec,
|
|
14
|
+
_to_dict,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def _parse_recipe(raw: dict[str, Any]) -> PartRecipe:
    """Build a typed ``PartRecipe`` from a raw JSON-style dictionary.

    Accepts both short ("w"/"d"/"h") and long ("width"/"depth"/"height")
    dimension keys, preferring the short form; missing optional fields
    fall back to empty/zero defaults.  Required keys ("type", "part_a",
    "part_b", "source", "target", "axis") raise ``KeyError`` if absent.
    """
    part_specs: dict[str, PartSpec] = {}
    for part_name, raw_spec in raw.get("parts", {}).items():
        raw_dims = raw_spec.get("dimensions", {})
        part_specs[part_name] = PartSpec(
            name=part_name,
            dimensions=Dimensions(
                width=float(raw_dims.get("w", raw_dims.get("width", 0))),
                depth=float(raw_dims.get("d", raw_dims.get("depth", 0))),
                height=float(raw_dims.get("h", raw_dims.get("height", 0))),
            ),
            type=raw_spec.get("type", ""),
            params=raw_spec.get("params", {}),
            metadata=raw_spec.get("metadata", {}),
        )

    parsed_constraints: list[Constraint] = [
        Constraint(
            type=entry["type"],
            part_a=entry["part_a"],
            part_b=entry["part_b"],
            axis=entry.get("axis", ""),
            face_a=entry.get("face_a", ""),
            face_b=entry.get("face_b", ""),
            offset=float(entry.get("offset", 0)),
            reference=entry.get("reference", ""),
        )
        for entry in raw.get("constraints", [])
    ]

    parsed_mirrors: list[Mirror] = [
        Mirror(source=entry["source"], target=entry["target"], axis=entry["axis"])
        for entry in raw.get("mirrors", [])
    ]

    return PartRecipe(
        name=raw.get("name", ""),
        anchor=raw.get("anchor", ""),
        anchor_position=[float(v) for v in raw.get("anchor_position", [0, 0, 0])],
        parts=part_specs,
        constraints=parsed_constraints,
        mirrors=parsed_mirrors,
        collections=raw.get("collections", {}),
    )
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def handle_validate(arguments: dict[str, Any]) -> dict[str, Any]:
    """Entry point for the ``validate`` MCP tool.

    Parses ``arguments["recipe"]`` into a typed recipe, runs the core
    validator, and serialises the result to a plain JSON-friendly dict.
    """
    parsed = _parse_recipe(arguments["recipe"])
    return _to_dict(validate(parsed))
|
cortex/tools/verify.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Tool handler for the ``verify`` MCP tool."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from cortex.core.verifier import verify
|
|
8
|
+
from cortex.types import _to_dict
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def handle_verify(arguments: dict[str, Any]) -> dict[str, Any]:
    """Entry point for the ``verify`` MCP tool.

    Unpacks the required arguments ("object_name", "object_data",
    "expected" — ``KeyError`` if any is missing), delegates to the core
    verifier, and serialises the result to a plain dict.
    """
    outcome = verify(
        object_name=arguments["object_name"],
        object_data=arguments["object_data"],
        expected=arguments["expected"],
    )
    return _to_dict(outcome)
|
cortex/types.py
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
1
|
+
"""Cortex type definitions — all dataclasses for the MCP build-methodology server."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field, fields as _dc_fields
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# ---------------------------------------------------------------------------
|
|
11
|
+
# Enums
|
|
12
|
+
# ---------------------------------------------------------------------------
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ConstraintType(str, Enum):
    """Part-level constraint types.

    Inherits ``str`` so members compare equal to their plain-string
    values (e.g. as stored on ``Constraint.type``) and serialise
    directly to JSON.  Each value equals its member name.
    """

    STACKED = "STACKED"
    CENTERED = "CENTERED"
    FLUSH = "FLUSH"
    OFFSET = "OFFSET"
    COAXIAL = "COAXIAL"
    INSIDE = "INSIDE"
    ALIGNED = "ALIGNED"
    SYMMETRIC = "SYMMETRIC"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class SceneConstraintType(str, Enum):
    """Scene-level constraint types.

    Inherits ``str`` so members compare equal to their plain-string
    values (e.g. as stored on ``SceneConstraint.type``) and serialise
    directly to JSON.  Each value equals its member name.
    """

    GRID = "GRID"
    ALONG_PATH = "ALONG_PATH"
    RADIAL = "RADIAL"
    FACING = "FACING"
    DISTANCE = "DISTANCE"
    AGAINST_EDGE = "AGAINST_EDGE"
    RANDOM_SCATTER = "RANDOM_SCATTER"
    STACK_VERTICAL = "STACK_VERTICAL"
    MIRROR_SCENE = "MIRROR_SCENE"
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class Scope(str, Enum):
    """Decomposition scope: a single object or a whole scene."""

    OBJECT = "object"
    SCENE = "scene"
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class DetailLevel(str, Enum):
    """Granularity of a decomposition, from coarsest to finest."""

    BASIC = "basic"
    DETAILED = "detailed"
    EXHAUSTIVE = "exhaustive"
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Severity(str, Enum):
    """Severity levels for verification checks, most severe first."""

    CRITICAL = "critical"
    WARNING = "warning"
    INFO = "info"
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
# ---------------------------------------------------------------------------
|
|
60
|
+
# Core geometry
|
|
61
|
+
# ---------------------------------------------------------------------------
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@dataclass(frozen=True, slots=True)
|
|
65
|
+
class Dimensions:
|
|
66
|
+
"""Width (X), depth (Y), height (Z)."""
|
|
67
|
+
|
|
68
|
+
width: float
|
|
69
|
+
depth: float
|
|
70
|
+
height: float
|
|
71
|
+
|
|
72
|
+
def as_list(self) -> list[float]:
|
|
73
|
+
return [self.width, self.depth, self.height]
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
# ---------------------------------------------------------------------------
|
|
77
|
+
# Part recipe types
|
|
78
|
+
# ---------------------------------------------------------------------------
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
@dataclass(slots=True)
class PartSpec:
    """Specification for a single part."""

    name: str               # part identifier; mirrors its key in PartRecipe.parts
    dimensions: Dimensions  # bounding extents (width/depth/height)
    type: str = ""          # free-form part kind; "" when unspecified
    # Extra construction parameters (schema not defined here).
    params: dict[str, Any] = field(default_factory=dict)
    # Arbitrary non-structural annotations carried alongside the spec.
    metadata: dict[str, Any] = field(default_factory=dict)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@dataclass(frozen=True, slots=True)
class Constraint:
    """A single constraint between two parts."""

    type: str          # constraint kind; expected to match a ConstraintType value (not enforced here)
    part_a: str        # name of the first (constrained) part
    part_b: str        # name of the second (reference) part
    axis: str          # axis the constraint acts along; may be "" when not applicable
    face_a: str = ""   # face of part_a involved, for face-based constraints
    face_b: str = ""   # face of part_b involved, for face-based constraints
    offset: float = 0.0  # numeric offset applied by the constraint
    reference: str = ""  # optional reference identifier (semantics defined by the solver)
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
@dataclass(frozen=True, slots=True)
class Mirror:
    """Mirror definition — clone *source* to *target* across *axis*."""

    source: str  # name of the existing part to clone
    target: str  # name of the mirrored copy to create
    axis: str    # axis across which the clone is reflected
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
@dataclass(slots=True)
class PartRecipe:
    """Full recipe describing an object's parts and their relationships."""

    name: str                      # name of the object being built
    anchor: str                    # name of the anchor part the rest is positioned from
    anchor_position: list[float]   # world position of the anchor (parsed as [x, y, z])
    parts: dict[str, PartSpec]     # all parts, keyed by part name
    constraints: list[Constraint]  # pairwise positioning constraints
    # Optional mirror clones to generate after solving.
    mirrors: list[Mirror] = field(default_factory=list)
    # Named groups of part names.
    collections: dict[str, list[str]] = field(default_factory=dict)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
# ---------------------------------------------------------------------------
|
|
129
|
+
# Scene recipe types
|
|
130
|
+
# ---------------------------------------------------------------------------
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
@dataclass(frozen=True, slots=True)
class SceneConstraint:
    """A single scene-level constraint."""

    type: str                 # constraint kind; expected to match a SceneConstraintType value
    # Target object name(s); NOTE: field name shadows the `object` builtin,
    # kept for schema compatibility.
    object: str | list[str]
    # Constraint-specific parameters (schema depends on `type`).
    params: dict[str, Any] = field(default_factory=dict)
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
@dataclass(slots=True)
class SceneRecipe:
    """Full recipe describing a scene layout."""

    name: str                       # scene name
    bounds: dict[str, list[float]]  # scene boundary data, keyed by bound name
    objects: dict[str, PartSpec]    # objects to place, keyed by object name
    # Named regions within the scene (schema not defined here).
    zones: dict[str, Any] = field(default_factory=dict)
    # Scene-level placement constraints.
    constraints: list[SceneConstraint] = field(default_factory=list)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
# ---------------------------------------------------------------------------
|
|
154
|
+
# Result types
|
|
155
|
+
# ---------------------------------------------------------------------------
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
@dataclass(slots=True)
class SolveResult:
    """Result of constraint solving."""

    success: bool  # True when all constraints were resolved
    # Per-part solved transforms, keyed by part name (inner keys/values solver-defined).
    positions: dict[str, dict[str, list[float]]] = field(default_factory=dict)
    # Part names in the order they should be constructed.
    build_order: list[str] = field(default_factory=list)
    # Constraint conflicts encountered during solving.
    conflicts: list[dict[str, Any]] = field(default_factory=list)
    # Non-fatal issues worth surfacing to the caller.
    warnings: list[str] = field(default_factory=list)
    error: str = ""  # human-readable failure description; "" on success
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
@dataclass(frozen=True, slots=True)
class VerifyCheck:
    """Result of a single verification check."""

    name: str      # identifier of the check performed
    passed: bool   # whether the check succeeded
    details: str   # human-readable explanation of the outcome
    severity: str  # expected to match a Severity value (stored as a plain string)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
@dataclass(slots=True)
class VerifyResult:
    """Aggregated verification result for one object."""

    object_name: str          # object that was verified
    passed: bool              # overall pass/fail for the object
    checks: list[VerifyCheck] # individual check results
    summary: str              # human-readable summary of the verification
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
# ---------------------------------------------------------------------------
|
|
191
|
+
# Decomposition result types
|
|
192
|
+
# ---------------------------------------------------------------------------
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
@dataclass(slots=True)
class HierarchyNode:
    """Single node in a decomposition hierarchy."""

    name: str                 # node (part) name
    parent: str | None = None # parent node name; None for the root
    connection: str = ""      # how this node attaches to its parent
    # Child node names, in no guaranteed order.
    children: list[str] = field(default_factory=list)
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
@dataclass(slots=True)
class ResearchItem:
    """What research is needed for a specific part."""

    part: str  # part name the research applies to
    # Names of the data items still to be researched.
    needed: list[str] = field(default_factory=list)
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
@dataclass(slots=True)
class DecomposeResult:
    """Full output from the decompose tool."""

    subject: str                          # subject that was decomposed
    hierarchy: dict[str, HierarchyNode]   # part tree, keyed by node name
    research_required: list[ResearchItem] # per-part research checklists
    total_parts: int                      # total number of parts in the hierarchy
    build_order: list[str]                # suggested construction order of part names
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
@dataclass(slots=True)
class ResearchWarning:
    """Warning about a researched value."""

    part: str    # part the value belongs to
    field: str   # name of the researched field (shadows dataclasses.field locally; intentional)
    value: Any   # the suspicious value as supplied
    concern: str # why the value was flagged
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
@dataclass(slots=True)
class MissingField:
    """A missing research field."""

    part: str      # part the field belongs to
    field: str     # name of the missing field
    hint: str = "" # optional guidance on how to fill it in
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
@dataclass(slots=True)
class ResearchResult:
    """Full output from the research tool."""

    complete: bool                   # True when no fields are missing
    missing: list[MissingField]      # fields still to be researched
    warnings: list[ResearchWarning]  # suspicious values that were supplied
    ready_for_recipe: bool           # True when the data is sufficient to author a recipe
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
@dataclass(slots=True)
class ValidationError:
    """A single validation error."""

    type: str        # error category identifier
    message: str     # human-readable description
    location: str = ""  # where in the recipe the error occurred; "" when unknown
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
@dataclass(slots=True)
class ValidationWarning:
    """A single validation warning."""

    type: str     # warning category identifier
    message: str  # human-readable description
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
@dataclass(slots=True)
class ValidationResult:
    """Full output from the validate tool."""

    valid: bool                        # True when no errors were found (warnings allowed)
    errors: list[ValidationError]      # blocking problems
    warnings: list[ValidationWarning]  # non-blocking issues
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
# ---------------------------------------------------------------------------
|
|
280
|
+
# Serialisation helpers
|
|
281
|
+
# ---------------------------------------------------------------------------
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def _to_dict(obj: Any) -> Any:
    """Recursively convert dataclasses / enums to plain dicts for JSON.

    Dicts and sequences are rebuilt with converted members (tuples become
    lists), enums collapse to their ``.value``, dataclass instances become
    dicts of their non-underscore fields, and anything else is returned
    unchanged.
    """
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, dict):
        return {key: _to_dict(val) for key, val in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [_to_dict(item) for item in obj]
    if hasattr(obj, "__dataclass_fields__"):
        converted: dict[str, Any] = {}
        for spec in _dc_fields(obj):
            # Fields prefixed with "_" are treated as private and skipped.
            if spec.name.startswith("_"):
                continue
            converted[spec.name] = _to_dict(getattr(obj, spec.name))
        return converted
    return obj
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: cortex-solver
|
|
3
|
+
Version: 3.0.0
|
|
4
|
+
Summary: MCP server that enforces build methodology for LLMs
|
|
5
|
+
Project-URL: Homepage, https://github.com/cortex-solver/cortex
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
8
|
+
Classifier: Operating System :: OS Independent
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Requires-Python: >=3.11
|
|
11
|
+
Requires-Dist: mcp
|
|
12
|
+
Provides-Extra: dev
|
|
13
|
+
Requires-Dist: pytest; extra == 'dev'
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
|
|
16
|
+
# cortex-solver
|
|
17
|
+
|
|
18
|
+
cortex-solver is an MCP server that enforces a rigorous build methodology for LLMs constructing 3D objects and scenes. It provides a structured pipeline to decompose subjects, validate research, solve geometric constraints, and verify the final output.
|
|
19
|
+
|
|
20
|
+
## Installation
|
|
21
|
+
|
|
22
|
+
Run directly using uvx:
|
|
23
|
+
|
|
24
|
+
```bash
|
|
25
|
+
uvx cortex-solver
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
Or install permanently:
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
uv pip install cortex-solver
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## MCP Configuration
|
|
35
|
+
|
|
36
|
+
To use cortex-solver with an MCP client, add it to your configuration file:
|
|
37
|
+
|
|
38
|
+
```json
|
|
39
|
+
{
|
|
40
|
+
"mcpServers": {
|
|
41
|
+
"cortex": {
|
|
42
|
+
"command": "uvx",
|
|
43
|
+
"args": ["cortex-solver"]
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
## Build Pipeline
|
|
50
|
+
|
|
51
|
+
The server enforces a linear workflow for constructing 3D objects:
|
|
52
|
+
|
|
53
|
+
1. Decompose: Break the subject into a part hierarchy.
|
|
54
|
+
2. Research: Validate that all necessary research data is present.
|
|
55
|
+
3. Validate: Check the structural integrity and logic of the recipe.
|
|
56
|
+
4. Solve: Compute precise 3D positions based on constraints.
|
|
57
|
+
5. Verify: Confirm the built object matches the solver output.
|
|
58
|
+
6. Solve Scene: Arrange multiple objects within a larger scene.
|
|
59
|
+
|
|
60
|
+
## Tools
|
|
61
|
+
|
|
62
|
+
### decompose
|
|
63
|
+
Breaks a subject into a hierarchical part/object tree and generates research checklists.
|
|
64
|
+
- subject (string): The name of the object to decompose.
|
|
65
|
+
- variant (string, optional): Specific version or style of the subject.
|
|
66
|
+
- scope (string, optional): Scope of the decomposition.
|
|
67
|
+
- detail_level (string, optional): Level of granularity for the hierarchy.
|
|
68
|
+
|
|
69
|
+
### research
|
|
70
|
+
Validates the completeness of research data and identifies missing information.
|
|
71
|
+
- hierarchy (object): The part hierarchy from the decompose tool.
|
|
72
|
+
- filled_data (object, optional): The current research data to validate.
|
|
73
|
+
|
|
74
|
+
### validate
|
|
75
|
+
Checks the recipe schema, references, cycles, and constraint validity.
|
|
76
|
+
- recipe (object): The complete build recipe for the object.
|
|
77
|
+
|
|
78
|
+
### solve
|
|
79
|
+
Computes part positions via constraint resolution. Returns positions, build order, and any conflicts.
|
|
80
|
+
- recipe (object): The validated build recipe.
|
|
81
|
+
|
|
82
|
+
### solve_scene
|
|
83
|
+
Computes placements for multiple objects in a scene. Returns positions, rotations, and scales.
|
|
84
|
+
- scene_recipe (object): The recipe defining the scene and its constraints.
|
|
85
|
+
|
|
86
|
+
### verify
|
|
87
|
+
Verifies a built object against the solver output, checking mesh health, transforms, and dimensions.
|
|
88
|
+
- object_name (string): Name of the object to verify.
|
|
89
|
+
- object_data (object): Data from the constructed object.
|
|
90
|
+
- expected (object): The expected values from the solver.
|
|
91
|
+
|
|
92
|
+
## Constraints
|
|
93
|
+
|
|
94
|
+
### Part Constraints
|
|
95
|
+
Used during the object build phase:
|
|
96
|
+
- STACKED
|
|
97
|
+
- CENTERED
|
|
98
|
+
- FLUSH
|
|
99
|
+
- OFFSET
|
|
100
|
+
- COAXIAL
|
|
101
|
+
- INSIDE
|
|
102
|
+
- ALIGNED
|
|
103
|
+
- SYMMETRIC
|
|
104
|
+
|
|
105
|
+
### Scene Constraints
|
|
106
|
+
Used during the scene arrangement phase:
|
|
107
|
+
- GRID
|
|
108
|
+
- ALONG_PATH
|
|
109
|
+
- RADIAL
|
|
110
|
+
- FACING
|
|
111
|
+
- DISTANCE
|
|
112
|
+
- AGAINST_EDGE
|
|
113
|
+
- RANDOM_SCATTER
|
|
114
|
+
- STACK_VERTICAL
|
|
115
|
+
- MIRROR_SCENE
|
|
116
|
+
|
|
117
|
+
## Project Structure
|
|
118
|
+
|
|
119
|
+
```
|
|
120
|
+
cortex/
|
|
121
|
+
├── __init__.py
|
|
122
|
+
├── __main__.py
|
|
123
|
+
├── server.py
|
|
124
|
+
├── types.py
|
|
125
|
+
├── core/
|
|
126
|
+
│ ├── solver.py
|
|
127
|
+
│ ├── scene_solver.py
|
|
128
|
+
│ ├── validator.py
|
|
129
|
+
│ ├── verifier.py
|
|
130
|
+
│ └── decomposer.py
|
|
131
|
+
└── tools/
|
|
132
|
+
├── decompose.py
|
|
133
|
+
├── research.py
|
|
134
|
+
├── validate.py
|
|
135
|
+
├── solve.py
|
|
136
|
+
├── solve_scene.py
|
|
137
|
+
└── verify.py
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
## Development
|
|
141
|
+
|
|
142
|
+
Set up the environment and run tests:
|
|
143
|
+
|
|
144
|
+
```bash
|
|
145
|
+
uv sync --extra dev
|
|
146
|
+
uv run pytest
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
## License
|
|
150
|
+
|
|
151
|
+
MIT
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
cortex/__init__.py,sha256=nEbnd_yPvQp6Mgt16N3_1BX4ZCOVkG-qUSIvD8y5DTY,99
|
|
2
|
+
cortex/__main__.py,sha256=Y6wNGY4NPx70Xw8JSO3vAsnj6qbSox0iBdvKXlwwk9w,85
|
|
3
|
+
cortex/server.py,sha256=LJcXxqVxUOElLzBtrymq9NF58eTNi5FDpfSczqM3aSg,7169
|
|
4
|
+
cortex/types.py,sha256=THPjCYk2h6ALRd8Go6XB3mJpfglK5h7QZIlzWRARE_E,7021
|
|
5
|
+
cortex/core/__init__.py,sha256=qttevc6amsmADPmQHQw8r_nWWsdpgXaD2uxJKYNjUZ8,38
|
|
6
|
+
cortex/core/decomposer.py,sha256=WEhkDgDyn4Yw9U7_t0Kl6KxlRRJ51gdyWrc5DAcy7oc,31985
|
|
7
|
+
cortex/core/scene_solver.py,sha256=CGs-vBotp609o2QBdv71njmCJI-naTUWNUTYHoXSmPU,18009
|
|
8
|
+
cortex/core/solver.py,sha256=ZBxA-PTNLCqb7_pFjHpAiaekQ2ibRpzByNAYj9XqSJs,9555
|
|
9
|
+
cortex/core/validator.py,sha256=Q0-IDQHAr6DfnYmAtoyCgYEOCi0MZo-sSqJwRyHuJts,12907
|
|
10
|
+
cortex/core/verifier.py,sha256=nTpUTJl82O4Ykm_Z59sg3yhJfin_ivcMwUhwK56Wjw8,9943
|
|
11
|
+
cortex/tools/__init__.py,sha256=16IN7VHvXL3Ityxe9rLnaldLd9qI3ccYGGf1fyG-fJM,36
|
|
12
|
+
cortex/tools/decompose.py,sha256=_ImuASejK2PKdQVACbkUbEQbBrSEHRzEoqI5_6BCugc,683
|
|
13
|
+
cortex/tools/research.py,sha256=3vayN8JqQK8MDkHlIirip9zq10qn0Ms7E66PJkh2my4,519
|
|
14
|
+
cortex/tools/solve.py,sha256=4mx2gKk7VcwvEXYyQbcn8be7xz0OkFLy3-35pP-DmsA,445
|
|
15
|
+
cortex/tools/solve_scene.py,sha256=h6V15Cv5Gp9rrMiuXg-coHEg1kcgMLofwE1ABZoNM2M,1764
|
|
16
|
+
cortex/tools/validate.py,sha256=V7nuJKMPE_Lfw5c0m3X7oos72EX4mdLkYF5D2CcWnTM,2229
|
|
17
|
+
cortex/tools/verify.py,sha256=UZ_tpz3apgFRwkgUTqZRj7lhxvNnCCCvs8-fV44JXLk,590
|
|
18
|
+
cortex_solver-3.0.0.dist-info/METADATA,sha256=qN6JcVbTqJSZcmJH3m9oflrcC7oPV7l8_3kPvhi8sLM,3873
|
|
19
|
+
cortex_solver-3.0.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
20
|
+
cortex_solver-3.0.0.dist-info/entry_points.txt,sha256=VJ_FWlfV87S0FVdYs11osG4CZsLYfCbUpHrGkkyJLZg,39
|
|
21
|
+
cortex_solver-3.0.0.dist-info/RECORD,,
|