luminarycloud 0.15.1__py3-none-any.whl → 0.15.3__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to the registry. It is provided for informational purposes only.
- luminarycloud/_client/client.py +3 -0
- luminarycloud/_helpers/_code_representation.py +44 -19
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +81 -81
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +7 -1
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +61 -0
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +76 -0
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py +67 -0
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi +26 -0
- luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.py +29 -27
- luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.pyi +5 -1
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +46 -46
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +7 -1
- luminarycloud/_proto/assistant/assistant_pb2.py +23 -23
- luminarycloud/_proto/assistant/assistant_pb2.pyi +21 -11
- luminarycloud/_proto/assistant/assistant_pb2_grpc.py +13 -13
- luminarycloud/_proto/assistant/assistant_pb2_grpc.pyi +6 -6
- luminarycloud/_proto/client/simulation_pb2.py +333 -324
- luminarycloud/_proto/client/simulation_pb2.pyi +26 -1
- luminarycloud/_proto/geometry/geometry_pb2.py +69 -61
- luminarycloud/_proto/geometry/geometry_pb2.pyi +30 -3
- luminarycloud/_proto/hexmesh/hexmesh_pb2.py +41 -37
- luminarycloud/_proto/hexmesh/hexmesh_pb2.pyi +18 -8
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +69 -0
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +80 -0
- luminarycloud/_proto/luminarycloud/luminarycloud_api.pb +0 -0
- luminarycloud/_proto/named_variable_set/named_variable_set_pb2.py +49 -0
- luminarycloud/_proto/named_variable_set/named_variable_set_pb2.pyi +53 -0
- luminarycloud/_proto/quantity/quantity_pb2.py +8 -5
- luminarycloud/_proto/quantity/quantity_pb2.pyi +2 -0
- luminarycloud/enum/__init__.py +3 -0
- luminarycloud/meshing/mesh_generation_params.py +6 -5
- luminarycloud/meshing/sizing_strategy/sizing_strategies.py +2 -1
- luminarycloud/named_variable_set.py +3 -1
- luminarycloud/physics_ai/inference.py +55 -0
- luminarycloud/pipeline_util/dictable.py +27 -0
- luminarycloud/pipeline_util/yaml.py +55 -0
- luminarycloud/pipelines/__init__.py +29 -0
- luminarycloud/pipelines/core.py +225 -0
- luminarycloud/pipelines/operators.py +197 -0
- luminarycloud/pipelines/parameters.py +42 -0
- luminarycloud/project.py +6 -6
- luminarycloud/simulation.py +35 -4
- luminarycloud/simulation_param.py +16 -12
- luminarycloud/simulation_template.py +10 -6
- luminarycloud/types/vector3.py +2 -1
- luminarycloud/vis/__init__.py +0 -3
- luminarycloud/vis/display.py +3 -2
- luminarycloud/vis/filters.py +1 -2
- luminarycloud/vis/interactive_scene.py +1 -1
- luminarycloud/vis/visualization.py +17 -1
- {luminarycloud-0.15.1.dist-info → luminarycloud-0.15.3.dist-info}/METADATA +2 -1
- {luminarycloud-0.15.1.dist-info → luminarycloud-0.15.3.dist-info}/RECORD +53 -37
- {luminarycloud-0.15.1.dist-info → luminarycloud-0.15.3.dist-info}/WHEEL +0 -0

luminarycloud/meshing/mesh_generation_params.py
@@ -16,10 +16,11 @@ from ..params.geometry import (
 )
 from ..types import Vector3
 from .sizing_strategy import MaxCount, Minimal, MinimalCount, SizingStrategy, TargetCount
+from ..pipeline_util.dictable import PipelineDictable


 @dataclass(kw_only=True)
-class VolumeMeshingParams:
+class VolumeMeshingParams(PipelineDictable):
     """Volume meshing parameters."""

     volumes: list[Volume]
@@ -38,7 +39,7 @@ class VolumeMeshingParams:


 @dataclass(kw_only=True)
-class ModelMeshingParams:
+class ModelMeshingParams(PipelineDictable):
     """Model meshing parameters."""

     surfaces: Sequence[Surface | str]
@@ -61,7 +62,7 @@ class ModelMeshingParams:


 @dataclass(kw_only=True)
-class BoundaryLayerParams:
+class BoundaryLayerParams(PipelineDictable):
     """Boundary layer meshing parameters."""

     surfaces: Sequence[Surface | str]
@@ -87,7 +88,7 @@ class BoundaryLayerParams:


 @dataclass(kw_only=True)
-class RefinementRegion:
+class RefinementRegion(PipelineDictable):
     """Refinement region parameters."""

     name: str
@@ -137,7 +138,7 @@ class RefinementRegion:


 @dataclass(kw_only=True)
-class MeshGenerationParams:
+class MeshGenerationParams(PipelineDictable):
     """Mesh generation parameters."""

     geometry_id: str
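
All five meshing-parameter dataclasses above now inherit PipelineDictable, which is what lets a Mesh pipeline task serialize them. A minimal sketch of what the mixin provides, using a hypothetical stand-in dataclass (the real MeshGenerationParams has many more fields than this diff shows):

    from dataclasses import dataclass

    from luminarycloud.pipeline_util.dictable import PipelineDictable


    # Hypothetical stand-in for illustration only.
    @dataclass(kw_only=True)
    class DemoMeshParams(PipelineDictable):
        geometry_id: str
        min_size: float = 0.001


    d, params = DemoMeshParams(geometry_id="geo-123")._to_pipeline_dict()
    # d == {"geometry_id": "geo-123", "min_size": 0.001}
    # params == []  (no PipelineParameters referenced)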

luminarycloud/meshing/sizing_strategy/sizing_strategies.py
@@ -1,10 +1,11 @@
 from dataclasses import dataclass

 from luminarycloud._helpers.warnings.deprecated import deprecated
+from ...pipeline_util.dictable import PipelineDictable


 @dataclass
-class SizingStrategy:
+class SizingStrategy(PipelineDictable):
     """Sizing strategy parameters."""

     pass

luminarycloud/named_variable_set.py
@@ -32,6 +32,8 @@ class NamedVariableSet(ProtoWrapperBase):
         Time the named variable set was created.
     update_time : datetime
         Time the named variable set was last updated.
+    version_id : str
+        ID of the current (latest) version of the named variable set.

     Examples
     --------
@@ -45,7 +47,7 @@ class NamedVariableSet(ProtoWrapperBase):
     id: NamedVariableSetID
     "Named variable set ID."
     project_id: ProjectID
-    "ID of the project containing this
+    "ID of the project containing this named variable set."

     _proto: namedvariablepb.NamedVariableSet
     _named_variables: dict[str, float | str] | None
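
The second hunk completes the project_id docstring that was truncated in 0.15.1, and the first documents the new version_id attribute. Both are plain string fields on the wrapper; for illustration only (the retrieval API is not part of this diff):

    # `nvs` is a NamedVariableSet obtained elsewhere via the SDK.
    print(nvs.project_id)   # ID of the owning project
    print(nvs.version_id)   # ID of the latest version of the set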

luminarycloud/physics_ai/inference.py
@@ -0,0 +1,55 @@
+# File: python/sdk/luminarycloud/inference/inference.py
+# Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Any
+from json import loads as json_loads
+
+from .._client import get_default_client
+from .._helpers._timestamp_to_datetime import timestamp_to_datetime
+from .._proto.api.v0.luminarycloud.inference import inference_pb2 as inferencepb
+from .._proto.inferenceservice import inferenceservice_pb2 as inferenceservicepb
+from .._wrapper import ProtoWrapper, ProtoWrapperBase
+from .._helpers.warnings import experimental
+
+
+@experimental
+def start_inference_job(
+    stl_url: str,
+    model_url: str,
+    config_name: str,
+    stencil_size: int,
+) -> dict[str, Any]:
+    """Creates an inference service job.
+    Parameters
+    ----------
+    stl_url : str
+        URL of the STL file to be used for inference.
+    model_url : str
+        URL of the model to be used for inference.
+    config_name :str
+        Name of the configuration to be used for inference.
+    stencil_size :int
+        Size of the stencil to be used for inference.
+
+
+    Returns
+    dict[str, Any]
+        Response from the server as key-value pairs.
+
+    warning:: This feature is experimental and may change or be removed without notice.
+    """
+
+    req = inferencepb.CreateInferenceServiceJobRequest(
+        stl_url=stl_url,
+        model_url=model_url,
+        config_name=config_name,
+        stencil_size=stencil_size,
+    )
+
+    res: inferencepb.CreateInferenceServiceJobResponse = (
+        get_default_client().CreateInferenceServiceJob(req)
+    )
+
+    return json_loads(str(res.response, encoding="utf-8"))
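
Assuming the module is importable from its location in the wheel (luminarycloud/physics_ai/inference.py; note the file's own header comment names a different path), a call looks like this sketch, with placeholder URLs and names:

    from luminarycloud.physics_ai.inference import start_inference_job

    response = start_inference_job(
        stl_url="https://example.com/part.stl",      # placeholder
        model_url="https://example.com/model.ckpt",  # placeholder
        config_name="default",                       # placeholder
        stencil_size=8,
    )
    # `response` is the server's JSON reply decoded into a dict.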

luminarycloud/pipeline_util/dictable.py
@@ -0,0 +1,27 @@
+from dataclasses import fields, is_dataclass
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from luminarycloud.pipelines.parameters import PipelineParameter
+
+
+class PipelineDictable:
+    """
+    A mixin for dataclasses that can contain PipelineParameters and/or other PipelineDictables
+    (i.e. it's recursive). Used to construct a dictionary that can be serialized to YAML for a
+    Pipeline definition, and collects all PipelineParameters encountered along the way.
+    """
+
+    def _to_pipeline_dict(self) -> tuple[dict, list["PipelineParameter"]]:
+        if not is_dataclass(self):
+            raise ValueError("PipelineDictable can only be used on dataclasses")
+        result = {}
+        params = []
+        for field in fields(self):
+            value = getattr(self, field.name)
+            if hasattr(value, "_to_pipeline_dict"):
+                result[field.name], downstream_params = value._to_pipeline_dict()
+                params.extend(downstream_params)
+            else:
+                result[field.name] = value
+        return result, params
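
The recursion matters: a PipelineDictable field inside another PipelineDictable is lowered to a nested dict, and any PipelineParameters found along the way bubble up with it. A sketch with hypothetical dataclasses:

    from dataclasses import dataclass

    from luminarycloud.pipeline_util.dictable import PipelineDictable


    @dataclass
    class Inner(PipelineDictable):
        depth: int


    @dataclass
    class Outer(PipelineDictable):
        name: str
        inner: Inner


    d, params = Outer(name="demo", inner=Inner(depth=3))._to_pipeline_dict()
    # d == {"name": "demo", "inner": {"depth": 3}}; params == []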

luminarycloud/pipeline_util/yaml.py
@@ -0,0 +1,55 @@
+from dataclasses import dataclass
+from typing import Any
+import yaml
+
+
+@dataclass
+class YamlProblem:
+    path: str
+    problem: str
+
+    def __str__(self) -> str:
+        return f"{self.path}: {self.problem}"
+
+
+def find_yaml_problems(
+    data: Any, path: str = "", problems: list[YamlProblem] | None = None
+) -> list[YamlProblem]:
+    """
+    Find any problems with the given data that would prevent it from being serialized to "standard"
+    YAML, i.e. it's all dicts, lists, and primitives.
+
+    Returns a list of YamlProblem instances, which will be empty if there are no problems.
+    """
+    if problems is None:
+        problems = []
+    if isinstance(data, (str, int, float, bool, type(None))):
+        return problems
+    elif isinstance(data, list):
+        for i, item in enumerate(data):
+            find_yaml_problems(item, f"{path}[{i}]", problems)
+    elif isinstance(data, dict):
+        for k, v in data.items():
+            if not isinstance(k, str):
+                problems.append(YamlProblem(path, f"Invalid dict key: {type(k).__name__}"))
+                return problems
+            find_yaml_problems(v, f"{path}.{k}", problems)
+    else:
+        problems.append(YamlProblem(path, f"Invalid type: {type(data).__name__}"))
+    return problems
+
+
+def ensure_yamlizable(data: Any, data_description: str) -> None:
+    """
+    Ensure that the given data is serializable to YAML without any non-standard tags. I.e. it's all
+    dicts, lists, and primitives.
+
+    Raises a TypeError with a very descriptive error message if the data is not serializable.
+    """
+    problems = find_yaml_problems(data)
+    if problems:
+        bad_yaml = yaml.dump(data)
+        problems_str = " - " + "\n - ".join(str(p) for p in problems)
+        raise TypeError(
+            f"Failed to serialize {data_description} to safe YAML:\n\n{bad_yaml}\nProblems:\n{problems_str}"
+        )
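
Note that find_yaml_problems returns as soon as it meets a non-string key, so any remaining entries of that particular dict go unchecked, while siblings in the enclosing container are still visited by the caller's loop. A quick illustration with made-up data:

    from luminarycloud.pipeline_util.yaml import ensure_yamlizable, find_yaml_problems

    ensure_yamlizable({"a": [1, 2.5, "x", None]}, "demo data")  # no problems, returns None

    problems = find_yaml_problems({"a": {1: "bad key"}, "b": object()})
    for p in problems:
        print(p)
    # .a: Invalid dict key: int
    # .b: Invalid type: object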

luminarycloud/pipelines/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+from .core import (
+    Pipeline as Pipeline,
+)
+
+from .parameters import (
+    StringPipelineParameter as StringPipelineParameter,
+    IntPipelineParameter as IntPipelineParameter,
+    FloatPipelineParameter as FloatPipelineParameter,
+    BoolPipelineParameter as BoolPipelineParameter,
+)
+
+from .operators import (
+    # Operator base class, mainly exported for testing
+    Operator as Operator,
+    # PipelineOutputs, i.e. things that "flow" in a Pipeline
+    PipelineOutputGeometry as PipelineOutputGeometry,
+    PipelineOutputMesh as PipelineOutputMesh,
+    PipelineOutputSimulation as PipelineOutputSimulation,
+    # Concrete operators and their output types
+    ReadGeometry as ReadGeometry,
+    ReadGeometryOutputs as ReadGeometryOutputs,
+    ModifyGeometry as ModifyGeometry,
+    ModifyGeometryOutputs as ModifyGeometryOutputs,
+    Mesh as Mesh,
+    MeshOutputs as MeshOutputs,
+    Simulate as Simulate,
+    SimulateOutputs as SimulateOutputs,
+)
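
Given these aliased re-exports, the intended import surface is the package root:

    from luminarycloud.pipelines import (
        Pipeline,
        ReadGeometry,
        Mesh,
        Simulate,
        StringPipelineParameter,
    )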

luminarycloud/pipelines/core.py
@@ -0,0 +1,225 @@
+# Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+from abc import ABC, abstractmethod
+from dataclasses import is_dataclass, fields
+from typing import Type, TypeVar, Generic
+import re
+import yaml
+
+from .._helpers.warnings import experimental
+from ..pipeline_util.yaml import ensure_yamlizable
+
+
+class PipelineParameter(ABC):
+    """
+    Base class for all concrete PipelineParameters.
+    """
+
+    def __init__(self, name: str):
+        self.name = name
+        self._validate()
+
+    @property
+    def type(self) -> str:
+        return self._type()
+
+    @abstractmethod
+    def _type(self) -> str:
+        pass
+
+    def _validate(self) -> None:
+        if not re.match(r"^[a-zA-Z0-9_-]+$", self.name):
+            raise ValueError(
+                "name must only contain alphanumeric characters, underscores and hyphens"
+            )
+
+    def _add_to_params(self, params: dict) -> None:
+        if self.name in params and params[self.name]["type"] != self.type:
+            raise ValueError(
+                f"Parameter name {self.name} used with multiple types: {params[self.name]['type']} != {self.type}"
+            )
+        params[self.name] = {"type": self.type}
+
+    def _to_pipeline_dict(self) -> tuple[dict, list["PipelineParameter"]]:
+        return {"$pipeline_param": self.name}, [self]
+
+
+class PipelineInput:
+    """
+    A named input for an Operator instance (i.e. a Task). Explicitly connected to a PipelineOutput.
+    """
+
+    def __init__(self, upstream_output: "PipelineOutput", owner: "Operator", name: str):
+        self.upstream_output = upstream_output
+        self.owner = owner
+        self.name = name
+
+    def _to_dict(self, id_for_task: dict) -> dict:
+        if self.upstream_output.owner not in id_for_task:
+            raise ValueError(
+                f"Task {self.owner} depends on a task, {self.upstream_output.owner}, that isn't in the Pipeline. Did you forget to add it?"
+            )
+        upstream_task_id = id_for_task[self.upstream_output.owner]
+        upstream_output_name = self.upstream_output.name
+        return {self.name: f"{upstream_task_id}.{upstream_output_name}"}
+
+
+class PipelineOutput(ABC):
+    """
+    A named output for an Operator instance (i.e. a Task). Can be used to spawn any number of
+    connected PipelineInputs.
+    """
+
+    def __init__(self, owner: "Operator", name: str):
+        self.owner = owner
+        self.name = name
+        self.downstream_inputs: list[PipelineInput] = []
+
+    def _spawn_input(self, owner: "Operator", name: str) -> PipelineInput:
+        input = PipelineInput(self, owner, name)
+        self.downstream_inputs.append(input)
+        return input
+
+
+class OperatorInputs:
+    """
+    A collection of all PipelineInputs for an Operator instance (i.e. a Task).
+    """
+
+    def __init__(
+        self, owner: "Operator", **input_descriptors: tuple[Type[PipelineOutput], PipelineOutput]
+    ):
+        """
+        input_descriptors is a dict of input name -> (required_upstream_output_type, upstream_output)
+        We have that required_upstream_output_type so we can do runtime validation that each given
+        output is of the correct type for the input it's hooked up to.
+        """
+        self.inputs: set[PipelineInput] = set()
+        for name, (required_upstream_output_type, upstream_output) in input_descriptors.items():
+            if not isinstance(upstream_output, required_upstream_output_type):
+                raise ValueError(
+                    f"Input {name} must be a {required_upstream_output_type.__name__}, got {upstream_output.__class__.__name__}"
+                )
+            self.inputs.add(upstream_output._spawn_input(owner, name))
+
+    def _to_dict(self, id_for_task: dict) -> dict[str, str]:
+        d: dict[str, str] = {}
+        for input in self.inputs:
+            d |= input._to_dict(id_for_task)
+        return d
+
+
+T = TypeVar("T", bound="OperatorOutputs")
+
+
+class OperatorOutputs(ABC):
+    """
+    A collection of all PipelineOutputs for an Operator instance (i.e. a Task). Must be subclassed,
+    and the subclass must also be a dataclass whose fields are all PipelineOutput subclasses. Then
+    that subclass should be instantiated with `_instantiate_for`. Sounds a little complicated,
+    perhaps, but it's not bad. See the existing subclasses in `./operators.py` for examples.
+    """
+
+    @classmethod
+    def _instantiate_for(cls: type[T], owner: "Operator") -> T:
+        # create an instance with all fields instantiated with the given owner, and named by the
+        # field name.
+        # Also validate here that we are a dataclass, and all our fields are PipelineOutput types.
+        # Would love to get this done in the type system, but I think it's impossible, so this is
+        # the next best thing.
+        if not is_dataclass(cls):
+            raise TypeError(f"'{cls.__name__}' must be a dataclass")
+        outputs = {}
+        for field in fields(cls):
+            assert not isinstance(field.type, str)
+            if not issubclass(field.type, PipelineOutput):
+                raise TypeError(
+                    f"Field '{field.name}' in '{cls.__name__}' must be a subclass of PipelineOutput"
+                )
+            outputs[field.name] = field.type(owner, field.name)
+        return cls(**outputs)
+
+
+TOutputs = TypeVar("TOutputs", bound=OperatorOutputs)
+
+
+class Operator(Generic[TOutputs], ABC):
+    def __init__(
+        self,
+        task_name: str | None,
+        params: dict,
+        inputs: OperatorInputs,
+        outputs: TOutputs,
+    ):
+        self._operator_name = self.__class__.__name__
+        self._task_name = task_name if task_name is not None else self._operator_name
+        self._params = params
+        self._inputs = inputs
+        self.outputs = outputs
+        ensure_yamlizable(self._params_dict()[0], "Operator parameters")
+
+    def _to_dict(self, id_for_task: dict) -> tuple[dict, list[PipelineParameter]]:
+        params, params_list = self._params_dict()
+        d = {
+            "name": self._task_name,
+            "operator": self._operator_name,
+            "params": params,
+            "inputs": self._inputs._to_dict(id_for_task),
+        }
+        return d, params_list
+
+    def _params_dict(self) -> tuple[dict, list[PipelineParameter]]:
+        d = {}
+        params = []
+        for name, value in self._params.items():
+            if hasattr(value, "_to_pipeline_dict"):
+                d[name], downstream_params = value._to_pipeline_dict()
+                params.extend(downstream_params)
+            else:
+                d[name] = value
+        return d, params
+
+    def __str__(self) -> str:
+        return f'{self._operator_name}(name="{self._task_name}")'
+
+
+@experimental
+class Pipeline:
+    def __init__(self, name: str, tasks: list[Operator]):
+        self.name = name
+        self.tasks = tasks
+
+    def to_yaml(self) -> str:
+        return yaml.safe_dump(self._to_dict())
+
+    def _to_dict(self) -> dict:
+        id_for_task = self._assign_ids_to_tasks()
+        tasks = {}
+        params = []
+        for task in id_for_task.keys():
+            task_dict, referenced_params = task._to_dict(id_for_task)
+            tasks[id_for_task[task]] = task_dict
+            params.extend(referenced_params)
+
+        d = {
+            "lc_pipeline": {
+                "schema_version": 1,
+                "name": self.name,
+                "params": self._pipeline_params_dict(params),
+                "tasks": tasks,
+            }
+        }
+        ensure_yamlizable(d, "Pipeline")
+        return d
+
+    def _assign_ids_to_tasks(self) -> dict[Operator, str]:
+        return {task: f"t{i + 1}-{task._operator_name}" for i, task in enumerate(self.tasks)}
+
+    def _pipeline_params_dict(self, params: list[PipelineParameter]) -> dict:
+        d: dict[str, dict] = {}
+        for p in params:
+            if p.name in d and d[p.name]["type"] != p.type:
+                raise ValueError(
+                    f'PipelineParameter "{p.name}" used with multiple types: {d[p.name]["type"]} != {p.type}'
+                )
+            d[p.name] = {"type": p.type}
+        return d
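
The parameter contract is easy to see in isolation: _validate restricts names to [a-zA-Z0-9_-]+, and _to_pipeline_dict substitutes a $pipeline_param reference while reporting the parameter upward. A sketch with a hypothetical concrete subclass (the real ones live in pipelines/parameters.py, whose hunk is not shown in this section):

    from luminarycloud.pipelines.core import PipelineParameter


    class DemoParam(PipelineParameter):
        def _type(self) -> str:
            return "string"


    d, collected = DemoParam("mesh_size")._to_pipeline_dict()
    # d == {"$pipeline_param": "mesh_size"}; collected == [that same parameter]

    DemoParam("bad name!")  # raises ValueError (space and "!" are rejected)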

luminarycloud/pipelines/operators.py
@@ -0,0 +1,197 @@
+# Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+from dataclasses import dataclass
+
+from .._helpers.warnings import experimental
+from .core import Operator, OperatorInputs, OperatorOutputs, PipelineOutput
+from .parameters import StringPipelineParameter
+from ..meshing import MeshGenerationParams
+
+
+# Concrete PipelineOutput classes, i.e. the things that can "flow" in a Pipeline
+
+
+class PipelineOutputGeometry(PipelineOutput):
+    """A representation of a Geometry in a Pipeline."""
+
+    pass
+
+
+class PipelineOutputMesh(PipelineOutput):
+    """A representation of a Mesh in a Pipeline."""
+
+    pass
+
+
+class PipelineOutputSimulation(PipelineOutput):
+    """A representation of a Simulation in a Pipeline."""
+
+    pass
+
+
+# Operators
+
+
+@dataclass
+class ReadGeometryOutputs(OperatorOutputs):
+    geometry: PipelineOutputGeometry
+    """
+    The Geometry identified by the given `geometry_id`, in the state it was in when the Pipeline was
+    invoked. I.e. the latest GeometryVersion at that moment.
+    """
+
+
+@experimental
+class ReadGeometry(Operator[ReadGeometryOutputs]):
+    """
+    Reads a Geometry into the Pipeline.
+
+    Parameters
+    ----------
+    geometry_id : str | StringPipelineParameter
+        The ID of the Geometry to retrieve.
+
+    Outputs
+    -------
+    geometry : PipelineOutputGeometry
+        The latest GeometryVersion of the Geometry as of the moment the Pipeline was invoked.
+
+    .. warning:: This feature is experimental and may change or be removed in the future.
+    """
+
+    def __init__(
+        self,
+        *,
+        task_name: str | None = None,
+        geometry_id: str | StringPipelineParameter,
+    ):
+        super().__init__(
+            task_name,
+            {"geometry_id": geometry_id},
+            OperatorInputs(self),
+            ReadGeometryOutputs._instantiate_for(self),
+        )
+
+
+@dataclass
+class ModifyGeometryOutputs(OperatorOutputs):
+    geometry: PipelineOutputGeometry
+    """The modified Geometry, represented as a new GeometryVersion."""
+
+
+# TODO: figure out what `mods` actually is. What does the non-pipeline geo mod interface look like?
+@experimental
+class ModifyGeometry(Operator[ModifyGeometryOutputs]):
+    """
+    Modifies a Geometry.
+
+    Parameters
+    ----------
+    mods : dict
+        The modifications to apply to the Geometry.
+    geometry : PipelineOutputGeometry
+        The Geometry to modify.
+
+    Outputs
+    -------
+    geometry : PipelineOutputGeometry
+        The modified Geometry, represented as a new GeometryVersion.
+
+    .. warning:: This feature is experimental and may change or be removed in the future.
+    """
+
+    def __init__(
+        self,
+        *,
+        task_name: str | None = None,
+        mods: list[dict],
+        geometry: PipelineOutputGeometry,
+    ):
+        raise NotImplementedError("ModifyGeometry is not implemented yet.")
+        super().__init__(
+            task_name,
+            {"mods": mods},
+            OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
+            ModifyGeometryOutputs._instantiate_for(self),
+        )
+
+
+@dataclass
+class MeshOutputs(OperatorOutputs):
+    mesh: PipelineOutputMesh
+    """The Mesh generated from the given Geometry."""
+
+
+@experimental
+class Mesh(Operator[MeshOutputs]):
+    """
+    Generates a Mesh from a Geometry.
+
+    Parameters
+    ----------
+    mesh_gen_params : MeshGenerationParams
+        The parameters to use for mesh generation.
+    geometry : PipelineOutputGeometry
+        The Geometry to mesh.
+
+    Outputs
+    -------
+    mesh : PipelineOutputMesh
+        The generated Mesh.
+
+    .. warning:: This feature is experimental and may change or be removed in the future.
+    """
+
+    def __init__(
+        self,
+        *,
+        task_name: str | None = None,
+        mesh_gen_params: MeshGenerationParams,
+        geometry: PipelineOutputGeometry,
+    ):
+        super().__init__(
+            task_name,
+            {"mesh_gen_params": mesh_gen_params},
+            OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
+            MeshOutputs._instantiate_for(self),
+        )
+
+
+@dataclass
+class SimulateOutputs(OperatorOutputs):
+    simulation: PipelineOutputSimulation
+    """The Simulation."""
+
+
+@experimental
+class Simulate(Operator[SimulateOutputs]):
+    """
+    Runs a Simulation.
+
+    Parameters
+    ----------
+    sim_template_id : str | StringPipelineParameter
+        The ID of the SimulationTemplate to use for the Simulation.
+    mesh : PipelineOutputMesh
+        The Mesh to use for the Simulation.
+
+    Outputs
+    -------
+    simulation : PipelineOutputSimulation
+        The Simulation.
+
+    .. warning:: This feature is experimental and may change or be removed in the future.
+    """
+
+    def __init__(
+        self,
+        *,
+        task_name: str | None = None,
+        sim_template_id: str | StringPipelineParameter,
+        mesh: PipelineOutputMesh,
+    ):
+        super().__init__(
+            task_name,
+            {"sim_template_id": sim_template_id},
+            OperatorInputs(self, mesh=(PipelineOutputMesh, mesh)),
+            SimulateOutputs._instantiate_for(self),
+        )