luminarycloud 0.22.1__py3-none-any.whl → 0.22.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- luminarycloud/_client/client.py +5 -3
- luminarycloud/_client/retry_interceptor.py +13 -2
- luminarycloud/_helpers/__init__.py +9 -0
- luminarycloud/_helpers/_inference_jobs.py +227 -0
- luminarycloud/_helpers/_parse_iso_datetime.py +54 -0
- luminarycloud/_helpers/proto_decorator.py +38 -7
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +45 -25
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +30 -0
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.py +25 -3
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.pyi +30 -0
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +140 -45
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +322 -8
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +68 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +24 -0
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +93 -33
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +105 -0
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +70 -0
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +29 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +29 -7
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +39 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +36 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +18 -0
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +70 -70
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +5 -5
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +163 -153
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +37 -3
- luminarycloud/_proto/client/simulation_pb2.py +356 -337
- luminarycloud/_proto/client/simulation_pb2.pyi +89 -3
- luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.py +256 -0
- luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.pyi +472 -0
- luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +9 -4
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +6 -3
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +68 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +24 -0
- luminarycloud/_proto/quantity/quantity_pb2.pyi +1 -1
- luminarycloud/_wrapper.py +53 -7
- luminarycloud/feature_modification.py +25 -32
- luminarycloud/geometry.py +6 -6
- luminarycloud/outputs/__init__.py +2 -0
- luminarycloud/outputs/output_definitions.py +3 -3
- luminarycloud/outputs/stopping_conditions.py +94 -0
- luminarycloud/params/enum/_enum_wrappers.py +16 -0
- luminarycloud/params/geometry/shapes.py +33 -33
- luminarycloud/params/simulation/adaptive_mesh_refinement/__init__.py +1 -0
- luminarycloud/params/simulation/adaptive_mesh_refinement/active_region_.py +83 -0
- luminarycloud/params/simulation/adaptive_mesh_refinement/boundary_layer_profile_.py +1 -1
- luminarycloud/params/simulation/adaptive_mesh_refinement_.py +8 -1
- luminarycloud/physics_ai/__init__.py +15 -0
- luminarycloud/physics_ai/architectures.py +1 -1
- luminarycloud/physics_ai/datasets.py +246 -0
- luminarycloud/physics_ai/inference.py +166 -199
- luminarycloud/physics_ai/models.py +22 -0
- luminarycloud/pipelines/__init__.py +11 -0
- luminarycloud/pipelines/api.py +106 -9
- luminarycloud/pipelines/core.py +358 -45
- luminarycloud/pipelines/flowables.py +138 -0
- luminarycloud/pipelines/stages.py +7 -31
- luminarycloud/project.py +56 -2
- luminarycloud/simulation.py +25 -0
- luminarycloud/types/__init__.py +2 -0
- luminarycloud/types/ids.py +2 -0
- luminarycloud/vis/__init__.py +1 -0
- luminarycloud/vis/filters.py +97 -0
- luminarycloud/vis/visualization.py +3 -0
- luminarycloud/volume_selection.py +6 -6
- luminarycloud/workflow_utils.py +149 -0
- {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.3.dist-info}/METADATA +1 -1
- {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.3.dist-info}/RECORD +73 -70
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +0 -61
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +0 -85
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py +0 -67
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi +0 -26
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +0 -69
- {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.3.dist-info}/WHEEL +0 -0
@@ -0,0 +1,138 @@
+from __future__ import annotations
+
+from abc import ABC
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import TYPE_CHECKING, Type, Mapping
+
+if TYPE_CHECKING:
+    from .core import Stage
+
+
+class PipelineInput:
+    """
+    A named input for a Stage. Explicitly connected to a PipelineOutput.
+    """
+
+    def __init__(self, upstream_output: "PipelineOutput", owner: "Stage", name: str):
+        self.upstream_output = upstream_output
+        self.owner = owner
+        self.name = name
+
+    def _to_dict(self, id_for_stage: dict) -> dict:
+        if self.upstream_output.owner not in id_for_stage:
+            raise ValueError(
+                f"Stage {self.owner} depends on a stage, {self.upstream_output.owner}, that isn't in the Pipeline. Did you forget to add it?"
+            )
+        upstream_stage_id = id_for_stage[self.upstream_output.owner]
+        upstream_output_name = self.upstream_output.name
+        return {self.name: f"{upstream_stage_id}.{upstream_output_name}"}
+
+
+class PipelineOutput(ABC):
+    """
+    A named output for a Stage. Can be used to spawn any number of connected PipelineInputs.
+    """
+
+    def __init__(self, owner: "Stage", name: str):
+        self.owner = owner
+        self.name = name
+        self.downstream_inputs: list[PipelineInput] = []
+
+    def _spawn_input(self, owner: "Stage", name: str) -> PipelineInput:
+        input = PipelineInput(self, owner, name)
+        self.downstream_inputs.append(input)
+        return input
+
+
+# Concrete PipelineOutput classes, i.e. the things that can "flow" in a Pipeline
+
+
+class PipelineOutputGeometry(PipelineOutput):
+    """A representation of a Geometry in a Pipeline."""
+
+    pass
+
+
+class PipelineOutputMesh(PipelineOutput):
+    """A representation of a Mesh in a Pipeline."""
+
+    pass
+
+
+class PipelineOutputSimulation(PipelineOutput):
+    """A representation of a Simulation in a Pipeline."""
+
+    pass
+
+
+# We don't inherit from StrEnum because that was added in Python 3.11, but we still want to support
+# older versions. Inheriting from str and Enum gives us the StrEnum-like behavior we want.
+class FlowableType(str, Enum):
+    """Canonical flowable type identifiers."""
+
+    GEOMETRY = "Geometry"
+    MESH = "Mesh"
+    SIMULATION = "Simulation"
+
+    def __str__(self) -> str:
+        return self.value
+
+
+_FLOWABLE_NAME_TO_CLASS: dict[FlowableType, Type[PipelineOutput]] = {
+    FlowableType.GEOMETRY: PipelineOutputGeometry,
+    FlowableType.MESH: PipelineOutputMesh,
+    FlowableType.SIMULATION: PipelineOutputSimulation,
+}
+
+
+def flowable_class_to_name(output_cls: Type[PipelineOutput]) -> FlowableType:
+    """
+    Convert a PipelineOutput subclass to the canonical flowable type name used in pipeline YAML.
+    """
+    for flowable_type, cls in _FLOWABLE_NAME_TO_CLASS.items():
+        if issubclass(output_cls, cls):
+            return flowable_type
+    raise ValueError(f"Unsupported PipelineOutput subclass: {output_cls.__name__}")
+
+
+def flowable_name_to_class(name: str | FlowableType) -> Type[PipelineOutput]:
+    """
+    Convert a canonical flowable type name into the corresponding PipelineOutput subclass.
+    """
+    try:
+        flowable_type = FlowableType(name)
+    except ValueError as exc:
+        supported = ", ".join(ft.value for ft in FlowableType)
+        raise ValueError(
+            f"Unknown flowable type '{name}'. Supported types are: {supported}"
+        ) from exc
+    return _FLOWABLE_NAME_TO_CLASS[flowable_type]
+
+
+def _ensure_flowable_mapping(data: Mapping[str, FlowableType | str]) -> dict[str, FlowableType]:
+    mapping: dict[str, FlowableType] = {}
+    for name, value in data.items():
+        mapping[name] = value if isinstance(value, FlowableType) else FlowableType(value)
+    return mapping
+
+
+@dataclass(slots=True)
+class FlowableIOSchema:
+    """Typed representation of RunScript input/output schema."""
+
+    inputs: dict[str, FlowableType] = field(default_factory=dict)
+    outputs: dict[str, FlowableType] = field(default_factory=dict)
+
+    @classmethod
+    def from_dict(cls, data: Mapping[str, Mapping[str, FlowableType | str]]) -> "FlowableIOSchema":
+        return cls(
+            inputs=_ensure_flowable_mapping(data["inputs"]),
+            outputs=_ensure_flowable_mapping(data["outputs"]),
+        )
+
+    def to_dict(self) -> dict[str, dict[str, str]]:
+        return {
+            "inputs": {name: flowable.value for name, flowable in self.inputs.items()},
+            "outputs": {name: flowable.value for name, flowable in self.outputs.items()},
+        }
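A minimal usage sketch of the new flowables helpers, based only on the hunk above (not part of the package diff); the import path follows the file's location in the wheel:

from luminarycloud.pipelines.flowables import (
    FlowableIOSchema,
    FlowableType,
    flowable_class_to_name,
    flowable_name_to_class,
)

# Round-trip a RunScript-style I/O schema between plain strings and typed enums.
schema = FlowableIOSchema.from_dict(
    {"inputs": {"geom": "Geometry"}, "outputs": {"sim": "Simulation"}}
)
assert schema.inputs["geom"] is FlowableType.GEOMETRY
assert schema.to_dict() == {"inputs": {"geom": "Geometry"}, "outputs": {"sim": "Simulation"}}

# Map between canonical names and the PipelineOutput subclasses.
assert flowable_name_to_class("Mesh").__name__ == "PipelineOutputMesh"
assert flowable_class_to_name(flowable_name_to_class("Mesh")) is FlowableType.MESH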
luminarycloud/pipelines/stages.py
CHANGED
@@ -1,33 +1,9 @@
 # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
 from dataclasses import dataclass

-from .core import
+from .core import StandardStage, StageInputs, StageOutputs
+from .flowables import PipelineOutputGeometry, PipelineOutputMesh, PipelineOutputSimulation
 from .parameters import BoolPipelineParameter, StringPipelineParameter, IntPipelineParameter
-from ..meshing import MeshGenerationParams
-
-
-# Concrete PipelineOutput classes, i.e. the things that can "flow" in a Pipeline
-
-
-class PipelineOutputGeometry(PipelineOutput):
-    """A representation of a Geometry in a Pipeline."""
-
-    pass
-
-
-class PipelineOutputMesh(PipelineOutput):
-    """A representation of a Mesh in a Pipeline."""
-
-    pass
-
-
-class PipelineOutputSimulation(PipelineOutput):
-    """A representation of a Simulation in a Pipeline."""
-
-    pass
-
-
-# Stages


 @dataclass
@@ -39,7 +15,7 @@ class ReadGeometryOutputs(StageOutputs):
     """


-class ReadGeometry(
+class ReadGeometry(StandardStage[ReadGeometryOutputs]):
     """
     Reads a Geometry into the Pipeline.

@@ -93,7 +69,7 @@ class ReadMeshOutputs(StageOutputs):
     """


-class ReadMesh(
+class ReadMesh(StandardStage[ReadMeshOutputs]):
     """
     Reads a Mesh into the Pipeline.

@@ -137,7 +113,7 @@ class ModifyGeometryOutputs(StageOutputs):


 # TODO: figure out what `mods` actually is. What does the non-pipeline geo mod interface look like?
-class ModifyGeometry(
+class ModifyGeometry(StandardStage[ModifyGeometryOutputs]):
     """
     Modifies a Geometry.

@@ -178,7 +154,7 @@ class MeshOutputs(StageOutputs):
     """The Mesh generated from the given Geometry."""


-class Mesh(
+class Mesh(StandardStage[MeshOutputs]):
     """
     Generates a Mesh from a Geometry.

@@ -224,7 +200,7 @@ class SimulateOutputs(StageOutputs):
     """The Simulation."""


-class Simulate(
+class Simulate(StandardStage[SimulateOutputs]):
     """
     Runs a Simulation.

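The stage classes now declare their outputs type through a generic StandardStage base, and the flowable output classes are imported from the new flowables module. A simplified, self-contained sketch of the generic-stage idiom (illustration only; StandardStage and StageOutputs below are stand-ins, not the SDK's actual core.py):

from dataclasses import dataclass
from typing import Generic, TypeVar

OutputsT = TypeVar("OutputsT")


class StageOutputs:
    """Stand-in for luminarycloud.pipelines.core.StageOutputs."""


class StandardStage(Generic[OutputsT]):
    """Stand-in for luminarycloud.pipelines.core.StandardStage: a stage parameterized by its outputs type."""

    outputs: OutputsT


@dataclass
class ReadGeometryOutputs(StageOutputs):
    geometry: object  # in the SDK this would be a PipelineOutputGeometry


class ReadGeometry(StandardStage[ReadGeometryOutputs]):
    """Type checkers now see this stage's `outputs` attribute as ReadGeometryOutputs."""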
luminarycloud/project.py
CHANGED
@@ -25,6 +25,10 @@ from ._helpers import (
     upload_file,
     upload_mesh,
     upload_table_as_json,
+    create_inference_job,
+    get_inference_job,
+    list_inference_jobs,
+    SurfaceForInference,
 )
 from ._helpers.warnings import deprecated
 from ._proto.api.v0.luminarycloud.geometry import geometry_pb2 as geometrypb
@@ -47,10 +51,20 @@ from ._proto.upload import upload_pb2 as uploadpb
 from ._wrapper import ProtoWrapper, ProtoWrapperBase
 from .enum import GPUType, MeshType, TableType
 from .meshing import MeshAdaptationParams, MeshGenerationParams
+from .named_variable_set import NamedVariableSet, get_named_variable_set
+from .physics_ai.inference import InferenceJob, VisualizationExport
 from .simulation_param import SimulationParam
 from .tables import RectilinearTable, create_rectilinear_table
-from .types import
-
+from .types import (
+    MeshID,
+    ProjectID,
+    SimulationTemplateID,
+    NamedVariableSetID,
+    Expression,
+    LcFloat,
+    PhysicsAiInferenceJobID,
+    PhysicsAiModelVersionID,
+)

 if TYPE_CHECKING:
     from .geometry import Geometry
@@ -701,6 +715,46 @@ class Project(ProtoWrapperBase):
         req = projectpb.UnshareProjectWithSupportRequest(id=self.id)
         get_default_client().UnshareProjectWithSupport(req)

+    def create_inference_job(
+        self,
+        geometry: str,
+        model_version_id: PhysicsAiModelVersionID,
+        synchronous: bool = False,
+        conditions: Optional[Dict[str, Any]] = None,
+        settings: Optional[Dict[str, Any]] = None,
+        surfaces: Optional[list[SurfaceForInference]] = None,
+        inference_fields: Optional[list[str]] = None,
+        per_surface_visualizations: Optional[list[VisualizationExport]] = None,
+        merged_visualizations: Optional[list[VisualizationExport]] = None,
+    ) -> InferenceJob:
+        """
+        Create a new Physics AI inference job.
+        """
+        return create_inference_job(
+            self.id,
+            geometry,
+            model_version_id,
+            synchronous,
+            conditions,
+            settings,
+            surfaces,
+            inference_fields,
+            per_surface_visualizations,
+            merged_visualizations,
+        )
+
+    def get_inference_job(self, job_id: PhysicsAiInferenceJobID) -> InferenceJob:
+        """
+        Get a Physics AI inference job by its ID.
+        """
+        return get_inference_job(job_id)
+
+    def list_inference_jobs(self) -> list[InferenceJob]:
+        """
+        List all inference jobs for the project.
+        """
+        return list_inference_jobs(self.id)
+

 def add_named_variables_from_csv(project: Project, csv_path: str) -> list[NamedVariableSet]:
     """
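The new Project methods are thin wrappers around the _helpers functions imported above, so usage follows the signatures directly. A sketch (not part of the diff); the IDs are placeholders and lc.get_project is assumed from the SDK's top-level API:

import luminarycloud as lc
from luminarycloud.types import PhysicsAiInferenceJobID, PhysicsAiModelVersionID

project = lc.get_project("...")  # placeholder project ID

# Submit a Physics AI inference job for a geometry in this project.
job = project.create_inference_job(
    geometry="geo-...",  # placeholder geometry ID
    model_version_id=PhysicsAiModelVersionID("..."),  # placeholder model version ID
    synchronous=True,  # blocking behavior is implied by the flag name (assumption)
)

# Retrieve a job by ID, or enumerate all jobs in the project.
job = project.get_inference_job(PhysicsAiInferenceJobID("..."))  # placeholder job ID
jobs = project.list_inference_jobs()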
luminarycloud/simulation.py
CHANGED
@@ -22,6 +22,7 @@ from .enum import (
     SimulationStatus,
     Vector3Component,
 )
+from .outputs.stopping_conditions import StoppingConditionStatusResult
 from .simulation_param import SimulationParam
 from .reference_values import ReferenceValues
 from .simulation_param import SimulationParam
@@ -324,6 +325,30 @@ class Simulation(ProtoWrapperBase):
         result = _get_workflow_ids([self.id])
         return result.get(self.id)

+    def get_stopping_condition_status(self) -> StoppingConditionStatusResult:
+        """
+        Retrieves the stopping condition status for a completed simulation.
+
+        This evaluates the stopping conditions defined in the simulation parameters
+        against the final simulation results to determine which conditions were satisfied.
+
+        Returns
+        -------
+        StoppingConditionStatusResult
+            The stopping condition status containing:
+            - overall_success: Whether the overall stopping criteria were met
+            - force_stopped: Whether a force-stop condition was triggered
+            - condition_results: Results for each individual condition (output name, threshold, value, satisfied)
+
+        Raises
+        ------
+        SDKException
+            If the simulation has not completed or the status cannot be retrieved.
+        """
+        req = simulationpb.GetStoppingConditionStatusRequest(id=self.id)
+        res = get_default_client().GetStoppingConditionStatus(req)
+        return StoppingConditionStatusResult._from_proto(res)
+
     @deprecated(
         "Use get_parameters() instead. This method will be removed in a future release.",
     )
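A short sketch of the new accessor (not part of the diff); the result fields come from the docstring above and the simulation ID is a placeholder:

import luminarycloud as lc

sim = lc.get_simulation("sim-...")  # placeholder simulation ID

status = sim.get_stopping_condition_status()
print("overall_success:", status.overall_success)
print("force_stopped:", status.force_stopped)
for cond in status.condition_results:
    # Per the docstring, each entry reports the output name, threshold, value, and whether it was satisfied.
    print(cond)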
luminarycloud/types/__init__.py
CHANGED
@@ -7,6 +7,8 @@ from .ids import (
     SimulationTemplateID as SimulationTemplateID,
     GeometryFeatureID as GeometryFeatureID,
     NamedVariableSetID as NamedVariableSetID,
+    PhysicsAiInferenceJobID as PhysicsAiInferenceJobID,
+    PhysicsAiModelVersionID as PhysicsAiModelVersionID,
 )

 from .adfloat import (
luminarycloud/types/ids.py
CHANGED
@@ -11,5 +11,7 @@ GeometryFeatureID = NewType("GeometryFeatureID", str)
 NamedVariableSetID = NewType("NamedVariableSetID", str)
 PhysicsAiArchitectureID = NewType("PhysicsAiArchitectureID", str)
 PhysicsAiArchitectureVersionID = NewType("PhysicsAiArchitectureVersionID", str)
+PhysicsAiInferenceJobID = NewType("PhysicsAiInferenceJobID", str)
 PhysicsAiModelID = NewType("PhysicsAiModelID", str)
 PhysicsAiModelVersionID = NewType("PhysicsAiModelVersionID", str)
+PhysicsAiTrainingJobID = NewType("PhysicsAiTrainingJobID", str)
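Both additions are typing-only NewType aliases over str, so at runtime they are plain strings. A tiny sketch (not part of the diff; note that only PhysicsAiInferenceJobID is re-exported from luminarycloud.types in the hunk above):

from luminarycloud.types import PhysicsAiInferenceJobID
from luminarycloud.types.ids import PhysicsAiTrainingJobID

job_id = PhysicsAiInferenceJobID("...")  # placeholder value; NewType is erased at runtime
assert isinstance(job_id, str)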
luminarycloud/vis/__init__.py
CHANGED
luminarycloud/vis/filters.py
CHANGED
@@ -139,6 +139,101 @@ class Slice(Filter):
         self.plane._from_proto(filter.slice.plane)


+class MultiSlice(Filter):
+    """
+    Creates multiple parallel slice planes between two positions.
+    Primarily useful as a convenience wrapper when combined with child filters like Threshold, avoiding manual slice filter loops.
+
+    .. warning:: This feature is experimental and may change or be removed in the future.
+
+    Attributes:
+    -----------
+    start_position : Vector3Like
+        The position of the first slice plane.
+    end_position : Vector3Like
+        The position of the last slice plane.
+    n_slices : int
+        The number of slice planes to create between start and end positions.
+    name : str
+        A user provided name for the filter.
+    project_vectors: bool
+        When true, vector fields will be projected onto the plane of each slice. This is often
+        useful for visualizing vector fields by removing the vector components in the normal
+        direction of the planes. Default: False
+    display_attrs : DisplayAttributes
+        Specifies this filter's appearance.
+    """
+
+    def __init__(self, name: str = "") -> None:
+        super().__init__(generate_id("multi-slice-"))
+        self._start_position: Vector3Like = Vector3(x=0, y=0, z=0)
+        self._end_position: Vector3Like = Vector3(x=1, y=0, z=0)
+        self._n_slices: int = 10
+        self._project_vectors: bool = False
+        self.name = name
+
+    @property
+    def start_position(self) -> Vector3Like:
+        return self._start_position
+
+    @start_position.setter
+    def start_position(self, new_start_position: Vector3Like) -> None:
+        self._start_position = _to_vector3(new_start_position)
+
+    @property
+    def end_position(self) -> Vector3Like:
+        return self._end_position
+
+    @end_position.setter
+    def end_position(self, new_end_position: Vector3Like) -> None:
+        self._end_position = _to_vector3(new_end_position)
+
+    @property
+    def n_slices(self) -> int:
+        return self._n_slices
+
+    @n_slices.setter
+    def n_slices(self, new_n_slices: int) -> None:
+        if not isinstance(new_n_slices, int):
+            raise TypeError(f"Expected 'int', got {type(new_n_slices).__name__}")
+        if new_n_slices < 2:
+            raise ValueError("n_slices must be at least 2")
+        self._n_slices = new_n_slices
+
+    @property
+    def project_vectors(self) -> bool:
+        return self._project_vectors
+
+    @project_vectors.setter
+    def project_vectors(self, new_project_vectors: bool) -> None:
+        if not isinstance(new_project_vectors, bool):
+            raise TypeError(f"Expected 'bool', got {type(new_project_vectors).__name__}")
+        self._project_vectors = new_project_vectors
+
+    def _to_proto(self) -> vis_pb2.Filter:
+        vis_filter = vis_pb2.Filter()
+        vis_filter.id = self.id
+        vis_filter.name = self.name
+        vis_filter.multi_slice.start_position.CopyFrom(_to_vector3(self.start_position)._to_proto())
+        vis_filter.multi_slice.end_position.CopyFrom(_to_vector3(self.end_position)._to_proto())
+        vis_filter.multi_slice.n_slices = self.n_slices
+        vis_filter.multi_slice.project_vectors = self.project_vectors
+        return vis_filter
+
+    def _from_proto(self, filter: vis_pb2.Filter) -> None:
+        typ = filter.WhichOneof("value")
+        if typ != "multi_slice":
+            raise TypeError(f"Expected 'multi_slice', got {typ}")
+        self.id = filter.id
+        self.name = filter.name
+        self.start_position = Vector3()
+        self.start_position._from_proto(filter.multi_slice.start_position)
+        self.end_position = Vector3()
+        self.end_position._from_proto(filter.multi_slice.end_position)
+        self.n_slices = filter.multi_slice.n_slices
+        self.project_vectors = filter.multi_slice.project_vectors
+
+
 class Isosurface(Filter):
     """
     Isosurface is used to evaluate scalar fields at constant values, known as
@@ -1130,6 +1225,8 @@ def _filter_to_obj_name(filter: Filter) -> str:
         raise TypeError(f"Expected 'Filter', got {type(filter).__name__}")
     if isinstance(filter, Slice):
         return "slice"
+    elif isinstance(filter, MultiSlice):
+        return "multi_slice"
     elif isinstance(filter, Isosurface):
         return "isosurface"
     elif isinstance(filter, PlaneClip):
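A usage sketch for the new filter, based on the attributes and validation shown above (not part of the diff). It assumes MultiSlice is re-exported from luminarycloud.vis (the one-line vis/__init__.py change whose body is not shown here) and that plain tuples are accepted as Vector3Like:

from luminarycloud.vis import MultiSlice  # or luminarycloud.vis.filters if not re-exported

# Ten parallel slices swept along x between the two positions.
ms = MultiSlice(name="x-sweep")
ms.start_position = (0.0, 0.0, 0.0)  # any Vector3Like value
ms.end_position = (1.0, 0.0, 0.0)
ms.n_slices = 10           # validated: must be an int >= 2
ms.project_vectors = True  # validated: must be a bool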
luminarycloud/vis/visualization.py
CHANGED
@@ -38,6 +38,7 @@ from .filters import (
     Filter,
     PlaneClip,
     Slice,
+    MultiSlice,
     SurfaceStreamlines,
     SurfaceLIC,
     Threshold,
@@ -1282,6 +1283,8 @@ def _spec_to_scene(spec: vis_pb2.ExtractSpec) -> Scene:
            pfilter = PlaneClip("")
        elif typ == "slice":
            pfilter = Slice("")
+       elif typ == "multi_slice":
+           pfilter = MultiSlice("")
        elif typ == "streamlines":
            s_typ = filter.streamlines.WhichOneof("seed_type")
            if s_typ == "surface":
luminarycloud/volume_selection.py
CHANGED
@@ -727,17 +727,17 @@ class VolumeSelection:

         params = gpb.Create()
         if isinstance(shape, Sphere):
-            params.sphere.CopyFrom(shape._to_proto())
+            params.sphere.CopyFrom(shape._to_proto())
         elif isinstance(shape, Cube):
-            params.box.CopyFrom(shape._to_proto())
+            params.box.CopyFrom(shape._to_proto())
         elif isinstance(shape, Cylinder):
-            params.cylinder.CopyFrom(shape._to_proto())
+            params.cylinder.CopyFrom(shape._to_proto())
         elif isinstance(shape, Torus):
-            params.torus.CopyFrom(shape._to_proto())
+            params.torus.CopyFrom(shape._to_proto())
         elif isinstance(shape, Cone):
-            params.cone.CopyFrom(shape._to_proto())
+            params.cone.CopyFrom(shape._to_proto())
         elif isinstance(shape, HalfSphere):
-            params.half_sphere.CopyFrom(shape._to_proto())
+            params.half_sphere.CopyFrom(shape._to_proto())
         else:
             raise TypeError(f"Unsupported shape type: {type(shape)}")
         self.__create_feature(
luminarycloud/workflow_utils.py
ADDED
@@ -0,0 +1,149 @@
+# Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+"""Utilities for working with workflows and entity IDs."""
+
+from luminarycloud.simulation import _get_workflow_ids
+from luminarycloud.types import SimulationID, GeometryID
+from luminarycloud._client.client import _get_primary_domain_for_apiserver_domain
+import luminarycloud as lc
+
+
+def get_workflow_url_from_entity(entity_id: str, base_url: str | None = None) -> str:
+    """
+    Get a complete workflow URL from either a geometry ID or simulation ID.
+
+    The base URL is automatically determined from the current client's API domain.
+    For example, if connected to apis.main.int.luminarycloud.com, the URL will use
+    https://main.int.luminarycloud.com.
+
+
+    Parameters
+    ----------
+    entity_id : str
+        Either a geometry ID (starting with 'geo-') or simulation ID (starting with 'sim-')
+    base_url : str, optional
+        Override the base URL for the Luminary Cloud application. If not provided,
+        it will be automatically determined from the client's API domain.
+
+    Returns
+    -------
+    str
+        The complete workflow URL
+
+    Raises
+    ------
+    ValueError
+        If the entity_id is not a valid geometry or simulation ID, or if no simulations
+        are found using the specified geometry ID
+    """
+    # Use the batch function for a single entity
+    urls = get_workflow_urls_from_entities([entity_id], base_url)
+
+    if entity_id not in urls:
+        if entity_id.startswith("geo-"):
+            raise ValueError(f"No simulations found using geometry ID: {entity_id}")
+        elif entity_id.startswith("sim-"):
+            raise ValueError(f"Unable to resolve simulation ID: {entity_id}")
+        else:
+            raise ValueError(
+                f"Invalid entity ID: {entity_id}. " f"Expected ID starting with 'geo-' or 'sim-'"
+            )
+
+    return urls[entity_id]
+
+
+def get_workflow_urls_from_entities(
+    entity_ids: list[str], base_url: str | None = None
+) -> dict[str, str]:
+    """
+    Get workflow URLs for multiple entity IDs in a single batch operation.
+
+    Parameters
+    ----------
+    entity_ids : list[str]
+        List of entity IDs (geometry IDs starting with 'geo-' or simulation IDs
+        starting with 'sim-')
+    base_url : str, optional
+        Override the base URL for the Luminary Cloud application. If not provided,
+        it will be automatically determined from the client's API domain.
+
+    Returns
+    -------
+    dict[str, str]
+        Dictionary mapping entity IDs to their workflow URLs. Only includes entity IDs
+        that were successfully resolved (partial data pattern).
+
+    Raises
+    ------
+    ValueError
+        If base_url cannot be determined from the client
+
+    Notes
+    -----
+    - Geometry IDs that don't map to any simulation will be omitted from the result
+    - Invalid entity IDs will be omitted from the result
+    """
+    if not entity_ids:
+        return {}
+
+    # Auto-detect base URL from client if not provided
+    if base_url is None:
+        client = lc.get_default_client()
+        # Remove port if present from the API domain
+        api_domain = client._apiserver_domain.split(":", maxsplit=1)[0]
+        primary_domain = _get_primary_domain_for_apiserver_domain(api_domain)
+        if primary_domain is None:
+            raise ValueError(f"Unable to determine web URL for API domain: {api_domain}")
+        base_url = f"https://{primary_domain}"
+
+    # Separate simulation IDs and geometry IDs
+    sim_ids_to_entity: dict[SimulationID, str] = {}
+    geo_ids_to_entity: dict[GeometryID, str] = {}
+
+    for entity_id in entity_ids:
+        if entity_id.startswith("sim-"):
+            sim_ids_to_entity[SimulationID(entity_id)] = entity_id
+        elif entity_id.startswith("geo-"):
+            geo_ids_to_entity[GeometryID(entity_id)] = entity_id
+
+    # Get project IDs for all simulation IDs
+    sim_to_project: dict[SimulationID, str] = {}
+    for sim_id in sim_ids_to_entity.keys():
+        try:
+            sim = lc.get_simulation(sim_id)
+            sim_to_project[sim_id] = sim.project_id
+        except Exception:
+            # Skip simulation IDs that can't be resolved
+            continue
+
+    # Handle geometry IDs - find simulations that use each geometry
+    for geo_id_typed, entity_id in geo_ids_to_entity.items():
+        try:
+            geom = lc.get_geometry(geo_id_typed)
+            project = geom.project()
+
+            # Find first simulation using this geometry
+            for sim in project.list_simulations():
+                mesh = lc.get_mesh(sim.mesh_id)
+                geom_version = mesh.geometry_version()
+
+                if geom_version and geom_version.geometry().id == entity_id:
+                    # Found a simulation using this geometry
+                    sim_ids_to_entity[sim.id] = entity_id
+                    sim_to_project[sim.id] = project.id
+                    break
+        except Exception:
+            # Skip geometry IDs that can't be resolved
+            continue
+
+    # Batch get workflow IDs for all simulation IDs
+    result: dict[str, str] = {}
+    if sim_ids_to_entity:
+        workflow_ids = _get_workflow_ids(list(sim_ids_to_entity.keys()))
+
+        for sim_id, workflow_id in workflow_ids.items():
+            entity_id = sim_ids_to_entity[sim_id]
+            project_id = sim_to_project.get(sim_id)
+            if project_id:
+                result[entity_id] = f"{base_url}/project/{project_id}/simulation/{workflow_id}"
+
+    return result
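The new helpers resolve geometry or simulation IDs to web app workflow URLs. A sketch using only the functions defined above (not part of the diff; the IDs are placeholders):

from luminarycloud.workflow_utils import (
    get_workflow_url_from_entity,
    get_workflow_urls_from_entities,
)

# Single entity: raises ValueError if the ID cannot be resolved.
url = get_workflow_url_from_entity("sim-...")  # placeholder simulation ID

# Batch: unresolvable IDs are silently omitted from the result.
urls = get_workflow_urls_from_entities(["sim-...", "geo-..."])
for entity_id, workflow_url in urls.items():
    print(entity_id, "->", workflow_url)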