luminarycloud 0.16.0__py3-none-any.whl → 0.16.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- luminarycloud/_client/client.py +6 -1
- luminarycloud/_helpers/_code_representation.py +5 -3
- luminarycloud/_helpers/warnings/__init__.py +0 -1
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +88 -34
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +100 -10
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.py +68 -0
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.pyi +24 -0
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +128 -107
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +48 -3
- luminarycloud/_proto/cad/shape_pb2.py +78 -19
- luminarycloud/_proto/cad/transformation_pb2.py +34 -15
- luminarycloud/_proto/client/simulation_pb2.py +348 -350
- luminarycloud/_proto/client/simulation_pb2.pyi +95 -95
- luminarycloud/_proto/geometry/geometry_pb2.py +68 -68
- luminarycloud/_proto/geometry/geometry_pb2.pyi +10 -6
- luminarycloud/_proto/hexmesh/hexmesh_pb2.py +40 -15
- luminarycloud/_proto/hexmesh/hexmesh_pb2.pyi +58 -1
- luminarycloud/_proto/lcstatus/codes_pb2.py +3 -2
- luminarycloud/_proto/lcstatus/codes_pb2.pyi +4 -0
- luminarycloud/_proto/upload/upload_pb2.py +25 -15
- luminarycloud/_proto/upload/upload_pb2.pyi +31 -2
- luminarycloud/enum/quantity_type.py +4 -0
- luminarycloud/enum/tables.py +1 -0
- luminarycloud/enum/vis_enums.py +20 -0
- luminarycloud/feature_modification.py +6 -7
- luminarycloud/geometry.py +26 -2
- luminarycloud/geometry_version.py +23 -0
- luminarycloud/named_variable_set.py +3 -4
- luminarycloud/outputs/stopping_conditions.py +0 -3
- luminarycloud/params/simulation/adjoint_.py +4 -4
- luminarycloud/params/simulation/material/material_fluid_.py +1 -1
- luminarycloud/params/simulation/material/material_solid_.py +1 -1
- luminarycloud/params/simulation/output_.py +1 -1
- luminarycloud/params/simulation/physics/fluid/initialization/fluid_existing_solution_.py +28 -0
- luminarycloud/params/simulation/simulation_param_.py +6 -0
- luminarycloud/physics_ai/architectures.py +0 -4
- luminarycloud/physics_ai/inference.py +0 -4
- luminarycloud/physics_ai/models.py +0 -4
- luminarycloud/physics_ai/solution.py +2 -2
- luminarycloud/pipelines/__init__.py +6 -0
- luminarycloud/pipelines/arguments.py +105 -0
- luminarycloud/pipelines/core.py +204 -20
- luminarycloud/pipelines/operators.py +11 -9
- luminarycloud/pipelines/parameters.py +25 -4
- luminarycloud/project.py +13 -12
- luminarycloud/simulation_param.py +29 -17
- luminarycloud/simulation_template.py +15 -13
- luminarycloud/solution.py +1 -3
- luminarycloud/tables.py +5 -4
- luminarycloud/thirdparty/__init__.py +12 -0
- luminarycloud/thirdparty/onshape.py +170 -0
- luminarycloud/vis/__init__.py +2 -0
- luminarycloud/vis/data_extraction.py +40 -1
- luminarycloud/vis/filters.py +128 -2
- luminarycloud/vis/visualization.py +1 -1
- luminarycloud/volume_selection.py +2 -2
- {luminarycloud-0.16.0.dist-info → luminarycloud-0.16.2.dist-info}/METADATA +6 -6
- {luminarycloud-0.16.0.dist-info → luminarycloud-0.16.2.dist-info}/RECORD +59 -57
- luminarycloud/_helpers/warnings/experimental.py +0 -48
- {luminarycloud-0.16.0.dist-info → luminarycloud-0.16.2.dist-info}/WHEEL +0 -0
luminarycloud/pipelines/core.py
CHANGED
@@ -1,14 +1,27 @@
 # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
 from abc import ABC, abstractmethod
 from dataclasses import is_dataclass, fields
-from typing import Type, TypeVar, Generic
+from typing import Any, Type, TypeVar, Generic
+from typing_extensions import Self
 import re
 import yaml
 
-from .._helpers.warnings import experimental
 from ..pipeline_util.yaml import ensure_yamlizable
 
 
+class PipelineParameterRegistry:
+    def __init__(self):
+        self.parameters = {}
+
+    def register(self, parameter_class: Type["PipelineParameter"]) -> None:
+        self.parameters[parameter_class._type_name()] = parameter_class
+
+    def get(self, type_name: str) -> Type["PipelineParameter"]:
+        if type_name not in self.parameters:
+            raise ValueError(f"Unknown parameter type: {type_name}")
+        return self.parameters[type_name]
+
+
 class PipelineParameter(ABC):
     """
     Base class for all concrete PipelineParameters.
@@ -20,10 +33,16 @@ class PipelineParameter(ABC):
 
     @property
     def type(self) -> str:
-        return self.
+        return self.__class__._type_name()
+
+    @classmethod
+    @abstractmethod
+    def _represented_type(cls) -> Type:
+        pass
 
+    @classmethod
     @abstractmethod
-    def
+    def _type_name(cls) -> str:
         pass
 
     def _validate(self) -> None:
@@ -42,6 +61,30 @@ class PipelineParameter(ABC):
     def _to_pipeline_dict(self) -> tuple[dict, list["PipelineParameter"]]:
         return {"$pipeline_param": self.name}, [self]
 
+    def __str__(self) -> str:
+        return f'{self.__class__.__name__}(name="{self.name}")'
+
+    _registry = PipelineParameterRegistry()
+
+    def __init_subclass__(cls, **kwargs):
+        super().__init_subclass__(**kwargs)
+        PipelineParameter._registry.register(cls)
+
+    @classmethod
+    def _get_subclass(cls, parameter_type: str) -> Type["PipelineParameter"]:
+        return cls._registry.get(parameter_type)
+
+    def _is_valid_value(self, value: Any) -> bool:
+        return isinstance(value, self._represented_type())
+
+    def __hash__(self) -> int:
+        return hash((self.type, self.name))
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PipelineParameter):
+            return False
+        return self.__hash__() == other.__hash__()
+
 
 class PipelineInput:
     """
@@ -138,6 +181,25 @@ class OperatorOutputs(ABC):
             outputs[field.name] = field.type(owner, field.name)
         return cls(**outputs)
 
+    def downstream_inputs(self) -> list[PipelineInput]:
+        inputs = []
+        for field in fields(self):
+            inputs.extend(getattr(self, field.name).downstream_inputs)
+        return inputs
+
+
+class OperatorRegistry:
+    def __init__(self):
+        self.operators = {}
+
+    def register(self, operator_class: Type["Operator"]) -> None:
+        self.operators[operator_class.__name__] = operator_class
+
+    def get(self, operator_name: str) -> Type["Operator"]:
+        if operator_name not in self.operators:
+            raise ValueError(f"Unknown operator: {operator_name}")
+        return self.operators[operator_name]
+
 
 TOutputs = TypeVar("TOutputs", bound=OperatorOutputs)
 
@@ -157,53 +219,92 @@ class Operator(Generic[TOutputs], ABC):
         self.outputs = outputs
         ensure_yamlizable(self._params_dict()[0], "Operator parameters")
 
-    def
-
+    def is_source(self) -> bool:
+        return len(self._inputs.inputs) == 0
+
+    def inputs_dict(self) -> dict[str, tuple["Operator", str]]:
+        inputs = {}
+        for pipeline_input in self._inputs.inputs:
+            inputs[pipeline_input.name] = (
+                pipeline_input.upstream_output.owner,
+                pipeline_input.upstream_output.name,
+            )
+        return inputs
+
+    def downstream_tasks(self) -> list["Operator"]:
+        return [input.owner for input in self.outputs.downstream_inputs()]
+
+    def _to_dict(self, id_for_task: dict) -> tuple[dict, set[PipelineParameter]]:
+        params, pipeline_params_set = self._params_dict()
         d = {
             "name": self._task_name,
             "operator": self._operator_name,
             "params": params,
             "inputs": self._inputs._to_dict(id_for_task),
         }
-        return d,
+        return d, pipeline_params_set
 
-    def _params_dict(self) -> tuple[dict,
+    def _params_dict(self) -> tuple[dict, set[PipelineParameter]]:
         d = {}
-
+        pipeline_params = set()
         for name, value in self._params.items():
             if hasattr(value, "_to_pipeline_dict"):
                 d[name], downstream_params = value._to_pipeline_dict()
-
+                pipeline_params.update(downstream_params)
             else:
                 d[name] = value
-        return d,
+        return d, pipeline_params
 
     def __str__(self) -> str:
         return f'{self._operator_name}(name="{self._task_name}")'
 
+    _registry = OperatorRegistry()
+
+    def __init_subclass__(cls, **kwargs):
+        super().__init_subclass__(**kwargs)
+        Operator._registry.register(cls)
+
+    @classmethod
+    def _get_subclass(cls, operator_name: str) -> Type["Operator"]:
+        return cls._registry.get(operator_name)
+
+    @classmethod
+    def _parse_params(cls, params: dict) -> dict:
+        # Operators with params that are just primitives or PipelineParams have no parsing to do.
+        # Operators with more complicated params should override this method.
+        return params
+
 
-@experimental
 class Pipeline:
-    def __init__(self,
-        self.name = name
+    def __init__(self, tasks: list[Operator]):
         self.tasks = tasks
+        self._task_ids = self._assign_ids_to_tasks()
 
     def to_yaml(self) -> str:
         return yaml.safe_dump(self._to_dict())
 
-    def
-
+    def pipeline_params(self) -> set[PipelineParameter]:
+        return self._tasks_dict_and_params()[1]
+
+    def _get_task_id(self, task: Operator) -> str:
+        return self._task_ids[task]
+
+    def _tasks_dict_and_params(self) -> tuple[dict, set[PipelineParameter]]:
+        id_for_task = self._task_ids
         tasks = {}
-        params =
+        params = set()
         for task in id_for_task.keys():
             task_dict, referenced_params = task._to_dict(id_for_task)
             tasks[id_for_task[task]] = task_dict
-            params.
+            params.update(referenced_params)
+        return tasks, params
+
+    def _to_dict(self) -> dict:
+        tasks, params = self._tasks_dict_and_params()
 
         d = {
             "lc_pipeline": {
                 "schema_version": 1,
-                "name": self.name,
                 "params": self._pipeline_params_dict(params),
                 "tasks": tasks,
             }
@@ -214,7 +315,7 @@ class Pipeline:
     def _assign_ids_to_tasks(self) -> dict[Operator, str]:
         return {task: f"t{i + 1}-{task._operator_name}" for i, task in enumerate(self.tasks)}
 
-    def _pipeline_params_dict(self, params:
+    def _pipeline_params_dict(self, params: set[PipelineParameter]) -> dict:
         d: dict[str, dict] = {}
         for p in params:
             if p.name in d and d[p.name]["type"] != p.type:
@@ -223,3 +324,86 @@ class Pipeline:
             )
         d[p.name] = {"type": p.type}
         return d
+
+    @classmethod
+    def _from_yaml(cls, yaml_str: str) -> Self:
+        d = yaml.safe_load(yaml_str)
+        if "lc_pipeline" not in d:
+            raise ValueError("Invalid pipeline YAML: missing 'lc_pipeline' key")
+
+        d = d["lc_pipeline"]
+        if "schema_version" not in d:
+            raise ValueError("Invalid pipeline YAML: missing 'schema_version' key")
+        if "tasks" not in d:
+            raise ValueError("Invalid pipeline YAML: missing 'tasks' key")
+
+        if d["schema_version"] != 1:
+            raise ValueError(f"Unsupported schema version: {d['schema_version']}")
+
+        # first, parse the pipeline parameters...
+        parsed_params = {}
+        for param_name, param_metadata in d.get("params", {}).items():
+            parsed_params[param_name] = PipelineParameter._get_subclass(param_metadata["type"])(
+                param_name
+            )
+
+        # ...and use them as replacements for any references in the tasks' parameters
+        for task_dict in d["tasks"].values():
+            task_dict["params"] = _recursive_replace_pipeline_params(
+                task_dict["params"], parsed_params
+            )
+
+        # then, finish parsing the tasks
+        parsed_tasks = {}
+        for task_id in d["tasks"]:
+            _parse_task(d, task_id, parsed_tasks)
+
+        return cls(list(parsed_tasks.values()))
+
+
+def _recursive_replace_pipeline_params(d: Any, parsed_params: dict) -> Any:
+    if isinstance(d, dict):
+        if "$pipeline_param" in d:
+            # d is a dict representation of a PipelineParameter, so return the actual PipelineParameter
+            pp_name = d["$pipeline_param"]
+            if pp_name not in parsed_params:
+                raise ValueError(
+                    f'Pipeline parameter "{pp_name}" referenced in a pipeline task, but not found in pipeline\'s declared parameters'
+                )
+            return parsed_params[pp_name]
+        else:
+            return {
+                key: _recursive_replace_pipeline_params(value, parsed_params)
+                for key, value in d.items()
+            }
+    elif isinstance(d, list):
+        return [_recursive_replace_pipeline_params(item, parsed_params) for item in d]
+    else:
+        return d
+
+
+def _parse_task(pipeline_dict: dict, task_id: str, all_tasks: dict[str, Operator]) -> Operator:
+    all_tasks_dict = pipeline_dict["tasks"]
+    if task_id in all_tasks:
+        return all_tasks[task_id]
+    task_dict = all_tasks_dict[task_id]
+    operator_name = task_dict["operator"]
+    operator_class = Operator._get_subclass(operator_name)
+
+    parsed_inputs = {}
+    for input_name, input_value in task_dict["inputs"].items():
+        source_task_id, source_output_name = input_value.split(".")
+        source_task = _parse_task(pipeline_dict, source_task_id, all_tasks)
+        source_output = getattr(source_task.outputs, source_output_name)
+        parsed_inputs[input_name] = source_output
+
+    parsed_params = operator_class._parse_params(task_dict["params"])
+
+    op_params = {
+        "task_name": task_dict["name"],
+        **parsed_params,
+        **parsed_inputs,
+    }
+    operator = operator_class(**op_params)
+    all_tasks[task_id] = operator
    return operator
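The new Pipeline._from_yaml / _parse_task round trip expects a specific document shape. Below is a hedged sketch of that shape, assembled only from what the hunks above validate: the lc_pipeline / schema_version / params / tasks keys, the "$pipeline_param" reference form, and the "t<N>-<Operator>" task ids. The ReadGeometry parameter name ("geometry_id") and its output name ("geometry") are illustrative assumptions, not values taken from the diff.

# Sketch of the YAML layout accepted by Pipeline._from_yaml (hedged; see note above).
import yaml

pipeline_yaml = """
lc_pipeline:
  schema_version: 1
  params:
    geo_id:
      type: string
  tasks:
    t1-ReadGeometry:
      name: read geometry
      operator: ReadGeometry
      params:
        geometry_id:
          $pipeline_param: geo_id
      inputs: {}
    t2-Mesh:
      name: mesh it
      operator: Mesh
      params:
        max_cv_count: 100000
      inputs:
        geometry: t1-ReadGeometry.geometry
"""

# Parsing the document only needs PyYAML; hydrating it back into Operator
# instances is what Pipeline._from_yaml does with the registries added here.
parsed = yaml.safe_load(pipeline_yaml)
assert parsed["lc_pipeline"]["schema_version"] == 1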
luminarycloud/pipelines/operators.py
CHANGED
@@ -1,7 +1,6 @@
 # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
 from dataclasses import dataclass
 
-from .._helpers.warnings import experimental
 from .core import Operator, OperatorInputs, OperatorOutputs, PipelineOutput
 from .parameters import StringPipelineParameter
 from ..meshing import MeshGenerationParams
@@ -40,7 +39,6 @@ class ReadGeometryOutputs(OperatorOutputs):
     """
 
 
-@experimental
 class ReadGeometry(Operator[ReadGeometryOutputs]):
     """
     Reads a Geometry into the Pipeline.
@@ -79,7 +77,6 @@ class ModifyGeometryOutputs(OperatorOutputs):
 
 
 # TODO: figure out what `mods` actually is. What does the non-pipeline geo mod interface look like?
-@experimental
 class ModifyGeometry(Operator[ModifyGeometryOutputs]):
     """
     Modifies a Geometry.
@@ -121,15 +118,14 @@ class MeshOutputs(OperatorOutputs):
     """The Mesh generated from the given Geometry."""
 
 
-@experimental
 class Mesh(Operator[MeshOutputs]):
     """
     Generates a Mesh from a Geometry.
 
     Parameters
     ----------
-
-    The
+    max_cv_count : int
+        The maximum number of control volumes to generate.
     geometry : PipelineOutputGeometry
         The Geometry to mesh.
 
@@ -145,16 +141,23 @@ class Mesh(Operator[MeshOutputs]):
         self,
         *,
         task_name: str | None = None,
-
+        max_cv_count: int,
         geometry: PipelineOutputGeometry,
     ):
         super().__init__(
             task_name,
-            {"
+            {"max_cv_count": max_cv_count},
             OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
             MeshOutputs._instantiate_for(self),
         )
 
+    # TODO: bring back the full MeshGenerationParams, but we need to be able to hydrate it from the
+    # pipeline YAML. I can probably bake that logic into PipelineDictable, `from_pipeline_dict` or
+    # something.
+    # @classmethod
+    # def _parse_params(cls, params: dict) -> dict:
+    #     return {"mesh_gen_params": MeshGenerationParams.from_pipeline_dict(**params["mesh_gen_params"])}
+
 
 @dataclass
 class SimulateOutputs(OperatorOutputs):
@@ -162,7 +165,6 @@ class SimulateOutputs(OperatorOutputs):
     """The Simulation."""
 
 
-@experimental
 class Simulate(Operator[SimulateOutputs]):
     """
     Runs a Simulation.
luminarycloud/pipelines/parameters.py
CHANGED
@@ -1,5 +1,6 @@
 # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
 from .core import PipelineParameter
+from typing import Type
 
 
 class StringPipelineParameter(PipelineParameter):
@@ -8,7 +9,12 @@ class StringPipelineParameter(PipelineParameter):
     allow its value to be set when the Pipeline is invoked.
     """
 
-
+    @classmethod
+    def _represented_type(cls) -> Type:
+        return str
+
+    @classmethod
+    def _type_name(cls) -> str:
         return "string"
 
 
@@ -18,7 +24,12 @@ class FloatPipelineParameter(PipelineParameter):
     allow its value to be set when the Pipeline is invoked.
     """
 
-
+    @classmethod
+    def _represented_type(cls) -> Type:
+        return float
+
+    @classmethod
+    def _type_name(cls) -> str:
         return "float"
 
 
@@ -28,7 +39,12 @@ class IntPipelineParameter(PipelineParameter):
     allow its value to be set when the Pipeline is invoked.
    """
 
-
+    @classmethod
+    def _represented_type(cls) -> Type:
+        return int
+
+    @classmethod
+    def _type_name(cls) -> str:
         return "int"
 
 
@@ -38,5 +54,10 @@ class BoolPipelineParameter(PipelineParameter):
     allow its value to be set when the Pipeline is invoked.
     """
 
-
+    @classmethod
+    def _represented_type(cls) -> Type:
+        return bool
+
+    @classmethod
+    def _type_name(cls) -> str:
         return "bool"
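With the registry wiring shown in core.py, every PipelineParameter subclass registers itself at class-definition time. A minimal sketch, assuming only what these hunks show (the two abstract classmethods and the __init_subclass__ auto-registration); the "path" parameter type is hypothetical, not part of the package.

from typing import Type

from luminarycloud.pipelines.core import PipelineParameter


class PathPipelineParameter(PipelineParameter):
    @classmethod
    def _represented_type(cls) -> Type:
        # Values bound to this parameter must be strings (checked by _is_valid_value).
        return str

    @classmethod
    def _type_name(cls) -> str:
        # The type name written into the pipeline YAML's params section and used
        # by Pipeline._from_yaml to resolve the class through the registry.
        return "path"


# Registration happens when the class is defined, so the lookup round-trips:
assert PipelineParameter._get_subclass("path") is PathPipelineParameter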
luminarycloud/project.py
CHANGED
@@ -27,7 +27,7 @@ from ._helpers import (
     upload_mesh,
     upload_table_as_json,
 )
-from ._helpers.warnings import deprecated
+from ._helpers.warnings import deprecated
 from ._proto.api.v0.luminarycloud.geometry import geometry_pb2 as geometrypb
 from ._proto.api.v0.luminarycloud.mesh import mesh_pb2 as meshpb
 from ._proto.api.v0.luminarycloud.named_variable_set import (
@@ -39,6 +39,7 @@ from ._proto.api.v0.luminarycloud.simulation_template import (
     simulation_template_pb2 as simtemplatepb,
 )
 from ._proto.client import simulation_pb2 as clientpb
+from ._proto.table import table_pb2 as tablepb
 from ._proto.hexmesh import hexmesh_pb2 as hexmeshpb
 from ._proto.upload import upload_pb2 as uploadpb
 from ._wrapper import ProtoWrapper, ProtoWrapperBase
@@ -440,15 +441,15 @@ class Project(ProtoWrapperBase):
             raise RuntimeError("The table upload failed.")
 
         # Update the simulation template with the new table reference.
-        params:
-        params.
-        params.
-        params.
-
+        params: SimulationParam = simulation_template.get_parameters()
+        params._table_references[name] = tablepb.Metadata()
+        params._table_references[name].url = url
+        params._table_references[name].table_type = table_type.value
+        params._table_references[name].uploaded_filename = uploaded_filename
+        simulation_template.update(parameters=params)
+        # The name is lost in to/from proto conversions so make it equal to the id for consistency.
+        return RectilinearTable(id=name, name=name, table_type=table_type)
 
-        return RectilinearTable(id=name, name=uploaded_filename, table_type=table_type)
-
-    @experimental
     def set_surface_deformation(
         self,
         file_path: PathLike | str,
@@ -608,14 +609,13 @@ class Project(ProtoWrapperBase):
         res = get_default_client().CreateSimulationTemplate(req)
         return lc.SimulationTemplate(res.simulation_template)
 
-    @experimental
     def create_named_variable_set(
         self, name: str, named_variables: dict[str, LcFloat]
     ) -> NamedVariableSet:
         """
         Create a new named variable set.
 
-
+        .. warning:: This feature is experimental and may change or be removed without notice.
         """
         req = namedvariablepb.CreateNamedVariableSetRequest(
             project_id=self.id,
@@ -636,7 +636,6 @@ class Project(ProtoWrapperBase):
         return [lc.NamedVariableSet(n) for n in res.named_variable_sets]
 
 
-@experimental
 def add_named_variables_from_csv(project: Project, csv_path: str) -> list[NamedVariableSet]:
     """
     This function reads named variables from a CSV file and creates corresponding NamedVariableSets in the given project.
@@ -644,6 +643,8 @@ def add_named_variables_from_csv(project: Project, csv_path: str) -> list[NamedV
     name, var1, var2, ...
     name1, val1, val2, ...
     name2, val1, val2, ...
+
+    .. warning:: This feature is experimental and may change or be removed without notice.
     """
     import csv
 
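add_named_variables_from_csv keeps its signature and CSV layout; only the @experimental decorator became a docstring warning. A hedged usage sketch: the variable names and values are illustrative, and lc.get_project is assumed to be the project accessor (it is not part of this diff).

import luminarycloud as lc
from luminarycloud.project import add_named_variables_from_csv

# Build a CSV in the layout shown in the docstring: a header row of variable
# names, then one named set of values per row.
with open("variables.csv", "w") as f:
    f.write("name, alpha, mach\n")
    f.write("case1, 2.0, 0.30\n")
    f.write("case2, 4.0, 0.30\n")

project = lc.get_project("your-project-id")  # assumed accessor
variable_sets = add_named_variables_from_csv(project, "variables.csv")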
luminarycloud/simulation_param.py
CHANGED
@@ -6,12 +6,10 @@ from os import PathLike
 from pprint import pformat
 from typing import Optional, TypeVar, cast
 
-import luminarycloud.params.enum._enum_wrappers as enum
 from luminarycloud._helpers._simulation_params_from_json import (
     simulation_params_from_json_path,
     simulation_params_from_json_dict,
 )
-from luminarycloud._helpers.warnings import experimental
 from luminarycloud._proto.client import simulation_pb2 as clientpb
 from luminarycloud._proto.client.entity_pb2 import EntityIdentifier
 from luminarycloud._proto.output import output_pb2 as outputpb
@@ -38,7 +36,7 @@ from luminarycloud.params.simulation.volume_entity_ import (
 from luminarycloud.params.simulation import (
     SimulationParam as _SimulationParam,
 )
-from luminarycloud.reference_values import ReferenceValues
+from luminarycloud.reference_values import ReferenceValues
 from luminarycloud.types import Vector3Like
 from luminarycloud.types.vector3 import _to_vector3_ad_proto
 
@@ -152,7 +150,10 @@ class SimulationParam(_SimulationParam):
             _list=volume_material_pairs,
             _accessor=lambda v: get_id(v.volume_identifier),
             _to_remove=volume_identifier.id,
-            _warning_message=lambda v:
+            _warning_message=lambda v: (
+                f"Volume {_stringify_identifier(volume_identifier)} has already been assigned "
+                f"material {_stringify_identifier(v.material_identifier)}. Overwriting..."
+            ),
         )
 
         if volume_identifier.id not in (get_id(v.volume_identifier) for v in self.volume_entity):
@@ -226,13 +227,19 @@ class SimulationParam(_SimulationParam):
             _list=volume_physics_pairs,
             _accessor=lambda v: get_id(v.volume_identifier),
             _to_remove=volume_identifier.id,
-            _warning_message=lambda v:
+            _warning_message=lambda v: (
+                f"Volume {_stringify_identifier(volume_identifier)} has already been assigned "
+                f"physics {_stringify_identifier(v.physics_identifier)}. Overwriting..."
+            ),
         )
         _remove_from_list_with_warning(
             _list=volume_physics_pairs,
             _accessor=lambda v: get_id(v.physics_identifier),
             _to_remove=get_id(physics.physics_identifier),
-            _warning_message=lambda v:
+            _warning_message=lambda v: (
+                f"Physics {_stringify_identifier(physics.physics_identifier)} has already been "
+                f"assigned to volume {_stringify_identifier(v.volume_identifier)}. Overwriting..."
+            ),
         )
 
         if volume_identifier.id not in (get_id(v.volume_identifier) for v in self.volume_entity):
@@ -249,7 +256,6 @@ class SimulationParam(_SimulationParam):
             )
         )
 
-    @experimental
     def configure_adjoint_surface_output(
         self,
         quantity_type: QuantityType,
@@ -268,25 +274,25 @@ class SimulationParam(_SimulationParam):
         .. warning:: This feature is experimental and may change or be removed without notice.
         """
         self.adjoint = self.adjoint or Adjoint()
-        self.adjoint.
-        self.adjoint.
-        self.adjoint.
-        self.adjoint.
+        self.adjoint._output = outputpb.Output()
+        self.adjoint._output.quantity = quantity_type.value
+        self.adjoint._output.in_surfaces.extend(surface_ids)
+        self.adjoint._output.frame_id = frame_id
         if QuantityType._is_average(quantity_type):
             if averaging_type == AveragingType.UNSPECIFIED:
-                self.adjoint.
+                self.adjoint._output.surface_average_properties.averaging_type = (
                     SpaceAveragingType.NO_AVERAGING.value
                 )
             elif averaging_type == AveragingType.AREA:
-                self.adjoint.
+                self.adjoint._output.surface_average_properties.averaging_type = (
                     SpaceAveragingType.AREA.value
                 )
             elif averaging_type == AveragingType.MASS_FLOW:
-                self.adjoint.
+                self.adjoint._output.surface_average_properties.averaging_type = (
                     SpaceAveragingType.MASS_FLOW.value
                 )
         elif QuantityType._is_force(quantity_type):
-            self.adjoint.
+            self.adjoint._output.force_properties.CopyFrom(
                 outputpb.ForceProperties(
                     force_dir_type=(
                         outputpb.FORCE_DIRECTION_BODY_ORIENTATION_AND_FLOW_DIR
@@ -332,9 +338,15 @@ from luminarycloud import outputs, SimulationParam, EntityIdentifier
 
 
         """
-
-            "luminarycloud.simulation_param.SimulationParam()", "SimulationParam()", 1
+        code += self._to_code_helper("obj", hide_defaults, use_tmp_objs).replace(
+            "luminarycloud.simulation_param.SimulationParam()", "luminarycloud.SimulationParam()", 1
         )
+        if self.adjoint.primal_simulation_id != "":
+            code += """
+# TODO(USER): Select appropriate parameters to configure the adjoint output.
+obj.configure_adjoint_output("...")
+"""
+        return code
 
     def find_parameter(self, parameter: str) -> None:
         """