luminarycloud-0.19.0-py3-none-any.whl → luminarycloud-0.22.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. luminarycloud/__init__.py +5 -1
  2. luminarycloud/_client/client.py +7 -0
  3. luminarycloud/_client/http_client.py +10 -8
  4. luminarycloud/_feature_flag.py +22 -0
  5. luminarycloud/_helpers/_create_simulation.py +7 -2
  6. luminarycloud/_helpers/_upload_mesh.py +1 -0
  7. luminarycloud/_helpers/_wait_for_mesh.py +6 -5
  8. luminarycloud/_helpers/_wait_for_simulation.py +3 -3
  9. luminarycloud/_helpers/download.py +3 -1
  10. luminarycloud/_helpers/pagination.py +62 -0
  11. luminarycloud/_helpers/proto_decorator.py +13 -5
  12. luminarycloud/_helpers/upload.py +18 -12
  13. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2.py +55 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2.pyi +52 -0
  15. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2_grpc.py +72 -0
  16. luminarycloud/_proto/api/v0/luminarycloud/feature_flag/feature_flag_pb2_grpc.pyi +35 -0
  17. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +168 -124
  18. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +133 -4
  19. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +66 -0
  20. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +20 -0
  21. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +8 -8
  22. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +5 -5
  23. luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.py +74 -73
  24. luminarycloud/_proto/api/v0/luminarycloud/mesh/mesh_pb2.pyi +17 -3
  25. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +96 -25
  26. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +235 -1
  27. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
  28. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
  29. luminarycloud/_proto/api/v0/luminarycloud/project/project_pb2.py +16 -16
  30. luminarycloud/_proto/api/v0/luminarycloud/project/project_pb2.pyi +7 -3
  31. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +97 -61
  32. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +77 -4
  33. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +34 -0
  34. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +12 -0
  35. luminarycloud/_proto/api/v0/luminarycloud/simulation_template/simulation_template_pb2.py +33 -31
  36. luminarycloud/_proto/api/v0/luminarycloud/simulation_template/simulation_template_pb2.pyi +23 -2
  37. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +126 -27
  38. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +183 -0
  39. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.py +99 -0
  40. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.pyi +30 -0
  41. luminarycloud/_proto/assistant/assistant_pb2.py +74 -41
  42. luminarycloud/_proto/assistant/assistant_pb2.pyi +64 -2
  43. luminarycloud/_proto/assistant/assistant_pb2_grpc.py +33 -0
  44. luminarycloud/_proto/assistant/assistant_pb2_grpc.pyi +10 -0
  45. luminarycloud/_proto/base/base_pb2.py +20 -7
  46. luminarycloud/_proto/base/base_pb2.pyi +38 -0
  47. luminarycloud/_proto/cad/shape_pb2.py +39 -19
  48. luminarycloud/_proto/cad/shape_pb2.pyi +86 -34
  49. luminarycloud/_proto/cad/transformation_pb2.py +60 -16
  50. luminarycloud/_proto/cad/transformation_pb2.pyi +138 -32
  51. luminarycloud/_proto/client/simulation_pb2.py +501 -348
  52. luminarycloud/_proto/client/simulation_pb2.pyi +607 -11
  53. luminarycloud/_proto/geometry/geometry_pb2.py +77 -63
  54. luminarycloud/_proto/geometry/geometry_pb2.pyi +42 -3
  55. luminarycloud/_proto/hexmesh/hexmesh_pb2.py +24 -18
  56. luminarycloud/_proto/hexmesh/hexmesh_pb2.pyi +23 -2
  57. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +10 -10
  58. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +5 -5
  59. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +29 -0
  60. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.pyi +7 -0
  61. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +70 -0
  62. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +30 -0
  63. luminarycloud/_proto/quantity/quantity_options_pb2.py +6 -6
  64. luminarycloud/_proto/quantity/quantity_options_pb2.pyi +10 -1
  65. luminarycloud/_proto/quantity/quantity_pb2.py +176 -167
  66. luminarycloud/_proto/quantity/quantity_pb2.pyi +11 -5
  67. luminarycloud/enum/__init__.py +1 -0
  68. luminarycloud/enum/gpu_type.py +2 -0
  69. luminarycloud/enum/quantity_type.py +9 -0
  70. luminarycloud/enum/vis_enums.py +23 -3
  71. luminarycloud/exceptions.py +7 -1
  72. luminarycloud/feature_modification.py +45 -35
  73. luminarycloud/geometry.py +107 -9
  74. luminarycloud/geometry_version.py +57 -3
  75. luminarycloud/mesh.py +1 -2
  76. luminarycloud/meshing/mesh_generation_params.py +8 -8
  77. luminarycloud/params/enum/_enum_wrappers.py +562 -30
  78. luminarycloud/params/simulation/adaptive_mesh_refinement_.py +4 -0
  79. luminarycloud/params/simulation/material/material_solid_.py +15 -1
  80. luminarycloud/params/simulation/physics/__init__.py +0 -1
  81. luminarycloud/params/simulation/physics/periodic_pair_.py +12 -31
  82. luminarycloud/physics_ai/architectures.py +58 -0
  83. luminarycloud/physics_ai/inference.py +13 -13
  84. luminarycloud/physics_ai/solution.py +3 -1
  85. luminarycloud/physics_ai/training_jobs.py +37 -0
  86. luminarycloud/pipelines/__init__.py +11 -3
  87. luminarycloud/pipelines/api.py +248 -16
  88. luminarycloud/pipelines/arguments.py +15 -0
  89. luminarycloud/pipelines/core.py +113 -96
  90. luminarycloud/pipelines/{operators.py → stages.py} +96 -39
  91. luminarycloud/project.py +15 -47
  92. luminarycloud/simulation.py +69 -5
  93. luminarycloud/simulation_param.py +0 -9
  94. luminarycloud/simulation_template.py +2 -1
  95. luminarycloud/types/matrix3.py +12 -0
  96. luminarycloud/vis/__init__.py +17 -0
  97. luminarycloud/vis/data_extraction.py +20 -4
  98. luminarycloud/vis/interactive_report.py +110 -0
  99. luminarycloud/vis/interactive_scene.py +29 -2
  100. luminarycloud/vis/report.py +252 -0
  101. luminarycloud/vis/visualization.py +127 -5
  102. luminarycloud/volume_selection.py +132 -69
  103. {luminarycloud-0.19.0.dist-info → luminarycloud-0.22.0.dist-info}/METADATA +1 -1
  104. {luminarycloud-0.19.0.dist-info → luminarycloud-0.22.0.dist-info}/RECORD +105 -97
  105. luminarycloud/params/simulation/physics/periodic_pair/__init__.py +0 -2
  106. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/__init__.py +0 -2
  107. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/rotational_periodicity_.py +0 -31
  108. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/translational_periodicity_.py +0 -29
  109. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type_.py +0 -25
  110. {luminarycloud-0.19.0.dist-info → luminarycloud-0.22.0.dist-info}/WHEEL +0 -0
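The headline change in this release is the rename of pipeline "Operators" (a.k.a. "Tasks") to "Stages", visible in the pipelines/core.py and pipelines/{operators.py → stages.py} diffs below. As orientation, here is a minimal usage sketch inferred purely from the signatures in those diffs; the IDs are placeholders and the exact import paths (luminarycloud.pipelines.core, luminarycloud.pipelines.stages) are an assumption, not documented API.

# Hypothetical usage sketch inferred from the diffs below; IDs are placeholders.
from luminarycloud.pipelines.core import Pipeline
from luminarycloud.pipelines.stages import Mesh, ReadGeometry, Simulate

read_geo = ReadGeometry(geometry_id="geom-123")  # a source stage: no inputs

# Connect stages by passing an upstream PipelineOutput as an input.
mesh_stage = Mesh(
    geometry=read_geo.outputs.geometry,
    mesh_name="my-mesh",  # new parameter in this release
    target_cv_count=1_000_000,
)
sim = Simulate(
    mesh=mesh_stage.outputs.mesh,
    sim_name="my-sim",  # new parameter in this release
    sim_template_id="simtpl-456",
)

# Every stage must be listed explicitly; a stage referenced only through an
# input raises "Stage ... depends on a stage ... that isn't in the Pipeline".
pipeline = Pipeline(stages=[read_geo, mesh_stage, sim])
print(pipeline.to_yaml())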
luminarycloud/pipelines/core.py

@@ -88,48 +88,47 @@ class PipelineParameter(ABC):
 
 class PipelineInput:
     """
-    A named input for an Operator instance (i.e. a Task). Explicitly connected to a PipelineOutput.
+    A named input for a Stage. Explicitly connected to a PipelineOutput.
     """
 
-    def __init__(self, upstream_output: "PipelineOutput", owner: "Operator", name: str):
+    def __init__(self, upstream_output: "PipelineOutput", owner: "Stage", name: str):
         self.upstream_output = upstream_output
         self.owner = owner
         self.name = name
 
-    def _to_dict(self, id_for_task: dict) -> dict:
-        if self.upstream_output.owner not in id_for_task:
+    def _to_dict(self, id_for_stage: dict) -> dict:
+        if self.upstream_output.owner not in id_for_stage:
             raise ValueError(
-                f"Task {self.owner} depends on a task, {self.upstream_output.owner}, that isn't in the Pipeline. Did you forget to add it?"
+                f"Stage {self.owner} depends on a stage, {self.upstream_output.owner}, that isn't in the Pipeline. Did you forget to add it?"
             )
-        upstream_task_id = id_for_task[self.upstream_output.owner]
+        upstream_stage_id = id_for_stage[self.upstream_output.owner]
         upstream_output_name = self.upstream_output.name
-        return {self.name: f"{upstream_task_id}.{upstream_output_name}"}
+        return {self.name: f"{upstream_stage_id}.{upstream_output_name}"}
 
 
 class PipelineOutput(ABC):
     """
-    A named output for an Operator instance (i.e. a Task). Can be used to spawn any number of
-    connected PipelineInputs.
+    A named output for a Stage. Can be used to spawn any number of connected PipelineInputs.
     """
 
-    def __init__(self, owner: "Operator", name: str):
+    def __init__(self, owner: "Stage", name: str):
         self.owner = owner
         self.name = name
         self.downstream_inputs: list[PipelineInput] = []
 
-    def _spawn_input(self, owner: "Operator", name: str) -> PipelineInput:
+    def _spawn_input(self, owner: "Stage", name: str) -> PipelineInput:
         input = PipelineInput(self, owner, name)
         self.downstream_inputs.append(input)
         return input
 
 
-class OperatorInputs:
+class StageInputs:
     """
-    A collection of all PipelineInputs for an Operator instance (i.e. a Task).
+    A collection of all PipelineInputs for a Stage.
     """
 
     def __init__(
-        self, owner: "Operator", **input_descriptors: tuple[Type[PipelineOutput], PipelineOutput]
+        self, owner: "Stage", **input_descriptors: tuple[Type[PipelineOutput], PipelineOutput]
     ):
         """
         input_descriptors is a dict of input name -> (required_upstream_output_type, upstream_output)
@@ -144,26 +143,26 @@ class OperatorInputs:
             )
             self.inputs.add(upstream_output._spawn_input(owner, name))
 
-    def _to_dict(self, id_for_task: dict) -> dict[str, str]:
+    def _to_dict(self, id_for_stage: dict) -> dict[str, str]:
         d: dict[str, str] = {}
         for input in self.inputs:
-            d |= input._to_dict(id_for_task)
+            d |= input._to_dict(id_for_stage)
         return d
 
 
-T = TypeVar("T", bound="OperatorOutputs")
+T = TypeVar("T", bound="StageOutputs")
 
 
-class OperatorOutputs(ABC):
+class StageOutputs(ABC):
     """
-    A collection of all PipelineOutputs for an Operator instance (i.e. a Task). Must be subclassed,
-    and the subclass must also be a dataclass whose fields are all PipelineOutput subclasses. Then
-    that subclass should be instantiated with `_instantiate_for`. Sounds a little complicated,
-    perhaps, but it's not bad. See the existing subclasses in `./operators.py` for examples.
+    A collection of all PipelineOutputs for a Stage. Must be subclassed, and the subclass must also
+    be a dataclass whose fields are all PipelineOutput subclasses. Then that subclass should be
+    instantiated with `_instantiate_for`. Sounds a little complicated, perhaps, but it's not bad.
+    See the existing subclasses in `./stages.py` for examples.
     """
 
     @classmethod
-    def _instantiate_for(cls: type[T], owner: "Operator") -> T:
+    def _instantiate_for(cls: type[T], owner: "Stage") -> T:
         # create an instance with all fields instantiated with the given owner, and named by the
         # field name.
         # Also validate here that we are a dataclass, and all our fields are PipelineOutput types.
@@ -188,41 +187,41 @@ class OperatorOutputs(ABC):
         return inputs
 
 
-class OperatorRegistry:
+class StageRegistry:
     def __init__(self):
-        self.operators = {}
+        self.stages = {}
 
-    def register(self, operator_class: Type["Operator"]) -> None:
-        self.operators[operator_class.__name__] = operator_class
+    def register(self, stage_class: Type["Stage"]) -> None:
+        self.stages[stage_class.__name__] = stage_class
 
-    def get(self, operator_name: str) -> Type["Operator"]:
-        if operator_name not in self.operators:
-            raise ValueError(f"Unknown operator: {operator_name}")
-        return self.operators[operator_name]
+    def get(self, stage_type_name: str) -> Type["Stage"]:
+        if stage_type_name not in self.stages:
+            raise ValueError(f"Unknown stage type: {stage_type_name}")
+        return self.stages[stage_type_name]
 
 
-TOutputs = TypeVar("TOutputs", bound=OperatorOutputs)
+TOutputs = TypeVar("TOutputs", bound=StageOutputs)
 
 
-class Operator(Generic[TOutputs], ABC):
+class Stage(Generic[TOutputs], ABC):
     def __init__(
         self,
-        task_name: str | None,
+        stage_name: str | None,
         params: dict,
-        inputs: OperatorInputs,
+        inputs: StageInputs,
         outputs: TOutputs,
     ):
-        self._operator_name = self.__class__.__name__
-        self._task_name = task_name if task_name is not None else self._operator_name
+        self._stage_type_name = self.__class__.__name__
+        self._name = stage_name if stage_name is not None else self._stage_type_name
         self._params = params
         self._inputs = inputs
         self.outputs = outputs
-        ensure_yamlizable(self._params_dict()[0], "Operator parameters")
+        ensure_yamlizable(self._params_dict()[0], "Stage parameters")
 
     def is_source(self) -> bool:
         return len(self._inputs.inputs) == 0
 
-    def inputs_dict(self) -> dict[str, tuple["Operator", str]]:
+    def inputs_dict(self) -> dict[str, tuple["Stage", str]]:
         inputs = {}
         for pipeline_input in self._inputs.inputs:
             inputs[pipeline_input.name] = (
@@ -231,16 +230,16 @@ class Operator(Generic[TOutputs], ABC):
             )
         return inputs
 
-    def downstream_tasks(self) -> list["Operator"]:
+    def downstream_stages(self) -> list["Stage"]:
         return [input.owner for input in self.outputs.downstream_inputs()]
 
-    def _to_dict(self, id_for_task: dict) -> tuple[dict, set[PipelineParameter]]:
+    def _to_dict(self, id_for_stage: dict) -> tuple[dict, set[PipelineParameter]]:
         params, pipeline_params_set = self._params_dict()
         d = {
-            "name": self._task_name,
-            "operator": self._operator_name,
+            "name": self._name,
+            "operator": self._stage_type_name,  # TODO: change key to "stage_type" when we're ready to bump the yaml schema version
            "params": params,
-            "inputs": self._inputs._to_dict(id_for_task),
+            "inputs": self._inputs._to_dict(id_for_stage),
         }
         return d, pipeline_params_set
 
@@ -250,70 +249,77 @@ class Operator(Generic[TOutputs], ABC):
         for name, value in self._params.items():
             if hasattr(value, "_to_pipeline_dict"):
                 d[name], downstream_params = value._to_pipeline_dict()
+                for param in downstream_params:
+                    if not isinstance(param, PipelineParameter):
+                        raise ValueError(
+                            f"Expected `_to_pipeline_dict()` to only return PipelineParameters, but got a {type(param)}: {param}"
+                        )
                 pipeline_params.update(downstream_params)
             else:
                 d[name] = value
+        # Strip None values. We treat absence of a param value in the YAML the same as a present null value.
+        d = {k: v for k, v in d.items() if v is not None}
         return d, pipeline_params
 
     def __str__(self) -> str:
-        return f'{self._operator_name}(name="{self._task_name}")'
+        return f'{self._stage_type_name}(name="{self._name}")'
 
-    _registry = OperatorRegistry()
+    _registry = StageRegistry()
 
     def __init_subclass__(cls, **kwargs):
         super().__init_subclass__(**kwargs)
-        Operator._registry.register(cls)
+        Stage._registry.register(cls)
 
     @classmethod
-    def _get_subclass(cls, operator_name: str) -> Type["Operator"]:
-        return cls._registry.get(operator_name)
+    def _get_subclass(cls, stage_type_name: str) -> Type["Stage"]:
+        return cls._registry.get(stage_type_name)
 
     @classmethod
     def _parse_params(cls, params: dict) -> dict:
-        # Operators with params that are just primitives or PipelineParams have no parsing to do.
-        # Operators with more complicated params should override this method.
+        # Stages with params that are just primitives or PipelineParams have no parsing to do.
+        # Stages with more complicated params should override this method.
         return params
 
 
 class Pipeline:
-    def __init__(self, tasks: list[Operator]):
-        self.tasks = tasks
-        self._task_ids = self._assign_ids_to_tasks()
+    def __init__(self, stages: list[Stage]):
+        self.stages = stages
+        self._stage_ids = self._assign_ids_to_stages()
 
     def to_yaml(self) -> str:
         return yaml.safe_dump(self._to_dict())
 
     def pipeline_params(self) -> set[PipelineParameter]:
-        return self._tasks_dict_and_params()[1]
+        return self._stages_dict_and_params()[1]
 
-    def _get_task_id(self, task: Operator) -> str:
-        return self._task_ids[task]
+    def _get_stage_id(self, stage: Stage) -> str:
+        return self._stage_ids[stage]
 
-    def _tasks_dict_and_params(self) -> tuple[dict, set[PipelineParameter]]:
-        id_for_task = self._task_ids
-        tasks = {}
+    def _stages_dict_and_params(self) -> tuple[dict, set[PipelineParameter]]:
+        id_for_stage = self._stage_ids
+        stages = {}
         params = set()
-        for task in id_for_task.keys():
-            task_dict, referenced_params = task._to_dict(id_for_task)
-            tasks[id_for_task[task]] = task_dict
+        for stage in id_for_stage.keys():
+            stage_dict, referenced_params = stage._to_dict(id_for_stage)
+            stages[id_for_stage[stage]] = stage_dict
             params.update(referenced_params)
-        return tasks, params
+        return stages, params
 
     def _to_dict(self) -> dict:
-        tasks, params = self._tasks_dict_and_params()
+        stages, params = self._stages_dict_and_params()
 
         d = {
             "lc_pipeline": {
                 "schema_version": 1,
                 "params": self._pipeline_params_dict(params),
-                "tasks": tasks,
+                "tasks": stages,  # TODO: change key to "stages" when we're ready to bump the yaml schema version
             }
         }
         ensure_yamlizable(d, "Pipeline")
         return d
 
-    def _assign_ids_to_tasks(self) -> dict[Operator, str]:
-        return {task: f"t{i + 1}-{task._operator_name}" for i, task in enumerate(self.tasks)}
+    def _assign_ids_to_stages(self) -> dict[Stage, str]:
+        return {stage: f"s{i + 1}-{stage._stage_type_name}" for i, stage in enumerate(self.stages)}
 
     def _pipeline_params_dict(self, params: set[PipelineParameter]) -> dict:
         d: dict[str, dict] = {}
@@ -334,7 +340,9 @@ class Pipeline:
         d = d["lc_pipeline"]
         if "schema_version" not in d:
             raise ValueError("Invalid pipeline YAML: missing 'schema_version' key")
-        if "tasks" not in d:
+        if (
+            "tasks" not in d
+        ):  # TODO: change key to "stages" when we're ready to bump the yaml schema version
             raise ValueError("Invalid pipeline YAML: missing 'tasks' key")
 
         if d["schema_version"] != 1:
@@ -347,18 +355,23 @@ class Pipeline:
                 param_name
             )
 
-        # ...and use them as replacements for any references in the tasks' parameters
-        for task_dict in d["tasks"].values():
-            task_dict["params"] = _recursive_replace_pipeline_params(
-                task_dict["params"], parsed_params
+        # ...and use them as replacements for any references in the stages' parameters
+        for stage_dict in d[
+            "tasks"
+        ].values():  # TODO: change key to "stages" when we're ready to bump the yaml schema version
+            stage_dict["params"] = _recursive_replace_pipeline_params(
+                stage_dict["params"], parsed_params
             )
 
-        # then, finish parsing the tasks
-        parsed_tasks = {}
-        for task_id in d["tasks"]:
-            _parse_task(d, task_id, parsed_tasks)
+        # then, finish parsing the stages
+        parsed_stages = {}
+        for stage_id in d["tasks"]:
+            _parse_stage(d, stage_id, parsed_stages)
 
-        return cls(list(parsed_tasks.values()))
+        pipe = cls(list(parsed_stages.values()))
+        # Preserve the stage IDs from the YAML definition by overwriting the auto-assigned ones
+        pipe._stage_ids = {stage: stage_id for stage_id, stage in parsed_stages.items()}
+        return pipe
 
 
 def _recursive_replace_pipeline_params(d: Any, parsed_params: dict) -> Any:
@@ -368,7 +381,7 @@ def _recursive_replace_pipeline_params(d: Any, parsed_params: dict) -> Any:
         pp_name = d["$pipeline_param"]
         if pp_name not in parsed_params:
             raise ValueError(
-                f'Pipeline parameter "{pp_name}" referenced in a pipeline task, but not found in pipeline\'s declared parameters'
+                f'Pipeline parameter "{pp_name}" referenced in a pipeline stage, but not found in pipeline\'s declared parameters'
             )
         return parsed_params[pp_name]
     else:
@@ -382,28 +395,32 @@ def _recursive_replace_pipeline_params(d: Any, parsed_params: dict) -> Any:
     return d
 
 
-def _parse_task(pipeline_dict: dict, task_id: str, all_tasks: dict[str, Operator]) -> Operator:
-    all_tasks_dict = pipeline_dict["tasks"]
-    if task_id in all_tasks:
-        return all_tasks[task_id]
-    task_dict = all_tasks_dict[task_id]
-    operator_name = task_dict["operator"]
-    operator_class = Operator._get_subclass(operator_name)
+def _parse_stage(pipeline_dict: dict, stage_id: str, all_stages: dict[str, Stage]) -> Stage:
+    all_stages_dict = pipeline_dict[
+        "tasks"
+    ]  # TODO: change key to "stages" when we're ready to bump the yaml schema version
+    if stage_id in all_stages:
+        return all_stages[stage_id]
+    stage_dict = all_stages_dict[stage_id]
+    stage_type_name = stage_dict[
+        "operator"
+    ]  # TODO: change key to "stage_type" when we're ready to bump the yaml schema version
+    stage_class = Stage._get_subclass(stage_type_name)
 
     parsed_inputs = {}
-    for input_name, input_value in task_dict["inputs"].items():
-        source_task_id, source_output_name = input_value.split(".")
-        source_task = _parse_task(pipeline_dict, source_task_id, all_tasks)
-        source_output = getattr(source_task.outputs, source_output_name)
+    for input_name, input_value in stage_dict["inputs"].items():
+        source_stage_id, source_output_name = input_value.split(".")
+        source_stage = _parse_stage(pipeline_dict, source_stage_id, all_stages)
+        source_output = getattr(source_stage.outputs, source_output_name)
         parsed_inputs[input_name] = source_output
 
-    parsed_params = operator_class._parse_params(task_dict["params"])
+    parsed_params = stage_class._parse_params(stage_dict["params"])
 
-    op_params = {
-        "task_name": task_dict["name"],
+    stage_params = {
+        "stage_name": stage_dict["name"],
        **parsed_params,
        **parsed_inputs,
    }
-    operator = operator_class(**op_params)
-    all_tasks[task_id] = operator
-    return operator
+    stage = stage_class(**stage_params)
+    all_stages[stage_id] = stage
+    return stage
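For reference, `_to_dict()` and `_assign_ids_to_stages()` above imply a serialized shape along these lines for a two-stage pipeline (a ReadGeometry feeding a Mesh). This is a hedged reconstruction, not captured output: it assumes no PipelineParameters are referenced (so "params" is empty), and note that schema version 1 still uses the "tasks" and "operator" keys, per the TODO comments above.

# Hypothetical result of Pipeline._to_dict(), inferred from the code above;
# yaml.safe_dump(expected) is what to_yaml() would return for this pipeline.
expected = {
    "lc_pipeline": {
        "schema_version": 1,
        "params": {},  # assumes no PipelineParameters are used
        "tasks": {  # still keyed "tasks" in schema v1 (see TODOs above)
            "s1-ReadGeometry": {
                "name": "ReadGeometry",  # defaults to the class name when stage_name is None
                "operator": "ReadGeometry",  # still keyed "operator" in schema v1
                "params": {"geometry_id": "geom-123"},
                "inputs": {},  # a source stage has no inputs
            },
            "s2-Mesh": {
                "name": "Mesh",
                "operator": "Mesh",
                "params": {"mesh_name": "my-mesh", "target_cv_count": 1000000},
                "inputs": {"geometry": "s1-ReadGeometry.geometry"},  # "<stage id>.<output name>"
            },
        },
    }
}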
luminarycloud/pipelines/{operators.py → stages.py}

@@ -1,8 +1,8 @@
 # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
 from dataclasses import dataclass
 
-from .core import Operator, OperatorInputs, OperatorOutputs, PipelineOutput
-from .parameters import StringPipelineParameter
+from .core import Stage, StageInputs, StageOutputs, PipelineOutput
+from .parameters import StringPipelineParameter, IntPipelineParameter
 from ..meshing import MeshGenerationParams
 
 
@@ -27,11 +27,11 @@ class PipelineOutputSimulation(PipelineOutput):
     pass
 
 
-# Operators
+# Stages
 
 
 @dataclass
-class ReadGeometryOutputs(OperatorOutputs):
+class ReadGeometryOutputs(StageOutputs):
     geometry: PipelineOutputGeometry
     """
     The Geometry identified by the given `geometry_id`, in the state it was in when the Pipeline was
@@ -39,10 +39,12 @@ class ReadGeometryOutputs(OperatorOutputs):
     """
 
 
-class ReadGeometry(Operator[ReadGeometryOutputs]):
+class ReadGeometry(Stage[ReadGeometryOutputs]):
     """
     Reads a Geometry into the Pipeline.
 
+    .. warning:: This feature is experimental and may change or be removed in the future.
+
     Parameters
     ----------
     geometry_id : str | StringPipelineParameter
@@ -52,35 +54,80 @@ class ReadGeometry(Operator[ReadGeometryOutputs]):
     -------
     geometry : PipelineOutputGeometry
         The latest GeometryVersion of the Geometry as of the moment the Pipeline was invoked.
-
-    .. warning:: This feature is experimental and may change or be removed in the future.
     """
 
     def __init__(
         self,
         *,
-        task_name: str | None = None,
+        stage_name: str | None = None,
         geometry_id: str | StringPipelineParameter,
     ):
         super().__init__(
-            task_name,
+            stage_name,
             {"geometry_id": geometry_id},
-            OperatorInputs(self),
+            StageInputs(self),
             ReadGeometryOutputs._instantiate_for(self),
         )
 
 
 @dataclass
-class ModifyGeometryOutputs(OperatorOutputs):
+class ReadMeshOutputs(StageOutputs):
+    mesh: PipelineOutputMesh
+    """
+    The Mesh read from the given `mesh_id`.
+    """
+
+
+class ReadMesh(Stage[ReadMeshOutputs]):
+    """
+    Reads a Mesh into the Pipeline.
+
+    .. warning:: This feature is experimental and may change or be removed in the future.
+
+    Parameters
+    ----------
+    mesh_id : str | StringPipelineParameter
+        The ID of the Mesh to retrieve.
+    wait_timeout_seconds : int | IntPipelineParameter | None
+        The number of seconds to wait for the Mesh to be ready. If None, defaults to 1800 seconds
+        (30 minutes).
+
+    Outputs
+    -------
+    mesh : PipelineOutputMesh
+        The Mesh with the given `mesh_id`.
+    """
+
+    def __init__(
+        self,
+        *,
+        stage_name: str | None = None,
+        mesh_id: str | StringPipelineParameter,
+        wait_timeout_seconds: int | IntPipelineParameter | None = None,
+    ):
+        if wait_timeout_seconds is None:
+            wait_timeout_seconds = 30 * 60
+        super().__init__(
+            stage_name,
+            {"mesh_id": mesh_id, "wait_timeout_seconds": wait_timeout_seconds},
+            StageInputs(self),
+            ReadMeshOutputs._instantiate_for(self),
+        )
+
+
+@dataclass
+class ModifyGeometryOutputs(StageOutputs):
     geometry: PipelineOutputGeometry
     """The modified Geometry, represented as a new GeometryVersion."""
 
 
 # TODO: figure out what `mods` actually is. What does the non-pipeline geo mod interface look like?
-class ModifyGeometry(Operator[ModifyGeometryOutputs]):
+class ModifyGeometry(Stage[ModifyGeometryOutputs]):
     """
     Modifies a Geometry.
 
+    .. warning:: This feature is experimental and may change or be removed in the future.
+
     Parameters
     ----------
     mods : dict
@@ -92,62 +139,66 @@ class ModifyGeometry(Operator[ModifyGeometryOutputs]):
     -------
     geometry : PipelineOutputGeometry
         The modified Geometry, represented as a new GeometryVersion.
-
-    .. warning:: This feature is experimental and may change or be removed in the future.
     """
 
     def __init__(
         self,
         *,
-        task_name: str | None = None,
+        stage_name: str | None = None,
         mods: list[dict],
         geometry: PipelineOutputGeometry,
     ):
         raise NotImplementedError("ModifyGeometry is not implemented yet.")
         super().__init__(
-            task_name,
+            stage_name,
             {"mods": mods},
-            OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
+            StageInputs(self, geometry=(PipelineOutputGeometry, geometry)),
             ModifyGeometryOutputs._instantiate_for(self),
         )
 
 
 @dataclass
-class MeshOutputs(OperatorOutputs):
+class MeshOutputs(StageOutputs):
     mesh: PipelineOutputMesh
     """The Mesh generated from the given Geometry."""
 
 
-class Mesh(Operator[MeshOutputs]):
+class Mesh(Stage[MeshOutputs]):
     """
     Generates a Mesh from a Geometry.
 
+    .. warning:: This feature is experimental and may change or be removed in the future.
+
     Parameters
     ----------
-    target_cv_count : int | None
-        The target number of control volumes to generate. If None, a minimal mesh will be generated.
     geometry : PipelineOutputGeometry
         The Geometry to mesh.
+    mesh_name : str | StringPipelineParameter | None
+        The name to assign to the Mesh. If None, a default name will be used.
+    target_cv_count : int | None
+        The target number of control volumes to generate. If None, a minimal mesh will be generated.
 
     Outputs
     -------
     mesh : PipelineOutputMesh
         The generated Mesh.
-
-    .. warning:: This feature is experimental and may change or be removed in the future.
     """
 
     def __init__(
         self,
         *,
-        task_name: str | None = None,
-        target_cv_count: int | None,
+        stage_name: str | None = None,
         geometry: PipelineOutputGeometry,
+        mesh_name: str | StringPipelineParameter | None = None,
+        target_cv_count: int | IntPipelineParameter | None = None,
     ):
         super().__init__(
-            task_name,
-            {"target_cv_count": target_cv_count},
-            OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
+            stage_name,
+            {
+                "mesh_name": mesh_name,
+                "target_cv_count": target_cv_count,
+            },
+            StageInputs(self, geometry=(PipelineOutputGeometry, geometry)),
             MeshOutputs._instantiate_for(self),
         )
 
@@ -160,40 +211,46 @@ class Mesh(Operator[MeshOutputs]):
 
 
 @dataclass
-class SimulateOutputs(OperatorOutputs):
+class SimulateOutputs(StageOutputs):
     simulation: PipelineOutputSimulation
     """The Simulation."""
 
 
-class Simulate(Operator[SimulateOutputs]):
+class Simulate(Stage[SimulateOutputs]):
     """
     Runs a Simulation.
 
+    .. warning:: This feature is experimental and may change or be removed in the future.
+
     Parameters
     ----------
-    sim_template_id : str | StringPipelineParameter
-        The ID of the SimulationTemplate to use for the Simulation.
     mesh : PipelineOutputMesh
         The Mesh to use for the Simulation.
+    sim_name : str | StringPipelineParameter | None
+        The name to assign to the Simulation. If None, a default name will be used.
+    sim_template_id : str | StringPipelineParameter
+        The ID of the SimulationTemplate to use for the Simulation.
 
     Outputs
     -------
     simulation : PipelineOutputSimulation
         The Simulation.
-
-    .. warning:: This feature is experimental and may change or be removed in the future.
     """
 
     def __init__(
         self,
         *,
-        task_name: str | None = None,
-        sim_template_id: str | StringPipelineParameter,
+        stage_name: str | None = None,
         mesh: PipelineOutputMesh,
+        sim_name: str | StringPipelineParameter | None = None,
+        sim_template_id: str | StringPipelineParameter,
     ):
         super().__init__(
-            task_name,
-            {"sim_template_id": sim_template_id},
-            OperatorInputs(self, mesh=(PipelineOutputMesh, mesh)),
+            stage_name,
+            {
+                "sim_name": sim_name,
+                "sim_template_id": sim_template_id,
+            },
+            StageInputs(self, mesh=(PipelineOutputMesh, mesh)),
             SimulateOutputs._instantiate_for(self),
        )
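Beyond the rename, the stages diff above adds a ReadMesh stage, so a pipeline can start from an existing Mesh instead of a Geometry. A minimal sketch under the same assumptions as the earlier example (placeholder IDs, inferred import paths):

# Hypothetical sketch of the new ReadMesh stage; IDs are placeholders.
from luminarycloud.pipelines.core import Pipeline
from luminarycloud.pipelines.stages import ReadMesh, Simulate

read_mesh = ReadMesh(
    mesh_id="mesh-789",
    wait_timeout_seconds=600,  # defaults to 1800 (30 minutes) when left as None
)
sim = Simulate(mesh=read_mesh.outputs.mesh, sim_template_id="simtpl-456")
pipeline = Pipeline(stages=[read_mesh, sim])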