luminarycloud 0.22.2__py3-none-any.whl → 0.22.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. luminarycloud/_client/retry_interceptor.py +13 -2
  2. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.py +25 -3
  3. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.pyi +30 -0
  4. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.py +34 -0
  5. luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.pyi +12 -0
  6. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +62 -40
  7. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +86 -16
  8. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
  9. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
  10. luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.py +256 -0
  11. luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.pyi +472 -0
  12. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +2 -2
  13. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +34 -0
  14. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +12 -0
  15. luminarycloud/_proto/quantity/quantity_pb2.pyi +1 -1
  16. luminarycloud/physics_ai/__init__.py +8 -0
  17. luminarycloud/physics_ai/architectures.py +1 -1
  18. luminarycloud/physics_ai/datasets.py +246 -0
  19. luminarycloud/pipelines/__init__.py +11 -0
  20. luminarycloud/pipelines/api.py +61 -0
  21. luminarycloud/pipelines/core.py +358 -45
  22. luminarycloud/pipelines/flowables.py +138 -0
  23. luminarycloud/pipelines/stages.py +7 -31
  24. {luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/METADATA +1 -1
  25. {luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/RECORD +26 -22
  26. {luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/WHEEL +0 -0
luminarycloud/pipelines/core.py
@@ -1,12 +1,26 @@
  # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+ from __future__ import annotations
  from abc import ABC, abstractmethod
  from dataclasses import is_dataclass, fields
- from typing import Any, Type, TypeVar, Generic
+ from typing import Any, Callable, Mapping, Type, TypeVar, Generic, TYPE_CHECKING
  from typing_extensions import Self
+ import inspect
  import re
+ import textwrap
  import yaml

  from ..pipeline_util.yaml import ensure_yamlizable
+ from .flowables import (
+     PipelineOutput,
+     PipelineInput,
+     FlowableType,
+     flowable_class_to_name,
+     flowable_name_to_class,
+     FlowableIOSchema,
+ )
+
+ if TYPE_CHECKING:
+     from .arguments import PipelineArgValueType


  class PipelineParameterRegistry:
@@ -86,40 +100,12 @@ class PipelineParameter(ABC):
          return self.__hash__() == other.__hash__()


- class PipelineInput:
-     """
-     A named input for a Stage. Explicitly connected to a PipelineOutput.
-     """
-
-     def __init__(self, upstream_output: "PipelineOutput", owner: "Stage", name: str):
-         self.upstream_output = upstream_output
-         self.owner = owner
-         self.name = name
-
-     def _to_dict(self, id_for_stage: dict) -> dict:
-         if self.upstream_output.owner not in id_for_stage:
-             raise ValueError(
-                 f"Stage {self.owner} depends on a stage, {self.upstream_output.owner}, that isn't in the Pipeline. Did you forget to add it?"
-             )
-         upstream_stage_id = id_for_stage[self.upstream_output.owner]
-         upstream_output_name = self.upstream_output.name
-         return {self.name: f"{upstream_stage_id}.{upstream_output_name}"}
-
-
- class PipelineOutput(ABC):
+ class StopRun(RuntimeError):
      """
-     A named output for a Stage. Can be used to spawn any number of connected PipelineInputs.
+     Raised by RunScript code to indicate that the pipeline run should stop intentionally.
      """

-     def __init__(self, owner: "Stage", name: str):
-         self.owner = owner
-         self.name = name
-         self.downstream_inputs: list[PipelineInput] = []
-
-     def _spawn_input(self, owner: "Stage", name: str) -> PipelineInput:
-         input = PipelineInput(self, owner, name)
-         self.downstream_inputs.append(input)
-         return input
+     pass


  class StageInputs:
@@ -187,11 +173,52 @@ class StageOutputs(ABC):
          return inputs


+ class DynamicStageOutputs(StageOutputs):
+     def __init__(self, owner: "RunScript", output_types: dict[str, FlowableType]):
+         self.owner = owner
+         self._order = list(output_types.keys())
+         self.outputs: dict[str, PipelineOutput] = {}
+         for name in self._order:
+             output_type = output_types[name]
+             output_cls = flowable_name_to_class(output_type)
+             self.outputs[name] = output_cls(owner, name)
+
+     def downstream_inputs(self) -> list[PipelineInput]:
+         inputs = []
+         for output in self.outputs.values():
+             inputs.extend(output.downstream_inputs)
+         return inputs
+
+     def __getattr__(self, name: str) -> PipelineOutput:
+         return self.outputs[name]
+
+     def __getitem__(self, key: int | str) -> PipelineOutput:
+         if isinstance(key, int):
+             name = self._order[key]
+             return self.outputs[name]
+         return self.outputs[key]
+
+     def __iter__(self):
+         return iter(self._order)
+
+     def __len__(self) -> int:
+         return len(self.outputs)
+
+     def keys(self):
+         return self.outputs.keys()
+
+     def values(self):
+         return self.outputs.values()
+
+     def items(self):
+         return self.outputs.items()
+
+
  class StageRegistry:
      def __init__(self):
          self.stages = {}

-     def register(self, stage_class: Type["Stage"]) -> None:
+     def register(self, stage_class: Type["StandardStage"] | Type["RunScript"]) -> None:
          self.stages[stage_class.__name__] = stage_class

      def get(self, stage_type_name: str) -> Type["Stage"]:
@@ -203,7 +230,7 @@ class StageRegistry:
  TOutputs = TypeVar("TOutputs", bound=StageOutputs)


- class Stage(Generic[TOutputs], ABC):
+ class StandardStage(Generic[TOutputs], ABC):
      def __init__(
          self,
          stage_name: str | None,
@@ -268,7 +295,7 @@ class Stage(Generic[TOutputs], ABC):

      def __init_subclass__(cls, **kwargs):
          super().__init_subclass__(**kwargs)
-         Stage._registry.register(cls)
+         StandardStage._registry.register(cls)

      @classmethod
      def _get_subclass(cls, stage_type_name: str) -> Type["Stage"]:
@@ -281,6 +308,276 @@ class Stage(Generic[TOutputs], ABC):
          return params


+ class RunScript:
+     """
+     RunScript is a stage that runs a user-provided Python function.
+
+     While you can instantiate a RunScript stage directly, the usual way to construct one is to
+     decorate a function with the `@stage` decorator.
+
+     Examples
+     --------
+     >>> @pipelines.stage(
+     ...     inputs={"geometry": read_geo.outputs.geometry},
+     ...     outputs={"geometry": pipelines.PipelineOutputGeometry},
+     ... )
+     ... def ensure_single_volume(geometry: lc.Geometry):
+     ...     _, volumes = geometry.list_entities()
+     ...     if len(volumes) != 1:
+     ...         raise pipelines.StopRun("expected exactly one volume")
+     ...     return {"geometry": geometry}
+     """
+
+     def __init__(
+         self,
+         script: Callable[..., dict[str, Any]] | str,
+         *,
+         stage_name: str | None = None,
+         inputs: dict[str, PipelineOutput] | None = None,
+         outputs: Mapping[str, type[PipelineOutput] | str] | None = None,
+         entrypoint: str | None = None,
+         params: dict[str, Any] | None = None,
+     ):
+         inputs = inputs or {}
+         params = params or {}
+         outputs = outputs or {}
+         overlapping = set(inputs.keys()).intersection(params.keys())
+         if overlapping:
+             overlap = ", ".join(sorted(overlapping))
+             raise ValueError(f"RunScript params and inputs cannot share names: {overlap}")
+
+         inputs_and_params = set(inputs.keys()).union(params.keys())
+         script_source, callable_entrypoint = self._get_script_source(script, inputs_and_params)
+         self._stage_type_name = "RunScript"
+         self._entrypoint = (
+             entrypoint or callable_entrypoint or self._infer_entrypoint(script_source)
+         )
+         self._name = (
+             stage_name if stage_name is not None else self._default_stage_name(self._entrypoint)
+         )
+
+         for input_name, upstream_output in inputs.items():
+             if not isinstance(upstream_output, PipelineOutput):
+                 raise TypeError(
+                     f"Input '{input_name}' must be a PipelineOutput, got {type(upstream_output).__name__}"
+                 )
+
+         stage_inputs_kwargs = {
+             input_name: (PipelineOutput, upstream_output)
+             for input_name, upstream_output in inputs.items()
+         }
+         self._inputs = StageInputs(self, **stage_inputs_kwargs)
+
+         input_types = {
+             input_name: flowable_class_to_name(type(upstream_output))
+             for input_name, upstream_output in inputs.items()
+         }
+         output_flowable_types = self._normalize_output_types(outputs)
+         self._io_schema = FlowableIOSchema(
+             inputs=input_types,
+             outputs=output_flowable_types,
+         )
+
+         self.outputs = DynamicStageOutputs(self, output_flowable_types)
+
+         reserved_params = {
+             "$script": script_source,
+             "$output_types": {name: ft.value for name, ft in output_flowable_types.items()},
+             "$entrypoint": self._entrypoint,
+         }
+         user_params = dict(params or {})
+         invalid_param_names = ({"context"} | reserved_params.keys()).intersection(
+             user_params.keys()
+         )
+         if invalid_param_names:
+             invalid = ", ".join(sorted(invalid_param_names))
+             raise ValueError(f"RunScript params cannot use reserved names: {invalid}")
+         overlapping_input_names = set(inputs.keys()).intersection(user_params.keys())
+         if overlapping_input_names:
+             overlap = ", ".join(sorted(overlapping_input_names))
+             raise ValueError(f"RunScript params and inputs cannot share names: {overlap}")
+         if "context" in inputs.keys():
+             raise ValueError("RunScript inputs cannot include reserved name 'context'")
+
+         self._params = reserved_params | user_params
+         ensure_yamlizable(self._params_dict()[0], "RunScript parameters")
+
+     @staticmethod
+     def _default_stage_name(entrypoint: str) -> str:
+         words = entrypoint.replace("_", " ").split()
+         if not words:
+             return "RunScript"
+         return " ".join(word.capitalize() for word in words)
+
+     @staticmethod
+     def _normalize_output_types(
+         output_types: Mapping[str, type[PipelineOutput] | str | FlowableType],
+     ) -> dict[str, FlowableType]:
+         normalized: dict[str, FlowableType] = {}
+         if not output_types:
+             raise ValueError("RunScript stages must declare at least one output")
+         for name, value in output_types.items():
+             if isinstance(value, FlowableType):
+                 normalized[name] = value
+             elif isinstance(value, str):
+                 normalized[name] = FlowableType(value)
+             elif isinstance(value, type) and issubclass(value, PipelineOutput):
+                 normalized[name] = flowable_class_to_name(value)
+             else:
+                 raise TypeError(
+                     f"Output '{name}' must be a PipelineOutput subclass or flowable type string, got {value}"
+                 )
+         return normalized
+
+     @staticmethod
+     def _validate_script(
+         script: Callable[..., dict[str, Any]], inputs_and_params: set[str]
+     ) -> None:
+         closurevars = inspect.getclosurevars(script)
+         if closurevars.nonlocals:
+             raise ValueError(
+                 f"RunScript functions must not close over non-local variables. Found these non-local variables: {', '.join(closurevars.nonlocals.keys())}"
+             )
+         globals_except_lc = {
+             k for k in closurevars.globals.keys() if k != "lc" and k != "luminarycloud"
+         }
+         if globals_except_lc:
+             raise ValueError(
+                 f"RunScript functions must not rely on global variables, including imports. All modules your script needs (except `luminarycloud` or `lc`) must be imported in the function body. Found globals: {', '.join(globals_except_lc)}"
+             )
+         script_params = set(inspect.signature(script).parameters.keys())
+         if script_params != inputs_and_params and script_params != inputs_and_params | {"context"}:
+             raise ValueError(
+                 f"RunScript function must take exactly the same parameters as the inputs and params (and optionally `context`): {script_params} != {inputs_and_params}"
+             )
+
+     @staticmethod
+     def _get_script_source(
+         script: Callable[..., dict[str, Any]] | str,
+         inputs_and_params: set[str],
+     ) -> tuple[str, str | None]:
+         if callable(script):
+             RunScript._validate_script(script, inputs_and_params)
+             try:
+                 source_lines, _ = inspect.getsourcelines(script)  # type: ignore[arg-type]
+             except (OSError, IOError, TypeError) as exc:
+                 raise ValueError(f"Unable to retrieve source for {script.__name__}: {exc}") from exc
+             # Drop decorator lines (everything before the `def`)
+             for i, line in enumerate(source_lines):
+                 if line.lstrip().startswith("def "):
+                     source_lines = source_lines[i:]
+                     break
+             source = "".join(source_lines)
+             entrypoint = script.__name__
+         else:
+             source = script
+             entrypoint = None
+         dedented = textwrap.dedent(source).strip()
+         if not dedented:
+             raise ValueError("RunScript code cannot be empty")
+         return dedented + "\n", entrypoint
+
+     @staticmethod
+     def _infer_entrypoint(script_source: str) -> str:
+         matches = re.findall(r"^def\s+([A-Za-z_][\w]*)\s*\(", script_source, re.MULTILINE)
+         if not matches:
+             raise ValueError(
+                 "Could not determine the entrypoint for the RunScript code. Please set the `entrypoint` argument."
+             )
+         unique_matches = [match for match in matches if match]
+         if len(unique_matches) > 1:
+             raise ValueError(
+                 "Multiple top-level functions were found in the RunScript code. Please specify the `entrypoint` argument."
+             )
+         return unique_matches[0]
+
+     def is_source(self) -> bool:
+         return len(self._inputs.inputs) == 0
+
+     def inputs_dict(self) -> dict[str, tuple["Stage", str]]:
+         inputs: dict[str, tuple["Stage", str]] = {}
+         for pipeline_input in self._inputs.inputs:
+             inputs[pipeline_input.name] = (
+                 pipeline_input.upstream_output.owner,
+                 pipeline_input.upstream_output.name,
+             )
+         return inputs
+
+     def downstream_stages(self) -> list["Stage"]:
+         return [inp.owner for inp in self.outputs.downstream_inputs()]
+
+     def _params_dict(self) -> tuple[dict, set[PipelineParameter]]:
+         d: dict[str, Any] = {}
+         pipeline_params = set()
+         for name, value in self._params.items():
+             if hasattr(value, "_to_pipeline_dict"):
+                 d[name], downstream_params = value._to_pipeline_dict()
+                 for param in downstream_params:
+                     if not isinstance(param, PipelineParameter):
+                         raise ValueError(
+                             f"Expected `_to_pipeline_dict()` to only return PipelineParameters, but got {type(param)}"
+                         )
+                 pipeline_params.update(downstream_params)
+             else:
+                 d[name] = value
+         d = {k: v for k, v in d.items() if v is not None}
+         return d, pipeline_params
+
+     def _to_dict(self, id_for_task: dict) -> tuple[dict, set[PipelineParameter]]:
+         params, pipeline_params = self._params_dict()
+         d = {
+             "name": self._name,
+             "operator": self._stage_type_name,
+             "params": params,
+             "inputs": self._inputs._to_dict(id_for_task),
+         }
+         return d, pipeline_params
+
+     @classmethod
+     def _parse_params(cls, params: dict) -> dict:
+         return params
+
+
+ def stage(
+     *,
+     inputs: dict[str, PipelineOutput] | None = None,
+     outputs: dict[str, type[PipelineOutput]] | None = None,
+     stage_name: str | None = None,
+     params: dict[str, PipelineParameter | PipelineArgValueType] | None = None,
+ ) -> Callable[[Callable[..., dict[str, Any]]], RunScript]:
+     """
+     Decorator for building a RunScript stage from a Python function.
+
+     Examples
+     --------
+     >>> @pipelines.stage(
+     ...     inputs={"geometry": read_geo.outputs.geometry},
+     ...     outputs={"geometry": pipelines.PipelineOutputGeometry},
+     ... )
+     ... def ensure_single_volume(geometry: lc.Geometry):
+     ...     _, volumes = geometry.list_entities()
+     ...     if len(volumes) != 1:
+     ...         raise pipelines.StopRun("expected exactly one volume")
+     ...     return {"geometry": geometry}
+     """
+
+     def decorator(fn: Callable[..., dict[str, Any]]) -> RunScript:
+         return RunScript(
+             script=fn,
+             stage_name=stage_name,
+             inputs=inputs,
+             outputs=outputs,
+             params=params,
+         )
+
+     return decorator
+
+
+ StandardStage._registry.register(RunScript)
+
+ Stage = StandardStage | RunScript
+
+
  class Pipeline:
      def __init__(self, stages: list[Stage]):
          self.stages = stages
@@ -350,7 +647,7 @@ class Pipeline:

          # first, parse the pipeline parameters...
          parsed_params = {}
-         for param_name, param_metadata in d.get("params", {}).items():
+         for param_name, param_metadata in (d.get("params") or {}).items():
              parsed_params[param_name] = PipelineParameter._get_subclass(param_metadata["type"])(
                  param_name
              )
@@ -405,7 +702,7 @@ def _parse_stage(pipeline_dict: dict, stage_id: str, all_stages: dict[str, Stage
      stage_type_name = stage_dict[
          "operator"
      ]  # TODO: change key to "stage_type" when we're ready to bump the yaml schema version
-     stage_class = Stage._get_subclass(stage_type_name)
+     stage_class = StandardStage._get_subclass(stage_type_name)

      parsed_inputs = {}
      for input_name, input_value in stage_dict["inputs"].items():
@@ -414,13 +711,29 @@ def _parse_stage(pipeline_dict: dict, stage_id: str, all_stages: dict[str, Stage
          source_output = getattr(source_stage.outputs, source_output_name)
          parsed_inputs[input_name] = source_output

-     parsed_params = stage_class._parse_params(stage_dict["params"])
-
-     stage_params = {
-         "stage_name": stage_dict["name"],
-         **parsed_params,
-         **parsed_inputs,
-     }
-     stage = stage_class(**stage_params)
+     parsed_params = stage_class._parse_params(stage_dict.get("params"))
+
+     if stage_class == RunScript:
+         user_params = parsed_params.copy()
+         script = user_params.pop("$script", None)
+         output_types = user_params.pop("$output_types", None)
+         entrypoint = user_params.pop("$entrypoint", None)
+         if script is None or output_types is None:
+             raise ValueError("RunScript stages must define both `$script` and `$output_types`")
+         stage = RunScript(
+             stage_name=stage_dict["name"],
+             script=script,
+             inputs=parsed_inputs,
+             outputs=output_types,
+             entrypoint=entrypoint,
+             params=user_params,
+         )
+     else:
+         stage_params = {
+             "stage_name": stage_dict["name"],
+             **parsed_params,
+             **parsed_inputs,
+         }
+         stage = stage_class(**stage_params)
      all_stages[stage_id] = stage
      return stage
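
The core.py changes above add the RunScript stage, the stage() decorator, and the StopRun exception. The sketch below is assembled from the docstring examples in this diff and shows how a decorated function might slot into a pipeline; `read_geo` stands in for an upstream stage defined elsewhere and is an illustrative assumption, not part of this diff.

import luminarycloud as lc
from luminarycloud import pipelines

# `read_geo` is assumed to be an existing upstream stage (for example a
# ReadGeometry stage) whose `outputs.geometry` handle feeds this RunScript stage.

@pipelines.stage(
    inputs={"geometry": read_geo.outputs.geometry},
    outputs={"geometry": pipelines.PipelineOutputGeometry},
)
def ensure_single_volume(geometry: lc.Geometry):
    # Anything the function needs besides `luminarycloud`/`lc` must be imported
    # inside the body; _validate_script rejects other globals and closures.
    _, volumes = geometry.list_entities()
    if len(volumes) != 1:
        # StopRun signals an intentional early stop of the pipeline run.
        raise pipelines.StopRun("expected exactly one volume")
    return {"geometry": geometry}

# The decorator returns a RunScript instance, so downstream stages can consume
# ensure_single_volume.outputs.geometry like any other stage output.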
luminarycloud/pipelines/flowables.py (new file)
@@ -0,0 +1,138 @@
+ from __future__ import annotations
+
+ from abc import ABC
+ from dataclasses import dataclass, field
+ from enum import Enum
+ from typing import TYPE_CHECKING, Type, Mapping
+
+ if TYPE_CHECKING:
+     from .core import Stage
+
+
+ class PipelineInput:
+     """
+     A named input for a Stage. Explicitly connected to a PipelineOutput.
+     """
+
+     def __init__(self, upstream_output: "PipelineOutput", owner: "Stage", name: str):
+         self.upstream_output = upstream_output
+         self.owner = owner
+         self.name = name
+
+     def _to_dict(self, id_for_stage: dict) -> dict:
+         if self.upstream_output.owner not in id_for_stage:
+             raise ValueError(
+                 f"Stage {self.owner} depends on a stage, {self.upstream_output.owner}, that isn't in the Pipeline. Did you forget to add it?"
+             )
+         upstream_stage_id = id_for_stage[self.upstream_output.owner]
+         upstream_output_name = self.upstream_output.name
+         return {self.name: f"{upstream_stage_id}.{upstream_output_name}"}
+
+
+ class PipelineOutput(ABC):
+     """
+     A named output for a Stage. Can be used to spawn any number of connected PipelineInputs.
+     """
+
+     def __init__(self, owner: "Stage", name: str):
+         self.owner = owner
+         self.name = name
+         self.downstream_inputs: list[PipelineInput] = []
+
+     def _spawn_input(self, owner: "Stage", name: str) -> PipelineInput:
+         input = PipelineInput(self, owner, name)
+         self.downstream_inputs.append(input)
+         return input
+
+
+ # Concrete PipelineOutput classes, i.e. the things that can "flow" in a Pipeline
+
+
+ class PipelineOutputGeometry(PipelineOutput):
+     """A representation of a Geometry in a Pipeline."""
+
+     pass
+
+
+ class PipelineOutputMesh(PipelineOutput):
+     """A representation of a Mesh in a Pipeline."""
+
+     pass
+
+
+ class PipelineOutputSimulation(PipelineOutput):
+     """A representation of a Simulation in a Pipeline."""
+
+     pass
+
+
+ # We don't inherit from StrEnum because that was added in Python 3.11, but we still want to support
+ # older versions. Inheriting from str and Enum gives us the StrEnum-like behavior we want.
+ class FlowableType(str, Enum):
+     """Canonical flowable type identifiers."""
+
+     GEOMETRY = "Geometry"
+     MESH = "Mesh"
+     SIMULATION = "Simulation"
+
+     def __str__(self) -> str:
+         return self.value
+
+
+ _FLOWABLE_NAME_TO_CLASS: dict[FlowableType, Type[PipelineOutput]] = {
+     FlowableType.GEOMETRY: PipelineOutputGeometry,
+     FlowableType.MESH: PipelineOutputMesh,
+     FlowableType.SIMULATION: PipelineOutputSimulation,
+ }
+
+
+ def flowable_class_to_name(output_cls: Type[PipelineOutput]) -> FlowableType:
+     """
+     Convert a PipelineOutput subclass to the canonical flowable type name used in pipeline YAML.
+     """
+     for flowable_type, cls in _FLOWABLE_NAME_TO_CLASS.items():
+         if issubclass(output_cls, cls):
+             return flowable_type
+     raise ValueError(f"Unsupported PipelineOutput subclass: {output_cls.__name__}")
+
+
+ def flowable_name_to_class(name: str | FlowableType) -> Type[PipelineOutput]:
+     """
+     Convert a canonical flowable type name into the corresponding PipelineOutput subclass.
+     """
+     try:
+         flowable_type = FlowableType(name)
+     except ValueError as exc:
+         supported = ", ".join(ft.value for ft in FlowableType)
+         raise ValueError(
+             f"Unknown flowable type '{name}'. Supported types are: {supported}"
+         ) from exc
+     return _FLOWABLE_NAME_TO_CLASS[flowable_type]
+
+
+ def _ensure_flowable_mapping(data: Mapping[str, FlowableType | str]) -> dict[str, FlowableType]:
+     mapping: dict[str, FlowableType] = {}
+     for name, value in data.items():
+         mapping[name] = value if isinstance(value, FlowableType) else FlowableType(value)
+     return mapping
+
+
+ @dataclass(slots=True)
+ class FlowableIOSchema:
+     """Typed representation of RunScript input/output schema."""
+
+     inputs: dict[str, FlowableType] = field(default_factory=dict)
+     outputs: dict[str, FlowableType] = field(default_factory=dict)
+
+     @classmethod
+     def from_dict(cls, data: Mapping[str, Mapping[str, FlowableType | str]]) -> "FlowableIOSchema":
+         return cls(
+             inputs=_ensure_flowable_mapping(data["inputs"]),
+             outputs=_ensure_flowable_mapping(data["outputs"]),
+         )
+
+     def to_dict(self) -> dict[str, dict[str, str]]:
+         return {
+             "inputs": {name: flowable.value for name, flowable in self.inputs.items()},
+             "outputs": {name: flowable.value for name, flowable in self.outputs.items()},
+         }
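
The new flowables module centralizes the names that identify what can flow between stages. A minimal sketch of the round trip between names and PipelineOutput subclasses, assuming the module is importable as luminarycloud.pipelines.flowables per the file list above:

from luminarycloud.pipelines.flowables import (
    FlowableType,
    PipelineOutputMesh,
    flowable_class_to_name,
    flowable_name_to_class,
)

# A plain string or a FlowableType member resolves to the same subclass.
assert flowable_name_to_class("Mesh") is PipelineOutputMesh
assert flowable_name_to_class(FlowableType.MESH) is PipelineOutputMesh

# The mapping also inverts cleanly, which RunScript relies on when it records
# "$output_types" in its stage params.
assert flowable_class_to_name(PipelineOutputMesh) is FlowableType.MESH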
luminarycloud/pipelines/stages.py
@@ -1,33 +1,9 @@
  # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
  from dataclasses import dataclass

- from .core import Stage, StageInputs, StageOutputs, PipelineOutput
+ from .core import StandardStage, StageInputs, StageOutputs
+ from .flowables import PipelineOutputGeometry, PipelineOutputMesh, PipelineOutputSimulation
  from .parameters import BoolPipelineParameter, StringPipelineParameter, IntPipelineParameter
- from ..meshing import MeshGenerationParams
-
-
- # Concrete PipelineOutput classes, i.e. the things that can "flow" in a Pipeline
-
-
- class PipelineOutputGeometry(PipelineOutput):
-     """A representation of a Geometry in a Pipeline."""
-
-     pass
-
-
- class PipelineOutputMesh(PipelineOutput):
-     """A representation of a Mesh in a Pipeline."""
-
-     pass
-
-
- class PipelineOutputSimulation(PipelineOutput):
-     """A representation of a Simulation in a Pipeline."""
-
-     pass
-
-
- # Stages


  @dataclass
@@ -39,7 +15,7 @@ class ReadGeometryOutputs(StageOutputs):
      """


- class ReadGeometry(Stage[ReadGeometryOutputs]):
+ class ReadGeometry(StandardStage[ReadGeometryOutputs]):
      """
      Reads a Geometry into the Pipeline.

@@ -93,7 +69,7 @@ class ReadMeshOutputs(StageOutputs):
      """


- class ReadMesh(Stage[ReadMeshOutputs]):
+ class ReadMesh(StandardStage[ReadMeshOutputs]):
      """
      Reads a Mesh into the Pipeline.

@@ -137,7 +113,7 @@ class ModifyGeometryOutputs(StageOutputs):


  # TODO: figure out what `mods` actually is. What does the non-pipeline geo mod interface look like?
- class ModifyGeometry(Stage[ModifyGeometryOutputs]):
+ class ModifyGeometry(StandardStage[ModifyGeometryOutputs]):
      """
      Modifies a Geometry.

@@ -178,7 +154,7 @@ class MeshOutputs(StageOutputs):
      """The Mesh generated from the given Geometry."""


- class Mesh(Stage[MeshOutputs]):
+ class Mesh(StandardStage[MeshOutputs]):
      """
      Generates a Mesh from a Geometry.

@@ -224,7 +200,7 @@ class SimulateOutputs(StageOutputs):
      """The Simulation."""


- class Simulate(Stage[SimulateOutputs]):
+ class Simulate(StandardStage[SimulateOutputs]):
      """
      Runs a Simulation.

{luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: luminarycloud
- Version: 0.22.2
+ Version: 0.22.3
  Summary: Luminary Cloud SDK
  Project-URL: Homepage, https://www.luminarycloud.com/
  Project-URL: Documentation, https://app.luminarycloud.com/docs/api/