luminarycloud 0.20.0__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. luminarycloud/__init__.py +2 -0
  2. luminarycloud/_helpers/pagination.py +62 -0
  3. luminarycloud/_helpers/upload.py +3 -6
  4. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +168 -124
  5. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +125 -3
  6. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +66 -0
  7. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +20 -0
  8. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +8 -8
  9. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +5 -5
  10. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +66 -19
  11. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +92 -0
  12. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.py +33 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2_grpc.pyi +10 -0
  14. luminarycloud/_proto/assistant/assistant_pb2.py +61 -41
  15. luminarycloud/_proto/assistant/assistant_pb2.pyi +43 -1
  16. luminarycloud/_proto/assistant/assistant_pb2_grpc.py +33 -0
  17. luminarycloud/_proto/assistant/assistant_pb2_grpc.pyi +10 -0
  18. luminarycloud/_proto/base/base_pb2.py +9 -6
  19. luminarycloud/_proto/base/base_pb2.pyi +12 -0
  20. luminarycloud/_proto/client/simulation_pb2.py +490 -348
  21. luminarycloud/_proto/client/simulation_pb2.pyi +570 -8
  22. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +10 -10
  23. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +5 -5
  24. luminarycloud/_proto/quantity/quantity_pb2.py +24 -15
  25. luminarycloud/_proto/quantity/quantity_pb2.pyi +10 -4
  26. luminarycloud/enum/__init__.py +1 -0
  27. luminarycloud/enum/quantity_type.py +9 -0
  28. luminarycloud/enum/vis_enums.py +23 -3
  29. luminarycloud/geometry.py +41 -1
  30. luminarycloud/geometry_version.py +57 -3
  31. luminarycloud/params/enum/_enum_wrappers.py +537 -30
  32. luminarycloud/params/simulation/adaptive_mesh_refinement_.py +4 -0
  33. luminarycloud/params/simulation/physics/__init__.py +0 -1
  34. luminarycloud/params/simulation/physics/periodic_pair_.py +12 -31
  35. luminarycloud/physics_ai/architectures.py +5 -5
  36. luminarycloud/physics_ai/inference.py +13 -13
  37. luminarycloud/pipelines/__init__.py +8 -0
  38. luminarycloud/pipelines/api.py +159 -4
  39. luminarycloud/pipelines/arguments.py +15 -0
  40. luminarycloud/pipelines/operators.py +74 -17
  41. luminarycloud/project.py +5 -44
  42. luminarycloud/simulation.py +9 -3
  43. luminarycloud/simulation_param.py +0 -9
  44. luminarycloud/vis/__init__.py +2 -0
  45. luminarycloud/vis/interactive_report.py +79 -93
  46. luminarycloud/vis/report.py +219 -65
  47. luminarycloud/vis/visualization.py +60 -0
  48. luminarycloud/volume_selection.py +58 -9
  49. {luminarycloud-0.20.0.dist-info → luminarycloud-0.21.0.dist-info}/METADATA +1 -1
  50. {luminarycloud-0.20.0.dist-info → luminarycloud-0.21.0.dist-info}/RECORD +51 -55
  51. luminarycloud/params/simulation/physics/periodic_pair/__init__.py +0 -2
  52. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/__init__.py +0 -2
  53. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/rotational_periodicity_.py +0 -31
  54. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type/translational_periodicity_.py +0 -29
  55. luminarycloud/params/simulation/physics/periodic_pair/periodicity_type_.py +0 -25
  56. {luminarycloud-0.20.0.dist-info → luminarycloud-0.21.0.dist-info}/WHEEL +0 -0
@@ -17,17 +17,6 @@ import luminarycloud.params.enum._enum_wrappers as enum
17
17
 
18
18
  from luminarycloud.params.simulation._lib import ParamGroupWrapper, create_unique_id
19
19
 
20
- from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type_ import PeriodicityType
21
- from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type_ import *
22
- from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.rotational_periodicity_ import (
23
- RotationalPeriodicity,
24
- )
25
- from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.rotational_periodicity_ import *
26
- from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.translational_periodicity_ import (
27
- TranslationalPeriodicity,
28
- )
29
- from luminarycloud.params.simulation.physics.periodic_pair.periodicity_type.translational_periodicity_ import *
30
-
31
20
 
32
21
  @dataclass(kw_only=True)
33
22
  class PeriodicPair(CodeRepr, ParamGroupWrapper[clientpb.PeriodicPair]):
@@ -39,8 +28,12 @@ class PeriodicPair(CodeRepr, ParamGroupWrapper[clientpb.PeriodicPair]):
39
28
  ""
40
29
  surfaces_side_b: list[str] = field(default_factory=list)
41
30
  ""
42
- periodicity_type: PeriodicityType = field(default_factory=TranslationalPeriodicity)
43
- " Possible types: ``TranslationalPeriodicity``, ``RotationalPeriodicity`` from the ``periodicity_type`` module."
31
+ translation: Vector3 = field(default_factory=lambda: Vector3(0.0, 0.0, 0.0))
32
+ "Holds the translation vector in the x,y,z directions from each point on the first periodic boundary to its matching point on the second periodic boundary."
33
+ center: Vector3 = field(default_factory=lambda: Vector3(0.0, 0.0, 0.0))
34
+ "Origin for the rotational periodicity transformation."
35
+ rotation_vector: Vector3 = field(default_factory=lambda: Vector3(0.0, 0.0, 0.0))
36
+ "Holds the rotation vector needed to transform each point on the first periodic boundary to its matching point on the second periodic boundary. The vector direction defines the periodicity axis and its magnitude defines the periodicity angle, in degrees, from A to B according to the right-hand rule."
44
37
 
45
38
  def _to_proto(self) -> clientpb.PeriodicPair:
46
39
  _proto = clientpb.PeriodicPair()
@@ -50,28 +43,16 @@ class PeriodicPair(CodeRepr, ParamGroupWrapper[clientpb.PeriodicPair]):
50
43
  _proto.bound_a.extend(self.surfaces_side_a)
51
44
  if self.surfaces_side_b is not None:
52
45
  _proto.bound_b.extend(self.surfaces_side_b)
53
- if isinstance(self.periodicity_type, TranslationalPeriodicity):
54
- _proto.periodic_bc_type = clientpb.TRANSLATIONAL
55
- _proto.periodic_translation.CopyFrom(self.periodicity_type.translation._to_ad_proto())
56
- if isinstance(self.periodicity_type, RotationalPeriodicity):
57
- _proto.periodic_bc_type = clientpb.ROTATIONAL
58
- _proto.periodic_center_of_rotation.CopyFrom(self.periodicity_type.center._to_ad_proto())
59
- _proto.periodic_rotation_angles.CopyFrom(
60
- self.periodicity_type.rotation_vector._to_ad_proto()
61
- )
46
+ _proto.periodic_translation.CopyFrom(self.translation._to_ad_proto())
47
+ _proto.periodic_center_of_rotation.CopyFrom(self.center._to_ad_proto())
48
+ _proto.periodic_rotation_angles.CopyFrom(self.rotation_vector._to_ad_proto())
62
49
  return _proto
63
50
 
64
51
  def _from_proto(self, proto: clientpb.PeriodicPair) -> None:
65
52
  self.name = proto.periodic_pair_name
66
53
  self.surfaces_side_a.extend(proto.bound_a)
67
54
  self.surfaces_side_b.extend(proto.bound_b)
68
- if proto.periodic_bc_type == clientpb.INVALID_PERIODIC_BC_TYPE:
69
- raise ValueError("Invalid periodicity_type")
70
- elif proto.periodic_bc_type == clientpb.TRANSLATIONAL:
71
- self.periodicity_type = TranslationalPeriodicity()
72
- self.periodicity_type.translation._from_ad_proto(proto.periodic_translation)
73
- elif proto.periodic_bc_type == clientpb.ROTATIONAL:
74
- self.periodicity_type = RotationalPeriodicity()
75
- self.periodicity_type.center._from_ad_proto(proto.periodic_center_of_rotation)
76
- self.periodicity_type.rotation_vector._from_ad_proto(proto.periodic_rotation_angles)
55
+ self.translation._from_ad_proto(proto.periodic_translation)
56
+ self.center._from_ad_proto(proto.periodic_center_of_rotation)
57
+ self.rotation_vector._from_ad_proto(proto.periodic_rotation_angles)
77
58
  return None
@@ -56,14 +56,14 @@ class PhysicsAiArchitectureVersion(ProtoWrapperBase):
56
56
  config["priority_class"] = "prod-batch-priority"
57
57
  if "resources" not in config:
58
58
  config["resources"] = {}
59
- if "process_gpus" not in config["resources"]:
60
- config["resources"]["process_gpus"] = 8
59
+ if "process_cpus" not in config["resources"]:
60
+ config["resources"]["process_cpus"] = 8
61
61
  if "train_gpus" not in config["resources"]:
62
- config["resources"]["train_gpus"] = 8
62
+ config["resources"]["train_gpus"] = 1
63
63
  if "test_gpus" not in config["resources"]:
64
- config["resources"]["test_gpus"] = 8
64
+ config["resources"]["test_gpus"] = 1
65
65
  if "mode" not in config:
66
- config["mode"] = "full-gpu"
66
+ config["mode"] = "full"
67
67
 
68
68
  training_config_json = json.dumps(config, indent=2)
69
69
  external_dataset_uri = f"gs://training-data/architecture-{self.id}"
@@ -17,6 +17,7 @@ from ..project import Project
17
17
  from ..project import Project
18
18
  from .._helpers import upload_file
19
19
  from .._proto.upload import upload_pb2 as uploadpb
20
+ from ..types.ids import PhysicsAiModelVersionID
20
21
 
21
22
 
22
23
  @dataclass
@@ -50,7 +51,7 @@ class ExtAeroInferenceResult:
50
51
  def external_aero_inference(
51
52
  project: Project,
52
53
  stl_file: str,
53
- artifact_url: str,
54
+ model_version_id: PhysicsAiModelVersionID,
54
55
  conditions: Optional[Dict[str, Any]] = None,
55
56
  settings: Optional[Dict[str, Any]] = None,
56
57
  write_visualization_data=False,
@@ -62,8 +63,8 @@ def external_aero_inference(
62
63
  The project to which the inference files will be added.
63
64
  stl_file : str
64
65
  Fullpath the STL file to be used for inference.
65
- artifact_url : str
66
- Fullpath of the model artifact directory to be used for inference.
66
+ model_version_id : PhysicsAiModelVersionID
67
+ The ID of the trained model version to use for inference.
67
68
  conditions : Dict[str, Any], optional
68
69
  Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
69
70
  settings : Dict[str, Any], optional
@@ -80,7 +81,7 @@ def external_aero_inference(
80
81
  """
81
82
 
82
83
  result = perform_inference(
83
- project, stl_file, artifact_url, conditions, settings, write_visualization_data
84
+ project, stl_file, model_version_id, conditions, settings, write_visualization_data
84
85
  )
85
86
  return ExtAeroInferenceResult(result)
86
87
 
@@ -88,7 +89,7 @@ def external_aero_inference(
88
89
  def perform_inference(
89
90
  project: Project,
90
91
  stl_file: str,
91
- artifact_url: str,
92
+ model_version_id: PhysicsAiModelVersionID,
92
93
  conditions: Optional[Dict[str, Any]] = None,
93
94
  settings: Optional[Dict[str, Any]] = None,
94
95
  write_visualization_data=False,
@@ -100,8 +101,8 @@ def perform_inference(
100
101
  The project to which the inference files will be added.
101
102
  stl_file : str
102
103
  Fullpath the STL file to be used for inference.
103
- artifact_url : str
104
- Fullpath of the model artifact directory to be used for inference.
104
+ model_version_id : PhysicsAiModelVersionID
105
+ The ID of the trained model version to use for inference.
105
106
  conditions : Dict[str, Any], optional
106
107
  Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
107
108
  settings : Dict[str, Any], optional
@@ -142,7 +143,7 @@ def perform_inference(
142
143
  stl_url = upload_if_file(stl_file)
143
144
 
144
145
  raw = start_inference_job(
145
- project, stl_url, artifact_url, conditions, settings, write_visualization_data
146
+ project, stl_url, model_version_id, conditions, settings, write_visualization_data
146
147
  )
147
148
  currated: dict[str, Any] = {}
148
149
  for k, v in raw.items():
@@ -162,7 +163,7 @@ def perform_inference(
162
163
  def start_inference_job(
163
164
  project: Project,
164
165
  stl_url: str,
165
- artifact_url: str,
166
+ model_version_id: PhysicsAiModelVersionID,
166
167
  conditions: Optional[Dict[str, Any]] = None,
167
168
  settings: Optional[Dict[str, Any]] = None,
168
169
  write_visualization_data=False,
@@ -174,8 +175,8 @@ def start_inference_job(
174
175
  Reference to a project.
175
176
  stl_url : str
176
177
  URL of the STL file to be used for inference.
177
- artifact_url : str
178
- URL of the model artifact directory to be used for inference.
178
+ model_version_id : PhysicsAiModelVersionID
179
+ The ID of the trained model version to use for inference.
179
180
  conditions : Dict[str, Any], optional
180
181
  Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
181
182
  settings : Dict[str, Any], optional
@@ -203,13 +204,12 @@ def start_inference_job(
203
204
 
204
205
  req = inferencepb.CreateInferenceServiceJobRequest(
205
206
  stl_url=stl_url,
206
- artifact_url=artifact_url,
207
+ model_version_id=str(model_version_id),
207
208
  conditions=conditions_bytes,
208
209
  settings=settings_bytes,
209
210
  project_id=project.id,
210
211
  write_visualization_data=write_visualization_data,
211
212
  )
212
-
213
213
  res: inferencepb.CreateInferenceServiceJobResponse = (
214
214
  get_default_client().CreateInferenceServiceJob(req)
215
215
  )
@@ -21,6 +21,8 @@ from .operators import (
21
21
  # Concrete operators and their output types
22
22
  ReadGeometry as ReadGeometry,
23
23
  ReadGeometryOutputs as ReadGeometryOutputs,
24
+ ReadMesh as ReadMesh,
25
+ ReadMeshOutputs as ReadMeshOutputs,
24
26
  ModifyGeometry as ModifyGeometry,
25
27
  ModifyGeometryOutputs as ModifyGeometryOutputs,
26
28
  Mesh as Mesh,
@@ -39,4 +41,10 @@ from .api import (
39
41
  list_pipelines as list_pipelines,
40
42
  get_pipeline as get_pipeline,
41
43
  create_pipeline_job as create_pipeline_job,
44
+ get_pipeline_job as get_pipeline_job,
45
+ list_pipeline_jobs as list_pipeline_jobs,
46
+ PipelineJobRecord as PipelineJobRecord,
47
+ PipelineRecord as PipelineRecord,
48
+ PipelineJobRunRecord as PipelineJobRunRecord,
49
+ LogLine as LogLine,
42
50
  )
@@ -1,15 +1,35 @@
1
1
  # Copyright 2023-2024 Luminary Cloud, Inc. All Rights Reserved.
2
- from typing import Literal
2
+ from typing import Any, Literal
3
3
  from dataclasses import dataclass
4
4
 
5
5
  from datetime import datetime
6
6
 
7
+ from .arguments import PipelineArgValueType
7
8
  from ..pipelines import Pipeline, PipelineArgs
8
9
  from .._client import get_default_client
9
10
 
10
11
 
12
+ @dataclass
13
+ class LogLine:
14
+ timestamp: datetime
15
+ level: int
16
+ message: str
17
+
18
+ @classmethod
19
+ def from_json(cls, json: dict) -> "LogLine":
20
+ return cls(
21
+ timestamp=datetime.fromisoformat(json["timestamp"]),
22
+ level=json["level"],
23
+ message=json["message"],
24
+ )
25
+
26
+
11
27
  @dataclass
12
28
  class PipelineRecord:
29
+ """
30
+ A PipelineRecord represents a persisted pipeline.
31
+ """
32
+
13
33
  id: str
14
34
  name: str
15
35
  description: str | None
@@ -17,8 +37,10 @@ class PipelineRecord:
17
37
  created_at: datetime
18
38
  updated_at: datetime
19
39
 
20
- def pipeline(self) -> Pipeline:
21
- return Pipeline._from_yaml(self.definition_yaml)
40
+ # I don't think users need to get the Pipeline object from a PipelineRecord, but if they did,
41
+ # it would be done like this.
42
+ # def pipeline(self) -> Pipeline:
43
+ # return Pipeline._from_yaml(self.definition_yaml)
22
44
 
23
45
  @classmethod
24
46
  def from_json(cls, json: dict) -> "PipelineRecord":
@@ -31,14 +53,30 @@ class PipelineRecord:
31
53
  updated_at=datetime.fromisoformat(json["updated_at"]),
32
54
  )
33
55
 
56
+ def pipeline_jobs(self) -> "list[PipelineJobRecord]":
57
+ """
58
+ Returns a list of pipeline jobs that were created from this pipeline.
59
+
60
+ Returns
61
+ -------
62
+ list[PipelineJobRecord]
63
+ A list of PipelineJobRecord objects.
64
+ """
65
+ res = get_default_client().http.get(f"/rest/v0/pipelines/{self.id}/pipeline_jobs")
66
+ return [PipelineJobRecord.from_json(p) for p in res["data"]]
67
+
34
68
 
35
69
  @dataclass
36
70
  class PipelineJobRecord:
71
+ """
72
+ A PipelineJobRecord represents a persisted pipeline job.
73
+ """
74
+
37
75
  id: str
38
76
  pipeline_id: str
39
77
  name: str
40
78
  description: str | None
41
- status: Literal["pending", "running", "completed", "failed", "cancelled"]
79
+ status: Literal["pending", "running", "completed", "failed"]
42
80
  created_at: datetime
43
81
  updated_at: datetime
44
82
  started_at: datetime | None
@@ -60,6 +98,123 @@ class PipelineJobRecord:
60
98
  ),
61
99
  )
62
100
 
101
+ def pipeline(self) -> PipelineRecord:
102
+ """
103
+ Returns the pipeline that this pipeline job was created from.
104
+
105
+ Returns
106
+ -------
107
+ PipelineRecord
108
+ The PipelineRecord for the pipeline that this pipeline job was created from.
109
+ """
110
+ return get_pipeline(self.pipeline_id)
111
+
112
+ def runs(self) -> "list[PipelineJobRunRecord]":
113
+ """
114
+ Returns a list of runs for this pipeline job.
115
+
116
+ Returns
117
+ -------
118
+ list[PipelineJobRunRecord]
119
+ A list of PipelineJobRunRecord objects.
120
+ """
121
+ res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/runs")
122
+ return [PipelineJobRunRecord.from_json(r) for r in res["data"]]
123
+
124
+ def logs(self) -> list[LogLine]:
125
+ """
126
+ Returns a list of log lines for this pipeline job.
127
+
128
+ Each log line is a LogLine object, which has a timestamp, level, and message.
129
+
130
+ Returns
131
+ -------
132
+ list[LogLine]
133
+ A list of LogLine objects.
134
+ """
135
+ res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/logs")
136
+ return [LogLine.from_json(l) for l in res["data"]]
137
+
138
+ def artifacts(self) -> list[dict]:
139
+ """
140
+ Returns a list of artifacts that were produced by this pipeline job.
141
+
142
+ Artifacts are things like Geometries, Meshes, and Simulations. Each artifact is a dictionary
143
+ with an "id" key, which is an identifier for the artifact.
144
+
145
+ .. warning:: This feature is experimental and may change or be removed in the future.
146
+
147
+ Returns
148
+ -------
149
+ list[dict]
150
+ A list of artifact dictionaries.
151
+ """
152
+ res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/artifacts")
153
+ return res["data"]
154
+
155
+
156
+ @dataclass
157
+ class PipelineJobRunRecord:
158
+ pipeline_job_id: str
159
+ idx: int
160
+ arguments: list[PipelineArgValueType]
161
+ status: Literal["pending", "running", "completed", "failed"]
162
+
163
+ @classmethod
164
+ def from_json(cls, json: dict) -> "PipelineJobRunRecord":
165
+ return cls(
166
+ pipeline_job_id=json["pipeline_job_id"],
167
+ idx=json["idx"],
168
+ arguments=json["arguments"],
169
+ status=json["status"],
170
+ )
171
+
172
+ def pipeline_job(self) -> PipelineJobRecord:
173
+ """
174
+ Returns the pipeline job that this pipeline job run was created from.
175
+
176
+ Returns
177
+ -------
178
+ PipelineJobRecord
179
+ The PipelineJobRecord for the pipeline job that this pipeline job run was created from.
180
+ """
181
+ return get_pipeline_job(self.pipeline_job_id)
182
+
183
+ def logs(self) -> list[LogLine]:
184
+ """
185
+ Returns a list of log lines for this pipeline job run.
186
+
187
+ Each log line is a LogLine object, which has a timestamp, level, and message.
188
+
189
+ Returns
190
+ -------
191
+ list[LogLine]
192
+ A list of LogLine objects.
193
+ """
194
+ res = get_default_client().http.get(
195
+ f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/logs"
196
+ )
197
+ return [LogLine.from_json(l) for l in res["data"]]
198
+
199
+ def artifacts(self) -> list[dict]:
200
+ """
201
+ Returns a list of artifacts that were produced by this pipeline job run.
202
+
203
+ Artifacts are things like Geometries, Meshes, and Simulations. Each artifact is a dictionary
204
+ with an "id" key, which is an identifier for the artifact.
205
+
206
+ .. warning:: This feature is experimental and may change or be removed in the future.
207
+
208
+ Returns
209
+ -------
210
+ list[dict]
211
+ A list of artifact dictionaries.
212
+ """
213
+ res = get_default_client().http.get(
214
+ f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/artifacts"
215
+ )
216
+ return res["data"]
217
+
63
218
 
64
219
  def create_pipeline(
65
220
  name: str, pipeline: Pipeline | str, description: str | None = None
@@ -103,3 +103,18 @@ class PipelineArgs:
103
103
  return (
104
104
  f"PipelineArgs(param_names={[p.name for p in self.params]}, row_count={len(self.rows)})"
105
105
  )
106
+
107
+ def print_as_table(self) -> None:
108
+ headers = [p.name for p in self.params]
109
+ row_strs = [[str(v) for v in row.row_values] for row in self.rows]
110
+ col_widths = [
111
+ max(len(headers[i]), *(len(r[i]) for r in row_strs)) for i in range(len(headers))
112
+ ]
113
+
114
+ def format_row(values: list[str]) -> str:
115
+ return " | ".join(val.ljust(col_widths[i]) for i, val in enumerate(values))
116
+
117
+ print(format_row(headers))
118
+ print("-+-".join("-" * w for w in col_widths))
119
+ for r in row_strs:
120
+ print(format_row(r))
@@ -2,7 +2,7 @@
2
2
  from dataclasses import dataclass
3
3
 
4
4
  from .core import Operator, OperatorInputs, OperatorOutputs, PipelineOutput
5
- from .parameters import StringPipelineParameter
5
+ from .parameters import StringPipelineParameter, IntPipelineParameter
6
6
  from ..meshing import MeshGenerationParams
7
7
 
8
8
 
@@ -43,6 +43,8 @@ class ReadGeometry(Operator[ReadGeometryOutputs]):
43
43
  """
44
44
  Reads a Geometry into the Pipeline.
45
45
 
46
+ .. warning:: This feature is experimental and may change or be removed in the future.
47
+
46
48
  Parameters
47
49
  ----------
48
50
  geometry_id : str | StringPipelineParameter
@@ -52,8 +54,6 @@ class ReadGeometry(Operator[ReadGeometryOutputs]):
52
54
  -------
53
55
  geometry : PipelineOutputGeometry
54
56
  The latest GeometryVersion of the Geometry as of the moment the Pipeline was invoked.
55
-
56
- .. warning:: This feature is experimental and may change or be removed in the future.
57
57
  """
58
58
 
59
59
  def __init__(
@@ -70,6 +70,51 @@ class ReadGeometry(Operator[ReadGeometryOutputs]):
70
70
  )
71
71
 
72
72
 
73
+ @dataclass
74
+ class ReadMeshOutputs(OperatorOutputs):
75
+ mesh: PipelineOutputMesh
76
+ """
77
+ The Mesh read from the given `mesh_id`.
78
+ """
79
+
80
+
81
+ class ReadMesh(Operator[ReadMeshOutputs]):
82
+ """
83
+ Reads a Mesh into the Pipeline.
84
+
85
+ .. warning:: This feature is experimental and may change or be removed in the future.
86
+
87
+ Parameters
88
+ ----------
89
+ mesh_id : str | StringPipelineParameter
90
+ The ID of the Mesh to retrieve.
91
+ wait_timeout_seconds : int | IntPipelineParameter | None
92
+ The number of seconds to wait for the Mesh to be ready. If None, defaults to 1800 seconds
93
+ (30 minutes).
94
+
95
+ Outputs
96
+ -------
97
+ mesh : PipelineOutputMesh
98
+ The Mesh with the given `mesh_id`.
99
+ """
100
+
101
+ def __init__(
102
+ self,
103
+ *,
104
+ task_name: str | None = None,
105
+ mesh_id: str | StringPipelineParameter,
106
+ wait_timeout_seconds: int | IntPipelineParameter | None = None,
107
+ ):
108
+ if wait_timeout_seconds is None:
109
+ wait_timeout_seconds = 30 * 60
110
+ super().__init__(
111
+ task_name,
112
+ {"mesh_id": mesh_id, "wait_timeout_seconds": wait_timeout_seconds},
113
+ OperatorInputs(self),
114
+ ReadMeshOutputs._instantiate_for(self),
115
+ )
116
+
117
+
73
118
  @dataclass
74
119
  class ModifyGeometryOutputs(OperatorOutputs):
75
120
  geometry: PipelineOutputGeometry
@@ -81,6 +126,8 @@ class ModifyGeometry(Operator[ModifyGeometryOutputs]):
81
126
  """
82
127
  Modifies a Geometry.
83
128
 
129
+ .. warning:: This feature is experimental and may change or be removed in the future.
130
+
84
131
  Parameters
85
132
  ----------
86
133
  mods : dict
@@ -92,8 +139,6 @@ class ModifyGeometry(Operator[ModifyGeometryOutputs]):
92
139
  -------
93
140
  geometry : PipelineOutputGeometry
94
141
  The modified Geometry, represented as a new GeometryVersion.
95
-
96
- .. warning:: This feature is experimental and may change or be removed in the future.
97
142
  """
98
143
 
99
144
  def __init__(
@@ -122,31 +167,37 @@ class Mesh(Operator[MeshOutputs]):
122
167
  """
123
168
  Generates a Mesh from a Geometry.
124
169
 
170
+ .. warning:: This feature is experimental and may change or be removed in the future.
171
+
125
172
  Parameters
126
173
  ----------
127
- target_cv_count : int | None
128
- The target number of control volumes to generate. If None, a minimal mesh will be generated.
129
174
  geometry : PipelineOutputGeometry
130
175
  The Geometry to mesh.
176
+ mesh_name : str | StringPipelineParameter | None
177
+ The name to assign to the Mesh. If None, a default name will be used.
178
+ target_cv_count : int | None
179
+ The target number of control volumes to generate. If None, a minimal mesh will be generated.
131
180
 
132
181
  Outputs
133
182
  -------
134
183
  mesh : PipelineOutputMesh
135
184
  The generated Mesh.
136
-
137
- .. warning:: This feature is experimental and may change or be removed in the future.
138
185
  """
139
186
 
140
187
  def __init__(
141
188
  self,
142
189
  *,
143
190
  task_name: str | None = None,
144
- target_cv_count: int | None,
145
191
  geometry: PipelineOutputGeometry,
192
+ mesh_name: str | StringPipelineParameter | None = None,
193
+ target_cv_count: int | None,
146
194
  ):
147
195
  super().__init__(
148
196
  task_name,
149
- {"target_cv_count": target_cv_count},
197
+ {
198
+ "mesh_name": mesh_name,
199
+ "target_cv_count": target_cv_count,
200
+ },
150
201
  OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
151
202
  MeshOutputs._instantiate_for(self),
152
203
  )
@@ -169,31 +220,37 @@ class Simulate(Operator[SimulateOutputs]):
169
220
  """
170
221
  Runs a Simulation.
171
222
 
223
+ .. warning:: This feature is experimental and may change or be removed in the future.
224
+
172
225
  Parameters
173
226
  ----------
174
- sim_template_id : str | StringPipelineParameter
175
- The ID of the SimulationTemplate to use for the Simulation.
176
227
  mesh : PipelineOutputMesh
177
228
  The Mesh to use for the Simulation.
229
+ sim_name : str | StringPipelineParameter | None
230
+ The name to assign to the Simulation. If None, a default name will be used.
231
+ sim_template_id : str | StringPipelineParameter
232
+ The ID of the SimulationTemplate to use for the Simulation.
178
233
 
179
234
  Outputs
180
235
  -------
181
236
  simulation : PipelineOutputSimulation
182
237
  The Simulation.
183
-
184
- .. warning:: This feature is experimental and may change or be removed in the future.
185
238
  """
186
239
 
187
240
  def __init__(
188
241
  self,
189
242
  *,
190
243
  task_name: str | None = None,
191
- sim_template_id: str | StringPipelineParameter,
192
244
  mesh: PipelineOutputMesh,
245
+ sim_name: str | StringPipelineParameter | None = None,
246
+ sim_template_id: str | StringPipelineParameter,
193
247
  ):
194
248
  super().__init__(
195
249
  task_name,
196
- {"sim_template_id": sim_template_id},
250
+ {
251
+ "sim_name": sim_name,
252
+ "sim_template_id": sim_template_id,
253
+ },
197
254
  OperatorInputs(self, mesh=(PipelineOutputMesh, mesh)),
198
255
  SimulateOutputs._instantiate_for(self),
199
256
  )
luminarycloud/project.py CHANGED
@@ -12,6 +12,7 @@ import concurrent
12
12
 
13
13
  import luminarycloud as lc
14
14
  from luminarycloud._helpers.named_variables import _named_variables_to_proto
15
+ from luminarycloud._helpers.pagination import PaginationIterator
15
16
  from luminarycloud.params.simulation.adjoint_ import Adjoint
16
17
 
17
18
  from ._client import get_default_client
@@ -785,53 +786,13 @@ def list_projects() -> list[Project]:
785
786
  return list(iterate_projects())
786
787
 
787
788
 
788
- class ProjectIterator:
789
+ class ProjectIterator(PaginationIterator[Project]):
789
790
  """Iterator class for projects that provides length hint."""
790
791
 
791
- def __init__(self, page_size: int):
792
- self._page_size: int = page_size
793
- self._page_token: str = ""
794
- self._total_count: Optional[int] = None
795
- self._current_page: Optional[list[projectpb.Project]] = None
796
- self._client = get_default_client()
797
- self._iterated_count: int = 0
798
-
799
- def __iter__(self) -> "ProjectIterator":
800
- return self
801
-
802
- def __next__(self) -> Project:
803
- if self._current_page is None:
804
- self._fetch_next_page()
805
-
806
- # _current_page really can't be None here, but this assertion is needed to appease mypy
807
- assert self._current_page is not None
808
-
809
- if len(self._current_page) == 0:
810
- if not self._page_token:
811
- raise StopIteration
812
- self._fetch_next_page()
813
-
814
- self._iterated_count += 1
815
-
816
- return Project(self._current_page.pop(0))
817
-
818
- def _fetch_next_page(self) -> None:
819
- req = projectpb.ListProjectsRequest(page_size=self._page_size, page_token=self._page_token)
792
+ def _fetch_page(self, page_size: int, page_token: str) -> tuple[list[Project], str, int]:
793
+ req = projectpb.ListProjectsRequest(page_size=page_size, page_token=page_token)
820
794
  res = self._client.ListProjects(req)
821
-
822
- self._current_page = list(res.projects)
823
- self._page_token = res.next_page_token
824
-
825
- # Set length hint on first fetch if available
826
- if self._total_count is None:
827
- self._total_count = res.total_count or 0
828
-
829
- def __length_hint__(self) -> int:
830
- if self._total_count is None:
831
- # Fetch first page to get total size if not already fetched
832
- if self._current_page is None:
833
- self._fetch_next_page()
834
- return max(0, (self._total_count or 0) - self._iterated_count)
795
+ return [Project(p) for p in res.projects], res.next_page_token, res.total_count
835
796
 
836
797
 
837
798
  def iterate_projects(page_size: int = 50) -> ProjectIterator: