luminarycloud 0.18.0__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. luminarycloud/__init__.py +4 -0
  2. luminarycloud/_client/client.py +21 -5
  3. luminarycloud/_client/http_client.py +168 -0
  4. luminarycloud/_client/rpc_error.py +1 -0
  5. luminarycloud/_client/tracing.py +72 -22
  6. luminarycloud/_helpers/_wait_for_mesh.py +5 -7
  7. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +8 -8
  8. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +9 -8
  9. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +88 -55
  10. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +108 -1
  11. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +35 -0
  12. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +16 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +70 -40
  14. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +64 -3
  15. luminarycloud/_proto/client/simulation_pb2.py +273 -269
  16. luminarycloud/_proto/client/simulation_pb2.pyi +23 -7
  17. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +10 -10
  18. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +9 -8
  19. luminarycloud/_proto/output/output_pb2.py +43 -36
  20. luminarycloud/_proto/output/output_pb2.pyi +28 -1
  21. luminarycloud/_proto/quantity/quantity_options_pb2.py +5 -4
  22. luminarycloud/_proto/quantity/quantity_options_pb2.pyi +4 -0
  23. luminarycloud/_proto/quantity/quantity_pb2.py +8 -8
  24. luminarycloud/enum/__init__.py +1 -0
  25. luminarycloud/enum/moment_convention_type.py +19 -0
  26. luminarycloud/enum/quantity_type.py +13 -0
  27. luminarycloud/exceptions.py +6 -0
  28. luminarycloud/outputs/output_definitions.py +5 -0
  29. luminarycloud/params/enum/_enum_wrappers.py +3 -2
  30. luminarycloud/params/simulation/monitor_plane_.py +1 -1
  31. luminarycloud/params/simulation/sliding_interfaces_.py +8 -0
  32. luminarycloud/physics_ai/inference.py +42 -26
  33. luminarycloud/pipelines/api.py +49 -102
  34. luminarycloud/pipelines/operators.py +4 -4
  35. luminarycloud/project.py +16 -44
  36. luminarycloud/simulation.py +6 -0
  37. luminarycloud/simulation_param.py +4 -2
  38. luminarycloud/simulation_queue.py +130 -0
  39. luminarycloud/simulation_template.py +6 -1
  40. luminarycloud/tables.py +14 -15
  41. luminarycloud/types/adfloat.py +3 -0
  42. luminarycloud/vis/interactive_scene.py +14 -1
  43. luminarycloud/vis/visualization.py +18 -2
  44. {luminarycloud-0.18.0.dist-info → luminarycloud-0.19.0.dist-info}/METADATA +1 -1
  45. {luminarycloud-0.18.0.dist-info → luminarycloud-0.19.0.dist-info}/RECORD +46 -48
  46. luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2.py +0 -246
  47. luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2.pyi +0 -420
  48. luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2_grpc.py +0 -240
  49. luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2_grpc.pyi +0 -90
  50. luminarycloud/enum/pipeline_job_status.py +0 -23
  51. {luminarycloud-0.18.0.dist-info → luminarycloud-0.19.0.dist-info}/WHEEL +0 -0
luminarycloud/pipelines/api.py CHANGED
@@ -1,14 +1,11 @@
  # Copyright 2023-2024 Luminary Cloud, Inc. All Rights Reserved.
+ from typing import Literal
  from dataclasses import dataclass

  from datetime import datetime

- from luminarycloud._helpers import timestamp_to_datetime
-
- from ..enum.pipeline_job_status import PipelineJobStatus
  from ..pipelines import Pipeline, PipelineArgs
  from .._client import get_default_client
- from .._proto.api.v0.luminarycloud.pipelines import pipelines_pb2 as pipelinespb


  @dataclass
@@ -17,21 +14,21 @@ class PipelineRecord:
      name: str
      description: str | None
      definition_yaml: str
-     create_time: datetime
-     update_time: datetime
+     created_at: datetime
+     updated_at: datetime

      def pipeline(self) -> Pipeline:
          return Pipeline._from_yaml(self.definition_yaml)

      @classmethod
-     def from_proto(cls, proto: pipelinespb.Pipeline) -> "PipelineRecord":
+     def from_json(cls, json: dict) -> "PipelineRecord":
          return cls(
-             id=proto.id,
-             name=proto.name,
-             description=proto.description,
-             definition_yaml=proto.definition_yaml,
-             create_time=timestamp_to_datetime(proto.created_at),
-             update_time=timestamp_to_datetime(proto.updated_at),
+             id=json["id"],
+             name=json["name"],
+             description=json["description"],
+             definition_yaml=json["definition_yaml"],
+             created_at=datetime.fromisoformat(json["created_at"]),
+             updated_at=datetime.fromisoformat(json["updated_at"]),
          )


@@ -42,30 +39,26 @@ class PipelineJobRecord:
      project_id: str
      name: str
      description: str | None
-     status: PipelineJobStatus
-     create_time: datetime
-     update_time: datetime
+     status: Literal["pending", "running", "completed", "failed", "cancelled"]
+     created_at: datetime
+     updated_at: datetime
      started_at: datetime | None
      completed_at: datetime | None

      @classmethod
-     def from_proto(cls, proto: pipelinespb.PipelineJob) -> "PipelineJobRecord":
+     def from_json(cls, json: dict) -> "PipelineJobRecord":
          return cls(
-             id=proto.id,
-             pipeline_id=proto.pipeline_id,
-             project_id=proto.project_id,
-             name=proto.name,
-             description=proto.description,
-             status=PipelineJobStatus(proto.status),
-             create_time=timestamp_to_datetime(proto.created_at),
-             update_time=timestamp_to_datetime(proto.updated_at),
-             started_at=(
-                 timestamp_to_datetime(proto.started_at) if proto.HasField("started_at") else None
-             ),
+             id=json["id"],
+             pipeline_id=json["pipeline_id"],
+             project_id=json["project_id"],
+             name=json["name"],
+             description=json["description"],
+             status=json["status"],
+             created_at=datetime.fromisoformat(json["created_at"]),
+             updated_at=datetime.fromisoformat(json["updated_at"]),
+             started_at=datetime.fromisoformat(json["started_at"]) if json["started_at"] else None,
              completed_at=(
-                 timestamp_to_datetime(proto.completed_at)
-                 if proto.HasField("completed_at")
-                 else None
+                 datetime.fromisoformat(json["completed_at"]) if json["completed_at"] else None
              ),
          )


@@ -89,20 +82,21 @@ def create_pipeline(
          definition_yaml = pipeline.to_yaml()
      else:
          definition_yaml = pipeline
-     req = pipelinespb.CreatePipelineRequest(
-         name=name, definition_yaml=definition_yaml, description=description
-     )
-     res: pipelinespb.CreatePipelineResponse = get_default_client().CreatePipeline(req)
-     return PipelineRecord.from_proto(res.pipeline)
+     body = {
+         "name": name,
+         "definition_yaml": definition_yaml,
+         "description": description,
+     }
+     res = get_default_client().http.post("/rest/v0/pipelines", body)
+     return PipelineRecord.from_json(res)


  def list_pipelines() -> list[PipelineRecord]:
      """
      List all pipelines.
      """
-     req = pipelinespb.ListPipelinesRequest()
-     res: pipelinespb.ListPipelinesResponse = get_default_client().ListPipelines(req)
-     return [PipelineRecord.from_proto(p) for p in res.pipelines]
+     res = get_default_client().http.get("/rest/v0/pipelines")
+     return [PipelineRecord.from_json(p) for p in res]


  def get_pipeline(id: str) -> PipelineRecord:
@@ -114,9 +108,8 @@ def get_pipeline(id: str) -> PipelineRecord:
      id : str
          ID of the pipeline to fetch.
      """
-     req = pipelinespb.GetPipelineRequest(id=id)
-     res: pipelinespb.GetPipelineResponse = get_default_client().GetPipeline(req)
-     return PipelineRecord.from_proto(res.pipeline)
+     res = get_default_client().http.get(f"/rest/v0/pipelines/{id}")
+     return PipelineRecord.from_json(res)


  def create_pipeline_job(
@@ -139,75 +132,29 @@ def create_pipeline_job(
          Description of the pipeline job.
      """

-     col_values = [[] for _ in args.params]
-     for row in args.rows:
-         for i, v in enumerate(row.row_values):
-             col_values[i].append(v)
-
-     cols = []
-
-     for i, param in enumerate(args.params):
-         if param._represented_type() == str:
-             cols.append(
-                 pipelinespb.PipelineJobArgsColumn(
-                     string_column=pipelinespb.PipelineJobArgsColumn.StringColumn(
-                         name=param.name,
-                         values=col_values[i],
-                     )
-                 )
-             )
-         elif param._represented_type() == int:
-             cols.append(
-                 pipelinespb.PipelineJobArgsColumn(
-                     int_column=pipelinespb.PipelineJobArgsColumn.IntColumn(
-                         name=param.name,
-                         values=col_values[i],
-                     )
-                 )
-             )
-         elif param._represented_type() == float:
-             cols.append(
-                 pipelinespb.PipelineJobArgsColumn(
-                     double_column=pipelinespb.PipelineJobArgsColumn.DoubleColumn(
-                         name=param.name,
-                         values=col_values[i],
-                     )
-                 )
-             )
-         elif param._represented_type() == bool:
-             cols.append(
-                 pipelinespb.PipelineJobArgsColumn(
-                     bool_column=pipelinespb.PipelineJobArgsColumn.BoolColumn(
-                         name=param.name,
-                         values=col_values[i],
-                     )
-                 )
-             )
-
-     req = pipelinespb.CreatePipelineJobRequest(
-         pipeline_id=pipeline_id,
-         args_columns=cols,
-         name=name,
-         description=description,
-         project_id=project_id,
-     )
-     res: pipelinespb.CreatePipelineJobResponse = get_default_client().CreatePipelineJob(req)
-     return PipelineJobRecord.from_proto(res.pipeline_job)
+     arg_rows = [row.row_values for row in args.rows]
+     body = {
+         "name": name,
+         "project_id": project_id,
+         "argument_names": [p.name for p in args.params],
+         "argument_rows": arg_rows,
+     }
+
+     res = get_default_client().http.post(f"/rest/v0/pipelines/{pipeline_id}/pipeline_jobs", body)
+     return PipelineJobRecord.from_json(res)


  def get_pipeline_job(id: str) -> PipelineJobRecord:
      """
      Get a pipeline job by ID.
      """
-     req = pipelinespb.GetPipelineJobRequest(id=id)
-     res: pipelinespb.GetPipelineJobResponse = get_default_client().GetPipelineJob(req)
-     return PipelineJobRecord.from_proto(res.pipeline_job)
+     res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{id}")
+     return PipelineJobRecord.from_json(res)


  def list_pipeline_jobs() -> list[PipelineJobRecord]:
      """
      List all pipeline jobs.
      """
-     req = pipelinespb.ListPipelineJobsRequest()
-     res: pipelinespb.ListPipelineJobsResponse = get_default_client().ListPipelineJobs(req)
-     return [PipelineJobRecord.from_proto(p) for p in res.pipeline_jobs]
+     res = get_default_client().http.get("/rest/v0/pipeline_jobs")
+     return [PipelineJobRecord.from_json(p) for p in res]
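The six hunks above move the pipelines API from the pipelines_pb2 gRPC stubs to REST calls under /rest/v0, with records parsed from JSON. As a quick illustration, here is a minimal sketch that feeds a hand-written payload through the new PipelineRecord.from_json; every field value in the sample dict is made up, and only the class shown above is assumed.

from luminarycloud.pipelines.api import PipelineRecord

# Hypothetical response payload; the keys mirror the ones read by from_json above.
sample = {
    "id": "pipeline-123",                       # made-up ID
    "name": "wing-sweep",                       # made-up name
    "description": None,
    "definition_yaml": "tasks: []",             # placeholder YAML body
    "created_at": "2025-01-01T00:00:00+00:00",
    "updated_at": "2025-01-02T00:00:00+00:00",
}

record = PipelineRecord.from_json(sample)
print(record.name, record.created_at.isoformat())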
luminarycloud/pipelines/operators.py CHANGED
@@ -124,8 +124,8 @@ class Mesh(Operator[MeshOutputs]):

      Parameters
      ----------
-     max_cv_count : int
-         The maximum number of control volumes to generate.
+     target_cv_count : int | None
+         The target number of control volumes to generate. If None, a minimal mesh will be generated.
      geometry : PipelineOutputGeometry
          The Geometry to mesh.

@@ -141,12 +141,12 @@ class Mesh(Operator[MeshOutputs]):
          self,
          *,
          task_name: str | None = None,
-         max_cv_count: int,
+         target_cv_count: int | None,
          geometry: PipelineOutputGeometry,
      ):
          super().__init__(
              task_name,
-             {"max_cv_count": max_cv_count},
+             {"target_cv_count": target_cv_count},
              OperatorInputs(self, geometry=(PipelineOutputGeometry, geometry)),
              MeshOutputs._instantiate_for(self),
          )
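The Mesh pipeline operator's keyword changes from max_cv_count to target_cv_count and becomes nullable. A brief sketch of the updated call follows; the task name and count are placeholders, and the upstream_geometry argument is assumed to be a PipelineOutputGeometry handle produced by an earlier pipeline step (how one is obtained is not shown in this diff).

from luminarycloud.pipelines.operators import Mesh

def add_mesh_task(upstream_geometry) -> Mesh:
    # upstream_geometry: assumed to be a PipelineOutputGeometry from an upstream operator.
    return Mesh(
        task_name="mesh-coarse",      # placeholder task name
        target_cv_count=2_000_000,    # aim for roughly 2M control volumes
        geometry=upstream_geometry,
    )

# Per the updated docstring, passing target_cv_count=None requests a minimal mesh instead.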
luminarycloud/project.py CHANGED
@@ -10,8 +10,6 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, Literal

  import concurrent

- import grpc
-
  import luminarycloud as lc
  from luminarycloud._helpers.named_variables import _named_variables_to_proto
  from luminarycloud.params.simulation.adjoint_ import Adjoint
@@ -258,19 +256,16 @@ class Project(ProtoWrapperBase):
          )
          return lc.Mesh(_mesh)

-     @deprecated("Use create_or_get_mesh() instead", "0.10.0")
-     def create_mesh(
+     def create_or_get_mesh(
          self,
          params: MeshAdaptationParams | MeshGenerationParams,
          *,
          name: str,
+         request_id: Optional[str] = None,
      ) -> "Mesh":
          """
-         Create a new mesh in the project.
-
-         .. deprecated:: 0.10.0
-             `create_mesh()` will be removed in v0.11.0, it is replaced by
-             `create_or_get_mesh()`.
+         Create a new mesh in the project, or return an existing mesh with the same request_id
+         if it already exists.

          Parameters
          ----------
@@ -279,14 +274,24 @@ class Project(ProtoWrapperBase):
              existing geometry, use MeshGenerationParams. If adapting a mesh from an existing,
              solution use MeshAdaptationParams.
          name : str
-             (Optional) Mesh name. Max 256 characters.
+             Mesh name. Max 256 characters.
+         request_id : str, optional
+             Can be useful as an idempotency key. If there's an existing Mesh with the given
+             request_id, that Mesh will be returned. If there's no existing Mesh with the given
+             request_id, then a Mesh will be created and associated with that request_id. If not
+             provided, a random request_id will be generated for the Mesh, effectively preventing it
+             from being retrieved by a future `create_or_get_mesh` request. Max 256 characters.
          """

+         if request_id is None:
+             request_id = str(uuid.uuid4())
+
          client = get_default_client()

          req = meshpb.CreateMeshRequest(
              project_id=self.id,
              name=name,
+             request_id=request_id,
          )

          if isinstance(params, meshpb.MeshGenerationParams):
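In short, the deprecated create_mesh is gone and create_or_get_mesh now takes an explicit request_id instead of retrying on ALREADY_EXISTS (see the removed block in the next hunk). A minimal sketch of using it as an idempotency key; the params object is assumed to be built elsewhere, and the name and request_id values are placeholders.

import luminarycloud as lc

def mesh_once(project: lc.Project, params) -> lc.Mesh:
    # params: a MeshGenerationParams or MeshAdaptationParams built elsewhere (not shown here).
    # Reusing the same request_id makes the call safe to retry: the first call creates the
    # mesh, and any later call with the same request_id returns the existing one.
    return project.create_or_get_mesh(params, name="wing mesh", request_id="wing-mesh-v1")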
@@ -314,39 +319,6 @@
          res: meshpb.CreateMeshResponse = client.CreateMesh(req)
          return lc.Mesh(res.mesh)

-     def create_or_get_mesh(
-         self,
-         params: MeshAdaptationParams | MeshGenerationParams,
-         *,
-         name: str,
-     ) -> "Mesh":
-         """
-         Create a new mesh in the project, or return an existing mesh with the same parameters
-         if it already exists.
-
-         Parameters
-         ----------
-         params : MeshGenerationParams | MeshAdaptationParams
-             The parameters to use to create the mesh. If generating a new mesh from an
-             existing geometry, use MeshGenerationParams. If adapting a mesh from an existing,
-             solution use MeshAdaptationParams.
-         name : str
-             Mesh name. Max 256 characters.
-         """
-
-         try:
-             return self.create_mesh(params, name=name)
-         except grpc.RpcError as e:
-             if e.code() == grpc.StatusCode.ALREADY_EXISTS:
-                 message = e.details()
-                 match = re.search(r"mesh-[a-f0-9-]+$", message)
-                 if match:
-                     existing_mesh_id = match.group(0)
-                     req = meshpb.GetMeshRequest(id=existing_mesh_id)
-                     res = get_default_client().GetMesh(req)
-                     return lc.Mesh(res.mesh)
-             raise
-
      def _create_hex_mesh(
          self,
          names_to_file_paths: Dict[str, Union[PathLike[Any], str]],
@@ -898,6 +870,6 @@ def iterate_projects(page_size: int = 50) -> ProjectIterator:
      Project(...)
      >>> next(my_projects) # second page of projects is fetched, third project is returned
      Project(...)
-     >>> next(my_projects) # if there areno more projects, this call raises StopIteration
+     >>> next(my_projects) # if there are no more projects, this call raises StopIteration
      """
      return ProjectIterator(page_size)
luminarycloud/simulation.py CHANGED
@@ -18,6 +18,7 @@ from .enum import (
      CalculationType,
      QuantityType,
      ResidualNormalization,
+     MomentConventionType,
      SimulationStatus,
      Vector3Component,
  )
@@ -188,6 +189,7 @@ class Simulation(ProtoWrapperBase):
          moment_center: Optional[Vector3Like] = None,
          averaging_type: AveragingType = AveragingType.UNSPECIFIED,
          vector_component: Vector3Component = Vector3Component.UNSPECIFIED,
+         moment_convention_type: MomentConventionType = MomentConventionType.BODY_FRAME,
      ) -> _DownloadedTextFile:
          """
          Downloads surface outputs (e.g. lift, drag, ...) in csv format.
@@ -224,6 +226,9 @@
          vector_component : Vector3Component, optional
              For 3-vector quantity types (e.g. `QuantityType.VELOCITY`), the component of the vector to extract.
              Ignored for scalar quantity types.
+         moment_convention_type : MomentConventionType, optional
+             The frame type to use for "aerodynamic moment" quantity types.
+             Ignored for non-moment quantity types.

          Returns
          -------
@@ -261,6 +266,7 @@
              moment_center=_to_vector3_proto(moment_center) if moment_center else None,
              averaging_type=averaging_type.value,
              vector_component=vector_component.value,
+             moment_convention_type=moment_convention_type.value,
          )
          res = get_default_client().GetSimulationSurfaceQuantityOutput(req)
          return _DownloadedTextFile(res.csv_file)
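A small note on the new enum: MomentConventionType now ships in luminarycloud.enum (see files 24-25 in the listing above) and defaults to BODY_FRAME in the signature shown here. The sketch below only exercises the enum itself; the surrounding download call is omitted because its public method name is not visible in this hunk.

from luminarycloud.enum import MomentConventionType

# Passed as moment_convention_type=... to the surface-output download above and forwarded
# to the request via .value, exactly as the hunk shows.
convention = MomentConventionType.BODY_FRAME
print(convention, convention.value)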
luminarycloud/simulation_param.py CHANGED
@@ -14,7 +14,7 @@ from luminarycloud._proto.client import simulation_pb2 as clientpb
  from luminarycloud._proto.client.entity_pb2 import EntityIdentifier
  from luminarycloud._proto.output import output_pb2 as outputpb
  from luminarycloud._proto.quantity import quantity_options_pb2 as quantityoptspb
- from luminarycloud.enum import AveragingType, QuantityType, SpaceAveragingType
+ from luminarycloud.enum import AveragingType, MomentConventionType, QuantityType, SpaceAveragingType
  from luminarycloud.params.geometry import Volume
  from luminarycloud.params.simulation import (
      EntityRelationships,
@@ -265,6 +265,7 @@ class SimulationParam(_SimulationParam):
          frame_id: str = "",
          force_direction: Optional[Vector3Like] = None,
          moment_center: Optional[Vector3Like] = None,
+         moment_convention_type: MomentConventionType = MomentConventionType.BODY_FRAME,
          averaging_type: AveragingType = AveragingType.UNSPECIFIED,
      ) -> None:
          """
@@ -303,6 +304,7 @@
                          _to_vector3_ad_proto(force_direction) if force_direction else None
                      ),
                      moment_center=_to_vector3_ad_proto(moment_center) if moment_center else None,
+                     moment_convention_type=moment_convention_type.value,
                  )
              )
          else:
@@ -329,7 +331,7 @@
  ## code that creates (from scratch) an identical object. As such, some parts do
  ## not match 1 to 1 with hand-written examples.
  import luminarycloud
- from luminarycloud.types import Vector3
+ from luminarycloud.types import Vector3, Expression
  from luminarycloud.tables import RectilinearTable
  from luminarycloud.enum import *
  from luminarycloud.params.enum import *
luminarycloud/simulation_queue.py ADDED
@@ -0,0 +1,130 @@
+ # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
+
+ """Simulation queue management functionality."""
+
+ from datetime import datetime
+ from typing import Optional
+
+ from ._client import get_default_client
+ from ._helpers._timestamp_to_datetime import timestamp_to_datetime
+ from ._proto.api.v0.luminarycloud.simulation import simulation_pb2 as simulationpb
+ from ._wrapper import ProtoWrapper, ProtoWrapperBase
+ from .types import ProjectID, SimulationID
+
+
+ @ProtoWrapper(simulationpb.SimulationQueueStatus)
+ class SimulationQueueStatus(ProtoWrapperBase):
+     """Represents the status of a queued simulation."""
+
+     project_id: ProjectID
+     """The ID of the project to which the simulation belongs."""
+     simulation_id: SimulationID
+     """The ID of the simulation."""
+     name: str
+     """The name of the simulation."""
+     is_lma: bool
+     """Whether this is an LMA simulation."""
+     priority: bool
+     """Whether this is a priority simulation."""
+
+     _proto: simulationpb.SimulationQueueStatus
+
+     @property
+     def creation_time(self) -> datetime:
+         """The time when the simulation was created."""
+         return timestamp_to_datetime(self._proto.creation_time)
+
+     @property
+     def started_time(self) -> Optional[datetime]:
+         """The time when the simulation started running, if it has started."""
+         if self._proto.HasField("started_time"):
+             return timestamp_to_datetime(self._proto.started_time)
+         return None
+
+
+ class SimulationStatusQueueIterator:
+     """Iterator class for simulation status queue that provides length hint."""
+
+     def __init__(self, page_size: int):
+         self._page_size: int = page_size
+         self._page_token: str = ""
+         self._total_count: Optional[int] = None
+         self._current_page: Optional[list[simulationpb.SimulationQueueStatus]] = None
+         self._client = get_default_client()
+         self._iterated_count: int = 0
+
+     def __iter__(self) -> "SimulationStatusQueueIterator":
+         return self
+
+     def __next__(self) -> SimulationQueueStatus:
+         if self._current_page is None:
+             self._fetch_next_page()
+
+         # _current_page really can't be None here, but this assertion is needed to appease mypy
+         assert self._current_page is not None
+
+         if len(self._current_page) == 0:
+             if not self._page_token:
+                 raise StopIteration
+             self._fetch_next_page()
+
+         self._iterated_count += 1
+
+         return SimulationQueueStatus(self._current_page.pop(0))
+
+     def _fetch_next_page(self) -> None:
+         req = simulationpb.ListQueuedSimulationsRequest(
+             page_size=self._page_size, page_token=self._page_token
+         )
+         res = self._client.ListQueuedSimulations(req)
+
+         self._current_page = list(res.simulations)
+         self._page_token = res.next_page_token
+         if self._total_count is None:
+             self._total_count = res.total_count or 0
+
+     def __length_hint__(self) -> int:
+         if self._total_count is None:
+             # Fetch first page to get total size if not already fetched
+             if self._current_page is None:
+                 self._fetch_next_page()
+         return max(0, (self._total_count or 0) - self._iterated_count)
+
+
+ def iterate_simulation_status_queue(page_size: int = 50) -> SimulationStatusQueueIterator:
+     """
+     Iterate over all simulations in the scheduling queue for the current account.
+
+     This function is only available for accounts with a Subscription Plan.
+
+     Parameters
+     ----------
+     page_size : int, optional
+         Number of simulations to fetch per page. Defaults to 50, max is 100.
+
+     Returns
+     -------
+     SimulationStatusQueueIterator
+         An iterator that yields SimulationQueueStatus objects one at a time.
+
+     Examples
+     --------
+     Fetch all queued simulations and filter them for LMA simulations.
+
+     >>> lma_sims = [sim for sim in iterate_simulation_status_queue() if sim.is_lma]
+     [SimulationQueueStatus(...), SimulationQueueStatus(...)]
+
+     Lazily fetch simulations.
+     (A batch size of 2 is a bad idea in real-world usage, but it helps demonstrate the lazy
+     fetching.)
+
+     >>> my_sims = iterate_simulation_status_queue(page_size=2)
+     >>> next(my_sims) # first page of simulations is fetched, first simulation is returned.
+     SimulationQueueStatus(...)
+     >>> next(my_sims) # second simulation is returned from memory.
+     SimulationQueueStatus(...)
+     >>> next(my_sims) # second page of simulations is fetched, third simulation is returned.
+     SimulationQueueStatus(...)
+     >>> next(my_sims) # if there are no more simulations, this call raises StopIteration.
+     """
+     return SimulationStatusQueueIterator(page_size)
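Beyond the docstring examples above, the iterator also implements __length_hint__, so the queue depth can be estimated without draining it. A short sketch, assuming the SDK is already configured with valid credentials for an account with access to the queue:

from operator import length_hint

from luminarycloud.simulation_queue import iterate_simulation_status_queue

queue = iterate_simulation_status_queue(page_size=50)
print(f"~{length_hint(queue)} simulations currently queued")  # triggers at most one page fetch
for status in queue:
    tag = "priority" if status.priority else "standard"
    print(status.simulation_id, status.name, tag)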
luminarycloud/simulation_template.py CHANGED
@@ -518,6 +518,11 @@ class SimulationTemplate(ProtoWrapperBase):
          for i, definition in enumerate(output_definitions):
              if i == 0:
                  code += "output_list = []\n"
+             if isinstance(definition, DerivedOutputDefinition):
+                 code += "# WARNING: Output {i} - Custom outputs are not yet supported in the SDK.\n"
+                 # This is to make the stopping condition ID logic work.
+                 code += "output_list.append(None)\n\n"
+                 continue
              output_code = definition._to_code_helper("new_output", hide_defaults)
              for line in output_code.split("\n"):
                  # Omit ID because we are generating for create_output_definition.
@@ -535,7 +540,7 @@
              code += "\n# Output-based conditions require the ID of the associated output.\n"
              # Find the old output to use the new ID created by create_output_definition.
              for j, od in enumerate(output_definitions):
-                 if sc.output_definition_id == od.id:
+                 if sc.output_definition_id == od.id and not isinstance(od, DerivedOutputDefinition):
                      code += f"template.create_or_update_stopping_condition(output_list[{j}].id, "
                      code += f"{sc.threshold}, {sc.start_at_iteration}, {sc.averaging_iterations}, "
                      code += f"{sc.iterations_to_consider})\n"
luminarycloud/tables.py CHANGED
@@ -10,7 +10,6 @@ from typing import Union
  from .enum import TableType, QuantityType
  from ._helpers import CodeRepr
  from ._proto.table import table_pb2 as tablepb
- from ._proto.quantity import quantity_pb2 as quantitypb


  def create_rectilinear_table(
@@ -37,30 +36,30 @@ def create_rectilinear_table(
      def lc_defined_header(table_type: TableType) -> list[Union[int, str]]:
          """Returns the required header (if any) for a type of table."""
          if table_type == TableType.MONITOR_POINTS:
-             return [quantitypb.LENGTH, quantitypb.LENGTH, quantitypb.LENGTH, "name", "id"]
+             return [QuantityType.LENGTH, QuantityType.LENGTH, QuantityType.LENGTH, "name", "id"]
          elif table_type == TableType.RADIAL_DISTRIBUTION:
              return [
-                 quantitypb.RELATIVE_RADIUS,
-                 quantitypb.THRUST_PROFILE,
-                 quantitypb.TORQUE_PROFILE,
-                 quantitypb.RADIAL_FORCE_PROFILE,
+                 QuantityType.RELATIVE_RADIUS,
+                 QuantityType.THRUST_PROFILE,
+                 QuantityType.TORQUE_PROFILE,
+                 QuantityType.RADIAL_FORCE_PROFILE,
              ]
          elif table_type == TableType.BLADE_GEOMETRY:
              return [
-                 quantitypb.RELATIVE_RADIUS,
-                 quantitypb.TWIST_ANGLE,
-                 quantitypb.SWEEP_ANGLE,
-                 quantitypb.ANHEDRAL_ANGLE,
-                 quantitypb.RELATIVE_CHORD,
+                 QuantityType.RELATIVE_RADIUS,
+                 QuantityType.TWIST_ANGLE,
+                 QuantityType.SWEEP_ANGLE,
+                 QuantityType.ANHEDRAL_ANGLE,
+                 QuantityType.RELATIVE_CHORD,
              ]
          elif table_type == TableType.PROFILE_BC:
              return []
          elif table_type == TableType.FAN_CURVE:
-             return [quantitypb.VOLUME_FLOW_RATE, quantitypb.PRESSURE_RISE]
+             return [QuantityType.VOLUME_FLOW_RATE, QuantityType.PRESSURE_RISE]
          elif table_type == TableType.CUSTOM_SAMPLE_DOE:
              return []
          elif table_type == TableType.TEMP_VARYING:
-             return [quantitypb.TEMPERATURE, "quantity"]
+             return [QuantityType.TEMPERATURE, "quantity"]
          else:
              raise RuntimeError("Unknown type of table.")

@@ -113,7 +112,7 @@ def create_rectilinear_table(
      if isinstance(first_header, str):
          table.header.axis_label[-1].name = first_header
      else:
-         table.header.axis_label[-1].quantity = QuantityType(first_header).value
+         table.header.axis_label[-1].quantity = first_header.value
      table.axis.append(tablepb.Axis())

      for label in header[has_axis(table_type) :]:
@@ -121,7 +120,7 @@ def create_rectilinear_table(
          if isinstance(label, str):
              table.header.record_label[-1].name = label
          else:
-             table.header.record_label[-1].quantity = QuantityType(label).value
+             table.header.record_label[-1].quantity = label.value

      types = data_types(table_type, len(header))
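The net effect in tables.py is that lc_defined_header now returns QuantityType members (and plain strings) instead of raw quantity_pb2 constants, while the label-handling code above stores .value for enum entries and the string itself otherwise. A standalone sketch of that convention, reusing the MONITOR_POINTS header shown above:

from luminarycloud.enum import QuantityType

header = [QuantityType.LENGTH, QuantityType.LENGTH, QuantityType.LENGTH, "name", "id"]
for label in header:
    if isinstance(label, str):
        print("named column:", label)           # stored via .name on the label proto
    else:
        print("quantity column:", label.value)  # stored via .quantity on the label proto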
luminarycloud/types/adfloat.py CHANGED
@@ -145,6 +145,9 @@ class Expression:
              return False
          return self._value == other._value and self._expression == other._expression

+     def _to_code(self, *args) -> str:
+         return f"Expression({self._expression.__repr__()})"
+

  LcFloat = Union[float, FirstOrderAdFloat, SecondOrderAdFloat, Expression]