luminarycloud 0.22.0__py3-none-any.whl → 0.22.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. luminarycloud/_client/authentication_plugin.py +49 -0
  2. luminarycloud/_client/client.py +33 -8
  3. luminarycloud/_client/http_client.py +1 -1
  4. luminarycloud/_client/retry_interceptor.py +64 -2
  5. luminarycloud/_helpers/download.py +11 -0
  6. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +132 -132
  7. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +36 -8
  8. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +53 -23
  9. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +54 -1
  10. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +195 -0
  11. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +361 -0
  12. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +172 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +66 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +88 -65
  15. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +42 -0
  16. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.py +34 -0
  17. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.pyi +12 -0
  18. luminarycloud/_proto/base/base_pb2.py +7 -6
  19. luminarycloud/_proto/base/base_pb2.pyi +4 -0
  20. luminarycloud/_proto/client/simulation_pb2.py +3 -3
  21. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +30 -0
  22. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.pyi +7 -0
  23. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +2 -2
  24. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +34 -0
  25. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +12 -0
  26. luminarycloud/enum/vis_enums.py +6 -0
  27. luminarycloud/geometry.py +4 -0
  28. luminarycloud/geometry_version.py +4 -0
  29. luminarycloud/mesh.py +4 -0
  30. luminarycloud/meshing/mesh_generation_params.py +5 -6
  31. luminarycloud/meshing/sizing_strategy/sizing_strategies.py +1 -2
  32. luminarycloud/physics_ai/solution.py +4 -0
  33. luminarycloud/pipelines/api.py +99 -8
  34. luminarycloud/pipelines/core.py +1 -1
  35. luminarycloud/pipelines/stages.py +22 -9
  36. luminarycloud/project.py +5 -6
  37. luminarycloud/types/vector3.py +1 -2
  38. luminarycloud/vis/data_extraction.py +7 -7
  39. luminarycloud/vis/interactive_report.py +163 -7
  40. luminarycloud/vis/report.py +113 -1
  41. luminarycloud/volume_selection.py +10 -2
  42. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.1.dist-info}/METADATA +1 -1
  43. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.1.dist-info}/RECORD +44 -39
  44. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.1.dist-info}/WHEEL +1 -1
  45. luminarycloud/pipeline_util/dictable.py +0 -27
luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py CHANGED
@@ -0,0 +1,30 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: proto/physicsaiinferenceservice/physicsaiinferenceservice.proto
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import message as _message
+ from google.protobuf import reflection as _reflection
+ from google.protobuf import symbol_database as _symbol_database
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+ from luminarycloud._proto.base import base_pb2 as proto_dot_base_dot_base__pb2
+ from luminarycloud._proto.api.v0.luminarycloud.physicsaiinference import physicsaiinference_pb2 as proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n?proto/physicsaiinferenceservice/physicsaiinferenceservice.proto\x12(luminary.proto.physicsaiinferenceservice\x1a\x15proto/base/base.proto\x1a\x46proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference.proto2\xda\x06\n\x19PhysicsAiInferenceService\x12\xcd\x01\n\x19\x43reateInferenceServiceJob\x12X.luminary.proto.api.v0.luminarycloud.physicsaiinference.CreateInferenceServiceJobRequest\x1aV.luminary.proto.api.v0.luminarycloud.physicsaiinference.GetInferenceServiceJobResponse\x12\xd2\x01\n\x1e\x43reateInferenceServiceJobAsync\x12X.luminary.proto.api.v0.luminarycloud.physicsaiinference.CreateInferenceServiceJobRequest\x1aV.luminary.proto.api.v0.luminarycloud.physicsaiinference.GetInferenceServiceJobResponse\x12\xc7\x01\n\x16GetInferenceServiceJob\x12U.luminary.proto.api.v0.luminarycloud.physicsaiinference.GetInferenceServiceJobRequest\x1aV.luminary.proto.api.v0.luminarycloud.physicsaiinference.GetInferenceServiceJobResponse\x12\xcd\x01\n\x18ListInferenceServiceJobs\x12W.luminary.proto.api.v0.luminarycloud.physicsaiinference.ListInferenceServiceJobsRequest\x1aX.luminary.proto.api.v0.luminarycloud.physicsaiinference.ListInferenceServiceJobsResponseB8Z6luminarycloud.com/core/proto/physicsaiinferenceserviceb\x06proto3')
+
+
+
+ _PHYSICSAIINFERENCESERVICE = DESCRIPTOR.services_by_name['PhysicsAiInferenceService']
+ if _descriptor._USE_C_DESCRIPTORS == False:
+
+   DESCRIPTOR._options = None
+   DESCRIPTOR._serialized_options = b'Z6luminarycloud.com/core/proto/physicsaiinferenceservice'
+   _PHYSICSAIINFERENCESERVICE._serialized_start=205
+   _PHYSICSAIINFERENCESERVICE._serialized_end=1063
+ # @@protoc_insertion_point(module_scope)
luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.pyi CHANGED
@@ -0,0 +1,7 @@
+ """
+ @generated by mypy-protobuf. Do not edit manually!
+ isort:skip_file
+ """
+ import google.protobuf.descriptor
+
+ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
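The serialized descriptor above registers four RPCs on PhysicsAiInferenceService: CreateInferenceServiceJob, CreateInferenceServiceJobAsync, GetInferenceServiceJob, and ListInferenceServiceJobs. Since this release only ships the _pb2/_pb2.pyi modules for this service, a minimal sketch of calling one RPC can use grpc's generic unary_unary API. The channel target is an assumption; the method path and message names come from the descriptor, and the request's fields live in the .proto, not shown in this diff.

import grpc
from luminarycloud._proto.api.v0.luminarycloud.physicsaiinference import (
    physicsaiinference_pb2 as inference_pb2,
)

# Hypothetical endpoint; replace with a real channel in practice.
channel = grpc.insecure_channel("localhost:50051")
list_jobs = channel.unary_unary(
    "/luminary.proto.physicsaiinferenceservice.PhysicsAiInferenceService/ListInferenceServiceJobs",
    request_serializer=inference_pb2.ListInferenceServiceJobsRequest.SerializeToString,
    response_deserializer=inference_pb2.ListInferenceServiceJobsResponse.FromString,
)
response = list_jobs(inference_pb2.ListInferenceServiceJobsRequest())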
luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py CHANGED
@@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default()
  from luminarycloud._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2 as proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2


- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n=proto/physicsaitrainingservice/physicsaitrainingservice.proto\x12\'luminary.proto.physicsaitrainingservice\x1a\x36proto/api/v0/luminarycloud/physics_ai/physics_ai.proto2\xc7\x01\n\x18PhysicsAiTrainingService\x12\xaa\x01\n\x11SubmitTrainingJob\x12H.luminary.proto.api.v0.luminarycloud.physics_ai.SubmitTrainingJobRequest\x1aI.luminary.proto.api.v0.luminarycloud.physics_ai.SubmitTrainingJobResponse\"\x00\x42\x37Z5luminarycloud.com/core/proto/physicsaitrainingserviceb\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n=proto/physicsaitrainingservice/physicsaitrainingservice.proto\x12\'luminary.proto.physicsaitrainingservice\x1a\x36proto/api/v0/luminarycloud/physics_ai/physics_ai.proto2\xf4\x02\n\x18PhysicsAiTrainingService\x12\xaa\x01\n\x11SubmitTrainingJob\x12H.luminary.proto.api.v0.luminarycloud.physics_ai.SubmitTrainingJobRequest\x1aI.luminary.proto.api.v0.luminarycloud.physics_ai.SubmitTrainingJobResponse\"\x00\x12\xaa\x01\n\x11\x43\x61ncelTrainingJob\x12H.luminary.proto.api.v0.luminarycloud.physics_ai.CancelTrainingJobRequest\x1aI.luminary.proto.api.v0.luminarycloud.physics_ai.CancelTrainingJobResponse\"\x00\x42\x37Z5luminarycloud.com/core/proto/physicsaitrainingserviceb\x06proto3')



@@ -25,5 +25,5 @@ if _descriptor._USE_C_DESCRIPTORS == False:
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b'Z5luminarycloud.com/core/proto/physicsaitrainingservice'
    _PHYSICSAITRAININGSERVICE._serialized_start=163
-   _PHYSICSAITRAININGSERVICE._serialized_end=362
+   _PHYSICSAITRAININGSERVICE._serialized_end=535
  # @@protoc_insertion_point(module_scope)
luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py CHANGED
@@ -20,6 +20,11 @@ class PhysicsAiTrainingServiceStub(object):
                  request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobRequest.SerializeToString,
                  response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobResponse.FromString,
                  )
+         self.CancelTrainingJob = channel.unary_unary(
+                 '/luminary.proto.physicsaitrainingservice.PhysicsAiTrainingService/CancelTrainingJob',
+                 request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CancelTrainingJobRequest.SerializeToString,
+                 response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CancelTrainingJobResponse.FromString,
+                 )


  class PhysicsAiTrainingServiceServicer(object):
@@ -33,6 +38,13 @@ class PhysicsAiTrainingServiceServicer(object):
          context.set_details('Method not implemented!')
          raise NotImplementedError('Method not implemented!')

+     def CancelTrainingJob(self, request, context):
+         """Cancel a physics AI training job
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+

  def add_PhysicsAiTrainingServiceServicer_to_server(servicer, server):
      rpc_method_handlers = {
@@ -41,6 +53,11 @@ def add_PhysicsAiTrainingServiceServicer_to_server(servicer, server):
                  request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobRequest.FromString,
                  response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobResponse.SerializeToString,
          ),
+         'CancelTrainingJob': grpc.unary_unary_rpc_method_handler(
+                 servicer.CancelTrainingJob,
+                 request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CancelTrainingJobRequest.FromString,
+                 response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CancelTrainingJobResponse.SerializeToString,
+         ),
      }
      generic_handler = grpc.method_handlers_generic_handler(
          'luminary.proto.physicsaitrainingservice.PhysicsAiTrainingService', rpc_method_handlers)
@@ -68,3 +85,20 @@ class PhysicsAiTrainingService(object):
              proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.SubmitTrainingJobResponse.FromString,
              options, channel_credentials,
              insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def CancelTrainingJob(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/luminary.proto.physicsaitrainingservice.PhysicsAiTrainingService/CancelTrainingJob',
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CancelTrainingJobRequest.SerializeToString,
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physics__ai_dot_physics__ai__pb2.CancelTrainingJobResponse.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi CHANGED
@@ -15,6 +15,11 @@ class PhysicsAiTrainingServiceStub:
          luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.SubmitTrainingJobResponse,
      ]
      """Submit a physics AI training job"""
+     CancelTrainingJob: grpc.UnaryUnaryMultiCallable[
+         luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CancelTrainingJobRequest,
+         luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CancelTrainingJobResponse,
+     ]
+     """Cancel a physics AI training job"""

  class PhysicsAiTrainingServiceServicer(metaclass=abc.ABCMeta):
      """PhysicsAiTrainingService provides training functionality for Physics AI"""
@@ -26,5 +31,12 @@ class PhysicsAiTrainingServiceServicer(metaclass=abc.ABCMeta):
          context: grpc.ServicerContext,
      ) -> luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.SubmitTrainingJobResponse:
          """Submit a physics AI training job"""
+     @abc.abstractmethod
+     def CancelTrainingJob(
+         self,
+         request: luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CancelTrainingJobRequest,
+         context: grpc.ServicerContext,
+     ) -> luminarycloud._proto.api.v0.luminarycloud.physics_ai.physics_ai_pb2.CancelTrainingJobResponse:
+         """Cancel a physics AI training job"""

  def add_PhysicsAiTrainingServiceServicer_to_server(servicer: PhysicsAiTrainingServiceServicer, server: grpc.Server) -> None: ...
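With CancelTrainingJob now on the generated stub, cancelling a training job follows the standard grpcio pattern. A minimal sketch, assuming a reachable endpoint; the request message's fields are defined in physics_ai.proto and are not visible in this diff, so the request is left empty here.

import grpc
from luminarycloud._proto.physicsaitrainingservice import (
    physicsaitrainingservice_pb2_grpc as training_grpc,
)
from luminarycloud._proto.api.v0.luminarycloud.physics_ai import (
    physics_ai_pb2 as physics_ai,
)

channel = grpc.insecure_channel("localhost:50051")  # hypothetical endpoint
stub = training_grpc.PhysicsAiTrainingServiceStub(channel)
# Populate the job-identifying fields per physics_ai.proto before calling.
response = stub.CancelTrainingJob(physics_ai.CancelTrainingJobRequest())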
luminarycloud/enum/vis_enums.py CHANGED
@@ -73,6 +73,12 @@ class VisQuantity(IntEnum):
      Q_CRITERION_TIME_AVERAGE = quantitypb.Q_CRITERION_TIME_AVERAGE
      HEAT_FLUX_TIME_AVERAGE = quantitypb.HEAT_FLUX_TIME_AVERAGE
      DEBUG_QUANTITY = quantitypb.DEBUG_QUANTITY
+     # Actuator disk quantities
+     THRUST_PER_UNIT_AREA = quantitypb.THRUST_PER_UNIT_AREA
+     TORQUE_PER_UNIT_AREA = quantitypb.TORQUE_PER_UNIT_AREA
+     BLADE_LOCAL_ANGLE_OF_ATTACK = quantitypb.BLADE_LOCAL_ANGLE_OF_ATTACK
+     BLADE_SECTIONAL_DRAG_COEFFICIENT = quantitypb.BLADE_SECTIONAL_DRAG_COEFFICIENT
+     BLADE_SECTIONAL_LIFT_COEFFICIENT = quantitypb.BLADE_SECTIONAL_LIFT_COEFFICIENT


  # Return the text name for the VisQuantity including the units, as it appears in the UI
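The new actuator disk members are ordinary IntEnum values mirroring the quantity proto, so they can be passed anywhere a VisQuantity is accepted. A quick illustration:

from luminarycloud.enum.vis_enums import VisQuantity

q = VisQuantity.THRUST_PER_UNIT_AREA
print(q.name, int(q))  # IntEnum members compare and serialize as their proto values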
luminarycloud/geometry.py CHANGED
@@ -61,6 +61,10 @@ class Geometry(ProtoWrapperBase):
          """
          return timestamp_to_datetime(self._proto.update_time)

+     @property
+     def url(self) -> str:
+         return f"{self.project().url}/geometry/{self.id}"
+
      def project(self) -> "Project":
          """
          Get the project this geometry belongs to.
luminarycloud/geometry_version.py CHANGED
@@ -31,6 +31,10 @@ class GeometryVersion(ProtoWrapperBase):
          """
          return timestamp_to_datetime(self._proto.create_time)

+     @property
+     def url(self) -> str:
+         return f"{self.geometry().url}/version/{self.id}"
+
      def geometry(self) -> Geometry:
          """
          Get the parent geometry.
luminarycloud/mesh.py CHANGED
@@ -38,6 +38,10 @@ class Mesh(ProtoWrapperBase):
      def create_time(self) -> datetime:
          return timestamp_to_datetime(self._proto.create_time)

+     @property
+     def url(self) -> str:
+         return f"{self.project().url}/mesh/{self.id}"
+
      def project(self) -> "Project":
          """
          Get the project this mesh belongs to.
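Geometry, GeometryVersion, and Mesh each gain a url property that appends the resource ID to its parent resource's url, so deep links into the web UI can be built from SDK objects. A standalone illustration of the composition; the base project URL is an assumption, while the path segments mirror the property bodies above.

# The f-strings below mirror the new property implementations; the base URL is hypothetical.
project_url = "https://app.luminarycloud.com/projects/proj-123"
geometry_url = f"{project_url}/geometry/geom-456"
version_url = f"{geometry_url}/version/geov-789"
mesh_url = f"{project_url}/mesh/mesh-012"
print(version_url)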
luminarycloud/meshing/mesh_generation_params.py CHANGED
@@ -16,11 +16,10 @@ from ..params.geometry import (
  )
  from ..types import Vector3
  from .sizing_strategy import MaxCount, Minimal, MinimalCount, SizingStrategy, TargetCount
- from ..pipeline_util.dictable import PipelineDictable


  @dataclass(kw_only=True)
- class VolumeMeshingParams(PipelineDictable):
+ class VolumeMeshingParams:
      """Volume meshing parameters."""

      volumes: list[Volume]
@@ -39,7 +38,7 @@


  @dataclass(kw_only=True)
- class ModelMeshingParams(PipelineDictable):
+ class ModelMeshingParams:
      """Model meshing parameters."""

      surfaces: Sequence[Surface | str]
@@ -62,7 +61,7 @@


  @dataclass(kw_only=True)
- class BoundaryLayerParams(PipelineDictable):
+ class BoundaryLayerParams:
      """Boundary layer meshing parameters."""

      surfaces: Sequence[Surface | str]
@@ -88,7 +87,7 @@


  @dataclass(kw_only=True)
- class RefinementRegion(PipelineDictable):
+ class RefinementRegion:
      """Refinement region parameters."""

      name: str
@@ -138,7 +137,7 @@


  @dataclass(kw_only=True)
- class MeshGenerationParams(PipelineDictable):
+ class MeshGenerationParams:
      """Mesh generation parameters."""

      geometry_id: str
luminarycloud/meshing/sizing_strategy/sizing_strategies.py CHANGED
@@ -1,11 +1,10 @@
  from dataclasses import dataclass

  from luminarycloud._helpers.warnings.deprecated import deprecated
- from ...pipeline_util.dictable import PipelineDictable


  @dataclass
- class SizingStrategy(PipelineDictable):
+ class SizingStrategy:
      """Sizing strategy parameters."""

      pass
luminarycloud/physics_ai/solution.py CHANGED
@@ -17,6 +17,7 @@ def _download_processed_solution_physics_ai(  # noqa: F841
      process_volume: bool = False,
      single_precision: bool = True,
      internal_options: Optional[Dict[str, str]] = None,
+     export_surface_groups: Optional[Dict[str, List[str]]] = None,
  ) -> tarfile.TarFile:
      """
      Download solution data with physics AI processing applied.
@@ -37,6 +38,8 @@ def _download_processed_solution_physics_ai(  # noqa: F841
              If None, all available volume fields are included.
          process_volume: Whether to process volume data
          single_precision: Whether to use single precision for floating point fields
+         export_surface_groups: Dictionary mapping group names to lists of surface names.
+             Each group will be exported as an individual STL file.

      Raises:
          ValueError: If invalid field names are provided
@@ -46,6 +49,7 @@ def _download_processed_solution_physics_ai(  # noqa: F841
          get_default_client(),
          solution_id,
          exclude_surfaces=exclude_surfaces,
+         export_surface_groups=export_surface_groups,
          fill_holes=fill_holes,
          surface_fields_to_keep=surface_fields_to_keep,
          volume_fields_to_keep=volume_fields_to_keep,
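The new export_surface_groups argument maps group names to lists of surface names, and each group is written out as its own STL file. A sketch of the call, assuming the solution ID is the first positional argument (as the pass-through above suggests); the IDs and surface names are placeholders, and note the function is internal (leading underscore).

from luminarycloud.physics_ai.solution import _download_processed_solution_physics_ai

# Placeholder solution ID and surface names; only the keyword is from the signature above.
tar = _download_processed_solution_physics_ai(
    "solution-123",
    export_surface_groups={
        "wing": ["wing_upper", "wing_lower"],
        "fuselage": ["fuselage_main"],
    },
)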
luminarycloud/pipelines/api.py CHANGED
@@ -7,6 +7,7 @@ from time import time, sleep
  import logging

  from .arguments import PipelineArgValueType
+ from .core import Stage
  from ..pipelines import Pipeline, PipelineArgs
  from .._client import get_default_client

@@ -69,6 +70,25 @@ class PipelineRecord:
          res = get_default_client().http.get(f"/rest/v0/pipelines/{self.id}/pipeline_jobs")
          return [PipelineJobRecord.from_json(p) for p in res["data"]]

+     def delete(self) -> None:
+         """
+         Delete this pipeline.
+
+         This will permanently delete the pipeline and all associated pipeline jobs.
+         This operation cannot be undone.
+
+         Raises
+         ------
+         HTTPException
+             If the pipeline does not exist or if you do not have permission to delete it.
+
+         Examples
+         --------
+         >>> pipeline = pipelines.get_pipeline("pipeline-123")
+         >>> pipeline.delete()
+         """
+         get_default_client().http.delete(f"/rest/v0/pipelines/{self.id}")
+

  @dataclass
  class PipelineJobRecord:
@@ -80,7 +100,7 @@ class PipelineJobRecord:
      pipeline_id: str
      name: str
      description: str | None
-     status: Literal["pending", "running", "completed", "failed"]
+     status: Literal["pending", "running", "completed", "failed", "cancelled"]
      created_at: datetime
      updated_at: datetime
      started_at: datetime | None
@@ -156,18 +176,37 @@ class PipelineJobRecord:
          res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/artifacts")
          return res["data"]

+     def delete(self) -> None:
+         """
+         Delete this pipeline job.
+
+         This will permanently delete the pipeline job and all associated runs and tasks.
+         This operation cannot be undone.
+
+         Raises
+         ------
+         HTTPException
+             If the pipeline job does not exist or if you do not have permission to delete it.
+
+         Examples
+         --------
+         >>> pipeline_job = pipelines.get_pipeline_job("pipelinejob-123")
+         >>> pipeline_job.delete()
+         """
+         get_default_client().http.delete(f"/rest/v0/pipeline_jobs/{self.id}")
+
      def wait(
          self,
          *,
          interval_seconds: float = 5,
          timeout_seconds: float = float("inf"),
          print_logs: bool = False,
-     ) -> Literal["completed", "failed"]:
+     ) -> Literal["completed", "failed", "cancelled"]:
          """
-         Wait for the pipeline job to complete or fail.
+         Wait for the pipeline job to complete, fail, or be cancelled.

          This method polls the pipeline job status at regular intervals until it reaches
-         a terminal state (completed or failed).
+         a terminal state (completed, failed, or cancelled).

          Parameters
          ----------
@@ -180,7 +219,7 @@

          Returns
          -------
-         Literal["completed", "failed"]
+         Literal["completed", "failed", "cancelled"]
              The final status of the pipeline job.

          Raises
@@ -216,6 +255,9 @@
              elif updated_job.status == "failed":
                  logger.warning(f"Pipeline job {self.id} failed")
                  return "failed"
+             elif updated_job.status == "cancelled":
+                 logger.info(f"Pipeline job {self.id} was cancelled")
+                 return "cancelled"

              # Check timeout
              if time() >= deadline:
@@ -233,13 +275,53 @@
          self.started_at = updated_job.started_at
          self.completed_at = updated_job.completed_at

+     def get_concurrency_limits(self) -> dict[str, int]:
+         """
+         Returns the concurrency limits for this pipeline job.
+
+         Returns
+         -------
+         dict[str, int]
+             A dictionary mapping stage IDs to their concurrency limits.
+         """
+         res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{self.id}/concurrency_limits")
+         return {k: v["limit"] for k, v in res["data"].items()}
+
+     def set_concurrency_limits(self, limits: dict[str, int]) -> None:
+         """
+         Sets the concurrency limits for this pipeline job.
+
+         Parameters
+         ----------
+         limits : dict[str, int]
+             A dictionary mapping stage IDs to their concurrency limits.
+         """
+         body = {k: {"limit": v} for k, v in limits.items()}
+         get_default_client().http.put(f"/rest/v0/pipeline_jobs/{self.id}/concurrency_limits", body)
+
+     def cancel(self) -> None:
+         """Cancel this running pipeline job.
+
+         This will request cancellation of the underlying Prefect flow run. The
+         job should eventually transition to a cancelled terminal state once
+         the backend processes the cancellation.
+
+         Raises
+         ------
+         HTTPError
+             If the pipeline job cannot be cancelled (e.g., not found, not
+             running, or lacks the necessary Prefect flow run ID).
+         """
+         get_default_client().http.post(f"/rest/v0/pipeline_jobs/{self.id}/cancel", {})
+         logger.info(f"Cancelled pipeline job {self.id}")
+

  @dataclass
  class PipelineJobRunRecord:
      pipeline_job_id: str
      idx: int
      arguments: list[PipelineArgValueType]
-     status: Literal["pending", "running", "completed", "failed"]
+     status: Literal["pending", "running", "completed", "failed", "cancelled"]

      @classmethod
      def from_json(cls, json: dict) -> "PipelineJobRunRecord":
@@ -347,7 +429,11 @@ def get_pipeline(id: str) -> PipelineRecord:


  def create_pipeline_job(
-     pipeline_id: str, args: PipelineArgs, name: str, description: str | None = None
+     pipeline_id: str,
+     args: PipelineArgs,
+     name: str,
+     description: str | None = None,
+     concurrency_limits: dict[str, int] | None = None,
  ) -> PipelineJobRecord:
      """
      Create a new pipeline job.
@@ -362,6 +448,8 @@ def create_pipeline_job(
          Name of the pipeline job.
      description : str, optional
          Description of the pipeline job.
+     concurrency_limits : dict[str, int], optional
+         A dictionary mapping stage IDs to their concurrency limits.
      """

      arg_rows = [row.row_values for row in args.rows]
@@ -373,7 +461,10 @@
      }

      res = get_default_client().http.post(f"/rest/v0/pipelines/{pipeline_id}/pipeline_jobs", body)
-     return PipelineJobRecord.from_json(res["data"])
+     pjr = PipelineJobRecord.from_json(res["data"])
+     if concurrency_limits is not None:
+         pjr.set_concurrency_limits(concurrency_limits)
+     return pjr


  def get_pipeline_job(id: str) -> PipelineJobRecord:
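Taken together, these additions cover the full job lifecycle: create with per-stage concurrency limits, inspect or adjust them while running, cancel, and wait for the new cancelled terminal state. A sketch, assuming the module is importable as shown and that a PipelineArgs instance was built elsewhere; all IDs are placeholders.

from luminarycloud.pipelines import api as pipelines  # import path assumed

job = pipelines.create_pipeline_job(
    "pipeline-123",
    args,  # a PipelineArgs instance built elsewhere
    name="design sweep",
    concurrency_limits={"stage-abc": 2},  # stage ID is a placeholder
)
print(job.get_concurrency_limits())  # e.g. {"stage-abc": 2}
job.set_concurrency_limits({"stage-abc": 4})
job.cancel()
status = job.wait()  # returns "cancelled" once the backend processes the request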
luminarycloud/pipelines/core.py CHANGED
@@ -292,7 +292,7 @@ class Pipeline:
      def pipeline_params(self) -> set[PipelineParameter]:
          return self._stages_dict_and_params()[1]

-     def _get_stage_id(self, stage: Stage) -> str:
+     def get_stage_id(self, stage: Stage) -> str:
          return self._stage_ids[stage]

      def _stages_dict_and_params(self) -> tuple[dict, set[PipelineParameter]]:
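Renaming _get_stage_id to get_stage_id makes it the public way to obtain the stage IDs that key the concurrency-limits dict above. A sketch, assuming a Pipeline named pipeline and a Stage named mesh_stage assembled elsewhere with the pipelines DSL:

# Both `pipeline` and `mesh_stage` are assumptions here, as is `job` from the sketch above.
stage_id = pipeline.get_stage_id(mesh_stage)
job.set_concurrency_limits({stage_id: 2})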
luminarycloud/pipelines/stages.py CHANGED
@@ -2,7 +2,7 @@
  from dataclasses import dataclass

  from .core import Stage, StageInputs, StageOutputs, PipelineOutput
- from .parameters import StringPipelineParameter, IntPipelineParameter
+ from .parameters import BoolPipelineParameter, StringPipelineParameter, IntPipelineParameter
  from ..meshing import MeshGenerationParams


@@ -49,6 +49,20 @@ class ReadGeometry(Stage[ReadGeometryOutputs]):
      ----------
      geometry_id : str | StringPipelineParameter
          The ID of the Geometry to retrieve.
+     use_geo_without_copying : bool | BoolPipelineParameter
+         By default, this is False, meaning that each Geometry this stage references will be copied
+         and the PipelineJob will actually operate on the copied Geometry. This is done so that a
+         PipelineJob can be based on a single parametric Geometry which each PipelineJobRun modifies
+         by applying a NamedVariableSet. That modification mutates the Geometry, so those runs can
+         only happen in parallel without interfering with each other if they each operate on a
+         different copy of the Geometry.
+
+         However, if you've already prepared your Geometry in advance and you don't want the
+         PipelineJob to create copies, you can set this to True. In that case, the referenced
+         Geometry will be used directly without being copied.
+
+         IMPORTANT: If you set this to True, you must ensure no two PipelineJobRuns operate on the
+         same Geometry, i.e. no two PipelineArgs rows contain the same Geometry ID.

      Outputs
      -------
@@ -61,10 +75,11 @@ class ReadGeometry(Stage[ReadGeometryOutputs]):
          *,
          stage_name: str | None = None,
          geometry_id: str | StringPipelineParameter,
+         use_geo_without_copying: bool | BoolPipelineParameter = False,
      ):
          super().__init__(
              stage_name,
-             {"geometry_id": geometry_id},
+             {"geometry_id": geometry_id, "use_geo_without_copying": use_geo_without_copying},
              StageInputs(self),
              ReadGeometryOutputs._instantiate_for(self),
          )
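A sketch of the new flag in use; the import path and geometry ID are assumptions. Leaving the flag at its False default keeps the copy-per-job behavior described above.

from luminarycloud.pipelines.stages import ReadGeometry  # path assumed

# Only safe if every PipelineArgs row references a distinct Geometry ID.
read_geo = ReadGeometry(
    geometry_id="geometry-123",
    use_geo_without_copying=True,
)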
@@ -202,13 +217,6 @@ class Mesh(Stage[MeshOutputs]):
          MeshOutputs._instantiate_for(self),
      )

-     # TODO: bring back the full MeshGenerationParams, but we need to be able to hydrate it from the
-     # pipeline YAML. I can probably bake that logic into PipelineDictable, `from_pipeline_dict` or
-     # something.
-     # @classmethod
-     # def _parse_params(cls, params: dict) -> dict:
-     #     return {"mesh_gen_params": MeshGenerationParams.from_pipeline_dict(**params["mesh_gen_params"])}
-

  @dataclass
  class SimulateOutputs(StageOutputs):
@@ -230,6 +238,9 @@ class Simulate(Stage[SimulateOutputs]):
          The name to assign to the Simulation. If None, a default name will be used.
      sim_template_id : str | StringPipelineParameter
          The ID of the SimulationTemplate to use for the Simulation.
+     batch_processing : bool | BoolPipelineParameter, default True
+         If True, the Simulation will run as a standard job. If False, the Simulation will run as a
+         priority job.

      Outputs
      -------
@@ -244,10 +255,12 @@ class Simulate(Stage[SimulateOutputs]):
          mesh: PipelineOutputMesh,
          sim_name: str | StringPipelineParameter | None = None,
          sim_template_id: str | StringPipelineParameter,
+         batch_processing: bool | BoolPipelineParameter = True,
      ):
          super().__init__(
              stage_name,
              {
+                 "batch_processing": batch_processing,
                  "sim_name": sim_name,
                  "sim_template_id": sim_template_id,
              },
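The matching flag on Simulate; a sketch assuming an upstream Mesh stage named mesh_stage whose output feeds the simulation (the import path and output wiring shown are assumptions).

from luminarycloud.pipelines.stages import Simulate  # path assumed

sim = Simulate(
    mesh=mesh_stage.outputs.mesh,  # hypothetical wiring from an upstream Mesh stage
    sim_template_id="simtpl-123",
    batch_processing=False,  # run as a priority job instead of a standard job
)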
luminarycloud/project.py CHANGED
@@ -485,15 +485,14 @@ class Project(ProtoWrapperBase):
          description : str, optional
              Simulation description.
          batch_processing : bool, default True
-             If True, batch processing will be used for this
-             simulation.
-             Use Batch Processing on simulations that are not time-sensitive to
-             save up to 65% in credits.
+             If True, this simulation will run as a standard job. If False, this simulation will run
+             as a priority job.
          gpu_type : GPUType, optional
              GPU type to use for the simulation.
          gpu_count : int, optional
-             Number of GPUs to use for the simulation. Must be specified to a
-             positive value if `gpu_type` is specified.
+             Number of GPUs to use for the simulation. Only relevant if `gpu_type` is
+             specified. If this is set to 0 or omitted and `gpu_type` is specified, the number
+             of GPUs will be automatically determined.
          """

          named_variable_set_version_id: Optional[str] = None
luminarycloud/types/vector3.py CHANGED
@@ -8,11 +8,10 @@ from .adfloat import (
      _to_ad_proto as _float_to_ad_proto,
      _from_ad_proto as _float_from_ad_proto,
  )
- from ..pipeline_util.dictable import PipelineDictable


  @dataclass
- class Vector3(PipelineDictable):
+ class Vector3:
      """Represents a 3-dimensional vector.

      Supports direct component access, indexing, iteration, and conversion to numpy arrays.
luminarycloud/vis/data_extraction.py CHANGED
@@ -602,7 +602,7 @@ class DataExtractor:
          code += f"    if sol.id == '{self._solution.id}':\n"
          code += f"        solution = sol\n"
          code += f"        break\n"
-         code += "data_extractor = vis.DataExtractor(solution)\n"
+         code += f"{obj_name} = vis.DataExtractor(solution)\n"
          code += "\n"

          code += "\n"
@@ -615,11 +615,11 @@ class DataExtractor:
          for extract in self._extracts:
              # Name objects numerically: slice0, slice1, etc.
              name = _data_extract_to_obj_name(extract)
-             obj_name = f"{name}{name_map[obj_name]}"
-             name_map[obj_name] += 1
-             ids_to_obj_name[extract.id] = obj_name
-             code += extract._to_code_helper(obj_name, hide_defaults=hide_defaults)
-             code += f"data_extractor.add_data_extract({obj_name})\n"
+             extract_obj_name = f"{name}{name_map[name]}"
+             name_map[name] += 1
+             ids_to_obj_name[extract.id] = extract_obj_name
+             code += extract._to_code_helper(extract_obj_name, hide_defaults=hide_defaults)
+             code += f"{obj_name}.add_data_extract({extract_obj_name})\n"
          code += "\n"

          if include_imports:
@@ -649,7 +649,7 @@ class DataExtractor:
          code = "\n".join(filtered_lines)

          code += "\n"
-         code += "extract_output = extractor.create_extracts(name='extract data', description='lonerg description')\n"
+         code += f"extract_output = {obj_name}.create_extracts(name='extract data', description='longer description')\n"
          code += "status = extract_output.wait()\n"
          code += "if status == ExtractStatusType.COMPLETED:\n"
          code += "    extract_output.save_files('data_extracts_prefix', True)\n"