luminarycloud 0.22.1__py3-none-any.whl → 0.22.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. luminarycloud/_client/client.py +5 -3
  2. luminarycloud/_helpers/__init__.py +9 -0
  3. luminarycloud/_helpers/_inference_jobs.py +227 -0
  4. luminarycloud/_helpers/_parse_iso_datetime.py +54 -0
  5. luminarycloud/_helpers/proto_decorator.py +38 -7
  6. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +45 -25
  7. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +30 -0
  8. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +34 -0
  9. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +12 -0
  10. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +118 -45
  11. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +246 -2
  12. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +93 -33
  15. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +105 -0
  16. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +70 -0
  17. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +29 -0
  18. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +29 -7
  19. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +39 -0
  20. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +36 -0
  21. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +18 -0
  22. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +70 -70
  23. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +5 -5
  24. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +163 -153
  25. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +37 -3
  26. luminarycloud/_proto/client/simulation_pb2.py +356 -337
  27. luminarycloud/_proto/client/simulation_pb2.pyi +89 -3
  28. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +9 -4
  29. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +6 -3
  30. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +34 -0
  31. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +12 -0
  32. luminarycloud/_wrapper.py +53 -7
  33. luminarycloud/feature_modification.py +25 -32
  34. luminarycloud/geometry.py +6 -6
  35. luminarycloud/outputs/__init__.py +2 -0
  36. luminarycloud/outputs/output_definitions.py +3 -3
  37. luminarycloud/outputs/stopping_conditions.py +94 -0
  38. luminarycloud/params/enum/_enum_wrappers.py +16 -0
  39. luminarycloud/params/geometry/shapes.py +33 -33
  40. luminarycloud/params/simulation/adaptive_mesh_refinement/__init__.py +1 -0
  41. luminarycloud/params/simulation/adaptive_mesh_refinement/active_region_.py +83 -0
  42. luminarycloud/params/simulation/adaptive_mesh_refinement/boundary_layer_profile_.py +1 -1
  43. luminarycloud/params/simulation/adaptive_mesh_refinement_.py +8 -1
  44. luminarycloud/physics_ai/__init__.py +7 -0
  45. luminarycloud/physics_ai/inference.py +166 -199
  46. luminarycloud/physics_ai/models.py +22 -0
  47. luminarycloud/pipelines/api.py +45 -9
  48. luminarycloud/project.py +56 -2
  49. luminarycloud/simulation.py +25 -0
  50. luminarycloud/types/__init__.py +2 -0
  51. luminarycloud/types/ids.py +2 -0
  52. luminarycloud/vis/__init__.py +1 -0
  53. luminarycloud/vis/filters.py +97 -0
  54. luminarycloud/vis/visualization.py +3 -0
  55. luminarycloud/volume_selection.py +6 -6
  56. luminarycloud/workflow_utils.py +149 -0
  57. {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/METADATA +1 -1
  58. {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/RECORD +59 -60
  59. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +0 -61
  60. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +0 -85
  61. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py +0 -67
  62. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi +0 -26
  63. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +0 -69
  64. {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/WHEEL +0 -0
@@ -1,217 +1,184 @@
1
1
  # File: python/sdk/luminarycloud/inference/inference.py
2
2
  # Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
3
- from datetime import datetime
4
- from typing import Any, Callable, Dict, Optional
5
- from json import loads as json_loads, dumps as json_dumps
6
- from dataclasses import dataclass
7
- import base64
8
- import os
9
- import urllib.request
10
-
11
- from .._client import get_default_client
12
- from .._helpers._timestamp_to_datetime import timestamp_to_datetime
13
- from .._proto.api.v0.luminarycloud.inference import inference_pb2 as inferencepb
14
- from .._proto.inferenceservice import inferenceservice_pb2 as inferenceservicepb
3
+ from typing import Any
4
+ from enum import IntEnum
5
+ from google.protobuf.json_format import MessageToDict
15
6
  from .._wrapper import ProtoWrapper, ProtoWrapperBase
16
- from ..project import Project
17
- from ..project import Project
18
- from .._helpers import upload_file
19
- from .._proto.upload import upload_pb2 as uploadpb
20
- from ..types.ids import PhysicsAiModelVersionID
7
+ from .._proto.api.v0.luminarycloud.physicsaiinference import (
8
+ physicsaiinference_pb2 as physicsaiinferencepb,
9
+ )
10
+ from .._proto.base import base_pb2 as basepb
21
11
 
22
12
 
23
- @dataclass
24
- class ExtAeroInferenceResult:
25
- """Result of an external aerodynamic inference job.
13
+ class VisualizationOutput(IntEnum):
14
+ """
15
+ Represents the type of visualization output.
26
16
 
27
17
  Attributes
28
18
  ----------
29
- drag_force: float
30
- The drag force returned from the inference.
31
- lift_force: float
32
- The lift force returned from the inference.
33
- wall_shear_stress:
34
- A dict containing wall shear stress data, or None if not available.
35
- pressure_surface:
36
- A dict containing pressure surface stress data, or None if not available.
19
+ INVALID
20
+ Invalid visualization output type.
21
+ LUMINARY
22
+ Luminary visualization format.
23
+ VTK
24
+ VTK visualization format.
37
25
  """
38
26
 
39
- drag_force: float
40
- lift_force: float
41
- wall_shear_stress: dict[str, Any] | None
42
- pressure_surface: dict[str, Any] | None
43
-
44
- def __init__(self, inference_result: dict[str, Any]) -> None:
45
- self.drag_force = inference_result["drag_force"]
46
- self.lift_force = inference_result["lift_force"]
47
- self.wall_shear_stress = inference_result.get("wall-shear-stress", None)
48
- self.pressure_surface = inference_result.get("pressure_surface", None)
49
-
50
-
51
- def external_aero_inference(
52
- project: Project,
53
- stl_file: str,
54
- model_version_id: PhysicsAiModelVersionID,
55
- conditions: Optional[Dict[str, Any]] = None,
56
- settings: Optional[Dict[str, Any]] = None,
57
- write_visualization_data=False,
58
- ) -> ExtAeroInferenceResult:
59
- """Performs an inference job returning external aerodynamic results.
60
- Parameters
61
- ----------
62
- project : Project
63
- The project to which the inference files will be added.
64
- stl_file : str
65
- Fullpath the STL file to be used for inference.
66
- model_version_id : PhysicsAiModelVersionID
67
- The ID of the trained model version to use for inference.
68
- conditions : Dict[str, Any], optional
69
- Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
70
- settings : Dict[str, Any], optional
71
- Dictionary of settings to be passed to inference service (e.g., stencil_size)
72
- write_visualization_data : bool, optional
73
- Whether to write LC visualization data for visualization by Luminary.
74
-
75
-
76
- Returns
77
- ExtAeroInferenceResult
78
- Result of the external aerodynamic inference job.
79
-
80
- warning:: This feature is experimental and may change or be removed without notice.
27
+ INVALID = physicsaiinferencepb.INVALID
28
+ LUMINARY = physicsaiinferencepb.LUMINARY
29
+ VTK = physicsaiinferencepb.VTK
30
+
31
+
32
+ class InferenceFieldType(IntEnum):
81
33
  """
34
+ Represents the type of an inference field.
82
35
 
83
- result = perform_inference(
84
- project, stl_file, model_version_id, conditions, settings, write_visualization_data
85
- )
86
- return ExtAeroInferenceResult(result)
87
-
88
-
89
- def perform_inference(
90
- project: Project,
91
- stl_file: str,
92
- model_version_id: PhysicsAiModelVersionID,
93
- conditions: Optional[Dict[str, Any]] = None,
94
- settings: Optional[Dict[str, Any]] = None,
95
- write_visualization_data=False,
96
- ) -> dict[str, Any]:
97
- """Creates an inference service job.
98
- Parameters
36
+ Attributes
99
37
  ----------
100
- project : Project
101
- The project to which the inference files will be added.
102
- stl_file : str
103
- Fullpath the STL file to be used for inference.
104
- model_version_id : PhysicsAiModelVersionID
105
- The ID of the trained model version to use for inference.
106
- conditions : Dict[str, Any], optional
107
- Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
108
- settings : Dict[str, Any], optional
109
- Dictionary of settings to be passed to inference service (e.g., stencil_size)
110
- write_visualization_data : bool, optional
111
- Whether to write LC visualization data for visualization by Luminary.
112
-
113
-
114
- Returns
115
- dict[str, Any]
116
- Response from the server as key-value pairs.
117
-
118
- warning:: This feature is experimental and may change or be removed without notice.
38
+ UNKNOWN_TYPE
39
+ Unknown field type.
40
+ SCALAR
41
+ Scalar field type (single value).
42
+ VECTOR
43
+ Vector field type (multiple values).
119
44
  """
120
45
 
121
- client = get_default_client()
122
-
123
- def upload_if_file(fname: str) -> str:
124
- if os.path.exists(fname) and os.path.isfile(fname):
125
- params = uploadpb.ResourceParams()
126
- result = upload_file(client, project.id, params, fname)
127
- return result[1].url
128
- if fname.startswith("gs://"):
129
- return fname
130
- raise RuntimeError("Unsupported file for inference")
131
-
132
- def future_file(url: str) -> Callable[[], dict[str, Any]]:
133
- def download_file() -> dict[str, Any]:
134
- with urllib.request.urlopen(url) as f:
135
- serialized = f.read()
136
- jsondata = json_loads(serialized)
137
- data = base64.b64decode(jsondata["data"])
138
- jsondata["data"] = data
139
- return jsondata
140
-
141
- return download_file
142
-
143
- stl_url = upload_if_file(stl_file)
144
-
145
- raw = start_inference_job(
146
- project, stl_url, model_version_id, conditions, settings, write_visualization_data
147
- )
148
- currated: dict[str, Any] = {}
149
- for k, v in raw.items():
150
- if isinstance(v, str) and v.startswith("https://"):
151
- tmp = future_file(v)
152
- if k.endswith("_url"):
153
- currated[k[:-4]] = tmp
154
- currated[k] = v
155
- else:
156
- currated[k] = tmp
157
- currated[k + "_url"] = v
158
- else:
159
- currated[k] = v
160
- return currated
161
-
162
-
163
- def start_inference_job(
164
- project: Project,
165
- stl_url: str,
166
- model_version_id: PhysicsAiModelVersionID,
167
- conditions: Optional[Dict[str, Any]] = None,
168
- settings: Optional[Dict[str, Any]] = None,
169
- write_visualization_data=False,
170
- ) -> dict[str, Any]:
171
- """Creates an inference service job.
172
- Parameters
46
+ UNKNOWN_TYPE = physicsaiinferencepb.UNKNOWN_TYPE
47
+ SCALAR = physicsaiinferencepb.SCALAR
48
+ VECTOR = physicsaiinferencepb.VECTOR
49
+
50
+
51
+ class InferenceFieldCategory(IntEnum):
52
+ """
53
+ Represents the category of an inference field.
54
+
55
+ Attributes
173
56
  ----------
174
- project : Project
175
- Reference to a project.
176
- stl_url : str
177
- URL of the STL file to be used for inference.
178
- model_version_id : PhysicsAiModelVersionID
179
- The ID of the trained model version to use for inference.
180
- conditions : Dict[str, Any], optional
181
- Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
182
- settings : Dict[str, Any], optional
183
- Dictionary of settings to be passed to inference service (e.g., stencil_size)
184
- write_visualization_data : bool, optional
185
- Whether to write LC visualization data for visualization by Luminary.
186
-
187
-
188
- Returns
189
- dict[str, Any]
190
- Response from the server as key-value pairs.
191
-
192
- warning:: This feature is experimental and may change or be removed without notice.
57
+ UNKNOWN_CATEGORY
58
+ Unknown field category.
59
+ NUMERIC
60
+ Numeric field category (e.g., forces, moments).
61
+ SURFACE
62
+ Surface field category (e.g., surface pressure).
63
+ VOLUME
64
+ Volume field category (e.g., velocity, pressure).
193
65
  """
194
66
 
195
- # Embed settings and store as bytes
196
- settings_bytes = b""
197
- if settings is not None:
198
- settings_bytes = json_dumps(settings).encode("utf-8")
199
-
200
- # Convert parameters dict to bytes if provided
201
- conditions_bytes = b""
202
- if conditions is not None:
203
- conditions_bytes = json_dumps(conditions).encode("utf-8")
204
-
205
- req = inferencepb.CreateInferenceServiceJobRequest(
206
- stl_url=stl_url,
207
- model_version_id=str(model_version_id),
208
- conditions=conditions_bytes,
209
- settings=settings_bytes,
210
- project_id=project.id,
211
- write_visualization_data=write_visualization_data,
212
- )
213
- res: inferencepb.CreateInferenceServiceJobResponse = (
214
- get_default_client().CreateInferenceServiceJob(req)
215
- )
216
-
217
- return json_loads(str(res.response, encoding="utf-8"))
67
+ UNKNOWN_CATEGORY = physicsaiinferencepb.UNKNOWN_CATEGORY
68
+ NUMERIC = physicsaiinferencepb.NUMERIC
69
+ SURFACE = physicsaiinferencepb.SURFACE
70
+ VOLUME = physicsaiinferencepb.VOLUME
71
+
72
+
73
+ @ProtoWrapper(physicsaiinferencepb.VisualizationExport)
74
+ class VisualizationExport(ProtoWrapperBase):
75
+ """Represents a visualization export."""
76
+
77
+ type: VisualizationOutput
78
+ url: str
79
+ _proto: physicsaiinferencepb.VisualizationExport
80
+
81
+ def get_url(self) -> str:
82
+ return self.url
83
+
84
+
85
+ @ProtoWrapper(physicsaiinferencepb.NumericResult)
86
+ class NumericResult(ProtoWrapperBase):
87
+ """Represents a numeric result."""
88
+
89
+ scalar: float
90
+ vector: list[float]
91
+ _proto: physicsaiinferencepb.NumericResult
92
+
93
+ def get_value(self) -> Any:
94
+ if self._proto.HasField("scalar"):
95
+ return self.scalar
96
+ if self._proto.HasField("vector"):
97
+ return list(self._proto.vector.values)
98
+ return None
99
+
100
+
101
+ @ProtoWrapper(physicsaiinferencepb.SurfaceForInference)
102
+ class SurfaceForInference(ProtoWrapperBase):
103
+ """Represents a surface for inference."""
104
+
105
+ name: str
106
+ url: str
107
+ _proto: physicsaiinferencepb.SurfaceForInference
108
+
109
+ def get_name(self) -> str:
110
+ return self.name
111
+
112
+ def get_url(self) -> str:
113
+ return self.url
114
+
115
+
116
+ @ProtoWrapper(physicsaiinferencepb.InferenceResult)
117
+ class InferenceResult(ProtoWrapperBase):
118
+ """Represents an inference result."""
119
+
120
+ name: str
121
+ surface_results: dict[str, str]
122
+ volume_results: dict[str, str]
123
+ visualizations: list[VisualizationExport]
124
+
125
+ @property
126
+ def number_outputs(self) -> dict[str, NumericResult]:
127
+ """Returns number_outputs with wrapped NumericResult values."""
128
+ return {k: NumericResult(v) for k, v in self._proto.number_outputs.items()}
129
+
130
+ def get_number_outputs(self) -> dict[str, Any]:
131
+ return MessageToDict(self._proto.number_outputs, preserving_proto_field_name=True)
132
+
133
+ def get_surface_results(self) -> dict[str, Any]:
134
+ return MessageToDict(self.surface_results, preserving_proto_field_name=True)
135
+
136
+ def get_volume_results(self) -> dict[str, Any]:
137
+ return MessageToDict(self.volume_results, preserving_proto_field_name=True)
138
+
139
+ def get_visualizations(self) -> list[dict[str, Any]]:
140
+ return [
141
+ MessageToDict(viz._proto, preserving_proto_field_name=True)
142
+ for viz in self.visualizations
143
+ ]
144
+
145
+
146
+ @ProtoWrapper(physicsaiinferencepb.InferenceServiceJob)
147
+ class InferenceJob(ProtoWrapperBase):
148
+ """Represents an inference service job."""
149
+
150
+ job_id: str
151
+ status: basepb.JobStatus
152
+ results: list[InferenceResult]
153
+ merged_visualizations: list[VisualizationExport]
154
+ _proto: physicsaiinferencepb.InferenceServiceJob
155
+
156
+ @property
157
+ def id(self) -> str:
158
+ """Alias for job_id for convenience."""
159
+ return self.job_id
160
+
161
+ def get_status(self) -> str:
162
+ return basepb.JobStatusType.Name(self.status.typ)
163
+
164
+ def get_results(self) -> list[dict[str, Any]]:
165
+ return [
166
+ MessageToDict(result._proto, preserving_proto_field_name=True)
167
+ for result in self.results
168
+ ]
169
+
170
+ def get_merged_visualizations(self) -> list[dict[str, Any]]:
171
+ return [
172
+ MessageToDict(viz._proto, preserving_proto_field_name=True)
173
+ for viz in self.merged_visualizations
174
+ ]
175
+
176
+
177
+ @ProtoWrapper(physicsaiinferencepb.InferenceField)
178
+ class InferenceField(ProtoWrapperBase):
179
+ """Represents an inference field."""
180
+
181
+ name: str
182
+ type: InferenceFieldType
183
+ category: InferenceFieldCategory
184
+ _proto: physicsaiinferencepb.InferenceField
@@ -3,6 +3,9 @@ from typing import List, Optional
3
3
 
4
4
  from .._client import get_default_client
5
5
  from .._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2 as physaipb
6
+ from .._proto.api.v0.luminarycloud.physicsaiinference import (
7
+ physicsaiinference_pb2 as physicsaiinferencepb,
8
+ )
6
9
  from .._wrapper import ProtoWrapper, ProtoWrapperBase
7
10
  from ..types.ids import PhysicsAiModelID, PhysicsAiModelVersionID
8
11
  from ..enum.physics_ai_lifecycle_state import PhysicsAiLifecycleState
@@ -21,6 +24,25 @@ class PhysicsAiModelVersion(ProtoWrapperBase):
21
24
  lifecycle_state: PhysicsAiLifecycleState
22
25
  _proto: physaipb.PhysicsAiModelVersion
23
26
 
27
+ def get_inference_fields(self) -> list[str]:
28
+ """Gets the inference fields available for a trained model version.
29
+
30
+ This retrieves the list of output fields that can be requested from a specific
31
+ model version during inference.
32
+
33
+ Returns
34
+ -------
35
+ list[str]
36
+ List of available inference field names that can be requested from the model.
37
+
38
+ warning:: This feature is experimental and may change or be removed without notice.
39
+ """
40
+ req = physicsaiinferencepb.GetInferenceFieldsRequest(model_version_id=str(self.id))
41
+ res: physicsaiinferencepb.GetInferenceFieldsResponse = (
42
+ get_default_client().GetInferenceFields(req)
43
+ )
44
+ return list(res.inference_fields)
45
+
24
46
 
25
47
  @ProtoWrapper(physaipb.PhysicsAiModel)
26
48
  class PhysicsAiModel(ProtoWrapperBase):
@@ -10,6 +10,7 @@ from .arguments import PipelineArgValueType
10
10
  from .core import Stage
11
11
  from ..pipelines import Pipeline, PipelineArgs
12
12
  from .._client import get_default_client
13
+ from .._helpers import parse_iso_datetime
13
14
 
14
15
  logger = logging.getLogger(__name__)
15
16
 
@@ -23,7 +24,7 @@ class LogLine:
23
24
  @classmethod
24
25
  def from_json(cls, json: dict) -> "LogLine":
25
26
  return cls(
26
- timestamp=datetime.fromisoformat(json["timestamp"]),
27
+ timestamp=parse_iso_datetime(json["timestamp"]),
27
28
  level=json["level"],
28
29
  message=json["message"],
29
30
  )
@@ -54,8 +55,8 @@ class PipelineRecord:
54
55
  name=json["name"],
55
56
  description=json["description"],
56
57
  definition_yaml=json["definition_yaml"],
57
- created_at=datetime.fromisoformat(json["created_at"]),
58
- updated_at=datetime.fromisoformat(json["updated_at"]),
58
+ created_at=parse_iso_datetime(json["created_at"]),
59
+ updated_at=parse_iso_datetime(json["updated_at"]),
59
60
  )
60
61
 
61
62
  def pipeline_jobs(self) -> "list[PipelineJobRecord]":
@@ -100,11 +101,12 @@ class PipelineJobRecord:
100
101
  pipeline_id: str
101
102
  name: str
102
103
  description: str | None
103
- status: Literal["pending", "running", "completed", "failed", "cancelled"]
104
+ status: Literal["pending", "running", "completed", "failed", "cancelled", "paused"]
104
105
  created_at: datetime
105
106
  updated_at: datetime
106
107
  started_at: datetime | None
107
108
  completed_at: datetime | None
109
+ paused_at: datetime | None
108
110
 
109
111
  @classmethod
110
112
  def from_json(cls, json: dict) -> "PipelineJobRecord":
@@ -112,14 +114,15 @@ class PipelineJobRecord:
112
114
  id=json["id"],
113
115
  pipeline_id=json["pipeline_id"],
114
116
  name=json["name"],
115
- description=json["description"],
117
+ description=json.get("description"),
116
118
  status=json["status"],
117
- created_at=datetime.fromisoformat(json["created_at"]),
118
- updated_at=datetime.fromisoformat(json["updated_at"]),
119
- started_at=datetime.fromisoformat(json["started_at"]) if json["started_at"] else None,
119
+ created_at=parse_iso_datetime(json["created_at"]),
120
+ updated_at=parse_iso_datetime(json["updated_at"]),
121
+ started_at=(parse_iso_datetime(json["started_at"]) if json.get("started_at") else None),
120
122
  completed_at=(
121
- datetime.fromisoformat(json["completed_at"]) if json["completed_at"] else None
123
+ parse_iso_datetime(json["completed_at"]) if json.get("completed_at") else None
122
124
  ),
125
+ paused_at=(parse_iso_datetime(json["paused_at"]) if json.get("paused_at") else None),
123
126
  )
124
127
 
125
128
  def pipeline(self) -> PipelineRecord:
@@ -315,6 +318,39 @@ class PipelineJobRecord:
315
318
  get_default_client().http.post(f"/rest/v0/pipeline_jobs/{self.id}/cancel", {})
316
319
  logger.info(f"Cancelled pipeline job {self.id}")
317
320
 
321
+ def pause(self) -> None:
322
+ """Pause this running pipeline job.
323
+
324
+ This will prevent new tasks from being scheduled while allowing
325
+ in-progress tasks to complete. The job status will be set to PAUSED
326
+ and all stage concurrency limits will be temporarily set to 0.
327
+
328
+ Call resume() to continue execution.
329
+
330
+ Raises
331
+ ------
332
+ HTTPError
333
+ If the pipeline job cannot be paused (e.g., not found or not in
334
+ RUNNING state).
335
+ """
336
+ get_default_client().http.post(f"/rest/v0/pipeline_jobs/{self.id}/pause", {})
337
+ logger.info(f"Paused pipeline job {self.id}")
338
+
339
+ def resume(self) -> None:
340
+ """Resume this paused pipeline job.
341
+
342
+ This will restore the job status to RUNNING and restore the original
343
+ concurrency limits, allowing new tasks to be scheduled again.
344
+
345
+ Raises
346
+ ------
347
+ HTTPError
348
+ If the pipeline job cannot be resumed (e.g., not found or not in
349
+ PAUSED state).
350
+ """
351
+ get_default_client().http.post(f"/rest/v0/pipeline_jobs/{self.id}/resume", {})
352
+ logger.info(f"Resumed pipeline job {self.id}")
353
+
318
354
 
319
355
  @dataclass
320
356
  class PipelineJobRunRecord:
luminarycloud/project.py CHANGED
@@ -25,6 +25,10 @@ from ._helpers import (
25
25
  upload_file,
26
26
  upload_mesh,
27
27
  upload_table_as_json,
28
+ create_inference_job,
29
+ get_inference_job,
30
+ list_inference_jobs,
31
+ SurfaceForInference,
28
32
  )
29
33
  from ._helpers.warnings import deprecated
30
34
  from ._proto.api.v0.luminarycloud.geometry import geometry_pb2 as geometrypb
@@ -47,10 +51,20 @@ from ._proto.upload import upload_pb2 as uploadpb
47
51
  from ._wrapper import ProtoWrapper, ProtoWrapperBase
48
52
  from .enum import GPUType, MeshType, TableType
49
53
  from .meshing import MeshAdaptationParams, MeshGenerationParams
54
+ from .named_variable_set import NamedVariableSet, get_named_variable_set
55
+ from .physics_ai.inference import InferenceJob, VisualizationExport
50
56
  from .simulation_param import SimulationParam
51
57
  from .tables import RectilinearTable, create_rectilinear_table
52
- from .types import MeshID, ProjectID, SimulationTemplateID, NamedVariableSetID, Expression, LcFloat
53
- from .named_variable_set import get_named_variable_set
58
+ from .types import (
59
+ MeshID,
60
+ ProjectID,
61
+ SimulationTemplateID,
62
+ NamedVariableSetID,
63
+ Expression,
64
+ LcFloat,
65
+ PhysicsAiInferenceJobID,
66
+ PhysicsAiModelVersionID,
67
+ )
54
68
 
55
69
  if TYPE_CHECKING:
56
70
  from .geometry import Geometry
@@ -701,6 +715,46 @@ class Project(ProtoWrapperBase):
701
715
  req = projectpb.UnshareProjectWithSupportRequest(id=self.id)
702
716
  get_default_client().UnshareProjectWithSupport(req)
703
717
 
718
+ def create_inference_job(
719
+ self,
720
+ geometry: str,
721
+ model_version_id: PhysicsAiModelVersionID,
722
+ synchronous: bool = False,
723
+ conditions: Optional[Dict[str, Any]] = None,
724
+ settings: Optional[Dict[str, Any]] = None,
725
+ surfaces: Optional[list[SurfaceForInference]] = None,
726
+ inference_fields: Optional[list[str]] = None,
727
+ per_surface_visualizations: Optional[list[VisualizationExport]] = None,
728
+ merged_visualizations: Optional[list[VisualizationExport]] = None,
729
+ ) -> InferenceJob:
730
+ """
731
+ Create a new Physics AI inference job.
732
+ """
733
+ return create_inference_job(
734
+ self.id,
735
+ geometry,
736
+ model_version_id,
737
+ synchronous,
738
+ conditions,
739
+ settings,
740
+ surfaces,
741
+ inference_fields,
742
+ per_surface_visualizations,
743
+ merged_visualizations,
744
+ )
745
+
746
+ def get_inference_job(self, job_id: PhysicsAiInferenceJobID) -> InferenceJob:
747
+ """
748
+ Get a Physics AI inference job by its ID.
749
+ """
750
+ return get_inference_job(job_id)
751
+
752
+ def list_inference_jobs(self) -> list[InferenceJob]:
753
+ """
754
+ List all inference jobs for the project.
755
+ """
756
+ return list_inference_jobs(self.id)
757
+
704
758
 
705
759
  def add_named_variables_from_csv(project: Project, csv_path: str) -> list[NamedVariableSet]:
706
760
  """
@@ -22,6 +22,7 @@ from .enum import (
22
22
  SimulationStatus,
23
23
  Vector3Component,
24
24
  )
25
+ from .outputs.stopping_conditions import StoppingConditionStatusResult
25
26
  from .simulation_param import SimulationParam
26
27
  from .reference_values import ReferenceValues
27
28
  from .simulation_param import SimulationParam
@@ -324,6 +325,30 @@ class Simulation(ProtoWrapperBase):
324
325
  result = _get_workflow_ids([self.id])
325
326
  return result.get(self.id)
326
327
 
328
+ def get_stopping_condition_status(self) -> StoppingConditionStatusResult:
329
+ """
330
+ Retrieves the stopping condition status for a completed simulation.
331
+
332
+ This evaluates the stopping conditions defined in the simulation parameters
333
+ against the final simulation results to determine which conditions were satisfied.
334
+
335
+ Returns
336
+ -------
337
+ StoppingConditionStatusResult
338
+ The stopping condition status containing:
339
+ - overall_success: Whether the overall stopping criteria were met
340
+ - force_stopped: Whether a force-stop condition was triggered
341
+ - condition_results: Results for each individual condition (output name, threshold, value, satisfied)
342
+
343
+ Raises
344
+ ------
345
+ SDKException
346
+ If the simulation has not completed or the status cannot be retrieved.
347
+ """
348
+ req = simulationpb.GetStoppingConditionStatusRequest(id=self.id)
349
+ res = get_default_client().GetStoppingConditionStatus(req)
350
+ return StoppingConditionStatusResult._from_proto(res)
351
+
327
352
  @deprecated(
328
353
  "Use get_parameters() instead. This method will be removed in a future release.",
329
354
  )
@@ -7,6 +7,8 @@ from .ids import (
7
7
  SimulationTemplateID as SimulationTemplateID,
8
8
  GeometryFeatureID as GeometryFeatureID,
9
9
  NamedVariableSetID as NamedVariableSetID,
10
+ PhysicsAiInferenceJobID as PhysicsAiInferenceJobID,
11
+ PhysicsAiModelVersionID as PhysicsAiModelVersionID,
10
12
  )
11
13
 
12
14
  from .adfloat import (
@@ -11,5 +11,7 @@ GeometryFeatureID = NewType("GeometryFeatureID", str)
11
11
  NamedVariableSetID = NewType("NamedVariableSetID", str)
12
12
  PhysicsAiArchitectureID = NewType("PhysicsAiArchitectureID", str)
13
13
  PhysicsAiArchitectureVersionID = NewType("PhysicsAiArchitectureVersionID", str)
14
+ PhysicsAiInferenceJobID = NewType("PhysicsAiInferenceJobID", str)
14
15
  PhysicsAiModelID = NewType("PhysicsAiModelID", str)
15
16
  PhysicsAiModelVersionID = NewType("PhysicsAiModelVersionID", str)
17
+ PhysicsAiTrainingJobID = NewType("PhysicsAiTrainingJobID", str)
@@ -22,6 +22,7 @@ from .primitives import (
22
22
 
23
23
  from .filters import (
24
24
  Slice as Slice,
25
+ MultiSlice as MultiSlice,
25
26
  PlaneClip as PlaneClip,
26
27
  BoxClip as BoxClip,
27
28
  FixedSizeVectorGlyphs as FixedSizeVectorGlyphs,