luminarycloud 0.22.2__py3-none-any.whl → 0.22.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- luminarycloud/_client/retry_interceptor.py +13 -2
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.py +25 -3
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2.pyi +30 -0
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/named_variable_set/named_variable_set_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +62 -40
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +86 -16
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.py +256 -0
- luminarycloud/_proto/lcstatus/details/geometry/geometry_pb2.pyi +472 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +2 -2
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +34 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/quantity/quantity_pb2.pyi +1 -1
- luminarycloud/physics_ai/__init__.py +8 -0
- luminarycloud/physics_ai/architectures.py +1 -1
- luminarycloud/physics_ai/datasets.py +246 -0
- luminarycloud/pipelines/__init__.py +11 -0
- luminarycloud/pipelines/api.py +61 -0
- luminarycloud/pipelines/core.py +358 -45
- luminarycloud/pipelines/flowables.py +138 -0
- luminarycloud/pipelines/stages.py +7 -31
- {luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/METADATA +1 -1
- {luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/RECORD +26 -22
- {luminarycloud-0.22.2.dist-info → luminarycloud-0.22.3.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
# Copyright 2025 Luminary Cloud, Inc. All Rights Reserved.
|
|
2
|
+
from dataclasses import dataclass, field
|
|
3
|
+
from typing import List, Optional, Union, TYPE_CHECKING
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
|
|
6
|
+
from google.protobuf.struct_pb2 import Struct
|
|
7
|
+
from google.protobuf.json_format import ParseDict
|
|
8
|
+
|
|
9
|
+
from ..enum import QuantityType
|
|
10
|
+
from .._client import get_default_client
|
|
11
|
+
from .._proto.api.v0.luminarycloud.physics_ai import physics_ai_pb2 as physaipb
|
|
12
|
+
from .._wrapper import ProtoWrapper, ProtoWrapperBase
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
from ..solution import Solution
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass(kw_only=True)
class ExportConfig:
    """
    Options controlling how Physics AI dataset cases are exported.

    .. warning:: This feature is experimental and may change or be removed without notice.

    Parameters
    ----------
    exclude_surfaces : List[str], optional
        Raw mesh boundary names to drop from the export.
    exclude_tags : List[str], optional
        Geometry tag names to drop; the server resolves these to mesh
        boundary names based on the geometry tags.
    fill_holes : float, optional
        Size threshold used when filling holes in the surface mesh.
        Default is 0 (holes are left as-is).
    single_precision : bool, optional
        When True, floating point fields are exported in single precision.
        Default is True.
    process_volume : bool, optional
        When True, volume mesh data is included in the export. Default is False.
    surface_fields_to_keep : List[QuantityType], optional
        Surface fields to retain. An empty list keeps every field.
    volume_fields_to_keep : List[QuantityType], optional
        Volume fields to retain. An empty list keeps every field.
    """

    # Boundary names excluded verbatim from the export.
    exclude_surfaces: List[str] = field(default_factory=list)
    # Tag names resolved server-side to boundary names, then excluded.
    exclude_tags: List[str] = field(default_factory=list)
    # 0.0 disables hole filling entirely.
    fill_holes: float = 0.0
    single_precision: bool = True
    process_volume: bool = False
    # Empty lists mean "keep all fields" for both filters below.
    surface_fields_to_keep: List[QuantityType] = field(default_factory=list)
    volume_fields_to_keep: List[QuantityType] = field(default_factory=list)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@dataclass
class DatasetCaseInput:
    """
    One case to be added to a Physics AI dataset.

    .. warning:: This feature is experimental and may change or be removed without notice.

    Parameters
    ----------
    solution : Solution
        The solution this case is built from.
    params : dict, optional
        Physics parameters associated with the case, e.g.
        ``{"alpha": 5.0, "stream_velocity": 50.0}``. They serve as
        conditioning inputs during training.
    """

    # Forward reference: Solution is only imported under TYPE_CHECKING.
    solution: "Solution"
    # Each instance gets its own dict via the factory.
    params: dict = field(default_factory=dict)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
@ProtoWrapper(physaipb.PhysicsAiDatasetCase)
class PhysicsAiDatasetCase(ProtoWrapperBase):
    """
    Represents a case in a Physics AI dataset.

    Wraps a ``physaipb.PhysicsAiDatasetCase`` proto message; the
    ``ProtoWrapper`` decorator presumably maps the annotated attributes
    below onto the corresponding proto fields — confirm against the
    ``ProtoWrapper`` implementation in ``.._wrapper``.

    .. warning:: This feature is experimental and may change or be removed without notice.
    """

    # Identifier of this case within its dataset.
    case_id: str
    # Identifier of the solution the case was built from.
    solution_id: str
    # Identifier of the simulation that produced the solution.
    simulation_id: str
    # The underlying proto message backing this wrapper.
    _proto: physaipb.PhysicsAiDatasetCase
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@ProtoWrapper(physaipb.PhysicsAiDataset)
class PhysicsAiDataset(ProtoWrapperBase):
    """
    Represents a Physics AI dataset containing training cases.

    Wraps a ``physaipb.PhysicsAiDataset`` proto message; instances are
    constructed directly from proto messages returned by the API (see
    ``create_dataset`` and ``list_datasets``). The ``ProtoWrapper``
    decorator presumably maps the annotated attributes below onto the
    corresponding proto fields — confirm against ``.._wrapper``.

    .. warning:: This feature is experimental and may change or be removed without notice.
    """

    # Unique identifier of the dataset.
    id: str
    # Identity of the user that created the dataset.
    created_by: str
    name: str
    description: str
    # Whether the dataset is locked against modification.
    is_locked: bool
    creation_time: datetime
    update_time: datetime
    # The underlying proto message backing this wrapper.
    _proto: physaipb.PhysicsAiDataset
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def create_dataset(
    name: str,
    cases: List[Union["Solution", DatasetCaseInput]],
    export_config: Optional[ExportConfig] = None,
    description: str = "",
    parameter_schema: Optional[dict] = None,
) -> PhysicsAiDataset:
    """
    Create a Physics AI dataset from simulation solutions.

    .. warning:: This feature is experimental and may change or be removed without notice.

    Parameters
    ----------
    name : str
        Name of the dataset.
    cases : List[Union[Solution, DatasetCaseInput]]
        Solutions or ``DatasetCaseInput`` objects. Pass ``Solution`` objects
        directly for simple cases; wrap in ``DatasetCaseInput`` when a case
        carries physics parameters (e.g. alpha, stream_velocity).
    export_config : ExportConfig, optional
        Export configuration; defaults to ``ExportConfig()``
        (single_precision=True) when omitted.
    description : str, optional
        Description of the dataset.
    parameter_schema : dict, optional
        JSON schema for case parameters.

    Returns
    -------
    PhysicsAiDataset
        The created dataset.

    Examples
    --------
    Simple usage with solutions (uses default export config):

    >>> dataset = create_dataset(
    ...     name="my-dataset",
    ...     cases=[solution1, solution2, solution3],
    ... )

    With custom export config:

    >>> dataset = create_dataset(
    ...     name="my-dataset",
    ...     cases=[solution1, solution2, solution3],
    ...     export_config=ExportConfig(
    ...         exclude_tags=["Farfield", "Symmetry"],
    ...         surface_fields_to_keep=[QuantityType.PRESSURE, QuantityType.WALL_SHEAR_STRESS],
    ...     ),
    ... )

    With physics parameters:

    >>> dataset = create_dataset(
    ...     name="parametric-dataset",
    ...     cases=[
    ...         DatasetCaseInput(solution1, params={"alpha": 5.0}),
    ...         DatasetCaseInput(solution2, params={"alpha": 10.0}),
    ...     ],
    ...     export_config=ExportConfig(process_volume=True),
    ... )
    """
    # Fall back to the default export configuration.
    cfg = export_config if export_config is not None else ExportConfig()

    # Normalize every entry to (solution, params) and build the case protos.
    case_protos = []
    for entry in cases:
        if isinstance(entry, DatasetCaseInput):
            sol, sol_params = entry.solution, entry.params
        else:
            # A bare Solution object: no physics parameters.
            sol, sol_params = entry, {}

        proto = physaipb.CreatePhysicsAiDatasetCaseInput(
            solution_id=sol.id,
            simulation_id=sol.simulation_id,
        )
        if sol_params:
            proto.params.CopyFrom(ParseDict(sol_params, Struct()))
        case_protos.append(proto)

    # Translate the ExportConfig dataclass into its request proto. Passing
    # empty lists for the repeated fields is equivalent to omitting them.
    export_proto = physaipb.GetSolutionDataPhysicsAIRequest(
        exclude_surfaces=cfg.exclude_surfaces,
        exclude_tags=cfg.exclude_tags,
        fill_holes=cfg.fill_holes,
        single_precision=cfg.single_precision,
        process_volume=cfg.process_volume,
        surface_fields_to_keep=cfg.surface_fields_to_keep,
        volume_fields_to_keep=cfg.volume_fields_to_keep,
    )

    # An omitted schema becomes an empty Struct.
    schema_struct = ParseDict(parameter_schema, Struct()) if parameter_schema else Struct()

    request = physaipb.CreateDatasetRequest(
        name=name,
        description=description,
        cases=case_protos,
        parameter_schema=schema_struct,
        export_config=export_proto,
    )

    response = get_default_client().CreateDataset(request)
    return PhysicsAiDataset(response.dataset)
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def list_datasets() -> List[PhysicsAiDataset]:
    """
    List Physics AI datasets accessible to the current user.

    The result contains datasets created by the current user plus
    platform-curated datasets.

    .. warning:: This feature is experimental and may change or be removed without notice.

    Returns
    -------
    List[PhysicsAiDataset]
        A list of accessible Physics AI datasets, ordered by creation time (newest first).

    Examples
    --------
    List all datasets:

    >>> datasets = list_datasets()
    >>> for ds in datasets:
    ...     print(f"{ds.name}: {ds.id} (locked={ds.is_locked})")
    """
    response = get_default_client().ListDatasets(physaipb.ListDatasetsRequest())
    return [PhysicsAiDataset(ds) for ds in response.datasets]
|
|
@@ -2,8 +2,12 @@
|
|
|
2
2
|
from .core import (
|
|
3
3
|
Pipeline as Pipeline,
|
|
4
4
|
PipelineParameter as PipelineParameter,
|
|
5
|
+
stage as stage,
|
|
5
6
|
# Stage base class, mainly exported for testing
|
|
6
7
|
Stage as Stage,
|
|
8
|
+
# RunScript stage lives in core because it's a special snowflake
|
|
9
|
+
RunScript as RunScript,
|
|
10
|
+
StopRun as StopRun,
|
|
7
11
|
)
|
|
8
12
|
|
|
9
13
|
from .parameters import (
|
|
@@ -36,6 +40,11 @@ from .arguments import (
|
|
|
36
40
|
ArgNamedVariableSet as ArgNamedVariableSet,
|
|
37
41
|
)
|
|
38
42
|
|
|
43
|
+
from .flowables import (
|
|
44
|
+
FlowableType as FlowableType,
|
|
45
|
+
FlowableIOSchema as FlowableIOSchema,
|
|
46
|
+
)
|
|
47
|
+
|
|
39
48
|
from .api import (
|
|
40
49
|
create_pipeline as create_pipeline,
|
|
41
50
|
list_pipelines as list_pipelines,
|
|
@@ -46,5 +55,7 @@ from .api import (
|
|
|
46
55
|
PipelineJobRecord as PipelineJobRecord,
|
|
47
56
|
PipelineRecord as PipelineRecord,
|
|
48
57
|
PipelineJobRunRecord as PipelineJobRunRecord,
|
|
58
|
+
PipelineTaskRecord as PipelineTaskRecord,
|
|
59
|
+
StageDefinition as StageDefinition,
|
|
49
60
|
LogLine as LogLine,
|
|
50
61
|
)
|
luminarycloud/pipelines/api.py
CHANGED
|
@@ -30,6 +30,50 @@ class LogLine:
|
|
|
30
30
|
)
|
|
31
31
|
|
|
32
32
|
|
|
33
|
+
@dataclass
class StageDefinition:
    """
    A single stage definition belonging to a pipeline.
    """

    # Unique identifier of the stage within the pipeline.
    id: str
    # Human-readable stage name.
    name: str
    # Type discriminator for the stage (e.g. which operation it runs).
    stage_type: str

    @classmethod
    def from_json(cls, json: dict) -> "StageDefinition":
        """Build a StageDefinition from its REST JSON representation."""
        return cls(id=json["id"], name=json["name"], stage_type=json["stage_type"])
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@dataclass
class PipelineTaskRecord:
    """
    A PipelineTaskRecord represents a task within a pipeline job run.
    """

    # Lifecycle state of the task.
    status: Literal["pending", "running", "completed", "failed", "upstream_failed", "cancelled"]
    # Artifacts produced by the task, keyed by name.
    artifacts: dict[str, dict]
    # The stage this task executed, when the server reports one.
    stage: StageDefinition | None
    created_at: datetime
    updated_at: datetime
    # Error details, if any were reported.
    error_messages: list[str] | None

    @classmethod
    def from_json(cls, json: dict) -> "PipelineTaskRecord":
        """Build a PipelineTaskRecord from its REST JSON representation."""
        raw_stage = json.get("stage")
        return cls(
            status=json["status"],
            artifacts=json["artifacts"],
            stage=StageDefinition.from_json(raw_stage) if raw_stage else None,
            created_at=parse_iso_datetime(json["created_at"]),
            updated_at=parse_iso_datetime(json["updated_at"]),
            error_messages=json.get("error_messages"),
        )
|
|
75
|
+
|
|
76
|
+
|
|
33
77
|
@dataclass
|
|
34
78
|
class PipelineRecord:
|
|
35
79
|
"""
|
|
@@ -414,6 +458,23 @@ class PipelineJobRunRecord:
|
|
|
414
458
|
)
|
|
415
459
|
return res["data"]
|
|
416
460
|
|
|
461
|
+
def tasks(self) -> list[PipelineTaskRecord]:
    """
    Fetch the tasks belonging to this pipeline job run.

    Each task corresponds to one execution of a pipeline stage and carries
    its own status, artifacts, and stage information.

    Returns
    -------
    list[PipelineTaskRecord]
        The tasks associated with this pipeline job run.
    """
    payload = get_default_client().http.get(
        f"/rest/v0/pipeline_jobs/{self.pipeline_job_id}/runs/{self.idx}/tasks"
    )
    return [PipelineTaskRecord.from_json(item) for item in payload["data"]]
|
|
477
|
+
|
|
417
478
|
|
|
418
479
|
def create_pipeline(
|
|
419
480
|
name: str, pipeline: Pipeline | str, description: str | None = None
|