luminarycloud 0.18.1__py3-none-any.whl → 0.19.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- luminarycloud/_client/client.py +21 -5
- luminarycloud/_client/http_client.py +168 -0
- luminarycloud/_client/rpc_error.py +1 -0
- luminarycloud/_client/tracing.py +72 -22
- luminarycloud/_helpers/_wait_for_mesh.py +5 -7
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +8 -8
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +9 -8
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +70 -40
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +64 -3
- luminarycloud/_proto/client/simulation_pb2.py +273 -269
- luminarycloud/_proto/client/simulation_pb2.pyi +18 -6
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +10 -10
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi +9 -8
- luminarycloud/enum/quantity_type.py +13 -0
- luminarycloud/exceptions.py +6 -0
- luminarycloud/params/enum/_enum_wrappers.py +3 -2
- luminarycloud/params/simulation/sliding_interfaces_.py +8 -0
- luminarycloud/physics_ai/inference.py +30 -25
- luminarycloud/pipelines/api.py +49 -102
- luminarycloud/project.py +15 -43
- luminarycloud/tables.py +14 -15
- luminarycloud/vis/visualization.py +2 -2
- {luminarycloud-0.18.1.dist-info → luminarycloud-0.19.0.dist-info}/METADATA +1 -1
- {luminarycloud-0.18.1.dist-info → luminarycloud-0.19.0.dist-info}/RECORD +25 -29
- luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2.py +0 -246
- luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2.pyi +0 -420
- luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2_grpc.py +0 -240
- luminarycloud/_proto/api/v0/luminarycloud/pipelines/pipelines_pb2_grpc.pyi +0 -90
- luminarycloud/enum/pipeline_job_status.py +0 -23
- {luminarycloud-0.18.1.dist-info → luminarycloud-0.19.0.dist-info}/WHEEL +0 -0
luminarycloud/_proto/client/simulation_pb2.pyi
CHANGED

@@ -1515,8 +1515,9 @@ class _InterfaceTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._
  DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
  INVALID_INTERFACE_TYPE: _InterfaceType.ValueType # 0
  GENERAL_INTERFACE: _InterfaceType.ValueType # 53698
- """Automatic treatment based on geometry and settings
- interface,
+ """Automatic treatment based on geometry and settings. General
+ interface, frozen rotor, or sliding interface for fluids. General interface
+ with contact resistance for solids.
  """
  MIXING_PLANE_INTERFACE: _InterfaceType.ValueType # 44970
  """Imposes a pitchwise average of the variables on either side of the interface."""

@@ -1525,8 +1526,9 @@ class InterfaceType(_InterfaceType, metaclass=_InterfaceTypeEnumTypeWrapper): ..

  INVALID_INTERFACE_TYPE: InterfaceType.ValueType # 0
  GENERAL_INTERFACE: InterfaceType.ValueType # 53698
- """Automatic treatment based on geometry and settings
- interface,
+ """Automatic treatment based on geometry and settings. General
+ interface, frozen rotor, or sliding interface for fluids. General interface
+ with contact resistance for solids.
  """
  MIXING_PLANE_INTERFACE: InterfaceType.ValueType # 44970
  """Imposes a pitchwise average of the variables on either side of the interface."""

@@ -4303,6 +4305,8 @@ class SlidingInterfaces(google.protobuf.message.Message):
  SLIDING_MATCHING_PERIODIC_ROTATION_ANGLES_FIELD_NUMBER: builtins.int
  SLIDING_MATCHING_PERIODIC_CENTER_OF_ROTATION_FIELD_NUMBER: builtins.int
  INTERFACE_TYPE_FIELD_NUMBER: builtins.int
+ THERMAL_INTERFACE_MATERIAL_THICKNESS_FIELD_NUMBER: builtins.int
+ THERMAL_INTERFACE_MATERIAL_CONDUCTIVITY_FIELD_NUMBER: builtins.int
  sliding_interface_id: builtins.str
  """ID of the sliding interface"""
  sliding_interface_name: builtins.str

@@ -4332,6 +4336,12 @@ class SlidingInterfaces(google.protobuf.message.Message):
  """
  interface_type: global___InterfaceType.ValueType
  """Type of interface treatment"""
+ @property
+ def thermal_interface_material_thickness(self) -> luminarycloud._proto.base.base_pb2.AdFloatType:
+ """Thickness of the thermal interface material (TIM). Zero thickness implies perfect contact."""
+ @property
+ def thermal_interface_material_conductivity(self) -> luminarycloud._proto.base.base_pb2.AdFloatType:
+ """Thermal conductivity of the thermal interface material (TIM)."""
  def __init__(
  self,
  *,

@@ -4343,9 +4353,11 @@ class SlidingInterfaces(google.protobuf.message.Message):
  sliding_matching_periodic_rotation_angles: luminarycloud._proto.base.base_pb2.AdVector3 | None = ...,
  sliding_matching_periodic_center_of_rotation: luminarycloud._proto.base.base_pb2.AdVector3 | None = ...,
  interface_type: global___InterfaceType.ValueType = ...,
+ thermal_interface_material_thickness: luminarycloud._proto.base.base_pb2.AdFloatType | None = ...,
+ thermal_interface_material_conductivity: luminarycloud._proto.base.base_pb2.AdFloatType | None = ...,
  ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["sliding_matching_periodic_center_of_rotation", b"sliding_matching_periodic_center_of_rotation", "sliding_matching_periodic_rotation_angles", b"sliding_matching_periodic_rotation_angles", "sliding_matching_translation_transform", b"sliding_matching_translation_transform"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["interface_type", b"interface_type", "sliding_a", b"sliding_a", "sliding_b", b"sliding_b", "sliding_interface_id", b"sliding_interface_id", "sliding_interface_name", b"sliding_interface_name", "sliding_matching_periodic_center_of_rotation", b"sliding_matching_periodic_center_of_rotation", "sliding_matching_periodic_rotation_angles", b"sliding_matching_periodic_rotation_angles", "sliding_matching_translation_transform", b"sliding_matching_translation_transform"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["sliding_matching_periodic_center_of_rotation", b"sliding_matching_periodic_center_of_rotation", "sliding_matching_periodic_rotation_angles", b"sliding_matching_periodic_rotation_angles", "sliding_matching_translation_transform", b"sliding_matching_translation_transform", "thermal_interface_material_conductivity", b"thermal_interface_material_conductivity", "thermal_interface_material_thickness", b"thermal_interface_material_thickness"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["interface_type", b"interface_type", "sliding_a", b"sliding_a", "sliding_b", b"sliding_b", "sliding_interface_id", b"sliding_interface_id", "sliding_interface_name", b"sliding_interface_name", "sliding_matching_periodic_center_of_rotation", b"sliding_matching_periodic_center_of_rotation", "sliding_matching_periodic_rotation_angles", b"sliding_matching_periodic_rotation_angles", "sliding_matching_translation_transform", b"sliding_matching_translation_transform", "thermal_interface_material_conductivity", b"thermal_interface_material_conductivity", "thermal_interface_material_thickness", b"thermal_interface_material_thickness"]) -> None: ...

  global___SlidingInterfaces = SlidingInterfaces

luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py
CHANGED

@@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default()
  from luminarycloud._proto.base import base_pb2 as proto_dot_base_dot_base__pb2


- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-proto/inferenceservice/inferenceservice.proto\x12\x1fluminary.proto.inferenceservice\x1a\x15proto/base/base.proto\"\
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-proto/inferenceservice/inferenceservice.proto\x12\x1fluminary.proto.inferenceservice\x1a\x15proto/base/base.proto\"\xab\x01\n CreateInferenceServiceJobRequest\x12\x14\n\x0c\x61rtifact_url\x18\x01 \x01(\t\x12\x0f\n\x07stl_url\x18\x02 \x01(\t\x12\x10\n\x08settings\x18\x07 \x01(\x0c\x12\x12\n\nconditions\x18\x03 \x01(\x0c\x12\x12\n\nproject_id\x18\x05 \x01(\t\x12 \n\x18write_visualization_data\x18\x06 \x01(\x08J\x04\x08\x04\x10\x05\"5\n!CreateInferenceServiceJobResponse\x12\x10\n\x08response\x18\x01 \x01(\x0c\"*\n\x1bPingInferenceServiceRequest\x12\x0b\n\x03msg\x18\x01 \x01(\t\"+\n\x1cPingInferenceServiceResponse\x12\x0b\n\x03msg\x18\x01 \x01(\t2\xcd\x02\n\x10InferenceService\x12\xa2\x01\n\x19\x43reateInferenceServiceJob\x12\x41.luminary.proto.inferenceservice.CreateInferenceServiceJobRequest\x1a\x42.luminary.proto.inferenceservice.CreateInferenceServiceJobResponse\x12\x93\x01\n\x14PingInferenceService\x12<.luminary.proto.inferenceservice.PingInferenceServiceRequest\x1a=.luminary.proto.inferenceservice.PingInferenceServiceResponseB/Z-luminarycloud.com/core/proto/inferenceserviceb\x06proto3')



@@ -57,13 +57,13 @@ if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'Z-luminarycloud.com/core/proto/inferenceservice'
  _CREATEINFERENCESERVICEJOBREQUEST._serialized_start=106
- _CREATEINFERENCESERVICEJOBREQUEST._serialized_end=
- _CREATEINFERENCESERVICEJOBRESPONSE._serialized_start=
- _CREATEINFERENCESERVICEJOBRESPONSE._serialized_end=
- _PINGINFERENCESERVICEREQUEST._serialized_start=
- _PINGINFERENCESERVICEREQUEST._serialized_end=
- _PINGINFERENCESERVICERESPONSE._serialized_start=
- _PINGINFERENCESERVICERESPONSE._serialized_end=
- _INFERENCESERVICE._serialized_start=
- _INFERENCESERVICE._serialized_end=
+ _CREATEINFERENCESERVICEJOBREQUEST._serialized_end=277
+ _CREATEINFERENCESERVICEJOBRESPONSE._serialized_start=279
+ _CREATEINFERENCESERVICEJOBRESPONSE._serialized_end=332
+ _PINGINFERENCESERVICEREQUEST._serialized_start=334
+ _PINGINFERENCESERVICEREQUEST._serialized_end=376
+ _PINGINFERENCESERVICERESPONSE._serialized_start=378
+ _PINGINFERENCESERVICERESPONSE._serialized_end=421
+ _INFERENCESERVICE._serialized_start=424
+ _INFERENCESERVICE._serialized_end=757
  # @@protoc_insertion_point(module_scope)
luminarycloud/_proto/inferenceservice/inferenceservice_pb2.pyi
CHANGED

@@ -19,16 +19,17 @@ class CreateInferenceServiceJobRequest(google.protobuf.message.Message):

  ARTIFACT_URL_FIELD_NUMBER: builtins.int
  STL_URL_FIELD_NUMBER: builtins.int
-
-
+ SETTINGS_FIELD_NUMBER: builtins.int
+ CONDITIONS_FIELD_NUMBER: builtins.int
  PROJECT_ID_FIELD_NUMBER: builtins.int
  WRITE_VISUALIZATION_DATA_FIELD_NUMBER: builtins.int
  artifact_url: builtins.str
  """Eventually should be a model version id"""
  stl_url: builtins.str
-
- """JSON encoded
-
+ settings: builtins.bytes
+ """JSON encoded settings, like stencil_size."""
+ conditions: builtins.bytes
+ """JSON encoded conditions, like alpha, beta, etc."""
  project_id: builtins.str
  write_visualization_data: builtins.bool
  def __init__(

@@ -36,12 +37,12 @@ class CreateInferenceServiceJobRequest(google.protobuf.message.Message):
  *,
  artifact_url: builtins.str = ...,
  stl_url: builtins.str = ...,
-
-
+ settings: builtins.bytes = ...,
+ conditions: builtins.bytes = ...,
  project_id: builtins.str = ...,
  write_visualization_data: builtins.bool = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["artifact_url", b"artifact_url", "
+ def ClearField(self, field_name: typing_extensions.Literal["artifact_url", b"artifact_url", "conditions", b"conditions", "project_id", b"project_id", "settings", b"settings", "stl_url", b"stl_url", "write_visualization_data", b"write_visualization_data"]) -> None: ...

  global___CreateInferenceServiceJobRequest = CreateInferenceServiceJobRequest

luminarycloud/enum/quantity_type.py
CHANGED

@@ -174,6 +174,19 @@ class QuantityType(IntEnum):
  NORMAL_SENSITIVITY = quantitypb.NORMAL_SENSITIVITY
  SMOOTHED_NORMAL_SENSITIVITY = quantitypb.SMOOTHED_NORMAL_SENSITIVITY

+ # Quantities needed for table upload
+ LENGTH = quantitypb.LENGTH
+ RELATIVE_RADIUS = quantitypb.RELATIVE_RADIUS
+ THRUST_PROFILE = quantitypb.THRUST_PROFILE
+ TORQUE_PROFILE = quantitypb.TORQUE_PROFILE
+ RADIAL_FORCE_PROFILE = quantitypb.RADIAL_FORCE_PROFILE
+ TWIST_ANGLE = quantitypb.TWIST_ANGLE
+ SWEEP_ANGLE = quantitypb.SWEEP_ANGLE
+ ANHEDRAL_ANGLE = quantitypb.ANHEDRAL_ANGLE
+ RELATIVE_CHORD = quantitypb.RELATIVE_CHORD
+ PRESSURE_RISE = quantitypb.PRESSURE_RISE
+ VOLUME_FLOW_RATE = quantitypb.VOLUME_FLOW_RATE
+
  @classmethod
  def _is_average(cls, quantity: "QuantityType") -> bool:
  return quantity._has_tag(quantityoptspb.TAG_ANALYZER_AVERAGE)
luminarycloud/exceptions.py
CHANGED

@@ -67,3 +67,9 @@ class FailedPreconditionError(RpcError):
  """Raised when the resource is not in the correct state to perform the operation."""

  pass
+
+
+ class DeadlineExceededError(RpcError):
+ """Raised when the deadline expired before the operation could complete. I.e. it timed out."""
+
+ pass
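DeadlineExceededError extends the same RpcError hierarchy as the other wrappers in exceptions.py, so callers can handle timeouts separately from other RPC failures. A hedged sketch; the simulation call is a placeholder, not part of this diff:

from luminarycloud.exceptions import DeadlineExceededError, RpcError

try:
    simulation.wait()  # placeholder for any SDK call that can exceed its RPC deadline
except DeadlineExceededError:
    print("operation timed out; consider retrying or raising the timeout")
except RpcError:
    raise  # other RPC failures still propagate through the shared base class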
luminarycloud/params/enum/_enum_wrappers.py
CHANGED

@@ -1192,8 +1192,9 @@ class InterfaceType(_IntEnum):
  Attributes
  ----------
  GENERAL_INTERFACE
- Automatic treatment based on geometry and settings
- interface,
+ Automatic treatment based on geometry and settings. General
+ interface, frozen rotor, or sliding interface for fluids. General interface
+ with contact resistance for solids.
  MIXING_PLANE_INTERFACE
  Imposes a pitchwise average of the variables on either side of the interface.

luminarycloud/params/simulation/sliding_interfaces_.py
CHANGED

@@ -32,6 +32,10 @@ class SlidingInterfaces(CodeRepr, ParamGroupWrapper[clientpb.SlidingInterfaces])
  "Names of the surfaces of side B of the sliding interface."
  interface_type: enum.InterfaceType = enum.InterfaceType.GENERAL_INTERFACE
  "Type of interface treatment."
+ tim_thickness: LcFloat = 0
+ "Thickness of the thermal interface material (TIM). Zero thickness implies perfect contact."
+ tim_conductivity: LcFloat = 5
+ "Thermal conductivity of the thermal interface material (TIM)."

  def _to_proto(self) -> clientpb.SlidingInterfaces:
  _proto = clientpb.SlidingInterfaces()

@@ -44,6 +48,8 @@ class SlidingInterfaces(CodeRepr, ParamGroupWrapper[clientpb.SlidingInterfaces])
  if self.surfaces_side_b is not None:
  _proto.sliding_b.extend(self.surfaces_side_b)
  _proto.interface_type = self.interface_type.value
+ _proto.thermal_interface_material_thickness.CopyFrom(_to_ad_proto(self.tim_thickness))
+ _proto.thermal_interface_material_conductivity.CopyFrom(_to_ad_proto(self.tim_conductivity))
  return _proto

  def _from_proto(self, proto: clientpb.SlidingInterfaces) -> None:

@@ -52,4 +58,6 @@ class SlidingInterfaces(CodeRepr, ParamGroupWrapper[clientpb.SlidingInterfaces])
  self.surfaces_side_a.extend(proto.sliding_a)
  self.surfaces_side_b.extend(proto.sliding_b)
  self.interface_type = enum.InterfaceType(proto.interface_type)
+ self.tim_thickness = _from_ad_proto(proto.thermal_interface_material_thickness)
+ self.tim_conductivity = _from_ad_proto(proto.thermal_interface_material_conductivity)
  return None
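On the wrapper side, the two TIM parameters now round-trip through _to_proto/_from_proto next to the existing interface settings. A minimal configuration sketch; the import paths, surface names, and units are assumptions, while the defaults mirror the new fields (thickness 0, conductivity 5):

from luminarycloud.params import enum                          # assumed location of the public InterfaceType enum
from luminarycloud.params.simulation import SlidingInterfaces  # assumed re-export of sliding_interfaces_.py

iface = SlidingInterfaces()
iface.interface_type = enum.InterfaceType.GENERAL_INTERFACE
iface.surfaces_side_a = ["rotor/iface"]    # placeholder surface names
iface.surfaces_side_b = ["stator/iface"]
iface.tim_thickness = 1e-4                 # leaving this at 0 implies perfect contact
iface.tim_conductivity = 3.0               # TIM conductivity; units assumed to follow the solver's convention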
luminarycloud/physics_ai/inference.py
CHANGED

@@ -51,8 +51,8 @@ def external_aero_inference(
  project: Project,
  stl_file: str,
  artifact_url: str,
-
-
+ conditions: Optional[Dict[str, Any]] = None,
+ settings: Optional[Dict[str, Any]] = None,
  write_visualization_data=False,
  ) -> ExtAeroInferenceResult:
  """Performs an inference job returning external aerodynamic results.

@@ -64,10 +64,10 @@
  Fullpath the STL file to be used for inference.
  artifact_url : str
  Fullpath of the model artifact directory to be used for inference.
-
- Dictionary of
-
-
+ conditions : Dict[str, Any], optional
+ Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
+ settings : Dict[str, Any], optional
+ Dictionary of settings to be passed to inference service (e.g., stencil_size)
  write_visualization_data : bool, optional
  Whether to write LC visualization data for visualization by Luminary.

@@ -80,7 +80,7 @@
  """

  result = perform_inference(
- project, stl_file, artifact_url,
+ project, stl_file, artifact_url, conditions, settings, write_visualization_data
  )
  return ExtAeroInferenceResult(result)

@@ -89,8 +89,8 @@ def perform_inference(
  project: Project,
  stl_file: str,
  artifact_url: str,
-
-
+ conditions: Optional[Dict[str, Any]] = None,
+ settings: Optional[Dict[str, Any]] = None,
  write_visualization_data=False,
  ) -> dict[str, Any]:
  """Creates an inference service job.

@@ -102,10 +102,10 @@
  Fullpath the STL file to be used for inference.
  artifact_url : str
  Fullpath of the model artifact directory to be used for inference.
-
- Dictionary of
-
-
+ conditions : Dict[str, Any], optional
+ Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
+ settings : Dict[str, Any], optional
+ Dictionary of settings to be passed to inference service (e.g., stencil_size)
  write_visualization_data : bool, optional
  Whether to write LC visualization data for visualization by Luminary.

@@ -142,7 +142,7 @@
  stl_url = upload_if_file(stl_file)

  raw = start_inference_job(
- project, stl_url, artifact_url,
+ project, stl_url, artifact_url, conditions, settings, write_visualization_data
  )
  currated: dict[str, Any] = {}
  for k, v in raw.items():

@@ -163,8 +163,8 @@
  project: Project,
  stl_url: str,
  artifact_url: str,
-
-
+ conditions: Optional[Dict[str, Any]] = None,
+ settings: Optional[Dict[str, Any]] = None,
  write_visualization_data=False,
  ) -> dict[str, Any]:
  """Creates an inference service job.

@@ -176,10 +176,10 @@
  URL of the STL file to be used for inference.
  artifact_url : str
  URL of the model artifact directory to be used for inference.
-
- Dictionary of
-
-
+ conditions : Dict[str, Any], optional
+ Dictionary of conditions to be passed to the inference service (e.g., alpha, beta, etc.).
+ settings : Dict[str, Any], optional
+ Dictionary of settings to be passed to inference service (e.g., stencil_size)
  write_visualization_data : bool, optional
  Whether to write LC visualization data for visualization by Luminary.

@@ -191,16 +191,21 @@
  warning:: This feature is experimental and may change or be removed without notice.
  """

+ # Embed settings and store as bytes
+ settings_bytes = b""
+ if settings is not None:
+ settings_bytes = json_dumps(settings).encode("utf-8")
+
  # Convert parameters dict to bytes if provided
-
- if
-
+ conditions_bytes = b""
+ if conditions is not None:
+ conditions_bytes = json_dumps(conditions).encode("utf-8")

  req = inferencepb.CreateInferenceServiceJobRequest(
  stl_url=stl_url,
  artifact_url=artifact_url,
-
-
+ conditions=conditions_bytes,
+ settings=settings_bytes,
  project_id=project.id,
  write_visualization_data=write_visualization_data,
  )
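With this change the physics-AI helpers accept plain dicts that are JSON-encoded into the request's conditions and settings bytes fields. A hedged call sketch; the paths, URL, and any dict keys beyond alpha/beta and stencil_size are placeholders:

from luminarycloud.physics_ai.inference import external_aero_inference  # module path as in this diff

result = external_aero_inference(
    project,                                      # an existing luminarycloud Project
    stl_file="/path/to/geometry.stl",             # placeholder path; uploaded if it is a local file
    artifact_url="gs://bucket/model-artifacts/",  # placeholder model artifact location
    conditions={"alpha": 2.5, "beta": 0.0},       # JSON-encoded into the `conditions` bytes field
    settings={"stencil_size": 8},                 # JSON-encoded into the `settings` bytes field
    write_visualization_data=False,
)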
luminarycloud/pipelines/api.py
CHANGED

@@ -1,14 +1,11 @@
  # Copyright 2023-2024 Luminary Cloud, Inc. All Rights Reserved.
+ from typing import Literal
  from dataclasses import dataclass

  from datetime import datetime

- from luminarycloud._helpers import timestamp_to_datetime
-
- from ..enum.pipeline_job_status import PipelineJobStatus
  from ..pipelines import Pipeline, PipelineArgs
  from .._client import get_default_client
- from .._proto.api.v0.luminarycloud.pipelines import pipelines_pb2 as pipelinespb


  @dataclass

@@ -17,21 +14,21 @@ class PipelineRecord:
  name: str
  description: str | None
  definition_yaml: str
-
-
+ created_at: datetime
+ updated_at: datetime

  def pipeline(self) -> Pipeline:
  return Pipeline._from_yaml(self.definition_yaml)

  @classmethod
- def
+ def from_json(cls, json: dict) -> "PipelineRecord":
  return cls(
- id=
- name=
- description=
- definition_yaml=
-
-
+ id=json["id"],
+ name=json["name"],
+ description=json["description"],
+ definition_yaml=json["definition_yaml"],
+ created_at=datetime.fromisoformat(json["created_at"]),
+ updated_at=datetime.fromisoformat(json["updated_at"]),
  )


@@ -42,30 +39,26 @@ class PipelineJobRecord:
  project_id: str
  name: str
  description: str | None
- status:
-
-
+ status: Literal["pending", "running", "completed", "failed", "cancelled"]
+ created_at: datetime
+ updated_at: datetime
  started_at: datetime | None
  completed_at: datetime | None

  @classmethod
- def
+ def from_json(cls, json: dict) -> "PipelineJobRecord":
  return cls(
- id=
- pipeline_id=
- project_id=
- name=
- description=
- status=
-
-
- started_at=(
- timestamp_to_datetime(proto.started_at) if proto.HasField("started_at") else None
- ),
+ id=json["id"],
+ pipeline_id=json["pipeline_id"],
+ project_id=json["project_id"],
+ name=json["name"],
+ description=json["description"],
+ status=json["status"],
+ created_at=datetime.fromisoformat(json["created_at"]),
+ updated_at=datetime.fromisoformat(json["updated_at"]),
+ started_at=datetime.fromisoformat(json["started_at"]) if json["started_at"] else None,
  completed_at=(
-
- if proto.HasField("completed_at")
- else None
+ datetime.fromisoformat(json["completed_at"]) if json["completed_at"] else None
  ),
  )


@@ -89,20 +82,21 @@ def create_pipeline(
  definition_yaml = pipeline.to_yaml()
  else:
  definition_yaml = pipeline
-
- name
-
-
-
+ body = {
+ "name": name,
+ "definition_yaml": definition_yaml,
+ "description": description,
+ }
+ res = get_default_client().http.post("/rest/v0/pipelines", body)
+ return PipelineRecord.from_json(res)


  def list_pipelines() -> list[PipelineRecord]:
  """
  List all pipelines.
  """
-
-
- return [PipelineRecord.from_proto(p) for p in res.pipelines]
+ res = get_default_client().http.get("/rest/v0/pipelines")
+ return [PipelineRecord.from_json(p) for p in res]


  def get_pipeline(id: str) -> PipelineRecord:

@@ -114,9 +108,8 @@ def get_pipeline(id: str) -> PipelineRecord:
  id : str
  ID of the pipeline to fetch.
  """
-
-
- return PipelineRecord.from_proto(res.pipeline)
+ res = get_default_client().http.get(f"/rest/v0/pipelines/{id}")
+ return PipelineRecord.from_json(res)


  def create_pipeline_job(

@@ -139,75 +132,29 @@ def create_pipeline_job(
  Description of the pipeline job.
  """

-
-
-
-
-
-
-
-
-
-
- pipelinespb.PipelineJobArgsColumn(
- string_column=pipelinespb.PipelineJobArgsColumn.StringColumn(
- name=param.name,
- values=col_values[i],
- )
- )
- )
- elif param._represented_type() == int:
- cols.append(
- pipelinespb.PipelineJobArgsColumn(
- int_column=pipelinespb.PipelineJobArgsColumn.IntColumn(
- name=param.name,
- values=col_values[i],
- )
- )
- )
- elif param._represented_type() == float:
- cols.append(
- pipelinespb.PipelineJobArgsColumn(
- double_column=pipelinespb.PipelineJobArgsColumn.DoubleColumn(
- name=param.name,
- values=col_values[i],
- )
- )
- )
- elif param._represented_type() == bool:
- cols.append(
- pipelinespb.PipelineJobArgsColumn(
- bool_column=pipelinespb.PipelineJobArgsColumn.BoolColumn(
- name=param.name,
- values=col_values[i],
- )
- )
- )
-
- req = pipelinespb.CreatePipelineJobRequest(
- pipeline_id=pipeline_id,
- args_columns=cols,
- name=name,
- description=description,
- project_id=project_id,
- )
- res: pipelinespb.CreatePipelineJobResponse = get_default_client().CreatePipelineJob(req)
- return PipelineJobRecord.from_proto(res.pipeline_job)
+ arg_rows = [row.row_values for row in args.rows]
+ body = {
+ "name": name,
+ "project_id": project_id,
+ "argument_names": [p.name for p in args.params],
+ "argument_rows": arg_rows,
+ }
+
+ res = get_default_client().http.post(f"/rest/v0/pipelines/{pipeline_id}/pipeline_jobs", body)
+ return PipelineJobRecord.from_json(res)


  def get_pipeline_job(id: str) -> PipelineJobRecord:
  """
  Get a pipeline job by ID.
  """
-
-
- return PipelineJobRecord.from_proto(res.pipeline_job)
+ res = get_default_client().http.get(f"/rest/v0/pipeline_jobs/{id}")
+ return PipelineJobRecord.from_json(res)


  def list_pipeline_jobs() -> list[PipelineJobRecord]:
  """
  List all pipeline jobs.
  """
-
-
- return [PipelineJobRecord.from_proto(p) for p in res.pipeline_jobs]
+ res = get_default_client().http.get("/rest/v0/pipeline_jobs")
+ return [PipelineJobRecord.from_json(p) for p in res]
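After this change the pipelines helpers call REST endpoints through the client's new http attribute (see the added http_client.py) and build records with from_json instead of the removed pipelines proto types. A hedged sketch of the read path; the job ID is a placeholder:

from luminarycloud.pipelines import api as pipelines_api  # module path as in this diff

for rec in pipelines_api.list_pipelines():          # GET /rest/v0/pipelines
    print(rec.id, rec.name, rec.updated_at)

job = pipelines_api.get_pipeline_job("pj-123")      # placeholder ID; GET /rest/v0/pipeline_jobs/{id}
print(job.status, job.started_at, job.completed_at)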
luminarycloud/project.py
CHANGED

@@ -10,8 +10,6 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, Literal

  import concurrent

- import grpc
-
  import luminarycloud as lc
  from luminarycloud._helpers.named_variables import _named_variables_to_proto
  from luminarycloud.params.simulation.adjoint_ import Adjoint

@@ -258,19 +256,16 @@ class Project(ProtoWrapperBase):
  )
  return lc.Mesh(_mesh)

-
- def create_mesh(
+ def create_or_get_mesh(
  self,
  params: MeshAdaptationParams | MeshGenerationParams,
  *,
  name: str,
+ request_id: Optional[str] = None,
  ) -> "Mesh":
  """
- Create a new mesh in the project
-
- .. deprecated:: 0.10.0
- `create_mesh()` will be removed in v0.11.0, it is replaced by
- `create_or_get_mesh()`.
+ Create a new mesh in the project, or return an existing mesh with the same request_id
+ if it already exists.

  Parameters
  ----------

@@ -279,14 +274,24 @@ class Project(ProtoWrapperBase):
  existing geometry, use MeshGenerationParams. If adapting a mesh from an existing,
  solution use MeshAdaptationParams.
  name : str
-
+ Mesh name. Max 256 characters.
+ request_id : str, optional
+ Can be useful as an idempotency key. If there's an existing Mesh with the given
+ request_id, that Mesh will be returned. If there's no existing Mesh with the given
+ request_id, then a Mesh will be created and associated with that request_id. If not
+ provided, a random request_id will be generated for the Mesh, effectively preventing it
+ from being retrieved by a future `create_or_get_mesh` request. Max 256 characters.
  """

+ if request_id is None:
+ request_id = str(uuid.uuid4())
+
  client = get_default_client()

  req = meshpb.CreateMeshRequest(
  project_id=self.id,
  name=name,
+ request_id=request_id,
  )

  if isinstance(params, meshpb.MeshGenerationParams):

@@ -314,39 +319,6 @@ class Project(ProtoWrapperBase):
  res: meshpb.CreateMeshResponse = client.CreateMesh(req)
  return lc.Mesh(res.mesh)

-
- def create_or_get_mesh(
- self,
- params: MeshAdaptationParams | MeshGenerationParams,
- *,
- name: str,
- ) -> "Mesh":
- """
- Create a new mesh in the project, or return an existing mesh with the same parameters
- if it already exists.
-
- Parameters
- ----------
- params : MeshGenerationParams | MeshAdaptationParams
- The parameters to use to create the mesh. If generating a new mesh from an
- existing geometry, use MeshGenerationParams. If adapting a mesh from an existing,
- solution use MeshAdaptationParams.
- name : str
- Mesh name. Max 256 characters.
- """
-
- try:
- return self.create_mesh(params, name=name)
- except grpc.RpcError as e:
- if e.code() == grpc.StatusCode.ALREADY_EXISTS:
- message = e.details()
- match = re.search(r"mesh-[a-f0-9-]+$", message)
- if match:
- existing_mesh_id = match.group(0)
- req = meshpb.GetMeshRequest(id=existing_mesh_id)
- res = get_default_client().GetMesh(req)
- return lc.Mesh(res.mesh)
- raise
-
  def _create_hex_mesh(
  self,
  names_to_file_paths: Dict[str, Union[PathLike[Any], str]],