luminarycloud 0.22.0__py3-none-any.whl → 0.22.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. luminarycloud/_client/authentication_plugin.py +49 -0
  2. luminarycloud/_client/client.py +33 -8
  3. luminarycloud/_client/http_client.py +1 -1
  4. luminarycloud/_client/retry_interceptor.py +64 -2
  5. luminarycloud/_helpers/download.py +11 -0
  6. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +132 -132
  7. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +36 -8
  8. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +53 -23
  9. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +54 -1
  10. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +195 -0
  11. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +361 -0
  12. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +172 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +66 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +88 -65
  15. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +42 -0
  16. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.py +34 -0
  17. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.pyi +12 -0
  18. luminarycloud/_proto/base/base_pb2.py +7 -6
  19. luminarycloud/_proto/base/base_pb2.pyi +4 -0
  20. luminarycloud/_proto/client/simulation_pb2.py +3 -3
  21. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +30 -0
  22. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.pyi +7 -0
  23. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +2 -2
  24. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +34 -0
  25. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +12 -0
  26. luminarycloud/enum/vis_enums.py +6 -0
  27. luminarycloud/geometry.py +4 -0
  28. luminarycloud/geometry_version.py +4 -0
  29. luminarycloud/mesh.py +4 -0
  30. luminarycloud/meshing/mesh_generation_params.py +5 -6
  31. luminarycloud/meshing/sizing_strategy/sizing_strategies.py +1 -2
  32. luminarycloud/physics_ai/solution.py +4 -0
  33. luminarycloud/pipelines/api.py +99 -8
  34. luminarycloud/pipelines/core.py +1 -1
  35. luminarycloud/pipelines/stages.py +22 -9
  36. luminarycloud/project.py +5 -6
  37. luminarycloud/types/vector3.py +1 -2
  38. luminarycloud/vis/data_extraction.py +7 -7
  39. luminarycloud/vis/interactive_report.py +163 -7
  40. luminarycloud/vis/report.py +113 -1
  41. luminarycloud/volume_selection.py +10 -2
  42. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.1.dist-info}/METADATA +1 -1
  43. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.1.dist-info}/RECORD +44 -39
  44. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.1.dist-info}/WHEEL +1 -1
  45. luminarycloud/pipeline_util/dictable.py +0 -27
@@ -0,0 +1,361 @@
+ """
+ @generated by mypy-protobuf. Do not edit manually!
+ isort:skip_file
+ """
+ import builtins
+ import collections.abc
+ import google.protobuf.descriptor
+ import google.protobuf.internal.containers
+ import google.protobuf.internal.enum_type_wrapper
+ import google.protobuf.message
+ import luminarycloud._proto.base.base_pb2
+ import sys
+ import typing
+
+ if sys.version_info >= (3, 10):
+     import typing as typing_extensions
+ else:
+     import typing_extensions
+
+ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+ class _VisualizationOutput:
+     ValueType = typing.NewType("ValueType", builtins.int)
+     V: typing_extensions.TypeAlias = ValueType
+
+ class _VisualizationOutputEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VisualizationOutput.ValueType], builtins.type): # noqa: F821
+     DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
+     INVALID: _VisualizationOutput.ValueType # 0
+     LUMINARY: _VisualizationOutput.ValueType # 1
+     VTK: _VisualizationOutput.ValueType # 2
+
+ class VisualizationOutput(_VisualizationOutput, metaclass=_VisualizationOutputEnumTypeWrapper):
+     """Visualization output types."""
+
+ INVALID: VisualizationOutput.ValueType # 0
+ LUMINARY: VisualizationOutput.ValueType # 1
+ VTK: VisualizationOutput.ValueType # 2
+ global___VisualizationOutput = VisualizationOutput
+
+ class CreateInferenceServiceJobRequest(google.protobuf.message.Message):
+     """Request message for creating an inference service job."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     REQUEST_ID_FIELD_NUMBER: builtins.int
+     PROJECT_ID_FIELD_NUMBER: builtins.int
+     MODEL_VERSION_ID_FIELD_NUMBER: builtins.int
+     GEOMETRY_FIELD_NUMBER: builtins.int
+     SURFACES_FIELD_NUMBER: builtins.int
+     INFERENCE_FIELDS_FIELD_NUMBER: builtins.int
+     SETTINGS_FIELD_NUMBER: builtins.int
+     PER_SURFACE_VISUALIZATIONS_FIELD_NUMBER: builtins.int
+     MERGED_VISUALIZATIONS_FIELD_NUMBER: builtins.int
+     CONDITIONS_FIELD_NUMBER: builtins.int
+     request_id: builtins.str
+     """Optional. Request ID used to deduplicate creation requests. If not provided, a new ID will be generated."""
+     project_id: builtins.str
+     """Required. Project ID where the inference job will be created."""
+     model_version_id: builtins.str
+     """Required. ID of the trained model version to use for inference."""
+     geometry: builtins.str
+     """Required. STL URL we will perform inference on."""
+     @property
+     def surfaces(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SurfaceForInference]:
+         """STL defining the surfaces where inference will be performed"""
+     @property
+     def inference_fields(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+         """Optional. Specific fields within the trained model to return inference results for."""
+     settings: builtins.bytes
+     """Optional. JSON encoded settings for DoMINO (e.g., stencil_size)."""
+     @property
+     def per_surface_visualizations(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___VisualizationOutput.ValueType]:
+         """Types of visualization to write for each surface"""
+     @property
+     def merged_visualizations(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___VisualizationOutput.ValueType]:
+         """Types of merged visualization to write"""
+     conditions: builtins.bytes
+     """Optional. JSON encoded conditions to apply to the model (e.g., alpha, mach number, etc.)."""
+     def __init__(
+         self,
+         *,
+         request_id: builtins.str = ...,
+         project_id: builtins.str = ...,
+         model_version_id: builtins.str = ...,
+         geometry: builtins.str = ...,
+         surfaces: collections.abc.Iterable[global___SurfaceForInference] | None = ...,
+         inference_fields: collections.abc.Iterable[builtins.str] | None = ...,
+         settings: builtins.bytes | None = ...,
+         per_surface_visualizations: collections.abc.Iterable[global___VisualizationOutput.ValueType] | None = ...,
+         merged_visualizations: collections.abc.Iterable[global___VisualizationOutput.ValueType] | None = ...,
+         conditions: builtins.bytes | None = ...,
+     ) -> None: ...
+     def HasField(self, field_name: typing_extensions.Literal["_conditions", b"_conditions", "_settings", b"_settings", "conditions", b"conditions", "settings", b"settings"]) -> builtins.bool: ...
+     def ClearField(self, field_name: typing_extensions.Literal["_conditions", b"_conditions", "_settings", b"_settings", "conditions", b"conditions", "geometry", b"geometry", "inference_fields", b"inference_fields", "merged_visualizations", b"merged_visualizations", "model_version_id", b"model_version_id", "per_surface_visualizations", b"per_surface_visualizations", "project_id", b"project_id", "request_id", b"request_id", "settings", b"settings", "surfaces", b"surfaces"]) -> None: ...
+     @typing.overload
+     def WhichOneof(self, oneof_group: typing_extensions.Literal["_conditions", b"_conditions"]) -> typing_extensions.Literal["conditions"] | None: ...
+     @typing.overload
+     def WhichOneof(self, oneof_group: typing_extensions.Literal["_settings", b"_settings"]) -> typing_extensions.Literal["settings"] | None: ...
+
+ global___CreateInferenceServiceJobRequest = CreateInferenceServiceJobRequest
+
+ class SurfaceForInference(google.protobuf.message.Message):
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     NAME_FIELD_NUMBER: builtins.int
+     URL_FIELD_NUMBER: builtins.int
+     name: builtins.str
+     """Name of the surface"""
+     url: builtins.str
+     """STL defining the surface"""
+     def __init__(
+         self,
+         *,
+         name: builtins.str = ...,
+         url: builtins.str = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "url", b"url"]) -> None: ...
+
+ global___SurfaceForInference = SurfaceForInference
+
+ class DoubleVector(google.protobuf.message.Message):
+     """Double vector message for inference results."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     VALUES_FIELD_NUMBER: builtins.int
+     @property
+     def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ...
+     def __init__(
+         self,
+         *,
+         values: collections.abc.Iterable[builtins.float] | None = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
+ global___DoubleVector = DoubleVector
+
+ class VisualizationExport(google.protobuf.message.Message):
+     """Visualization export information."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     TYPE_FIELD_NUMBER: builtins.int
+     URL_FIELD_NUMBER: builtins.int
+     type: global___VisualizationOutput.ValueType
+     url: builtins.str
+     def __init__(
+         self,
+         *,
+         type: global___VisualizationOutput.ValueType = ...,
+         url: builtins.str = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["type", b"type", "url", b"url"]) -> None: ...
+
+ global___VisualizationExport = VisualizationExport
+
+ class NumericResult(google.protobuf.message.Message):
+     """Inference result value."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     SCALAR_FIELD_NUMBER: builtins.int
+     VECTOR_FIELD_NUMBER: builtins.int
+     scalar: builtins.float
+     @property
+     def vector(self) -> global___DoubleVector: ...
+     def __init__(
+         self,
+         *,
+         scalar: builtins.float = ...,
+         vector: global___DoubleVector | None = ...,
+     ) -> None: ...
+     def HasField(self, field_name: typing_extensions.Literal["scalar", b"scalar", "value", b"value", "vector", b"vector"]) -> builtins.bool: ...
+     def ClearField(self, field_name: typing_extensions.Literal["scalar", b"scalar", "value", b"value", "vector", b"vector"]) -> None: ...
+     def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["scalar", "vector"] | None: ...
+
+ global___NumericResult = NumericResult
+
+ class InferenceResult(google.protobuf.message.Message):
+     """An inference service job."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     class NumberOutputsEntry(google.protobuf.message.Message):
+         DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+         KEY_FIELD_NUMBER: builtins.int
+         VALUE_FIELD_NUMBER: builtins.int
+         key: builtins.str
+         @property
+         def value(self) -> global___NumericResult: ...
+         def __init__(
+             self,
+             *,
+             key: builtins.str = ...,
+             value: global___NumericResult | None = ...,
+         ) -> None: ...
+         def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+         def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
+
+     class SurfaceResultsEntry(google.protobuf.message.Message):
+         DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+         KEY_FIELD_NUMBER: builtins.int
+         VALUE_FIELD_NUMBER: builtins.int
+         key: builtins.str
+         value: builtins.str
+         def __init__(
+             self,
+             *,
+             key: builtins.str = ...,
+             value: builtins.str = ...,
+         ) -> None: ...
+         def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
+
+     class VolumeResultsEntry(google.protobuf.message.Message):
+         DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+         KEY_FIELD_NUMBER: builtins.int
+         VALUE_FIELD_NUMBER: builtins.int
+         key: builtins.str
+         value: builtins.str
+         def __init__(
+             self,
+             *,
+             key: builtins.str = ...,
+             value: builtins.str = ...,
+         ) -> None: ...
+         def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
+
+     NAME_FIELD_NUMBER: builtins.int
+     NUMBER_OUTPUTS_FIELD_NUMBER: builtins.int
+     SURFACE_RESULTS_FIELD_NUMBER: builtins.int
+     VOLUME_RESULTS_FIELD_NUMBER: builtins.int
+     VISUALIZATIONS_FIELD_NUMBER: builtins.int
+     name: builtins.str
+     """Name of surface"""
+     @property
+     def number_outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___NumericResult]:
+         """Map of number outputs (e.g., lift and drag forces)."""
+     @property
+     def surface_results(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
+         """Map of surface results (e.g., URL to data in GCS for surface pressure)."""
+     @property
+     def volume_results(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
+         """Map of volume results (e.g., URL to data in GCS for pressure, velocity)."""
+     @property
+     def visualizations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___VisualizationExport]:
+         """List of visualization exports."""
+     def __init__(
+         self,
+         *,
+         name: builtins.str = ...,
+         number_outputs: collections.abc.Mapping[builtins.str, global___NumericResult] | None = ...,
+         surface_results: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
+         volume_results: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
+         visualizations: collections.abc.Iterable[global___VisualizationExport] | None = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "number_outputs", b"number_outputs", "surface_results", b"surface_results", "visualizations", b"visualizations", "volume_results", b"volume_results"]) -> None: ...
+
+ global___InferenceResult = InferenceResult
+
+ class InferenceServiceJob(google.protobuf.message.Message):
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     JOB_ID_FIELD_NUMBER: builtins.int
+     RESULTS_FIELD_NUMBER: builtins.int
+     MERGED_VISUALIZATIONS_FIELD_NUMBER: builtins.int
+     STATUS_FIELD_NUMBER: builtins.int
+     job_id: builtins.str
+     """Unique identifier for the job."""
+     @property
+     def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InferenceResult]: ...
+     @property
+     def merged_visualizations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___VisualizationExport]: ...
+     @property
+     def status(self) -> luminarycloud._proto.base.base_pb2.JobStatus:
+         """Current status of the job (e.g., Active, Completed, Failed)."""
+     def __init__(
+         self,
+         *,
+         job_id: builtins.str = ...,
+         results: collections.abc.Iterable[global___InferenceResult] | None = ...,
+         merged_visualizations: collections.abc.Iterable[global___VisualizationExport] | None = ...,
+         status: luminarycloud._proto.base.base_pb2.JobStatus | None = ...,
+     ) -> None: ...
+     def HasField(self, field_name: typing_extensions.Literal["status", b"status"]) -> builtins.bool: ...
+     def ClearField(self, field_name: typing_extensions.Literal["job_id", b"job_id", "merged_visualizations", b"merged_visualizations", "results", b"results", "status", b"status"]) -> None: ...
+
+ global___InferenceServiceJob = InferenceServiceJob
+
+ class GetInferenceServiceJobRequest(google.protobuf.message.Message):
+     """Request message for getting an inference service job."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     JOB_ID_FIELD_NUMBER: builtins.int
+     job_id: builtins.str
+     """Required. Inference job ID."""
+     def __init__(
+         self,
+         *,
+         job_id: builtins.str = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["job_id", b"job_id"]) -> None: ...
+
+ global___GetInferenceServiceJobRequest = GetInferenceServiceJobRequest
+
+ class GetInferenceServiceJobResponse(google.protobuf.message.Message):
+     """Response message for getting an inference service job."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     JOB_FIELD_NUMBER: builtins.int
+     @property
+     def job(self) -> global___InferenceServiceJob:
+         """The inference service job."""
+     def __init__(
+         self,
+         *,
+         job: global___InferenceServiceJob | None = ...,
+     ) -> None: ...
+     def HasField(self, field_name: typing_extensions.Literal["job", b"job"]) -> builtins.bool: ...
+     def ClearField(self, field_name: typing_extensions.Literal["job", b"job"]) -> None: ...
+
+ global___GetInferenceServiceJobResponse = GetInferenceServiceJobResponse
+
+ class ListInferenceServiceJobsRequest(google.protobuf.message.Message):
+     """Request message for listing inference service jobs."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     PROJECT_ID_FIELD_NUMBER: builtins.int
+     project_id: builtins.str
+     """Required. Project ID to list inference jobs for."""
+     def __init__(
+         self,
+         *,
+         project_id: builtins.str = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["project_id", b"project_id"]) -> None: ...
+
+ global___ListInferenceServiceJobsRequest = ListInferenceServiceJobsRequest
+
+ class ListInferenceServiceJobsResponse(google.protobuf.message.Message):
+     """Response message for listing inference service jobs."""
+
+     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+     JOBS_FIELD_NUMBER: builtins.int
+     @property
+     def jobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InferenceServiceJob]:
+         """List of inference service jobs."""
+     def __init__(
+         self,
+         *,
+         jobs: collections.abc.Iterable[global___InferenceServiceJob] | None = ...,
+     ) -> None: ...
+     def ClearField(self, field_name: typing_extensions.Literal["jobs", b"jobs"]) -> None: ...
+
+ global___ListInferenceServiceJobsResponse = ListInferenceServiceJobsResponse
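The stub above spells out the request and response shapes for the new Physics AI inference API. As a rough sketch of building a request against these generated messages (the `_proto` modules are internal to the SDK, and every ID and URL below is a made-up placeholder), usage might look like this:

```python
from luminarycloud._proto.api.v0.luminarycloud.physicsaiinference import (
    physicsaiinference_pb2 as pai_pb2,
)

# All IDs and URLs are illustrative placeholders, not real resources.
request = pai_pb2.CreateInferenceServiceJobRequest(
    project_id="proj-123",
    model_version_id="model-version-456",
    geometry="gs://bucket/vehicle.stl",
    surfaces=[pai_pb2.SurfaceForInference(name="body", url="gs://bucket/body.stl")],
    inference_fields=["pressure"],
    per_surface_visualizations=[pai_pb2.VTK],
)

# `settings` and `conditions` are optional bytes fields wrapped in synthetic
# oneofs, so presence becomes observable via HasField once they are assigned.
request.conditions = b'{"alpha": 2.5}'
assert request.HasField("conditions")
```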
@@ -0,0 +1,172 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+
+ from luminarycloud._proto.api.v0.luminarycloud.physicsaiinference import physicsaiinference_pb2 as proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2
+
+
+ class PhysicsAiInferenceServiceStub(object):
+     """Manages physics AI inference jobs.
+     """
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.CreateInferenceServiceJob = channel.unary_unary(
+                 '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/CreateInferenceServiceJob',
+                 request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
+                 response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.FromString,
+                 )
+         self.CreateInferenceServiceJobAsync = channel.unary_unary(
+                 '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/CreateInferenceServiceJobAsync',
+                 request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
+                 response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.FromString,
+                 )
+         self.GetInferenceServiceJob = channel.unary_unary(
+                 '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/GetInferenceServiceJob',
+                 request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobRequest.SerializeToString,
+                 response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.FromString,
+                 )
+         self.ListInferenceServiceJobs = channel.unary_unary(
+                 '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/ListInferenceServiceJobs',
+                 request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.ListInferenceServiceJobsRequest.SerializeToString,
+                 response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.ListInferenceServiceJobsResponse.FromString,
+                 )
+
+
+ class PhysicsAiInferenceServiceServicer(object):
+     """Manages physics AI inference jobs.
+     """
+
+     def CreateInferenceServiceJob(self, request, context):
+         """Creates an inference service job synchronously, waiting for completion.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def CreateInferenceServiceJobAsync(self, request, context):
+         """Creates an inference service job asynchronously without waiting for completion.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def GetInferenceServiceJob(self, request, context):
+         """Retrieves an inference service job by its ID.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def ListInferenceServiceJobs(self, request, context):
+         """Lists all inference service jobs for a given project.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+
+ def add_PhysicsAiInferenceServiceServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+             'CreateInferenceServiceJob': grpc.unary_unary_rpc_method_handler(
+                     servicer.CreateInferenceServiceJob,
+                     request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.CreateInferenceServiceJobRequest.FromString,
+                     response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.SerializeToString,
+             ),
+             'CreateInferenceServiceJobAsync': grpc.unary_unary_rpc_method_handler(
+                     servicer.CreateInferenceServiceJobAsync,
+                     request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.CreateInferenceServiceJobRequest.FromString,
+                     response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.SerializeToString,
+             ),
+             'GetInferenceServiceJob': grpc.unary_unary_rpc_method_handler(
+                     servicer.GetInferenceServiceJob,
+                     request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobRequest.FromString,
+                     response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.SerializeToString,
+             ),
+             'ListInferenceServiceJobs': grpc.unary_unary_rpc_method_handler(
+                     servicer.ListInferenceServiceJobs,
+                     request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.ListInferenceServiceJobsRequest.FromString,
+                     response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.ListInferenceServiceJobsResponse.SerializeToString,
+             ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+             'luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService', rpc_method_handlers)
+     server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class PhysicsAiInferenceService(object):
+     """Manages physics AI inference jobs.
+     """
+
+     @staticmethod
+     def CreateInferenceServiceJob(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/CreateInferenceServiceJob',
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def CreateInferenceServiceJobAsync(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/CreateInferenceServiceJobAsync',
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def GetInferenceServiceJob(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/GetInferenceServiceJob',
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobRequest.SerializeToString,
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.GetInferenceServiceJobResponse.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+     @staticmethod
+     def ListInferenceServiceJobs(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.physicsaiinference.PhysicsAiInferenceService/ListInferenceServiceJobs',
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.ListInferenceServiceJobsRequest.SerializeToString,
+             proto_dot_api_dot_v0_dot_luminarycloud_dot_physicsaiinference_dot_physicsaiinference__pb2.ListInferenceServiceJobsResponse.FromString,
+             options, channel_credentials,
+             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
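The generated module above exposes a plain gRPC client stub for the inference service. A hedged sketch of driving it directly follows, assuming a reachable endpoint; the address, IDs, and bare insecure channel are illustrative only, since in the released SDK the client layer (see luminarycloud/_client/ in the file list, which also gained an authentication plugin and retry interceptor in this release) presumably manages the channel, credentials, and retries.

```python
import grpc

from luminarycloud._proto.api.v0.luminarycloud.physicsaiinference import (
    physicsaiinference_pb2 as pai_pb2,
    physicsaiinference_pb2_grpc as pai_grpc,
)

# Placeholder endpoint and an unauthenticated channel, for illustration only.
with grpc.insecure_channel("localhost:50051") as channel:
    stub = pai_grpc.PhysicsAiInferenceServiceStub(channel)

    # Submit without waiting for completion; the response carries the new job ID.
    created = stub.CreateInferenceServiceJobAsync(
        pai_pb2.CreateInferenceServiceJobRequest(
            project_id="proj-123",
            model_version_id="model-version-456",
            geometry="gs://bucket/vehicle.stl",
        )
    )

    # Poll a single job by ID, or list every inference job in the project.
    job = stub.GetInferenceServiceJob(
        pai_pb2.GetInferenceServiceJobRequest(job_id=created.job.job_id)
    ).job
    jobs = stub.ListInferenceServiceJobs(
        pai_pb2.ListInferenceServiceJobsRequest(project_id="proj-123")
    ).jobs
```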
@@ -0,0 +1,66 @@
+ """
+ @generated by mypy-protobuf. Do not edit manually!
+ isort:skip_file
+ """
+ import abc
+ import grpc
+ import luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2
+
+ class PhysicsAiInferenceServiceStub:
+     """Manages physics AI inference jobs."""
+
+     def __init__(self, channel: grpc.Channel) -> None: ...
+     CreateInferenceServiceJob: grpc.UnaryUnaryMultiCallable[
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.CreateInferenceServiceJobRequest,
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobResponse,
+     ]
+     """Creates an inference service job synchronously, waiting for completion."""
+     CreateInferenceServiceJobAsync: grpc.UnaryUnaryMultiCallable[
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.CreateInferenceServiceJobRequest,
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobResponse,
+     ]
+     """Creates an inference service job asynchronously without waiting for completion."""
+     GetInferenceServiceJob: grpc.UnaryUnaryMultiCallable[
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobRequest,
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobResponse,
+     ]
+     """Retrieves an inference service job by its ID."""
+     ListInferenceServiceJobs: grpc.UnaryUnaryMultiCallable[
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.ListInferenceServiceJobsRequest,
+         luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.ListInferenceServiceJobsResponse,
+     ]
+     """Lists all inference service jobs for a given project."""
+
+ class PhysicsAiInferenceServiceServicer(metaclass=abc.ABCMeta):
+     """Manages physics AI inference jobs."""
+
+     @abc.abstractmethod
+     def CreateInferenceServiceJob(
+         self,
+         request: luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.CreateInferenceServiceJobRequest,
+         context: grpc.ServicerContext,
+     ) -> luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobResponse:
+         """Creates an inference service job synchronously, waiting for completion."""
+     @abc.abstractmethod
+     def CreateInferenceServiceJobAsync(
+         self,
+         request: luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.CreateInferenceServiceJobRequest,
+         context: grpc.ServicerContext,
+     ) -> luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobResponse:
+         """Creates an inference service job asynchronously without waiting for completion."""
+     @abc.abstractmethod
+     def GetInferenceServiceJob(
+         self,
+         request: luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobRequest,
+         context: grpc.ServicerContext,
+     ) -> luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.GetInferenceServiceJobResponse:
+         """Retrieves an inference service job by its ID."""
+     @abc.abstractmethod
+     def ListInferenceServiceJobs(
+         self,
+         request: luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.ListInferenceServiceJobsRequest,
+         context: grpc.ServicerContext,
+     ) -> luminarycloud._proto.api.v0.luminarycloud.physicsaiinference.physicsaiinference_pb2.ListInferenceServiceJobsResponse:
+         """Lists all inference service jobs for a given project."""
+
+ def add_PhysicsAiInferenceServiceServicer_to_server(servicer: PhysicsAiInferenceServiceServicer, server: grpc.Server) -> None: ...
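The typed servicer base above also makes it straightforward to stand up a local fake of this service, for example when testing client code. The following is a minimal sketch that assumes nothing about Luminary's real server; the class name, port, and echoed response are invented for illustration, and only one RPC is overridden (the generated base answers the rest with UNIMPLEMENTED).

```python
from concurrent import futures

import grpc

from luminarycloud._proto.api.v0.luminarycloud.physicsaiinference import (
    physicsaiinference_pb2 as pai_pb2,
    physicsaiinference_pb2_grpc as pai_grpc,
)


class FakeInferenceService(pai_grpc.PhysicsAiInferenceServiceServicer):
    """Toy servicer that echoes the requested job ID back in an empty job."""

    def GetInferenceServiceJob(self, request, context):
        job = pai_pb2.InferenceServiceJob(job_id=request.job_id)
        return pai_pb2.GetInferenceServiceJobResponse(job=job)


# Register the servicer with a local server using the generated helper.
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
pai_grpc.add_PhysicsAiInferenceServiceServicer_to_server(FakeInferenceService(), server)
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
```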