luminarycloud 0.22.1__py3-none-any.whl → 0.22.2__py3-none-any.whl
This diff shows the contents of publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- luminarycloud/_client/client.py +5 -3
- luminarycloud/_helpers/__init__.py +9 -0
- luminarycloud/_helpers/_inference_jobs.py +227 -0
- luminarycloud/_helpers/_parse_iso_datetime.py +54 -0
- luminarycloud/_helpers/proto_decorator.py +38 -7
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +45 -25
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +30 -0
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +118 -45
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +246 -2
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
- luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +93 -33
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +105 -0
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +70 -0
- luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +29 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +29 -7
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +39 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +36 -0
- luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +18 -0
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +70 -70
- luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +5 -5
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +163 -153
- luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +37 -3
- luminarycloud/_proto/client/simulation_pb2.py +356 -337
- luminarycloud/_proto/client/simulation_pb2.pyi +89 -3
- luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +9 -4
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +6 -3
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +34 -0
- luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +12 -0
- luminarycloud/_wrapper.py +53 -7
- luminarycloud/feature_modification.py +25 -32
- luminarycloud/geometry.py +6 -6
- luminarycloud/outputs/__init__.py +2 -0
- luminarycloud/outputs/output_definitions.py +3 -3
- luminarycloud/outputs/stopping_conditions.py +94 -0
- luminarycloud/params/enum/_enum_wrappers.py +16 -0
- luminarycloud/params/geometry/shapes.py +33 -33
- luminarycloud/params/simulation/adaptive_mesh_refinement/__init__.py +1 -0
- luminarycloud/params/simulation/adaptive_mesh_refinement/active_region_.py +83 -0
- luminarycloud/params/simulation/adaptive_mesh_refinement/boundary_layer_profile_.py +1 -1
- luminarycloud/params/simulation/adaptive_mesh_refinement_.py +8 -1
- luminarycloud/physics_ai/__init__.py +7 -0
- luminarycloud/physics_ai/inference.py +166 -199
- luminarycloud/physics_ai/models.py +22 -0
- luminarycloud/pipelines/api.py +45 -9
- luminarycloud/project.py +56 -2
- luminarycloud/simulation.py +25 -0
- luminarycloud/types/__init__.py +2 -0
- luminarycloud/types/ids.py +2 -0
- luminarycloud/vis/__init__.py +1 -0
- luminarycloud/vis/filters.py +97 -0
- luminarycloud/vis/visualization.py +3 -0
- luminarycloud/volume_selection.py +6 -6
- luminarycloud/workflow_utils.py +149 -0
- {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/METADATA +1 -1
- {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/RECORD +59 -60
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +0 -61
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +0 -85
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py +0 -67
- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi +0 -26
- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +0 -69
- {luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/WHEEL +0 -0
--- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: proto/api/v0/luminarycloud/inference/inference.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
-from luminarycloud._proto.inferenceservice import inferenceservice_pb2 as proto_dot_inferenceservice_dot_inferenceservice__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n4proto/api/v0/luminarycloud/inference/inference.proto\x12-luminary.proto.api.v0.luminarycloud.inference\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a-proto/inferenceservice/inferenceservice.proto\"\xaf\x01\n CreateInferenceServiceJobRequest\x12\x18\n\x10model_version_id\x18\x08 \x01(\t\x12\x0f\n\x07stl_url\x18\x02 \x01(\t\x12\x10\n\x08settings\x18\x07 \x01(\x0c\x12\x12\n\nconditions\x18\x03 \x01(\x0c\x12\x12\n\nproject_id\x18\x05 \x01(\t\x12 \n\x18write_visualization_data\x18\x06 \x01(\x08J\x04\x08\x04\x10\x05\"\x8e\x01\n!CreateInferenceServiceJobResponse\x12\x45\n\x06status\x18\x01 \x01(\x0e\x32\x35.luminary.proto.api.v0.luminarycloud.inference.Status\x12\x15\n\x08response\x18\x02 \x01(\x0cH\x00\x88\x01\x01\x42\x0b\n\t_response*D\n\x06Status\x12\x12\n\x0eSTATUS_PENDING\x10\x00\x12\x12\n\x0eSTATUS_SUCCESS\x10\x01\x12\x12\n\x0eSTATUS_FAILURE\x10\x02\x32\xea\x01\n\x10InferenceService\x12\xd5\x01\n\x19\x43reateInferenceServiceJob\x12O.luminary.proto.api.v0.luminarycloud.inference.CreateInferenceServiceJobRequest\x1aP.luminary.proto.api.v0.luminarycloud.inference.CreateInferenceServiceJobResponse\"\x15\x82\xd3\xe4\x93\x02\x0f\"\r/v0/inferenceB=Z;luminarycloud.com/core/proto/api/v0/luminarycloud/inferenceb\x06proto3')
-
-_STATUS = DESCRIPTOR.enum_types_by_name['Status']
-Status = enum_type_wrapper.EnumTypeWrapper(_STATUS)
-STATUS_PENDING = 0
-STATUS_SUCCESS = 1
-STATUS_FAILURE = 2
-
-
-_CREATEINFERENCESERVICEJOBREQUEST = DESCRIPTOR.message_types_by_name['CreateInferenceServiceJobRequest']
-_CREATEINFERENCESERVICEJOBRESPONSE = DESCRIPTOR.message_types_by_name['CreateInferenceServiceJobResponse']
-CreateInferenceServiceJobRequest = _reflection.GeneratedProtocolMessageType('CreateInferenceServiceJobRequest', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEINFERENCESERVICEJOBREQUEST,
-  '__module__' : 'proto.api.v0.luminarycloud.inference.inference_pb2'
-  # @@protoc_insertion_point(class_scope:luminary.proto.api.v0.luminarycloud.inference.CreateInferenceServiceJobRequest)
-  })
-_sym_db.RegisterMessage(CreateInferenceServiceJobRequest)
-
-CreateInferenceServiceJobResponse = _reflection.GeneratedProtocolMessageType('CreateInferenceServiceJobResponse', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEINFERENCESERVICEJOBRESPONSE,
-  '__module__' : 'proto.api.v0.luminarycloud.inference.inference_pb2'
-  # @@protoc_insertion_point(class_scope:luminary.proto.api.v0.luminarycloud.inference.CreateInferenceServiceJobResponse)
-  })
-_sym_db.RegisterMessage(CreateInferenceServiceJobResponse)
-
-_INFERENCESERVICE = DESCRIPTOR.services_by_name['InferenceService']
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'Z;luminarycloud.com/core/proto/api/v0/luminarycloud/inference'
-  _INFERENCESERVICE.methods_by_name['CreateInferenceServiceJob']._options = None
-  _INFERENCESERVICE.methods_by_name['CreateInferenceServiceJob']._serialized_options = b'\202\323\344\223\002\017\"\r/v0/inference'
-  _STATUS._serialized_start=532
-  _STATUS._serialized_end=600
-  _CREATEINFERENCESERVICEJOBREQUEST._serialized_start=210
-  _CREATEINFERENCESERVICEJOBREQUEST._serialized_end=385
-  _CREATEINFERENCESERVICEJOBRESPONSE._serialized_start=388
-  _CREATEINFERENCESERVICEJOBRESPONSE._serialized_end=530
-  _INFERENCESERVICE._serialized_start=603
-  _INFERENCESERVICE._serialized_end=837
-# @@protoc_insertion_point(module_scope)
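Note: this module used the legacy _reflection-based protobuf codegen, which registers messages at import time and exposes enum values as module-level constants. A minimal sketch of what that meant for callers on 0.22.1:

from luminarycloud._proto.api.v0.luminarycloud.inference import inference_pb2

# Enum values are plain module-level ints, with a wrapper for name lookups.
assert inference_pb2.STATUS_SUCCESS == 1
assert inference_pb2.Status.Name(inference_pb2.STATUS_FAILURE) == "STATUS_FAILURE"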
--- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi
+++ /dev/null
@@ -1,85 +0,0 @@
-"""
-@generated by mypy-protobuf. Do not edit manually!
-isort:skip_file
-"""
-import builtins
-import google.protobuf.descriptor
-import google.protobuf.internal.enum_type_wrapper
-import google.protobuf.message
-import sys
-import typing
-
-if sys.version_info >= (3, 10):
-    import typing as typing_extensions
-else:
-    import typing_extensions
-
-DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
-
-class _Status:
-    ValueType = typing.NewType("ValueType", builtins.int)
-    V: typing_extensions.TypeAlias = ValueType
-
-class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Status.ValueType], builtins.type):  # noqa: F821
-    DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
-    STATUS_PENDING: _Status.ValueType  # 0
-    STATUS_SUCCESS: _Status.ValueType  # 1
-    STATUS_FAILURE: _Status.ValueType  # 2
-
-class Status(_Status, metaclass=_StatusEnumTypeWrapper): ...
-
-STATUS_PENDING: Status.ValueType  # 0
-STATUS_SUCCESS: Status.ValueType  # 1
-STATUS_FAILURE: Status.ValueType  # 2
-global___Status = Status
-
-class CreateInferenceServiceJobRequest(google.protobuf.message.Message):
-    DESCRIPTOR: google.protobuf.descriptor.Descriptor
-
-    MODEL_VERSION_ID_FIELD_NUMBER: builtins.int
-    STL_URL_FIELD_NUMBER: builtins.int
-    SETTINGS_FIELD_NUMBER: builtins.int
-    CONDITIONS_FIELD_NUMBER: builtins.int
-    PROJECT_ID_FIELD_NUMBER: builtins.int
-    WRITE_VISUALIZATION_DATA_FIELD_NUMBER: builtins.int
-    model_version_id: builtins.str
-    """ID of the trained model version to use for inference"""
-    stl_url: builtins.str
-    settings: builtins.bytes
-    """JSON encoded settings, like stencil_size."""
-    conditions: builtins.bytes
-    """JSON encoded conditions, like alpha, beta, etc."""
-    project_id: builtins.str
-    write_visualization_data: builtins.bool
-    def __init__(
-        self,
-        *,
-        model_version_id: builtins.str = ...,
-        stl_url: builtins.str = ...,
-        settings: builtins.bytes = ...,
-        conditions: builtins.bytes = ...,
-        project_id: builtins.str = ...,
-        write_visualization_data: builtins.bool = ...,
-    ) -> None: ...
-    def ClearField(self, field_name: typing_extensions.Literal["conditions", b"conditions", "model_version_id", b"model_version_id", "project_id", b"project_id", "settings", b"settings", "stl_url", b"stl_url", "write_visualization_data", b"write_visualization_data"]) -> None: ...
-
-global___CreateInferenceServiceJobRequest = CreateInferenceServiceJobRequest
-
-class CreateInferenceServiceJobResponse(google.protobuf.message.Message):
-    DESCRIPTOR: google.protobuf.descriptor.Descriptor
-
-    STATUS_FIELD_NUMBER: builtins.int
-    RESPONSE_FIELD_NUMBER: builtins.int
-    status: global___Status.ValueType
-    response: builtins.bytes
-    def __init__(
-        self,
-        *,
-        status: global___Status.ValueType = ...,
-        response: builtins.bytes | None = ...,
-    ) -> None: ...
-    def HasField(self, field_name: typing_extensions.Literal["_response", b"_response", "response", b"response"]) -> builtins.bool: ...
-    def ClearField(self, field_name: typing_extensions.Literal["_response", b"_response", "response", b"response", "status", b"status"]) -> None: ...
-    def WhichOneof(self, oneof_group: typing_extensions.Literal["_response", b"_response"]) -> typing_extensions.Literal["response"] | None: ...
-
-global___CreateInferenceServiceJobResponse = CreateInferenceServiceJobResponse
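Note: the stub above fully describes the removed request message, so for context this is roughly how it was constructed on 0.22.1. All field values below are hypothetical; the JSON encoding of settings and conditions follows the docstrings in the stub:

import json

from luminarycloud._proto.api.v0.luminarycloud.inference import inference_pb2

request = inference_pb2.CreateInferenceServiceJobRequest(
    model_version_id="mv-123",                          # hypothetical ID
    stl_url="gs://bucket/geometry.stl",                 # hypothetical URL
    settings=json.dumps({"stencil_size": 8}).encode(),  # JSON-encoded bytes per the stub
    conditions=json.dumps({"alpha": 2.5}).encode(),     # JSON-encoded bytes per the stub
    project_id="p-456",                                 # hypothetical ID
    write_visualization_data=True,
)
payload = request.SerializeToString()  # standard protobuf wire format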
--- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-"""Client and server classes corresponding to protobuf-defined services."""
-import grpc
-
-from luminarycloud._proto.api.v0.luminarycloud.inference import inference_pb2 as proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2
-
-
-class InferenceServiceStub(object):
-    """Missing associated documentation comment in .proto file."""
-
-    def __init__(self, channel):
-        """Constructor.
-
-        Args:
-            channel: A grpc.Channel.
-        """
-        self.CreateInferenceServiceJob = channel.unary_unary(
-                '/luminary.proto.api.v0.luminarycloud.inference.InferenceService/CreateInferenceServiceJob',
-                request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
-                response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobResponse.FromString,
-                )
-
-
-class InferenceServiceServicer(object):
-    """Missing associated documentation comment in .proto file."""
-
-    def CreateInferenceServiceJob(self, request, context):
-        """Lists the geometries available in a project.
-        """
-        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-        context.set_details('Method not implemented!')
-        raise NotImplementedError('Method not implemented!')
-
-
-def add_InferenceServiceServicer_to_server(servicer, server):
-    rpc_method_handlers = {
-            'CreateInferenceServiceJob': grpc.unary_unary_rpc_method_handler(
-                    servicer.CreateInferenceServiceJob,
-                    request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobRequest.FromString,
-                    response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobResponse.SerializeToString,
-            ),
-    }
-    generic_handler = grpc.method_handlers_generic_handler(
-            'luminary.proto.api.v0.luminarycloud.inference.InferenceService', rpc_method_handlers)
-    server.add_generic_rpc_handlers((generic_handler,))
-
-
-# This class is part of an EXPERIMENTAL API.
-class InferenceService(object):
-    """Missing associated documentation comment in .proto file."""
-
-    @staticmethod
-    def CreateInferenceServiceJob(request,
-            target,
-            options=(),
-            channel_credentials=None,
-            call_credentials=None,
-            insecure=False,
-            compression=None,
-            wait_for_ready=None,
-            timeout=None,
-            metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.inference.InferenceService/CreateInferenceServiceJob',
-            proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
-            proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobResponse.FromString,
-            options, channel_credentials,
-            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
--- luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-@generated by mypy-protobuf. Do not edit manually!
-isort:skip_file
-"""
-import abc
-import grpc
-import luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2
-
-class InferenceServiceStub:
-    def __init__(self, channel: grpc.Channel) -> None: ...
-    CreateInferenceServiceJob: grpc.UnaryUnaryMultiCallable[
-        luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobRequest,
-        luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobResponse,
-    ]
-    """Lists the geometries available in a project."""
-
-class InferenceServiceServicer(metaclass=abc.ABCMeta):
-    @abc.abstractmethod
-    def CreateInferenceServiceJob(
-        self,
-        request: luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobRequest,
-        context: grpc.ServicerContext,
-    ) -> luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobResponse:
-        """Lists the geometries available in a project."""
-
-def add_InferenceServiceServicer_to_server(servicer: InferenceServiceServicer, server: grpc.Server) -> None: ...
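Note: the servicer stub above also fixes the server-side contract. A purely illustrative in-process implementation (nothing like this ships in the SDK; the real service lives behind Luminary's API) would be:

from concurrent import futures

import grpc

from luminarycloud._proto.api.v0.luminarycloud.inference import (
    inference_pb2,
    inference_pb2_grpc,
)

class StubInferenceService(inference_pb2_grpc.InferenceServiceServicer):
    def CreateInferenceServiceJob(self, request, context):
        # Report success immediately; a real implementation would enqueue a job.
        return inference_pb2.CreateInferenceServiceJobResponse(
            status=inference_pb2.STATUS_SUCCESS,
        )

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
inference_pb2_grpc.add_InferenceServiceServicer_to_server(StubInferenceService(), server)
server.add_insecure_port("localhost:50051")
server.start()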
--- luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: proto/inferenceservice/inferenceservice.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from luminarycloud._proto.base import base_pb2 as proto_dot_base_dot_base__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-proto/inferenceservice/inferenceservice.proto\x12\x1fluminary.proto.inferenceservice\x1a\x15proto/base/base.proto\"\xaf\x01\n CreateInferenceServiceJobRequest\x12\x18\n\x10model_version_id\x18\x08 \x01(\t\x12\x0f\n\x07stl_url\x18\x02 \x01(\t\x12\x10\n\x08settings\x18\x07 \x01(\x0c\x12\x12\n\nconditions\x18\x03 \x01(\x0c\x12\x12\n\nproject_id\x18\x05 \x01(\t\x12 \n\x18write_visualization_data\x18\x06 \x01(\x08J\x04\x08\x04\x10\x05\"5\n!CreateInferenceServiceJobResponse\x12\x10\n\x08response\x18\x01 \x01(\x0c\"*\n\x1bPingInferenceServiceRequest\x12\x0b\n\x03msg\x18\x01 \x01(\t\"+\n\x1cPingInferenceServiceResponse\x12\x0b\n\x03msg\x18\x01 \x01(\t2\xcd\x02\n\x10InferenceService\x12\xa2\x01\n\x19\x43reateInferenceServiceJob\x12\x41.luminary.proto.inferenceservice.CreateInferenceServiceJobRequest\x1a\x42.luminary.proto.inferenceservice.CreateInferenceServiceJobResponse\x12\x93\x01\n\x14PingInferenceService\x12<.luminary.proto.inferenceservice.PingInferenceServiceRequest\x1a=.luminary.proto.inferenceservice.PingInferenceServiceResponseB/Z-luminarycloud.com/core/proto/inferenceserviceb\x06proto3')
-
-
-
-_CREATEINFERENCESERVICEJOBREQUEST = DESCRIPTOR.message_types_by_name['CreateInferenceServiceJobRequest']
-_CREATEINFERENCESERVICEJOBRESPONSE = DESCRIPTOR.message_types_by_name['CreateInferenceServiceJobResponse']
-_PINGINFERENCESERVICEREQUEST = DESCRIPTOR.message_types_by_name['PingInferenceServiceRequest']
-_PINGINFERENCESERVICERESPONSE = DESCRIPTOR.message_types_by_name['PingInferenceServiceResponse']
-CreateInferenceServiceJobRequest = _reflection.GeneratedProtocolMessageType('CreateInferenceServiceJobRequest', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEINFERENCESERVICEJOBREQUEST,
-  '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-  # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.CreateInferenceServiceJobRequest)
-  })
-_sym_db.RegisterMessage(CreateInferenceServiceJobRequest)
-
-CreateInferenceServiceJobResponse = _reflection.GeneratedProtocolMessageType('CreateInferenceServiceJobResponse', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEINFERENCESERVICEJOBRESPONSE,
-  '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-  # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.CreateInferenceServiceJobResponse)
-  })
-_sym_db.RegisterMessage(CreateInferenceServiceJobResponse)
-
-PingInferenceServiceRequest = _reflection.GeneratedProtocolMessageType('PingInferenceServiceRequest', (_message.Message,), {
-  'DESCRIPTOR' : _PINGINFERENCESERVICEREQUEST,
-  '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-  # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.PingInferenceServiceRequest)
-  })
-_sym_db.RegisterMessage(PingInferenceServiceRequest)
-
-PingInferenceServiceResponse = _reflection.GeneratedProtocolMessageType('PingInferenceServiceResponse', (_message.Message,), {
-  'DESCRIPTOR' : _PINGINFERENCESERVICERESPONSE,
-  '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-  # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.PingInferenceServiceResponse)
-  })
-_sym_db.RegisterMessage(PingInferenceServiceResponse)
-
-_INFERENCESERVICE = DESCRIPTOR.services_by_name['InferenceService']
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'Z-luminarycloud.com/core/proto/inferenceservice'
-  _CREATEINFERENCESERVICEJOBREQUEST._serialized_start=106
-  _CREATEINFERENCESERVICEJOBREQUEST._serialized_end=281
-  _CREATEINFERENCESERVICEJOBRESPONSE._serialized_start=283
-  _CREATEINFERENCESERVICEJOBRESPONSE._serialized_end=336
-  _PINGINFERENCESERVICEREQUEST._serialized_start=338
-  _PINGINFERENCESERVICEREQUEST._serialized_end=380
-  _PINGINFERENCESERVICERESPONSE._serialized_start=382
-  _PINGINFERENCESERVICERESPONSE._serialized_end=425
-  _INFERENCESERVICE._serialized_start=428
-  _INFERENCESERVICE._serialized_end=761
-# @@protoc_insertion_point(module_scope)
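Note: this internal inferenceservice proto (also removed) duplicated the request message and added a trivial ping pair; its messages round-trip like any generated protobuf. A minimal sketch with a hypothetical payload:

from luminarycloud._proto.inferenceservice import inferenceservice_pb2

ping = inferenceservice_pb2.PingInferenceServiceRequest(msg="hello")
echo = inferenceservice_pb2.PingInferenceServiceRequest.FromString(ping.SerializeToString())
assert echo.msg == "hello"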
{luminarycloud-0.22.1.dist-info → luminarycloud-0.22.2.dist-info}/WHEEL: file renamed without content changes.