luminarycloud 0.22.0__py3-none-any.whl → 0.22.2__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Files changed (86)
  1. luminarycloud/_client/authentication_plugin.py +49 -0
  2. luminarycloud/_client/client.py +38 -11
  3. luminarycloud/_client/http_client.py +1 -1
  4. luminarycloud/_client/retry_interceptor.py +64 -2
  5. luminarycloud/_helpers/__init__.py +9 -0
  6. luminarycloud/_helpers/_inference_jobs.py +227 -0
  7. luminarycloud/_helpers/_parse_iso_datetime.py +54 -0
  8. luminarycloud/_helpers/download.py +11 -0
  9. luminarycloud/_helpers/proto_decorator.py +38 -7
  10. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.py +152 -132
  11. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2.pyi +66 -8
  12. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.py +34 -0
  13. luminarycloud/_proto/api/v0/luminarycloud/geometry/geometry_pb2_grpc.pyi +12 -0
  14. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.py +142 -39
  15. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2.pyi +300 -3
  16. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.py +34 -0
  17. luminarycloud/_proto/api/v0/luminarycloud/physics_ai/physics_ai_pb2_grpc.pyi +12 -0
  18. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.py +255 -0
  19. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2.pyi +466 -0
  20. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.py +242 -0
  21. luminarycloud/_proto/api/v0/luminarycloud/physicsaiinference/physicsaiinference_pb2_grpc.pyi +95 -0
  22. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.py +29 -7
  23. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2.pyi +39 -0
  24. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.py +36 -0
  25. luminarycloud/_proto/api/v0/luminarycloud/simulation/simulation_pb2_grpc.pyi +18 -0
  26. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.py +88 -65
  27. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2.pyi +42 -0
  28. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.py +34 -0
  29. luminarycloud/_proto/api/v0/luminarycloud/thirdpartyintegration/onshape/onshape_pb2_grpc.pyi +12 -0
  30. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.py +163 -153
  31. luminarycloud/_proto/api/v0/luminarycloud/vis/vis_pb2.pyi +37 -3
  32. luminarycloud/_proto/base/base_pb2.py +7 -6
  33. luminarycloud/_proto/base/base_pb2.pyi +4 -0
  34. luminarycloud/_proto/client/simulation_pb2.py +358 -339
  35. luminarycloud/_proto/client/simulation_pb2.pyi +89 -3
  36. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.py +35 -0
  37. luminarycloud/_proto/physicsaiinferenceservice/physicsaiinferenceservice_pb2.pyi +7 -0
  38. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2.py +6 -3
  39. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.py +68 -0
  40. luminarycloud/_proto/physicsaitrainingservice/physicsaitrainingservice_pb2_grpc.pyi +24 -0
  41. luminarycloud/_wrapper.py +53 -7
  42. luminarycloud/enum/vis_enums.py +6 -0
  43. luminarycloud/feature_modification.py +25 -32
  44. luminarycloud/geometry.py +10 -6
  45. luminarycloud/geometry_version.py +4 -0
  46. luminarycloud/mesh.py +4 -0
  47. luminarycloud/meshing/mesh_generation_params.py +5 -6
  48. luminarycloud/meshing/sizing_strategy/sizing_strategies.py +1 -2
  49. luminarycloud/outputs/__init__.py +2 -0
  50. luminarycloud/outputs/output_definitions.py +3 -3
  51. luminarycloud/outputs/stopping_conditions.py +94 -0
  52. luminarycloud/params/enum/_enum_wrappers.py +16 -0
  53. luminarycloud/params/geometry/shapes.py +33 -33
  54. luminarycloud/params/simulation/adaptive_mesh_refinement/__init__.py +1 -0
  55. luminarycloud/params/simulation/adaptive_mesh_refinement/active_region_.py +83 -0
  56. luminarycloud/params/simulation/adaptive_mesh_refinement/boundary_layer_profile_.py +1 -1
  57. luminarycloud/params/simulation/adaptive_mesh_refinement_.py +8 -1
  58. luminarycloud/physics_ai/__init__.py +7 -0
  59. luminarycloud/physics_ai/inference.py +166 -199
  60. luminarycloud/physics_ai/models.py +22 -0
  61. luminarycloud/physics_ai/solution.py +4 -0
  62. luminarycloud/pipelines/api.py +143 -16
  63. luminarycloud/pipelines/core.py +1 -1
  64. luminarycloud/pipelines/stages.py +22 -9
  65. luminarycloud/project.py +61 -8
  66. luminarycloud/simulation.py +25 -0
  67. luminarycloud/types/__init__.py +2 -0
  68. luminarycloud/types/ids.py +2 -0
  69. luminarycloud/types/vector3.py +1 -2
  70. luminarycloud/vis/__init__.py +1 -0
  71. luminarycloud/vis/data_extraction.py +7 -7
  72. luminarycloud/vis/filters.py +97 -0
  73. luminarycloud/vis/interactive_report.py +163 -7
  74. luminarycloud/vis/report.py +113 -1
  75. luminarycloud/vis/visualization.py +3 -0
  76. luminarycloud/volume_selection.py +16 -8
  77. luminarycloud/workflow_utils.py +149 -0
  78. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.2.dist-info}/METADATA +1 -1
  79. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.2.dist-info}/RECORD +80 -76
  80. {luminarycloud-0.22.0.dist-info → luminarycloud-0.22.2.dist-info}/WHEEL +1 -1
  81. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.py +0 -61
  82. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi +0 -85
  83. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py +0 -67
  84. luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi +0 -26
  85. luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py +0 -69
  86. luminarycloud/pipeline_util/dictable.py +0 -27
luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2.pyi
@@ -1,85 +0,0 @@
- """
- @generated by mypy-protobuf. Do not edit manually!
- isort:skip_file
- """
- import builtins
- import google.protobuf.descriptor
- import google.protobuf.internal.enum_type_wrapper
- import google.protobuf.message
- import sys
- import typing
-
- if sys.version_info >= (3, 10):
-     import typing as typing_extensions
- else:
-     import typing_extensions
-
- DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
-
- class _Status:
-     ValueType = typing.NewType("ValueType", builtins.int)
-     V: typing_extensions.TypeAlias = ValueType
-
- class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Status.ValueType], builtins.type):  # noqa: F821
-     DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
-     STATUS_PENDING: _Status.ValueType  # 0
-     STATUS_SUCCESS: _Status.ValueType  # 1
-     STATUS_FAILURE: _Status.ValueType  # 2
-
- class Status(_Status, metaclass=_StatusEnumTypeWrapper): ...
-
- STATUS_PENDING: Status.ValueType  # 0
- STATUS_SUCCESS: Status.ValueType  # 1
- STATUS_FAILURE: Status.ValueType  # 2
- global___Status = Status
-
- class CreateInferenceServiceJobRequest(google.protobuf.message.Message):
-     DESCRIPTOR: google.protobuf.descriptor.Descriptor
-
-     MODEL_VERSION_ID_FIELD_NUMBER: builtins.int
-     STL_URL_FIELD_NUMBER: builtins.int
-     SETTINGS_FIELD_NUMBER: builtins.int
-     CONDITIONS_FIELD_NUMBER: builtins.int
-     PROJECT_ID_FIELD_NUMBER: builtins.int
-     WRITE_VISUALIZATION_DATA_FIELD_NUMBER: builtins.int
-     model_version_id: builtins.str
-     """ID of the trained model version to use for inference"""
-     stl_url: builtins.str
-     settings: builtins.bytes
-     """JSON encoded settings, like stencil_size."""
-     conditions: builtins.bytes
-     """JSON encoded conditions, like alpha, beta, etc."""
-     project_id: builtins.str
-     write_visualization_data: builtins.bool
-     def __init__(
-         self,
-         *,
-         model_version_id: builtins.str = ...,
-         stl_url: builtins.str = ...,
-         settings: builtins.bytes = ...,
-         conditions: builtins.bytes = ...,
-         project_id: builtins.str = ...,
-         write_visualization_data: builtins.bool = ...,
-     ) -> None: ...
-     def ClearField(self, field_name: typing_extensions.Literal["conditions", b"conditions", "model_version_id", b"model_version_id", "project_id", b"project_id", "settings", b"settings", "stl_url", b"stl_url", "write_visualization_data", b"write_visualization_data"]) -> None: ...
-
- global___CreateInferenceServiceJobRequest = CreateInferenceServiceJobRequest
-
- class CreateInferenceServiceJobResponse(google.protobuf.message.Message):
-     DESCRIPTOR: google.protobuf.descriptor.Descriptor
-
-     STATUS_FIELD_NUMBER: builtins.int
-     RESPONSE_FIELD_NUMBER: builtins.int
-     status: global___Status.ValueType
-     response: builtins.bytes
-     def __init__(
-         self,
-         *,
-         status: global___Status.ValueType = ...,
-         response: builtins.bytes | None = ...,
-     ) -> None: ...
-     def HasField(self, field_name: typing_extensions.Literal["_response", b"_response", "response", b"response"]) -> builtins.bool: ...
-     def ClearField(self, field_name: typing_extensions.Literal["_response", b"_response", "response", b"response", "status", b"status"]) -> None: ...
-     def WhichOneof(self, oneof_group: typing_extensions.Literal["_response", b"_response"]) -> typing_extensions.Literal["response"] | None: ...
-
- global___CreateInferenceServiceJobResponse = CreateInferenceServiceJobResponse
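
For reference, the removed stub file above defines the request and response messages of the deleted inference API. A minimal sketch of constructing the request against the 0.22.0 wheel (where this module still existed) follows; the module path and field names come from the stubs above, but every value is a hypothetical placeholder:

    # Sketch only: assumes the 0.22.0 wheel; all values are hypothetical placeholders.
    from luminarycloud._proto.api.v0.luminarycloud.inference import inference_pb2

    request = inference_pb2.CreateInferenceServiceJobRequest(
        model_version_id="mv-example",               # trained model version ID
        stl_url="https://example.com/geometry.stl",  # geometry to run inference on
        settings=b'{"stencil_size": 8}',             # JSON-encoded, per the field docs
        conditions=b'{"alpha": 2.0, "beta": 0.0}',   # JSON-encoded, per the field docs
        project_id="proj-example",
        write_visualization_data=True,
    )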
luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.py
@@ -1,67 +0,0 @@
- # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
- """Client and server classes corresponding to protobuf-defined services."""
- import grpc
-
- from luminarycloud._proto.api.v0.luminarycloud.inference import inference_pb2 as proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2
-
-
- class InferenceServiceStub(object):
-     """Missing associated documentation comment in .proto file."""
-
-     def __init__(self, channel):
-         """Constructor.
-
-         Args:
-             channel: A grpc.Channel.
-         """
-         self.CreateInferenceServiceJob = channel.unary_unary(
-                 '/luminary.proto.api.v0.luminarycloud.inference.InferenceService/CreateInferenceServiceJob',
-                 request_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
-                 response_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobResponse.FromString,
-                 )
-
-
- class InferenceServiceServicer(object):
-     """Missing associated documentation comment in .proto file."""
-
-     def CreateInferenceServiceJob(self, request, context):
-         """Lists the geometries available in a project.
-         """
-         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-         context.set_details('Method not implemented!')
-         raise NotImplementedError('Method not implemented!')
-
-
- def add_InferenceServiceServicer_to_server(servicer, server):
-     rpc_method_handlers = {
-             'CreateInferenceServiceJob': grpc.unary_unary_rpc_method_handler(
-                     servicer.CreateInferenceServiceJob,
-                     request_deserializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobRequest.FromString,
-                     response_serializer=proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobResponse.SerializeToString,
-             ),
-     }
-     generic_handler = grpc.method_handlers_generic_handler(
-             'luminary.proto.api.v0.luminarycloud.inference.InferenceService', rpc_method_handlers)
-     server.add_generic_rpc_handlers((generic_handler,))
-
-
- # This class is part of an EXPERIMENTAL API.
- class InferenceService(object):
-     """Missing associated documentation comment in .proto file."""
-
-     @staticmethod
-     def CreateInferenceServiceJob(request,
-             target,
-             options=(),
-             channel_credentials=None,
-             call_credentials=None,
-             insecure=False,
-             compression=None,
-             wait_for_ready=None,
-             timeout=None,
-             metadata=None):
-         return grpc.experimental.unary_unary(request, target, '/luminary.proto.api.v0.luminarycloud.inference.InferenceService/CreateInferenceServiceJob',
-             proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobRequest.SerializeToString,
-             proto_dot_api_dot_v0_dot_luminarycloud_dot_inference_dot_inference__pb2.CreateInferenceServiceJobResponse.FromString,
-             options, channel_credentials,
-             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
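
The removed client code above follows the standard generated-gRPC pattern: the stub wraps a channel and exposes one callable per RPC. A minimal client-side sketch, assuming the 0.22.0 wheel; the endpoint address and request values are hypothetical placeholders:

    # Sketch only: standard gRPC client usage of the removed stub.
    import grpc

    from luminarycloud._proto.api.v0.luminarycloud.inference import (
        inference_pb2,
        inference_pb2_grpc,
    )

    channel = grpc.insecure_channel("localhost:50051")  # hypothetical endpoint
    stub = inference_pb2_grpc.InferenceServiceStub(channel)
    response = stub.CreateInferenceServiceJob(
        inference_pb2.CreateInferenceServiceJobRequest(model_version_id="mv-example")
    )
    if response.status == inference_pb2.STATUS_SUCCESS:
        print(response.response)  # opaque bytes payload, per the stubs above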
luminarycloud/_proto/api/v0/luminarycloud/inference/inference_pb2_grpc.pyi
@@ -1,26 +0,0 @@
- """
- @generated by mypy-protobuf. Do not edit manually!
- isort:skip_file
- """
- import abc
- import grpc
- import luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2
-
- class InferenceServiceStub:
-     def __init__(self, channel: grpc.Channel) -> None: ...
-     CreateInferenceServiceJob: grpc.UnaryUnaryMultiCallable[
-         luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobRequest,
-         luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobResponse,
-     ]
-     """Lists the geometries available in a project."""
-
- class InferenceServiceServicer(metaclass=abc.ABCMeta):
-     @abc.abstractmethod
-     def CreateInferenceServiceJob(
-         self,
-         request: luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobRequest,
-         context: grpc.ServicerContext,
-     ) -> luminarycloud._proto.api.v0.luminarycloud.inference.inference_pb2.CreateInferenceServiceJobResponse:
-         """Lists the geometries available in a project."""
-
- def add_InferenceServiceServicer_to_server(servicer: InferenceServiceServicer, server: grpc.Server) -> None: ...
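
On the server side, the removed stubs define an abstract servicer plus a registration helper. A minimal sketch of implementing and registering it with standard grpc.server machinery; the class name, response values, and port are hypothetical:

    # Sketch only: a servicer implementation using the removed stubs.
    from concurrent import futures

    import grpc

    from luminarycloud._proto.api.v0.luminarycloud.inference import (
        inference_pb2,
        inference_pb2_grpc,
    )

    class ExampleInferenceService(inference_pb2_grpc.InferenceServiceServicer):
        def CreateInferenceServiceJob(self, request, context):
            # A real implementation would enqueue a job; this only acknowledges it.
            return inference_pb2.CreateInferenceServiceJobResponse(
                status=inference_pb2.STATUS_PENDING,
                response=b"{}",
            )

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    inference_pb2_grpc.add_InferenceServiceServicer_to_server(
        ExampleInferenceService(), server
    )
    server.add_insecure_port("[::]:50051")  # hypothetical port
    server.start()
    server.wait_for_termination()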
luminarycloud/_proto/inferenceservice/inferenceservice_pb2.py
@@ -1,69 +0,0 @@
- # -*- coding: utf-8 -*-
- # Generated by the protocol buffer compiler. DO NOT EDIT!
- # source: proto/inferenceservice/inferenceservice.proto
- """Generated protocol buffer code."""
- from google.protobuf import descriptor as _descriptor
- from google.protobuf import descriptor_pool as _descriptor_pool
- from google.protobuf import message as _message
- from google.protobuf import reflection as _reflection
- from google.protobuf import symbol_database as _symbol_database
- # @@protoc_insertion_point(imports)
-
- _sym_db = _symbol_database.Default()
-
-
- from luminarycloud._proto.base import base_pb2 as proto_dot_base_dot_base__pb2
-
-
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-proto/inferenceservice/inferenceservice.proto\x12\x1fluminary.proto.inferenceservice\x1a\x15proto/base/base.proto\"\xaf\x01\n CreateInferenceServiceJobRequest\x12\x18\n\x10model_version_id\x18\x08 \x01(\t\x12\x0f\n\x07stl_url\x18\x02 \x01(\t\x12\x10\n\x08settings\x18\x07 \x01(\x0c\x12\x12\n\nconditions\x18\x03 \x01(\x0c\x12\x12\n\nproject_id\x18\x05 \x01(\t\x12 \n\x18write_visualization_data\x18\x06 \x01(\x08J\x04\x08\x04\x10\x05\"5\n!CreateInferenceServiceJobResponse\x12\x10\n\x08response\x18\x01 \x01(\x0c\"*\n\x1bPingInferenceServiceRequest\x12\x0b\n\x03msg\x18\x01 \x01(\t\"+\n\x1cPingInferenceServiceResponse\x12\x0b\n\x03msg\x18\x01 \x01(\t2\xcd\x02\n\x10InferenceService\x12\xa2\x01\n\x19\x43reateInferenceServiceJob\x12\x41.luminary.proto.inferenceservice.CreateInferenceServiceJobRequest\x1a\x42.luminary.proto.inferenceservice.CreateInferenceServiceJobResponse\x12\x93\x01\n\x14PingInferenceService\x12<.luminary.proto.inferenceservice.PingInferenceServiceRequest\x1a=.luminary.proto.inferenceservice.PingInferenceServiceResponseB/Z-luminarycloud.com/core/proto/inferenceserviceb\x06proto3')
-
-
-
- _CREATEINFERENCESERVICEJOBREQUEST = DESCRIPTOR.message_types_by_name['CreateInferenceServiceJobRequest']
- _CREATEINFERENCESERVICEJOBRESPONSE = DESCRIPTOR.message_types_by_name['CreateInferenceServiceJobResponse']
- _PINGINFERENCESERVICEREQUEST = DESCRIPTOR.message_types_by_name['PingInferenceServiceRequest']
- _PINGINFERENCESERVICERESPONSE = DESCRIPTOR.message_types_by_name['PingInferenceServiceResponse']
- CreateInferenceServiceJobRequest = _reflection.GeneratedProtocolMessageType('CreateInferenceServiceJobRequest', (_message.Message,), {
-   'DESCRIPTOR' : _CREATEINFERENCESERVICEJOBREQUEST,
-   '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-   # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.CreateInferenceServiceJobRequest)
-   })
- _sym_db.RegisterMessage(CreateInferenceServiceJobRequest)
-
- CreateInferenceServiceJobResponse = _reflection.GeneratedProtocolMessageType('CreateInferenceServiceJobResponse', (_message.Message,), {
-   'DESCRIPTOR' : _CREATEINFERENCESERVICEJOBRESPONSE,
-   '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-   # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.CreateInferenceServiceJobResponse)
-   })
- _sym_db.RegisterMessage(CreateInferenceServiceJobResponse)
-
- PingInferenceServiceRequest = _reflection.GeneratedProtocolMessageType('PingInferenceServiceRequest', (_message.Message,), {
-   'DESCRIPTOR' : _PINGINFERENCESERVICEREQUEST,
-   '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-   # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.PingInferenceServiceRequest)
-   })
- _sym_db.RegisterMessage(PingInferenceServiceRequest)
-
- PingInferenceServiceResponse = _reflection.GeneratedProtocolMessageType('PingInferenceServiceResponse', (_message.Message,), {
-   'DESCRIPTOR' : _PINGINFERENCESERVICERESPONSE,
-   '__module__' : 'proto.inferenceservice.inferenceservice_pb2'
-   # @@protoc_insertion_point(class_scope:luminary.proto.inferenceservice.PingInferenceServiceResponse)
-   })
- _sym_db.RegisterMessage(PingInferenceServiceResponse)
-
- _INFERENCESERVICE = DESCRIPTOR.services_by_name['InferenceService']
- if _descriptor._USE_C_DESCRIPTORS == False:
-
-   DESCRIPTOR._options = None
-   DESCRIPTOR._serialized_options = b'Z-luminarycloud.com/core/proto/inferenceservice'
-   _CREATEINFERENCESERVICEJOBREQUEST._serialized_start=106
-   _CREATEINFERENCESERVICEJOBREQUEST._serialized_end=281
-   _CREATEINFERENCESERVICEJOBRESPONSE._serialized_start=283
-   _CREATEINFERENCESERVICEJOBRESPONSE._serialized_end=336
-   _PINGINFERENCESERVICEREQUEST._serialized_start=338
-   _PINGINFERENCESERVICEREQUEST._serialized_end=380
-   _PINGINFERENCESERVICERESPONSE._serialized_start=382
-   _PINGINFERENCESERVICERESPONSE._serialized_end=425
-   _INFERENCESERVICE._serialized_start=428
-   _INFERENCESERVICE._serialized_end=761
- # @@protoc_insertion_point(module_scope)
luminarycloud/pipeline_util/dictable.py
@@ -1,27 +0,0 @@
- from dataclasses import fields, is_dataclass
- from typing import TYPE_CHECKING
-
- if TYPE_CHECKING:
-     from luminarycloud.pipelines.parameters import PipelineParameter
-
-
- class PipelineDictable:
-     """
-     A mixin for dataclasses that can contain PipelineParameters and/or other PipelineDictables
-     (i.e. it's recursive). Used to construct a dictionary that can be serialized to YAML for a
-     Pipeline definition, and collects all PipelineParameters encountered along the way.
-     """
-
-     def _to_pipeline_dict(self) -> tuple[dict, list["PipelineParameter"]]:
-         if not is_dataclass(self):
-             raise ValueError("PipelineDictable can only be used on dataclasses")
-         result = {}
-         params = []
-         for field in fields(self):
-             value = getattr(self, field.name)
-             if hasattr(value, "_to_pipeline_dict"):
-                 result[field.name], downstream_params = value._to_pipeline_dict()
-                 params.extend(downstream_params)
-             else:
-                 result[field.name] = value
-         return result, params
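
The removed PipelineDictable mixin recursively converts a tree of dataclasses into a plain dict while collecting any PipelineParameter objects it encounters along the way. A minimal sketch of that behavior, assuming the class definition above is in scope; the Inner and Outer dataclasses are hypothetical:

    # Sketch only: demonstrates the recursive walk of PipelineDictable.
    from dataclasses import dataclass, field

    @dataclass
    class Inner(PipelineDictable):
        resolution: int = 64

    @dataclass
    class Outer(PipelineDictable):
        name: str = "demo"
        inner: Inner = field(default_factory=Inner)  # nested dictable is recursed into

    d, params = Outer()._to_pipeline_dict()
    # d == {"name": "demo", "inner": {"resolution": 64}}
    # params is empty here; it would accumulate PipelineParameters reported by
    # nested _to_pipeline_dict() calls during the walk.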