UncountablePythonSDK 0.0.125__py3-none-any.whl → 0.0.127__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of UncountablePythonSDK might be problematic.
- examples/integration-server/jobs/materials_auto/example_instrument.py +67 -38
- examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
- examples/integration-server/jobs/materials_auto/profile.yaml +9 -0
- examples/integration-server/pyproject.toml +3 -3
- pkgs/type_spec/builder.py +19 -9
- pkgs/type_spec/emit_typescript.py +2 -2
- pkgs/type_spec/type_info/emit_type_info.py +14 -1
- pkgs/type_spec/value_spec/__main__.py +2 -2
- uncountable/integration/cli.py +29 -1
- uncountable/integration/executors/executors.py +1 -2
- uncountable/integration/executors/generic_upload_executor.py +1 -1
- uncountable/integration/job.py +1 -0
- uncountable/integration/queue_runner/command_server/__init__.py +4 -0
- uncountable/integration/queue_runner/command_server/command_client.py +39 -0
- uncountable/integration/queue_runner/command_server/command_server.py +37 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server.proto +18 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +21 -13
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +28 -1
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +90 -0
- uncountable/integration/queue_runner/command_server/types.py +24 -1
- uncountable/integration/queue_runner/datastore/datastore_sqlite.py +107 -8
- uncountable/integration/queue_runner/datastore/model.py +8 -1
- uncountable/integration/queue_runner/job_scheduler.py +42 -2
- uncountable/integration/queue_runner/worker.py +1 -1
- uncountable/integration/server.py +36 -6
- uncountable/types/__init__.py +8 -0
- uncountable/types/api/integrations/register_sockets_token.py +41 -0
- uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
- uncountable/types/api/recipes/get_recipe_output_metadata.py +2 -2
- uncountable/types/api/recipes/get_recipes_data.py +16 -0
- uncountable/types/api/recipes/lock_recipes.py +2 -1
- uncountable/types/api/recipes/set_recipe_total.py +59 -0
- uncountable/types/api/recipes/unlock_recipes.py +2 -1
- uncountable/types/api/uploader/complete_async_parse.py +46 -0
- uncountable/types/async_batch_processor.py +124 -0
- uncountable/types/async_batch_t.py +2 -0
- uncountable/types/client_base.py +76 -0
- uncountable/types/entity_t.py +1 -1
- uncountable/types/queued_job.py +1 -0
- uncountable/types/queued_job_t.py +9 -0
- uncountable/types/sockets.py +9 -0
- uncountable/types/sockets_t.py +99 -0
- uncountable/types/uploader.py +24 -0
- uncountable/types/uploader_t.py +222 -0
- {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/METADATA +1 -1
- {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/RECORD +48 -42
- {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/WHEEL +0 -0
- {uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/top_level.txt +0 -0
uncountable/types/client_base.py
CHANGED
@@ -17,6 +17,7 @@ import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe
 from uncountable.types import async_batch_t
 from uncountable.types import base_t
 import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
+import uncountable.types.api.uploader.complete_async_parse as complete_async_parse_t
 import uncountable.types.api.runsheet.complete_async_upload as complete_async_upload_t
 import uncountable.types.api.chemical.convert_chemical_formats as convert_chemical_formats_t
 import uncountable.types.api.entity.create_entities as create_entities_t
@@ -72,6 +73,7 @@ from uncountable.types import recipe_identifiers_t
 from uncountable.types import recipe_links_t
 from uncountable.types import recipe_metadata_t
 from uncountable.types import recipe_workflow_steps_t
+import uncountable.types.api.integrations.register_sockets_token as register_sockets_token_t
 import uncountable.types.api.recipes.remove_recipe_from_project as remove_recipe_from_project_t
 import uncountable.types.api.recipe_links.remove_recipe_link as remove_recipe_link_t
 import uncountable.types.api.entity.resolve_entity_ids as resolve_entity_ids_t
@@ -89,12 +91,15 @@ import uncountable.types.api.recipes.set_recipe_output_annotations as set_recipe
 import uncountable.types.api.recipes.set_recipe_output_file as set_recipe_output_file_t
 import uncountable.types.api.recipes.set_recipe_outputs as set_recipe_outputs_t
 import uncountable.types.api.recipes.set_recipe_tags as set_recipe_tags_t
+import uncountable.types.api.recipes.set_recipe_total as set_recipe_total_t
 import uncountable.types.api.entity.set_values as set_values_t
+from uncountable.types import sockets_t
 import uncountable.types.api.entity.transition_entity_phase as transition_entity_phase_t
 import uncountable.types.api.recipes.unarchive_recipes as unarchive_recipes_t
 import uncountable.types.api.entity.unlock_entity as unlock_entity_t
 import uncountable.types.api.recipes.unlock_recipes as unlock_recipes_t
 import uncountable.types.api.material_families.update_entity_material_families as update_entity_material_families_t
+from uncountable.types import uploader_t
 import uncountable.types.api.condition_parameters.upsert_condition_match as upsert_condition_match_t
 import uncountable.types.api.field_options.upsert_field_options as upsert_field_options_t
 from abc import ABC, abstractmethod
@@ -273,6 +278,28 @@ class ClientMethods(ABC):
         )
         return self.do_request(api_request=api_request, return_type=clear_recipe_outputs_t.Data)

+    def complete_async_parse(
+        self,
+        *,
+        parsed_file_data: list[uploader_t.ParsedFileData],
+        async_job_key: identifier_t.IdentifierKey,
+        upload_destination: generic_upload_t.UploadDestinationRecipe,
+    ) -> complete_async_parse_t.Data:
+        """Parses uploaded files asynchronously
+
+        """
+        args = complete_async_parse_t.Arguments(
+            parsed_file_data=parsed_file_data,
+            async_job_key=async_job_key,
+            upload_destination=upload_destination,
+        )
+        api_request = APIRequest(
+            method=complete_async_parse_t.ENDPOINT_METHOD,
+            endpoint=complete_async_parse_t.ENDPOINT_PATH,
+            args=args,
+        )
+        return self.do_request(api_request=api_request, return_type=complete_async_parse_t.Data)
+
     def complete_async_upload(
         self,
         *,
@@ -1027,6 +1054,7 @@ class ClientMethods(ABC):
         project_id: base_t.ObjectId | None = None,
         offset: int | None = None,
         limit: int | None = None,
+        empty_output_behavior: get_recipes_data_t.RecipeOutputInclusion | None = None,
     ) -> get_recipes_data_t.Data:
         """Gets all data associated with a set of recipes. Because Uncountables recipe structure is complex, various data values are exploded out to increase efficiency in parsing, and this page is paginated to prevent too large of return values

@@ -1034,12 +1062,14 @@
         :param project_id: The projects to get the data from. Either these or recipe_ids must be filled in
         :param offset: Used for pagination. All pagination is done in order of Recipe ID. [Pagination More Info](#pagination)
         :param limit: The number of data points to return. If not filled in, it will be set to 100, and cannot be set higher than 100. [Pagination More Info](#pagination)
+        :param empty_output_behavior: Recipe output inclusion behavior for empty outputs. Empty recipe outputs are outputs added to the experiment that do not have a value associated with them.
         """
         args = get_recipes_data_t.Arguments(
             recipe_ids=recipe_ids,
             project_id=project_id,
             offset=offset,
             limit=limit,
+            empty_output_behavior=empty_output_behavior,
         )
         api_request = APIRequest(
             method=get_recipes_data_t.ENDPOINT_METHOD,
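A minimal usage sketch for the new empty_output_behavior parameter (illustrative, not part of the diff): `client` stands for any object implementing the ClientMethods interface above, the recipe id is made up, and the RecipeOutputInclusion member name is a placeholder, since the enum's members are not shown in this diff.

    import uncountable.types.api.recipes.get_recipes_data as get_recipes_data_t

    # Hypothetical call; the recipe id and the enum member are placeholders.
    data = client.get_recipes_data(
        recipe_ids=[12345],
        limit=50,
        empty_output_behavior=get_recipes_data_t.RecipeOutputInclusion.INCLUDE,
    )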
@@ -1286,6 +1316,24 @@
         )
         return self.do_request(api_request=api_request, return_type=push_notification_t.Data)

+    def register_sockets_token(
+        self,
+        *,
+        socket_request: sockets_t.SocketTokenRequest,
+    ) -> register_sockets_token_t.Data:
+        """Request token for connecting to sockets server
+
+        """
+        args = register_sockets_token_t.Arguments(
+            socket_request=socket_request,
+        )
+        api_request = APIRequest(
+            method=register_sockets_token_t.ENDPOINT_METHOD,
+            endpoint=register_sockets_token_t.ENDPOINT_PATH,
+            args=args,
+        )
+        return self.do_request(api_request=api_request, return_type=register_sockets_token_t.Data)
+
     def remove_recipe_from_project(
         self,
         *,
@@ -1656,6 +1704,34 @@
         )
         return self.do_request(api_request=api_request, return_type=set_recipe_tags_t.Data)

+    def set_recipe_total(
+        self,
+        *,
+        recipe_key: identifier_t.IdentifierKey,
+        value: set_recipe_total_t.ValueNumeric,
+        recipe_workflow_step_identifier: recipe_workflow_steps_t.RecipeWorkflowStepIdentifier | None = None,
+        calculation_key: identifier_t.IdentifierKey | None = None,
+    ) -> set_recipe_total_t.Data:
+        """Updates the Set Total value for a recipe or one of its workflow steps
+
+        :param recipe_key: Identifier for the recipe
+        :param recipe_workflow_step_identifier: Identifier for the recipe workflow step
+        :param value: The quantity in mass, volume, or moles to set for the total
+        :param calculation_key: The linked basis calculation to set
+        """
+        args = set_recipe_total_t.Arguments(
+            recipe_key=recipe_key,
+            recipe_workflow_step_identifier=recipe_workflow_step_identifier,
+            value=value,
+            calculation_key=calculation_key,
+        )
+        api_request = APIRequest(
+            method=set_recipe_total_t.ENDPOINT_METHOD,
+            endpoint=set_recipe_total_t.ENDPOINT_PATH,
+            args=args,
+        )
+        return self.do_request(api_request=api_request, return_type=set_recipe_total_t.Data)
+
     def set_values(
         self,
         *,
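A minimal usage sketch for the new set_recipe_total method (illustrative, not part of the diff): `client` is any ClientMethods implementation, and `recipe_key` and `total_value` are assumed to be already-constructed identifier_t.IdentifierKey and set_recipe_total_t.ValueNumeric values.

    # Sets the total on the recipe itself; pass recipe_workflow_step_identifier
    # to target a specific workflow step, or calculation_key to pick the linked
    # basis calculation (both are optional).
    result = client.set_recipe_total(
        recipe_key=recipe_key,
        value=total_value,
    )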
uncountable/types/entity_t.py
CHANGED
@@ -398,7 +398,7 @@ class EntityType(StrEnum):

 # DO NOT MODIFY -- This file is generated by type_spec
 LimitedEntityType = typing.Annotated[
-    typing.Literal[EntityType.LAB_REQUEST] | typing.Literal[EntityType.APPROVAL] | typing.Literal[EntityType.CUSTOM_ENTITY] | typing.Literal[EntityType.INGREDIENT_ATTRIBUTE] | typing.Literal[EntityType.INVENTORY_AMOUNT] | typing.Literal[EntityType.TASK] | typing.Literal[EntityType.PROJECT] | typing.Literal[EntityType.EQUIPMENT] | typing.Literal[EntityType.INV_LOCAL_LOCATIONS] | typing.Literal[EntityType.FIELD_OPTION_SET] | typing.Literal[EntityType.WEBHOOK] | typing.Literal[EntityType.SPECS] | typing.Literal[EntityType.GOAL] | typing.Literal[EntityType.INGREDIENT_TAG_MAP] | typing.Literal[EntityType.INGREDIENT_TAG] | typing.Literal[EntityType.CONDITION_PARAMETER] | typing.Literal[EntityType.OUTPUT] | typing.Literal[EntityType.OUTPUT_CONDITION_PARAMETER] | typing.Literal[EntityType.ASYNC_JOB] | typing.Literal[EntityType.CONSTRAINT] | typing.Literal[EntityType.INGREDIENT_CATEGORY_ALL] | typing.Literal[EntityType.TIME_SERIES_SEGMENT] | typing.Literal[EntityType.EQUIPMENT_MAINTENANCE] | typing.Literal[EntityType.MAINTENANCE_SCHEDULE] | typing.Literal[EntityType.CONDITION_PARAMETER_RULE] | typing.Literal[EntityType.INGREDIENT] | typing.Literal[EntityType.TIMESHEET_ENTRY] | typing.Literal[EntityType.SAVE] | typing.Literal[EntityType.RECIPE_CHECK],
+    typing.Literal[EntityType.LAB_REQUEST] | typing.Literal[EntityType.APPROVAL] | typing.Literal[EntityType.CUSTOM_ENTITY] | typing.Literal[EntityType.INGREDIENT_ATTRIBUTE] | typing.Literal[EntityType.INVENTORY_AMOUNT] | typing.Literal[EntityType.TASK] | typing.Literal[EntityType.PROJECT] | typing.Literal[EntityType.EQUIPMENT] | typing.Literal[EntityType.INV_LOCAL_LOCATIONS] | typing.Literal[EntityType.FIELD_OPTION_SET] | typing.Literal[EntityType.WEBHOOK] | typing.Literal[EntityType.SPECS] | typing.Literal[EntityType.GOAL] | typing.Literal[EntityType.INGREDIENT_TAG_MAP] | typing.Literal[EntityType.INGREDIENT_TAG] | typing.Literal[EntityType.CONDITION_PARAMETER] | typing.Literal[EntityType.OUTPUT] | typing.Literal[EntityType.OUTPUT_CONDITION_PARAMETER] | typing.Literal[EntityType.ASYNC_JOB] | typing.Literal[EntityType.CONSTRAINT] | typing.Literal[EntityType.INGREDIENT_CATEGORY_ALL] | typing.Literal[EntityType.TIME_SERIES_SEGMENT] | typing.Literal[EntityType.EQUIPMENT_MAINTENANCE] | typing.Literal[EntityType.MAINTENANCE_SCHEDULE] | typing.Literal[EntityType.CONDITION_PARAMETER_RULE] | typing.Literal[EntityType.INGREDIENT] | typing.Literal[EntityType.TIMESHEET_ENTRY] | typing.Literal[EntityType.SAVE] | typing.Literal[EntityType.RECIPE_CHECK] | typing.Literal[EntityType.EXPERIMENT_GROUP_MEMBER],
     serial_alias_annotation(
         named_type_path="sdk.entity.LimitedEntityType",
     ),
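A one-line sketch of the change (illustrative, not part of the diff): EXPERIMENT_GROUP_MEMBER is now accepted wherever the LimitedEntityType alias is used.

    from uncountable.types import entity_t

    # Accepted as of 0.0.127; this member was not part of the union in 0.0.125.
    limited: entity_t.LimitedEntityType = entity_t.EntityType.EXPERIMENT_GROUP_MEMBER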
uncountable/types/queued_job.py
CHANGED
@@ -13,4 +13,5 @@ from .queued_job_t import QueuedJobPayload as QueuedJobPayload
 from .queued_job_t import QueuedJobResult as QueuedJobResult
 from .queued_job_t import QueuedJobMetadata as QueuedJobMetadata
 from .queued_job_t import QueuedJob as QueuedJob
+from .queued_job_t import JobStatus as JobStatus
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/queued_job_t.py
CHANGED

@@ -20,6 +20,7 @@ __all__: list[str] = [
     "InvocationContextManual",
     "InvocationContextType",
     "InvocationContextWebhook",
+    "JobStatus",
     "QueuedJob",
     "QueuedJobMetadata",
     "QueuedJobPayload",
@@ -119,6 +120,7 @@ class QueuedJobMetadata:
     job_ref_name: str
     num_attempts: int
     submitted_at: datetime.datetime
+    status: JobStatus | None = None


 # DO NOT MODIFY -- This file is generated by type_spec
@@ -128,4 +130,11 @@ class QueuedJobMetadata:
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
 class QueuedJob(QueuedJobMetadata):
     payload: QueuedJobPayload
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class JobStatus(StrEnum):
+    QUEUED = "queued"
+    FAILED = "failed"
+    SUCCESS = "success"
 # DO NOT MODIFY -- This file is generated by type_spec
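A minimal sketch of how the new optional status field and JobStatus enum might be used (illustrative, not part of the diff); the helper function below is hypothetical.

    from uncountable.types import queued_job_t

    def is_finished(meta: queued_job_t.QueuedJobMetadata) -> bool:
        # status is optional (None on payloads written before this field existed),
        # so a missing value is treated as still queued.
        return meta.status in (queued_job_t.JobStatus.SUCCESS, queued_job_t.JobStatus.FAILED)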
uncountable/types/sockets.py
CHANGED
@@ -8,4 +8,13 @@ from .sockets_t import SocketRequestType as SocketRequestType
 from .sockets_t import SocketRequestIntegrationSession as SocketRequestIntegrationSession
 from .sockets_t import SocketTokenRequest as SocketTokenRequest
 from .sockets_t import SocketTokenResponse as SocketTokenResponse
+from .sockets_t import SocketEventType as SocketEventType
+from .sockets_t import BaseSocketEventData as BaseSocketEventData
+from .sockets_t import UsersInRoomUpdatedEventData as UsersInRoomUpdatedEventData
+from .sockets_t import SocketEventData as SocketEventData
+from .sockets_t import SocketResponse as SocketResponse
+from .sockets_t import SocketClientMessageType as SocketClientMessageType
+from .sockets_t import JoinRoomWithTokenSocketClientMessage as JoinRoomWithTokenSocketClientMessage
+from .sockets_t import SendInstrumentReadingClientMessage as SendInstrumentReadingClientMessage
+from .sockets_t import SocketClientMessage as SocketClientMessage
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/sockets_t.py
CHANGED
@@ -10,15 +10,25 @@ from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
 from pkgs.serialization import serial_union_annotation
+from pkgs.serialization import serial_alias_annotation
 from . import base_t
 from . import integration_session_t

 __all__: list[str] = [
+    "BaseSocketEventData",
+    "JoinRoomWithTokenSocketClientMessage",
+    "SendInstrumentReadingClientMessage",
+    "SocketClientMessage",
+    "SocketClientMessageType",
+    "SocketEventData",
+    "SocketEventType",
     "SocketRequestBase",
     "SocketRequestIntegrationSession",
     "SocketRequestType",
+    "SocketResponse",
     "SocketTokenRequest",
     "SocketTokenResponse",
+    "UsersInRoomUpdatedEventData",
 ]

@@ -67,4 +77,93 @@ SocketTokenRequest = typing.Annotated[
 @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
 class SocketTokenResponse:
     token: str
+    room_key: str
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class SocketEventType(StrEnum):
+    USERS_IN_ROOM_UPDATED = "users_in_room_updated"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.sockets.BaseSocketEventData",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class BaseSocketEventData:
+    pass
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.sockets.UsersInRoomUpdatedEventData",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class UsersInRoomUpdatedEventData(BaseSocketEventData):
+    type: typing.Literal[SocketEventType.USERS_IN_ROOM_UPDATED] = SocketEventType.USERS_IN_ROOM_UPDATED
+    user_ids: list[base_t.ObjectId]
+    number_of_connections: int
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+SocketEventData = typing.Annotated[
+    typing.Union[UsersInRoomUpdatedEventData],
+    serial_alias_annotation(
+        named_type_path="sdk.sockets.SocketEventData",
+    ),
+]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.sockets.SocketResponse",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class SocketResponse:
+    data: SocketEventData
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class SocketClientMessageType(StrEnum):
+    JOIN_ROOM_WITH_TOKEN = "join_room_with_token"
+    SEND_INSTRUMENT_READING = "send_instrument_reading"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.sockets.JoinRoomWithTokenSocketClientMessage",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class JoinRoomWithTokenSocketClientMessage:
+    type: typing.Literal[SocketClientMessageType.JOIN_ROOM_WITH_TOKEN] = SocketClientMessageType.JOIN_ROOM_WITH_TOKEN
+    token: str
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.sockets.SendInstrumentReadingClientMessage",
+    to_string_values={"value"},
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class SendInstrumentReadingClientMessage:
+    type: typing.Literal[SocketClientMessageType.SEND_INSTRUMENT_READING] = SocketClientMessageType.SEND_INSTRUMENT_READING
+    room_key: str
+    value: Decimal
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+SocketClientMessage = typing.Annotated[
+    JoinRoomWithTokenSocketClientMessage | SendInstrumentReadingClientMessage,
+    serial_union_annotation(
+        named_type_path="sdk.sockets.SocketClientMessage",
+        discriminator="type",
+        discriminator_map={
+            "join_room_with_token": JoinRoomWithTokenSocketClientMessage,
+            "send_instrument_reading": SendInstrumentReadingClientMessage,
+        },
+    ),
+]
 # DO NOT MODIFY -- This file is generated by type_spec
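A minimal sketch tying the new sockets types to the register_sockets_token client method (illustrative, not part of the diff): `client` is any ClientMethods implementation, and `socket_request` is assumed to be an already-built sockets_t.SocketTokenRequest (for example a SocketRequestIntegrationSession, whose fields are not shown in this diff).

    from decimal import Decimal

    from uncountable.types import sockets_t

    # Exchange a socket request for a token and room key, then build the
    # client messages a sockets connection would send.
    token_response = client.register_sockets_token(socket_request=socket_request)
    join_message = sockets_t.JoinRoomWithTokenSocketClientMessage(token=token_response.token)
    reading = sockets_t.SendInstrumentReadingClientMessage(
        room_key=token_response.room_key,
        value=Decimal("12.5"),  # made-up instrument reading
    )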
uncountable/types/uploader.py
ADDED

@@ -0,0 +1,24 @@
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+# DO NOT MODIFY -- This file is generated by type_spec
+# Kept only for SDK backwards compatibility
+from .uploader_t import HeaderType as HeaderType
+from .uploader_t import ChannelType as ChannelType
+from .uploader_t import StructureElementType as StructureElementType
+from .uploader_t import StructureElementBase as StructureElementBase
+from .uploader_t import DecimalValue as DecimalValue
+from .uploader_t import StringValue as StringValue
+from .uploader_t import BaseData as BaseData
+from .uploader_t import NumericHeaderData as NumericHeaderData
+from .uploader_t import TextHeaderData as TextHeaderData
+from .uploader_t import HeaderValue as HeaderValue
+from .uploader_t import NumericChannelData as NumericChannelData
+from .uploader_t import TimestampChannelData as TimestampChannelData
+from .uploader_t import TextChannelData as TextChannelData
+from .uploader_t import Channel as Channel
+from .uploader_t import DataChannel as DataChannel
+from .uploader_t import HeaderEntry as HeaderEntry
+from .uploader_t import StructureElement as StructureElement
+from .uploader_t import ParsedFileData as ParsedFileData
+# DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/uploader_t.py
ADDED

@@ -0,0 +1,222 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing # noqa: F401
+import datetime # noqa: F401
+from decimal import Decimal # noqa: F401
+from enum import StrEnum
+import dataclasses
+from pkgs.serialization import serial_class
+from pkgs.serialization import serial_union_annotation
+from . import base_t
+
+__all__: list[str] = [
+    "BaseData",
+    "Channel",
+    "ChannelType",
+    "DataChannel",
+    "DecimalValue",
+    "HeaderEntry",
+    "HeaderType",
+    "HeaderValue",
+    "NumericChannelData",
+    "NumericHeaderData",
+    "ParsedFileData",
+    "StringValue",
+    "StructureElement",
+    "StructureElementBase",
+    "StructureElementType",
+    "TextChannelData",
+    "TextHeaderData",
+    "TimestampChannelData",
+]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class HeaderType(StrEnum):
+    NUMERIC_HEADER = "numeric_header"
+    TEXT_HEADER = "text_header"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class ChannelType(StrEnum):
+    NUMERIC_CHANNEL = "numeric_channel"
+    TIMESTAMP_CHANNEL = "timestamp_channel"
+    TEXT_CHANNEL = "text_channel"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+class StructureElementType(StrEnum):
+    CHANNEL = "channel"
+    HEADER = "header"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.StructureElementBase",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class StructureElementBase:
+    type: StructureElementType
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.DecimalValue",
+    to_string_values={"value"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class DecimalValue:
+    value: Decimal
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.StringValue",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class StringValue:
+    value: str
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.BaseData",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class BaseData:
+    name: str
+    type: str
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.NumericHeaderData",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class NumericHeaderData(BaseData):
+    type: typing.Literal[HeaderType.NUMERIC_HEADER] = HeaderType.NUMERIC_HEADER
+    data: DecimalValue | None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.TextHeaderData",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class TextHeaderData(BaseData):
+    type: typing.Literal[HeaderType.TEXT_HEADER] = HeaderType.TEXT_HEADER
+    data: StringValue | None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+HeaderValue = typing.Annotated[
+    NumericHeaderData | TextHeaderData,
+    serial_union_annotation(
+        named_type_path="sdk.uploader.HeaderValue",
+        discriminator="type",
+        discriminator_map={
+            "numeric_header": NumericHeaderData,
+            "text_header": TextHeaderData,
+        },
+    ),
+]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.NumericChannelData",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class NumericChannelData(BaseData):
+    type: typing.Literal[ChannelType.NUMERIC_CHANNEL] = ChannelType.NUMERIC_CHANNEL
+    data: list[DecimalValue]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.TimestampChannelData",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class TimestampChannelData(BaseData):
+    type: typing.Literal[ChannelType.TIMESTAMP_CHANNEL] = ChannelType.TIMESTAMP_CHANNEL
+    data: list[DecimalValue]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.TextChannelData",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class TextChannelData(BaseData):
+    type: typing.Literal[ChannelType.TEXT_CHANNEL] = ChannelType.TEXT_CHANNEL
+    data: list[StringValue]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+Channel = typing.Annotated[
+    TextChannelData | TimestampChannelData | NumericChannelData,
+    serial_union_annotation(
+        named_type_path="sdk.uploader.Channel",
+        discriminator="type",
+        discriminator_map={
+            "text_channel": TextChannelData,
+            "timestamp_channel": TimestampChannelData,
+            "numeric_channel": NumericChannelData,
+        },
+    ),
+]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.DataChannel",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class DataChannel(StructureElementBase):
+    type: typing.Literal[StructureElementType.CHANNEL] = StructureElementType.CHANNEL
+    channel: Channel
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.HeaderEntry",
+    parse_require={"type"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class HeaderEntry(StructureElementBase):
+    type: typing.Literal[StructureElementType.HEADER] = StructureElementType.HEADER
+    value: HeaderValue
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+StructureElement = typing.Annotated[
+    DataChannel | HeaderEntry,
+    serial_union_annotation(
+        named_type_path="sdk.uploader.StructureElement",
+        discriminator="type",
+        discriminator_map={
+            "channel": DataChannel,
+            "header": HeaderEntry,
+        },
+    ),
+]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.uploader.ParsedFileData",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+class ParsedFileData:
+    file_name: str
+    file_structures: list[StructureElement]
+# DO NOT MODIFY -- This file is generated by type_spec
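A minimal sketch that builds a ParsedFileData from the new uploader_t types and submits it with the new complete_async_parse client method (illustrative, not part of the diff): `client`, `async_job_key`, and `upload_destination` are assumed to already exist, and the file, header, and channel names are made up.

    from decimal import Decimal

    from uncountable.types import uploader_t

    parsed = uploader_t.ParsedFileData(
        file_name="instrument_run.csv",
        file_structures=[
            # A single text header entry...
            uploader_t.HeaderEntry(
                value=uploader_t.TextHeaderData(
                    name="operator",
                    data=uploader_t.StringValue(value="jdoe"),
                ),
            ),
            # ...and one numeric data channel with a single reading.
            uploader_t.DataChannel(
                channel=uploader_t.NumericChannelData(
                    name="viscosity",
                    data=[uploader_t.DecimalValue(value=Decimal("1.25"))],
                ),
            ),
        ],
    )

    client.complete_async_parse(
        parsed_file_data=[parsed],
        async_job_key=async_job_key,
        upload_destination=upload_destination,
    )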
{uncountablepythonsdk-0.0.125.dist-info → uncountablepythonsdk-0.0.127.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: UncountablePythonSDK
-Version: 0.0.125
+Version: 0.0.127
 Summary: Uncountable SDK
 Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
 Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git