UncountablePythonSDK 0.0.129-py3-none-any.whl → 0.0.130-py3-none-any.whl

This diff compares package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.

This release of UncountablePythonSDK has been flagged as potentially problematic.

docs/requirements.txt CHANGED
@@ -1,4 +1,4 @@
- furo==2025.7.19
+ furo==2025.9.25
  myst-parser==4.0.1
  sphinx-autoapi==3.6.0
  sphinx-copybutton==0.5.2
@@ -23,9 +23,40 @@ class ParseExample(WebhookJob[ParsePayload]):
      def run(
          self, args: JobArguments, payload: ParsePayload
      ) -> job_definition_t.JobResult:
+         user_id: base_t.ObjectId | None = None
+         recipe_id: base_t.ObjectId | None = None
+         file_name: str | None = None
+         data = args.client.get_entities_data(
+             entity_ids=[payload.async_job_id], entity_type=entity_t.EntityType.ASYNC_JOB
+         )
+         for field_value in data.entity_details[0].field_values:
+             if field_value.field_ref_name == "core_async_job_jobData":
+                 assert isinstance(field_value.value, dict)
+                 assert isinstance(field_value.value["user_id"], int)
+                 user_id = field_value.value["user_id"]
+             elif (
+                 field_value.field_ref_name
+                 == "unc_async_job_custom_parser_recipe_ids_in_view"
+             ):
+                 if field_value.value is None:
+                     continue
+                 assert isinstance(field_value.value, list)
+                 if len(field_value.value) > 0:
+                     assert isinstance(field_value.value[0], int)
+                     recipe_id = field_value.value[0]
+             elif field_value.field_ref_name == "unc_async_job_custom_parser_input_file":
+                 assert isinstance(field_value.value, list)
+                 assert len(field_value.value) == 1
+                 assert isinstance(field_value.value[0], dict)
+                 assert isinstance(field_value.value[0]["name"], str)
+                 file_name = field_value.value[0]["name"]
+
+         assert user_id is not None
+         assert file_name is not None
+
          dummy_parsed_file_data: list[uploader_t.ParsedFileData] = [
              uploader_t.ParsedFileData(
-                 file_name="my_file_to_upload.xlsx",
+                 file_name=file_name,
                  file_structures=[
                      uploader_t.DataChannel(
                          type=uploader_t.StructureElementType.CHANNEL,
@@ -82,32 +113,11 @@ class ParseExample(WebhookJob[ParsePayload]):
              )
          ]

-         user_id: base_t.ObjectId | None = None
-         recipe_id: base_t.ObjectId | None = None
-         data = args.client.get_entities_data(
-             entity_ids=[payload.async_job_id], entity_type=entity_t.EntityType.ASYNC_JOB
-         )
-         for field_value in data.entity_details[0].field_values:
-             if field_value.field_ref_name == "core_async_job_jobData":
-                 assert isinstance(field_value.value, dict)
-                 assert isinstance(field_value.value["user_id"], int)
-                 user_id = field_value.value["user_id"]
-             if (
-                 field_value.field_ref_name
-                 == "unc_async_job_custom_parser_recipe_ids_in_view"
-             ):
-                 assert isinstance(field_value.value, list)
-                 assert isinstance(field_value.value[0], int)
-                 recipe_id = field_value.value[0]
-
-         assert user_id is not None
-         assert recipe_id is not None
-
          complete_async_parse_req = args.batch_processor.complete_async_parse(
              parsed_file_data=dummy_parsed_file_data,
              async_job_key=identifier_t.IdentifierKeyId(id=payload.async_job_id),
              upload_destination=generic_upload_t.UploadDestinationRecipe(
-                 recipe_key=identifier_t.IdentifierKeyId(id=recipe_id)
+                 recipe_key=identifier_t.IdentifierKeyId(id=recipe_id or 1)
              ),
          )

@@ -2,7 +2,12 @@ from io import BytesIO

  from uncountable.core.file_upload import DataFileUpload, FileUpload
  from uncountable.integration.job import JobArguments, RunsheetWebhookJob, register_job
- from uncountable.types import webhook_job_t
+ from uncountable.types import (
+     download_file_t,
+     entity_t,
+     identifier_t,
+     webhook_job_t,
+ )


  @register_job
@@ -13,24 +18,22 @@ class StandardRunsheetGenerator(RunsheetWebhookJob):
          args: JobArguments,
          payload: webhook_job_t.RunsheetWebhookPayload,
      ) -> FileUpload:
-         entities = payload.entities
-         args.logger.log_info(f"Generating runsheet for {len(entities)} entities")
-
-         content = []
-         content.append("STANDARD LAB RUNSHEET\n")
-         content.append("=" * 30 + "\n\n")
-
-         for entity in entities:
-             content.append(f"Type: {entity.type}\n")
-             content.append(f"ID: {entity.id}\n")
-
-             if hasattr(entity, "field_values") and entity.field_values:
-                 content.append("Field Values:\n")
-                 for field in entity.field_values:
-                     content.append(f" - {field.name}: {field.value}\n")
-
-             content.append("\n")
-
-         runsheet_data = "".join(content).encode("utf-8")
-
-         return DataFileUpload(name="lab_runsheet.txt", data=BytesIO(runsheet_data))
+         args.logger.log_info("Retrieving pre-exported runsheet file from async job")
+
+         file_query = download_file_t.FileDownloadQueryEntityField(
+             entity=entity_t.EntityIdentifier(
+                 type=entity_t.EntityType.ASYNC_JOB,
+                 identifier_key=identifier_t.IdentifierKeyId(id=payload.async_job_id),
+             ),
+             field_key=identifier_t.IdentifierKeyRefName(
+                 ref_name="unc_async_job_export_runsheet_recipe_export"
+             ),
+         )
+
+         downloaded_files = args.client.download_files(file_query=file_query)
+
+         file_data = downloaded_files[0].data.read()
+         return DataFileUpload(
+             data=BytesIO(file_data),
+             name=downloaded_files[0].name,
+         )
@@ -168,6 +168,23 @@ def _invoke_membership_parser(
      raise ValueError(f"Expected value from {expected_values} but got value {value}")


+ # Uses `is` to compare
+ def _build_identity_parser(
+     identity_value: T,
+ ) -> ParserFunction[T]:
+     def parse(value: typing.Any) -> T:
+         if value is identity_value:
+             return identity_value
+         raise ValueError(
+             f"Expected value {identity_value} (type: {type(identity_value)}) but got value {value} (type: {type(value)})"
+         )
+
+     return parse
+
+
+ NONE_IDENTITY_PARSER = _build_identity_parser(None)
+
+
  def _build_parser_discriminated_union(
      context: ParserContext,
      discriminator_raw: str,
@@ -243,8 +260,10 @@ def _build_parser_inner(
          for field_name, field_parser in field_parsers
      })

+     # IMPROVE: unclear why we need == here
      if parsed_type == type(None): # noqa: E721
-         return lambda value: _invoke_membership_parser({None}, value)
+         # Need to convince type checker that parsed_type is type(None)
+         return typing.cast(ParserFunction[T], NONE_IDENTITY_PARSER)

      origin = typing.get_origin(parsed_type)
      if origin is tuple:
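For context on the change above, here is a standalone sketch (not the SDK's private helper itself) of the behaviour the new identity parser introduces: a value is accepted only when it is the exact same object as the configured sentinel, so parsing type(None) no longer goes through the membership-set path.

    import typing

    T = typing.TypeVar("T")

    def build_identity_parser(identity_value: T) -> typing.Callable[[typing.Any], T]:
        # Mirrors the private _build_identity_parser added above; compares with
        # `is`, so only the exact sentinel object (e.g. None) is accepted.
        def parse(value: typing.Any) -> T:
            if value is identity_value:
                return identity_value
            raise ValueError(
                f"Expected value {identity_value} (type: {type(identity_value)}) "
                f"but got value {value} (type: {type(value)})"
            )

        return parse

    none_parser = build_identity_parser(None)
    assert none_parser(None) is None
    # none_parser(0) raises ValueError, because 0 is not the None singleton.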
@@ -29,3 +29,6 @@ export const IOJsonValue: IO.Type<JsonValue> = IO.recursion('JsonValue', () =>
  export interface nominal<T> {
      "nominal structural brand": T
  }
+
+ // Ids matching a strict integer number are converted to integers
+ export const ID_REGEX = /-?[1-9][0-9]{0,20}/
@@ -2,6 +2,7 @@
  # ruff: noqa: E402 Q003
  # fmt: off
  # isort: skip_file
+ from .api.notebooks import add_notebook_content as add_notebook_content_t
  from .api.recipes import add_recipe_to_project as add_recipe_to_project_t
  from .api.recipes import add_time_series_data as add_time_series_data_t
  from .api.recipes import archive_recipes as archive_recipes_t
@@ -73,6 +74,7 @@ from . import job_definition_t as job_definition_t
  from .api.entity import list_aggregate as list_aggregate_t
  from .api.entity import list_entities as list_entities_t
  from .api.id_source import list_id_source as list_id_source_t
+ from . import listing_t as listing_t
  from .api.entity import lock_entity as lock_entity_t
  from .api.recipes import lock_recipes as lock_recipes_t
  from .api.entity import lookup_entity as lookup_entity_t
@@ -117,6 +119,7 @@ from .api.recipes import set_recipe_tags as set_recipe_tags_t
  from .api.recipes import set_recipe_total as set_recipe_total_t
  from .api.entity import set_values as set_values_t
  from . import sockets_t as sockets_t
+ from . import structured_filters_t as structured_filters_t
  from .api.entity import transition_entity_phase as transition_entity_phase_t
  from .api.recipes import unarchive_recipes as unarchive_recipes_t
  from . import units_t as units_t
@@ -132,6 +135,7 @@ from . import workflows_t as workflows_t


  __all__: list[str] = [
+     "add_notebook_content_t",
      "add_recipe_to_project_t",
      "add_time_series_data_t",
      "archive_recipes_t",
@@ -203,6 +207,7 @@ __all__: list[str] = [
      "list_aggregate_t",
      "list_entities_t",
      "list_id_source_t",
+     "listing_t",
      "lock_entity_t",
      "lock_recipes_t",
      "lookup_entity_t",
@@ -247,6 +252,7 @@ __all__: list[str] = [
      "set_recipe_total_t",
      "set_values_t",
      "sockets_t",
+     "structured_filters_t",
      "transition_entity_phase_t",
      "unarchive_recipes_t",
      "units_t",
@@ -0,0 +1 @@
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -0,0 +1,119 @@
+ # DO NOT MODIFY -- This file is generated by type_spec
+ # ruff: noqa: E402 Q003
+ # fmt: off
+ # isort: skip_file
+ from __future__ import annotations
+ import typing # noqa: F401
+ import datetime # noqa: F401
+ from decimal import Decimal # noqa: F401
+ from enum import StrEnum
+ import dataclasses
+ from pkgs.serialization import serial_class
+ from pkgs.serialization import serial_union_annotation
+ from pkgs.serialization import serial_string_enum
+ from ... import base_t
+ from ... import identifier_t
+
+ __all__: list[str] = [
+     "Arguments",
+     "Data",
+     "ENDPOINT_METHOD",
+     "ENDPOINT_PATH",
+     "InputContent",
+     "InputContentTextMarkdown",
+     "InputContentType",
+     "Location",
+     "LocationAppendToFirstPage",
+     "LocationType",
+ ]
+
+ ENDPOINT_METHOD = "POST"
+ ENDPOINT_PATH = "api/external/notebooks/add_notebook_content"
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_string_enum(
+     labels={
+         "append_to_first_page": "Append To First Page",
+     },
+ )
+ class LocationType(StrEnum):
+     APPEND_TO_FIRST_PAGE = "append_to_first_page"
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.api.notebooks.add_notebook_content.LocationAppendToFirstPage",
+     parse_require={"type"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+ class LocationAppendToFirstPage:
+     type: typing.Literal[LocationType.APPEND_TO_FIRST_PAGE] = LocationType.APPEND_TO_FIRST_PAGE
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ Location = typing.Annotated[
+     typing.Union[LocationAppendToFirstPage],
+     serial_union_annotation(
+         named_type_path="sdk.api.notebooks.add_notebook_content.Location",
+         discriminator="type",
+         discriminator_map={
+             "append_to_first_page": LocationAppendToFirstPage,
+         },
+     ),
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_string_enum(
+     labels={
+         "text_markdown": "Text Markdown",
+     },
+ )
+ class InputContentType(StrEnum):
+     TEXT_MARKDOWN = "text_markdown"
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.api.notebooks.add_notebook_content.InputContentTextMarkdown",
+     parse_require={"type"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+ class InputContentTextMarkdown:
+     type: typing.Literal[InputContentType.TEXT_MARKDOWN] = InputContentType.TEXT_MARKDOWN
+     text_markdown: str
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ InputContent = typing.Annotated[
+     typing.Union[InputContentTextMarkdown],
+     serial_union_annotation(
+         named_type_path="sdk.api.notebooks.add_notebook_content.InputContent",
+         discriminator="type",
+         discriminator_map={
+             "text_markdown": InputContentTextMarkdown,
+         },
+     ),
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.api.notebooks.add_notebook_content.Arguments",
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+ class Arguments:
+     notebook_key: identifier_t.IdentifierKey
+     location: Location
+     contents: list[InputContent]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.api.notebooks.add_notebook_content.Data",
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
+ class Data:
+     pass
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -8,6 +8,7 @@ import typing # noqa: F401
  import datetime # noqa: F401
  from decimal import Decimal # noqa: F401
  from pkgs.serialization import OpaqueKey
+ import uncountable.types.api.notebooks.add_notebook_content as add_notebook_content_t
  import uncountable.types.api.recipes.add_recipe_to_project as add_recipe_to_project_t
  import uncountable.types.api.recipes.add_time_series_data as add_time_series_data_t
  import uncountable.types.api.recipes.archive_recipes as archive_recipes_t
@@ -124,6 +125,33 @@ class ClientMethods(ABC):
      def do_request(self, *, api_request: APIRequest, return_type: type[DT]) -> DT:
          ...

+     def add_notebook_content(
+         self,
+         *,
+         notebook_key: identifier_t.IdentifierKey,
+         location: add_notebook_content_t.Location,
+         contents: list[add_notebook_content_t.InputContent],
+         _request_options: client_config_t.RequestOptions | None = None,
+     ) -> add_notebook_content_t.Data:
+         """Adds content to an existing notebook
+
+         :param notebook_key: Key of the notebook to add the content to
+         :param location: The location to add the content to in the notebook
+         :param contents: List of input content to add to the notebook
+         """
+         args = add_notebook_content_t.Arguments(
+             notebook_key=notebook_key,
+             location=location,
+             contents=contents,
+         )
+         api_request = APIRequest(
+             method=add_notebook_content_t.ENDPOINT_METHOD,
+             endpoint=add_notebook_content_t.ENDPOINT_PATH,
+             args=args,
+             request_options=_request_options,
+         )
+         return self.do_request(api_request=api_request, return_type=add_notebook_content_t.Data)
+
      def add_recipe_to_project(
          self,
          *,
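A minimal usage sketch for the new add_notebook_content wrapper, assuming an authenticated SDK client such as args.client inside an integration job; the notebook id and markdown text below are placeholders.

    from uncountable.types import add_notebook_content_t, identifier_t

    def append_markdown_note(client, notebook_id: int) -> None:
        # `client` is assumed to expose the generated ClientMethods shown above.
        client.add_notebook_content(
            notebook_key=identifier_t.IdentifierKeyId(id=notebook_id),
            location=add_notebook_content_t.LocationAppendToFirstPage(),
            contents=[
                add_notebook_content_t.InputContentTextMarkdown(
                    text_markdown="Appended by an integration job."
                )
            ],
        )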
@@ -26,6 +26,8 @@ class IntegrationEnvironment(StrEnum):
      DEV = "dev"
      TEST = "test"
      PROD = "prod"
+     STG = "stg"
+     SBX = "sbx"


  # DO NOT MODIFY -- This file is generated by type_spec
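A quick check of the two new members, assuming this hunk is uncountable/types/integration_server_t.py (the module whose hash changes in the RECORD below); they parse from their string values like the existing ones.

    from uncountable.types.integration_server_t import IntegrationEnvironment

    assert IntegrationEnvironment("stg") is IntegrationEnvironment.STG
    assert IntegrationEnvironment("sbx") is IntegrationEnvironment.SBX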
@@ -0,0 +1,9 @@
+ # ruff: noqa: E402 Q003
+ # fmt: off
+ # isort: skip_file
+ # DO NOT MODIFY -- This file is generated by type_spec
+ # Kept only for SDK backwards compatibility
+ from .listing_t import ColumnType as ColumnType
+ from .listing_t import ColumnIdentifierEntityRefName as ColumnIdentifierEntityRefName
+ from .listing_t import ColumnIdentifier as ColumnIdentifier
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -0,0 +1,51 @@
+ # DO NOT MODIFY -- This file is generated by type_spec
+ # ruff: noqa: E402 Q003
+ # fmt: off
+ # isort: skip_file
+ from __future__ import annotations
+ import typing # noqa: F401
+ import datetime # noqa: F401
+ from decimal import Decimal # noqa: F401
+ from enum import StrEnum
+ import dataclasses
+ from pkgs.serialization import serial_class
+ from pkgs.serialization import serial_union_annotation
+ from . import base_t
+ from . import entity_t
+
+ __all__: list[str] = [
+     "ColumnIdentifier",
+     "ColumnIdentifierEntityRefName",
+     "ColumnType",
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ class ColumnType(StrEnum):
+     REF_NAME = "ref_name"
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.listing.ColumnIdentifierEntityRefName",
+     parse_require={"type"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class ColumnIdentifierEntityRefName:
+     type: typing.Literal[ColumnType.REF_NAME] = ColumnType.REF_NAME
+     entity_type: entity_t.EntityType
+     ref_name: str
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ ColumnIdentifier = typing.Annotated[
+     typing.Union[ColumnIdentifierEntityRefName],
+     serial_union_annotation(
+         named_type_path="sdk.listing.ColumnIdentifier",
+         discriminator="type",
+         discriminator_map={
+             "ref_name": ColumnIdentifierEntityRefName,
+         },
+     ),
+ ]
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -0,0 +1,21 @@
+ # ruff: noqa: E402 Q003
+ # fmt: off
+ # isort: skip_file
+ # DO NOT MODIFY -- This file is generated by type_spec
+ # Kept only for SDK backwards compatibility
+ from .structured_filters_t import FilterRelation as FilterRelation
+ from .structured_filters_t import FilterSpecBase as FilterSpecBase
+ from .structured_filters_t import FilterScalarType as FilterScalarType
+ from .structured_filters_t import FilterIdType as FilterIdType
+ from .structured_filters_t import StringFilterValue as StringFilterValue
+ from .structured_filters_t import FilterSpecEquals as FilterSpecEquals
+ from .structured_filters_t import FilterSpecInclude as FilterSpecInclude
+ from .structured_filters_t import FilterSpecIStrContains as FilterSpecIStrContains
+ from .structured_filters_t import FilterSpecIStrStartsWith as FilterSpecIStrStartsWith
+ from .structured_filters_t import FilterSpecExists as FilterSpecExists
+ from .structured_filters_t import FilterSpecGreater as FilterSpecGreater
+ from .structured_filters_t import FilterLess as FilterLess
+ from .structured_filters_t import FilterSpecGeq as FilterSpecGeq
+ from .structured_filters_t import FilterSpecLeq as FilterSpecLeq
+ from .structured_filters_t import FilterSpec as FilterSpec
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -0,0 +1,206 @@
+ # DO NOT MODIFY -- This file is generated by type_spec
+ # ruff: noqa: E402 Q003
+ # fmt: off
+ # isort: skip_file
+ from __future__ import annotations
+ import typing # noqa: F401
+ import datetime # noqa: F401
+ from decimal import Decimal # noqa: F401
+ from enum import StrEnum
+ import dataclasses
+ from pkgs.serialization import serial_class
+ from pkgs.serialization import serial_union_annotation
+ from pkgs.serialization import serial_alias_annotation
+ from . import base_t
+ from . import listing_t
+
+ __all__: list[str] = [
+     "FilterIdType",
+     "FilterLess",
+     "FilterRelation",
+     "FilterScalarType",
+     "FilterSpec",
+     "FilterSpecBase",
+     "FilterSpecEquals",
+     "FilterSpecExists",
+     "FilterSpecGeq",
+     "FilterSpecGreater",
+     "FilterSpecIStrContains",
+     "FilterSpecIStrStartsWith",
+     "FilterSpecInclude",
+     "FilterSpecLeq",
+     "StringFilterValue",
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ class FilterRelation(StrEnum):
+     EQUALS = "equals"
+     GREATER = "greater"
+     GEQ = "geq"
+     LESS = "less"
+     LEQ = "leq"
+     ISTR_CONTAINS = "istr_contains"
+     ISTR_STARTS_WITH = "istr_starts_with"
+     INCLUDE = "include"
+     EXISTS = "exists"
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecBase",
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecBase:
+     relation: FilterRelation
+     column: listing_t.ColumnIdentifier
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ FilterScalarType = typing.Annotated[
+     str | int | Decimal,
+     serial_alias_annotation(
+         named_type_path="sdk.structured_filters.FilterScalarType",
+     ),
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ FilterIdType = typing.Annotated[
+     base_t.ObjectId | str,
+     serial_alias_annotation(
+         named_type_path="sdk.structured_filters.FilterIdType",
+     ),
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ StringFilterValue = typing.Annotated[
+     typing.Union[str],
+     serial_alias_annotation(
+         named_type_path="sdk.structured_filters.StringFilterValue",
+     ),
+ ]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecEquals",
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecEquals(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.EQUALS] = FilterRelation.EQUALS
+     value: FilterScalarType
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecInclude",
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecInclude(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.INCLUDE] = FilterRelation.INCLUDE
+     value: FilterIdType | tuple[FilterIdType, ...]
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecIStrContains",
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecIStrContains(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.ISTR_CONTAINS] = FilterRelation.ISTR_CONTAINS
+     value: str
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecIStrStartsWith",
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecIStrStartsWith(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.ISTR_STARTS_WITH] = FilterRelation.ISTR_STARTS_WITH
+     value: str
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecExists",
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecExists(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.EXISTS] = FilterRelation.EXISTS
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecGreater",
+     to_string_values={"value"},
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecGreater(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.GREATER] = FilterRelation.GREATER
+     value: Decimal
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterLess",
+     to_string_values={"value"},
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterLess(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.LESS] = FilterRelation.LESS
+     value: Decimal
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecGeq",
+     to_string_values={"value"},
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecGeq(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.GEQ] = FilterRelation.GEQ
+     value: Decimal
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @serial_class(
+     named_type_path="sdk.structured_filters.FilterSpecLeq",
+     to_string_values={"value"},
+     parse_require={"relation"},
+ )
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True, frozen=True, eq=True) # type: ignore[literal-required]
+ class FilterSpecLeq(FilterSpecBase):
+     relation: typing.Literal[FilterRelation.LEQ] = FilterRelation.LEQ
+     value: Decimal
+
+
+ # DO NOT MODIFY -- This file is generated by type_spec
+ FilterSpec = typing.Annotated[
+     FilterSpecEquals | FilterSpecGreater | FilterSpecGeq | FilterSpecLeq | FilterSpecIStrContains | FilterSpecIStrStartsWith | FilterSpecInclude | FilterSpecExists,
+     serial_union_annotation(
+         named_type_path="sdk.structured_filters.FilterSpec",
+         discriminator="relation",
+         discriminator_map={
+             "equals": FilterSpecEquals,
+             "greater": FilterSpecGreater,
+             "geq": FilterSpecGeq,
+             "leq": FilterSpecLeq,
+             "istr_contains": FilterSpecIStrContains,
+             "istr_starts_with": FilterSpecIStrStartsWith,
+             "include": FilterSpecInclude,
+             "exists": FilterSpecExists,
+         },
+     ),
+ ]
+ # DO NOT MODIFY -- This file is generated by type_spec
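A minimal sketch of constructing one of the new structured filter specs via the re-exports added to uncountable.types above; it assumes entity_t is also re-exported from uncountable.types (as used in the parse example earlier). The entity type, ref_name, and threshold are placeholders, and how a FilterSpec is then passed to a listing endpoint is not shown in this diff.

    from decimal import Decimal

    from uncountable.types import entity_t, listing_t, structured_filters_t

    # Filter rows whose column value is >= 42, addressing the column by entity
    # ref name (the only ColumnIdentifier variant defined above).
    column = listing_t.ColumnIdentifierEntityRefName(
        entity_type=entity_t.EntityType.ASYNC_JOB,  # placeholder entity type
        ref_name="core_async_job_jobData",  # placeholder ref name
    )
    spec: structured_filters_t.FilterSpec = structured_filters_t.FilterSpecGeq(
        column=column,
        value=Decimal("42"),
    )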
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: UncountablePythonSDK
- Version: 0.0.129
+ Version: 0.0.130
  Summary: Uncountable SDK
  Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
  Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git
@@ -2,7 +2,7 @@ docs/.gitignore,sha256=_ebkZUcwfvfnGEJ95rfj1lxoBNd6EE9ZvtOc7FsbfFE,7
  docs/conf.py,sha256=Ky-_Y76T7pwN2aBG-dSF79Av70e7ASgcOXEdQ1qyor4,3542
  docs/index.md,sha256=g4Yi5831fEkywYkkcFohYLkKzSI91SOZF7DxKsm9zgI,3193
  docs/justfile,sha256=WymCEQ6W2A8Ak79iUPmecmuaUNN2htb7STUrz5K7ELE,273
- docs/requirements.txt,sha256=AAVxGQUFUCjyRe2gQSCD8Nezn42K_TVeEfrpIKJwmD0,171
+ docs/requirements.txt,sha256=VCcZc6d9gbj4RxuqEd4f8JzvOp03-hN6MPVrIJuwOxM,171
  docs/integration_examples/create_ingredient.md,sha256=bzTQ943YhINxa3HQylEA26rbAsjr6HvvN_HkVkrzUeA,1547
  docs/integration_examples/create_output.md,sha256=aDn2TjzKgY-HnxnvgsZS578cvajmHpF1y2HKkHfdtd4,2104
  docs/integration_examples/index.md,sha256=lVP6k79rGgdWPfEKM8oJvxeJsBKlpRJaZfrqn9lkiBc,73
@@ -32,9 +32,9 @@ examples/integration-server/jobs/materials_auto/concurrent_cron.py,sha256=xsK3H9
  examples/integration-server/jobs/materials_auto/example_cron.py,sha256=spUMiiTEFaepbVXecjD_4aEEfqEtZGGZuWTKs9J6Xcw,736
  examples/integration-server/jobs/materials_auto/example_http.py,sha256=eIL46ElWo8SKY7W5JWWkwZk6Qo7KRd9EJBxfy7YQ_sE,1429
  examples/integration-server/jobs/materials_auto/example_instrument.py,sha256=I79RLDW0m1N-vDkanBAeg2LzDlDZkk4zN_zNbFmgYvI,3434
- examples/integration-server/jobs/materials_auto/example_parse.py,sha256=Io8hMVmg9dVuJ3uynWFqR5oBnf0cL4BGqftJP9hzRGY,5342
+ examples/integration-server/jobs/materials_auto/example_parse.py,sha256=yW2iAN1AMf9qdAtR0DChWFIMYuet8d7K6-mQvMDtuvQ,5888
  examples/integration-server/jobs/materials_auto/example_predictions.py,sha256=5fO4rqRa80_968A1uVZn2TlMOUib54A8rumGW02sIMM,2112
- examples/integration-server/jobs/materials_auto/example_runsheet_wh.py,sha256=Mhh3PM1j_M35UnMllYNLbTvpMoIcoMOoHIPDaabN_hs,1235
+ examples/integration-server/jobs/materials_auto/example_runsheet_wh.py,sha256=augHaaPwA3o6GVj3KDg77gyIif4VJfh8PQF2eo8vNMk,1264
  examples/integration-server/jobs/materials_auto/example_wh.py,sha256=PN-skP27yJwDZboWk5g5EZEc3AKfVayQLfnopjsDKJc,659
  examples/integration-server/jobs/materials_auto/profile.yaml,sha256=ywDrDRAyqiUdj_HvosNP5bXBL8mCWsvdJ1eYQd-mGYo,2369
  pkgs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -42,7 +42,7 @@ pkgs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  pkgs/argument_parser/__init__.py,sha256=EG3pwLEHTp-Qltd3lRnO4K22RiVrasePzKPDOfTPxFY,924
  pkgs/argument_parser/_is_enum.py,sha256=Gw6jJa8nBwYGqXwwCZbSnWL8Rvr5alkg5lSVAqXtOZM,257
  pkgs/argument_parser/_is_namedtuple.py,sha256=InCP2orqKbUYc4JsmE7ccri2EQPvLZeRijYPGqVSeXY,323
- pkgs/argument_parser/argument_parser.py,sha256=AjmLCNHcGMyXLojSpuKSYvIYE3u8tbg8rjv-yrhosQs,21077
+ pkgs/argument_parser/argument_parser.py,sha256=0ykZ4cCLMyTk_8lxDUd_m92eYL8JmjDQaVB8rh9N_ZQ,21628
  pkgs/argument_parser/case_convert.py,sha256=NuJLJUJRbyVb6_Slen4uqaStEHbcOS1d-hBBfDrrw-c,605
  pkgs/filesystem_utils/__init__.py,sha256=2a0d2rEPlEEYwhm3Wckny4VCp4ZS7JtYSXmwdwNCRjo,1332
  pkgs/filesystem_utils/_blob_session.py,sha256=4GicmwgGHVcqO8pOTu-EJakKMb1-IsxT9QnVi2D0oKU,5143
@@ -89,7 +89,7 @@ pkgs/type_spec/actions_registry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
  pkgs/type_spec/actions_registry/__main__.py,sha256=SRw6kIhHTW7W2wGijYq66JARzoc4KpPmbLqwvnETyTE,4277
  pkgs/type_spec/actions_registry/emit_typescript.py,sha256=W1lI36ITdJ7MBf37wlTB7H3X9Ljt217vIGMv4e3fxfY,5986
  pkgs/type_spec/parts/base.py.prepart,sha256=Xy8my5ol_Iu0hpQpvgsmqGLkGcMsLSg-cgjm4Yp-QI4,2369
- pkgs/type_spec/parts/base.ts.prepart,sha256=42-1_N_K04t4c6pE62V4wBw3bR5bgPxhmXUk__A7gAs,1002
+ pkgs/type_spec/parts/base.ts.prepart,sha256=eD7-9cCsHutDQ9uB6SPVLSZcRNFfEIggpd0QSqXew8A,1114
  pkgs/type_spec/type_info/__main__.py,sha256=TLNvCHGcmaj_8Sj5bAQNpuNaaw2dpDzoFDWZds0V4Qo,1002
  pkgs/type_spec/type_info/emit_type_info.py,sha256=6uwoWGI7KiM3n4STwUGH17gXRF0WfzjRNPFqFgH2A4o,16576
  pkgs/type_spec/ui_entry_actions/__init__.py,sha256=WiHE_BexOEZWbkkbD7EnFau1aMLNmfgQywG9PTQNCkw,135
@@ -148,7 +148,7 @@ uncountable/integration/queue_runner/datastore/model.py,sha256=YPqlULU7FxuwjmhXG
  uncountable/integration/secret_retrieval/__init__.py,sha256=3QXVj35w8rRMxVvmmsViFYDi3lcb3g70incfalOEm6o,87
  uncountable/integration/secret_retrieval/retrieve_secret.py,sha256=LBEf18KHtXZxg-ZZ80stJ1vW39AWf0CQllP6pNu3Eq8,2994
  uncountable/integration/webhook_server/entrypoint.py,sha256=RQndrVCKdaVBk-xJ592eGqeN-O0IOM7flXDGoJ2HXsc,3505
- uncountable/types/__init__.py,sha256=IjUYV67XAOWoV9IO_yWznP3a_T1NKu2hYFjJbDj_wRs,11142
+ uncountable/types/__init__.py,sha256=PGC73-HZCLUCNJq1vUQ05pNxndCb3eT9mSKEPF0Hp6w,11387
  uncountable/types/async_batch.py,sha256=yCCWrrLQfxXVqZp-KskxLBNkNmuELdz4PJjx8ULppgs,662
  uncountable/types/async_batch_processor.py,sha256=-qLe7fBTHr3hHrAs96gMaUYJ3mkrsKkXK9Qm68X96fc,29629
  uncountable/types/async_batch_t.py,sha256=E3Lo2_wot1lm0oblkLc9Uf5lmMt_zlw0VQ1yA507z8g,3945
@@ -162,7 +162,7 @@ uncountable/types/calculations.py,sha256=fApOFpgBemt_t7IVneVR0VdI3X5EOxiG6Xhzr6R
  uncountable/types/calculations_t.py,sha256=pl-lhjyDQuj11Sf9g1-0BsSkN7Ez8UxDp8-KMQ_3enM,709
  uncountable/types/chemical_structure.py,sha256=ujyragaD26-QG5jgKnWhO7TN3N1V9b_04T2WhqNYxxo,281
  uncountable/types/chemical_structure_t.py,sha256=VFFyits_vx4t5L2euu_qFiSpsGJjURkDPr3ISnr3nPc,855
- uncountable/types/client_base.py,sha256=5mIKtopIqlHRTRPKKzA_Ughphi7jXmcWR6SLX54zMY0,92484
+ uncountable/types/client_base.py,sha256=wpWCkUDl2zUp1TdL-qMn70S_CfIal5FXNQ6g5D5jo5c,93664
  uncountable/types/client_config.py,sha256=xTQfTRTwnAc8ArvOuQdkKGy1uvGcXgQ_cgqsxhQLFgU,342
  uncountable/types/client_config_t.py,sha256=8JoXNcyYT26uJSs5qP3L6yaPgkn23y-o0NhLFU3ilbc,1089
  uncountable/types/curves.py,sha256=QyEyC20jsG-LGKVx6miiF-w70vKMwNkILFBDIJ5Ok9g,345
@@ -190,13 +190,15 @@ uncountable/types/input_attributes_t.py,sha256=8NJQeq_8MkUNn5BlDx34opp3eeZl8Sw1n
  uncountable/types/inputs.py,sha256=3ghg39_oiLF5HqWF_wNwYv4HMR1lrKLfeRLn5ptIGw4,446
  uncountable/types/inputs_t.py,sha256=eSVA7LNgLI3ja83GJm4sA9KhPICVV4zj2Dd4OhbuY9g,2158
  uncountable/types/integration_server.py,sha256=VonA8h8TGnVBiss5W8-K82lA01JQa7TLk0ubFo8iiBQ,364
- uncountable/types/integration_server_t.py,sha256=pgtoyuW6QvGRawidJZFB-WnOdwCE4OIoJAvGfussZKU,1304
+ uncountable/types/integration_server_t.py,sha256=9nEr5x24bf_g7w3HrAse-o-2SClefNOJdjSFbXiA6iQ,1336
  uncountable/types/integration_session.py,sha256=MVTtZa04INF4L8PxPjqz3l1Lse6Hze3IlYPs2bRSqE0,548
  uncountable/types/integration_session_t.py,sha256=HEfmPB6pt9GpgdaGKG0kgsJwq6W0Lid9Jy7Dzghhaic,1920
  uncountable/types/integrations.py,sha256=0fOhtbLIOl9w1GP9J3PTagRU8mjOKV48JNLLH3SJQP0,472
  uncountable/types/integrations_t.py,sha256=ihyhuMDKtJarQ19OppS0fYpJUYd8o5-w6YCDE440O-w,1871
  uncountable/types/job_definition.py,sha256=hYp5jPYLLYm3NKEqzQrQfXL0Ms5KgEQGTON13YWSPYk,1804
  uncountable/types/job_definition_t.py,sha256=E4IQvcYF3VDHbwRlvopy8y-HNAyEMZpwy7jkmp74fgQ,9563
+ uncountable/types/listing.py,sha256=5Z3WnK-jsh8yEjDIMsurd5REEXCEaDofDM1i3kBWbbM,402
+ uncountable/types/listing_t.py,sha256=gMFXWwSgvL2238aqr4BIq5iDTQNqttveqDQT1zxBMl0,1505
  uncountable/types/notifications.py,sha256=ZGr1ULMG3cPMED83NbMjrjmgVzCeOTS1Tc-pFTNuY4Y,600
  uncountable/types/notifications_t.py,sha256=qS2mhCkYHFPe2XtBespABJ3dNvisxrmIw_r8ZlUCh_g,2444
  uncountable/types/outputs.py,sha256=I6zP2WHXg_jXgMqmuEJuJOlsjKjQGHjfs1JOwW9YxBM,260
@@ -233,6 +235,8 @@ uncountable/types/secret_retrieval.py,sha256=poY_nuZBIjNu64Wa0x5Ytsmh3OdAxps2kzu
  uncountable/types/secret_retrieval_t.py,sha256=igWrOW_CwRvAE7BHIHVJojBwgcAG05Pqup8D45Sb0F4,2342
  uncountable/types/sockets.py,sha256=OogyQ-pLyhJkV6JrBSLTOz9v6cDViYY5QM1ScSXPU3U,1208
  uncountable/types/sockets_t.py,sha256=s--y5nbN4uHA2HVKW6rOz3HwIMk3MT2VKGXCA7reXb4,5608
+ uncountable/types/structured_filters.py,sha256=OwinQESbKhlMtJvF-g6HKned1jcgOqkFp7jfZaq0T20,1255
+ uncountable/types/structured_filters_t.py,sha256=FKOsExQP2L5-LXtGb24JvSvtMnu56QUgcl4CcYyZTmU,6978
  uncountable/types/units.py,sha256=yxuddayiE8cnzrjQiIsURisWc-Vm1F37uyS3fjM--Ao,254
  uncountable/types/units_t.py,sha256=d62vY2ETqIgMHASw_IcREwDDqKAqI-vPnoBOqzMt4-o,704
  uncountable/types/uploader.py,sha256=odT7wkBfXUf1MoFy6W5JzZ-WY8JX0vO6odGOS_C2Voo,1222
@@ -293,6 +297,8 @@ uncountable/types/api/integrations/push_notification.py,sha256=_ycqsGSd7pdt480JW
  uncountable/types/api/integrations/register_sockets_token.py,sha256=OOtQKY7B3T5tpz2WCtvMm1jOLNM5dXuSqpsY5FJ2IXk,1218
  uncountable/types/api/material_families/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
  uncountable/types/api/material_families/update_entity_material_families.py,sha256=qWJgAKH0MayadXvxckePCdo9yd34QXOmGZ7cKz5VLNo,1761
+ uncountable/types/api/notebooks/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
+ uncountable/types/api/notebooks/add_notebook_content.py,sha256=ruLhEYs5ScEOG2cIMK44uMbsds3lV-OwVCzs1mfgnVE,3536
  uncountable/types/api/outputs/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
  uncountable/types/api/outputs/get_output_data.py,sha256=luGoQZzbZsGIzo2dXMD5f6rDlXEgBjnnUU9n5T-VL9Q,3069
  uncountable/types/api/outputs/get_output_names.py,sha256=myxLS1YedzWlKs3st64jmM9XMUphrUltxKISBz4pVSo,1539
@@ -347,7 +353,7 @@ uncountable/types/api/uploader/complete_async_parse.py,sha256=nYYBzjT_j4L7_1Ge-i
  uncountable/types/api/uploader/invoke_uploader.py,sha256=Bj7Dq4A90k00suacwk3bLA_dCb2aovS1kAbVam2AQnM,1395
  uncountable/types/api/user/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
  uncountable/types/api/user/get_current_user_info.py,sha256=Avqi_RXtRgbefrT_dwJ9MrO6eDNSSa_Nu650FSuESlg,1109
- uncountablepythonsdk-0.0.129.dist-info/METADATA,sha256=VZrc3etMCiwu7MqLdACdMk-F5gnPLloOIjMySaqgj6Y,2174
- uncountablepythonsdk-0.0.129.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- uncountablepythonsdk-0.0.129.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
- uncountablepythonsdk-0.0.129.dist-info/RECORD,,
+ uncountablepythonsdk-0.0.130.dist-info/METADATA,sha256=7IkKvjBoMGTiX5MXInptWwYyRmjeoDaS7atlR7FEMpc,2174
+ uncountablepythonsdk-0.0.130.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ uncountablepythonsdk-0.0.130.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
+ uncountablepythonsdk-0.0.130.dist-info/RECORD,,