UncountablePythonSDK 0.0.89-py3-none-any.whl → 0.0.91-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published in their respective public registries.

Potentially problematic release. This version of UncountablePythonSDK might be problematic.
Files changed (35)
  1. {UncountablePythonSDK-0.0.89.dist-info → UncountablePythonSDK-0.0.91.dist-info}/METADATA +2 -2
  2. {UncountablePythonSDK-0.0.89.dist-info → UncountablePythonSDK-0.0.91.dist-info}/RECORD +35 -28
  3. {UncountablePythonSDK-0.0.89.dist-info → UncountablePythonSDK-0.0.91.dist-info}/WHEEL +1 -1
  4. docs/requirements.txt +1 -1
  5. examples/download_files.py +26 -0
  6. pkgs/argument_parser/argument_parser.py +57 -25
  7. pkgs/serialization/__init__.py +7 -2
  8. pkgs/serialization/annotation.py +64 -0
  9. pkgs/serialization/serial_alias.py +47 -0
  10. pkgs/serialization/serial_class.py +26 -32
  11. pkgs/serialization/serial_generic.py +16 -0
  12. pkgs/serialization/serial_union.py +9 -9
  13. pkgs/serialization_util/__init__.py +4 -0
  14. pkgs/serialization_util/dataclasses.py +14 -0
  15. pkgs/serialization_util/serialization_helpers.py +11 -3
  16. pkgs/type_spec/builder.py +41 -5
  17. pkgs/type_spec/emit_python.py +34 -6
  18. pkgs/type_spec/type_info/emit_type_info.py +7 -2
  19. pkgs/type_spec/value_spec/convert_type.py +5 -0
  20. uncountable/core/client.py +58 -1
  21. uncountable/core/environment.py +2 -2
  22. uncountable/integration/scheduler.py +4 -4
  23. uncountable/types/__init__.py +2 -0
  24. uncountable/types/api/chemical/convert_chemical_formats.py +7 -1
  25. uncountable/types/api/entity/transition_entity_phase.py +7 -1
  26. uncountable/types/api/files/__init__.py +1 -0
  27. uncountable/types/api/files/download_file.py +77 -0
  28. uncountable/types/api/permissions/set_core_permissions.py +7 -1
  29. uncountable/types/api/recipes/set_recipe_tags.py +7 -1
  30. uncountable/types/entity_t.py +14 -3
  31. uncountable/types/id_source_t.py +7 -1
  32. uncountable/types/identifier_t.py +7 -1
  33. uncountable/types/overrides_t.py +7 -1
  34. uncountable/types/recipe_identifiers_t.py +13 -2
  35. {UncountablePythonSDK-0.0.89.dist-info → UncountablePythonSDK-0.0.91.dist-info}/top_level.txt +0 -0
pkgs/serialization/serial_class.py CHANGED
@@ -5,20 +5,18 @@ from collections.abc import Callable
 from enum import StrEnum
 from typing import Any, Optional, TypeVar, cast
 
+from .annotation import SerialBase, SerialInspector
+
 _ClassT = TypeVar("_ClassT")
 
 
-@dataclasses.dataclass
-class _SerialClassData:
+@dataclasses.dataclass(kw_only=True, frozen=True, eq=True)
+class _SerialClassData(SerialBase):
     unconverted_keys: set[str] = dataclasses.field(default_factory=set)
     unconverted_values: set[str] = dataclasses.field(default_factory=set)
     to_string_values: set[str] = dataclasses.field(default_factory=set)
     parse_require: set[str] = dataclasses.field(default_factory=set)
     named_type_path: Optional[str] = None
-    # Tracks if this data was provided as a decorator to the type.
-    # This is used to track "proper types" which are appropriate
-    # for serialization and/or dynamic discovery
-    from_decorator: bool = False
 
 
 EMPTY_SERIAL_CLASS_DATA = _SerialClassData()
@@ -31,6 +29,7 @@ def serial_class(
     to_string_values: Optional[set[str]] = None,
     parse_require: Optional[set[str]] = None,
     named_type_path: Optional[str] = None,
+    is_dynamic_allowed: bool = False,
 ) -> Callable[[_ClassT], _ClassT]:
     """
     An additional decorator to a dataclass that specifies serialization options.
@@ -63,17 +62,20 @@
             parse_require=parse_require or set(),
             named_type_path=named_type_path,
             from_decorator=True,
+            is_dynamic_allowed=is_dynamic_allowed,
         )
         return orig_class
 
     return decorate
 
 
-class SerialClassDataInspector:
+class SerialClassDataInspector(SerialInspector[_ClassT]):
     def __init__(
         self,
+        parsed_type: type[_ClassT],
         current: _SerialClassData,
     ) -> None:
+        super().__init__(parsed_type, current)
         self.current = current
 
     def has_unconverted_key(self, key: str) -> bool:
@@ -88,20 +90,8 @@ class SerialClassDataInspector:
     def has_parse_require(self, key: str) -> bool:
         return key in self.current.parse_require
 
-    @property
-    def from_decorator(self) -> bool:
-        return self.current.from_decorator
-
-    @property
-    def named_type_path(self) -> Optional[str]:
-        return self.current.named_type_path
-
-    @property
-    def is_field_proper(self) -> bool:
-        return self.current.from_decorator and self.current.named_type_path is not None
 
-
-def _get_merged_serial_class_data(type_class: type[Any]) -> _SerialClassData | None:
+def get_merged_serial_class_data(type_class: type[Any]) -> _SerialClassData | None:
     base_class_data = (
         cast(_SerialClassData, type_class.__unc_serial_data)
         if hasattr(type_class, "__unc_serial_data")
@@ -110,26 +100,30 @@ def _get_merged_serial_class_data(type_class: type[Any]) -> _SerialClassData | None:
     if base_class_data is None:
         return None
 
+    # IMPROVE: We should cache this result on the type
     if type_class.__bases__ is not None:
         for base in type_class.__bases__:
-            curr_base_class_data = _get_merged_serial_class_data(base)
+            curr_base_class_data = get_merged_serial_class_data(base)
             if curr_base_class_data is not None:
-                base_class_data.unconverted_keys |= (
-                    curr_base_class_data.unconverted_keys
-                )
-                base_class_data.unconverted_values |= (
-                    curr_base_class_data.unconverted_values
-                )
-                base_class_data.to_string_values |= (
-                    curr_base_class_data.to_string_values
+                base_class_data = dataclasses.replace(
+                    base_class_data,
+                    unconverted_keys=base_class_data.unconverted_keys
+                    | curr_base_class_data.unconverted_keys,
+                    unconverted_values=base_class_data.unconverted_values
+                    | curr_base_class_data.unconverted_values,
+                    to_string_values=base_class_data.to_string_values
+                    | curr_base_class_data.to_string_values,
+                    parse_require=base_class_data.parse_require
+                    | curr_base_class_data.parse_require,
                 )
-                base_class_data.parse_require |= curr_base_class_data.parse_require
     return base_class_data
 
 
-def get_serial_class_data(type_class: type[Any]) -> SerialClassDataInspector:
+def get_serial_class_data(
+    type_class: type[_ClassT],
+) -> SerialClassDataInspector[_ClassT]:
     return SerialClassDataInspector(
-        _get_merged_serial_class_data(type_class) or EMPTY_SERIAL_CLASS_DATA
+        type_class, get_merged_serial_class_data(type_class) or EMPTY_SERIAL_CLASS_DATA
    )
 
 
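Note: a minimal usage sketch of the reworked serial_class API, using only what the hunks above show (the new is_dynamic_allowed keyword, and get_serial_class_data now taking the class and returning a typed inspector). The example type, its fields, and the import paths are illustrative.

import dataclasses

from pkgs.serialization import serial_class
from pkgs.serialization.serial_class import get_serial_class_data


@serial_class(unconverted_keys={"raw_payload"}, is_dynamic_allowed=True)
@dataclasses.dataclass
class ExampleRecord:
    raw_payload: dict
    name: str


inspector = get_serial_class_data(ExampleRecord)
assert inspector.has_unconverted_key("raw_payload")
assert not inspector.has_parse_require("name")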
pkgs/serialization/serial_generic.py ADDED
@@ -0,0 +1,16 @@
+import typing
+
+from .annotation import SerialInspector, get_serial_annotation
+from .serial_class import get_merged_serial_class_data
+
+T = typing.TypeVar("T")
+
+
+def get_serial_data(parsed_type: type[T]) -> SerialInspector[T] | None:
+    serial = get_serial_annotation(parsed_type)
+    if serial is None:
+        serial = get_merged_serial_class_data(parsed_type)
+
+    if serial is not None:
+        return SerialInspector(parsed_type, serial)
+    return None
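Note: get_serial_data in the new serial_generic module unifies the two lookup paths, first the Annotated[...] metadata (unions and aliases), then the @serial_class decorator data, behind a single SerialInspector. A rough sketch of the intended call pattern; the import path and example type are illustrative.

import dataclasses

from pkgs.serialization import serial_class
from pkgs.serialization.serial_generic import get_serial_data


@serial_class(named_type_path="examples#Widget")  # path value is illustrative
@dataclasses.dataclass
class Widget:
    name: str


assert get_serial_data(Widget) is not None   # decorator metadata is found
assert get_serial_data(int) is None          # plain types carry no serialization metadata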
pkgs/serialization/serial_union.py CHANGED
@@ -1,6 +1,8 @@
 import dataclasses
 import typing
 
+from .annotation import SerialBase, SerialInspector, get_serial_annotation
+
 T = typing.TypeVar("T")
 
 
@@ -17,7 +19,7 @@ class IdentityHashWrapper(typing.Generic[T]):
 
 
 @dataclasses.dataclass(kw_only=True, frozen=True, eq=True)
-class _SerialUnion:
+class _SerialUnion(SerialBase):
     """
     This class is to be kept private, to provide flexibility in registration/lookup.
     Places that need the data should access it via help classes/methods.
@@ -27,7 +29,6 @@ class _SerialUnion:
     # determine which type to parse.
     discriminator: typing.Optional[str] = None
     discriminator_map: typing.Optional[IdentityHashWrapper[dict[str, type]]] = None
-    named_type_path: typing.Optional[str] = None
 
 
 def serial_union_annotation(
@@ -35,6 +36,7 @@ def serial_union_annotation(
     discriminator: typing.Optional[str] = None,
     discriminator_map: typing.Optional[dict[str, type]] = None,
     named_type_path: typing.Optional[str] = None,
+    is_dynamic_allowed: bool = False,
 ) -> _SerialUnion:
     return _SerialUnion(
         discriminator=discriminator,
@@ -42,23 +44,21 @@
         if discriminator_map is not None
         else None,
         named_type_path=named_type_path,
+        from_decorator=True,
+        is_dynamic_allowed=is_dynamic_allowed,
     )
 
 
 def _get_serial_union(parsed_type: type[T]) -> _SerialUnion | None:
-    if not hasattr(parsed_type, "__metadata__"):
-        return None
-    metadata = parsed_type.__metadata__  # type:ignore[attr-defined]
-    if not isinstance(metadata, tuple) or len(metadata) != 1:
-        return None
-    serial = metadata[0]
+    serial = get_serial_annotation(parsed_type)
     if not isinstance(serial, _SerialUnion):
         return None
     return serial
 
 
-class SerialClassInspector(typing.Generic[T]):
+class SerialClassInspector(SerialInspector[T]):
     def __init__(self, parsed_type: type[T], serial_union: _SerialUnion) -> None:
+        super().__init__(parsed_type, serial_union)
         self._parsed_type = parsed_type
         self._serial_union = serial_union
 
pkgs/serialization_util/__init__.py CHANGED
@@ -1,4 +1,6 @@
 from .convert_to_snakecase import convert_dict_to_snake_case
+from .dataclasses import dict_fields as dict_fields
+from .dataclasses import iterate_fields as iterate_fields
 from .serialization_helpers import (
     serialize_for_api,
     serialize_for_storage,
@@ -10,4 +12,6 @@ __all__: list[str] = [
     "serialize_for_api",
     "serialize_for_storage",
     "serialize_for_storage_dict",
+    "iterate_fields",
+    "dict_fields",
 ]
pkgs/serialization_util/dataclasses.py ADDED
@@ -0,0 +1,14 @@
+from dataclasses import fields
+from typing import TYPE_CHECKING, Any, Iterator
+
+if TYPE_CHECKING:
+    from _typeshed import DataclassInstance
+
+
+def iterate_fields(d: "DataclassInstance") -> Iterator[tuple[str, Any]]:
+    for field in fields(d):
+        yield field.name, getattr(d, field.name)
+
+
+def dict_fields(d: "DataclassInstance") -> dict[str, Any]:
+    return dict(iterate_fields(d))
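Note: the two new helpers are thin wrappers over dataclasses.fields, so they only see declared fields (unlike __dict__). A small self-contained example:

import dataclasses

from pkgs.serialization_util import dict_fields, iterate_fields


@dataclasses.dataclass
class Point:
    x: int
    y: int


p = Point(x=1, y=2)
p.extra = "ignored"  # not a declared field, so the helpers skip it
assert dict_fields(p) == {"x": 1, "y": 2}
assert list(iterate_fields(p)) == [("x", 1), ("y", 2)]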
pkgs/serialization_util/serialization_helpers.py CHANGED
@@ -25,6 +25,7 @@ from pkgs.serialization import (
 )
 
 from ._get_type_for_serialization import SerializationType, get_serialization_type
+from .dataclasses import iterate_fields
 
 # Inlined types which otherwise would import from types/base.py
 JsonScalar = Union[str, float, bool, Decimal, None, datetime.datetime, datetime.date]
@@ -77,6 +78,12 @@ def _serialize_dict(d: dict[str, Any]) -> dict[str, JsonValue]:
     return {k: serialize_for_storage(v) for k, v in d.items() if v != MISSING_SENTRY}
 
 
+def _serialize_dataclass(d: Any) -> dict[str, JsonValue]:
+    return {
+        k: serialize_for_storage(v) for k, v in iterate_fields(d) if v != MISSING_SENTRY
+    }
+
+
 def _to_string_value(value: Any) -> str:
     assert isinstance(value, (Decimal, int))
     return str(value)
@@ -121,7 +128,7 @@ def _convert_dataclass(d: Any) -> dict[str, JsonValue]:
         conversions.key_conversions[k]: (
             conversions.value_conversion_functions[k](v) if v is not None else None
         )
-        for k, v in d.__dict__.items()
+        for k, v in iterate_fields(d)
         if v != MISSING_SENTRY
     }
 
@@ -169,14 +176,15 @@ def serialize_for_api(obj: Any) -> JsonValue:
         serialization_type == SerializationType.UNKNOWN
     ):  # performance optimization to not do function lookup
         return obj  # type: ignore
-    return _CONVERSION_SERIALIZATION_FUNCS[serialization_type](obj)
+    r = _CONVERSION_SERIALIZATION_FUNCS[serialization_type](obj)
+    return r
 
 
 _SERIALIZATION_FUNCS_DICT: dict[
     SerializationType, Callable[[Any], dict[str, JsonValue]]
 ] = {
     SerializationType.DICT: _serialize_dict,
-    SerializationType.DATACLASS: lambda x: _serialize_dict(x.__dict__),
+    SerializationType.DATACLASS: _serialize_dataclass,
 }
 
 
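Note: swapping d.__dict__ for iterate_fields(d) in _convert_dataclass and the DATACLASS serializer means serialization now follows the declared fields, which also works for slots dataclasses that have no instance __dict__. A standalone stdlib-only illustration (not SDK code):

import dataclasses


@dataclasses.dataclass(slots=True)
class Slotted:
    value: int


s = Slotted(value=3)
assert not hasattr(s, "__dict__")  # slots dataclasses have no instance __dict__
assert {f.name: getattr(s, f.name) for f in dataclasses.fields(s)} == {"value": 3}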
pkgs/type_spec/builder.py CHANGED
@@ -293,6 +293,7 @@ class SpecTypeDefn(SpecType):
         self._is_value_converted = _is_value_converted
         self._is_value_to_string = False
         self._is_valid_parameter = True
+        self._is_dynamic_allowed = False
         self.ext_info: Any = None
 
     def is_value_converted(self) -> bool:
@@ -304,6 +305,9 @@
     def is_valid_parameter(self) -> bool:
         return self._is_valid_parameter
 
+    def is_dynamic_allowed(self) -> bool:
+        return self._is_dynamic_allowed
+
     def is_base_type(self, type_: BaseTypeName) -> bool:
         return self.is_base and self.name == type_
 
@@ -316,11 +320,23 @@
     def base_process(
         self, builder: SpecBuilder, data: RawDict, extra_names: list[str]
     ) -> None:
-        util.check_fields(data, ["ext_info", "label"] + extra_names)
+        util.check_fields(
+            data,
+            [
+                "ext_info",
+                "label",
+                "is_dynamic_allowed",
+            ]
+            + extra_names,
+        )
 
         self.ext_info = data.get("ext_info")
         self.label = data.get("label")
 
+        is_dynamic_allowed = data.get("is_dynamic_allowed", False)
+        assert isinstance(is_dynamic_allowed, bool)
+        self._is_dynamic_allowed = is_dynamic_allowed
+
     def _process_property(
         self, builder: SpecBuilder, spec_name: str, data: RawDict
     ) -> SpecProperty:
@@ -818,6 +834,12 @@ def _resolve_endpoint_path(
     )
 
 
+class EndpointEmitType(StrEnum):
+    EMIT_ENDPOINT = "emit_endpoint"
+    EMIT_TYPES = "emit_types"
+    EMIT_NOTHING = "emit_nothing"
+
+
 class SpecEndpoint:
     method: RouteMethod
     root: str
@@ -825,7 +847,7 @@ class SpecEndpoint:
     path_dirname: str
     path_basename: str
     data_loader: bool
-    is_sdk: bool
+    is_sdk: EndpointEmitType
     is_beta: bool
     stability_level: StabilityLevel | None
     # Don't emit TypeScript endpoint code
@@ -879,8 +901,22 @@
         assert isinstance(data_loader, bool)
         self.data_loader = data_loader
 
-        is_sdk = data.get("is_sdk", False)
-        assert isinstance(is_sdk, bool)
+        is_sdk = data.get("is_sdk", EndpointEmitType.EMIT_NOTHING)
+
+        # backwards compatibility
+        if isinstance(is_sdk, bool):
+            if is_sdk is True:
+                is_sdk = EndpointEmitType.EMIT_ENDPOINT
+            else:
+                is_sdk = EndpointEmitType.EMIT_NOTHING
+        elif isinstance(is_sdk, str):
+            try:
+                is_sdk = EndpointEmitType(is_sdk)
+            except ValueError:
+                raise ValueError(f"Invalid value for is_sdk: {is_sdk}")
+
+        assert isinstance(is_sdk, EndpointEmitType)
+
         self.is_sdk = is_sdk
 
         route_group = data.get("route_group")
@@ -924,7 +960,7 @@
         self.is_external = self.path_root == "api/external"
         self.has_attachment = data.get("has_attachment", False)
 
-        assert not is_sdk or self.desc is not None, (
+        assert self.is_sdk != EndpointEmitType.EMIT_ENDPOINT or self.desc is not None, (
            f"Endpoint description required for SDK endpoints, missing: {path}"
        )
 
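Note: the is_sdk spec field now accepts the three EndpointEmitType strings while legacy booleans keep working (true maps to emit_endpoint, false to emit_nothing, and unknown strings raise). A standalone sketch of that coercion behaviour, separate from the builder code above; the helper name is hypothetical.

from enum import StrEnum


class EndpointEmitType(StrEnum):
    EMIT_ENDPOINT = "emit_endpoint"
    EMIT_TYPES = "emit_types"
    EMIT_NOTHING = "emit_nothing"


def coerce_is_sdk(raw: object) -> EndpointEmitType:
    if isinstance(raw, bool):
        return EndpointEmitType.EMIT_ENDPOINT if raw else EndpointEmitType.EMIT_NOTHING
    if isinstance(raw, str):
        return EndpointEmitType(raw)  # raises ValueError for unknown strings
    raise TypeError(f"unsupported is_sdk value: {raw!r}")


assert coerce_is_sdk(True) is EndpointEmitType.EMIT_ENDPOINT
assert coerce_is_sdk(False) is EndpointEmitType.EMIT_NOTHING
assert coerce_is_sdk("emit_types") is EndpointEmitType.EMIT_TYPES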
pkgs/type_spec/emit_python.py CHANGED
@@ -5,6 +5,7 @@ from decimal import Decimal
 from typing import Any, Optional
 
 from . import builder, util
+from .builder import EndpointEmitType
 from .config import PythonConfig
 
 INDENT = "    "
@@ -45,6 +46,7 @@ class TrackingContext:
     use_serial_string_enum: bool = False
     use_dataclass: bool = False
     use_serial_union: bool = False
+    use_serial_alias: bool = False
     use_missing: bool = False
     use_opaque_key: bool = False
 
@@ -225,6 +227,8 @@ def _emit_types_imports(*, out: io.StringIO, ctx: Context) -> None:
         out.write("from pkgs.serialization import serial_class\n")
     if ctx.use_serial_union:
         out.write("from pkgs.serialization import serial_union_annotation\n")
+    if ctx.use_serial_alias:
+        out.write("from pkgs.serialization import serial_alias_annotation\n")
     if ctx.use_serial_string_enum:
         out.write("from pkgs.serialization import serial_string_enum\n")
     if ctx.use_missing:
@@ -249,7 +253,7 @@ def _emit_types(*, builder: builder.SpecBuilder, config: PythonConfig) -> None:
     ):
         if (
             namespace.endpoint is not None
-            and not namespace.endpoint.is_sdk
+            and namespace.endpoint.is_sdk == EndpointEmitType.EMIT_NOTHING
             and config.sdk_endpoints_only is True
         ):
             continue
@@ -486,7 +490,10 @@ def _emit_async_batch_invocation_function(
     endpoint = namespace.endpoint
     if endpoint is None:
         return
-    if endpoint.async_batch_path is None or not endpoint.is_sdk:
+    if (
+        endpoint.async_batch_path is None
+        or endpoint.is_sdk != EndpointEmitType.EMIT_ENDPOINT
+    ):
         return
 
     ctx.out.write("\n")
@@ -568,7 +575,7 @@ def _emit_endpoint_invocation_function(
     endpoint = namespace.endpoint
     if endpoint is None:
         return
-    if not endpoint.is_sdk or endpoint.is_beta:
+    if endpoint.is_sdk != EndpointEmitType.EMIT_ENDPOINT or endpoint.is_beta:
        return
 
     ctx.out.write("\n")
@@ -779,7 +786,17 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
         return
 
     if isinstance(stype, builder.SpecTypeDefnAlias):
-        ctx.out.write(f"{stype.name} = {refer_to(ctx, stype.alias)}\n")
+        ctx.use_serial_alias = True
+        ctx.out.write(f"{stype.name} = typing.Annotated[\n")
+        ctx.out.write(f"{INDENT}{refer_to(ctx, stype.alias)},\n")
+        ctx.out.write(f"{INDENT}serial_alias_annotation(\n")
+        ctx.out.write(
+            f"{INDENT}named_type_path={util.encode_common_string(_named_type_path(ctx, stype))},\n"
+        )
+        if stype.is_dynamic_allowed():
+            ctx.out.write(f"{INDENT}is_dynamic_allowed=True,\n")
+        ctx.out.write(f"{INDENT}),\n")
+        ctx.out.write("]\n")
         return
 
     if isinstance(stype, builder.SpecTypeDefnUnion):
@@ -790,6 +807,8 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
         ctx.out.write(
             f"{INDENT}named_type_path={util.encode_common_string(_named_type_path(ctx, stype))},\n"
         )
+        if stype.is_dynamic_allowed():
+            ctx.out.write(f"{INDENT}is_dynamic_allowed=True,\n")
         if stype.discriminator is not None:
             ctx.out.write(
                 f"{INDENT * 2}discriminator={util.encode_common_string(stype.discriminator)},\n"
@@ -836,6 +855,8 @@ def _emit_type(ctx: Context, stype: builder.SpecType) -> None:
         ctx.out.write(
             f"{INDENT}named_type_path={util.encode_common_string(_named_type_path(ctx, stype))},\n"
         )
+        if stype.is_dynamic_allowed():
+            ctx.out.write(f"{INDENT}is_dynamic_allowed=True,\n")
 
         def write_values(key: str, values: set[str]) -> None:
             if len(values) == 0:
@@ -1027,13 +1048,19 @@ def _emit_namespace_imports(
     namespaces: set[builder.SpecNamespace],
     from_namespace: Optional[builder.SpecNamespace],
     config: PythonConfig,
+    skip_non_sdk: bool = False,
 ) -> None:
     for ns in sorted(
         namespaces,
         key=lambda name: _resolve_namespace_name(name),
     ):
+        if (
+            skip_non_sdk
+            and ns.endpoint is not None
+            and ns.endpoint.is_sdk != EndpointEmitType.EMIT_ENDPOINT
+        ):
+            continue
         resolved = _resolve_namespace_name(ns)
-        ref = _resolve_namespace_ref(ns)
         if ns.endpoint is not None:
             import_alias = "_".join(ns.path[2:]) + "_t"
             out.write(
@@ -1193,7 +1220,7 @@ def _emit_api_argument_lookup(
             continue
         if endpoint.function is None:
             continue
-        if "Arguments" not in namespace.types:
+        if "Arguments" not in namespace.types or "Data" not in namespace.types:
            continue
 
        import_alias = "_".join(namespace.path[1:])
@@ -1357,6 +1384,7 @@ def _emit_client_class(
         namespaces=ctx.namespaces,
         from_namespace=None,
         config=config,
+        skip_non_sdk=True,
     )
 
     client_base_out.write(
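Note: with use_serial_alias set, generated alias types now carry their serialization metadata in an Annotated wrapper. Roughly, per the write calls above, an emitted alias looks like the sketch below; the alias name, target, and path string are illustrative, the indentation is normalized, and is_dynamic_allowed is only emitted when the spec sets it.

import typing

from pkgs.serialization import serial_alias_annotation

ExampleIdList = typing.Annotated[
    list[str],
    serial_alias_annotation(
        named_type_path="example/path#ExampleIdList",
    ),
]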
pkgs/type_spec/type_info/emit_type_info.py CHANGED
@@ -201,8 +201,13 @@ def _extract_inheritable_property_parts(
     elif base_parts.ext_info is None:
         ext_info = local_ext_info
     else:
-        ext_info = type_info_t.ExtInfo(
-            **(local_ext_info.__dict__ | base_parts.ext_info.__dict__)
+        ext_info = dataclasses.replace(
+            local_ext_info,
+            **{
+                field.name: getattr(base_parts.ext_info, field.name)
+                for field in dataclasses.fields(type_info_t.ExtInfo)
+                if getattr(base_parts.ext_info, field.name) is not None
+            },
         )
 
     return InheritablePropertyParts(label=label, desc=desc, ext_info=ext_info)
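Note: the ext_info merge no longer rebuilds ExtInfo from combined __dict__s; dataclasses.replace now copies only the base's non-None fields onto the local value. A simplified standalone illustration (the ExtInfo fields shown here are made up):

import dataclasses


@dataclasses.dataclass
class ExtInfo:  # stand-in for type_info_t.ExtInfo
    unit: str | None = None
    precision: int | None = None


local = ExtInfo(unit="kg", precision=2)
base = ExtInfo(unit=None, precision=4)
merged = dataclasses.replace(
    local,
    **{
        f.name: getattr(base, f.name)
        for f in dataclasses.fields(ExtInfo)
        if getattr(base, f.name) is not None
    },
)
assert merged == ExtInfo(unit="kg", precision=4)  # unit kept from local, precision taken from base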
pkgs/type_spec/value_spec/convert_type.py CHANGED
@@ -31,6 +31,11 @@ TYPE_MAP = {
     # not part of type_spec's types now
     "Symbol": MappedType(base_type=value_spec_t.BaseType.SYMBOL),
     "Any": MappedType(base_type=value_spec_t.BaseType.ANY),
+    "None": MappedType(base_type=value_spec_t.BaseType.NONE),
+    "Tuple": MappedType(
+        base_type=value_spec_t.BaseType.TUPLE, variable_param_count=True
+    ),
+    "Never": MappedType(base_type=value_spec_t.BaseType.NEVER),
 }
 
 
uncountable/core/client.py CHANGED
@@ -1,9 +1,11 @@
 import base64
+import re
 import typing
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 from enum import StrEnum
-from urllib.parse import urljoin
+from io import BytesIO
+from urllib.parse import unquote, urljoin
 from uuid import uuid4
 
 import requests
@@ -16,6 +18,7 @@ from pkgs.serialization_util import serialize_for_api
 from pkgs.serialization_util.serialization_helpers import JsonValue
 from uncountable.core.environment import get_version
 from uncountable.integration.telemetry import JobLogger
+from uncountable.types import download_file_t
 from uncountable.types.client_base import APIRequest, ClientMethods
 from uncountable.types.client_config import ClientConfigOptions
 
@@ -157,6 +160,16 @@ class GetOauthBearerTokenData:
 oauth_bearer_token_data_parser = CachedParser(GetOauthBearerTokenData)
 
 
+@dataclass
+class DownloadedFile:
+    name: str
+    size: int
+    data: BytesIO
+
+
+DownloadedFiles = list[DownloadedFile]
+
+
 class Client(ClientMethods):
     _parser_map: dict[type, CachedParser] = {}
     _auth_details: AuthDetailsAll
@@ -313,6 +326,50 @@ class Client(ClientMethods):
             case _:
                 raise ValueError(f"unsupported request method: {method}")
 
+    def _get_downloaded_filename(self, *, cd: typing.Optional[str]) -> str:
+        if not cd:
+            return "Unknown"
+
+        fname = re.findall(r"filename\*=UTF-8''(.+)", cd)
+        if fname:
+            return unquote(fname[0])
+
+        fname = re.findall(r'filename="?(.+)"?', cd)
+        if fname:
+            return str(fname[0].strip('"'))
+
+        return "Unknown"
+
+    def download_files(
+        self, *, file_query: download_file_t.FileDownloadQuery
+    ) -> DownloadedFiles:
+        """Download a file from uncountable."""
+        request_id = str(uuid4())
+        api_request = APIRequest(
+            method=download_file_t.ENDPOINT_METHOD,
+            endpoint=download_file_t.ENDPOINT_PATH,
+            args=download_file_t.Arguments(
+                file_query=file_query,
+            ),
+        )
+        http_request = self._build_http_request(
+            api_request=api_request, request_id=request_id
+        )
+        request = requests.Request(http_request.method.value, http_request.url)
+        request.headers = http_request.headers
+        assert isinstance(http_request, HTTPGetRequest)
+        request.params = http_request.query_params
+        response = self._send_request(request)
+        content = response.content
+        content_disposition = response.headers.get("Content-Disposition", None)
+        return [
+            DownloadedFile(
+                name=self._get_downloaded_filename(cd=content_disposition),
+                size=len(content),
+                data=BytesIO(content),
+            )
+        ]
+
     def upload_files(
         self: typing.Self, *, file_uploads: list[FileUpload]
     ) -> list[UploadedFile]:
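Note: _get_downloaded_filename prefers the RFC 5987 filename*=UTF-8''... form and falls back to a plain filename= parameter, defaulting to "Unknown". A standalone illustration of that fallback order (the helper below is hypothetical, not SDK code):

import re
from urllib.parse import unquote


def filename_from_content_disposition(cd: str | None) -> str:
    if not cd:
        return "Unknown"
    match = re.findall(r"filename\*=UTF-8''(.+)", cd)
    if match:
        return unquote(match[0])
    match = re.findall(r'filename="?(.+)"?', cd)
    if match:
        return match[0].strip('"')
    return "Unknown"


assert filename_from_content_disposition(
    "attachment; filename*=UTF-8''report%20Q3.pdf"
) == "report Q3.pdf"
assert filename_from_content_disposition('attachment; filename="data.csv"') == "data.csv"
assert filename_from_content_disposition(None) == "Unknown"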
uncountable/core/environment.py CHANGED
@@ -19,11 +19,11 @@ def get_server_env() -> str | None:
 
 
 def get_webhook_server_port() -> int:
-    return int(os.environ.get("UNC_WEBHOOK_SERVER_PORT", 5001))
+    return int(os.environ.get("UNC_WEBHOOK_SERVER_PORT", "5001"))
 
 
 def get_local_admin_server_port() -> int:
-    return int(os.environ.get("UNC_ADMIN_SERVER_PORT", 50051))
+    return int(os.environ.get("UNC_ADMIN_SERVER_PORT", "50051"))
 
 
 def get_otel_enabled() -> bool:
uncountable/integration/scheduler.py CHANGED
@@ -92,19 +92,19 @@ def check_process_alive(logger: Logger, processes: list[ProcessInfo]) -> None:
 
 
 def _wait_queue_runner_online() -> None:
-    _MAX_QUEUE_RUNNER_HEALTH_CHECKS = 10
-    _QUEUE_RUNNER_HEALTH_CHECK_DELAY_SECS = 1
+    MAX_QUEUE_RUNNER_HEALTH_CHECKS = 10
+    QUEUE_RUNNER_HEALTH_CHECK_DELAY_SECS = 1
 
     num_attempts = 0
     before = datetime.now(timezone.utc)
-    while num_attempts < _MAX_QUEUE_RUNNER_HEALTH_CHECKS:
+    while num_attempts < MAX_QUEUE_RUNNER_HEALTH_CHECKS:
         try:
             if check_health(port=get_local_admin_server_port()):
                 return
         except CommandServerTimeout:
             pass
         num_attempts += 1
-        time.sleep(_QUEUE_RUNNER_HEALTH_CHECK_DELAY_SECS)
+        time.sleep(QUEUE_RUNNER_HEALTH_CHECK_DELAY_SECS)
     after = datetime.now(timezone.utc)
     duration_secs = (after - before).seconds
     raise Exception(f"queue runner failed to come online after {duration_secs} seconds")
uncountable/types/__init__.py CHANGED
@@ -25,6 +25,7 @@ from .api.recipe_links import create_recipe_link as create_recipe_link_t
 from .api.recipes import create_recipes as create_recipes_t
 from . import curves_t as curves_t
 from .api.recipes import disassociate_recipe_as_input as disassociate_recipe_as_input_t
+from .api.files import download_file as download_file_t
 from .api.recipes import edit_recipe_inputs as edit_recipe_inputs_t
 from . import entity_t as entity_t
 from .api.batch import execute_batch as execute_batch_t
@@ -132,6 +133,7 @@ __all__: list[str] = [
     "create_recipes_t",
     "curves_t",
     "disassociate_recipe_as_input_t",
+    "download_file_t",
     "edit_recipe_inputs_t",
     "entity_t",
     "execute_batch_t",