UncountablePythonSDK 0.0.92__py3-none-any.whl → 0.0.95__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of UncountablePythonSDK might be problematic.

Files changed (178)
  1. {UncountablePythonSDK-0.0.92.dist-info → UncountablePythonSDK-0.0.95.dist-info}/METADATA +2 -1
  2. UncountablePythonSDK-0.0.95.dist-info/RECORD +302 -0
  3. pkgs/argument_parser/argument_parser.py +7 -7
  4. pkgs/filesystem_utils/_s3_session.py +2 -1
  5. pkgs/serialization/annotation.py +3 -3
  6. pkgs/serialization/missing_sentry.py +1 -1
  7. pkgs/serialization/serial_alias.py +2 -2
  8. pkgs/serialization/serial_class.py +1 -1
  9. pkgs/serialization/serial_generic.py +1 -1
  10. pkgs/serialization/serial_union.py +3 -3
  11. pkgs/serialization_util/_env_flags.py +3 -0
  12. pkgs/serialization_util/serialization_helpers.py +5 -5
  13. pkgs/type_spec/actions_registry/__main__.py +0 -4
  14. pkgs/type_spec/builder.py +25 -21
  15. pkgs/type_spec/config.py +2 -5
  16. pkgs/type_spec/emit_open_api.py +1 -1
  17. pkgs/type_spec/emit_python.py +44 -28
  18. pkgs/type_spec/emit_typescript.py +13 -6
  19. pkgs/type_spec/parts/base.py.prepart +2 -0
  20. pkgs/type_spec/util.py +3 -5
  21. pkgs/type_spec/value_spec/__main__.py +15 -5
  22. pkgs/type_spec/value_spec/emit_python.py +5 -2
  23. pkgs/type_spec/value_spec/types.py +1 -1
  24. uncountable/core/client.py +14 -6
  25. uncountable/core/file_upload.py +5 -0
  26. uncountable/integration/job.py +2 -2
  27. uncountable/integration/queue_runner/command_server/types.py +1 -1
  28. uncountable/integration/server.py +4 -4
  29. uncountable/types/__init__.py +0 -1
  30. uncountable/types/api/batch/execute_batch.py +1 -2
  31. uncountable/types/api/batch/execute_batch_load_async.py +0 -1
  32. uncountable/types/api/chemical/convert_chemical_formats.py +0 -1
  33. uncountable/types/api/entity/create_entities.py +1 -2
  34. uncountable/types/api/entity/create_entity.py +2 -3
  35. uncountable/types/api/entity/get_entities_data.py +0 -1
  36. uncountable/types/api/entity/grant_entity_permissions.py +0 -1
  37. uncountable/types/api/entity/list_entities.py +3 -4
  38. uncountable/types/api/entity/lock_entity.py +0 -1
  39. uncountable/types/api/entity/resolve_entity_ids.py +2 -3
  40. uncountable/types/api/entity/set_entity_field_values.py +0 -1
  41. uncountable/types/api/entity/set_values.py +0 -1
  42. uncountable/types/api/entity/transition_entity_phase.py +1 -2
  43. uncountable/types/api/entity/unlock_entity.py +0 -1
  44. uncountable/types/api/equipment/associate_equipment_input.py +0 -1
  45. uncountable/types/api/field_options/upsert_field_options.py +0 -1
  46. uncountable/types/api/files/download_file.py +1 -2
  47. uncountable/types/api/id_source/list_id_source.py +3 -4
  48. uncountable/types/api/id_source/match_id_source.py +1 -2
  49. uncountable/types/api/input_groups/get_input_group_names.py +0 -1
  50. uncountable/types/api/inputs/create_inputs.py +2 -3
  51. uncountable/types/api/inputs/get_input_data.py +5 -6
  52. uncountable/types/api/inputs/get_input_names.py +3 -4
  53. uncountable/types/api/inputs/get_inputs_data.py +0 -1
  54. uncountable/types/api/inputs/set_input_attribute_values.py +0 -1
  55. uncountable/types/api/inputs/set_input_category.py +1 -2
  56. uncountable/types/api/inputs/set_input_subcategories.py +0 -1
  57. uncountable/types/api/inputs/set_intermediate_type.py +1 -2
  58. uncountable/types/api/material_families/update_entity_material_families.py +1 -2
  59. uncountable/types/api/outputs/get_output_data.py +2 -3
  60. uncountable/types/api/outputs/get_output_names.py +0 -1
  61. uncountable/types/api/outputs/resolve_output_conditions.py +0 -1
  62. uncountable/types/api/permissions/set_core_permissions.py +1 -2
  63. uncountable/types/api/project/get_projects.py +3 -4
  64. uncountable/types/api/project/get_projects_data.py +4 -5
  65. uncountable/types/api/recipe_links/create_recipe_link.py +1 -2
  66. uncountable/types/api/recipe_links/remove_recipe_link.py +1 -2
  67. uncountable/types/api/recipe_metadata/get_recipe_metadata_data.py +3 -4
  68. uncountable/types/api/recipes/add_recipe_to_project.py +0 -1
  69. uncountable/types/api/recipes/archive_recipes.py +0 -1
  70. uncountable/types/api/recipes/associate_recipe_as_input.py +0 -1
  71. uncountable/types/api/recipes/associate_recipe_as_lot.py +0 -1
  72. uncountable/types/api/recipes/clear_recipe_outputs.py +0 -1
  73. uncountable/types/api/recipes/create_recipe.py +1 -2
  74. uncountable/types/api/recipes/create_recipes.py +1 -2
  75. uncountable/types/api/recipes/disassociate_recipe_as_input.py +0 -1
  76. uncountable/types/api/recipes/edit_recipe_inputs.py +3 -4
  77. uncountable/types/api/recipes/get_column_calculation_values.py +0 -1
  78. uncountable/types/api/recipes/get_curve.py +0 -1
  79. uncountable/types/api/recipes/get_recipe_calculations.py +0 -1
  80. uncountable/types/api/recipes/get_recipe_links.py +1 -2
  81. uncountable/types/api/recipes/get_recipe_names.py +0 -1
  82. uncountable/types/api/recipes/get_recipe_output_metadata.py +0 -1
  83. uncountable/types/api/recipes/get_recipes_data.py +13 -14
  84. uncountable/types/api/recipes/lock_recipes.py +1 -2
  85. uncountable/types/api/recipes/remove_recipe_from_project.py +0 -1
  86. uncountable/types/api/recipes/set_recipe_inputs.py +1 -2
  87. uncountable/types/api/recipes/set_recipe_metadata.py +0 -1
  88. uncountable/types/api/recipes/set_recipe_output_annotations.py +2 -3
  89. uncountable/types/api/recipes/set_recipe_output_file.py +0 -1
  90. uncountable/types/api/recipes/set_recipe_outputs.py +3 -4
  91. uncountable/types/api/recipes/set_recipe_tags.py +2 -3
  92. uncountable/types/api/recipes/unarchive_recipes.py +0 -1
  93. uncountable/types/api/recipes/unlock_recipes.py +1 -2
  94. uncountable/types/api/triggers/run_trigger.py +0 -1
  95. uncountable/types/api/uploader/invoke_uploader.py +0 -1
  96. uncountable/types/async_batch.py +0 -1
  97. uncountable/types/async_batch_processor.py +1 -2
  98. uncountable/types/async_batch_t.py +1 -2
  99. uncountable/types/async_jobs.py +0 -1
  100. uncountable/types/async_jobs_t.py +1 -2
  101. uncountable/types/auth_retrieval.py +0 -1
  102. uncountable/types/auth_retrieval_t.py +2 -3
  103. uncountable/types/base.py +0 -1
  104. uncountable/types/base_t.py +2 -1
  105. uncountable/types/calculations.py +0 -1
  106. uncountable/types/calculations_t.py +0 -1
  107. uncountable/types/chemical_structure.py +0 -1
  108. uncountable/types/chemical_structure_t.py +0 -1
  109. uncountable/types/client_base.py +29 -30
  110. uncountable/types/client_config.py +0 -1
  111. uncountable/types/client_config_t.py +1 -2
  112. uncountable/types/curves.py +0 -1
  113. uncountable/types/curves_t.py +2 -3
  114. uncountable/types/entity.py +0 -1
  115. uncountable/types/entity_t.py +3 -4
  116. uncountable/types/experiment_groups.py +0 -1
  117. uncountable/types/experiment_groups_t.py +0 -1
  118. uncountable/types/field_values.py +0 -1
  119. uncountable/types/field_values_t.py +6 -7
  120. uncountable/types/fields.py +0 -1
  121. uncountable/types/fields_t.py +0 -1
  122. uncountable/types/generic_upload.py +0 -1
  123. uncountable/types/generic_upload_t.py +2 -3
  124. uncountable/types/id_source.py +0 -1
  125. uncountable/types/id_source_t.py +1 -2
  126. uncountable/types/identifier.py +0 -1
  127. uncountable/types/identifier_t.py +1 -2
  128. uncountable/types/input_attributes.py +0 -1
  129. uncountable/types/input_attributes_t.py +0 -1
  130. uncountable/types/inputs.py +0 -1
  131. uncountable/types/inputs_t.py +2 -3
  132. uncountable/types/integration_server.py +0 -1
  133. uncountable/types/integration_server_t.py +1 -2
  134. uncountable/types/job_definition.py +0 -1
  135. uncountable/types/job_definition_t.py +5 -6
  136. uncountable/types/outputs.py +0 -1
  137. uncountable/types/outputs_t.py +1 -2
  138. uncountable/types/overrides.py +0 -1
  139. uncountable/types/overrides_t.py +0 -1
  140. uncountable/types/permissions.py +0 -1
  141. uncountable/types/permissions_t.py +1 -2
  142. uncountable/types/phases.py +0 -1
  143. uncountable/types/phases_t.py +0 -1
  144. uncountable/types/post_base.py +0 -1
  145. uncountable/types/post_base_t.py +1 -2
  146. uncountable/types/queued_job.py +0 -1
  147. uncountable/types/queued_job_t.py +2 -3
  148. uncountable/types/recipe_identifiers.py +0 -1
  149. uncountable/types/recipe_identifiers_t.py +1 -2
  150. uncountable/types/recipe_inputs.py +0 -1
  151. uncountable/types/recipe_inputs_t.py +1 -2
  152. uncountable/types/recipe_links.py +0 -1
  153. uncountable/types/recipe_links_t.py +2 -3
  154. uncountable/types/recipe_metadata.py +0 -1
  155. uncountable/types/recipe_metadata_t.py +0 -1
  156. uncountable/types/recipe_output_metadata.py +0 -1
  157. uncountable/types/recipe_output_metadata_t.py +0 -1
  158. uncountable/types/recipe_tags.py +0 -1
  159. uncountable/types/recipe_tags_t.py +0 -1
  160. uncountable/types/recipe_workflow_steps.py +0 -1
  161. uncountable/types/recipe_workflow_steps_t.py +2 -3
  162. uncountable/types/recipes.py +0 -1
  163. uncountable/types/recipes_t.py +0 -1
  164. uncountable/types/response.py +0 -1
  165. uncountable/types/response_t.py +0 -1
  166. uncountable/types/secret_retrieval.py +0 -1
  167. uncountable/types/secret_retrieval_t.py +2 -3
  168. uncountable/types/units.py +0 -1
  169. uncountable/types/units_t.py +0 -1
  170. uncountable/types/users.py +0 -1
  171. uncountable/types/users_t.py +0 -1
  172. uncountable/types/webhook_job.py +0 -1
  173. uncountable/types/webhook_job_t.py +0 -1
  174. uncountable/types/workflows.py +0 -1
  175. uncountable/types/workflows_t.py +1 -2
  176. UncountablePythonSDK-0.0.92.dist-info/RECORD +0 -301
  177. {UncountablePythonSDK-0.0.92.dist-info → UncountablePythonSDK-0.0.95.dist-info}/WHEEL +0 -0
  178. {UncountablePythonSDK-0.0.92.dist-info → UncountablePythonSDK-0.0.95.dist-info}/top_level.txt +0 -0
pkgs/type_spec/builder.py CHANGED
@@ -901,16 +901,9 @@ class SpecEndpoint:
     account_type: str | None
     route_group: str | None
 
-    # to be deprecated in favor of path_per_api_endpoint:
-    # default function, path details
-    function: str | None
-    root: EndpointKey
-    path_root: str
-    path_dirname: str
-    path_basename: str
-
     # function, path details per api endpoint
     path_per_api_endpoint: dict[str, EndpointSpecificPath]
+    default_endpoint_key: EndpointKey
 
     is_external: bool = False
 
@@ -997,14 +990,18 @@ class SpecEndpoint:
         self.has_attachment = data.get("has_attachment", False)
         self.desc = data.get("desc")
 
+        # compatibility with single-endpoint files
        default_endpoint_path = parse_endpoint_specific_path(
            builder,
            {"path": data.get("path"), "function": data.get("function")},
        )
        if default_endpoint_path is not None:
-            self.root = default_endpoint_path.root
+            assert default_endpoint_path.root in builder.api_endpoints, (
+                "Default endpoint is not a valid API endpoint"
+            )
+            self.default_endpoint_key = default_endpoint_path.root
            self.path_per_api_endpoint = {
-                self.root: default_endpoint_path,
+                self.default_endpoint_key: default_endpoint_path,
            }
        else:
            self.path_per_api_endpoint = {}
@@ -1021,27 +1018,34 @@ class SpecEndpoint:
                if shared_function_name is None:
                    shared_function_name = fn_name
                assert shared_function_name == fn_name
-            assert self.path_per_api_endpoint != {}
 
-            assert builder.top_namespace in self.path_per_api_endpoint
-            self.root = builder.top_namespace
+        if builder.top_namespace in self.path_per_api_endpoint:
+            self.default_endpoint_key = builder.top_namespace
+        elif len(self.path_per_api_endpoint) == 1:
+            self.default_endpoint_key = next(
+                iter(self.path_per_api_endpoint.keys())
+            )
+        else:
+            raise RuntimeError("no clear default endpoint")
 
-        default_endpoint_path = self.path_per_api_endpoint[self.root]
-        self.function = default_endpoint_path.function
-        self.path_dirname = default_endpoint_path.path_dirname
-        self.path_basename = default_endpoint_path.path_basename
-        self.path_root = default_endpoint_path.path_root
+        assert len(self.path_per_api_endpoint) > 0, (
+            "Missing API endpoint path and function definitions for API call"
+        )
 
        # IMPROVE: remove need for is_external flag
-        self.is_external = self.path_root == "api/external"
+        self.is_external = (
+            self.path_per_api_endpoint[self.default_endpoint_key].path_root
+            == "api/external"
+        )
 
        assert self.is_sdk != EndpointEmitType.EMIT_ENDPOINT or self.desc is not None, (
-            f"Endpoint description required for SDK endpoints, missing: {self.path_dirname}/{self.path_basename}"
+            f"Endpoint description required for SDK endpoints, missing: {self.resolved_path}"
        )
 
    @property
    def resolved_path(self: Self) -> str:
-        return f"{self.path_root}/{self.path_dirname}/{self.path_basename}"
+        default_endpoint_path = self.path_per_api_endpoint[self.default_endpoint_key]
+        return f"{default_endpoint_path.path_root}/{default_endpoint_path.path_dirname}/{default_endpoint_path.path_basename}"
 
 
 def _parse_const(
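The net effect of the builder.py changes: instead of copying the default endpoint's function and path fields onto the SpecEndpoint, the spec now keeps only a default_endpoint_key and looks the details up in path_per_api_endpoint on demand. A small standalone sketch of that lookup, with field names taken from the diff and hypothetical endpoint values:

from dataclasses import dataclass


@dataclass(kw_only=True)
class EndpointSpecificPath:
    root: str
    function: str | None
    path_root: str
    path_dirname: str
    path_basename: str


# hypothetical endpoint data, purely to illustrate the key-based lookup
path_per_api_endpoint = {
    "main": EndpointSpecificPath(
        root="main",
        function="recipes.get_recipes_data",
        path_root="api/external",
        path_dirname="recipes",
        path_basename="get_recipes_data",
    ),
}
default_endpoint_key = "main"

default = path_per_api_endpoint[default_endpoint_key]
resolved_path = f"{default.path_root}/{default.path_dirname}/{default.path_basename}"
is_external = default.path_root == "api/external"
print(resolved_path, is_external)  # api/external/recipes/get_recipes_data True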
pkgs/type_spec/config.py CHANGED
@@ -1,7 +1,7 @@
 import os
 from collections.abc import Callable, Mapping
 from dataclasses import dataclass
-from typing import Self, TypeVar
+from typing import Self
 
 from pkgs.serialization import yaml
 
@@ -103,10 +103,7 @@ class Config:
     open_api: OpenAPIConfig | None
 
 
-_T = TypeVar("_T")
-
-
-def _parse_language(config_class: type[_T], raw_value: ConfigValueType) -> _T:
+def _parse_language[_T](config_class: type[_T], raw_value: ConfigValueType) -> _T:
     assert isinstance(raw_value, dict), "expecting language config to have key/values."
     return config_class(**raw_value)
 
pkgs/type_spec/emit_open_api.py CHANGED
@@ -563,7 +563,7 @@ def _emit_endpoint(
     ep = namespace.endpoint
     gctx.paths.append(
         EmitOpenAPIPath(
-            path=f"/{ep.path_root}/{ep.path_dirname}/{ep.path_basename}",
+            path=f"/{ep.resolved_path}",
             ref=ref_path,
         )
     )
pkgs/type_spec/emit_python.py CHANGED
@@ -5,16 +5,14 @@ from decimal import Decimal
 from typing import Any
 
 from . import builder, util
-from .builder import EndpointEmitType
+from .builder import EndpointEmitType, EndpointSpecificPath
 from .config import PythonConfig
 
 INDENT = " "
 LINE_BREAK = "\n"
 MODIFY_NOTICE = "# DO NOT MODIFY -- This file is generated by type_spec\n"
 # Turn excess line length warning and turn off ruff formatting
-LINT_HEADER = (
-    "# flake8: noqa: F821\n# ruff: noqa: E402 Q003\n# fmt: off\n# isort: skip_file\n"
-)
+LINT_HEADER = "# ruff: noqa: E402 Q003\n# fmt: off\n# isort: skip_file\n"
 LINT_FOOTER = "# fmt: on\n"
 ROUTE_NOTICE = """# Routes are generated from $endpoint specifications in the
 # type_spec API YAML files. Refer to the section on endpoints in the type_spec/README"""
@@ -221,7 +219,7 @@ def _emit_types_imports(*, out: io.StringIO, ctx: Context) -> None:
     out.write("import datetime # noqa: F401\n")
     out.write("from decimal import Decimal # noqa: F401\n")
     if ctx.use_enum:
-        out.write("from pkgs.strenum_compat import StrEnum\n")
+        out.write("from enum import StrEnum\n")
     if ctx.use_dataclass:
         out.write("import dataclasses\n")
         out.write("from pkgs.serialization import serial_class\n")
@@ -349,13 +347,17 @@ ENDPOINT_PATH = "ENDPOINT_PATH"
 ENDPOINT_PATH_ALTERNATE = "ENDPOINT_PATH_ALTERNATE"
 
 
+def _get_epf_root(endpoint_specific_path: EndpointSpecificPath) -> str:
+    return endpoint_specific_path.root
+
+
 def _emit_namespace(ctx: Context, namespace: builder.SpecNamespace) -> None:
     endpoint = namespace.endpoint
     if endpoint is not None:
         ctx.out.write("\n")
         ctx.out.write(f'{ENDPOINT_METHOD} = "{endpoint.method.upper()}"\n')
         for endpoint_specific_path in sorted(
-            endpoint.path_per_api_endpoint.values(), key=lambda epf: epf.root
+            endpoint.path_per_api_endpoint.values(), key=_get_epf_root
         ):
             endpoint_path_name = ENDPOINT_PATH
 
@@ -457,8 +459,10 @@ def _emit_endpoint_invocation_function_signature(
         else []
     ) + (extra_params if extra_params is not None else [])
 
-    assert endpoint.function is not None
-    function_name = endpoint.function.split(".")[-1]
+    # All endpoints share a function name
+    function = endpoint.path_per_api_endpoint[endpoint.default_endpoint_key].function
+    assert function is not None
+    function_name = function.split(".")[-1]
     ctx.out.write(
         f"""
def {function_name}(
@@ -716,40 +720,43 @@ def _emit_properties(
     if len(properties) > 0:
 
         def write_field(prop: builder.SpecProperty) -> None:
+            stype = prop.spec_type
             if prop.name_case == builder.NameCase.preserve:
                 unconverted_keys.add(prop.name)
             py_name = python_field_name(prop.name, prop.name_case)
 
             if prop.convert_value == builder.PropertyConvertValue.no_convert:
                 unconverted_values.add(py_name)
-            elif not prop.spec_type.is_value_converted():
+            elif not stype.is_value_converted():
                 assert prop.convert_value == builder.PropertyConvertValue.auto
                 unconverted_values.add(py_name)
-                if prop.spec_type.is_value_to_string():
+                if stype.is_value_to_string():
                     to_string_values.add(py_name)
 
             if prop.parse_require:
                 parse_require.add(py_name)
 
-            ref_type = refer_to(ctx, prop.spec_type)
+            ref_type = refer_to(ctx, stype)
             default = None
             if prop.extant == builder.PropertyExtant.missing:
                 ref_type = f"MissingType[{ref_type}]"
                 default = "MISSING_SENTRY"
                 ctx.use_missing = True
             elif prop.extant == builder.PropertyExtant.optional:
-                if ref_type != "None":
+                if isinstance(
+                    stype, builder.SpecTypeInstance
+                ) and stype.defn_type.is_base_type(builder.BaseTypeName.s_optional):
+                    pass  # base type already adds the None union
+                elif ref_type == "None":
+                    pass  # no need to add a None union to a none type
+                else:
                     ref_type = f"{ref_type} | None"
                 default = "None"
             elif prop.has_default:
-                default = _emit_value(ctx, prop.spec_type, prop.default)
+                default = _emit_value(ctx, stype, prop.default)
                 if (
-                    isinstance(prop.spec_type, builder.SpecTypeInstance)
-                    and (
-                        prop.spec_type.defn_type.is_base_type(
-                            builder.BaseTypeName.s_list
-                        )
-                    )
+                    isinstance(stype, builder.SpecTypeInstance)
+                    and (stype.defn_type.is_base_type(builder.BaseTypeName.s_list))
                     and default == "[]"
                 ):
                     default = "dataclasses.field(default_factory=list)"
@@ -939,13 +946,22 @@ base_name_map = {
 
 def refer_to(ctx: TrackingContext, stype: builder.SpecType) -> str:
     if isinstance(stype, builder.SpecTypeInstance):
-        params = ", ".join([refer_to(ctx, p) for p in stype.parameters])
+        params = [refer_to(ctx, p) for p in stype.parameters]
+
+        if stype.defn_type.is_base_type(builder.BaseTypeName.s_union):
+            if len(stype.parameters) == 1:
+                return f"typing.Union[{params[0]}]"
+            return " | ".join(params)
 
         if stype.defn_type.is_base_type(builder.BaseTypeName.s_readonly_array):
-            assert len(stype.parameters) == 1, "Read Only Array takes one parameter"
-            params = f"{params}, ..."
+            assert len(params) == 1, "Read Only Array takes one parameter"
+            return f"tuple[{params[0]}, ...]"
+
+        if stype.defn_type.is_base_type(builder.BaseTypeName.s_optional):
+            assert len(params) == 1, "Optional only takes one parameter"
+            return f"{params[0]} | None"
 
-        return f"{refer_to(ctx, stype.defn_type)}[{params}]"
+        return f"{refer_to(ctx, stype.defn_type)}[{', '.join(params)}]"
 
     if isinstance(stype, builder.SpecTypeLiteralWrapper):
         return _emit_value(ctx, stype.value_type, stype.value)
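The three new branches change which Python type strings the emitter produces for union, read-only array, and optional instances. A standalone sketch of those strings, where plain string keys stand in for the SDK's BaseTypeName values (this is not the real refer_to):

def sketch_refer_to(base: str, params: list[str]) -> str:
    if base == "union":
        # a one-element union is kept as typing.Union[...] so it still reads as a union
        return f"typing.Union[{params[0]}]" if len(params) == 1 else " | ".join(params)
    if base == "readonly_array":
        assert len(params) == 1, "Read Only Array takes one parameter"
        return f"tuple[{params[0]}, ...]"
    if base == "optional":
        assert len(params) == 1, "Optional only takes one parameter"
        return f"{params[0]} | None"
    return f"{base}[{', '.join(params)}]"


print(sketch_refer_to("union", ["int", "str"]))    # int | str
print(sketch_refer_to("readonly_array", ["str"]))  # tuple[str, ...]
print(sketch_refer_to("optional", ["ObjectId"]))   # ObjectId | None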
@@ -1111,8 +1127,8 @@ def _emit_id_source(*, builder: builder.SpecBuilder, config: PythonConfig) -> No
         return None
     enum_out = io.StringIO()
     enum_out.write(f"{LINT_HEADER}{MODIFY_NOTICE}\n")
-    enum_out.write("from typing import Literal, Union\n")
-    enum_out.write("from pkgs.strenum_compat import StrEnum\n")
+    enum_out.write("import typing\n")
+    enum_out.write("from enum import StrEnum\n")
 
     ctx = TrackingContext()
     # In this context the propername
@@ -1128,11 +1144,11 @@ def _emit_id_source(*, builder: builder.SpecBuilder, config: PythonConfig) -> No
     known_keys = []
     enum_out.write("\nENUM_MAP: dict[str, type[StrEnum]] = {\n")
     for key in sorted(named_enums.keys()):
-        enum_out.write(f'"{key}": {named_enums[key]},\n')
-        known_keys.append(f'Literal["{key}"]')
+        enum_out.write(f'{INDENT}"{key}": {named_enums[key]},\n')
+        known_keys.append(f'"{key}"')
     enum_out.write(f"}}\n{MODIFY_NOTICE}\n")
 
-    enum_out.write(f"\nKnownEnumsType = Union[\n{INDENT}")
+    enum_out.write(f"\nKnownEnumsType = typing.Literal[\n{INDENT}")
     enum_out.write(f",\n{INDENT}".join(known_keys))
     enum_out.write(f"\n]\n{MODIFY_NOTICE}\n")
pkgs/type_spec/emit_typescript.py CHANGED
@@ -2,6 +2,7 @@ import io
 import os
 
 from . import builder, util
+from .builder import EndpointKey, EndpointSpecificPath
 from .config import TypeScriptConfig
 from .emit_io_ts import emit_type_io_ts
 from .emit_typescript_util import (
@@ -46,7 +47,10 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
         and len(namespace.constants) == 0
     ):
         # Try to capture some common incompleteness errors
-        if namespace.endpoint is None or namespace.endpoint.function is None:
+        if namespace.endpoint is None or any(
+            endpoint_specific_path.function is None
+            for endpoint_specific_path in namespace.endpoint.path_per_api_endpoint.values()
+        ):
             raise Exception(
                 f"Namespace {'/'.join(namespace.path)} is incomplete. It should have an endpoint with function, types, and/or constants"
             )
@@ -209,12 +213,15 @@ export type Response = DeprecatedResult
 
 
 """
 
-    endpoint_path_part = (
-        f'"{endpoint.path_root}/{endpoint.path_dirname}/{endpoint.path_basename}",'
-    )
-
-    if has_multiple_endpoints:
+    if not has_multiple_endpoints:
+        default_endpoint_path = endpoint.path_per_api_endpoint[
+            endpoint.default_endpoint_key
+        ]
+        endpoint_path_part = f'"{default_endpoint_path.path_root}/{default_endpoint_path.path_dirname}/{default_endpoint_path.path_basename}",'
+    else:
         path_lookup_map = ""
+        api_endpoint_key: EndpointKey
+        endpoint_specific_path: EndpointSpecificPath
         for (
             api_endpoint_key,
             endpoint_specific_path,
pkgs/type_spec/parts/base.py.prepart CHANGED
@@ -22,6 +22,8 @@ PureJsonScalar = Union[str, float, bool, None]
 # Regular expressions for identifying ref names and IDs. Ref names should be
 # using this regular expression as a constriant in the database.
 REF_NAME_REGEX = r"^[a-zA-Z0-9_/-]+$"
+REF_NAME_STRICT_REGEX_STRING = "^[a-zA-Z_][a-zA-Z0-9_]*$"
+REF_NAME_STRICT_REGEX = rf"{REF_NAME_STRICT_REGEX_STRING}"
 # Ids matching a strict integer number are converted to integers
 ID_REGEX = r"-?[1-9][0-9]{0,20}"
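Unlike REF_NAME_REGEX, the new strict pattern only accepts identifier-style ref names: letters, digits, and underscores, not starting with a digit, and no "/" or "-". A quick check using nothing beyond Python's re module:

import re

REF_NAME_STRICT_REGEX = r"^[a-zA-Z_][a-zA-Z0-9_]*$"

print(bool(re.match(REF_NAME_STRICT_REGEX, "material_family")))  # True
print(bool(re.match(REF_NAME_STRICT_REGEX, "9lives")))           # False: leading digit
print(bool(re.match(REF_NAME_STRICT_REGEX, "path/segment")))     # False: "/" not allowed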
 
pkgs/type_spec/util.py CHANGED
@@ -1,12 +1,10 @@
 import json
 import os
 from dataclasses import dataclass
-from typing import TypeVar, Union
+from typing import Union
 
 import regex as re
 
-T = TypeVar("T")
-
 
 def rewrite_file(filename: str, content: str) -> bool:
     os.makedirs(os.path.dirname(filename), exist_ok=True)
@@ -156,7 +154,7 @@ def is_valid_property_name(name: str) -> bool:
     return re_pattern_property_name.match(name) is not None
 
 
-def check_fields(data: dict[str, T], allowed: list[str]) -> None:
+def check_fields[T](data: dict[str, T], allowed: list[str]) -> None:
     for key in data:
         if key not in allowed:
             raise Exception(f"unexpected-field: {key}. Allowed: {allowed}")
@@ -180,7 +178,7 @@ def encode_common_string(value: str) -> str:
     return rep
 
 
-def unused(_arg: T) -> None:
+def unused[T](_arg: T) -> None:
     """
     Identifies that an argument is intended not be used, as opposed to
     simply forgotten, or a remnant. This can happen in patterned calls
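These util.py edits, like the config.py one above, drop module-level TypeVar declarations in favor of Python 3.12's inline generic syntax (PEP 695), so the type parameter is scoped to the function that uses it. A minimal before/after, independent of the SDK:

from typing import TypeVar

# Before: a shared module-level TypeVar.
T = TypeVar("T")


def first_old(items: list[T]) -> T:
    return items[0]


# After (Python 3.12+): the type parameter is declared inline on the function.
def first_new[T](items: list[T]) -> T:
    return items[0]


print(first_new([1, 2, 3]))  # 1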
pkgs/type_spec/value_spec/__main__.py CHANGED
@@ -13,7 +13,10 @@ One of the following can be specified on the name of a argument:
 After that you can also specify a `!` indicating the argument may not be null.
 If this is not specified, then a null input on this argument should produce a null output.
 We prefer not to use `!` as we want to encourage null pass-through where possible.
-If null is allowed as a legitimate value, such as in conditionals like `if`, then `!` must be specified.
+
+If null is allowed as a legitimate value, such as in conditionals like `is_null`,
+then `!usenull` must be specified, this distinguishes it from the pass-through case.
+The accepted argument type must accept "None", it is not implied.
 """
 
 import sys
@@ -84,7 +87,7 @@ class Source:
         return self._text[start : self._at]
 
 
-_re_argument_name = re.compile(r"([a-z_]+)(\?|\+)?(!)?:")
+_re_argument_name = re.compile(r"([a-z_]+)(\?|\+)?(!|!usenull)?:")
 
 
 def parse_function_signature(text: str) -> ParsedFunctionSignature:
@@ -103,9 +106,16 @@ def parse_function_signature(text: str) -> ParsedFunctionSignature:
         ref_name = arg_group.group(1)
         is_missing = arg_group.group(2) == "?"
         is_repeating = arg_group.group(2) == "+"
-        pass_null = arg_group.group(3) is None
         type_path = parse_type_str(type_str)
 
+        match arg_group.group(3):
+            case "!":
+                on_null = value_spec_t.OnNull.DISALLOW
+            case "!usenull":
+                on_null = value_spec_t.OnNull.USE
+            case _:
+                on_null = value_spec_t.OnNull.PASS
+
         extant = value_spec_t.ArgumentExtant.REQUIRED
         extant_marker = arg_group.group(2)
         if extant_marker == "?":
@@ -116,7 +126,7 @@ def parse_function_signature(text: str) -> ParsedFunctionSignature:
         arguments.append(
             ParsedFunctionArgument(
                 ref_name=ref_name,
-                pass_null=pass_null,
+                on_null=on_null,
                 extant=extant,
                 type_path=type_path,
             )
@@ -208,7 +218,7 @@ def main() -> None:
             name=arg_name,
             description=arg_description,
             type=convert_to_value_spec_type(in_argument.type_path),
-            pass_null=in_argument.pass_null,
+            on_null=in_argument.on_null,
             extant=in_argument.extant,
         )
     )
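Put together, the signature parser now distinguishes three null behaviours from the argument marker: no marker (null passes through), `!` (null disallowed), and `!usenull` (null is a legitimate input). A self-contained sketch of that classification; the OnNull enum below only mirrors the member names referenced in the diff and is not imported from the SDK:

import enum
import re


class OnNull(enum.Enum):  # mirrors the value_spec_t.OnNull members referenced above
    PASS = "pass"
    DISALLOW = "disallow"
    USE = "use"


_re_argument_name = re.compile(r"([a-z_]+)(\?|\+)?(!|!usenull)?:")


def classify(argument_text: str) -> OnNull:
    m = _re_argument_name.match(argument_text)
    assert m is not None
    match m.group(3):
        case "!":
            return OnNull.DISALLOW
        case "!usenull":
            return OnNull.USE
        case _:
            return OnNull.PASS


print(classify("value:"))          # OnNull.PASS
print(classify("value!:"))         # OnNull.DISALLOW
print(classify("value!usenull:"))  # OnNull.USE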
pkgs/type_spec/value_spec/emit_python.py CHANGED
@@ -70,7 +70,7 @@ def _emit_function_wrapper(function: value_spec_t.Function) -> str:
     else:
         python_type = _emit_python_type(argument.type)
         if (
-            argument.pass_null
+            argument.on_null == value_spec_t.OnNull.PASS
             or argument.extant == value_spec_t.ArgumentExtant.MISSING
         ):
             python_type += " | None"
@@ -189,7 +189,10 @@ def _emit_argument(argument: value_spec_t.FunctionArgument, indent: str) -> str:
     out.write(
         f"{sub_indent}description={encode_common_string(argument.description)},\n"
     )
-    out.write(f"{sub_indent}pass_null={str(argument.pass_null)},\n")
+    # Quick enum emit since we have only one such type here
+    out.write(
+        f"{sub_indent}on_null=value_spec_t.OnNull.{str(argument.on_null).upper()},\n"
+    )
     out.write(
         f"{sub_indent}extant=value_spec_t.ArgumentExtant.{argument.extant.name},\n"
     )
pkgs/type_spec/value_spec/types.py CHANGED
@@ -8,7 +8,7 @@ from ..util import ParsedTypePath
 @dataclass(kw_only=True, frozen=True)
 class ParsedFunctionArgument:
     ref_name: str
-    pass_null: bool
+    on_null: value_spec_t.OnNull
     extant: value_spec_t.ArgumentExtant
     type_path: ParsedTypePath
 
uncountable/core/client.py CHANGED
@@ -50,7 +50,7 @@ class HTTPGetRequest(HTTPRequestBase):
 @dataclass(kw_only=True)
 class HTTPPostRequest(HTTPRequestBase):
     method = EndpointMethod.POST
-    body: typing.Union[str, dict[str, str]]
+    body: str | dict[str, str]
 
 
 HTTPRequest = HTTPPostRequest | HTTPGetRequest
@@ -61,7 +61,7 @@ class ClientConfig(ClientConfigOptions):
     transform_request: typing.Callable[[requests.Request], requests.Request] | None = (
         None
     )
-    logger: typing.Optional[Logger] = None
+    logger: Logger | None = None
 
 
 OAUTH_REFRESH_WINDOW_SECONDS = 60 * 5
@@ -197,9 +197,10 @@ class Client(ClientMethods):
             logger=self._cfg.logger,
         )
 
-    def _get_response_json(
-        self, response: requests.Response, request_id: str
-    ) -> dict[str, JsonValue]:
+    @classmethod
+    def _validate_response_status(
+        cls, response: requests.Response, request_id: str
+    ) -> None:
         if response.status_code < 200 or response.status_code > 299:
             extra_details: dict[str, JsonValue] | None = None
             try:
@@ -214,6 +215,11 @@ class Client(ClientMethods):
                 extra_details=extra_details,
                 request_id=request_id,
             )
+
+    def _get_response_json(
+        self, response: requests.Response, request_id: str
+    ) -> dict[str, JsonValue]:
+        self._validate_response_status(response, request_id)
         try:
             return typing.cast(dict[str, JsonValue], response.json())
         except JSONDecodeError as e:
@@ -327,7 +333,7 @@ class Client(ClientMethods):
             case _:
                 raise ValueError(f"unsupported request method: {method}")
 
-    def _get_downloaded_filename(self, *, cd: typing.Optional[str]) -> str:
+    def _get_downloaded_filename(self, *, cd: str | None) -> str:
         if not cd:
             return "Unknown"
 
@@ -361,6 +367,8 @@ class Client(ClientMethods):
             assert isinstance(http_request, HTTPGetRequest)
             request.params = http_request.query_params
         response = self._send_request(request)
+        self._validate_response_status(response, request_id)
+
         content = response.content
         content_disposition = response.headers.get("Content-Disposition", None)
         return [
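The net effect in client.py is that non-2xx status handling now lives in a reusable classmethod that runs both before JSON parsing and before file downloads. A stripped-down sketch of the pattern, with simplified names rather than the SDK's actual client or error types:

import requests


class APIError(Exception):
    pass


def validate_response_status(response: requests.Response, request_id: str) -> None:
    # Raise for anything outside the 2xx range, keeping the request id for debugging.
    if response.status_code < 200 or response.status_code > 299:
        raise APIError(f"request {request_id} failed with HTTP {response.status_code}")


def get_response_json(response: requests.Response, request_id: str) -> dict:
    validate_response_status(response, request_id)  # status is checked before the body is parsed
    return response.json()


def download_content(response: requests.Response, request_id: str) -> bytes:
    validate_response_status(response, request_id)  # downloads now get the same check
    return response.content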
uncountable/core/file_upload.py CHANGED
@@ -111,6 +111,11 @@ class FileUploader:
         if self._logger is not None:
             self._logger.log_info("Uploading file", attributes=attributes)
         with file_upload_data(file_upload) as file_bytes:
+            if file_bytes.bytes_data.read(1) == b"":
+                raise UploadFailed(
+                    f"Failed to upload empty file: {file_bytes.name}"
+                )
+            file_bytes.bytes_data.seek(0)
             location = await aiotus.upload(
                 creation_url,
                 file_bytes.bytes_data,
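The new guard reads a single byte to detect an empty stream and then rewinds so the upload still sees the full content. The same probe in isolation, on a plain BytesIO:

import io


def assert_not_empty(stream: io.BytesIO, name: str) -> None:
    # One read(1) is enough to distinguish an empty stream from a non-empty one.
    if stream.read(1) == b"":
        raise ValueError(f"Failed to upload empty file: {name}")
    stream.seek(0)  # rewind so the uploader reads from the start


assert_not_empty(io.BytesIO(b"hello"), "data.csv")  # passes, stream rewound to 0
# assert_not_empty(io.BytesIO(b""), "empty.csv")    # would raise ValueError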
uncountable/integration/job.py CHANGED
@@ -28,7 +28,7 @@ CronJobArguments = JobArguments
 PT = typing.TypeVar("PT")
 
 
-class Job(ABC, typing.Generic[PT]):
+class Job[PT](ABC):
     _unc_job_registered: bool = False
 
     @property
@@ -62,7 +62,7 @@ class CronJob(Job):
 WPT = typing.TypeVar("WPT")
 
 
-class WebhookJob(Job[webhook_job_t.WebhookEventPayload], typing.Generic[WPT]):
+class WebhookJob[WPT](Job[webhook_job_t.WebhookEventPayload]):
     @property
     def payload_type(self) -> type[webhook_job_t.WebhookEventPayload]:
         return webhook_job_t.WebhookEventPayload
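The Job classes get the same PEP 695 treatment as the functions above: the type parameter moves into brackets on the class name, replacing typing.Generic in the base list. A minimal illustration, unrelated to the SDK's actual Job hierarchy:

from abc import ABC, abstractmethod


# Python 3.12+ class-level type parameter, no typing.Generic needed.
class SketchJob[PT](ABC):
    @abstractmethod
    def run(self, payload: PT) -> None: ...


class PrintJob(SketchJob[str]):
    def run(self, payload: str) -> None:
        print(payload)


PrintJob().run("hello")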
uncountable/integration/queue_runner/command_server/types.py CHANGED
@@ -14,7 +14,7 @@ RT = typing.TypeVar("RT")
 
 
 @dataclass(kw_only=True)
-class CommandBase(typing.Generic[RT]):
+class CommandBase[RT]:
     type: CommandType
     response_queue: asyncio.Queue[RT]
 
uncountable/integration/server.py CHANGED
@@ -1,7 +1,7 @@
 import signal
 from dataclasses import asdict
 from types import TracebackType
-from typing import Optional, assert_never
+from typing import assert_never
 
 from apscheduler.executors.pool import ThreadPoolExecutor
 from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
@@ -110,8 +110,8 @@ class IntegrationServer:
 
     def __exit__(
         self,
-        exc_type: Optional[type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         self._stop_apscheduler()
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
@@ -7,7 +6,7 @@ from __future__ import annotations
 import typing # noqa: F401
 import datetime # noqa: F401
 from decimal import Decimal # noqa: F401
-from pkgs.strenum_compat import StrEnum
+from enum import StrEnum
 import dataclasses
 from pkgs.serialization import serial_class
 from ... import base_t
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
uncountable/types/api/entity/create_entities.py CHANGED
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
@@ -32,7 +31,7 @@ ENDPOINT_PATH = "api/external/entity/external_create_entities"
 )
 @dataclasses.dataclass(kw_only=True)
 class EntityToCreate:
-    field_values: typing.Optional[list[field_values_t.FieldRefNameValue]] | None = None
+    field_values: list[field_values_t.FieldRefNameValue] | None = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
uncountable/types/api/entity/create_entity.py CHANGED
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
@@ -35,7 +34,7 @@ ENDPOINT_PATH = "api/external/entity/external_create_entity"
 class EntityFieldInitialValue:
     field_ref_name: str
     value: base_t.JsonValue
-    row_index: typing.Optional[int] | None = None
+    row_index: int | None = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
@@ -47,7 +46,7 @@ class Arguments:
     entity_type: entity_t.LimitedEntityType
     definition_id: base_t.ObjectId | None = None
     definition_key: identifier_t.IdentifierKey | None = None
-    field_values: typing.Optional[list[field_values_t.FieldRefNameValue]] | None = None
+    field_values: list[field_values_t.FieldRefNameValue] | None = None
 
 
 # DO NOT MODIFY -- This file is generated by type_spec
@@ -1,5 +1,4 @@
 # DO NOT MODIFY -- This file is generated by type_spec
-# flake8: noqa: F821
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file