UncountablePythonSDK 0.0.54__py3-none-any.whl → 0.0.56__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of UncountablePythonSDK might be problematic. Click here for more details.

Files changed (42)
  1. {UncountablePythonSDK-0.0.54.dist-info → UncountablePythonSDK-0.0.56.dist-info}/METADATA +1 -1
  2. {UncountablePythonSDK-0.0.54.dist-info → UncountablePythonSDK-0.0.56.dist-info}/RECORD +42 -41
  3. {UncountablePythonSDK-0.0.54.dist-info → UncountablePythonSDK-0.0.56.dist-info}/WHEEL +1 -1
  4. examples/create_entity.py +3 -1
  5. examples/edit_recipe_inputs.py +4 -2
  6. examples/invoke_uploader.py +4 -1
  7. pkgs/argument_parser/argument_parser.py +4 -2
  8. pkgs/filesystem_utils/_gdrive_session.py +5 -2
  9. pkgs/filesystem_utils/_s3_session.py +2 -1
  10. pkgs/filesystem_utils/_sftp_session.py +5 -4
  11. pkgs/serialization/serial_class.py +6 -2
  12. pkgs/serialization/yaml.py +4 -1
  13. pkgs/type_spec/actions_registry/emit_typescript.py +3 -1
  14. pkgs/type_spec/builder.py +16 -6
  15. pkgs/type_spec/config.py +3 -1
  16. pkgs/type_spec/emit_io_ts.py +5 -5
  17. pkgs/type_spec/emit_open_api.py +10 -6
  18. pkgs/type_spec/emit_open_api_util.py +3 -4
  19. pkgs/type_spec/emit_python.py +9 -5
  20. pkgs/type_spec/emit_typescript.py +17 -8
  21. pkgs/type_spec/type_info/emit_type_info.py +5 -3
  22. pkgs/type_spec/value_spec/convert_type.py +3 -1
  23. pkgs/type_spec/value_spec/emit_python.py +12 -4
  24. uncountable/core/client.py +3 -1
  25. uncountable/core/file_upload.py +3 -1
  26. uncountable/integration/construct_client.py +2 -1
  27. uncountable/integration/executors/generic_upload_executor.py +11 -9
  28. uncountable/integration/secret_retrieval/retrieve_secret.py +1 -3
  29. uncountable/integration/telemetry.py +12 -4
  30. uncountable/types/__init__.py +2 -0
  31. uncountable/types/api/entity/create_entities.py +1 -1
  32. uncountable/types/api/entity/create_entity.py +1 -1
  33. uncountable/types/api/recipes/clear_recipe_outputs.py +35 -0
  34. uncountable/types/api/uploader/invoke_uploader.py +2 -2
  35. uncountable/types/async_batch.py +1 -0
  36. uncountable/types/async_batch_processor.py +75 -4
  37. uncountable/types/async_batch_t.py +9 -0
  38. uncountable/types/client_base.py +25 -6
  39. uncountable/types/entity_t.py +2 -0
  40. uncountable/types/generic_upload.py +6 -0
  41. uncountable/types/generic_upload_t.py +67 -1
  42. {UncountablePythonSDK-0.0.54.dist-info → UncountablePythonSDK-0.0.56.dist-info}/top_level.txt +0 -0
@@ -92,7 +92,9 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
92
92
  builder.namespaces.values(),
93
93
  key=lambda ns: _resolve_namespace_name(ns),
94
94
  ):
95
- ctx = EmitTypescriptContext(out=io.StringIO(), namespace=namespace, config=config)
95
+ ctx = EmitTypescriptContext(
96
+ out=io.StringIO(), namespace=namespace, config=config
97
+ )
96
98
 
97
99
  _emit_namespace(ctx, namespace)
98
100
 
@@ -109,7 +111,7 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
109
111
  # Try to capture some common incompleteness errors
110
112
  if namespace.endpoint is None or namespace.endpoint.function is None:
111
113
  raise Exception(
112
- f"Namespace {'/'.join(namespace.path)} is incomplete. It should have an endpoint with function, types, and/or constants"
114
+ f"Namespace {"/".join(namespace.path)} is incomplete. It should have an endpoint with function, types, and/or constants"
113
115
  )
114
116
  continue
115
117
 
@@ -161,7 +163,9 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
161
163
  util.rewrite_file(f"{config.types_output}/index.ts", index_out.getvalue())
162
164
 
163
165
 
164
- def _emit_namespace(ctx: EmitTypescriptContext, namespace: builder.SpecNamespace) -> None:
166
+ def _emit_namespace(
167
+ ctx: EmitTypescriptContext, namespace: builder.SpecNamespace
168
+ ) -> None:
165
169
  for stype in namespace.types.values():
166
170
  if namespace.emit_io_ts:
167
171
  emit_type_io_ts(ctx, stype, namespace.derive_types_from_io_ts)
@@ -222,7 +226,7 @@ def _emit_endpoint(
222
226
  wrap_call = (
223
227
  f"{wrap_name}<Arguments>" if is_binary else f"{wrap_name}<Arguments, Response>"
224
228
  )
225
- type_path = f"unc_mat/types/{'/'.join(namespace.path)}"
229
+ type_path = f"unc_mat/types/{"/".join(namespace.path)}"
226
230
 
227
231
  if is_binary:
228
232
  tsx_response_part = f"""import {{ {wrap_name} }} from "unc_base/api"
@@ -261,12 +265,14 @@ export const apiCall = {wrap_call}(
261
265
  )
262
266
  {data_loader_body}"""
263
267
 
264
- output = f"{ctx.config.routes_output}/{'/'.join(namespace.path)}.tsx"
268
+ output = f"{ctx.config.routes_output}/{"/".join(namespace.path)}.tsx"
265
269
  util.rewrite_file(output, tsx_api)
266
270
 
267
271
  # Hacky index support, until enough is migrated to regen entirely
268
272
  # Emits the import into the UI API index file
269
- index_path = f"{ctx.config.routes_output}/{'/'.join(namespace.path[0:-1])}/index.tsx"
273
+ index_path = (
274
+ f"{ctx.config.routes_output}/{"/".join(namespace.path[0:-1])}/index.tsx"
275
+ )
270
276
  api_name = f"Api{ts_type_name(namespace.path[0 - 1])}"
271
277
  if os.path.exists(index_path):
272
278
  with open(index_path) as index:
@@ -404,7 +410,10 @@ def refer_to_impl(
404
410
  spec, multi = refer_to_impl(ctx, stype.parameters[0])
405
411
  return f"readonly ({spec})[]" if multi else f"readonly {spec}[]", False
406
412
  if stype.defn_type.name == builder.BaseTypeName.s_union:
407
- return f'({" | ".join([refer_to(ctx, p) for p in stype.parameters])})', False
413
+ return (
414
+ f'({" | ".join([refer_to(ctx, p) for p in stype.parameters])})',
415
+ False,
416
+ )
408
417
  if stype.defn_type.name == builder.BaseTypeName.s_literal:
409
418
  parts = []
410
419
  for parameter in stype.parameters:
@@ -414,7 +423,7 @@ def refer_to_impl(
414
423
  if stype.defn_type.name == builder.BaseTypeName.s_optional:
415
424
  return f"{refer_to(ctx, stype.parameters[0])} | null", True
416
425
  if stype.defn_type.name == builder.BaseTypeName.s_tuple:
417
- return f"[{', '.join([refer_to(ctx, p) for p in stype.parameters])}]", False
426
+ return f"[{", ".join([refer_to(ctx, p) for p in stype.parameters])}]", False
418
427
  params = ", ".join([refer_to(ctx, p) for p in stype.parameters])
419
428
  return f"{refer_to(ctx, stype.defn_type)}<{params}>", False
420
429
 
@@ -3,7 +3,7 @@ import dataclasses
3
3
  import decimal
4
4
  import io
5
5
  import json
6
- from typing import Any, Optional, TypeAlias, Union, cast
6
+ from typing import Any, Optional, Union, cast
7
7
 
8
8
  from main.base.types import data_t
9
9
  from main.base.types.base_t import PureJsonValue
@@ -134,7 +134,7 @@ class MapStringEnum(MapTypeBase):
134
134
  values: dict[str, str]
135
135
 
136
136
 
137
- MapType: TypeAlias = Union[MapTypeObject, MapTypeAlias, MapStringEnum]
137
+ type MapType = Union[MapTypeObject, MapTypeAlias, MapStringEnum]
138
138
 
139
139
 
140
140
  @dataclasses.dataclass
@@ -255,7 +255,9 @@ def _extract_and_validate_layout(
255
255
  assert group_ref_name in layout, f"missing-base-group:{group_ref_name}"
256
256
 
257
257
  for prop_ref_name in stype.properties:
258
- assert prop_ref_name in all_fields_group, f"layout-missing-field:{prop_ref_name}"
258
+ assert (
259
+ prop_ref_name in all_fields_group
260
+ ), f"layout-missing-field:{prop_ref_name}"
259
261
 
260
262
  return layout
261
263
 
@@ -25,7 +25,9 @@ TYPE_MAP = {
25
25
  "List": MappedType(base_type=value_spec_t.BaseType.LIST, param_count=1),
26
26
  "Optional": MappedType(base_type=value_spec_t.BaseType.OPTIONAL, param_count=1),
27
27
  "String": MappedType(base_type=value_spec_t.BaseType.STRING),
28
- "Union": MappedType(base_type=value_spec_t.BaseType.UNION, variable_param_count=True),
28
+ "Union": MappedType(
29
+ base_type=value_spec_t.BaseType.UNION, variable_param_count=True
30
+ ),
29
31
  # not part of type_spec's types now
30
32
  "Symbol": MappedType(base_type=value_spec_t.BaseType.SYMBOL),
31
33
  "Any": MappedType(base_type=value_spec_t.BaseType.ANY),
@@ -160,7 +160,9 @@ def _emit_function(function: value_spec_t.Function, indent: str) -> str:
160
160
  sub_indent = indent + INDENT
161
161
  out.write(f"{_function_symbol_name(function)} = value_spec_t.Function(\n")
162
162
  out.write(f"{sub_indent}name={encode_common_string(function.name)},\n")
163
- out.write(f"{sub_indent}description={encode_common_string(function.description)},\n")
163
+ out.write(
164
+ f"{sub_indent}description={encode_common_string(function.description)},\n"
165
+ )
164
166
  out.write(f"{sub_indent}brief={encode_common_string(function.brief)},\n")
165
167
  out.write(
166
168
  f"{sub_indent}return_value={_emit_function_return(function.return_value, sub_indent)},\n"
@@ -184,16 +186,22 @@ def _emit_argument(argument: value_spec_t.FunctionArgument, indent: str) -> str:
184
186
  out.write("value_spec_t.FunctionArgument(\n")
185
187
  out.write(f"{sub_indent}ref_name={encode_common_string(argument.ref_name)},\n")
186
188
  out.write(f"{sub_indent}name={encode_common_string(argument.name)},\n")
187
- out.write(f"{sub_indent}description={encode_common_string(argument.description)},\n")
189
+ out.write(
190
+ f"{sub_indent}description={encode_common_string(argument.description)},\n"
191
+ )
188
192
  out.write(f"{sub_indent}pass_null={str(argument.pass_null)},\n")
189
- out.write(f"{sub_indent}extant=value_spec_t.ArgumentExtant.{argument.extant.name},\n")
193
+ out.write(
194
+ f"{sub_indent}extant=value_spec_t.ArgumentExtant.{argument.extant.name},\n"
195
+ )
190
196
  out.write(f"{sub_indent}type={_emit_type(argument.type, sub_indent)},\n")
191
197
  out.write(f"{indent})")
192
198
 
193
199
  return out.getvalue()
194
200
 
195
201
 
196
- def _emit_function_return(return_value: value_spec_t.FunctionReturn, indent: str) -> str:
202
+ def _emit_function_return(
203
+ return_value: value_spec_t.FunctionReturn, indent: str
204
+ ) -> str:
197
205
  out = io.StringIO()
198
206
 
199
207
  sub_indent = indent + INDENT
@@ -56,7 +56,9 @@ HTTPRequest = HTTPPostRequest | HTTPGetRequest
56
56
 
57
57
  @dataclass(kw_only=True)
58
58
  class ClientConfig(ClientConfigOptions):
59
- transform_request: typing.Callable[[requests.Request], requests.Request] | None = None
59
+ transform_request: typing.Callable[[requests.Request], requests.Request] | None = (
60
+ None
61
+ )
60
62
  job_logger: typing.Optional[JobLogger] = None
61
63
 
62
64
 
@@ -100,7 +100,9 @@ class FileUploader:
100
100
  name=file_bytes.name, file_id=int(location.path.split("/")[-1])
101
101
  )
102
102
 
103
- def upload_files(self: Self, *, file_uploads: list[FileUpload]) -> list[UploadedFile]:
103
+ def upload_files(
104
+ self: Self, *, file_uploads: list[FileUpload]
105
+ ) -> list[UploadedFile]:
104
106
  return [
105
107
  asyncio.run(self._upload_file(file_upload)) for file_upload in file_uploads
106
108
  ]
@@ -23,7 +23,8 @@ def _construct_auth_details(profile_meta: ProfileMetadata) -> AuthDetailsAll:
23
23
  profile_meta.auth_retrieval.api_id_secret, profile_metadata=profile_meta
24
24
  )
25
25
  api_key = retrieve_secret(
26
- profile_meta.auth_retrieval.api_key_secret, profile_metadata=profile_meta
26
+ profile_meta.auth_retrieval.api_key_secret,
27
+ profile_metadata=profile_meta,
27
28
  )
28
29
 
29
30
  return AuthDetailsApiKey(api_id=api_id, api_secret_key=api_key)
@@ -68,7 +68,8 @@ def _filter_by_file_extension(
68
68
  file
69
69
  for file in files
70
70
  if file.filename is not None
71
- and os.path.splitext(file.filename)[-1] in remote_directory.valid_file_extensions
71
+ and os.path.splitext(file.filename)[-1]
72
+ in remote_directory.valid_file_extensions
72
73
  ]
73
74
 
74
75
 
@@ -102,7 +103,7 @@ def _pull_remote_directory_data(
102
103
  files_to_pull = _filter_by_max_files(remote_directory, files_to_pull)
103
104
 
104
105
  logger.log_info(
105
- f"Accessing SFTP directory: {remote_directory.src_path} and pulling files: {', '.join([f.filename for f in files_to_pull if f.filename is not None])}",
106
+ f"Accessing SFTP directory: {remote_directory.src_path} and pulling files: {", ".join([f.filename for f in files_to_pull if f.filename is not None])}",
106
107
  )
107
108
  return filesystem_session.download_files(files_to_pull)
108
109
 
@@ -210,9 +211,7 @@ class GenericUploadJob(Job):
210
211
  assert (
211
212
  self.data_source.region_name is not None
212
213
  ), "region_name must be specified for cloud_provider OVH"
213
- endpoint_url = (
214
- f"https://s3.{self.data_source.region_name}.cloud.ovh.net"
215
- )
214
+ endpoint_url = f"https://s3.{self.data_source.region_name}.cloud.ovh.net"
216
215
  else:
217
216
  endpoint_url = self.data_source.endpoint_url
218
217
 
@@ -248,7 +247,8 @@ class GenericUploadJob(Job):
248
247
  for file_data in filtered_file_data:
249
248
  files_to_upload.append(
250
249
  DataFileUpload(
251
- data=io.BytesIO(file_data.file_data), name=file_data.filename
250
+ data=io.BytesIO(file_data.file_data),
251
+ name=file_data.filename,
252
252
  )
253
253
  )
254
254
  if not self.upload_strategy.skip_moving_files:
@@ -256,7 +256,9 @@ class GenericUploadJob(Job):
256
256
  filesystem_session=filesystem_session,
257
257
  remote_directory_scope=remote_directory,
258
258
  success_file_paths=[
259
- file.filepath if file.filepath is not None else file.filename
259
+ file.filepath
260
+ if file.filepath is not None
261
+ else file.filename
260
262
  for file in filtered_file_data
261
263
  ],
262
264
  # IMPROVE: use triggers/webhooks to mark failed files as failed
@@ -267,12 +269,12 @@ class GenericUploadJob(Job):
267
269
 
268
270
  file_ids = [file.file_id for file in uploaded_files]
269
271
 
270
- for material_family_key in self.upload_strategy.material_family_keys:
272
+ for destination in self.upload_strategy.destinations:
271
273
  for file_id in file_ids:
272
274
  batch_processor.invoke_uploader(
273
275
  file_id=file_id,
274
276
  uploader_key=self.upload_strategy.uploader_key,
275
- material_family_key=material_family_key,
277
+ destination=destination,
276
278
  )
277
279
 
278
280
  return JobResult(success=True)
@@ -75,9 +75,7 @@ def retrieve_secret(
75
75
 
76
76
  match secret_retrieval:
77
77
  case SecretRetrievalEnv():
78
- env_name = (
79
- f"UNC_{profile_metadata.name.upper()}_{secret_retrieval.env_key.upper()}"
80
- )
78
+ env_name = f"UNC_{profile_metadata.name.upper()}_{secret_retrieval.env_key.upper()}"
81
79
  secret = os.environ.get(env_name)
82
80
  if secret is None:
83
81
  raise SecretRetrievalError(
@@ -72,13 +72,21 @@ class Logger:
72
72
  log_file.flush()
73
73
 
74
74
  def log_info(self, message: str, *, attributes: Attributes | None = None) -> None:
75
- self._emit_log(message=message, severity=LogSeverity.INFO, attributes=attributes)
75
+ self._emit_log(
76
+ message=message, severity=LogSeverity.INFO, attributes=attributes
77
+ )
76
78
 
77
- def log_warning(self, message: str, *, attributes: Attributes | None = None) -> None:
78
- self._emit_log(message=message, severity=LogSeverity.WARN, attributes=attributes)
79
+ def log_warning(
80
+ self, message: str, *, attributes: Attributes | None = None
81
+ ) -> None:
82
+ self._emit_log(
83
+ message=message, severity=LogSeverity.WARN, attributes=attributes
84
+ )
79
85
 
80
86
  def log_error(self, message: str, *, attributes: Attributes | None = None) -> None:
81
- self._emit_log(message=message, severity=LogSeverity.ERROR, attributes=attributes)
87
+ self._emit_log(
88
+ message=message, severity=LogSeverity.ERROR, attributes=attributes
89
+ )
82
90
 
83
91
 
84
92
  class JobLogger(Logger):
@@ -12,6 +12,7 @@ from . import async_batch_t as async_batch_t
12
12
  from . import base_t as base_t
13
13
  from . import calculations_t as calculations_t
14
14
  from . import chemical_structure_t as chemical_structure_t
15
+ from .api.recipes import clear_recipe_outputs as clear_recipe_outputs_t
15
16
  from . import client_config_t as client_config_t
16
17
  from .api.chemical import convert_chemical_formats as convert_chemical_formats_t
17
18
  from .api.entity import create_entities as create_entities_t
@@ -109,6 +110,7 @@ __all__: list[str] = [
109
110
  "base_t",
110
111
  "calculations_t",
111
112
  "chemical_structure_t",
113
+ "clear_recipe_outputs_t",
112
114
  "client_config_t",
113
115
  "convert_chemical_formats_t",
114
116
  "create_entities_t",
@@ -34,7 +34,7 @@ class EntityToCreate:
34
34
  @dataclasses.dataclass(kw_only=True)
35
35
  class Arguments:
36
36
  definition_id: base_t.ObjectId
37
- entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]]
37
+ entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]]
38
38
  entities_to_create: list[EntityToCreate]
39
39
 
40
40
 
@@ -40,7 +40,7 @@ class EntityFieldInitialValue:
40
40
  @dataclasses.dataclass(kw_only=True)
41
41
  class Arguments:
42
42
  definition_id: base_t.ObjectId
43
- entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]]
43
+ entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]]
44
44
  field_values: typing.Optional[typing.Optional[list[field_values_t.FieldRefNameValue]]] = None
45
45
 
46
46
 
@@ -0,0 +1,35 @@
1
+ # DO NOT MODIFY -- This file is generated by type_spec
2
+ # flake8: noqa: F821
3
+ # ruff: noqa: E402 Q003
4
+ # fmt: off
5
+ # isort: skip_file
6
+ from __future__ import annotations
7
+ import typing # noqa: F401
8
+ import datetime # noqa: F401
9
+ from decimal import Decimal # noqa: F401
10
+ import dataclasses
11
+ from ... import async_batch_t
12
+ from ... import identifier_t
13
+
14
+ __all__: list[str] = [
15
+ "Arguments",
16
+ "Data",
17
+ "ENDPOINT_METHOD",
18
+ "ENDPOINT_PATH",
19
+ ]
20
+
21
+ ENDPOINT_METHOD = "POST"
22
+ ENDPOINT_PATH = "api/external/recipes/clear_recipe_outputs"
23
+
24
+
25
+ # DO NOT MODIFY -- This file is generated by type_spec
26
+ @dataclasses.dataclass(kw_only=True)
27
+ class Arguments:
28
+ recipe_key: identifier_t.IdentifierKey
29
+
30
+
31
+ # DO NOT MODIFY -- This file is generated by type_spec
32
+ @dataclasses.dataclass(kw_only=True)
33
+ class Data(async_batch_t.AsyncBatchActionReturn):
34
+ pass
35
+ # DO NOT MODIFY -- This file is generated by type_spec
@@ -10,6 +10,7 @@ from decimal import Decimal # noqa: F401
10
10
  import dataclasses
11
11
  from ... import async_batch_t
12
12
  from ... import base_t
13
+ from ... import generic_upload_t
13
14
  from ... import identifier_t
14
15
 
15
16
  __all__: list[str] = [
@@ -28,8 +29,7 @@ ENDPOINT_PATH = "api/external/uploader/invoke_uploader"
28
29
  class Arguments:
29
30
  file_id: base_t.ObjectId
30
31
  uploader_key: identifier_t.IdentifierKey
31
- material_family_key: identifier_t.IdentifierKey
32
- recipe_key: typing.Optional[identifier_t.IdentifierKey] = None
32
+ destination: generic_upload_t.UploadDestination
33
33
 
34
34
 
35
35
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -7,5 +7,6 @@
7
7
  from .async_batch_t import AsyncBatchRequestPath as AsyncBatchRequestPath
8
8
  from .async_batch_t import AsyncBatchRequest as AsyncBatchRequest
9
9
  from .async_batch_t import AsyncBatchActionReturn as AsyncBatchActionReturn
10
+ from .async_batch_t import SavedAsyncBatchActionReturn as SavedAsyncBatchActionReturn
10
11
  from .async_batch_t import QueuedAsyncBatchRequest as QueuedAsyncBatchRequest
11
12
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -9,10 +9,13 @@ import typing # noqa: F401
9
9
  import datetime # noqa: F401
10
10
  from decimal import Decimal # noqa: F401
11
11
  import uncountable.types.api.equipment.associate_equipment_input as associate_equipment_input_t
12
+ import uncountable.types.api.recipes.associate_recipe_as_input as associate_recipe_as_input_t
12
13
  from uncountable.types import async_batch_t
13
14
  from uncountable.types import base_t
15
+ import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
14
16
  import uncountable.types.api.recipes.create_recipe as create_recipe_t
15
17
  import uncountable.types.api.recipes.edit_recipe_inputs as edit_recipe_inputs_t
18
+ from uncountable.types import generic_upload_t
16
19
  from uncountable.types import identifier_t
17
20
  import uncountable.types.api.uploader.invoke_uploader as invoke_uploader_t
18
21
  from uncountable.types import recipe_identifiers_t
@@ -68,6 +71,76 @@ class AsyncBatchProcessorBase(ABC):
68
71
  batch_reference=req.batch_reference,
69
72
  )
70
73
 
74
+ def associate_recipe_as_input(
75
+ self,
76
+ *,
77
+ recipe_key: identifier_t.IdentifierKey,
78
+ input_key: typing.Optional[identifier_t.IdentifierKey] = None,
79
+ show_in_listings: typing.Optional[bool] = None,
80
+ depends_on: typing.Optional[list[str]] = None,
81
+ ) -> async_batch_t.QueuedAsyncBatchRequest:
82
+ """Create or return the input association for a recipe
83
+
84
+ :param recipe_key: Identifier for the recipe
85
+ :param input_key: Identifier for an input to use for the association. Optionally supplied. If not supplied, one is created
86
+ :param show_in_listings: After associating the input should it be present in listings
87
+ :param depends_on: A list of batch reference keys to process before processing this request
88
+ """
89
+ args = associate_recipe_as_input_t.Arguments(
90
+ recipe_key=recipe_key,
91
+ input_key=input_key,
92
+ show_in_listings=show_in_listings,
93
+ )
94
+ json_data = serialize_for_api(args)
95
+
96
+ batch_reference = str(uuid.uuid4())
97
+
98
+ req = async_batch_t.AsyncBatchRequest(
99
+ path=async_batch_t.AsyncBatchRequestPath.ASSOCIATE_RECIPE_AS_INPUT,
100
+ data=json_data,
101
+ depends_on=depends_on,
102
+ batch_reference=batch_reference,
103
+ )
104
+
105
+ self._enqueue(req)
106
+
107
+ return async_batch_t.QueuedAsyncBatchRequest(
108
+ path=req.path,
109
+ batch_reference=req.batch_reference,
110
+ )
111
+
112
+ def clear_recipe_outputs(
113
+ self,
114
+ *,
115
+ recipe_key: identifier_t.IdentifierKey,
116
+ depends_on: typing.Optional[list[str]] = None,
117
+ ) -> async_batch_t.QueuedAsyncBatchRequest:
118
+ """Clears all output values & output metadata for a given recipe
119
+
120
+ :param recipe_key: The identifier of the recipe
121
+ :param depends_on: A list of batch reference keys to process before processing this request
122
+ """
123
+ args = clear_recipe_outputs_t.Arguments(
124
+ recipe_key=recipe_key,
125
+ )
126
+ json_data = serialize_for_api(args)
127
+
128
+ batch_reference = str(uuid.uuid4())
129
+
130
+ req = async_batch_t.AsyncBatchRequest(
131
+ path=async_batch_t.AsyncBatchRequestPath.CLEAR_RECIPE_OUTPUTS,
132
+ data=json_data,
133
+ depends_on=depends_on,
134
+ batch_reference=batch_reference,
135
+ )
136
+
137
+ self._enqueue(req)
138
+
139
+ return async_batch_t.QueuedAsyncBatchRequest(
140
+ path=req.path,
141
+ batch_reference=req.batch_reference,
142
+ )
143
+
71
144
  def create_recipe(
72
145
  self,
73
146
  *,
@@ -162,8 +235,7 @@ class AsyncBatchProcessorBase(ABC):
162
235
  *,
163
236
  file_id: base_t.ObjectId,
164
237
  uploader_key: identifier_t.IdentifierKey,
165
- material_family_key: identifier_t.IdentifierKey,
166
- recipe_key: typing.Optional[identifier_t.IdentifierKey] = None,
238
+ destination: generic_upload_t.UploadDestination,
167
239
  depends_on: typing.Optional[list[str]] = None,
168
240
  ) -> async_batch_t.QueuedAsyncBatchRequest:
169
241
  """Runs a file through an uploader.
@@ -173,8 +245,7 @@ class AsyncBatchProcessorBase(ABC):
173
245
  args = invoke_uploader_t.Arguments(
174
246
  file_id=file_id,
175
247
  uploader_key=uploader_key,
176
- material_family_key=material_family_key,
177
- recipe_key=recipe_key,
248
+ destination=destination,
178
249
  )
179
250
  json_data = serialize_for_api(args)
180
251
 
@@ -17,6 +17,7 @@ __all__: list[str] = [
17
17
  "AsyncBatchRequest",
18
18
  "AsyncBatchRequestPath",
19
19
  "QueuedAsyncBatchRequest",
20
+ "SavedAsyncBatchActionReturn",
20
21
  ]
21
22
 
22
23
 
@@ -32,6 +33,7 @@ class AsyncBatchRequestPath(StrEnum):
32
33
  ASSOCIATE_EQUIPMENT_INPUT = "equipment/associate_equipment_input"
33
34
  INVOKE_UPLOADER = "uploader/invoke_uploader"
34
35
  ASSOCIATE_RECIPE_AS_INPUT = "recipes/associate_recipe_as_input"
36
+ CLEAR_RECIPE_OUTPUTS = "recipes/clear_recipe_outputs"
35
37
 
36
38
 
37
39
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -53,6 +55,13 @@ class AsyncBatchActionReturn:
53
55
  result_id: typing.Optional[base_t.ObjectId] = None
54
56
 
55
57
 
58
+ # DO NOT MODIFY -- This file is generated by type_spec
59
+ @dataclasses.dataclass(kw_only=True)
60
+ class SavedAsyncBatchActionReturn:
61
+ identifier: str
62
+ result_data: AsyncBatchActionReturn
63
+
64
+
56
65
  # DO NOT MODIFY -- This file is generated by type_spec
57
66
  @dataclasses.dataclass(kw_only=True, frozen=True, eq=True)
58
67
  class QueuedAsyncBatchRequest:
@@ -16,6 +16,7 @@ import uncountable.types.api.recipes.associate_recipe_as_input as associate_reci
16
16
  import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe_as_lot_t
17
17
  from uncountable.types import async_batch_t
18
18
  from uncountable.types import base_t
19
+ import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
19
20
  import uncountable.types.api.chemical.convert_chemical_formats as convert_chemical_formats_t
20
21
  import uncountable.types.api.entity.create_entities as create_entities_t
21
22
  import uncountable.types.api.entity.create_entity as create_entity_t
@@ -29,6 +30,7 @@ from uncountable.types import entity_t
29
30
  import uncountable.types.api.batch.execute_batch as execute_batch_t
30
31
  import uncountable.types.api.batch.execute_batch_load_async as execute_batch_load_async_t
31
32
  from uncountable.types import field_values_t
33
+ from uncountable.types import generic_upload_t
32
34
  import uncountable.types.api.recipes.get_curve as get_curve_t
33
35
  import uncountable.types.api.entity.get_entities_data as get_entities_data_t
34
36
  import uncountable.types.api.inputs.get_input_data as get_input_data_t
@@ -213,6 +215,25 @@ class ClientMethods(ABC):
213
215
  )
214
216
  return self.do_request(api_request=api_request, return_type=associate_recipe_as_lot_t.Data)
215
217
 
218
+ def clear_recipe_outputs(
219
+ self,
220
+ *,
221
+ recipe_key: identifier_t.IdentifierKey,
222
+ ) -> clear_recipe_outputs_t.Data:
223
+ """Clears all output values & output metadata for a given recipe
224
+
225
+ :param recipe_key: The identifier of the recipe
226
+ """
227
+ args = clear_recipe_outputs_t.Arguments(
228
+ recipe_key=recipe_key,
229
+ )
230
+ api_request = APIRequest(
231
+ method=clear_recipe_outputs_t.ENDPOINT_METHOD,
232
+ endpoint=clear_recipe_outputs_t.ENDPOINT_PATH,
233
+ args=args,
234
+ )
235
+ return self.do_request(api_request=api_request, return_type=clear_recipe_outputs_t.Data)
236
+
216
237
  def convert_chemical_formats(
217
238
  self,
218
239
  *,
@@ -235,7 +256,7 @@ class ClientMethods(ABC):
235
256
  self,
236
257
  *,
237
258
  definition_id: base_t.ObjectId,
238
- entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]],
259
+ entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]],
239
260
  entities_to_create: list[create_entities_t.EntityToCreate],
240
261
  ) -> create_entities_t.Data:
241
262
  """Creates new Uncountable entities
@@ -260,7 +281,7 @@ class ClientMethods(ABC):
260
281
  self,
261
282
  *,
262
283
  definition_id: base_t.ObjectId,
263
- entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]],
284
+ entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]],
264
285
  field_values: typing.Optional[typing.Optional[list[field_values_t.FieldRefNameValue]]] = None,
265
286
  ) -> create_entity_t.Data:
266
287
  """Creates a new Uncountable entity
@@ -856,8 +877,7 @@ class ClientMethods(ABC):
856
877
  *,
857
878
  file_id: base_t.ObjectId,
858
879
  uploader_key: identifier_t.IdentifierKey,
859
- material_family_key: identifier_t.IdentifierKey,
860
- recipe_key: typing.Optional[identifier_t.IdentifierKey] = None,
880
+ destination: generic_upload_t.UploadDestination,
861
881
  ) -> invoke_uploader_t.Data:
862
882
  """Runs a file through an uploader.
863
883
 
@@ -865,8 +885,7 @@ class ClientMethods(ABC):
865
885
  args = invoke_uploader_t.Arguments(
866
886
  file_id=file_id,
867
887
  uploader_key=uploader_key,
868
- material_family_key=material_family_key,
869
- recipe_key=recipe_key,
888
+ destination=destination,
870
889
  )
871
890
  api_request = APIRequest(
872
891
  method=invoke_uploader_t.ENDPOINT_METHOD,
@@ -71,6 +71,7 @@ __all__: list[str] = [
71
71
  "ingredient_lot": "Ingredient Lot",
72
72
  "ingredient_role": "Ingredient Role",
73
73
  "ingredient_tag": "Ingredient Subcategory",
74
+ "ingredient_tag_map": "Ingredient Tag Map",
74
75
  "input_group": "Input Group",
75
76
  "inv_local_locations": "Inventory Location",
76
77
  "inventory_amount": "Inventory Amount",
@@ -221,6 +222,7 @@ class EntityType(StrEnum):
221
222
  INGREDIENT_LOT = "ingredient_lot"
222
223
  INGREDIENT_ROLE = "ingredient_role"
223
224
  INGREDIENT_TAG = "ingredient_tag"
225
+ INGREDIENT_TAG_MAP = "ingredient_tag_map"
224
226
  INPUT_GROUP = "input_group"
225
227
  INV_LOCAL_LOCATIONS = "inv_local_locations"
226
228
  INVENTORY_AMOUNT = "inventory_amount"
@@ -5,5 +5,11 @@
5
5
  # DO NOT MODIFY -- This file is generated by type_spec
6
6
  # Kept only for SDK backwards compatibility
7
7
  from .generic_upload_t import GenericRemoteDirectoryScope as GenericRemoteDirectoryScope
8
+ from .generic_upload_t import UploadDestinationType as UploadDestinationType
9
+ from .generic_upload_t import UploadDestinationBase as UploadDestinationBase
10
+ from .generic_upload_t import UploadDestinationProject as UploadDestinationProject
11
+ from .generic_upload_t import UploadDestinationMaterialFamily as UploadDestinationMaterialFamily
12
+ from .generic_upload_t import UploadDestinationRecipe as UploadDestinationRecipe
13
+ from .generic_upload_t import UploadDestination as UploadDestination
8
14
  from .generic_upload_t import GenericUploadStrategy as GenericUploadStrategy
9
15
  # DO NOT MODIFY -- This file is generated by type_spec