UncountablePythonSDK 0.0.115__py3-none-any.whl → 0.0.142.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of UncountablePythonSDK might be problematic. Click here for more details.

Files changed (119) hide show
  1. docs/conf.py +52 -5
  2. docs/index.md +107 -4
  3. docs/integration_examples/create_ingredient.md +43 -0
  4. docs/integration_examples/create_output.md +56 -0
  5. docs/integration_examples/index.md +6 -0
  6. docs/justfile +1 -1
  7. docs/requirements.txt +3 -2
  8. examples/basic_auth.py +7 -0
  9. examples/integration-server/jobs/materials_auto/example_cron.py +3 -0
  10. examples/integration-server/jobs/materials_auto/example_http.py +19 -7
  11. examples/integration-server/jobs/materials_auto/example_instrument.py +100 -0
  12. examples/integration-server/jobs/materials_auto/example_parse.py +140 -0
  13. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  14. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +57 -16
  15. examples/integration-server/jobs/materials_auto/profile.yaml +27 -0
  16. examples/integration-server/pyproject.toml +4 -4
  17. examples/oauth.py +7 -0
  18. pkgs/argument_parser/__init__.py +1 -0
  19. pkgs/argument_parser/_is_namedtuple.py +3 -0
  20. pkgs/argument_parser/argument_parser.py +22 -3
  21. pkgs/serialization_util/serialization_helpers.py +3 -1
  22. pkgs/type_spec/builder.py +66 -19
  23. pkgs/type_spec/builder_types.py +9 -0
  24. pkgs/type_spec/config.py +26 -5
  25. pkgs/type_spec/cross_output_links.py +10 -16
  26. pkgs/type_spec/emit_open_api.py +72 -22
  27. pkgs/type_spec/emit_open_api_util.py +1 -0
  28. pkgs/type_spec/emit_python.py +76 -12
  29. pkgs/type_spec/emit_typescript.py +48 -32
  30. pkgs/type_spec/emit_typescript_util.py +44 -6
  31. pkgs/type_spec/load_types.py +2 -2
  32. pkgs/type_spec/open_api_util.py +16 -1
  33. pkgs/type_spec/parts/base.ts.prepart +4 -0
  34. pkgs/type_spec/type_info/emit_type_info.py +37 -4
  35. pkgs/type_spec/ui_entry_actions/generate_ui_entry_actions.py +1 -0
  36. pkgs/type_spec/value_spec/__main__.py +2 -2
  37. pkgs/type_spec/value_spec/emit_python.py +6 -1
  38. uncountable/core/client.py +10 -3
  39. uncountable/integration/cli.py +175 -23
  40. uncountable/integration/executors/executors.py +1 -2
  41. uncountable/integration/executors/generic_upload_executor.py +1 -1
  42. uncountable/integration/http_server/types.py +3 -1
  43. uncountable/integration/job.py +35 -3
  44. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  45. uncountable/integration/queue_runner/command_server/command_client.py +89 -0
  46. uncountable/integration/queue_runner/command_server/command_server.py +117 -5
  47. uncountable/integration/queue_runner/command_server/constants.py +4 -0
  48. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +51 -0
  49. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +34 -11
  50. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +102 -1
  51. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +180 -0
  52. uncountable/integration/queue_runner/command_server/types.py +44 -1
  53. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +189 -8
  54. uncountable/integration/queue_runner/datastore/interface.py +13 -0
  55. uncountable/integration/queue_runner/datastore/model.py +8 -1
  56. uncountable/integration/queue_runner/job_scheduler.py +85 -21
  57. uncountable/integration/queue_runner/queue_runner.py +10 -2
  58. uncountable/integration/queue_runner/types.py +2 -0
  59. uncountable/integration/queue_runner/worker.py +28 -29
  60. uncountable/integration/scheduler.py +121 -23
  61. uncountable/integration/server.py +36 -6
  62. uncountable/integration/telemetry.py +129 -8
  63. uncountable/integration/webhook_server/entrypoint.py +2 -0
  64. uncountable/types/__init__.py +38 -0
  65. uncountable/types/api/entity/create_or_update_entity.py +1 -0
  66. uncountable/types/api/entity/export_entities.py +13 -0
  67. uncountable/types/api/entity/list_aggregate.py +79 -0
  68. uncountable/types/api/entity/list_entities.py +25 -0
  69. uncountable/types/api/entity/set_barcode.py +43 -0
  70. uncountable/types/api/entity/transition_entity_phase.py +2 -1
  71. uncountable/types/api/files/download_file.py +15 -1
  72. uncountable/types/api/integrations/__init__.py +1 -0
  73. uncountable/types/api/integrations/publish_realtime_data.py +41 -0
  74. uncountable/types/api/integrations/push_notification.py +49 -0
  75. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  76. uncountable/types/api/listing/__init__.py +1 -0
  77. uncountable/types/api/listing/fetch_listing.py +57 -0
  78. uncountable/types/api/notebooks/__init__.py +1 -0
  79. uncountable/types/api/notebooks/add_notebook_content.py +119 -0
  80. uncountable/types/api/outputs/get_output_organization.py +173 -0
  81. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  82. uncountable/types/api/recipes/get_recipe_output_metadata.py +2 -2
  83. uncountable/types/api/recipes/get_recipes_data.py +29 -0
  84. uncountable/types/api/recipes/lock_recipes.py +2 -1
  85. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  86. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  87. uncountable/types/api/runsheet/export_default_runsheet.py +44 -0
  88. uncountable/types/api/uploader/complete_async_parse.py +46 -0
  89. uncountable/types/api/user/__init__.py +1 -0
  90. uncountable/types/api/user/get_current_user_info.py +40 -0
  91. uncountable/types/async_batch_processor.py +266 -0
  92. uncountable/types/async_batch_t.py +5 -0
  93. uncountable/types/client_base.py +432 -2
  94. uncountable/types/client_config.py +1 -0
  95. uncountable/types/client_config_t.py +10 -0
  96. uncountable/types/entity_t.py +9 -1
  97. uncountable/types/exports_t.py +1 -0
  98. uncountable/types/integration_server_t.py +2 -0
  99. uncountable/types/integration_session.py +10 -0
  100. uncountable/types/integration_session_t.py +60 -0
  101. uncountable/types/integrations.py +10 -0
  102. uncountable/types/integrations_t.py +62 -0
  103. uncountable/types/listing.py +46 -0
  104. uncountable/types/listing_t.py +533 -0
  105. uncountable/types/notices.py +8 -0
  106. uncountable/types/notices_t.py +37 -0
  107. uncountable/types/notifications.py +11 -0
  108. uncountable/types/notifications_t.py +74 -0
  109. uncountable/types/queued_job.py +2 -0
  110. uncountable/types/queued_job_t.py +20 -2
  111. uncountable/types/sockets.py +20 -0
  112. uncountable/types/sockets_t.py +169 -0
  113. uncountable/types/uploader.py +24 -0
  114. uncountable/types/uploader_t.py +222 -0
  115. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/METADATA +5 -2
  116. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/RECORD +118 -79
  117. docs/quickstart.md +0 -19
  118. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/WHEEL +0 -0
  119. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,61 @@
1
+ import random
2
+ from dataclasses import dataclass
3
+ from datetime import UTC, datetime
4
+ from decimal import Decimal
5
+
6
+ from uncountable.integration.job import JobArguments, WebhookJob, register_job
7
+ from uncountable.types import (
8
+ base_t,
9
+ identifier_t,
10
+ job_definition_t,
11
+ recipe_links_t,
12
+ set_recipe_outputs_t,
13
+ )
14
+
15
+
16
+ @dataclass(kw_only=True)
17
+ class PredictionsPayload:
18
+ output_id: base_t.ObjectId
19
+ recipe_ids: list[base_t.ObjectId]
20
+
21
+
22
+ @register_job
23
+ class PredictionsExample(WebhookJob[PredictionsPayload]):
24
+ def run(
25
+ self, args: JobArguments, payload: PredictionsPayload
26
+ ) -> job_definition_t.JobResult:
27
+ recipe_data = args.client.get_recipes_data(recipe_ids=payload.recipe_ids)
28
+ formatted_datetime = datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S")
29
+
30
+ for recipe in recipe_data.recipes:
31
+ test_sample_name = f"Predictions Model ({formatted_datetime})"
32
+ created_recipe_id = args.client.create_recipe(
33
+ name=test_sample_name,
34
+ material_family_id=1,
35
+ workflow_id=1,
36
+ definition_key=identifier_t.IdentifierKeyRefName(
37
+ ref_name="unc_test_sample"
38
+ ),
39
+ ).result_id
40
+ args.client.set_recipe_outputs(
41
+ output_data=[
42
+ set_recipe_outputs_t.RecipeOutputValue(
43
+ recipe_id=created_recipe_id,
44
+ output_id=payload.output_id,
45
+ experiment_num=1,
46
+ value_numeric=Decimal(random.random() * 10),
47
+ )
48
+ ]
49
+ )
50
+ args.client.create_recipe_link(
51
+ recipe_from_key=identifier_t.IdentifierKeyId(id=recipe.recipe_id),
52
+ recipe_to_key=identifier_t.IdentifierKeyId(id=created_recipe_id),
53
+ link_type=recipe_links_t.RecipeLinkType.CHILD,
54
+ name=test_sample_name,
55
+ )
56
+
57
+ return job_definition_t.JobResult(success=True)
58
+
59
+ @property
60
+ def webhook_payload_type(self) -> type:
61
+ return PredictionsPayload
@@ -1,8 +1,19 @@
1
1
  from io import BytesIO
2
2
 
3
+ from openpyxl import Workbook, load_workbook
3
4
  from uncountable.core.file_upload import DataFileUpload, FileUpload
4
5
  from uncountable.integration.job import JobArguments, RunsheetWebhookJob, register_job
5
- from uncountable.types import entity_t
6
+ from uncountable.types import (
7
+ download_file_t,
8
+ entity_t,
9
+ export_default_runsheet_t,
10
+ identifier_t,
11
+ webhook_job_t,
12
+ )
13
+ from uncountable.types.client_base import APIRequest
14
+
15
+ RUNSHEET_REF_NAME = "recipe_export_runsheet"
16
+ RUNSHEET_REF_NAME_2 = "recipe_export_runsheet_2"
6
17
 
7
18
 
8
19
  @register_job
@@ -11,25 +22,55 @@ class StandardRunsheetGenerator(RunsheetWebhookJob):
11
22
  self,
12
23
  *,
13
24
  args: JobArguments,
14
- entities: list[entity_t.Entity],
25
+ payload: webhook_job_t.RunsheetWebhookPayload,
15
26
  ) -> FileUpload:
16
- args.logger.log_info(f"Generating runsheet for {len(entities)} entities")
27
+ args.logger.log_info("Exporting default runsheets")
28
+
29
+ entity_identifiers: list[identifier_t.IdentifierKey] = [
30
+ identifier_t.IdentifierKeyId(id=entity.id) for entity in payload.entities
31
+ ]
32
+
33
+ combined_wb = Workbook()
34
+ combined_sheet = combined_wb.active or combined_wb.create_sheet(
35
+ title="Combined Runsheet"
36
+ )
37
+ combined_sheet.title = "Combined Runsheet"
38
+
39
+ for ref_name in [RUNSHEET_REF_NAME, RUNSHEET_REF_NAME_2]:
40
+ api_request = APIRequest(
41
+ method=export_default_runsheet_t.ENDPOINT_METHOD,
42
+ endpoint=export_default_runsheet_t.ENDPOINT_PATH,
43
+ args=export_default_runsheet_t.Arguments(
44
+ entities=entity_identifiers,
45
+ runsheet_key=identifier_t.IdentifierKeyRefName(ref_name=ref_name),
46
+ entity_type=payload.entities[0].type
47
+ if payload.entities
48
+ else entity_t.EntityType.RECIPE,
49
+ ),
50
+ )
17
51
 
18
- content = []
19
- content.append("STANDARD LAB RUNSHEET\n")
20
- content.append("=" * 30 + "\n\n")
52
+ response = args.client.do_request(
53
+ api_request=api_request,
54
+ return_type=export_default_runsheet_t.Data,
55
+ )
21
56
 
22
- for entity in entities:
23
- content.append(f"Type: {entity.type}\n")
24
- content.append(f"ID: {entity.id}\n")
57
+ file_query = download_file_t.FileDownloadQueryTextDocumentId(
58
+ text_document_id=response.text_document_id,
59
+ )
25
60
 
26
- if hasattr(entity, "field_values") and entity.field_values:
27
- content.append("Field Values:\n")
28
- for field in entity.field_values:
29
- content.append(f" - {field.name}: {field.value}\n")
61
+ downloaded_files = args.client.download_files(file_query=file_query)
62
+ file_data = downloaded_files[0].data.read()
30
63
 
31
- content.append("\n")
64
+ wb = load_workbook(filename=BytesIO(file_data))
65
+ for sheet_name in wb.sheetnames:
66
+ for row in wb[sheet_name].iter_rows(values_only=True):
67
+ combined_sheet.append(row)
32
68
 
33
- runsheet_data = "".join(content).encode("utf-8")
69
+ output = BytesIO()
70
+ combined_wb.save(output)
71
+ output.seek(0)
34
72
 
35
- return DataFileUpload(name="lab_runsheet.txt", data=BytesIO(runsheet_data))
73
+ return DataFileUpload(
74
+ data=output,
75
+ name="combined_runsheet.xlsx",
76
+ )
@@ -75,3 +75,30 @@ jobs:
75
75
  executor:
76
76
  type: script
77
77
  import_path: example_runsheet_wh
78
+ - id: example_instrument
79
+ type: webhook
80
+ name: Webhook Instrument Connection
81
+ signature_key_secret:
82
+ type: env
83
+ env_key: WH_INSTRUMENT_SIGNATURE_KEY
84
+ executor:
85
+ type: script
86
+ import_path: example_instrument
87
+ - id: example_predictions
88
+ type: webhook
89
+ name: Webhook Predictions
90
+ signature_key_secret:
91
+ type: env
92
+ env_key: WH_PREDICTIONS_SIGNATURE_KEY
93
+ executor:
94
+ type: script
95
+ import_path: example_predictions
96
+ - id: example_parse
97
+ type: webhook
98
+ name: Webhook Parse
99
+ signature_key_secret:
100
+ type: env
101
+ env_key: WH_PARSE_SIGNATURE_KEY
102
+ executor:
103
+ type: script
104
+ import_path: example_parse
@@ -9,7 +9,7 @@ dependencies = [
9
9
  "ruff == 0.*",
10
10
  "openpyxl == 3.*",
11
11
  "more_itertools == 10.*",
12
- "types-paramiko ==3.5.0.20240918",
12
+ "types-paramiko ==4.0.0.20250822",
13
13
  "types-openpyxl == 3.*",
14
14
  "types-pysftp == 0.*",
15
15
  "types-pytz ==2025.*",
@@ -17,7 +17,8 @@ dependencies = [
17
17
  "types-simplejson == 3.*",
18
18
  "pandas-stubs",
19
19
  "xlrd == 2.*",
20
- "msgspec == 0.19.*"
20
+ "msgspec == 0.19.*",
21
+ "websockets==15.0.1",
21
22
  ]
22
23
 
23
24
  [tool.mypy]
@@ -114,7 +115,6 @@ lint.ignore = [
114
115
  "PD010", # .pivottable. Should add
115
116
  "PD011", # use .to_numpy. Skip
116
117
  "PD015", # use .merge. Should add
117
- "PD901", # avoid generic df name. Skip
118
118
  "PERF203", # avoid try except in loop. Skip
119
119
  "PERF401", # use list comprehension. Skip
120
120
  "PERF402", # use list.copy. Skip
@@ -194,7 +194,6 @@ lint.ignore = [
194
194
  "RUF022", # __all__ is not sorted. skip due to isort complication
195
195
  "UP017", # use datetime.UTC, TODO add back in
196
196
  "UP035", # replacing List with list, TODO add back in
197
- "UP038", # isinstance X | Y instead of (X, Y), TODO add back in
198
197
  # ## FROM RUFF UPGRADE
199
198
  "PLC2701", # private name imports. should add
200
199
  "PLR1702", # too many nested blocks -- add with config. skip
@@ -213,6 +212,7 @@ exclude = [
213
212
 
214
213
  [tool.ruff.lint.isort]
215
214
  split-on-trailing-comma = true
215
+ known-first-party = ["pkgs"]
216
216
 
217
217
  [tool.ruff.lint.mccabe]
218
218
  max-complexity = 130 # goal would be to bring this down to ~50 or so
examples/oauth.py ADDED
@@ -0,0 +1,7 @@
1
+ from uncountable.core.client import Client
2
+ from uncountable.core.types import AuthDetailsOAuth
3
+
4
+ client = Client(
5
+ base_url="https://app.uncountable.com",
6
+ auth_details=AuthDetailsOAuth(refresh_token="x"),
7
+ )
@@ -7,6 +7,7 @@ from .argument_parser import ParserFunction as ParserFunction
7
7
  from .argument_parser import ParserOptions as ParserOptions
8
8
  from .argument_parser import SourceEncoding as SourceEncoding
9
9
  from .argument_parser import build_parser as build_parser
10
+ from .argument_parser import is_missing as is_missing
10
11
  from .argument_parser import is_optional as is_optional
11
12
  from .argument_parser import is_union as is_union
12
13
  from .case_convert import camel_to_snake_case as camel_to_snake_case
@@ -5,6 +5,9 @@ def is_namedtuple_type(x: Any) -> bool:
5
5
  if not hasattr(x, "__annotations__"):
6
6
  return False
7
7
 
8
+ if not hasattr(x, "__bases__"):
9
+ return False
10
+
8
11
  b = x.__bases__
9
12
  if len(b) != 1 or b[0] is not tuple:
10
13
  return False
@@ -168,6 +168,23 @@ def _invoke_membership_parser(
168
168
  raise ValueError(f"Expected value from {expected_values} but got value {value}")
169
169
 
170
170
 
171
+ # Uses `is` to compare
172
+ def _build_identity_parser(
173
+ identity_value: T,
174
+ ) -> ParserFunction[T]:
175
+ def parse(value: typing.Any) -> T:
176
+ if value is identity_value:
177
+ return identity_value
178
+ raise ValueError(
179
+ f"Expected value {identity_value} (type: {type(identity_value)}) but got value {value} (type: {type(value)})"
180
+ )
181
+
182
+ return parse
183
+
184
+
185
+ NONE_IDENTITY_PARSER = _build_identity_parser(None)
186
+
187
+
171
188
  def _build_parser_discriminated_union(
172
189
  context: ParserContext,
173
190
  discriminator_raw: str,
@@ -223,7 +240,7 @@ def _build_parser_inner(
223
240
  )
224
241
 
225
242
  if dataclasses.is_dataclass(parsed_type):
226
- return _build_parser_dataclass(parsed_type, context) # type: ignore[arg-type]
243
+ return _build_parser_dataclass(parsed_type, context)
227
244
 
228
245
  # namedtuple support
229
246
  if is_namedtuple_type(parsed_type):
@@ -243,8 +260,10 @@ def _build_parser_inner(
243
260
  for field_name, field_parser in field_parsers
244
261
  })
245
262
 
263
+ # IMPROVE: unclear why we need == here
246
264
  if parsed_type == type(None): # noqa: E721
247
- return lambda value: _invoke_membership_parser({None}, value) # type: ignore
265
+ # Need to convince type checker that parsed_type is type(None)
266
+ return typing.cast(ParserFunction[T], NONE_IDENTITY_PARSER)
248
267
 
249
268
  origin = typing.get_origin(parsed_type)
250
269
  if origin is tuple:
@@ -271,7 +290,7 @@ def _build_parser_inner(
271
290
  arg_parsers = [_build_parser_inner(arg, context) for arg in sorted_args]
272
291
  return lambda value: _invoke_fallback_parsers(parsed_type, arg_parsers, value)
273
292
 
274
- if parsed_type is typing.Any: # type: ignore[comparison-overlap]
293
+ if parsed_type is typing.Any:
275
294
  return lambda value: value
276
295
 
277
296
  if origin in (list, set):
@@ -85,7 +85,9 @@ def _serialize_dataclass(d: Any) -> dict[str, JsonValue]:
85
85
 
86
86
 
87
87
  def _to_string_value(value: Any) -> str:
88
- assert isinstance(value, (Decimal, int))
88
+ assert isinstance(value, (Decimal, int)), (
89
+ f"Expecting decimal or int, received: {value} (type={type(value)})"
90
+ )
89
91
  return str(value)
90
92
 
91
93
 
pkgs/type_spec/builder.py CHANGED
@@ -13,14 +13,25 @@ from enum import Enum, StrEnum, auto
13
13
  from typing import Any, Self
14
14
 
15
15
  from . import util
16
- from .cross_output_links import CrossOutputPaths
16
+ from .builder_types import CrossOutputPaths
17
17
  from .non_discriminated_union_exceptions import NON_DISCRIMINATED_UNION_EXCEPTIONS
18
- from .util import parse_type_str, unused
18
+ from .util import parse_type_str
19
19
 
20
20
  RawDict = dict[Any, Any]
21
21
  EndpointKey = str
22
22
 
23
23
 
24
+ class PathMapping(StrEnum):
25
+ NO_MAPPING = "no_mapping"
26
+ DEFAULT_MAPPING = "default_mapping"
27
+
28
+
29
+ @dataclass(kw_only=True)
30
+ class APIEndpointInfo:
31
+ root_path: str
32
+ path_mapping: PathMapping
33
+
34
+
24
35
  class StabilityLevel(StrEnum):
25
36
  """These are currently used for open api,
26
37
  see: https://github.com/Tufin/oasdiff/blob/main/docs/STABILITY.md
@@ -297,6 +308,7 @@ class SpecTypeDefn(SpecType):
297
308
  self._is_value_to_string = False
298
309
  self._is_valid_parameter = True
299
310
  self._is_dynamic_allowed = False
311
+ self._default_extant: PropertyExtant | None = None
300
312
  self.ext_info: Any = None
301
313
 
302
314
  def is_value_converted(self) -> bool:
@@ -329,6 +341,7 @@ class SpecTypeDefn(SpecType):
329
341
  "ext_info",
330
342
  "label",
331
343
  "is_dynamic_allowed",
344
+ "default_extant",
332
345
  ]
333
346
  + extra_names,
334
347
  )
@@ -340,6 +353,10 @@ class SpecTypeDefn(SpecType):
340
353
  assert isinstance(is_dynamic_allowed, bool)
341
354
  self._is_dynamic_allowed = is_dynamic_allowed
342
355
 
356
+ default_extant = data.get("default_extant")
357
+ if default_extant is not None:
358
+ self._default_extant = PropertyExtant(default_extant)
359
+
343
360
  def _process_property(
344
361
  self, builder: SpecBuilder, spec_name: str, data: RawDict
345
362
  ) -> SpecProperty:
@@ -358,18 +375,18 @@ class SpecTypeDefn(SpecType):
358
375
  ],
359
376
  )
360
377
  try:
361
- extant_type = data.get("extant")
378
+ extant_type_str = data.get("extant")
379
+ extant_type = (
380
+ PropertyExtant(extant_type_str) if extant_type_str is not None else None
381
+ )
382
+ extant = extant_type or self._default_extant
362
383
  if spec_name.endswith("?"):
363
- if extant_type is not None:
384
+ if extant is not None:
364
385
  raise Exception("cannot specify extant with ?")
365
386
  extant = PropertyExtant.optional
366
387
  name = spec_name[:-1]
367
388
  else:
368
- extant = (
369
- PropertyExtant.required
370
- if extant_type is None
371
- else PropertyExtant(extant_type)
372
- )
389
+ extant = extant or PropertyExtant.required
373
390
  name = spec_name
374
391
 
375
392
  property_name_case = self.name_case
@@ -406,7 +423,16 @@ class SpecTypeDefn(SpecType):
406
423
  parse_require = False
407
424
  literal = unwrap_literal_type(ptype)
408
425
  if literal is not None:
409
- default = literal.value
426
+ if isinstance(
427
+ literal.value_type, SpecTypeDefnStringEnum
428
+ ) and isinstance(literal.value, str):
429
+ resolved_value = literal.value_type.values.get(literal.value)
430
+ assert resolved_value is not None, (
431
+ f"Value {literal.value} not found in enum"
432
+ )
433
+ default = resolved_value.value
434
+ else:
435
+ default = literal.value
410
436
  has_default = True
411
437
  parse_require = True
412
438
 
@@ -734,6 +760,8 @@ class SpecTypeDefnStringEnum(SpecTypeDefn):
734
760
  builder.ensure(
735
761
  isinstance(enum_value, str), "enum value should be string"
736
762
  )
763
+ assert isinstance(enum_value, str)
764
+
737
765
  deprecated = value.get("deprecated", False)
738
766
  builder.ensure(
739
767
  isinstance(deprecated, bool),
@@ -835,7 +863,7 @@ class _EndpointPathDetails:
835
863
 
836
864
 
837
865
  def _resolve_endpoint_path(
838
- path: str, api_endpoints: dict[EndpointKey, str]
866
+ path: str, api_endpoints: dict[EndpointKey, APIEndpointInfo]
839
867
  ) -> _EndpointPathDetails:
840
868
  root_path_source = path.split("/")[0]
841
869
  root_match = RE_ENDPOINT_ROOT.fullmatch(root_path_source)
@@ -843,7 +871,7 @@ def _resolve_endpoint_path(
843
871
  raise Exception(f"invalid-api-path-root:{root_path_source}")
844
872
 
845
873
  root_var = root_match.group(1)
846
- root_path = api_endpoints[root_var]
874
+ root_path = api_endpoints[root_var].root_path
847
875
 
848
876
  _, *rest_path = path.split("/", 1)
849
877
  resolved_path = "/".join([root_path] + rest_path)
@@ -911,6 +939,7 @@ class SpecEndpoint:
911
939
  stability_level: StabilityLevel | None
912
940
  # Don't emit TypeScript endpoint code
913
941
  suppress_ts: bool
942
+ deprecated: bool = False
914
943
  async_batch_path: str | None = None
915
944
  result_type: ResultType = ResultType.json
916
945
  has_attachment: bool = False
@@ -928,13 +957,13 @@ class SpecEndpoint:
928
957
  pass
929
958
 
930
959
  def process(self, builder: SpecBuilder, data: RawDict) -> None:
931
- unused(builder)
932
960
  util.check_fields(
933
961
  data,
934
962
  [
935
963
  "method",
936
964
  "path",
937
965
  "data_loader",
966
+ "deprecated",
938
967
  "is_sdk",
939
968
  "stability_level",
940
969
  "async_batch_path",
@@ -954,6 +983,7 @@ class SpecEndpoint:
954
983
  data_loader = data.get("data_loader", False)
955
984
  assert isinstance(data_loader, bool)
956
985
  self.data_loader = data_loader
986
+ self.deprecated = data.get("deprecated", False)
957
987
 
958
988
  is_sdk = data.get("is_sdk", EndpointEmitType.EMIT_NOTHING)
959
989
 
@@ -1080,7 +1110,7 @@ def _parse_const(
1080
1110
  elif const_type.defn_type.name == BaseTypeName.s_dict:
1081
1111
  assert isinstance(value, dict)
1082
1112
  builder.ensure(
1083
- len(const_type.parameters) == 2, "constant-dict-expects-one-type"
1113
+ len(const_type.parameters) == 2, "constant-dict-expects-two-types"
1084
1114
  )
1085
1115
  key_type = const_type.parameters[0]
1086
1116
  value_type = const_type.parameters[1]
@@ -1129,6 +1159,11 @@ def _parse_const(
1129
1159
  )
1130
1160
  return value
1131
1161
 
1162
+ if not const_type.is_base:
1163
+ # IMPROVE: validate the object type properties before emission stage
1164
+ builder.ensure(isinstance(value, dict), "invalid value for object constant")
1165
+ return value
1166
+
1132
1167
  raise Exception("unsupported-const-scalar-type", const_type)
1133
1168
 
1134
1169
 
@@ -1250,7 +1285,8 @@ class SpecNamespace:
1250
1285
 
1251
1286
  assert util.is_valid_type_name(name), f"{name} is not a valid type name"
1252
1287
  assert name not in self.types, f"{name} is duplicate"
1253
- defn_type = defn["type"]
1288
+ defn_type = defn.get("type")
1289
+ assert isinstance(defn_type, str), f"{name} requires a string type"
1254
1290
  spec_type: SpecTypeDefn
1255
1291
  if defn_type == DefnTypeName.s_alias:
1256
1292
  spec_type = SpecTypeDefnAlias(self, name)
@@ -1346,7 +1382,7 @@ class SpecBuilder:
1346
1382
  def __init__(
1347
1383
  self,
1348
1384
  *,
1349
- api_endpoints: dict[EndpointKey, str],
1385
+ api_endpoints: dict[EndpointKey, APIEndpointInfo],
1350
1386
  top_namespace: str,
1351
1387
  cross_output_paths: CrossOutputPaths | None,
1352
1388
  ) -> None:
@@ -1377,9 +1413,13 @@ class SpecBuilder:
1377
1413
  self.emit_id_source_enums: set[SpecTypeDefnStringEnum] = set()
1378
1414
 
1379
1415
  this_dir = os.path.dirname(os.path.realpath(__file__))
1380
- with open(f"{this_dir}/parts/base.py.prepart") as py_base_part:
1416
+ with open(
1417
+ f"{this_dir}/parts/base.py.prepart", encoding="utf-8"
1418
+ ) as py_base_part:
1381
1419
  self.preparts["python"][base_namespace_name] = py_base_part.read()
1382
- with open(f"{this_dir}/parts/base.ts.prepart") as ts_base_part:
1420
+ with open(
1421
+ f"{this_dir}/parts/base.ts.prepart", encoding="utf-8"
1422
+ ) as ts_base_part:
1383
1423
  self.preparts["typescript"][base_namespace_name] = ts_base_part.read()
1384
1424
 
1385
1425
  base_namespace.types["ObjectId"] = SpecTypeDefnObject(
@@ -1506,8 +1546,10 @@ class SpecBuilder:
1506
1546
  if len(path) == 2:
1507
1547
  if isinstance(defn_type, SpecTypeDefnStringEnum):
1508
1548
  assert path[1].parameters is None
1549
+ statement = f"$import: [{defn_type.namespace.name}]"
1509
1550
  self.ensure(
1510
- path[1].name in defn_type.values, f"missing-enum-value: {path}"
1551
+ path[1].name in defn_type.values,
1552
+ f"missing-enum-value: {path} have you specified the dependency in an import statement: {statement}",
1511
1553
  )
1512
1554
  return SpecTypeLiteralWrapper(
1513
1555
  value=path[1].name,
@@ -1554,6 +1596,11 @@ class SpecBuilder:
1554
1596
  f"'examples' in example files are expected to be a list, endpoint_path={path_details.resolved_path}"
1555
1597
  )
1556
1598
  for example in examples_data:
1599
+ if not isinstance(example, dict):
1600
+ raise Exception(
1601
+ f"each example in example file is expected to be a dict, endpoint_path={path_details.resolved_path}"
1602
+ )
1603
+
1557
1604
  arguments = example["arguments"]
1558
1605
  data_example = example["data"]
1559
1606
  if not isinstance(arguments, dict) or not isinstance(data_example, dict):
@@ -0,0 +1,9 @@
1
+ from dataclasses import dataclass
2
+
3
+
4
+ @dataclass(kw_only=True, frozen=True)
5
+ class CrossOutputPaths:
6
+ python_types_output: str
7
+ typescript_types_output: str
8
+ typescript_routes_output_by_endpoint: dict[str, str]
9
+ typespec_files_input: list[str]
pkgs/type_spec/config.py CHANGED
@@ -4,6 +4,7 @@ from dataclasses import dataclass
4
4
  from typing import Self, TypeVar
5
5
 
6
6
  from pkgs.serialization import yaml
7
+ from pkgs.type_spec.builder import APIEndpointInfo, EndpointKey
7
8
 
8
9
  ConfigValueType = str | None | Mapping[str, str | None] | list[str]
9
10
 
@@ -19,6 +20,22 @@ def _parse_string_lookup(
19
20
  }
20
21
 
21
22
 
23
+ VT = TypeVar("VT")
24
+
25
+
26
+ def _parse_data_lookup(
27
+ key: str,
28
+ raw_value: ConfigValueType,
29
+ conv_func: type[VT],
30
+ ) -> dict[str, VT]:
31
+ assert isinstance(raw_value, dict), f"{key} must be key/values"
32
+ return {
33
+ k: conv_func(**v)
34
+ for k, v in raw_value.items()
35
+ if v is not None and isinstance(v, dict)
36
+ }
37
+
38
+
22
39
  @dataclass(kw_only=True)
23
40
  class BaseLanguageConfig:
24
41
  types_output: (
@@ -31,7 +48,9 @@ class BaseLanguageConfig:
31
48
 
32
49
  @dataclass(kw_only=True)
33
50
  class TypeScriptConfig(BaseLanguageConfig):
34
- routes_output: str # folder for generate route files will be located.
51
+ endpoint_to_routes_output: dict[
52
+ EndpointKey, str
53
+ ] # folder for generate route files will be located.
35
54
  type_info_output: str # folder for generated type info files
36
55
  id_source_output: str | None = None # folder for emitted id source maps.
37
56
  endpoint_to_frontend_app_type: dict[
@@ -39,7 +58,7 @@ class TypeScriptConfig(BaseLanguageConfig):
39
58
  ] # map from api_endpoint to frontend app type
40
59
 
41
60
  def __post_init__(self: Self) -> None:
42
- self.routes_output = self.routes_output
61
+ self.endpoint_to_routes_output = self.endpoint_to_routes_output
43
62
  self.type_info_output = os.path.abspath(self.type_info_output)
44
63
  self.id_source_output = (
45
64
  os.path.abspath(self.id_source_output)
@@ -100,7 +119,7 @@ class OpenAPIConfig(BaseLanguageConfig):
100
119
  class Config:
101
120
  top_namespace: str
102
121
  type_spec_types: list[str] # folders containing the yaml type spec definitions
103
- api_endpoint: dict[str, str]
122
+ api_endpoint: dict[str, APIEndpointInfo]
104
123
  # languages
105
124
  typescript: TypeScriptConfig | None
106
125
  python: PythonConfig
@@ -125,8 +144,10 @@ def parse_yaml_config(config_file: str) -> Config:
125
144
  )
126
145
  type_spec_types = [os.path.abspath(folder) for folder in raw_type_spec_types]
127
146
 
128
- api_endpoint = _parse_string_lookup(
129
- "api_endpoint", raw_config.get("api_endpoint", {}), lambda x: x
147
+ api_endpoint = _parse_data_lookup(
148
+ "api_endpoint",
149
+ raw_config.get("api_endpoint", {}),
150
+ APIEndpointInfo,
130
151
  )
131
152
 
132
153
  raw_typescript = raw_config.get("typescript")