UncountablePythonSDK 0.0.126__py3-none-any.whl → 0.0.142.dev0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.

Files changed (84)
  1. docs/requirements.txt +1 -1
  2. examples/integration-server/jobs/materials_auto/example_cron.py +1 -1
  3. examples/integration-server/jobs/materials_auto/example_instrument.py +68 -38
  4. examples/integration-server/jobs/materials_auto/example_parse.py +140 -0
  5. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  6. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +57 -16
  7. examples/integration-server/jobs/materials_auto/profile.yaml +18 -0
  8. examples/integration-server/pyproject.toml +4 -4
  9. pkgs/argument_parser/argument_parser.py +20 -1
  10. pkgs/serialization_util/serialization_helpers.py +3 -1
  11. pkgs/type_spec/builder.py +43 -13
  12. pkgs/type_spec/builder_types.py +9 -0
  13. pkgs/type_spec/cross_output_links.py +2 -10
  14. pkgs/type_spec/emit_open_api.py +0 -12
  15. pkgs/type_spec/emit_python.py +72 -11
  16. pkgs/type_spec/emit_typescript.py +2 -2
  17. pkgs/type_spec/emit_typescript_util.py +28 -6
  18. pkgs/type_spec/load_types.py +1 -1
  19. pkgs/type_spec/parts/base.ts.prepart +3 -0
  20. pkgs/type_spec/type_info/emit_type_info.py +27 -3
  21. pkgs/type_spec/value_spec/__main__.py +2 -2
  22. uncountable/core/client.py +10 -3
  23. uncountable/integration/cli.py +89 -2
  24. uncountable/integration/executors/executors.py +1 -2
  25. uncountable/integration/executors/generic_upload_executor.py +1 -1
  26. uncountable/integration/job.py +3 -3
  27. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  28. uncountable/integration/queue_runner/command_server/command_client.py +63 -0
  29. uncountable/integration/queue_runner/command_server/command_server.py +77 -5
  30. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +33 -0
  31. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +27 -13
  32. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +53 -1
  33. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +135 -0
  34. uncountable/integration/queue_runner/command_server/types.py +44 -1
  35. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +132 -8
  36. uncountable/integration/queue_runner/datastore/interface.py +3 -0
  37. uncountable/integration/queue_runner/datastore/model.py +8 -1
  38. uncountable/integration/queue_runner/job_scheduler.py +78 -3
  39. uncountable/integration/queue_runner/types.py +2 -0
  40. uncountable/integration/queue_runner/worker.py +28 -26
  41. uncountable/integration/scheduler.py +64 -13
  42. uncountable/integration/server.py +36 -6
  43. uncountable/integration/telemetry.py +120 -7
  44. uncountable/integration/webhook_server/entrypoint.py +2 -0
  45. uncountable/types/__init__.py +18 -0
  46. uncountable/types/api/entity/list_aggregate.py +79 -0
  47. uncountable/types/api/entity/list_entities.py +25 -0
  48. uncountable/types/api/entity/set_barcode.py +43 -0
  49. uncountable/types/api/entity/transition_entity_phase.py +2 -1
  50. uncountable/types/api/files/download_file.py +15 -1
  51. uncountable/types/api/integrations/push_notification.py +2 -0
  52. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  53. uncountable/types/api/listing/__init__.py +1 -0
  54. uncountable/types/api/listing/fetch_listing.py +57 -0
  55. uncountable/types/api/notebooks/__init__.py +1 -0
  56. uncountable/types/api/notebooks/add_notebook_content.py +119 -0
  57. uncountable/types/api/outputs/get_output_organization.py +1 -1
  58. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  59. uncountable/types/api/recipes/get_recipes_data.py +29 -0
  60. uncountable/types/api/recipes/lock_recipes.py +2 -1
  61. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  62. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  63. uncountable/types/api/runsheet/export_default_runsheet.py +44 -0
  64. uncountable/types/api/uploader/complete_async_parse.py +4 -0
  65. uncountable/types/async_batch_processor.py +222 -0
  66. uncountable/types/async_batch_t.py +4 -0
  67. uncountable/types/client_base.py +367 -2
  68. uncountable/types/client_config.py +1 -0
  69. uncountable/types/client_config_t.py +10 -0
  70. uncountable/types/entity_t.py +3 -1
  71. uncountable/types/integration_server_t.py +2 -0
  72. uncountable/types/listing.py +46 -0
  73. uncountable/types/listing_t.py +533 -0
  74. uncountable/types/notices.py +8 -0
  75. uncountable/types/notices_t.py +37 -0
  76. uncountable/types/queued_job.py +1 -0
  77. uncountable/types/queued_job_t.py +9 -0
  78. uncountable/types/sockets.py +9 -0
  79. uncountable/types/sockets_t.py +99 -0
  80. uncountable/types/uploader_t.py +3 -2
  81. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/METADATA +4 -2
  82. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/RECORD +84 -68
  83. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/WHEEL +0 -0
  84. {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/top_level.txt +0 -0
docs/requirements.txt CHANGED
@@ -1,4 +1,4 @@
-furo==2025.7.19
+furo==2025.9.25
 myst-parser==4.0.1
 sphinx-autoapi==3.6.0
 sphinx-copybutton==0.5.2
examples/integration-server/jobs/materials_auto/example_cron.py CHANGED
@@ -17,5 +17,5 @@ class MyCronJob(CronJob):
             if field_val.field_ref_name == "name":
                 name = field_val.value
         args.logger.log_info(f"material family found with name: {name}")
-        time.sleep(1.5)
+        time.sleep(20)
         return JobResult(success=True)
examples/integration-server/jobs/materials_auto/example_instrument.py CHANGED
@@ -1,3 +1,4 @@
+import json
 import time
 from dataclasses import dataclass
 from decimal import Decimal
@@ -5,13 +6,16 @@ from decimal import Decimal
 from uncountable.integration.job import JobArguments, WebhookJob, register_job
 from uncountable.types import (
     base_t,
-    entity_t,
     identifier_t,
-    integrations_t,
     job_definition_t,
-    publish_realtime_data_t,
+    sockets_t,
 )
-from uncountable.types.client_base import APIRequest
+from uncountable.types.integration_session_t import IntegrationSessionInstrument
+from websockets.sync.client import connect
+from websockets.typing import Data
+
+from pkgs.argument_parser.argument_parser import CachedParser
+from pkgs.serialization_util import serialize_for_api


 @dataclass(kw_only=True)
@@ -24,45 +28,71 @@ class InstrumentExample(WebhookJob[InstrumentPayload]):
     def run(
         self, args: JobArguments, payload: InstrumentPayload
     ) -> job_definition_t.JobResult:
-        equipment_data = args.client.get_entities_data(
-            entity_type=entity_t.EntityType.EQUIPMENT,
-            entity_ids=[payload.equipment_id],
-        ).entity_details[0]
-
-        # Load the instrument's connection details from the entity
-        instrument_id = None
-        for field in equipment_data.field_values:
-            if field.field_ref_name == "ins_instrument_id":
-                instrument_id = field.value
-
-        if instrument_id is None:
-            args.logger.log_error("Could not find instrument ID")
-            return job_definition_t.JobResult(success=False)
+        parser: CachedParser[sockets_t.SocketResponse] = CachedParser(
+            sockets_t.SocketResponse  # type:ignore[arg-type]
+        )

-        args.logger.log_info(f"Instrument ID: {instrument_id}")
+        def parse_message(message: Data) -> sockets_t.SocketEventData | None:
+            try:
+                return parser.parse_api(json.loads(message)).data
+            except ValueError:
+                return None

-        for i in range(10):
-            req_args = publish_realtime_data_t.Arguments(
-                data_package=integrations_t.DataPackageNumericReading(
-                    value=Decimal(i * 15),
-                    target_entity=entity_t.EntityIdentifier(
-                        identifier_key=identifier_t.IdentifierKeyId(
-                            id=payload.equipment_id
-                        ),
-                        type=entity_t.EntityType.EQUIPMENT,
-                    ),
-                ),
+        integration_session = IntegrationSessionInstrument(
+            equipment_key=identifier_t.IdentifierKeyId(id=payload.equipment_id)
+        )
+        registration_info = args.client.register_sockets_token(
+            socket_request=sockets_t.SocketRequestIntegrationSession(
+                integration_session=integration_session
             )
-            api_request = APIRequest(
-                method=publish_realtime_data_t.ENDPOINT_METHOD,
-                endpoint=publish_realtime_data_t.ENDPOINT_PATH,
-                args=req_args,
-            )
-            args.client.do_request(
-                api_request=api_request, return_type=publish_realtime_data_t.Data
+        ).response
+        token = registration_info.token
+        room_key = registration_info.room_key
+        args.logger.log_info(f"Token: {token}")
+
+        with connect(
+            "ws://host.docker.internal:8765",
+            additional_headers={
+                "Authorization": f"Bearer {token}",
+                "X-UNC-EXTERNAL": "true",
+            },
+        ) as ws:
+            ws.send(
+                json.dumps(
+                    serialize_for_api(
+                        sockets_t.JoinRoomWithTokenSocketClientMessage(token=token)
+                    )
+                )
             )
-            time.sleep(0.75)
+            for i in range(10):
+                args.logger.log_info("Sending reading...")
+                ws.send(
+                    json.dumps(
+                        serialize_for_api(
+                            sockets_t.SendInstrumentReadingClientMessage(
+                                value=Decimal(i * 100), room_key=room_key
+                            )
+                        )
+                    )
+                )
+                time.sleep(0.75)
+
+            while True:
+                message = parse_message(ws.recv())
+                match message:
+                    case sockets_t.UsersInRoomUpdatedEventData():
+                        num_users = len(message.user_ids)
+                        if num_users <= 1:
+                            break
+                        else:
+                            args.logger.log_info(
+                                f"Session still open, {num_users} users in room."
+                            )
+                    case _:
+                        args.logger.log_info("Session still open...")
+                        continue

+        args.logger.log_info("Session closed.")
         return job_definition_t.JobResult(success=True)

     @property
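The session loop in this new example leans on Python 3.10+ structural pattern matching: `case sockets_t.UsersInRoomUpdatedEventData():` selects a branch by the runtime type of the parsed event. A minimal self-contained sketch of that mechanic, with hypothetical stand-in classes rather than the real sockets_t types:

from dataclasses import dataclass


@dataclass
class UsersInRoomUpdated:  # hypothetical stand-in for sockets_t.UsersInRoomUpdatedEventData
    user_ids: list[int]


@dataclass
class HeartbeatEvent:  # hypothetical stand-in for any other event variant
    pass


def describe(message: object) -> str:
    # `case ClassName():` matches when message is an instance of ClassName,
    # which is how the job narrows the union of socket event types above.
    match message:
        case UsersInRoomUpdated(user_ids=ids):
            return f"{len(ids)} users in room"
        case _:
            return "other event"


assert describe(UsersInRoomUpdated(user_ids=[1, 2])) == "2 users in room"
assert describe(HeartbeatEvent()) == "other event"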
examples/integration-server/jobs/materials_auto/example_parse.py ADDED
@@ -0,0 +1,140 @@
+from dataclasses import dataclass
+from decimal import Decimal
+
+from uncountable.integration.job import JobArguments, WebhookJob, register_job
+from uncountable.types import (
+    base_t,
+    entity_t,
+    generic_upload_t,
+    identifier_t,
+    job_definition_t,
+    notifications_t,
+    uploader_t,
+)
+
+
+@dataclass(kw_only=True)
+class ParsePayload:
+    async_job_id: base_t.ObjectId
+
+
+@register_job
+class ParseExample(WebhookJob[ParsePayload]):
+    def run(
+        self, args: JobArguments, payload: ParsePayload
+    ) -> job_definition_t.JobResult:
+        user_id: base_t.ObjectId | None = None
+        recipe_id: base_t.ObjectId | None = None
+        file_name: str | None = None
+        data = args.client.get_entities_data(
+            entity_ids=[payload.async_job_id], entity_type=entity_t.EntityType.ASYNC_JOB
+        )
+        for field_value in data.entity_details[0].field_values:
+            if field_value.field_ref_name == "core_async_job_jobData":
+                assert isinstance(field_value.value, dict)
+                assert isinstance(field_value.value["user_id"], int)
+                user_id = field_value.value["user_id"]
+            elif (
+                field_value.field_ref_name
+                == "unc_async_job_custom_parser_recipe_ids_in_view"
+            ):
+                if field_value.value is None:
+                    continue
+                assert isinstance(field_value.value, list)
+                if len(field_value.value) > 0:
+                    assert isinstance(field_value.value[0], int)
+                    recipe_id = field_value.value[0]
+            elif field_value.field_ref_name == "unc_async_job_custom_parser_input_file":
+                assert isinstance(field_value.value, list)
+                assert len(field_value.value) == 1
+                assert isinstance(field_value.value[0], dict)
+                assert isinstance(field_value.value[0]["name"], str)
+                file_name = field_value.value[0]["name"]
+
+        assert user_id is not None
+        assert file_name is not None
+
+        dummy_parsed_file_data: list[uploader_t.ParsedFileData] = [
+            uploader_t.ParsedFileData(
+                file_name=file_name,
+                file_structures=[
+                    uploader_t.DataChannel(
+                        type=uploader_t.StructureElementType.CHANNEL,
+                        channel=uploader_t.TextChannelData(
+                            name="column1",
+                            type=uploader_t.ChannelType.TEXT_CHANNEL,
+                            data=[
+                                uploader_t.StringValue(value="value1"),
+                                uploader_t.StringValue(value="value4"),
+                                uploader_t.StringValue(value="value7"),
+                            ],
+                        ),
+                    ),
+                    uploader_t.DataChannel(
+                        type=uploader_t.StructureElementType.CHANNEL,
+                        channel=uploader_t.TextChannelData(
+                            name="column2",
+                            type=uploader_t.ChannelType.TEXT_CHANNEL,
+                            data=[
+                                uploader_t.StringValue(value="value2"),
+                                uploader_t.StringValue(value="value5"),
+                                uploader_t.StringValue(value="value8"),
+                            ],
+                        ),
+                    ),
+                    uploader_t.DataChannel(
+                        type=uploader_t.StructureElementType.CHANNEL,
+                        channel=uploader_t.TextChannelData(
+                            name="column3",
+                            type=uploader_t.ChannelType.TEXT_CHANNEL,
+                            data=[
+                                uploader_t.StringValue(value="value3"),
+                                uploader_t.StringValue(value="value6"),
+                                uploader_t.StringValue(value="value9"),
+                            ],
+                        ),
+                    ),
+                    uploader_t.HeaderEntry(
+                        type=uploader_t.StructureElementType.HEADER,
+                        value=uploader_t.TextHeaderData(
+                            name="file_source",
+                            type=uploader_t.HeaderType.TEXT_HEADER,
+                            data=uploader_t.StringValue(value="my_file_to_upload.xlsx"),
+                        ),
+                    ),
+                    uploader_t.HeaderEntry(
+                        type=uploader_t.StructureElementType.HEADER,
+                        value=uploader_t.NumericHeaderData(
+                            name="file structure number",
+                            data=uploader_t.DecimalValue(value=Decimal(99)),
+                        ),
+                    ),
+                ],
+            )
+        ]
+
+        complete_async_parse_req = args.batch_processor.complete_async_parse(
+            parsed_file_data=dummy_parsed_file_data,
+            async_job_key=identifier_t.IdentifierKeyId(id=payload.async_job_id),
+            upload_destination=generic_upload_t.UploadDestinationRecipe(
+                recipe_key=identifier_t.IdentifierKeyId(id=recipe_id or 1)
+            ),
+        )
+
+        args.batch_processor.push_notification(
+            depends_on=[complete_async_parse_req.batch_reference],
+            notification_targets=[
+                notifications_t.NotificationTargetUser(
+                    user_key=identifier_t.IdentifierKeyId(id=user_id)
+                )
+            ],
+            subject="Upload complete",
+            message="Your file has been uploaded",
+            display_notice=True,
+        )
+
+        return job_definition_t.JobResult(success=True)
+
+    @property
+    def webhook_payload_type(self) -> type:
+        return ParsePayload
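The pattern worth noting in example_parse.py is batch dependency chaining: complete_async_parse returns a handle, and push_notification lists that handle's batch_reference in depends_on, so the notification is only delivered after the parse result has been committed. A rough standalone sketch of the queuing idea, with hypothetical types (not the SDK's actual batch processor internals):

from dataclasses import dataclass, field


@dataclass(frozen=True)
class BatchReference:  # hypothetical stand-in
    index: int


@dataclass
class BatchQueue:  # hypothetical stand-in
    calls: list[tuple[str, tuple[BatchReference, ...]]] = field(default_factory=list)

    def enqueue(
        self, name: str, depends_on: tuple[BatchReference, ...] = ()
    ) -> BatchReference:
        # Each queued call gets a reference; later calls declare the
        # references they must wait on before they are allowed to run.
        self.calls.append((name, depends_on))
        return BatchReference(index=len(self.calls) - 1)


queue = BatchQueue()
parse_ref = queue.enqueue("complete_async_parse")
queue.enqueue("push_notification", depends_on=(parse_ref,))
assert queue.calls[1] == ("push_notification", (parse_ref,))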
examples/integration-server/jobs/materials_auto/example_predictions.py ADDED
@@ -0,0 +1,61 @@
+import random
+from dataclasses import dataclass
+from datetime import UTC, datetime
+from decimal import Decimal
+
+from uncountable.integration.job import JobArguments, WebhookJob, register_job
+from uncountable.types import (
+    base_t,
+    identifier_t,
+    job_definition_t,
+    recipe_links_t,
+    set_recipe_outputs_t,
+)
+
+
+@dataclass(kw_only=True)
+class PredictionsPayload:
+    output_id: base_t.ObjectId
+    recipe_ids: list[base_t.ObjectId]
+
+
+@register_job
+class PredictionsExample(WebhookJob[PredictionsPayload]):
+    def run(
+        self, args: JobArguments, payload: PredictionsPayload
+    ) -> job_definition_t.JobResult:
+        recipe_data = args.client.get_recipes_data(recipe_ids=payload.recipe_ids)
+        formatted_datetime = datetime.now(UTC).strftime("%Y-%m-%d %H:%M:%S")
+
+        for recipe in recipe_data.recipes:
+            test_sample_name = f"Predictions Model ({formatted_datetime})"
+            created_recipe_id = args.client.create_recipe(
+                name=test_sample_name,
+                material_family_id=1,
+                workflow_id=1,
+                definition_key=identifier_t.IdentifierKeyRefName(
+                    ref_name="unc_test_sample"
+                ),
+            ).result_id
+            args.client.set_recipe_outputs(
+                output_data=[
+                    set_recipe_outputs_t.RecipeOutputValue(
+                        recipe_id=created_recipe_id,
+                        output_id=payload.output_id,
+                        experiment_num=1,
+                        value_numeric=Decimal(random.random() * 10),
+                    )
+                ]
+            )
+            args.client.create_recipe_link(
+                recipe_from_key=identifier_t.IdentifierKeyId(id=recipe.recipe_id),
+                recipe_to_key=identifier_t.IdentifierKeyId(id=created_recipe_id),
+                link_type=recipe_links_t.RecipeLinkType.CHILD,
+                name=test_sample_name,
+            )
+
+        return job_definition_t.JobResult(success=True)
+
+    @property
+    def webhook_payload_type(self) -> type:
+        return PredictionsPayload
examples/integration-server/jobs/materials_auto/example_runsheet_wh.py CHANGED
@@ -1,8 +1,19 @@
 from io import BytesIO

+from openpyxl import Workbook, load_workbook
 from uncountable.core.file_upload import DataFileUpload, FileUpload
 from uncountable.integration.job import JobArguments, RunsheetWebhookJob, register_job
-from uncountable.types import entity_t
+from uncountable.types import (
+    download_file_t,
+    entity_t,
+    export_default_runsheet_t,
+    identifier_t,
+    webhook_job_t,
+)
+from uncountable.types.client_base import APIRequest
+
+RUNSHEET_REF_NAME = "recipe_export_runsheet"
+RUNSHEET_REF_NAME_2 = "recipe_export_runsheet_2"


 @register_job
@@ -11,25 +22,55 @@ class StandardRunsheetGenerator(RunsheetWebhookJob):
         self,
         *,
         args: JobArguments,
-        entities: list[entity_t.Entity],
+        payload: webhook_job_t.RunsheetWebhookPayload,
     ) -> FileUpload:
-        args.logger.log_info(f"Generating runsheet for {len(entities)} entities")
+        args.logger.log_info("Exporting default runsheets")
+
+        entity_identifiers: list[identifier_t.IdentifierKey] = [
+            identifier_t.IdentifierKeyId(id=entity.id) for entity in payload.entities
+        ]
+
+        combined_wb = Workbook()
+        combined_sheet = combined_wb.active or combined_wb.create_sheet(
+            title="Combined Runsheet"
+        )
+        combined_sheet.title = "Combined Runsheet"
+
+        for ref_name in [RUNSHEET_REF_NAME, RUNSHEET_REF_NAME_2]:
+            api_request = APIRequest(
+                method=export_default_runsheet_t.ENDPOINT_METHOD,
+                endpoint=export_default_runsheet_t.ENDPOINT_PATH,
+                args=export_default_runsheet_t.Arguments(
+                    entities=entity_identifiers,
+                    runsheet_key=identifier_t.IdentifierKeyRefName(ref_name=ref_name),
+                    entity_type=payload.entities[0].type
+                    if payload.entities
+                    else entity_t.EntityType.RECIPE,
+                ),
+            )

-        content = []
-        content.append("STANDARD LAB RUNSHEET\n")
-        content.append("=" * 30 + "\n\n")
+            response = args.client.do_request(
+                api_request=api_request,
+                return_type=export_default_runsheet_t.Data,
+            )

-        for entity in entities:
-            content.append(f"Type: {entity.type}\n")
-            content.append(f"ID: {entity.id}\n")
+            file_query = download_file_t.FileDownloadQueryTextDocumentId(
+                text_document_id=response.text_document_id,
+            )

-            if hasattr(entity, "field_values") and entity.field_values:
-                content.append("Field Values:\n")
-                for field in entity.field_values:
-                    content.append(f" - {field.name}: {field.value}\n")
+            downloaded_files = args.client.download_files(file_query=file_query)
+            file_data = downloaded_files[0].data.read()

-            content.append("\n")
+            wb = load_workbook(filename=BytesIO(file_data))
+            for sheet_name in wb.sheetnames:
+                for row in wb[sheet_name].iter_rows(values_only=True):
+                    combined_sheet.append(row)

-        runsheet_data = "".join(content).encode("utf-8")
+        output = BytesIO()
+        combined_wb.save(output)
+        output.seek(0)

-        return DataFileUpload(name="lab_runsheet.txt", data=BytesIO(runsheet_data))
+        return DataFileUpload(
+            data=output,
+            name="combined_runsheet.xlsx",
+        )
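The rewritten job merges the exported workbooks by copying cell values only: iter_rows(values_only=True) yields plain values, so styles and merged-cell ranges are not carried over into the combined sheet. A minimal standalone sketch of the same openpyxl round trip, with an in-memory workbook standing in for the downloaded runsheet file:

from io import BytesIO

from openpyxl import Workbook, load_workbook

# Build a small source workbook in memory (stands in for the downloaded file).
src = Workbook()
src_sheet = src.active
src_sheet.append(["sample", "mass_g"])
src_sheet.append(["A-1", 12.5])
buf = BytesIO()
src.save(buf)
buf.seek(0)

# Copy every row of every sheet into one combined sheet, values only.
combined = Workbook()
combined_sheet = combined.active
wb = load_workbook(filename=buf)
for sheet_name in wb.sheetnames:
    for row in wb[sheet_name].iter_rows(values_only=True):
        combined_sheet.append(row)

assert combined_sheet.max_row == 2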
examples/integration-server/jobs/materials_auto/profile.yaml CHANGED
@@ -84,3 +84,21 @@ jobs:
     executor:
       type: script
       import_path: example_instrument
+  - id: example_predictions
+    type: webhook
+    name: Webook Predictions
+    signature_key_secret:
+      type: env
+      env_key: WH_PREDICTIONS_SIGNATURE_KEY
+    executor:
+      type: script
+      import_path: example_predictions
+  - id: example_parse
+    type: webhook
+    name: Webhook Parse
+    signature_key_secret:
+      type: env
+      env_key: WH_PARSE_SIGNATURE_KEY
+    executor:
+      type: script
+      import_path: example_parse
examples/integration-server/pyproject.toml CHANGED
@@ -9,7 +9,7 @@ dependencies = [
     "ruff == 0.*",
     "openpyxl == 3.*",
     "more_itertools == 10.*",
-    "types-paramiko ==4.0.0.20250806",
+    "types-paramiko ==4.0.0.20250822",
     "types-openpyxl == 3.*",
     "types-pysftp == 0.*",
     "types-pytz ==2025.*",
@@ -17,7 +17,8 @@ dependencies = [
     "types-simplejson == 3.*",
     "pandas-stubs",
     "xlrd == 2.*",
-    "msgspec == 0.19.*"
+    "msgspec == 0.19.*",
+    "websockets==15.0.1",
 ]

 [tool.mypy]
@@ -114,7 +115,6 @@ lint.ignore = [
     "PD010", # .pivottable. Should add
     "PD011", # use .to_numpy. Skip
     "PD015", # use .merge. Should add
-    "PD901", # avoid generic df name. Skip
     "PERF203", # avoid try except in loop. Skip
     "PERF401", # use list comprehension. Skip
     "PERF402", # use list.copy. Skip
@@ -194,7 +194,6 @@ lint.ignore = [
     "RUF022", # __all__ is not sorted. skip due to isort complication
     "UP017", # use datetime.UTC, TODO add back in
     "UP035", # replacing List with list, TODO add back in
-    "UP038", # isinstance X | Y instead of (X, Y), TODO add back in
     # ## FROM RUFF UPGRADE
     "PLC2701", # private name imports. should add
     "PLR1702", # too many nested blocks -- add with config. skip
@@ -213,6 +212,7 @@ exclude = [

 [tool.ruff.lint.isort]
 split-on-trailing-comma = true
+known-first-party = ["pkgs"]

 [tool.ruff.lint.mccabe]
 max-complexity = 130 # goal would be to bring this down to ~50 or so
pkgs/argument_parser/argument_parser.py CHANGED
@@ -168,6 +168,23 @@ def _invoke_membership_parser(
     raise ValueError(f"Expected value from {expected_values} but got value {value}")


+# Uses `is` to compare
+def _build_identity_parser(
+    identity_value: T,
+) -> ParserFunction[T]:
+    def parse(value: typing.Any) -> T:
+        if value is identity_value:
+            return identity_value
+        raise ValueError(
+            f"Expected value {identity_value} (type: {type(identity_value)}) but got value {value} (type: {type(value)})"
+        )
+
+    return parse
+
+
+NONE_IDENTITY_PARSER = _build_identity_parser(None)
+
+
 def _build_parser_discriminated_union(
     context: ParserContext,
     discriminator_raw: str,
@@ -243,8 +260,10 @@ def _build_parser_inner(
         for field_name, field_parser in field_parsers
     })

+    # IMPROVE: unclear why we need == here
     if parsed_type == type(None):  # noqa: E721
-        return lambda value: _invoke_membership_parser({None}, value)
+        # Need to convince type checker that parsed_type is type(None)
+        return typing.cast(ParserFunction[T], NONE_IDENTITY_PARSER)

     origin = typing.get_origin(parsed_type)
     if origin is tuple:
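The new identity parser compares with `is`, which checks object identity rather than equality; that is exact for singletons like None, and it lets the None branch of _build_parser_inner return one precomputed parser instead of building a membership set on every call. A stripped-down sketch of the semantics, untyped and outside the SDK's ParserFunction machinery:

def build_identity_parser(identity_value):
    # Accept only the exact object, compared with `is` (not `==`).
    def parse(value):
        if value is identity_value:
            return identity_value
        raise ValueError(f"Expected {identity_value!r} but got {value!r}")

    return parse


none_parser = build_identity_parser(None)
assert none_parser(None) is None
for bad in (0, "", False):  # falsy, but not None -- all rejected
    try:
        none_parser(bad)
    except ValueError:
        pass
    else:
        raise AssertionError("should have raised")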
pkgs/serialization_util/serialization_helpers.py CHANGED
@@ -85,7 +85,9 @@ def _serialize_dataclass(d: Any) -> dict[str, JsonValue]:


 def _to_string_value(value: Any) -> str:
-    assert isinstance(value, (Decimal, int))
+    assert isinstance(value, (Decimal, int)), (
+        f"Expecting decimal or int, received: {value} (type={type(value)})"
+    )
     return str(value)
