UncountablePythonSDK 0.0.117__py3-none-any.whl → 0.0.118__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of UncountablePythonSDK might be problematic; see the registry's advisory page for more details.

Files changed (27)
  1. docs/justfile +1 -1
  2. examples/integration-server/jobs/materials_auto/example_instrument.py +33 -1
  3. examples/integration-server/pyproject.toml +1 -1
  4. pkgs/argument_parser/__init__.py +1 -0
  5. pkgs/argument_parser/argument_parser.py +3 -3
  6. pkgs/type_spec/builder.py +2 -0
  7. pkgs/type_spec/emit_open_api.py +54 -8
  8. pkgs/type_spec/value_spec/emit_python.py +6 -1
  9. uncountable/integration/cli.py +78 -21
  10. uncountable/integration/queue_runner/command_server/command_client.py +26 -0
  11. uncountable/integration/queue_runner/command_server/command_server.py +42 -2
  12. uncountable/integration/queue_runner/command_server/constants.py +4 -0
  13. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +18 -0
  14. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +20 -11
  15. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +50 -1
  16. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +45 -0
  17. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +28 -0
  18. uncountable/integration/queue_runner/datastore/interface.py +5 -0
  19. uncountable/integration/queue_runner/job_scheduler.py +3 -8
  20. uncountable/integration/queue_runner/queue_runner.py +10 -2
  21. uncountable/types/entity_t.py +1 -1
  22. uncountable/types/queued_job.py +1 -0
  23. uncountable/types/queued_job_t.py +11 -2
  24. {uncountablepythonsdk-0.0.117.dist-info → uncountablepythonsdk-0.0.118.dist-info}/METADATA +1 -1
  25. {uncountablepythonsdk-0.0.117.dist-info → uncountablepythonsdk-0.0.118.dist-info}/RECORD +27 -26
  26. {uncountablepythonsdk-0.0.117.dist-info → uncountablepythonsdk-0.0.118.dist-info}/WHEEL +0 -0
  27. {uncountablepythonsdk-0.0.117.dist-info → uncountablepythonsdk-0.0.118.dist-info}/top_level.txt +0 -0
docs/justfile CHANGED
@@ -1,5 +1,5 @@
1
1
  docs-setup-python:
2
- curl -LsSf https://astral.sh/uv/0.7.1/install.sh | sh
2
+ curl -LsSf https://astral.sh/uv/0.8.4/install.sh | sh
3
3
  uv pip install -r requirements.txt
4
4
 
5
5
  docs-clean:
@@ -1,7 +1,17 @@
1
+ import time
1
2
  from dataclasses import dataclass
3
+ from decimal import Decimal
2
4
 
3
5
  from uncountable.integration.job import JobArguments, WebhookJob, register_job
4
- from uncountable.types import base_t, entity_t, job_definition_t
6
+ from uncountable.types import (
7
+ base_t,
8
+ entity_t,
9
+ identifier_t,
10
+ integrations_t,
11
+ job_definition_t,
12
+ publish_realtime_data_t,
13
+ )
14
+ from uncountable.types.client_base import APIRequest
5
15
 
6
16
 
7
17
  @dataclass(kw_only=True)
@@ -31,6 +41,28 @@ class InstrumentExample(WebhookJob[InstrumentPayload]):
31
41
 
32
42
  args.logger.log_info(f"Instrument ID: {instrument_id}")
33
43
 
44
+ for i in range(10):
45
+ req_args = publish_realtime_data_t.Arguments(
46
+ data_package=integrations_t.DataPackageNumericReading(
47
+ value=Decimal(i * 15),
48
+ target_entity=entity_t.EntityIdentifier(
49
+ identifier_key=identifier_t.IdentifierKeyId(
50
+ id=payload.equipment_id
51
+ ),
52
+ type=entity_t.EntityType.EQUIPMENT,
53
+ ),
54
+ ),
55
+ )
56
+ api_request = APIRequest(
57
+ method=publish_realtime_data_t.ENDPOINT_METHOD,
58
+ endpoint=publish_realtime_data_t.ENDPOINT_PATH,
59
+ args=req_args,
60
+ )
61
+ args.client.do_request(
62
+ api_request=api_request, return_type=publish_realtime_data_t.Data
63
+ )
64
+ time.sleep(0.75)
65
+
34
66
  return job_definition_t.JobResult(success=True)
35
67
 
36
68
  @property
@@ -9,7 +9,7 @@ dependencies = [
9
9
  "ruff == 0.*",
10
10
  "openpyxl == 3.*",
11
11
  "more_itertools == 10.*",
12
- "types-paramiko ==3.5.0.20240918",
12
+ "types-paramiko ==3.5.0.20250708",
13
13
  "types-openpyxl == 3.*",
14
14
  "types-pysftp == 0.*",
15
15
  "types-pytz ==2025.*",
@@ -7,6 +7,7 @@ from .argument_parser import ParserFunction as ParserFunction
7
7
  from .argument_parser import ParserOptions as ParserOptions
8
8
  from .argument_parser import SourceEncoding as SourceEncoding
9
9
  from .argument_parser import build_parser as build_parser
10
+ from .argument_parser import is_missing as is_missing
10
11
  from .argument_parser import is_optional as is_optional
11
12
  from .argument_parser import is_union as is_union
12
13
  from .case_convert import camel_to_snake_case as camel_to_snake_case
@@ -223,7 +223,7 @@ def _build_parser_inner(
223
223
  )
224
224
 
225
225
  if dataclasses.is_dataclass(parsed_type):
226
- return _build_parser_dataclass(parsed_type, context) # type: ignore[arg-type]
226
+ return _build_parser_dataclass(parsed_type, context)
227
227
 
228
228
  # namedtuple support
229
229
  if is_namedtuple_type(parsed_type):
@@ -244,7 +244,7 @@ def _build_parser_inner(
244
244
  })
245
245
 
246
246
  if parsed_type == type(None): # noqa: E721
247
- return lambda value: _invoke_membership_parser({None}, value) # type: ignore
247
+ return lambda value: _invoke_membership_parser({None}, value)
248
248
 
249
249
  origin = typing.get_origin(parsed_type)
250
250
  if origin is tuple:
@@ -271,7 +271,7 @@ def _build_parser_inner(
271
271
  arg_parsers = [_build_parser_inner(arg, context) for arg in sorted_args]
272
272
  return lambda value: _invoke_fallback_parsers(parsed_type, arg_parsers, value)
273
273
 
274
- if parsed_type is typing.Any: # type: ignore[comparison-overlap]
274
+ if parsed_type is typing.Any:
275
275
  return lambda value: value
276
276
 
277
277
  if origin in (list, set):
pkgs/type_spec/builder.py CHANGED
@@ -745,6 +745,8 @@ class SpecTypeDefnStringEnum(SpecTypeDefn):
745
745
  builder.ensure(
746
746
  isinstance(enum_value, str), "enum value should be string"
747
747
  )
748
+ assert isinstance(enum_value, str)
749
+
748
750
  deprecated = value.get("deprecated", False)
749
751
  builder.ensure(
750
752
  isinstance(deprecated, bool),
@@ -279,7 +279,12 @@ def _emit_stability_level(
279
279
  case EmitOpenAPIStabilityLevel.draft:
280
280
  stability_info["x-beta"] = True
281
281
  case EmitOpenAPIStabilityLevel.beta:
282
- stability_info["x-badges"] = [{"name": "Beta", "color": "DarkOrange"}]
282
+ stability_info["x-badges"] = [
283
+ {
284
+ "name": "Beta",
285
+ "color": "DarkOrange",
286
+ }
287
+ ]
283
288
  case EmitOpenAPIStabilityLevel.stable:
284
289
  pass
285
290
  case _:
@@ -334,18 +339,57 @@ def _emit_endpoint_response_examples(
334
339
  return {"examples": response_examples}
335
340
 
336
341
 
342
+ def _create_warning_banner(api_type: str, message: str) -> str:
343
+ return (
344
+ f'<div style="background-color: #fff3cd; border: 1px solid #ffeaa7; '
345
+ f'border-radius: 4px; padding: 12px; margin-bottom: 16px;">'
346
+ f"<strong>⚠️ {api_type} API:</strong> {message}"
347
+ f"</div>"
348
+ )
349
+
350
+
351
+ def _get_stability_warning(
352
+ stability_level: EmitOpenAPIStabilityLevel | None,
353
+ ) -> str:
354
+ resolved_stability_level = (
355
+ stability_level
356
+ if stability_level is not None
357
+ else EmitOpenAPIStabilityLevel.stable
358
+ )
359
+
360
+ match resolved_stability_level:
361
+ case EmitOpenAPIStabilityLevel.draft:
362
+ return _create_warning_banner(
363
+ "Draft",
364
+ "This endpoint is in draft status and may change significantly. Not recommended for production use.",
365
+ )
366
+ case EmitOpenAPIStabilityLevel.beta:
367
+ return _create_warning_banner(
368
+ "Beta",
369
+ "This endpoint is in beta and its required parameters may change. Use with caution in production environments.",
370
+ )
371
+ case EmitOpenAPIStabilityLevel.stable:
372
+ return ""
373
+
374
+
337
375
  def _emit_endpoint_description(
338
- description: str, guides: list[EmitOpenAPIGuide]
376
+ description: str,
377
+ guides: list[EmitOpenAPIGuide],
378
+ stability_level: EmitOpenAPIStabilityLevel | None = None,
339
379
  ) -> dict[str, str]:
380
+ stability_warning = _get_stability_warning(stability_level)
381
+
340
382
  full_guides = "<br/>".join([
341
383
  _write_guide_as_html(guide, is_open=False)
342
384
  for guide in sorted(guides, key=lambda g: g.ref_name)
343
385
  ])
344
- return {
345
- "description": description
346
- if len(guides) == 0
347
- else f"{description}<br/>{full_guides}"
348
- }
386
+
387
+ full_description_parts = [
388
+ part for part in [stability_warning, description, full_guides] if part
389
+ ]
390
+ full_description = "<br/>".join(full_description_parts)
391
+
392
+ return {"description": full_description}
349
393
 
350
394
 
351
395
  def _emit_namespace(
@@ -381,7 +425,9 @@ def _emit_namespace(
381
425
  "summary": endpoint.summary,
382
426
  }
383
427
  | _emit_endpoint_deprecated(endpoint.deprecated)
384
- | _emit_endpoint_description(endpoint.description, ctx.endpoint.guides)
428
+ | _emit_endpoint_description(
429
+ endpoint.description, ctx.endpoint.guides, endpoint.stability_level
430
+ )
385
431
  | _emit_stability_level(endpoint.stability_level)
386
432
  | _emit_endpoint_parameters(endpoint, argument_type, ctx.endpoint.examples)
387
433
  | _emit_endpoint_request_body(
@@ -75,9 +75,14 @@ def _emit_function_wrapper(function: value_spec_t.Function) -> str:
75
75
  ):
76
76
  python_type += " | None"
77
77
  any_pass_null = True
78
+
79
+ if python_type.startswith("base_t.ExtJsonValue"):
80
+ return_statement = f"self._extract({index})"
81
+ else:
82
+ return_statement = f"cast({python_type}, self._extract({index}))"
78
83
  out.write(
79
84
  f"""{INDENT}def get_{argument.ref_name}(self) -> {python_type}:
80
- {INDENT}{INDENT}return cast({python_type}, self._extract({index}))
85
+ {INDENT}{INDENT}return {return_statement}
81
86
  """
82
87
  )
83
88
  out.write("\n")
@@ -1,49 +1,106 @@
1
1
  import argparse
2
2
 
3
+ from dateutil import tz
3
4
  from opentelemetry.trace import get_current_span
5
+ from tabulate import tabulate
4
6
 
5
7
  from uncountable.core.environment import get_local_admin_server_port
6
8
  from uncountable.integration.queue_runner.command_server.command_client import (
7
9
  send_job_queue_message,
10
+ send_list_queued_jobs_message,
8
11
  )
9
12
  from uncountable.integration.telemetry import Logger
10
13
  from uncountable.types import queued_job_t
11
14
 
12
15
 
16
+ def register_enqueue_job_parser(
17
+ sub_parser_manager: argparse._SubParsersAction,
18
+ ) -> None:
19
+ run_parser = sub_parser_manager.add_parser(
20
+ "run",
21
+ description="Process a job with a given command and job ID.",
22
+ )
23
+ run_parser.add_argument("job_id", type=str, help="The ID of the job to process")
24
+
25
+ def _handle_enqueue_job(args: argparse.Namespace) -> None:
26
+ send_job_queue_message(
27
+ job_ref_name=args.job_id,
28
+ payload=queued_job_t.QueuedJobPayload(
29
+ invocation_context=queued_job_t.InvocationContextManual()
30
+ ),
31
+ host=args.host,
32
+ port=get_local_admin_server_port(),
33
+ )
34
+
35
+ run_parser.set_defaults(func=_handle_enqueue_job)
36
+
37
+
38
+ def register_list_queued_jobs(
39
+ sub_parser_manager: argparse._SubParsersAction,
40
+ ) -> None:
41
+ list_queued_jobs_parser = sub_parser_manager.add_parser(
42
+ "list-queued-jobs", description="List jobs queued on the integration server."
43
+ )
44
+
45
+ list_queued_jobs_parser.add_argument(
46
+ "--offset",
47
+ type=int,
48
+ default=0,
49
+ help="Number of jobs to skip. Should be non-negative.",
50
+ )
51
+ list_queued_jobs_parser.add_argument(
52
+ "--limit",
53
+ type=int,
54
+ default=100,
55
+ help="A number between 1 and 100 specifying the number of jobs to return in the result set.",
56
+ )
57
+
58
+ def _handle_list_queued_jobs(args: argparse.Namespace) -> None:
59
+ queued_jobs = send_list_queued_jobs_message(
60
+ offset=args.offset,
61
+ limit=args.limit,
62
+ host=args.host,
63
+ port=get_local_admin_server_port(),
64
+ )
65
+
66
+ headers = ["UUID", "Job Ref Name", "Attempts", "Submitted At"]
67
+ rows = [
68
+ [
69
+ job.uuid,
70
+ job.job_ref_name,
71
+ job.num_attempts,
72
+ job.submitted_at.ToDatetime(tz.UTC).astimezone(tz.tzlocal()),
73
+ ]
74
+ for job in queued_jobs
75
+ ]
76
+ print(tabulate(rows, headers=headers, tablefmt="grid"))
77
+
78
+ list_queued_jobs_parser.set_defaults(func=_handle_list_queued_jobs)
79
+
80
+
13
81
  def main() -> None:
14
82
  logger = Logger(get_current_span())
15
83
 
16
84
  parser = argparse.ArgumentParser(
17
- description="Process a job with a given command and job ID."
85
+ description="Execute a given integrations server command."
18
86
  )
19
87
 
20
88
  parser.add_argument(
21
- "command",
22
- type=str,
23
- choices=["run"],
24
- help="The command to execute (e.g., 'run')",
89
+ "--host", type=str, default="localhost", nargs="?", help="The host to run on"
25
90
  )
26
91
 
27
- parser.add_argument("job_id", type=str, help="The ID of the job to process")
28
-
29
- parser.add_argument(
30
- "--host", type=str, default="localhost", nargs="?", help="The host to run on"
92
+ subparser_action = parser.add_subparsers(
93
+ dest="command",
94
+ required=True,
95
+ help="The command to execute (e.g., 'run')",
31
96
  )
32
97
 
33
- args = parser.parse_args()
98
+ register_enqueue_job_parser(subparser_action)
99
+ register_list_queued_jobs(subparser_action)
34
100
 
101
+ args = parser.parse_args()
35
102
  with logger.push_scope(args.command):
36
- if args.command == "run":
37
- send_job_queue_message(
38
- job_ref_name=args.job_id,
39
- payload=queued_job_t.QueuedJobPayload(
40
- invocation_context=queued_job_t.InvocationContextManual()
41
- ),
42
- host=args.host,
43
- port=get_local_admin_server_port(),
44
- )
45
- else:
46
- parser.print_usage()
103
+ args.func(args)
47
104
 
48
105
 
49
106
  main()
@@ -10,6 +10,8 @@ from uncountable.integration.queue_runner.command_server.protocol.command_server
10
10
  CheckHealthResult,
11
11
  EnqueueJobRequest,
12
12
  EnqueueJobResult,
13
+ ListQueuedJobsRequest,
14
+ ListQueuedJobsResult,
13
15
  )
14
16
  from uncountable.integration.queue_runner.command_server.types import (
15
17
  CommandServerBadResponse,
@@ -66,3 +68,27 @@ def check_health(*, host: str = _LOCAL_RPC_HOST, port: int) -> bool:
66
68
  assert isinstance(response, CheckHealthResult)
67
69
 
68
70
  return response.success
71
+
72
+
73
+ def send_list_queued_jobs_message(
74
+ *,
75
+ offset: int,
76
+ limit: int,
77
+ host: str = "localhost",
78
+ port: int,
79
+ ) -> list[ListQueuedJobsResult.ListQueuedJobsResultItem]:
80
+ with command_server_connection(host=host, port=port) as stub:
81
+ request = ListQueuedJobsRequest(
82
+ offset=offset,
83
+ limit=limit,
84
+ )
85
+
86
+ try:
87
+ response = stub.ListQueuedJobs(
88
+ request, timeout=_DEFAULT_MESSAGE_TIMEOUT_SECS
89
+ )
90
+ except grpc.RpcError as e:
91
+ raise ValueError(e.details()) # type: ignore
92
+
93
+ assert isinstance(response, ListQueuedJobsResult)
94
+ return list(response.queued_jobs)
@@ -1,7 +1,8 @@
1
1
  import asyncio
2
2
 
3
3
  import simplejson as json
4
- from grpc import aio
4
+ from google.protobuf.timestamp_pb2 import Timestamp
5
+ from grpc import StatusCode, aio
5
6
 
6
7
  from pkgs.argument_parser import CachedParser
7
8
  from uncountable.core.environment import get_local_admin_server_port
@@ -10,14 +11,18 @@ from uncountable.integration.queue_runner.command_server.protocol.command_server
10
11
  CheckHealthResult,
11
12
  EnqueueJobRequest,
12
13
  EnqueueJobResult,
14
+ ListQueuedJobsRequest,
15
+ ListQueuedJobsResult,
13
16
  )
14
17
  from uncountable.integration.queue_runner.command_server.types import (
15
18
  CommandEnqueueJob,
16
19
  CommandEnqueueJobResponse,
17
20
  CommandQueue,
18
21
  )
22
+ from uncountable.integration.queue_runner.datastore import DatastoreSqlite
19
23
  from uncountable.types import queued_job_t
20
24
 
25
+ from .constants import ListQueuedJobsConstants
21
26
  from .protocol.command_server_pb2_grpc import (
22
27
  CommandServerServicer,
23
28
  add_CommandServerServicer_to_server,
@@ -26,7 +31,7 @@ from .protocol.command_server_pb2_grpc import (
26
31
  queued_job_payload_parser = CachedParser(queued_job_t.QueuedJobPayload)
27
32
 
28
33
 
29
- async def serve(command_queue: CommandQueue) -> None:
34
+ async def serve(command_queue: CommandQueue, datastore: DatastoreSqlite) -> None:
30
35
  server = aio.server()
31
36
 
32
37
  class CommandServerHandler(CommandServerServicer):
@@ -54,6 +59,41 @@ async def serve(command_queue: CommandQueue) -> None:
54
59
  ) -> CheckHealthResult:
55
60
  return CheckHealthResult(success=True)
56
61
 
62
+ async def ListQueuedJobs(
63
+ self, request: ListQueuedJobsRequest, context: aio.ServicerContext
64
+ ) -> ListQueuedJobsResult:
65
+ if (
66
+ request.limit < ListQueuedJobsConstants.LIMIT_MIN
67
+ or request.limit > ListQueuedJobsConstants.LIMIT_MAX
68
+ ):
69
+ await context.abort(
70
+ StatusCode.INVALID_ARGUMENT, "Limit must be between 1 and 100."
71
+ )
72
+
73
+ if request.offset < ListQueuedJobsConstants.OFFSET_MIN:
74
+ await context.abort(
75
+ StatusCode.INVALID_ARGUMENT, "Offset cannot be negative."
76
+ )
77
+
78
+ queued_job_metadata = datastore.list_queued_job_metadata(
79
+ offset=request.offset, limit=request.limit
80
+ )
81
+
82
+ response_list: list[ListQueuedJobsResult.ListQueuedJobsResultItem] = []
83
+ for item in queued_job_metadata:
84
+ proto_timestamp = Timestamp()
85
+ proto_timestamp.FromDatetime(item.submitted_at)
86
+
87
+ response_list.append(
88
+ ListQueuedJobsResult.ListQueuedJobsResultItem(
89
+ uuid=item.queued_job_uuid,
90
+ job_ref_name=item.job_ref_name,
91
+ num_attempts=item.num_attempts,
92
+ submitted_at=proto_timestamp,
93
+ )
94
+ )
95
+ return ListQueuedJobsResult(queued_jobs=response_list)
96
+
57
97
  add_CommandServerServicer_to_server(CommandServerHandler(), server)
58
98
 
59
99
  listen_addr = f"[::]:{get_local_admin_server_port()}"
@@ -0,0 +1,4 @@
1
+ class ListQueuedJobsConstants:
2
+ LIMIT_MIN = 1
3
+ LIMIT_MAX = 100
4
+ OFFSET_MIN = 0
@@ -1,8 +1,10 @@
1
1
  syntax = "proto3";
2
+ import "google/protobuf/timestamp.proto";
2
3
 
3
4
  service CommandServer {
4
5
  rpc EnqueueJob(EnqueueJobRequest) returns (EnqueueJobResult) {}
5
6
  rpc CheckHealth(CheckHealthRequest) returns (CheckHealthResult) {}
7
+ rpc ListQueuedJobs(ListQueuedJobsRequest) returns (ListQueuedJobsResult) {}
6
8
  }
7
9
 
8
10
  message EnqueueJobRequest {
@@ -20,3 +22,19 @@ message CheckHealthRequest {}
20
22
  message CheckHealthResult {
21
23
  bool success = 1;
22
24
  }
25
+
26
+ message ListQueuedJobsRequest {
27
+ uint32 offset = 1;
28
+ uint32 limit = 2;
29
+ }
30
+
31
+ message ListQueuedJobsResult {
32
+ message ListQueuedJobsResultItem {
33
+ string uuid = 1;
34
+ string job_ref_name = 2;
35
+ int64 num_attempts = 3;
36
+ google.protobuf.Timestamp submitted_at = 4;
37
+ }
38
+
39
+ repeated ListQueuedJobsResultItem queued_jobs = 1;
40
+ }
@@ -14,8 +14,11 @@ from google.protobuf.internal import builder as _builder
14
14
  _sym_db = _symbol_database.Default()
15
15
 
16
16
 
17
+ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
18
+
19
+
17
20
  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
18
- b'\nQuncountable/integration/queue_runner/command_server/protocol/command_server.proto"E\n\x11\x45nqueueJobRequest\x12\x14\n\x0cjob_ref_name\x18\x01 \x01(\t\x12\x1a\n\x12serialized_payload\x18\x02 \x01(\t"H\n\x10\x45nqueueJobResult\x12\x1b\n\x13successfully_queued\x18\x01 \x01(\x08\x12\x17\n\x0fqueued_job_uuid\x18\x02 \x01(\t"\x14\n\x12\x43heckHealthRequest"$\n\x11\x43heckHealthResult\x12\x0f\n\x07success\x18\x01 \x01(\x08\x32\x80\x01\n\rCommandServer\x12\x35\n\nEnqueueJob\x12\x12.EnqueueJobRequest\x1a\x11.EnqueueJobResult"\x00\x12\x38\n\x0b\x43heckHealth\x12\x13.CheckHealthRequest\x1a\x12.CheckHealthResult"\x00\x62\x06proto3'
21
+ b'\nQuncountable/integration/queue_runner/command_server/protocol/command_server.proto\x1a\x1fgoogle/protobuf/timestamp.proto"E\n\x11\x45nqueueJobRequest\x12\x14\n\x0cjob_ref_name\x18\x01 \x01(\t\x12\x1a\n\x12serialized_payload\x18\x02 \x01(\t"H\n\x10\x45nqueueJobResult\x12\x1b\n\x13successfully_queued\x18\x01 \x01(\x08\x12\x17\n\x0fqueued_job_uuid\x18\x02 \x01(\t"\x14\n\x12\x43heckHealthRequest"$\n\x11\x43heckHealthResult\x12\x0f\n\x07success\x18\x01 \x01(\x08"6\n\x15ListQueuedJobsRequest\x12\x0e\n\x06offset\x18\x01 \x01(\r\x12\r\n\x05limit\x18\x02 \x01(\r"\xe4\x01\n\x14ListQueuedJobsResult\x12\x43\n\x0bqueued_jobs\x18\x01 \x03(\x0b\x32..ListQueuedJobsResult.ListQueuedJobsResultItem\x1a\x86\x01\n\x18ListQueuedJobsResultItem\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x14\n\x0cjob_ref_name\x18\x02 \x01(\t\x12\x14\n\x0cnum_attempts\x18\x03 \x01(\x03\x12\x30\n\x0csubmitted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp2\xc3\x01\n\rCommandServer\x12\x35\n\nEnqueueJob\x12\x12.EnqueueJobRequest\x1a\x11.EnqueueJobResult"\x00\x12\x38\n\x0b\x43heckHealth\x12\x13.CheckHealthRequest\x1a\x12.CheckHealthResult"\x00\x12\x41\n\x0eListQueuedJobs\x12\x16.ListQueuedJobsRequest\x1a\x15.ListQueuedJobsResult"\x00\x62\x06proto3'
19
22
  )
20
23
 
21
24
  _globals = globals()
@@ -27,14 +30,20 @@ _builder.BuildTopDescriptorsAndMessages(
27
30
  )
28
31
  if _descriptor._USE_C_DESCRIPTORS == False:
29
32
  DESCRIPTOR._options = None
30
- _globals["_ENQUEUEJOBREQUEST"]._serialized_start = 85
31
- _globals["_ENQUEUEJOBREQUEST"]._serialized_end = 154
32
- _globals["_ENQUEUEJOBRESULT"]._serialized_start = 156
33
- _globals["_ENQUEUEJOBRESULT"]._serialized_end = 228
34
- _globals["_CHECKHEALTHREQUEST"]._serialized_start = 230
35
- _globals["_CHECKHEALTHREQUEST"]._serialized_end = 250
36
- _globals["_CHECKHEALTHRESULT"]._serialized_start = 252
37
- _globals["_CHECKHEALTHRESULT"]._serialized_end = 288
38
- _globals["_COMMANDSERVER"]._serialized_start = 291
39
- _globals["_COMMANDSERVER"]._serialized_end = 419
33
+ _globals["_ENQUEUEJOBREQUEST"]._serialized_start = 118
34
+ _globals["_ENQUEUEJOBREQUEST"]._serialized_end = 187
35
+ _globals["_ENQUEUEJOBRESULT"]._serialized_start = 189
36
+ _globals["_ENQUEUEJOBRESULT"]._serialized_end = 261
37
+ _globals["_CHECKHEALTHREQUEST"]._serialized_start = 263
38
+ _globals["_CHECKHEALTHREQUEST"]._serialized_end = 283
39
+ _globals["_CHECKHEALTHRESULT"]._serialized_start = 285
40
+ _globals["_CHECKHEALTHRESULT"]._serialized_end = 321
41
+ _globals["_LISTQUEUEDJOBSREQUEST"]._serialized_start = 323
42
+ _globals["_LISTQUEUEDJOBSREQUEST"]._serialized_end = 377
43
+ _globals["_LISTQUEUEDJOBSRESULT"]._serialized_start = 380
44
+ _globals["_LISTQUEUEDJOBSRESULT"]._serialized_end = 608
45
+ _globals["_LISTQUEUEDJOBSRESULT_LISTQUEUEDJOBSRESULTITEM"]._serialized_start = 474
46
+ _globals["_LISTQUEUEDJOBSRESULT_LISTQUEUEDJOBSRESULTITEM"]._serialized_end = 608
47
+ _globals["_COMMANDSERVER"]._serialized_start = 611
48
+ _globals["_COMMANDSERVER"]._serialized_end = 806
40
49
  # @@protoc_insertion_point(module_scope)
@@ -1,7 +1,15 @@
1
1
  # ruff: noqa
2
+ from google.protobuf import timestamp_pb2 as _timestamp_pb2
3
+ from google.protobuf.internal import containers as _containers
2
4
  from google.protobuf import descriptor as _descriptor
3
5
  from google.protobuf import message as _message
4
- from typing import ClassVar as _ClassVar, Optional as _Optional
6
+ from typing import (
7
+ ClassVar as _ClassVar,
8
+ Iterable as _Iterable,
9
+ Mapping as _Mapping,
10
+ Optional as _Optional,
11
+ Union as _Union,
12
+ )
5
13
 
6
14
  DESCRIPTOR: _descriptor.FileDescriptor
7
15
 
@@ -36,3 +44,44 @@ class CheckHealthResult(_message.Message):
36
44
  SUCCESS_FIELD_NUMBER: _ClassVar[int]
37
45
  success: bool
38
46
  def __init__(self, success: bool = ...) -> None: ...
47
+
48
+ class ListQueuedJobsRequest(_message.Message):
49
+ __slots__ = ("offset", "limit")
50
+ OFFSET_FIELD_NUMBER: _ClassVar[int]
51
+ LIMIT_FIELD_NUMBER: _ClassVar[int]
52
+ offset: int
53
+ limit: int
54
+ def __init__(
55
+ self, offset: _Optional[int] = ..., limit: _Optional[int] = ...
56
+ ) -> None: ...
57
+
58
+ class ListQueuedJobsResult(_message.Message):
59
+ __slots__ = ("queued_jobs",)
60
+ class ListQueuedJobsResultItem(_message.Message):
61
+ __slots__ = ("uuid", "job_ref_name", "num_attempts", "submitted_at")
62
+ UUID_FIELD_NUMBER: _ClassVar[int]
63
+ JOB_REF_NAME_FIELD_NUMBER: _ClassVar[int]
64
+ NUM_ATTEMPTS_FIELD_NUMBER: _ClassVar[int]
65
+ SUBMITTED_AT_FIELD_NUMBER: _ClassVar[int]
66
+ uuid: str
67
+ job_ref_name: str
68
+ num_attempts: int
69
+ submitted_at: _timestamp_pb2.Timestamp
70
+ def __init__(
71
+ self,
72
+ uuid: _Optional[str] = ...,
73
+ job_ref_name: _Optional[str] = ...,
74
+ num_attempts: _Optional[int] = ...,
75
+ submitted_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...,
76
+ ) -> None: ...
77
+
78
+ QUEUED_JOBS_FIELD_NUMBER: _ClassVar[int]
79
+ queued_jobs: _containers.RepeatedCompositeFieldContainer[
80
+ ListQueuedJobsResult.ListQueuedJobsResultItem
81
+ ]
82
+ def __init__(
83
+ self,
84
+ queued_jobs: _Optional[
85
+ _Iterable[_Union[ListQueuedJobsResult.ListQueuedJobsResultItem, _Mapping]]
86
+ ] = ...,
87
+ ) -> None: ...
@@ -29,6 +29,11 @@ class CommandServerStub(object):
29
29
  request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.SerializeToString,
30
30
  response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.FromString,
31
31
  )
32
+ self.ListQueuedJobs = channel.unary_unary(
33
+ "/CommandServer/ListQueuedJobs",
34
+ request_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsRequest.SerializeToString,
35
+ response_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsResult.FromString,
36
+ )
32
37
 
33
38
 
34
39
  class CommandServerServicer(object):
@@ -46,6 +51,12 @@ class CommandServerServicer(object):
46
51
  context.set_details("Method not implemented!")
47
52
  raise NotImplementedError("Method not implemented!")
48
53
 
54
+ def ListQueuedJobs(self, request, context):
55
+ """Missing associated documentation comment in .proto file."""
56
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
57
+ context.set_details("Method not implemented!")
58
+ raise NotImplementedError("Method not implemented!")
59
+
49
60
 
50
61
  def add_CommandServerServicer_to_server(servicer, server):
51
62
  rpc_method_handlers = {
@@ -59,6 +70,11 @@ def add_CommandServerServicer_to_server(servicer, server):
59
70
  request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthRequest.FromString,
60
71
  response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.CheckHealthResult.SerializeToString,
61
72
  ),
73
+ "ListQueuedJobs": grpc.unary_unary_rpc_method_handler(
74
+ servicer.ListQueuedJobs,
75
+ request_deserializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsRequest.FromString,
76
+ response_serializer=uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsResult.SerializeToString,
77
+ ),
62
78
  }
63
79
  generic_handler = grpc.method_handlers_generic_handler(
64
80
  "CommandServer", rpc_method_handlers
@@ -127,3 +143,32 @@ class CommandServer(object):
127
143
  timeout,
128
144
  metadata,
129
145
  )
146
+
147
+ @staticmethod
148
+ def ListQueuedJobs(
149
+ request,
150
+ target,
151
+ options=(),
152
+ channel_credentials=None,
153
+ call_credentials=None,
154
+ insecure=False,
155
+ compression=None,
156
+ wait_for_ready=None,
157
+ timeout=None,
158
+ metadata=None,
159
+ ):
160
+ return grpc.experimental.unary_unary(
161
+ request,
162
+ target,
163
+ "/CommandServer/ListQueuedJobs",
164
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsRequest.SerializeToString,
165
+ uncountable_dot_integration_dot_queue__runner_dot_command__server_dot_protocol_dot_command__server__pb2.ListQueuedJobsResult.FromString,
166
+ options,
167
+ channel_credentials,
168
+ insecure,
169
+ call_credentials,
170
+ compression,
171
+ wait_for_ready,
172
+ timeout,
173
+ metadata,
174
+ )
@@ -68,6 +68,34 @@ class DatastoreSqlite(Datastore):
68
68
  delete_stmt = delete(QueuedJob).filter(QueuedJob.id == queued_job_uuid)
69
69
  session.execute(delete_stmt)
70
70
 
71
+ def list_queued_job_metadata(
72
+ self, offset: int = 0, limit: int | None = 100
73
+ ) -> list[queued_job_t.QueuedJobMetadata]:
74
+ with self.session_maker() as session:
75
+ select_statement = (
76
+ select(
77
+ QueuedJob.id,
78
+ QueuedJob.job_ref_name,
79
+ QueuedJob.num_attempts,
80
+ QueuedJob.submitted_at,
81
+ )
82
+ .order_by(QueuedJob.submitted_at)
83
+ .offset(offset)
84
+ .limit(limit)
85
+ )
86
+
87
+ queued_job_metadata: list[queued_job_t.QueuedJobMetadata] = [
88
+ queued_job_t.QueuedJobMetadata(
89
+ queued_job_uuid=row.id,
90
+ job_ref_name=row.job_ref_name,
91
+ num_attempts=row.num_attempts,
92
+ submitted_at=row.submitted_at,
93
+ )
94
+ for row in session.execute(select_statement)
95
+ ]
96
+
97
+ return queued_job_metadata
98
+
71
99
  def load_job_queue(self) -> list[queued_job_t.QueuedJob]:
72
100
  with self.session_maker() as session:
73
101
  select_stmt = select(
@@ -17,3 +17,8 @@ class Datastore(ABC):
17
17
 
18
18
  @abstractmethod
19
19
  def load_job_queue(self) -> list[queued_job_t.QueuedJob]: ...
20
+
21
+ @abstractmethod
22
+ def list_queued_job_metadata(
23
+ self, offset: int, limit: int | None
24
+ ) -> list[queued_job_t.QueuedJobMetadata]: ...
@@ -6,8 +6,6 @@ from dataclasses import dataclass
6
6
 
7
7
  from opentelemetry.trace import get_current_span
8
8
 
9
- from uncountable.integration.db.connect import IntegrationDBService, create_db_engine
10
- from uncountable.integration.db.session import get_session_maker
11
9
  from uncountable.integration.queue_runner.command_server import (
12
10
  CommandEnqueueJob,
13
11
  CommandEnqueueJobResponse,
@@ -83,14 +81,11 @@ def _start_workers(
83
81
  return job_worker_lookup
84
82
 
85
83
 
86
- async def start_scheduler(command_queue: CommandQueue) -> None:
84
+ async def start_scheduler(
85
+ command_queue: CommandQueue, datastore: DatastoreSqlite
86
+ ) -> None:
87
87
  logger = Logger(get_current_span())
88
88
  result_queue: ResultQueue = asyncio.Queue()
89
- engine = create_db_engine(IntegrationDBService.RUNNER)
90
- session_maker = get_session_maker(engine)
91
-
92
- datastore = DatastoreSqlite(session_maker)
93
- datastore.setup(engine)
94
89
 
95
90
  with ProcessPoolExecutor(max_workers=_MAX_JOB_WORKERS) as process_pool:
96
91
  job_worker_lookup = _start_workers(
@@ -1,16 +1,24 @@
1
1
  import asyncio
2
2
 
3
+ from uncountable.integration.db.connect import IntegrationDBService, create_db_engine
4
+ from uncountable.integration.db.session import get_session_maker
3
5
  from uncountable.integration.queue_runner.command_server import serve
4
6
  from uncountable.integration.queue_runner.command_server.types import CommandQueue
7
+ from uncountable.integration.queue_runner.datastore import DatastoreSqlite
5
8
  from uncountable.integration.queue_runner.job_scheduler import start_scheduler
6
9
 
7
10
 
8
11
  async def queue_runner_loop() -> None:
9
12
  command_queue: CommandQueue = asyncio.Queue()
13
+ engine = create_db_engine(IntegrationDBService.RUNNER)
14
+ session_maker = get_session_maker(engine)
10
15
 
11
- command_server = asyncio.create_task(serve(command_queue))
16
+ datastore = DatastoreSqlite(session_maker)
17
+ datastore.setup(engine)
12
18
 
13
- scheduler = asyncio.create_task(start_scheduler(command_queue))
19
+ command_server = asyncio.create_task(serve(command_queue, datastore))
20
+
21
+ scheduler = asyncio.create_task(start_scheduler(command_queue, datastore))
14
22
 
15
23
  await scheduler
16
24
  await command_server
@@ -398,7 +398,7 @@ class EntityType(StrEnum):
398
398
 
399
399
  # DO NOT MODIFY -- This file is generated by type_spec
400
400
  LimitedEntityType = typing.Annotated[
401
- typing.Literal[EntityType.LAB_REQUEST] | typing.Literal[EntityType.APPROVAL] | typing.Literal[EntityType.CUSTOM_ENTITY] | typing.Literal[EntityType.INGREDIENT_ATTRIBUTE] | typing.Literal[EntityType.INVENTORY_AMOUNT] | typing.Literal[EntityType.TASK] | typing.Literal[EntityType.PROJECT] | typing.Literal[EntityType.EQUIPMENT] | typing.Literal[EntityType.INV_LOCAL_LOCATIONS] | typing.Literal[EntityType.FIELD_OPTION_SET] | typing.Literal[EntityType.WEBHOOK] | typing.Literal[EntityType.SPECS] | typing.Literal[EntityType.GOAL] | typing.Literal[EntityType.INGREDIENT_TAG_MAP] | typing.Literal[EntityType.INGREDIENT_TAG] | typing.Literal[EntityType.CONDITION_PARAMETER] | typing.Literal[EntityType.OUTPUT] | typing.Literal[EntityType.OUTPUT_CONDITION_PARAMETER] | typing.Literal[EntityType.ASYNC_JOB] | typing.Literal[EntityType.CONSTRAINT] | typing.Literal[EntityType.INGREDIENT_CATEGORY_ALL] | typing.Literal[EntityType.TIME_SERIES_SEGMENT] | typing.Literal[EntityType.EQUIPMENT_MAINTENANCE] | typing.Literal[EntityType.MAINTENANCE_SCHEDULE] | typing.Literal[EntityType.CONDITION_PARAMETER_RULE] | typing.Literal[EntityType.INGREDIENT] | typing.Literal[EntityType.TIMESHEET_ENTRY],
401
+ typing.Literal[EntityType.LAB_REQUEST] | typing.Literal[EntityType.APPROVAL] | typing.Literal[EntityType.CUSTOM_ENTITY] | typing.Literal[EntityType.INGREDIENT_ATTRIBUTE] | typing.Literal[EntityType.INVENTORY_AMOUNT] | typing.Literal[EntityType.TASK] | typing.Literal[EntityType.PROJECT] | typing.Literal[EntityType.EQUIPMENT] | typing.Literal[EntityType.INV_LOCAL_LOCATIONS] | typing.Literal[EntityType.FIELD_OPTION_SET] | typing.Literal[EntityType.WEBHOOK] | typing.Literal[EntityType.SPECS] | typing.Literal[EntityType.GOAL] | typing.Literal[EntityType.INGREDIENT_TAG_MAP] | typing.Literal[EntityType.INGREDIENT_TAG] | typing.Literal[EntityType.CONDITION_PARAMETER] | typing.Literal[EntityType.OUTPUT] | typing.Literal[EntityType.OUTPUT_CONDITION_PARAMETER] | typing.Literal[EntityType.ASYNC_JOB] | typing.Literal[EntityType.CONSTRAINT] | typing.Literal[EntityType.INGREDIENT_CATEGORY_ALL] | typing.Literal[EntityType.TIME_SERIES_SEGMENT] | typing.Literal[EntityType.EQUIPMENT_MAINTENANCE] | typing.Literal[EntityType.MAINTENANCE_SCHEDULE] | typing.Literal[EntityType.CONDITION_PARAMETER_RULE] | typing.Literal[EntityType.INGREDIENT] | typing.Literal[EntityType.TIMESHEET_ENTRY] | typing.Literal[EntityType.SAVE] | typing.Literal[EntityType.RECIPE_CHECK],
402
402
  serial_alias_annotation(
403
403
  named_type_path="sdk.entity.LimitedEntityType",
404
404
  ),
@@ -11,5 +11,6 @@ from .queued_job_t import InvocationContextWebhook as InvocationContextWebhook
11
11
  from .queued_job_t import InvocationContext as InvocationContext
12
12
  from .queued_job_t import QueuedJobPayload as QueuedJobPayload
13
13
  from .queued_job_t import QueuedJobResult as QueuedJobResult
14
+ from .queued_job_t import QueuedJobMetadata as QueuedJobMetadata
14
15
  from .queued_job_t import QueuedJob as QueuedJob
15
16
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -21,6 +21,7 @@ __all__: list[str] = [
21
21
  "InvocationContextType",
22
22
  "InvocationContextWebhook",
23
23
  "QueuedJob",
24
+ "QueuedJobMetadata",
24
25
  "QueuedJobPayload",
25
26
  "QueuedJobResult",
26
27
  ]
@@ -110,13 +111,21 @@ class QueuedJobResult:
110
111
 
111
112
  # DO NOT MODIFY -- This file is generated by type_spec
112
113
  @serial_class(
113
- named_type_path="sdk.queued_job.QueuedJob",
114
+ named_type_path="sdk.queued_job.QueuedJobMetadata",
114
115
  )
115
116
  @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
116
- class QueuedJob:
117
+ class QueuedJobMetadata:
117
118
  queued_job_uuid: str
118
119
  job_ref_name: str
119
120
  num_attempts: int
120
121
  submitted_at: datetime.datetime
122
+
123
+
124
+ # DO NOT MODIFY -- This file is generated by type_spec
125
+ @serial_class(
126
+ named_type_path="sdk.queued_job.QueuedJob",
127
+ )
128
+ @dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True) # type: ignore[literal-required]
129
+ class QueuedJob(QueuedJobMetadata):
121
130
  payload: QueuedJobPayload
122
131
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: UncountablePythonSDK
3
- Version: 0.0.117
3
+ Version: 0.0.118
4
4
  Summary: Uncountable SDK
5
5
  Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
6
6
  Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git
@@ -1,7 +1,7 @@
1
1
  docs/.gitignore,sha256=_ebkZUcwfvfnGEJ95rfj1lxoBNd6EE9ZvtOc7FsbfFE,7
2
2
  docs/conf.py,sha256=B3WBkqPxlf3xYHXCy91599SJ75G2eGrDs-K_RbsxT5k,1725
3
3
  docs/index.md,sha256=eEdirX_Ds6ICTRtIS5iT4irCquHcQyKN7E4M5QP9T8A,257
4
- docs/justfile,sha256=4MY8aCduyJvy33syeYr0omReSw9o3lSOuZ8QvOv-4a4,273
4
+ docs/justfile,sha256=WymCEQ6W2A8Ak79iUPmecmuaUNN2htb7STUrz5K7ELE,273
5
5
  docs/quickstart.md,sha256=3GuJ0MB1O5kjlsrgAmdSkDq0rYqATrYy-tzEHDy8H-c,422
6
6
  docs/requirements.txt,sha256=IBoo8nKwyuZXoaSX7XOYRJvfT6VjwJPXz49eZvcZGuY,153
7
7
  docs/static/logo_blue.png,sha256=SyYpMTVhhBbhF5Wl8lWaVwz-_p1MIR6dW6bVhufQRME,46708
@@ -23,20 +23,20 @@ examples/invoke_uploader.py,sha256=rEvmVY5TjigN_-4PTQdkjY-bC5DrYMcJgquyZ4Tt5FM,7
23
23
  examples/set_recipe_metadata_file.py,sha256=cRVXGz4UN4aqnNrNSzyBmikYHpe63lMIuzOpMwD9EDU,1036
24
24
  examples/set_recipe_output_file_sdk.py,sha256=Lz1amqppnWTX83z-C090wCJ4hcKmCD3kb-4v0uBRi0Y,782
25
25
  examples/upload_files.py,sha256=qMaSvMSdTMPOOP55y1AwEurc0SOdZAMvEydlqJPsGpg,432
26
- examples/integration-server/pyproject.toml,sha256=i4Px7I__asDvP4WlAd2PncfRRQ-U4t5xp0tqT9YYs3s,9149
26
+ examples/integration-server/pyproject.toml,sha256=-ZZ1R3B-Pf-F6gQX0-Me6u3G9cVW2B2_eechemCe7_4,9149
27
27
  examples/integration-server/jobs/materials_auto/concurrent_cron.py,sha256=xsK3H9ZEaniedC2nJUB0rqOcFI8y-ojfl_nLSJb9AMM,312
28
28
  examples/integration-server/jobs/materials_auto/example_cron.py,sha256=7VVQ-UJsq3DbGpN3XPnorRVZYo-vCwbfSU3VVDluIzA,699
29
29
  examples/integration-server/jobs/materials_auto/example_http.py,sha256=eVq-Fss_AhmztxOMqqO-GYGF3KvPt1O5HbNwwC2arh8,1037
30
- examples/integration-server/jobs/materials_auto/example_instrument.py,sha256=6qq6a8S5soKC-ypVswZEhkLzB_mxd4dxCSIoXKrbGDs,1261
30
+ examples/integration-server/jobs/materials_auto/example_instrument.py,sha256=czJF3qBFay1S8fuESOvmkvBv1wCtZGAlHjwvCyYr-Mw,2336
31
31
  examples/integration-server/jobs/materials_auto/example_runsheet_wh.py,sha256=_wILTnbzzLf9zrcQb_KQKytxxcya1ej6MqQnoUSS4fA,1180
32
32
  examples/integration-server/jobs/materials_auto/example_wh.py,sha256=PN-skP27yJwDZboWk5g5EZEc3AKfVayQLfnopjsDKJc,659
33
33
  examples/integration-server/jobs/materials_auto/profile.yaml,sha256=btUdn8hStM6Zp4zr0kBI2lL461MqDdzfSuF4LvuSQ8k,1926
34
34
  pkgs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
35
35
  pkgs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
- pkgs/argument_parser/__init__.py,sha256=VWUOOtJ-ueRF2lkIJzgQe4xhBKR9IPkgf9vY28nF35s,870
36
+ pkgs/argument_parser/__init__.py,sha256=EG3pwLEHTp-Qltd3lRnO4K22RiVrasePzKPDOfTPxFY,924
37
37
  pkgs/argument_parser/_is_enum.py,sha256=Gw6jJa8nBwYGqXwwCZbSnWL8Rvr5alkg5lSVAqXtOZM,257
38
38
  pkgs/argument_parser/_is_namedtuple.py,sha256=Rjc1bKanIPPogl3qG5JPBxglG1TqWYOo1nxxhBASQWY,265
39
- pkgs/argument_parser/argument_parser.py,sha256=Hlyb3-FXy8PFWtTIzkyatreJ9P0GSDaEEB1ZyW1pS0E,21155
39
+ pkgs/argument_parser/argument_parser.py,sha256=AjmLCNHcGMyXLojSpuKSYvIYE3u8tbg8rjv-yrhosQs,21077
40
40
  pkgs/argument_parser/case_convert.py,sha256=NuJLJUJRbyVb6_Slen4uqaStEHbcOS1d-hBBfDrrw-c,605
41
41
  pkgs/filesystem_utils/__init__.py,sha256=2a0d2rEPlEEYwhm3Wckny4VCp4ZS7JtYSXmwdwNCRjo,1332
42
42
  pkgs/filesystem_utils/_blob_session.py,sha256=4GicmwgGHVcqO8pOTu-EJakKMb1-IsxT9QnVi2D0oKU,5143
@@ -64,11 +64,11 @@ pkgs/strenum_compat/__init__.py,sha256=wXRFeNvBm8RU6dy1PFJ5sRLgUIEeH_DVR95Sv5qpG
64
64
  pkgs/strenum_compat/strenum_compat.py,sha256=uOUAgpYTjHs1MX8dG81jRlyTkt3KNbkV_25zp7xTX2s,36
65
65
  pkgs/type_spec/__init__.py,sha256=h5DmJTca4QVV10sZR1x0-MlkZfuGYDfapR3zHvXfzto,19
66
66
  pkgs/type_spec/__main__.py,sha256=5bJaX9Y_-FavP0qwzhk-z-V97UY7uaezJTa1zhO_HHQ,1048
67
- pkgs/type_spec/builder.py,sha256=17_hkJI5jDb_IGOHZCiZDJ8_er-amVH0-WRpqJb7hEE,54365
67
+ pkgs/type_spec/builder.py,sha256=upgRImVIikI7osMczX-Yiv-IBFo2Cl5NO4RIpj0NTvA,54421
68
68
  pkgs/type_spec/config.py,sha256=m0Rky7Rg2jMglDPQChF30p5h5P86Ap1GObwzLzmypNE,5829
69
69
  pkgs/type_spec/cross_output_links.py,sha256=ttFNfuQmR3sNnPSeUER5IPgLiYc-FB5gjlf7RyFYMpc,3293
70
70
  pkgs/type_spec/emit_io_ts.py,sha256=CUvBs0boB_X-Kndh66yYcqFfq3oC_LGs8YffLkJ0ZXA,5707
71
- pkgs/type_spec/emit_open_api.py,sha256=-2mNpucrEbWNXGZ2xAlqfouPPDP_yOkZVC_ycWpC8UE,26922
71
+ pkgs/type_spec/emit_open_api.py,sha256=pvb41mJhHPATOJ6473q2-WGExyz7w-Q_msEiJlDA6TQ,28453
72
72
  pkgs/type_spec/emit_open_api_util.py,sha256=bTmRvrGP82-eB75hwf9ySI7pDEC87FNQTF18VKEWSXY,2367
73
73
  pkgs/type_spec/emit_python.py,sha256=aURsc-wWdamVDCrIWxA7s8_MLAMjLdXZor6ykkibzXY,52707
74
74
  pkgs/type_spec/emit_typescript.py,sha256=FINir79bz4tJYgJuUylNJFvqChzaFlHNCfZ5D7A6B1I,11447
@@ -90,7 +90,7 @@ pkgs/type_spec/ui_entry_actions/generate_ui_entry_actions.py,sha256=65qUEp9zVcAs
90
90
  pkgs/type_spec/value_spec/__init__.py,sha256=Z-grlcZtxAfEXhPHsK0nD7PFLGsv4eqvunaPN7_TA84,83
91
91
  pkgs/type_spec/value_spec/__main__.py,sha256=oM5lcV6Hv_03okjtfWn2fzSHsarFVa9ArU_g02XnQJw,8879
92
92
  pkgs/type_spec/value_spec/convert_type.py,sha256=OvP7dwUMHXNHVXWYT4jkaYJ96S3a2SnFuC_iMdYVB7s,2927
93
- pkgs/type_spec/value_spec/emit_python.py,sha256=YQCkc9nHYKkFbdqLOW3YT39wciunE58yDuzdXn2rW5Q,7214
93
+ pkgs/type_spec/value_spec/emit_python.py,sha256=OmSvhR5RO_h2wJOVtulkv_Jr0OUJtZ28TfjLuYl2VY8,7413
94
94
  pkgs/type_spec/value_spec/types.py,sha256=Yc3LaKHN1G6wbgrBv0dpu5vijUXtS2GcDTusYPnDvK0,454
95
95
  uncountable/__init__.py,sha256=8l8XWNCKsu7TG94c-xa2KHpDegvxDC2FyQISdWC763Y,89
96
96
  uncountable/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -101,7 +101,7 @@ uncountable/core/environment.py,sha256=4gdJB0ZhRxKlqSKLaE4vUvEUGZ5fy8IAwXcGDRdYt
101
101
  uncountable/core/file_upload.py,sha256=bgvXk9vfF5qlhy2NAUcEEG7Q7i-c1wr2HrpaWD7HldU,4516
102
102
  uncountable/core/types.py,sha256=s2CjqYJpsmbC7xMwxxT7kJ_V9bwokrjjWVVjpMcQpKI,333
103
103
  uncountable/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
104
- uncountable/integration/cli.py,sha256=h3RE0l1SdjkveOKeY2amlmrJppK4HEQJXk8VG9UJRWg,1359
104
+ uncountable/integration/cli.py,sha256=DxqnwKYDNzgWwJaCpta8nPGfX37wLLtUGAYsNAGxWOU,3188
105
105
  uncountable/integration/construct_client.py,sha256=I53mGcdS88hba3HFwgXmWQaTd1d5u0jWNSwyc_vlVsQ,1937
106
106
  uncountable/integration/cron.py,sha256=6eH-kIs3sdYPCyb62_L2M7U_uQTdMTdwY5hreEJb0hw,887
107
107
  uncountable/integration/entrypoint.py,sha256=BHOYPQgKvZE6HG8Rv15MkdYl8lRkvfDgv1OdLo0oQ9Q,433
@@ -120,22 +120,23 @@ uncountable/integration/executors/script_executor.py,sha256=BBQ9f0l7uH2hgKf60jtm
120
120
  uncountable/integration/http_server/__init__.py,sha256=WY2HMcL0UCAGYv8y6Pz-j0azbDGXwubFF21EH_zNPkc,189
121
121
  uncountable/integration/http_server/types.py,sha256=zVXXN8FPstrF9qFduwQBtxPG8I4AOK41nXAnxrtSgxw,1832
122
122
  uncountable/integration/queue_runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
123
- uncountable/integration/queue_runner/job_scheduler.py,sha256=lLP3R8RVE_4CJ9D-AsJSsZVciKCISsvgUMRs4tIZZpE,6557
124
- uncountable/integration/queue_runner/queue_runner.py,sha256=0BmYu5zHdothTevGsB-nXg6MBd1UD-WkP3h1WCKMdQg,710
123
+ uncountable/integration/queue_runner/job_scheduler.py,sha256=3HE9inu3scwRtxh4wrProKR0P7ghjxlXlo57b706rC4,6259
124
+ uncountable/integration/queue_runner/queue_runner.py,sha256=N4sUXmlGzVquybiJ7NQZavCJOBGrxBj6k7mb-TITaN0,1139
125
125
  uncountable/integration/queue_runner/types.py,sha256=8qTq29BTSa5rmW6CBlBntP0pNIiDcwu1wHa78pjroS0,219
126
126
  uncountable/integration/queue_runner/worker.py,sha256=WwJmwHkgovfiqrMeNJVtIyDYJAib5ajog5ag2l_AquI,4584
127
127
  uncountable/integration/queue_runner/command_server/__init__.py,sha256=gQPVILGpWzCr2i5GJyoqna7AOSFvtn4tav69gB78mTQ,571
128
- uncountable/integration/queue_runner/command_server/command_client.py,sha256=DJb0TUVFkiiLBEQzHSN94sTRnuEbutNEgdN39XmnOXI,2046
129
- uncountable/integration/queue_runner/command_server/command_server.py,sha256=yyXryhiEC2eGS0yFElLGsVzSKwOuYvj-zp22jQorkv0,2138
128
+ uncountable/integration/queue_runner/command_server/command_client.py,sha256=Kjov_fQdx-_jJpDeWxQEg8XmlfwWQL94ufIgEjib-nc,2776
129
+ uncountable/integration/queue_runner/command_server/command_server.py,sha256=nDTvFN3U5Tm2ZejQhqZpOaaWpVOJiVB26JlNIT2y0fY,3876
130
+ uncountable/integration/queue_runner/command_server/constants.py,sha256=7J9mQIAMOfV50wnwpn7HgrPFEi3Ritj6HwrGYwxGLoU,88
130
131
  uncountable/integration/queue_runner/command_server/types.py,sha256=PGq6EAGPC_nppXQKp9alAjuBQx1h87BefiMa0XU--PY,975
131
132
  uncountable/integration/queue_runner/command_server/protocol/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
132
- uncountable/integration/queue_runner/command_server/protocol/command_server.proto,sha256=pf7FAT2eGuao0VYCFrgTAsM-tiPi1Bhz19XN5So1WFk,439
133
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py,sha256=-lBTc5Tz48agqNSeOSpBE69e2kRmWF59sUaowCl8p7U,2207
134
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi,sha256=9viBn6PHvtfMSRwam57ke5O2D_k8LapWYVfBRjknIYg,1281
135
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py,sha256=ZVHkuLDjEbXMCxBsw1UrRhT3EEF8CDDqEvmE3Kbp1H4,5359
133
+ uncountable/integration/queue_runner/command_server/protocol/command_server.proto,sha256=V-ztuvK-K3tFdN4F3UblSZ5JETNKYDHGSF_1WYIDo4w,873
134
+ uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py,sha256=3eXIJCQC0swHgEIx_aTpGnPE5s8MvYkyWNf6SZ2uDQQ,3307
135
+ uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi,sha256=lWWZuDGoWwtP-gzhADnJi40_lFbO35fUPqNkoOXVnkA,2954
136
+ uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py,sha256=I2kU_anlxOqfaiUR9IcMTuQYvCXa8xliY4sbvXgLYAE,7504
136
137
  uncountable/integration/queue_runner/datastore/__init__.py,sha256=6BefApqN8D2zlVOH14QAeVzwQ8j5NIb41-njT02Za0k,88
137
- uncountable/integration/queue_runner/datastore/datastore_sqlite.py,sha256=OdE4gSDeGj3hC6jNQj_cgFeHai7NIuRXKXfm6cm12Mc,3799
138
- uncountable/integration/queue_runner/datastore/interface.py,sha256=j4D-zVvLq-48VTVwHVei82UVUJ_P3cxiseyiTl0MoNw,534
138
+ uncountable/integration/queue_runner/datastore/datastore_sqlite.py,sha256=M5cg5pC0FokaRZPhmWDTS_JCzrmSE5yNChN15J47klw,4802
139
+ uncountable/integration/queue_runner/datastore/interface.py,sha256=IrKdA7i_PWYKb4HXYYsggR589vn-6y408r6Bz8qFFOY,685
139
140
  uncountable/integration/queue_runner/datastore/model.py,sha256=8-RI5A2yPZVGBLWINVmMd6VOl_oHtqGtnaNXcapAChw,577
140
141
  uncountable/integration/secret_retrieval/__init__.py,sha256=3QXVj35w8rRMxVvmmsViFYDi3lcb3g70incfalOEm6o,87
141
142
  uncountable/integration/secret_retrieval/retrieve_secret.py,sha256=LBEf18KHtXZxg-ZZ80stJ1vW39AWf0CQllP6pNu3Eq8,2994
@@ -162,7 +163,7 @@ uncountable/types/curves_t.py,sha256=DxYepdC3QKKR7mepOOBoyarNcFZQdUa5ZYH-hwCY3BI
162
163
  uncountable/types/data.py,sha256=u2isf4XEug3Eu-xSIoqGaCQmW2dFaKBHCkP_WKYwwBc,500
163
164
  uncountable/types/data_t.py,sha256=vFoypK_WMGfN28r1sSlDYHZNUdBQC0XCN7-_Mlo4FJk,2832
164
165
  uncountable/types/entity.py,sha256=Zclk1LYcRaYrMDhqyCjMSLEg0fE6_q8LHvV22Qvscgs,566
165
- uncountable/types/entity_t.py,sha256=Q-Ji3IMpQxXoY680ZOYz5Zkcy_wrz3lgQOnKqoU9noA,20666
166
+ uncountable/types/entity_t.py,sha256=cULJs6qZAWnN4U8pZadcsf9A5KmlLTkIlwW_sQf627I,20742
166
167
  uncountable/types/experiment_groups.py,sha256=qUpFOx1AKgzaT_4khCOv5Xs6jwiQGbvHH-GUh3v1nv4,288
167
168
  uncountable/types/experiment_groups_t.py,sha256=29Ct-WPejpYMuGfnFfOoosU9iSfjzxpabpBX6oTPFUA,761
168
169
  uncountable/types/exports.py,sha256=VMmxUO2PpV1Y63hZ2AnVor4H-B6aswJ7YpSru_u89lU,334
@@ -197,8 +198,8 @@ uncountable/types/phases.py,sha256=Capx0Tbx52151tHWw8tdOT_NMKMOyHZhrNuGrhuBzfo,2
197
198
  uncountable/types/phases_t.py,sha256=q6ooPMO60JhzoE_a6MrSmDHYXKyTcRr4TXez3Xu64uE,685
198
199
  uncountable/types/post_base.py,sha256=nHqFw6U6ENxcuj_Y3VG-Sk1NEt4Tud2iBxPhRsJpQKM,258
199
200
  uncountable/types/post_base_t.py,sha256=nZl7XQHc9cSnLgccaBZM93bcnSSjTlo2_TL40n-o7K0,734
200
- uncountable/types/queued_job.py,sha256=TlQMf69foLfr134k00LvFEB4OuGAZHZJl9ro9UzVpaU,821
201
- uncountable/types/queued_job_t.py,sha256=3S69z-oYi0C1FPWhaGtdvp7DWdfI85TiNDPN6Fi0V4c,3954
201
+ uncountable/types/queued_job.py,sha256=mhHR4J1agqepnSV62d6gi10UdVRg6MbqzzO3aft7BxA,886
202
+ uncountable/types/queued_job_t.py,sha256=qX9BQx8um9HxOqq3qCa0bquo1tCcuvJ09I6g9yxh84w,4251
202
203
  uncountable/types/recipe_identifiers.py,sha256=nqrubqofaeg_zV6vOrzqbiuX5tDUQYrGyvugpWX38mY,633
203
204
  uncountable/types/recipe_identifiers_t.py,sha256=OpA884KEZEWrymlwEDwIkuv_qrEiNV9kyBuLeBeD0N8,2443
204
205
  uncountable/types/recipe_inputs.py,sha256=dLqKvac-Ff3owutgvBD8Hc5KPoiu-6Zy22WOUJtAuus,330
@@ -323,7 +324,7 @@ uncountable/types/api/triggers/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr
323
324
  uncountable/types/api/triggers/run_trigger.py,sha256=dgDX_sRWSJ36UuzMZhG25oHV1HIOUKYY2G3fjKugZrw,1204
324
325
  uncountable/types/api/uploader/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
325
326
  uncountable/types/api/uploader/invoke_uploader.py,sha256=Bj7Dq4A90k00suacwk3bLA_dCb2aovS1kAbVam2AQnM,1395
326
- uncountablepythonsdk-0.0.117.dist-info/METADATA,sha256=tj3OqRKTNxJzMEkVnW6fwy_8S1OUT-00sE7vV4W9lag,2143
327
- uncountablepythonsdk-0.0.117.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
328
- uncountablepythonsdk-0.0.117.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
329
- uncountablepythonsdk-0.0.117.dist-info/RECORD,,
327
+ uncountablepythonsdk-0.0.118.dist-info/METADATA,sha256=B6aSQpiK_t2hfoZCL1e2C6FrEJfaNuMNZCDs-lQ9EdI,2143
328
+ uncountablepythonsdk-0.0.118.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
329
+ uncountablepythonsdk-0.0.118.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
330
+ uncountablepythonsdk-0.0.118.dist-info/RECORD,,