UncountablePythonSDK 0.0.115__py3-none-any.whl → 0.0.142.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of UncountablePythonSDK might be problematic.

Files changed (119)
  1. docs/conf.py +52 -5
  2. docs/index.md +107 -4
  3. docs/integration_examples/create_ingredient.md +43 -0
  4. docs/integration_examples/create_output.md +56 -0
  5. docs/integration_examples/index.md +6 -0
  6. docs/justfile +1 -1
  7. docs/requirements.txt +3 -2
  8. examples/basic_auth.py +7 -0
  9. examples/integration-server/jobs/materials_auto/example_cron.py +3 -0
  10. examples/integration-server/jobs/materials_auto/example_http.py +19 -7
  11. examples/integration-server/jobs/materials_auto/example_instrument.py +100 -0
  12. examples/integration-server/jobs/materials_auto/example_parse.py +140 -0
  13. examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
  14. examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +57 -16
  15. examples/integration-server/jobs/materials_auto/profile.yaml +27 -0
  16. examples/integration-server/pyproject.toml +4 -4
  17. examples/oauth.py +7 -0
  18. pkgs/argument_parser/__init__.py +1 -0
  19. pkgs/argument_parser/_is_namedtuple.py +3 -0
  20. pkgs/argument_parser/argument_parser.py +22 -3
  21. pkgs/serialization_util/serialization_helpers.py +3 -1
  22. pkgs/type_spec/builder.py +66 -19
  23. pkgs/type_spec/builder_types.py +9 -0
  24. pkgs/type_spec/config.py +26 -5
  25. pkgs/type_spec/cross_output_links.py +10 -16
  26. pkgs/type_spec/emit_open_api.py +72 -22
  27. pkgs/type_spec/emit_open_api_util.py +1 -0
  28. pkgs/type_spec/emit_python.py +76 -12
  29. pkgs/type_spec/emit_typescript.py +48 -32
  30. pkgs/type_spec/emit_typescript_util.py +44 -6
  31. pkgs/type_spec/load_types.py +2 -2
  32. pkgs/type_spec/open_api_util.py +16 -1
  33. pkgs/type_spec/parts/base.ts.prepart +4 -0
  34. pkgs/type_spec/type_info/emit_type_info.py +37 -4
  35. pkgs/type_spec/ui_entry_actions/generate_ui_entry_actions.py +1 -0
  36. pkgs/type_spec/value_spec/__main__.py +2 -2
  37. pkgs/type_spec/value_spec/emit_python.py +6 -1
  38. uncountable/core/client.py +10 -3
  39. uncountable/integration/cli.py +175 -23
  40. uncountable/integration/executors/executors.py +1 -2
  41. uncountable/integration/executors/generic_upload_executor.py +1 -1
  42. uncountable/integration/http_server/types.py +3 -1
  43. uncountable/integration/job.py +35 -3
  44. uncountable/integration/queue_runner/command_server/__init__.py +4 -0
  45. uncountable/integration/queue_runner/command_server/command_client.py +89 -0
  46. uncountable/integration/queue_runner/command_server/command_server.py +117 -5
  47. uncountable/integration/queue_runner/command_server/constants.py +4 -0
  48. uncountable/integration/queue_runner/command_server/protocol/command_server.proto +51 -0
  49. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +34 -11
  50. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +102 -1
  51. uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +180 -0
  52. uncountable/integration/queue_runner/command_server/types.py +44 -1
  53. uncountable/integration/queue_runner/datastore/datastore_sqlite.py +189 -8
  54. uncountable/integration/queue_runner/datastore/interface.py +13 -0
  55. uncountable/integration/queue_runner/datastore/model.py +8 -1
  56. uncountable/integration/queue_runner/job_scheduler.py +85 -21
  57. uncountable/integration/queue_runner/queue_runner.py +10 -2
  58. uncountable/integration/queue_runner/types.py +2 -0
  59. uncountable/integration/queue_runner/worker.py +28 -29
  60. uncountable/integration/scheduler.py +121 -23
  61. uncountable/integration/server.py +36 -6
  62. uncountable/integration/telemetry.py +129 -8
  63. uncountable/integration/webhook_server/entrypoint.py +2 -0
  64. uncountable/types/__init__.py +38 -0
  65. uncountable/types/api/entity/create_or_update_entity.py +1 -0
  66. uncountable/types/api/entity/export_entities.py +13 -0
  67. uncountable/types/api/entity/list_aggregate.py +79 -0
  68. uncountable/types/api/entity/list_entities.py +25 -0
  69. uncountable/types/api/entity/set_barcode.py +43 -0
  70. uncountable/types/api/entity/transition_entity_phase.py +2 -1
  71. uncountable/types/api/files/download_file.py +15 -1
  72. uncountable/types/api/integrations/__init__.py +1 -0
  73. uncountable/types/api/integrations/publish_realtime_data.py +41 -0
  74. uncountable/types/api/integrations/push_notification.py +49 -0
  75. uncountable/types/api/integrations/register_sockets_token.py +41 -0
  76. uncountable/types/api/listing/__init__.py +1 -0
  77. uncountable/types/api/listing/fetch_listing.py +57 -0
  78. uncountable/types/api/notebooks/__init__.py +1 -0
  79. uncountable/types/api/notebooks/add_notebook_content.py +119 -0
  80. uncountable/types/api/outputs/get_output_organization.py +173 -0
  81. uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
  82. uncountable/types/api/recipes/get_recipe_output_metadata.py +2 -2
  83. uncountable/types/api/recipes/get_recipes_data.py +29 -0
  84. uncountable/types/api/recipes/lock_recipes.py +2 -1
  85. uncountable/types/api/recipes/set_recipe_total.py +59 -0
  86. uncountable/types/api/recipes/unlock_recipes.py +2 -1
  87. uncountable/types/api/runsheet/export_default_runsheet.py +44 -0
  88. uncountable/types/api/uploader/complete_async_parse.py +46 -0
  89. uncountable/types/api/user/__init__.py +1 -0
  90. uncountable/types/api/user/get_current_user_info.py +40 -0
  91. uncountable/types/async_batch_processor.py +266 -0
  92. uncountable/types/async_batch_t.py +5 -0
  93. uncountable/types/client_base.py +432 -2
  94. uncountable/types/client_config.py +1 -0
  95. uncountable/types/client_config_t.py +10 -0
  96. uncountable/types/entity_t.py +9 -1
  97. uncountable/types/exports_t.py +1 -0
  98. uncountable/types/integration_server_t.py +2 -0
  99. uncountable/types/integration_session.py +10 -0
  100. uncountable/types/integration_session_t.py +60 -0
  101. uncountable/types/integrations.py +10 -0
  102. uncountable/types/integrations_t.py +62 -0
  103. uncountable/types/listing.py +46 -0
  104. uncountable/types/listing_t.py +533 -0
  105. uncountable/types/notices.py +8 -0
  106. uncountable/types/notices_t.py +37 -0
  107. uncountable/types/notifications.py +11 -0
  108. uncountable/types/notifications_t.py +74 -0
  109. uncountable/types/queued_job.py +2 -0
  110. uncountable/types/queued_job_t.py +20 -2
  111. uncountable/types/sockets.py +20 -0
  112. uncountable/types/sockets_t.py +169 -0
  113. uncountable/types/uploader.py +24 -0
  114. uncountable/types/uploader_t.py +222 -0
  115. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/METADATA +5 -2
  116. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/RECORD +118 -79
  117. docs/quickstart.md +0 -19
  118. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/WHEEL +0 -0
  119. {uncountablepythonsdk-0.0.115.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/top_level.txt +0 -0
@@ -3,7 +3,7 @@ import typing
 from dataclasses import dataclass, field
 
 from . import builder, util
-from .cross_output_links import CrossOutputPaths
+from .builder_types import CrossOutputPaths
 
 INDENT = " "
 
@@ -32,6 +32,7 @@ class EmitTypescriptContext:
     namespace: builder.SpecNamespace
     namespaces: set[builder.SpecNamespace] = field(default_factory=set)
     cross_output_paths: CrossOutputPaths | None = None
+    api_endpoints: dict[builder.EndpointKey, builder.APIEndpointInfo]
 
 
 def ts_type_name(name: str) -> str:
@@ -50,7 +51,10 @@ def ts_name(name: str, name_case: builder.NameCase) -> str:
 
 
 def emit_value_ts(
-    ctx: EmitTypescriptContext, stype: builder.SpecType, value: typing.Any
+    ctx: EmitTypescriptContext,
+    stype: builder.SpecType,
+    value: typing.Any,
+    indent: int = 0,
 ) -> str:
     """Mimics emit_python even if not all types are used in TypeScript yet"""
     literal = builder.unwrap_literal_type(stype)
@@ -80,18 +84,24 @@ def emit_value_ts(
     if stype.defn_type.is_base_type(builder.BaseTypeName.s_dict):
         key_type = stype.parameters[0]
         value_type = stype.parameters[1]
+
+        if not key_type.is_base_type(
+            builder.BaseTypeName.s_string
+        ) and not isinstance(key_type, builder.SpecTypeDefnStringEnum):
+            raise Exception("invalid dict keys -- dict keys must be string or enum")
+
         return (
-            "{\n\t"
-            + ",\n\t".join(
+            f"{{\n{INDENT * (indent + 1)}"
+            + f",\n{INDENT * (indent + 1)}".join(
                 (
                     f"[{emit_value_ts(ctx, key_type, dkey)}]: "
                     if not key_type.is_base_type(builder.BaseTypeName.s_string)
                     else f"{dkey}: "
                 )
-                + emit_value_ts(ctx, value_type, dvalue)
+                + emit_value_ts(ctx, value_type, dvalue, indent=indent + 1)
                 for dkey, dvalue in value.items()
             )
-            + "\n}"
+            + f"\n{INDENT * (indent)}}}"
         )
 
     if stype.defn_type.is_base_type(builder.BaseTypeName.s_optional):
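The hunks above give emit_value_ts in pkgs/type_spec/emit_typescript_util.py an indent parameter so nested dict (and, below, object) constants are emitted with real indentation instead of a hard-coded "\n\t". A minimal standalone sketch of the same indent-threading pattern; the names here are illustrative, not the SDK's API:

```python
INDENT = "  "

def emit_value(value: object, indent: int = 0) -> str:
    # Recursively emit a value as a TypeScript-style literal, indenting each
    # nesting level one step deeper than its parent.
    if isinstance(value, dict):
        inner = f",\n{INDENT * (indent + 1)}".join(
            f"{key}: {emit_value(val, indent + 1)}" for key, val in value.items()
        )
        return f"{{\n{INDENT * (indent + 1)}{inner}\n{INDENT * indent}}}"
    if isinstance(value, str):
        return f'"{value}"'
    return str(value)

print(emit_value({"outer": {"inner": 1}}))
```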
@@ -102,10 +112,37 @@ def emit_value_ts(
 
     elif isinstance(stype, builder.SpecTypeDefnStringEnum):
         return f"{refer_to(ctx, stype)}.{ts_enum_name(value, stype.name_case)}"
+    elif isinstance(stype, builder.SpecTypeDefnObject):
+        assert isinstance(value, dict), (
+            f"Expected dict value for {stype.name} but got {value}"
+        )
+        obj_out = "{"
+        did_emit = False
+        for prop_name, prop in (stype.properties or {}).items():
+            if prop_name not in value and prop.has_default:
+                value_to_emit = prop.default
+            elif prop_name not in value:
+                continue
+            else:
+                value_to_emit = value[prop_name]
+            did_emit = True
+            typescript_name = ts_name(prop.name, prop.name_case)
+            obj_out += f"\n{INDENT * (indent + 1)}{typescript_name}: {emit_value_ts(ctx, prop.spec_type, value_to_emit, indent=indent + 1)},"
+        whitespace = f"\n{INDENT * indent}" if did_emit else ""
+        obj_out += f"{whitespace}}} as const"
+        return obj_out
 
     raise Exception("invalid constant type", value, stype, type(stype))
 
 
+def emit_constant_ts(ctx: EmitTypescriptContext, sconst: builder.SpecConstant) -> None:
+    ctx.out.write("\n\n")
+    ctx.out.write(MODIFY_NOTICE)
+    value = emit_value_ts(ctx, sconst.value_type, sconst.value)
+    const_name = sconst.name.upper()
+    ctx.out.write(f"export const {const_name} = {value}\n")
+
+
 def emit_type_ts(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
     if not isinstance(stype, builder.SpecTypeDefn):
         return
@@ -118,6 +155,7 @@ def emit_type_ts(ctx: EmitTypescriptContext, stype: builder.SpecType) -> None:
 
 
     if isinstance(stype, builder.SpecTypeDefnExternal):
         assert not stype.is_exported, "expecting private names"
+        ctx.out.write("\n")
         ctx.out.write(stype.external_map["ts"])
         ctx.out.write("\n")
@@ -7,8 +7,8 @@ from shelljob import fs
 from pkgs.serialization import yaml
 
 from .builder import SpecBuilder
+from .builder_types import CrossOutputPaths
 from .config import Config
-from .cross_output_links import CrossOutputPaths
 
 ext_map = {
     ".ts": "typescript",
@@ -46,7 +46,7 @@ def load_types(config: Config) -> SpecBuilder | None:
     cross_output_paths = CrossOutputPaths(
         python_types_output=config.python.types_output,
         typescript_types_output=config.typescript.types_output,
-        typescript_routes_output=config.typescript.routes_output,
+        typescript_routes_output_by_endpoint=config.typescript.endpoint_to_routes_output,
         typespec_files_input=config.type_spec_types,
         # IMPROVE not sure how to know which one is the correct one in emit_typescript
     )
@@ -223,13 +223,28 @@ class OpenAPIUnionType(OpenAPIType):
         base_types: list[OpenAPIType],
         description: str | None = None,
         nullable: bool = False,
+        discriminator: str | None = None,
+        discriminator_map: dict[str, OpenAPIRefType] | None = None,
     ) -> None:
         self.base_types = base_types
+        self._discriminator = discriminator
+        self._discriminator_map = discriminator_map
         super().__init__(description=description, nullable=nullable)
 
     def asdict(self) -> dict[str, object]:
         # TODO: use parents description and nullable
-        return {"oneOf": [base_type.asdict() for base_type in self.base_types]}
+        return {
+            "oneOf": [base_type.asdict() for base_type in self.base_types],
+            "discriminator": {
+                "propertyName": self._discriminator,
+                "mapping": {
+                    discriminator_value: base_type.source
+                    for discriminator_value, base_type in self._discriminator_map.items()
+                },
+            }
+            if self._discriminator is not None and self._discriminator_map is not None
+            else None,
+        }
 
 
 class OpenAPIIntersectionType(OpenAPIType):
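The discriminator and discriminator_map parameters added to OpenAPIUnionType above let asdict() emit an OpenAPI oneOf schema that also names the discriminating property and maps each discriminator value to a component reference. Sketching the shape of the emitted fragment; the schema names below are invented for illustration, only the key layout comes from the diff:

```python
union_schema = {
    "oneOf": [
        {"$ref": "#/components/schemas/ExampleVariantA"},
        {"$ref": "#/components/schemas/ExampleVariantB"},
    ],
    "discriminator": {
        "propertyName": "type",
        "mapping": {
            "variant_a": "#/components/schemas/ExampleVariantA",
            "variant_b": "#/components/schemas/ExampleVariantB",
        },
    },
}
```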
@@ -3,6 +3,7 @@
 // doesn't allow referring explicitly to global names (thus cannot override here)
 // IMPROVE: invert relationship for global.d.ts looks here instead
 import * as IO from 'io-ts';
+
 type localJsonScalar = JsonScalar
 type localJsonValue = JsonValue
 type localObjectId = ObjectId
@@ -28,3 +29,6 @@ export const IOJsonValue: IO.Type<JsonValue> = IO.recursion('JsonValue', () =>
 export interface nominal<T> {
   "nominal structural brand": T
 }
+
+// Ids matching a strict integer number are converted to integers
+export const ID_REGEX = /^-?[1-9][0-9]{0,20}$/
@@ -41,10 +41,23 @@ def type_path_of(stype: builder.SpecType) -> object: # NamePath
         parts: list[object] = ["$literal"]
         for parameter in stype.parameters:
             assert isinstance(parameter, builder.SpecTypeLiteralWrapper)
+            emit_value = parameter.value
+            if isinstance(parameter.value_type, builder.SpecTypeDefnObject):
+                emit_value = parameter.value
+                assert isinstance(emit_value, (str, bool)), (
+                    f"invalid-literal-value:{emit_value}"
+                )
+            elif isinstance(parameter.value_type, builder.SpecTypeDefnStringEnum):
+                key = parameter.value
+                assert isinstance(key, str)
+                emit_value = parameter.value_type.values[key].value
+            else:
+                raise Exception("unhandled-literal-type")
+
             # This allows expansion to enum literal values later
             parts.append([
                 "$value",
-                parameter.value,
+                emit_value,
                 type_path_of(parameter.value_type),
             ])
         return parts
@@ -158,9 +171,16 @@ class MapTypeAlias(MapTypeBase):
     discriminator: str | None
 
 
+@dataclasses.dataclass
+class StringEnumValue:
+    value: str
+    label: str
+    deprecated: bool = False
+
+
 @dataclasses.dataclass
 class MapStringEnum(MapTypeBase):
-    values: dict[str, str]
+    values: dict[str, StringEnumValue]
 
 
 MapType = MapTypeObject | MapTypeAlias | MapStringEnum
@@ -267,7 +287,9 @@ def _extract_and_validate_layout(
     for group in ext_info.layout.groups:
         fields = set(group.fields or [])
         for field in fields:
-            assert field in stype.properties, f"layout-refers-to-missing-field:{field}"
+            assert field in stype.properties or field == DISCRIMINATOR_COMMON_NAME, (
+                f"layout-refers-to-missing-field:{field}"
+            )
 
         local_ref_name = None
         if group.ref_name is not None:
@@ -314,6 +336,9 @@ def _pull_property_from_type_recursively(
     return _pull_property_from_type_recursively(stype.base, property_name)
 
 
+DISCRIMINATOR_COMMON_NAME = "type"
+
+
 def _validate_type_ext_info(
     stype: builder.SpecTypeDefnObject,
 ) -> tuple[ExtInfoLayout | None, type_info_t.ExtInfo | None]:
@@ -324,12 +349,16 @@ def _validate_type_ext_info(
     if ext_info.label_fields is not None:
         assert stype.properties is not None
         for name in ext_info.label_fields:
+            if name == DISCRIMINATOR_COMMON_NAME:
+                continue
            prop = _pull_property_from_type_recursively(stype, name)
            assert prop is not None, f"missing-label-field:{name}"
 
     if ext_info.actions is not None:
         assert stype.properties is not None
         for action in ext_info.actions:
+            if action.property == DISCRIMINATOR_COMMON_NAME:
+                continue
            prop = _pull_property_from_type_recursively(stype, action.property)
            assert prop is not None, f"missing-action-field:{action.property}"
 
@@ -414,7 +443,11 @@ def _build_map_type(
         # IMPROVE: We probably want the label here, but this requires a change
         # to the front-end type-info and form code to handle
         values={
-            entry.value: (entry.label or entry.name)
+            entry.value: StringEnumValue(
+                value=entry.value,
+                label=entry.label or entry.name,
+                deprecated=entry.deprecated,
+            )
             for entry in stype.values.values()
         },
     )
@@ -282,6 +282,7 @@ def generate_entry_actions_typescript(
     ctx = emit_typescript_util.EmitTypescriptContext(
         out=definition_buffer,
         namespace=index_namespace,
+        api_endpoints={},
     )
     builder.namespaces[index_namespace.name] = index_namespace
 
@@ -20,7 +20,7 @@ The accepted argument type must accept "None", it is not implied.
 """
 
 import sys
-from typing import TypeVar, cast
+from typing import Match, Pattern, TypeVar, cast
 
 import regex as re
 
@@ -56,7 +56,7 @@ class Source:
     def has_more(self) -> bool:
         return self._at < len(self._text)
 
-    def match(self, expression: re.Pattern) -> re.Match | None:
+    def match(self, expression: Pattern[str]) -> Match[str] | None:
         self.skip_space()
         m = expression.match(self._text, self._at)
         if m is not None:
@@ -75,9 +75,14 @@ def _emit_function_wrapper(function: value_spec_t.Function) -> str:
         ):
             python_type += " | None"
             any_pass_null = True
+
+        if python_type.startswith("base_t.ExtJsonValue"):
+            return_statement = f"self._extract({index})"
+        else:
+            return_statement = f"cast({python_type}, self._extract({index}))"
         out.write(
             f"""{INDENT}def get_{argument.ref_name}(self) -> {python_type}:
-{INDENT}{INDENT}return cast({python_type}, self._extract({index}))
+{INDENT}{INDENT}return {return_statement}
 """
         )
         out.write("\n")
@@ -226,13 +226,15 @@ class Client(ClientMethods):
         except JSONDecodeError as e:
             raise SDKError("unable to process response", request_id=request_id) from e
 
-    def _send_request(self, request: requests.Request) -> requests.Response:
+    def _send_request(
+        self, request: requests.Request, *, timeout: float | None = None
+    ) -> requests.Response:
         if self._cfg.extra_headers is not None:
             request.headers = {**request.headers, **self._cfg.extra_headers}
         if self._cfg.transform_request is not None:
             request = self._cfg.transform_request(request)
         prepared_request = request.prepare()
-        response = self._session.send(prepared_request)
+        response = self._session.send(prepared_request, timeout=timeout)
         return response
 
     def do_request(self, *, api_request: APIRequest, return_type: type[DT]) -> DT:
@@ -257,7 +259,12 @@
         with push_scope_optional(self._cfg.logger, "api_call", attributes=attributes):
             if self._cfg.logger is not None:
                 self._cfg.logger.log_info(api_request.endpoint, attributes=attributes)
-            response = self._send_request(request)
+            timeout = (
+                api_request.request_options.timeout_secs
+                if api_request.request_options is not None
+                else None
+            )
+            response = self._send_request(request, timeout=timeout)
             response_data = self._get_response_json(response, request_id=request_id)
             cached_parser = self._get_cached_parser(return_type)
             try:
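The two uncountable/core/client.py hunks let a per-request timeout (api_request.request_options.timeout_secs) flow through _send_request. The mechanism is simply the timeout argument of requests.Session.send; a self-contained sketch of that pattern, outside the SDK's own types:

```python
import requests

def send_with_timeout(
    session: requests.Session,
    request: requests.Request,
    timeout: float | None = None,
) -> requests.Response:
    # timeout=None keeps the previous behaviour (no client-side limit);
    # a float bounds both the connect and read phases.
    prepared = request.prepare()
    return session.send(prepared, timeout=timeout)

session = requests.Session()
resp = send_with_timeout(
    session, requests.Request("GET", "https://example.com"), timeout=5.0
)
print(resp.status_code)
```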
@@ -1,49 +1,201 @@
 import argparse
+import json
+from typing import assert_never
 
+from dateutil import tz
 from opentelemetry.trace import get_current_span
+from tabulate import tabulate
 
 from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.queue_runner.command_server.command_client import (
+    send_job_cancellation_message,
     send_job_queue_message,
+    send_list_queued_jobs_message,
+    send_retry_job_message,
+)
+from uncountable.integration.queue_runner.command_server.types import (
+    CommandCancelJobStatus,
 )
 from uncountable.integration.telemetry import Logger
 from uncountable.types import queued_job_t
 
 
-def main() -> None:
-    logger = Logger(get_current_span())
+def register_enqueue_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    run_parser = sub_parser_manager.add_parser(
+        "run",
+        parents=parents,
+        help="Process a job with a given host and job ID",
+        description="Process a job with a given host and job ID",
+    )
+    run_parser.add_argument("job_id", type=str, help="The ID of the job to process")
+    run_parser.add_argument(
+        "--payload", type=str, help="JSON payload for webhook invocation context"
+    )
+
+    def _handle_enqueue_job(args: argparse.Namespace) -> None:
+        invocation_context: queued_job_t.InvocationContext
+
+        if args.payload is not None:
+            try:
+                webhook_payload = json.loads(args.payload)
+                invocation_context = queued_job_t.InvocationContextWebhook(
+                    webhook_payload=webhook_payload
+                )
+            except json.JSONDecodeError as e:
+                raise ValueError(f"Invalid JSON payload: {e}")
+        else:
+            invocation_context = queued_job_t.InvocationContextManual()
+
+        send_job_queue_message(
+            job_ref_name=args.job_id,
+            payload=queued_job_t.QueuedJobPayload(
+                invocation_context=invocation_context
+            ),
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+    run_parser.set_defaults(func=_handle_enqueue_job)
 
-    parser = argparse.ArgumentParser(
-        description="Process a job with a given command and job ID."
+
+def register_cancel_queued_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    cancel_parser = sub_parser_manager.add_parser(
+        "cancel",
+        parents=parents,
+        help="Cancel a queued job with a given host and queued job UUID",
+        description="Cancel a job with a given host and queued job UUID",
+    )
+    cancel_parser.add_argument(
+        "uuid", type=str, help="The UUID of the queued job to cancel"
     )
 
-    parser.add_argument(
-        "command",
-        type=str,
-        choices=["run"],
-        help="The command to execute (e.g., 'run')",
+    def _handle_cancel_queued_job(args: argparse.Namespace) -> None:
+        resp = send_job_cancellation_message(
+            queued_job_uuid=args.uuid,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+        match resp:
+            case CommandCancelJobStatus.CANCELLED_WITH_RESTART:
+                print(
+                    "Job successfully cancelled. The integration server will restart."
+                )
+            case CommandCancelJobStatus.NO_JOB_FOUND:
+                print("Job not found.")
+            case CommandCancelJobStatus.JOB_ALREADY_COMPLETED:
+                print("Job already completed.")
+            case _:
+                assert_never(resp)
+
+    cancel_parser.set_defaults(func=_handle_cancel_queued_job)
+
+
+def register_list_queued_jobs(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    list_queued_jobs_parser = sub_parser_manager.add_parser(
+        "list-queued-jobs",
+        parents=parents,
+        help="List all jobs queued on the integration server",
+        description="List all jobs queued on the integration server",
+    )
+
+    list_queued_jobs_parser.add_argument(
+        "--offset",
+        type=int,
+        default=0,
+        help="Number of jobs to skip. Should be non-negative.",
+    )
+    list_queued_jobs_parser.add_argument(
+        "--limit",
+        type=int,
+        default=100,
+        help="A number between 1 and 100 specifying the number of jobs to return in the result set.",
     )
 
-    parser.add_argument("job_id", type=str, help="The ID of the job to process")
+    def _handle_list_queued_jobs(args: argparse.Namespace) -> None:
+        queued_jobs = send_list_queued_jobs_message(
+            offset=args.offset,
+            limit=args.limit,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
 
-    parser.add_argument(
+        headers = ["UUID", "Job Ref Name", "Attempts", "Status", "Submitted At"]
+        rows = [
+            [
+                job.uuid,
+                job.job_ref_name,
+                job.num_attempts,
+                job.status,
+                job.submitted_at.ToDatetime(tz.UTC).astimezone(tz.tzlocal()),
+            ]
+            for job in queued_jobs
+        ]
+        print(tabulate(rows, headers=headers, tablefmt="grid"))
+
+    list_queued_jobs_parser.set_defaults(func=_handle_list_queued_jobs)
+
+
+def register_retry_job_parser(
+    sub_parser_manager: argparse._SubParsersAction,
+    parents: list[argparse.ArgumentParser],
+) -> None:
+    retry_failed_jobs_parser = sub_parser_manager.add_parser(
+        "retry-job",
+        parents=parents,
+        help="Retry failed job on the integration server",
+        description="Retry failed job on the integration server",
+    )
+
+    retry_failed_jobs_parser.add_argument(
+        "job_uuid", type=str, help="The uuid of the job to retry"
+    )
+
+    def _handle_retry_job(args: argparse.Namespace) -> None:
+        send_retry_job_message(
+            job_uuid=args.job_uuid,
+            host=args.host,
+            port=get_local_admin_server_port(),
+        )
+
+    retry_failed_jobs_parser.set_defaults(func=_handle_retry_job)
+
+
+def main() -> None:
+    logger = Logger(get_current_span())
+
+    main_parser = argparse.ArgumentParser(
+        description="Execute a given integrations server command."
+    )
+
+    base_parser = argparse.ArgumentParser(add_help=False)
+    base_parser.add_argument(
         "--host", type=str, default="localhost", nargs="?", help="The host to run on"
     )
 
-    args = parser.parse_args()
+    subparser_action = main_parser.add_subparsers(
+        dest="command",
+        required=True,
+        help="The command to execute (e.g., 'run')",
+    )
 
+    register_enqueue_job_parser(subparser_action, parents=[base_parser])
+    register_retry_job_parser(subparser_action, parents=[base_parser])
+    register_list_queued_jobs(subparser_action, parents=[base_parser])
+    register_cancel_queued_job_parser(subparser_action, parents=[base_parser])
+
+    args = main_parser.parse_args()
     with logger.push_scope(args.command):
-        if args.command == "run":
-            send_job_queue_message(
-                job_ref_name=args.job_id,
-                payload=queued_job_t.QueuedJobPayload(
-                    invocation_context=queued_job_t.InvocationContextManual()
-                ),
-                host=args.host,
-                port=get_local_admin_server_port(),
-            )
-        else:
-            parser.print_usage()
+        args.func(args)
 
 
 main()
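The rewritten uncountable/integration/cli.py swaps the single positional command argument for argparse subcommands (run, cancel, list-queued-jobs, retry-job): each subcommand registers its own arguments, inherits --host from a shared parent parser, and binds a handler via set_defaults(func=...), which main() then dispatches with args.func(args). The pattern in miniature, with stub handlers standing in for the real command-client calls:

```python
import argparse

def main() -> None:
    # Shared options live on a parent parser that each subcommand inherits.
    base = argparse.ArgumentParser(add_help=False)
    base.add_argument("--host", type=str, default="localhost")

    parser = argparse.ArgumentParser(description="Integration server commands")
    sub = parser.add_subparsers(dest="command", required=True)

    run = sub.add_parser("run", parents=[base])
    run.add_argument("job_id", type=str)
    run.set_defaults(func=lambda args: print(f"run {args.job_id} on {args.host}"))

    cancel = sub.add_parser("cancel", parents=[base])
    cancel.add_argument("uuid", type=str)
    cancel.set_defaults(func=lambda args: print(f"cancel {args.uuid}"))

    args = parser.parse_args()
    args.func(args)  # dispatch to the selected subcommand's handler

if __name__ == "__main__":
    main()
```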
@@ -88,7 +88,6 @@ def execute_job(
     job_definition: job_definition_t.JobDefinition,
     profile_metadata: job_definition_t.ProfileMetadata,
     args: JobArguments,
-    job_uuid: str,
 ) -> job_definition_t.JobResult:
     with args.logger.push_scope(job_definition.name) as job_logger:
         job = resolve_executor(job_definition.executor, profile_metadata)
@@ -104,7 +103,7 @@
             run_entity = _create_run_entity(
                 client=args.client,
                 logging_settings=job_definition.logging_settings,
-                job_uuid=job_uuid,
+                job_uuid=args.job_uuid,
             )
             result = job.run_outer(args=args)
         except Exception as e:
@@ -41,7 +41,7 @@ def _get_extension(filename: str) -> str | None:
 
 def _run_keyword_detection(data: io.BytesIO, keyword: str) -> bool:
     try:
-        text = io.TextIOWrapper(data)
+        text = io.TextIOWrapper(data, encoding="utf-8")
         for line in text:
             if (
                 keyword in line
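The generic_upload_executor change pins text decoding to UTF-8 instead of the platform's locale encoding, so keyword detection behaves the same across environments. In isolation:

```python
import io

data = io.BytesIO("Schlüssel: Wert\n".encode("utf-8"))
# Without encoding="utf-8", TextIOWrapper falls back to the locale's
# preferred encoding and can mis-decode non-ASCII bytes on some systems.
text = io.TextIOWrapper(data, encoding="utf-8")
print(any("Schlüssel" in line for line in text))
```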
@@ -1,5 +1,6 @@
 import base64
 import functools
+import json
 from dataclasses import dataclass
 
 from flask.wrappers import Response
@@ -42,7 +43,8 @@ class HttpException(Exception):
 
     def make_error_response(self) -> Response:
         return Response(
-            status=self.error_code, response={"error": {"message": str(self)}}
+            status=self.error_code,
+            response=json.dumps({"error": {"message": str(self)}}),
         )
 
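The http_server/types.py change serializes the error payload before handing it to Flask: Response expects a string or bytes body, so a plain dict would not be sent as the intended JSON. A minimal sketch of the corrected pattern, as a standalone helper rather than the SDK's HttpException:

```python
import json
from flask import Response

def make_error_response(message: str, status: int) -> Response:
    # json.dumps produces a str body that Flask can return as-is.
    return Response(
        status=status,
        response=json.dumps({"error": {"message": message}}),
        mimetype="application/json",
    )
```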