UncountablePythonSDK 0.0.126__py3-none-any.whl → 0.0.142.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of UncountablePythonSDK has been flagged by the registry; see the registry page for details.
- docs/requirements.txt +1 -1
- examples/integration-server/jobs/materials_auto/example_cron.py +1 -1
- examples/integration-server/jobs/materials_auto/example_instrument.py +68 -38
- examples/integration-server/jobs/materials_auto/example_parse.py +140 -0
- examples/integration-server/jobs/materials_auto/example_predictions.py +61 -0
- examples/integration-server/jobs/materials_auto/example_runsheet_wh.py +57 -16
- examples/integration-server/jobs/materials_auto/profile.yaml +18 -0
- examples/integration-server/pyproject.toml +4 -4
- pkgs/argument_parser/argument_parser.py +20 -1
- pkgs/serialization_util/serialization_helpers.py +3 -1
- pkgs/type_spec/builder.py +43 -13
- pkgs/type_spec/builder_types.py +9 -0
- pkgs/type_spec/cross_output_links.py +2 -10
- pkgs/type_spec/emit_open_api.py +0 -12
- pkgs/type_spec/emit_python.py +72 -11
- pkgs/type_spec/emit_typescript.py +2 -2
- pkgs/type_spec/emit_typescript_util.py +28 -6
- pkgs/type_spec/load_types.py +1 -1
- pkgs/type_spec/parts/base.ts.prepart +3 -0
- pkgs/type_spec/type_info/emit_type_info.py +27 -3
- pkgs/type_spec/value_spec/__main__.py +2 -2
- uncountable/core/client.py +10 -3
- uncountable/integration/cli.py +89 -2
- uncountable/integration/executors/executors.py +1 -2
- uncountable/integration/executors/generic_upload_executor.py +1 -1
- uncountable/integration/job.py +3 -3
- uncountable/integration/queue_runner/command_server/__init__.py +4 -0
- uncountable/integration/queue_runner/command_server/command_client.py +63 -0
- uncountable/integration/queue_runner/command_server/command_server.py +77 -5
- uncountable/integration/queue_runner/command_server/protocol/command_server.proto +33 -0
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.py +27 -13
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2.pyi +53 -1
- uncountable/integration/queue_runner/command_server/protocol/command_server_pb2_grpc.py +135 -0
- uncountable/integration/queue_runner/command_server/types.py +44 -1
- uncountable/integration/queue_runner/datastore/datastore_sqlite.py +132 -8
- uncountable/integration/queue_runner/datastore/interface.py +3 -0
- uncountable/integration/queue_runner/datastore/model.py +8 -1
- uncountable/integration/queue_runner/job_scheduler.py +78 -3
- uncountable/integration/queue_runner/types.py +2 -0
- uncountable/integration/queue_runner/worker.py +28 -26
- uncountable/integration/scheduler.py +64 -13
- uncountable/integration/server.py +36 -6
- uncountable/integration/telemetry.py +120 -7
- uncountable/integration/webhook_server/entrypoint.py +2 -0
- uncountable/types/__init__.py +18 -0
- uncountable/types/api/entity/list_aggregate.py +79 -0
- uncountable/types/api/entity/list_entities.py +25 -0
- uncountable/types/api/entity/set_barcode.py +43 -0
- uncountable/types/api/entity/transition_entity_phase.py +2 -1
- uncountable/types/api/files/download_file.py +15 -1
- uncountable/types/api/integrations/push_notification.py +2 -0
- uncountable/types/api/integrations/register_sockets_token.py +41 -0
- uncountable/types/api/listing/__init__.py +1 -0
- uncountable/types/api/listing/fetch_listing.py +57 -0
- uncountable/types/api/notebooks/__init__.py +1 -0
- uncountable/types/api/notebooks/add_notebook_content.py +119 -0
- uncountable/types/api/outputs/get_output_organization.py +1 -1
- uncountable/types/api/recipes/edit_recipe_inputs.py +1 -1
- uncountable/types/api/recipes/get_recipes_data.py +29 -0
- uncountable/types/api/recipes/lock_recipes.py +2 -1
- uncountable/types/api/recipes/set_recipe_total.py +59 -0
- uncountable/types/api/recipes/unlock_recipes.py +2 -1
- uncountable/types/api/runsheet/export_default_runsheet.py +44 -0
- uncountable/types/api/uploader/complete_async_parse.py +4 -0
- uncountable/types/async_batch_processor.py +222 -0
- uncountable/types/async_batch_t.py +4 -0
- uncountable/types/client_base.py +367 -2
- uncountable/types/client_config.py +1 -0
- uncountable/types/client_config_t.py +10 -0
- uncountable/types/entity_t.py +3 -1
- uncountable/types/integration_server_t.py +2 -0
- uncountable/types/listing.py +46 -0
- uncountable/types/listing_t.py +533 -0
- uncountable/types/notices.py +8 -0
- uncountable/types/notices_t.py +37 -0
- uncountable/types/queued_job.py +1 -0
- uncountable/types/queued_job_t.py +9 -0
- uncountable/types/sockets.py +9 -0
- uncountable/types/sockets_t.py +99 -0
- uncountable/types/uploader_t.py +3 -2
- {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/METADATA +4 -2
- {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/RECORD +84 -68
- {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/WHEEL +0 -0
- {uncountablepythonsdk-0.0.126.dist-info → uncountablepythonsdk-0.0.142.dev0.dist-info}/top_level.txt +0 -0
uncountable/integration/queue_runner/worker.py CHANGED

@@ -87,30 +87,32 @@ def run_queued_job(
         base_span=span,
         profile_metadata=job_details.profile_metadata,
         job_definition=job_details.job_definition,
+        queued_job_uuid=queued_job.queued_job_uuid,
     )
-    … (26 removed lines not rendered in the source view)
+    with job_logger.resource_tracking():
+        try:
+            client = construct_uncountable_client(
+                profile_meta=job_details.profile_metadata, logger=job_logger
+            )
+            batch_processor = AsyncBatchProcessor(client=client)
+
+            payload = _resolve_queued_job_payload(queued_job)
+
+            args = JobArguments(
+                job_definition=job_details.job_definition,
+                client=client,
+                batch_processor=batch_processor,
+                profile_metadata=job_details.profile_metadata,
+                logger=job_logger,
+                payload=payload,
+                job_uuid=queued_job.queued_job_uuid,
+            )
+
+            return execute_job(
+                args=args,
+                profile_metadata=job_details.profile_metadata,
+                job_definition=job_details.job_definition,
+            )
+        except BaseException as e:
+            job_logger.log_exception(e)
+            return job_definition_t.JobResult(success=False)
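Note: the rewrite above wires every queued job through resource tracking and a catch-all error path, so a failing job is logged and reported as JobResult(success=False) instead of crashing the worker. A minimal sketch of a job handler under these assumptions (the function name is illustrative, and the exact JobArguments import path is not shown in this diff):

    from uncountable.types import job_definition_t


    def example_job(args) -> job_definition_t.JobResult:
        # args is the JobArguments assembled in run_queued_job above; it
        # carries the API client, the AsyncBatchProcessor, the payload, and
        # a JobLogger tagged with this run's queued_job_uuid.
        args.logger.log_info("job started")
        # ... do work with args.client / args.batch_processor ...
        return job_definition_t.JobResult(success=True)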
uncountable/integration/scheduler.py CHANGED

@@ -6,6 +6,7 @@ import time
 from dataclasses import dataclass
 from datetime import UTC
 from enum import StrEnum
+from typing import assert_never

 from opentelemetry.trace import get_current_span

@@ -16,6 +17,7 @@ from uncountable.integration.queue_runner.command_server import (
     check_health,
 )
 from uncountable.integration.queue_runner.queue_runner import start_queue_runner
+from uncountable.integration.queue_runner.types import RESTART_EXIT_CODE
 from uncountable.integration.telemetry import Logger

 SHUTDOWN_TIMEOUT_SECS = 30
@@ -55,6 +57,19 @@ class ProcessInfo:
         return self.process.poll()


+@dataclass(kw_only=True)
+class ProcessAlarmRestart:
+    process: ProcessInfo
+
+
+@dataclass(kw_only=True)
+class ProcessAlarmShutdownAll:
+    pass
+
+
+ProcessAlarm = ProcessAlarmRestart | ProcessAlarmShutdownAll
+
+
 def handle_shutdown(logger: Logger, processes: dict[ProcessName, ProcessInfo]) -> None:
     logger.log_info("received shutdown command, shutting down sub-processes")
     for proc_info in processes.values():
@@ -129,12 +144,21 @@ def restart_process(
     logger.log_info("uwsgi restarted successfully")


-def …
+def check_process_alarms(
     logger: Logger, processes: dict[ProcessName, ProcessInfo]
-) -> None:
+) -> ProcessAlarm | None:
     for proc_info in processes.values():
         if not proc_info.is_alive:
-            … (1 removed line not rendered in the source view)
+            if proc_info.exitcode == RESTART_EXIT_CODE:
+                logger.log_warning(
+                    f"process {proc_info.name} requested restart! restarting"
+                )
+                return ProcessAlarmRestart(process=proc_info)
+            logger.log_error(
+                f"process {proc_info.name} shut down unexpectedly! shutting down scheduler; exit code is {proc_info.exitcode}"
+            )
+            return ProcessAlarmShutdownAll()
+    return None


 def _wait_queue_runner_online() -> None:
@@ -166,16 +190,24 @@ def main() -> None:
         processes[process.name] = process
         logger.log_info(f"started process {process.name}")

-    … (3 removed lines not rendered in the source view)
+    def _start_queue_runner() -> None:
+        runner_process = multiprocessing.Process(target=start_queue_runner)
+        runner_process.start()
+        add_process(
+            ProcessInfo(
+                name=ProcessName.QUEUE_RUNNER,
+                process=runner_process,
+            )
+        )

-    … (6 removed lines not rendered in the source view)
+    try:
+        _wait_queue_runner_online()
+    except Exception as e:
+        logger.log_exception(e)
+        handle_shutdown(logger, processes=processes)
+        return
+
+    _start_queue_runner()

     cron_process = multiprocessing.Process(target=cron_target)
     cron_process.start()
@@ -189,7 +221,26 @@ def main() -> None:

     try:
         while True:
-            … (1 removed line not rendered in the source view)
+            process_alarm = check_process_alarms(logger, processes=processes)
+            match process_alarm:
+                case ProcessAlarmRestart():
+                    match process_alarm.process.name:
+                        case ProcessName.QUEUE_RUNNER:
+                            del processes[ProcessName.QUEUE_RUNNER]
+                            _start_queue_runner()
+                        case ProcessName.CRON_SERVER | ProcessName.UWSGI:
+                            raise NotImplementedError(
+                                f"restarting {process_alarm.process.name} not yet implemented"
+                            )
+                        case _:
+                            assert_never(process_alarm.process.name)
+                case ProcessAlarmShutdownAll():
+                    handle_shutdown(logger, processes)
+                    sys.exit(1)
+                case None:
+                    pass
+                case _:
+                    assert_never(process_alarm)
             time.sleep(1)
     except KeyboardInterrupt:
         handle_shutdown(logger, processes=processes)
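Note: the supervision loop above distinguishes a requested restart from a crash purely by exit code; any exit code other than RESTART_EXIT_CODE is treated as an unexpected shutdown. A child process opts in by exiting with the reserved code. A sketch of that side of the protocol (the function name here is illustrative):

    import sys

    from uncountable.integration.queue_runner.types import RESTART_EXIT_CODE


    def request_restart() -> None:
        # check_process_alarms() maps this exit code to ProcessAlarmRestart;
        # every other exit code becomes ProcessAlarmShutdownAll.
        sys.exit(RESTART_EXIT_CODE)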
uncountable/integration/server.py CHANGED

@@ -11,7 +11,11 @@ from apscheduler.triggers.cron import CronTrigger
 from opentelemetry.trace import get_current_span
 from sqlalchemy.engine.base import Engine

+from uncountable.core.environment import get_local_admin_server_port
 from uncountable.integration.cron import CronJobArgs, cron_job_executor
+from uncountable.integration.queue_runner.command_server.command_client import (
+    send_vaccuum_queued_jobs_message,
+)
 from uncountable.integration.telemetry import Logger
 from uncountable.types import base_t, job_definition_t
 from uncountable.types.job_definition_t import (
@@ -21,6 +25,14 @@ from uncountable.types.job_definition_t import (

 _MAX_APSCHEDULER_CONCURRENT_JOBS = 1

+VACCUUM_QUEUED_JOBS_JOB_ID = "vacuum_queued_jobs"
+
+STATIC_JOB_IDS = {VACCUUM_QUEUED_JOBS_JOB_ID}
+
+
+def vaccuum_queued_jobs() -> None:
+    send_vaccuum_queued_jobs_message(port=get_local_admin_server_port())
+

 class IntegrationServer:
     _scheduler: BaseScheduler
@@ -36,11 +48,27 @@ class IntegrationServer:
         )
         self._server_logger = Logger(get_current_span())

+    def _register_static_jobs(self) -> None:
+        all_job_ids = {job.id for job in self._scheduler.get_jobs()}
+        if VACCUUM_QUEUED_JOBS_JOB_ID in all_job_ids:
+            self._scheduler.remove_job(VACCUUM_QUEUED_JOBS_JOB_ID)
+
+        self._scheduler.add_job(
+            vaccuum_queued_jobs,
+            max_instances=1,
+            coalesce=True,
+            trigger=CronTrigger.from_crontab("5 4 * * 4"),
+            name="Vaccuum queued jobs",
+            id=VACCUUM_QUEUED_JOBS_JOB_ID,
+            kwargs={},
+            misfire_grace_time=None,
+        )
+
     def register_jobs(self, profiles: list[job_definition_t.ProfileMetadata]) -> None:
-        valid_job_ids = …
+        valid_job_ids: set[str] = set()
         for profile_metadata in profiles:
             for job_defn in profile_metadata.jobs:
-                valid_job_ids.…
+                valid_job_ids.add(job_defn.id)
                 match job_defn:
                     case CronJobDefinition():
                         # Add to ap scheduler
@@ -90,10 +118,11 @@ class IntegrationServer:
                         pass
                     case _:
                         assert_never(job_defn)
-        … (4 removed lines not rendered in the source view)
+        all_job_ids = {job.id for job in self._scheduler.get_jobs()}
+        invalid_job_ids = all_job_ids.difference(valid_job_ids.union(STATIC_JOB_IDS))
+
+        for job_id in invalid_job_ids:
+            self._scheduler.remove_job(job_id)

     def serve_forever(self) -> None:
         signal.pause()
@@ -106,6 +135,7 @@ class IntegrationServer:

     def __enter__(self) -> "IntegrationServer":
         self._start_apscheduler()
+        self._register_static_jobs()
         return self

     def __exit__(
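Note: the static vacuum job uses the crontab expression "5 4 * * 4", i.e. 04:05 every Thursday. A quick way to sanity-check such an expression (assuming APScheduler 3.x, which provides CronTrigger.from_crontab):

    from datetime import datetime, timezone

    from apscheduler.triggers.cron import CronTrigger

    trigger = CronTrigger.from_crontab("5 4 * * 4", timezone="UTC")
    # Prints the next fire time after now, e.g. the coming Thursday 04:05 UTC.
    print(trigger.get_next_fire_time(None, datetime.now(timezone.utc)))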
uncountable/integration/telemetry.py CHANGED

@@ -1,18 +1,24 @@
 import functools
 import json
 import os
+import threading
 import time
 import traceback
+import types
 import typing
 from contextlib import contextmanager
 from enum import StrEnum
 from typing import Generator, assert_never, cast

+import psutil
 from opentelemetry import _logs, trace
 from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
 from opentelemetry.sdk._logs import Logger as OTELLogger
-from opentelemetry.sdk._logs import …
+from opentelemetry.sdk._logs import (
+    LoggerProvider,
+    LogRecord,
+)
 from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter
 from opentelemetry.sdk.resources import Attributes, Resource
 from opentelemetry.sdk.trace import TracerProvider
@@ -95,8 +101,27 @@ class Logger:
     def current_trace_id(self) -> int | None:
         return self.current_span.get_span_context().trace_id

-    def _patch_attributes(
-        … (1 removed line not rendered in the source view)
+    def _patch_attributes(
+        self,
+        attributes: Attributes | None,
+        *,
+        message: str | None = None,
+        severity: LogSeverity | None = None,
+    ) -> Attributes:
+        patched_attributes = {**(attributes if attributes is not None else {})}
+        if message is not None:
+            patched_attributes["message"] = message
+        elif "body" in patched_attributes:
+            patched_attributes["message"] = patched_attributes["body"]
+
+        if severity is not None:
+            patched_attributes["status"] = severity.lower()
+        elif "severity_text" in patched_attributes and isinstance(
+            patched_attributes["severity_text"], str
+        ):
+            patched_attributes["status"] = patched_attributes["severity_text"].lower()
+
+        return patched_attributes

     def _emit_log(
         self, message: str, *, severity: LogSeverity, attributes: Attributes | None
@@ -106,7 +131,9 @@ class Logger:
             body=message,
             severity_text=severity,
             timestamp=time.time_ns(),
-            attributes=self._patch_attributes(…
+            attributes=self._patch_attributes(
+                message=message, severity=severity, attributes=attributes
+            ),
             span_id=self.current_span_id,
             trace_id=self.current_trace_id,
             trace_flags=DEFAULT_TRACE_OPTIONS,
@@ -140,7 +167,9 @@ class Logger:
         attributes: Attributes | None = None,
     ) -> None:
         traceback_str = "".join(traceback.format_exception(exception))
-        patched_attributes = self._patch_attributes(…
+        patched_attributes = self._patch_attributes(
+            message=message, severity=LogSeverity.ERROR, attributes=attributes
+        )
         self.current_span.record_exception(
             exception=exception, attributes=patched_attributes
         )
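Note: the effect of the reworked _patch_attributes is to mirror OTEL-style fields into message/status keys when no explicit message or severity is passed; for example (values illustrative):

    attrs = {"body": "queue connected", "severity_text": "INFO"}
    # logger._patch_attributes(attrs) now yields:
    # {"body": "queue connected", "severity_text": "INFO",
    #  "message": "queue connected", "status": "info"}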
@@ -159,6 +188,76 @@ class Logger:
         yield self


+class PerJobResourceTracker:
+    def __init__(self, logger: "JobLogger", sample_interval: float = 0.5) -> None:
+        self.logger = logger
+        self.sample_interval = sample_interval
+        self._process = psutil.Process(os.getpid())
+        self._stop_event = threading.Event()
+        self._thread: threading.Thread | None = None
+
+        self.max_rss: int = 0
+        self.start_cpu_times: psutil._common.pcputimes | None = None
+        self.end_cpu_times: psutil._common.pcputimes | None = None
+        self.start_wall_time: float | None = None
+        self.end_wall_time: float | None = None
+
+    def start(self) -> None:
+        self.start_cpu_times = self._process.cpu_times()
+        self.start_wall_time = time.monotonic()
+
+        def _monitor() -> None:
+            try:
+                while not self._stop_event.is_set():
+                    rss = self._process.memory_info().rss
+                    self.max_rss = max(self.max_rss, rss)
+                    time.sleep(self.sample_interval)
+            except Exception:
+                self._stop_event.set()
+
+        self._thread = threading.Thread(target=_monitor, daemon=True)
+        self._thread.start()
+
+    def stop(self) -> None:
+        self._stop_event.set()
+        if self._thread is not None:
+            self._thread.join()
+        self.end_cpu_times = self._process.cpu_times()
+        self.end_wall_time = time.monotonic()
+
+    def __enter__(self) -> typing.Self:
+        self.start()
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: types.TracebackType | None,
+    ) -> None:
+        self.stop()
+        stats = dict(self.summary())
+        self.logger.log_info("Job resource usage summary", attributes=stats)
+
+    def summary(self) -> Attributes:
+        assert self.start_cpu_times is not None
+        assert self.end_cpu_times is not None
+        assert self.start_wall_time is not None
+        assert self.end_wall_time is not None
+
+        cpu_user = self.end_cpu_times.user - self.start_cpu_times.user
+        cpu_sys = self.end_cpu_times.system - self.start_cpu_times.system
+        cpu_total = cpu_user + cpu_sys
+        elapsed = self.end_wall_time - self.start_wall_time
+        return {
+            "cpu_user_s": round(cpu_user, 3),
+            "cpu_system_s": round(cpu_sys, 3),
+            "cpu_total_s": round(cpu_total, 3),
+            "wall_time_s": round(elapsed, 3),
+            "peak_rss_mb": round(self.max_rss / (1024 * 1024), 2),
+        }
+
+
 class JobLogger(Logger):
     def __init__(
         self,
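Note: PerJobResourceTracker is a context manager, so callers only need the one-line wrapper already shown in run_queued_job. A sketch of direct use (run_expensive_step is a placeholder):

    with job_logger.resource_tracking():
        run_expensive_step()
    # On exit the tracker logs "Job resource usage summary" with attributes
    # cpu_user_s, cpu_system_s, cpu_total_s, wall_time_s, and peak_rss_mb.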
@@ -166,20 +265,31 @@ class JobLogger(Logger):
         base_span: Span,
         profile_metadata: job_definition_t.ProfileMetadata,
         job_definition: job_definition_t.JobDefinition,
+        queued_job_uuid: str,
     ) -> None:
         self.profile_metadata = profile_metadata
         self.job_definition = job_definition
+        self.queued_job_uuid = queued_job_uuid
         super().__init__(base_span)

-    def _patch_attributes(…
+    def _patch_attributes(
+        self,
+        attributes: Attributes | None,
+        *,
+        message: str | None = None,
+        severity: LogSeverity | None = None,
+    ) -> Attributes:
         patched_attributes: dict[str, base_t.JsonValue] = {
-            **(…
+            **super()._patch_attributes(
+                attributes=attributes, message=message, severity=severity
+            )
         }
         patched_attributes["profile.name"] = self.profile_metadata.name
         patched_attributes["profile.base_url"] = self.profile_metadata.base_url
         patched_attributes["job.name"] = self.job_definition.name
         patched_attributes["job.id"] = self.job_definition.id
         patched_attributes["job.definition_type"] = self.job_definition.type
+        patched_attributes["job.queued_job_uuid"] = self.queued_job_uuid
         match self.job_definition:
             case job_definition_t.CronJobDefinition():
                 patched_attributes["job.definition.cron_spec"] = (
@@ -205,6 +315,9 @@ class JobLogger(Logger):
                 assert_never(self.job_definition.executor)
         return _cast_attributes(patched_attributes)

+    def resource_tracking(self) -> PerJobResourceTracker:
+        return PerJobResourceTracker(self)
+
     @contextmanager
     def push_scope_optional(

uncountable/types/__init__.py CHANGED

@@ -2,6 +2,7 @@
 # ruff: noqa: E402 Q003
 # fmt: off
 # isort: skip_file
+from .api.notebooks import add_notebook_content as add_notebook_content_t
 from .api.recipes import add_recipe_to_project as add_recipe_to_project_t
 from .api.recipes import add_time_series_data as add_time_series_data_t
 from .api.recipes import archive_recipes as archive_recipes_t
@@ -36,8 +37,10 @@ from . import entity_t as entity_t
 from .api.batch import execute_batch as execute_batch_t
 from .api.batch import execute_batch_load_async as execute_batch_load_async_t
 from . import experiment_groups_t as experiment_groups_t
+from .api.runsheet import export_default_runsheet as export_default_runsheet_t
 from .api.entity import export_entities as export_entities_t
 from . import exports_t as exports_t
+from .api.listing import fetch_listing as fetch_listing_t
 from . import field_values_t as field_values_t
 from . import fields_t as fields_t
 from . import generic_upload_t as generic_upload_t
@@ -70,12 +73,15 @@ from . import integration_session_t as integration_session_t
 from . import integrations_t as integrations_t
 from .api.uploader import invoke_uploader as invoke_uploader_t
 from . import job_definition_t as job_definition_t
+from .api.entity import list_aggregate as list_aggregate_t
 from .api.entity import list_entities as list_entities_t
 from .api.id_source import list_id_source as list_id_source_t
+from . import listing_t as listing_t
 from .api.entity import lock_entity as lock_entity_t
 from .api.recipes import lock_recipes as lock_recipes_t
 from .api.entity import lookup_entity as lookup_entity_t
 from .api.id_source import match_id_source as match_id_source_t
+from . import notices_t as notices_t
 from . import notifications_t as notifications_t
 from . import outputs_t as outputs_t
 from . import overrides_t as overrides_t
@@ -93,6 +99,7 @@ from . import recipe_output_metadata_t as recipe_output_metadata_t
 from . import recipe_tags_t as recipe_tags_t
 from . import recipe_workflow_steps_t as recipe_workflow_steps_t
 from . import recipes_t as recipes_t
+from .api.integrations import register_sockets_token as register_sockets_token_t
 from .api.recipes import remove_recipe_from_project as remove_recipe_from_project_t
 from .api.recipe_links import remove_recipe_link as remove_recipe_link_t
 from .api.entity import resolve_entity_ids as resolve_entity_ids_t
@@ -100,6 +107,7 @@ from .api.outputs import resolve_output_conditions as resolve_output_conditions_t
 from . import response_t as response_t
 from .api.triggers import run_trigger as run_trigger_t
 from . import secret_retrieval_t as secret_retrieval_t
+from .api.entity import set_barcode as set_barcode_t
 from .api.permissions import set_core_permissions as set_core_permissions_t
 from .api.entity import set_entity_field_values as set_entity_field_values_t
 from .api.inputs import set_input_attribute_values as set_input_attribute_values_t
@@ -112,6 +120,7 @@ from .api.recipes import set_recipe_output_annotations as set_recipe_output_annotations_t
 from .api.recipes import set_recipe_output_file as set_recipe_output_file_t
 from .api.recipes import set_recipe_outputs as set_recipe_outputs_t
 from .api.recipes import set_recipe_tags as set_recipe_tags_t
+from .api.recipes import set_recipe_total as set_recipe_total_t
 from .api.entity import set_values as set_values_t
 from . import sockets_t as sockets_t
 from .api.entity import transition_entity_phase as transition_entity_phase_t
@@ -129,6 +138,7 @@ from . import workflows_t as workflows_t


 __all__: list[str] = [
+    "add_notebook_content_t",
     "add_recipe_to_project_t",
     "add_time_series_data_t",
     "archive_recipes_t",
@@ -163,8 +173,10 @@ __all__: list[str] = [
     "execute_batch_t",
     "execute_batch_load_async_t",
     "experiment_groups_t",
+    "export_default_runsheet_t",
     "export_entities_t",
     "exports_t",
+    "fetch_listing_t",
     "field_values_t",
     "fields_t",
     "generic_upload_t",
@@ -197,12 +209,15 @@ __all__: list[str] = [
     "integrations_t",
     "invoke_uploader_t",
     "job_definition_t",
+    "list_aggregate_t",
     "list_entities_t",
     "list_id_source_t",
+    "listing_t",
     "lock_entity_t",
     "lock_recipes_t",
     "lookup_entity_t",
     "match_id_source_t",
+    "notices_t",
     "notifications_t",
     "outputs_t",
     "overrides_t",
@@ -220,6 +235,7 @@ __all__: list[str] = [
     "recipe_tags_t",
     "recipe_workflow_steps_t",
     "recipes_t",
+    "register_sockets_token_t",
     "remove_recipe_from_project_t",
     "remove_recipe_link_t",
     "resolve_entity_ids_t",
@@ -227,6 +243,7 @@ __all__: list[str] = [
     "response_t",
     "run_trigger_t",
     "secret_retrieval_t",
+    "set_barcode_t",
     "set_core_permissions_t",
     "set_entity_field_values_t",
     "set_input_attribute_values_t",
@@ -239,6 +256,7 @@ __all__: list[str] = [
     "set_recipe_output_file_t",
     "set_recipe_outputs_t",
     "set_recipe_tags_t",
+    "set_recipe_total_t",
     "set_values_t",
     "sockets_t",
     "transition_entity_phase_t",
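Note: with these re-exports, downstream code reaches the new generated modules through the usual _t aliases:

    from uncountable.types import list_aggregate_t, listing_t, notices_t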
uncountable/types/api/entity/list_aggregate.py ADDED

@@ -0,0 +1,79 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing  # noqa: F401
+import datetime  # noqa: F401
+from decimal import Decimal  # noqa: F401
+import dataclasses
+from pkgs.serialization import serial_class
+from ... import base_t
+
+__all__: list[str] = [
+    "Arguments",
+    "AttributeValue",
+    "ColumnAccess",
+    "ColumnResults",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "GET"
+ENDPOINT_PATH = "api/external/entity/list_aggregate"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.AttributeValue",
+    unconverted_values={"value"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class AttributeValue:
+    name: str
+    value: base_t.JsonValue
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.Arguments",
+    unconverted_values={"attribute_values"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Arguments:
+    config_reference: str
+    attribute_values: list[AttributeValue] | None = None
+    offset: int | None = None
+    limit: int | None = None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.ColumnResults",
+    unconverted_values={"column_values"},
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class ColumnResults:
+    column_values: list[base_t.JsonValue]
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.ColumnAccess",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class ColumnAccess:
+    name: str
+    table_label: str | None
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@serial_class(
+    named_type_path="sdk.api.entity.list_aggregate.Data",
+)
+@dataclasses.dataclass(slots=base_t.ENABLE_SLOTS, kw_only=True)  # type: ignore[literal-required]
+class Data:
+    columns: list[ColumnAccess]
+    results: list[ColumnResults]
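Note: a hedged sketch of using the generated types above. The client method that performs the GET to api/external/entity/list_aggregate is generated in client_base.py and its name is not shown in this diff, so only the Arguments/Data shapes are taken from the file; the values are illustrative:

    from uncountable.types import list_aggregate_t

    args = list_aggregate_t.Arguments(
        config_reference="example_config",
        attribute_values=[
            list_aggregate_t.AttributeValue(name="phase", value="released"),
        ],
        offset=0,
        limit=100,
    )

    def print_rows(data: list_aggregate_t.Data) -> None:
        # Each ColumnResults holds one row's values, presumably aligned
        # with data.columns.
        print([col.name for col in data.columns])
        for row in data.results:
            print(row.column_values)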