hatchet-sdk 1.0.0a1__py3-none-any.whl → 1.0.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of hatchet-sdk may be problematic.
- hatchet_sdk/__init__.py +5 -0
- hatchet_sdk/client.py +17 -5
- hatchet_sdk/clients/admin.py +1 -18
- hatchet_sdk/clients/dispatcher/action_listener.py +2 -4
- hatchet_sdk/clients/durable_event_listener.py +6 -3
- hatchet_sdk/clients/events.py +1 -2
- hatchet_sdk/clients/rest/models/workflow_runs_metrics.py +5 -1
- hatchet_sdk/clients/v1/api_client.py +81 -0
- hatchet_sdk/clients/workflow_listener.py +6 -3
- hatchet_sdk/context/context.py +1 -12
- hatchet_sdk/features/cron.py +89 -119
- hatchet_sdk/features/logs.py +16 -0
- hatchet_sdk/features/metrics.py +75 -0
- hatchet_sdk/features/rate_limits.py +45 -0
- hatchet_sdk/features/runs.py +221 -0
- hatchet_sdk/features/scheduled.py +114 -131
- hatchet_sdk/features/workers.py +41 -0
- hatchet_sdk/features/workflows.py +55 -0
- hatchet_sdk/hatchet.py +36 -14
- hatchet_sdk/runnables/standalone.py +9 -11
- hatchet_sdk/runnables/types.py +1 -1
- hatchet_sdk/runnables/workflow.py +26 -19
- hatchet_sdk/worker/action_listener_process.py +3 -3
- hatchet_sdk/worker/runner/run_loop_manager.py +0 -1
- hatchet_sdk/worker/runner/runner.py +4 -11
- hatchet_sdk/worker/runner/utils/capture_logs.py +0 -3
- hatchet_sdk/worker/worker.py +30 -21
- {hatchet_sdk-1.0.0a1.dist-info → hatchet_sdk-1.0.2.dist-info}/METADATA +2 -1
- {hatchet_sdk-1.0.0a1.dist-info → hatchet_sdk-1.0.2.dist-info}/RECORD +31 -25
- hatchet_sdk/clients/rest_client.py +0 -657
- {hatchet_sdk-1.0.0a1.dist-info → hatchet_sdk-1.0.2.dist-info}/WHEEL +0 -0
- {hatchet_sdk-1.0.0a1.dist-info → hatchet_sdk-1.0.2.dist-info}/entry_points.txt +0 -0
hatchet_sdk/features/workers.py
ADDED
@@ -0,0 +1,41 @@
+from hatchet_sdk.clients.rest.api.worker_api import WorkerApi
+from hatchet_sdk.clients.rest.api_client import ApiClient
+from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
+from hatchet_sdk.clients.rest.models.worker import Worker
+from hatchet_sdk.clients.rest.models.worker_list import WorkerList
+from hatchet_sdk.clients.v1.api_client import BaseRestClient
+
+
+class WorkersClient(BaseRestClient):
+    def _wa(self, client: ApiClient) -> WorkerApi:
+        return WorkerApi(client)
+
+    async def aio_get(self, worker_id: str) -> Worker:
+        async with self.client() as client:
+            return await self._wa(client).worker_get(worker_id)
+
+    def get(self, worker_id: str) -> Worker:
+        return self._run_async_from_sync(self.aio_get, worker_id)
+
+    async def aio_list(
+        self,
+    ) -> WorkerList:
+        async with self.client() as client:
+            return await self._wa(client).worker_list(
+                tenant=self.client_config.tenant_id,
+            )
+
+    def list(
+        self,
+    ) -> WorkerList:
+        return self._run_async_from_sync(self.aio_list)
+
+    async def aio_update(self, worker_id: str, opts: UpdateWorkerRequest) -> Worker:
+        async with self.client() as client:
+            return await self._wa(client).worker_update(
+                worker=worker_id,
+                update_worker_request=opts,
+            )
+
+    def update(self, worker_id: str, opts: UpdateWorkerRequest) -> Worker:
+        return self._run_async_from_sync(self.aio_update, worker_id, opts)
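The new workers client follows the same pattern as the other v1 feature clients: every operation has an async aio_* method plus a sync wrapper that runs it from synchronous code. A minimal usage sketch, assuming a Hatchet instance configured from the environment exposes this client as hatchet.workers (see the properties added to hatchet.py further down) and that the generated WorkerList/Worker models expose rows and metadata.id:

    from hatchet_sdk import Hatchet
    from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest

    hatchet = Hatchet()  # assumes credentials and tenant come from the environment

    # List the tenant's workers, then pause each one. `isPaused` matches the
    # request used by the worker shutdown path elsewhere in this diff;
    # `rows` and `metadata.id` are assumptions about the generated models.
    for worker in hatchet.workers.list().rows or []:
        hatchet.workers.update(worker.metadata.id, UpdateWorkerRequest(isPaused=True))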
hatchet_sdk/features/workflows.py
ADDED
@@ -0,0 +1,55 @@
+from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
+from hatchet_sdk.clients.rest.api.workflow_run_api import WorkflowRunApi
+from hatchet_sdk.clients.rest.api_client import ApiClient
+from hatchet_sdk.clients.rest.models.workflow import Workflow
+from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
+from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
+from hatchet_sdk.clients.v1.api_client import BaseRestClient
+
+
+class WorkflowsClient(BaseRestClient):
+    def _wra(self, client: ApiClient) -> WorkflowRunApi:
+        return WorkflowRunApi(client)
+
+    def _wa(self, client: ApiClient) -> WorkflowApi:
+        return WorkflowApi(client)
+
+    async def aio_get(self, workflow_id: str) -> Workflow:
+        async with self.client() as client:
+            return await self._wa(client).workflow_get(workflow_id)
+
+    def get(self, workflow_id: str) -> Workflow:
+        return self._run_async_from_sync(self.aio_get, workflow_id)
+
+    async def aio_list(
+        self,
+        workflow_name: str | None = None,
+        limit: int | None = None,
+        offset: int | None = None,
+    ) -> WorkflowList:
+        async with self.client() as client:
+            return await self._wa(client).workflow_list(
+                tenant=self.client_config.tenant_id,
+                limit=limit,
+                offset=offset,
+                name=workflow_name,
+            )
+
+    def list(
+        self,
+        workflow_name: str | None = None,
+        limit: int | None = None,
+        offset: int | None = None,
+    ) -> WorkflowList:
+        return self._run_async_from_sync(self.aio_list, workflow_name, limit, offset)
+
+    async def aio_get_version(
+        self, workflow_id: str, version: str | None = None
+    ) -> WorkflowVersion:
+        async with self.client() as client:
+            return await self._wa(client).workflow_version_get(workflow_id, version)
+
+    def get_version(
+        self, workflow_id: str, version: str | None = None
+    ) -> WorkflowVersion:
+        return self._run_async_from_sync(self.aio_get_version, workflow_id, version)
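WorkflowsClient wraps the workflow REST endpoints in the same way. A minimal sketch of listing workflows and resolving a version through the sync wrappers, assuming hatchet.workflows is how this client is reached and that the generated models expose rows, name, and metadata.id:

    from hatchet_sdk import Hatchet

    hatchet = Hatchet()  # assumes configuration comes from the environment

    page = hatchet.workflows.list(workflow_name="my-workflow", limit=10, offset=0)
    for wf in page.rows or []:  # `rows`, `name`, `metadata.id` are assumed model fields
        # Omitting `version` asks for the latest version of the workflow.
        latest = hatchet.workflows.get_version(wf.metadata.id)
        print(wf.name, latest)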
hatchet_sdk/hatchet.py
CHANGED
@@ -4,14 +4,18 @@ from typing import Any, Callable, Type, cast, overload
 
 from hatchet_sdk import Context, DurableContext
 from hatchet_sdk.client import Client
-from hatchet_sdk.clients.admin import AdminClient
 from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
 from hatchet_sdk.clients.events import EventClient
-from hatchet_sdk.clients.rest_client import RestApi
 from hatchet_sdk.clients.run_event_listener import RunEventListenerClient
 from hatchet_sdk.config import ClientConfig
 from hatchet_sdk.features.cron import CronClient
+from hatchet_sdk.features.logs import LogsClient
+from hatchet_sdk.features.metrics import MetricsClient
+from hatchet_sdk.features.rate_limits import RateLimitsClient
+from hatchet_sdk.features.runs import RunsClient
 from hatchet_sdk.features.scheduled import ScheduledClient
+from hatchet_sdk.features.workers import WorkersClient
+from hatchet_sdk.features.workflows import WorkflowsClient
 from hatchet_sdk.labels import DesiredWorkerLabel
 from hatchet_sdk.logger import logger
 from hatchet_sdk.rate_limit import RateLimit
@@ -48,10 +52,6 @@ class Hatchet:
         rest (RestApi): Interface for REST API operations.
     """
 
-    _client: Client
-    cron: CronClient
-    scheduled: ScheduledClient
-
     def __init__(
         self,
         debug: bool = False,
@@ -75,12 +75,38 @@
            logger.setLevel(logging.DEBUG)
 
        self._client = client if client else Client(config=config, debug=debug)
-        self.cron = CronClient(self._client)
-        self.scheduled = ScheduledClient(self._client)
 
    @property
-    def
-        return self._client.
+    def cron(self) -> CronClient:
+        return self._client.cron
+
+    @property
+    def logs(self) -> LogsClient:
+        return self._client.logs
+
+    @property
+    def metrics(self) -> MetricsClient:
+        return self._client.metrics
+
+    @property
+    def rate_limits(self) -> RateLimitsClient:
+        return self._client.rate_limits
+
+    @property
+    def runs(self) -> RunsClient:
+        return self._client.runs
+
+    @property
+    def scheduled(self) -> ScheduledClient:
+        return self._client.scheduled
+
+    @property
+    def workers(self) -> WorkersClient:
+        return self._client.workers
+
+    @property
+    def workflows(self) -> WorkflowsClient:
+        return self._client.workflows
 
    @property
    def dispatcher(self) -> DispatcherClient:
@@ -90,10 +116,6 @@
    def event(self) -> EventClient:
        return self._client.event
 
-    @property
-    def rest(self) -> RestApi:
-        return self._client.rest
-
    @property
    def listener(self) -> RunEventListenerClient:
        return self._client.listener
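The net effect: the eager cron/scheduled attributes and the rest facade are gone, and every feature client is a lazy property that delegates to the underlying Client. A short sketch of the new access pattern (configuration is assumed to come from the environment):

    from hatchet_sdk import Hatchet

    hatchet = Hatchet()

    # Feature clients are reached through properties on Hatchet; the old
    # `hatchet.rest` RestApi facade no longer exists in 1.0.2.
    print(type(hatchet.cron).__name__)       # CronClient
    print(type(hatchet.runs).__name__)       # RunsClient
    print(type(hatchet.workers).__name__)    # WorkersClient
    print(type(hatchet.workflows).__name__)  # WorkflowsClient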
hatchet_sdk/runnables/standalone.py
CHANGED
@@ -2,8 +2,6 @@ import asyncio
 from datetime import datetime
 from typing import Any, Generic, cast, get_type_hints
 
-from google.protobuf import timestamp_pb2
-
 from hatchet_sdk.clients.admin import (
     ScheduleTriggerWorkflowOptions,
     TriggerWorkflowOptions,
@@ -12,7 +10,7 @@ from hatchet_sdk.clients.admin import (
 from hatchet_sdk.clients.rest.models.cron_workflows import CronWorkflows
 from hatchet_sdk.contracts.workflows_pb2 import WorkflowVersion
 from hatchet_sdk.runnables.task import Task
-from hatchet_sdk.runnables.types import R, TWorkflowInput
+from hatchet_sdk.runnables.types import EmptyModel, R, TWorkflowInput
 from hatchet_sdk.runnables.workflow import BaseWorkflow, Workflow
 from hatchet_sdk.utils.aio_utils import get_active_event_loop
 from hatchet_sdk.utils.typing import JSONSerializableMapping, is_basemodel_subclass
@@ -81,14 +79,14 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
 
     def run(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> R:
         return self._extract_result(self._workflow.run(input, options))
 
     async def aio_run(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> R:
         result = await self._workflow.aio_run(input, options)
@@ -96,7 +94,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
 
     def run_no_wait(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> TaskRunRef[TWorkflowInput, R]:
         ref = self._workflow.run_no_wait(input, options)
@@ -105,7 +103,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
 
     async def aio_run_no_wait(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> TaskRunRef[TWorkflowInput, R]:
         ref = await self._workflow.aio_run_no_wait(input, options)
@@ -140,24 +138,24 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
 
     def schedule(
         self,
-
+        run_at: datetime,
         input: TWorkflowInput | None = None,
         options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
     ) -> WorkflowVersion:
         return self._workflow.schedule(
-
+            run_at=run_at,
             input=input,
             options=options,
         )
 
     async def aio_schedule(
         self,
-
+        run_at: datetime,
         input: TWorkflowInput,
         options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
     ) -> WorkflowVersion:
         return await self._workflow.aio_schedule(
-
+            run_at=run_at,
             input=input,
             options=options,
         )
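For callers, the visible changes are that run()/aio_run()/run_no_wait()/aio_run_no_wait() now default their input to an EmptyModel instance, and that schedule()/aio_schedule() take an explicit run_at datetime. A rough sketch, assuming the v1 hatchet.task decorator is how a Standalone is created (that API is outside this diff); only the run/schedule signatures come from the changes above:

    from datetime import datetime, timedelta, timezone

    from hatchet_sdk import Context, Hatchet
    from hatchet_sdk.runnables.types import EmptyModel

    hatchet = Hatchet()


    # Assumed decorator spelling; the calls below are what this diff changes.
    @hatchet.task(name="example-task")
    def example_task(input: EmptyModel, ctx: Context) -> dict:
        return {"ok": True}


    result = example_task.run()  # `input` now defaults to an EmptyModel instance

    # schedule()/aio_schedule() now require an explicit `run_at` keyword.
    example_task.schedule(run_at=datetime.now(timezone.utc) + timedelta(minutes=5))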
hatchet_sdk/runnables/workflow.py
CHANGED
@@ -29,6 +29,7 @@ from hatchet_sdk.runnables.types import (
     DEFAULT_EXECUTION_TIMEOUT,
     DEFAULT_SCHEDULE_TIMEOUT,
     ConcurrencyExpression,
+    EmptyModel,
     R,
     StepType,
     TWorkflowInput,
@@ -271,7 +272,7 @@ class BaseWorkflow(Generic[TWorkflowInput]):
     def is_durable(self) -> bool:
         return any(task.is_durable for task in self.tasks)
 
-    def
+    def create_bulk_run_item(
         self,
         input: TWorkflowInput | None = None,
         key: str | None = None,
@@ -293,10 +294,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
 
     def run_no_wait(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> WorkflowRunRef:
-        return self.client.admin.run_workflow(
+        return self.client._client.admin.run_workflow(
             workflow_name=self.config.name,
             input=input.model_dump() if input else {},
             options=options,
@@ -304,10 +305,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
 
     def run(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> dict[str, Any]:
-        ref = self.client.admin.run_workflow(
+        ref = self.client._client.admin.run_workflow(
             workflow_name=self.config.name,
             input=input.model_dump() if input else {},
             options=options,
@@ -317,10 +318,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
 
     async def aio_run_no_wait(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> WorkflowRunRef:
-        return await self.client.admin.aio_run_workflow(
+        return await self.client._client.admin.aio_run_workflow(
             workflow_name=self.config.name,
             input=input.model_dump() if input else {},
             options=options,
@@ -328,10 +329,10 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
 
     async def aio_run(
         self,
-        input: TWorkflowInput
+        input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
         options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
     ) -> dict[str, Any]:
-        ref = await self.client.admin.aio_run_workflow(
+        ref = await self.client._client.admin.aio_run_workflow(
             workflow_name=self.config.name,
             input=input.model_dump() if input else {},
             options=options,
@@ -343,7 +344,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
         self,
         workflows: list[WorkflowRunTriggerConfig],
     ) -> list[dict[str, Any]]:
-        refs = self.client.admin.run_workflows(
+        refs = self.client._client.admin.run_workflows(
             workflows=workflows,
         )
 
@@ -353,7 +354,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
         self,
         workflows: list[WorkflowRunTriggerConfig],
     ) -> list[dict[str, Any]]:
-        refs = await self.client.admin.aio_run_workflows(
+        refs = await self.client._client.admin.aio_run_workflows(
             workflows=workflows,
         )
 
@@ -363,7 +364,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
         self,
         workflows: list[WorkflowRunTriggerConfig],
     ) -> list[WorkflowRunRef]:
-        return self.client.admin.run_workflows(
+        return self.client._client.admin.run_workflows(
             workflows=workflows,
         )
 
@@ -371,32 +372,32 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
         self,
         workflows: list[WorkflowRunTriggerConfig],
     ) -> list[WorkflowRunRef]:
-        return await self.client.admin.aio_run_workflows(
+        return await self.client._client.admin.aio_run_workflows(
             workflows=workflows,
         )
 
     def schedule(
         self,
-
+        run_at: datetime,
         input: TWorkflowInput | None = None,
         options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
     ) -> WorkflowVersion:
-        return self.client.admin.schedule_workflow(
+        return self.client._client.admin.schedule_workflow(
             name=self.config.name,
-            schedules=cast(list[datetime | timestamp_pb2.Timestamp],
+            schedules=cast(list[datetime | timestamp_pb2.Timestamp], [run_at]),
             input=input.model_dump() if input else {},
             options=options,
         )
 
     async def aio_schedule(
         self,
-
+        run_at: datetime,
         input: TWorkflowInput,
         options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
     ) -> WorkflowVersion:
-        return await self.client.admin.aio_schedule_workflow(
+        return await self.client._client.admin.aio_schedule_workflow(
             name=self.config.name,
-            schedules=
+            schedules=cast(list[datetime | timestamp_pb2.Timestamp], [run_at]),
             input=input.model_dump(),
             options=options,
         )
@@ -661,6 +662,9 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
             concurrency=concurrency,
         )
 
+        if self._on_failure_task:
+            raise ValueError("Only one on-failure task is allowed")
+
         self._on_failure_task = task
 
         return task
@@ -722,6 +726,9 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
             parents=[],
         )
 
+        if self._on_failure_task:
+            raise ValueError("Only one on-failure task is allowed")
+
         self._on_success_task = task
 
         return task
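One behavioral change worth calling out: registering a second on-failure task on the same workflow now raises ValueError instead of silently replacing the first, and the same guard was added on the on-success path. A rough sketch of what that looks like at definition time, assuming the v1 hatchet.workflow(...) factory and on_failure_task decorator spellings, neither of which appears in this diff:

    from hatchet_sdk import Context, Hatchet
    from hatchet_sdk.runnables.types import EmptyModel

    hatchet = Hatchet()
    wf = hatchet.workflow(name="example-workflow")  # assumed factory name


    @wf.on_failure_task()  # assumed decorator name
    def cleanup(input: EmptyModel, ctx: Context) -> None:
        ...


    @wf.on_failure_task()  # a second registration now raises ValueError
    def cleanup_again(input: EmptyModel, ctx: Context) -> None:
        ...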
hatchet_sdk/worker/action_listener_process.py
CHANGED
@@ -98,9 +98,9 @@ class WorkerActionListenerProcess:
         if self.listener is None:
             raise ValueError("listener not started")
 
-        await self.client.
-
-
+        await self.client.workers.aio_update(
+            worker_id=self.listener.worker_id,
+            opts=UpdateWorkerRequest(isPaused=True),
         )
 
     async def start(self, retry_attempt: int = 0) -> None:
hatchet_sdk/worker/runner/runner.py
CHANGED
@@ -55,7 +55,6 @@ class WorkerStatus(Enum):
 class Runner:
     def __init__(
         self,
-        name: str,
         event_queue: "Queue[ActionEvent]",
         config: ClientConfig,
         slots: int | None = None,
@@ -67,7 +66,6 @@ class Runner:
         # We store the config so we can dynamically create clients for the dispatcher client.
         self.config = config
         self.client = Client(config)
-        self.name = self.client.config.namespace + name
         self.slots = slots
         self.tasks: dict[str, asyncio.Task[Any]] = {} # Store run ids and futures
         self.contexts: dict[str, Context] = {} # Store run ids and contexts
@@ -125,6 +123,7 @@
 
         errored = False
         cancelled = task.cancelled()
+        output = None
 
         # Get the output from the future
         try:
@@ -169,6 +168,7 @@
 
         errored = False
         cancelled = task.cancelled()
+        output = None
 
         # Get the output from the future
         try:
@@ -206,12 +206,9 @@
     def thread_action_func(
         self, ctx: Context, task: Task[TWorkflowInput, R], action: Action
     ) -> R:
-        if action.step_run_id
+        if action.step_run_id:
             self.threads[action.step_run_id] = current_thread()
-        elif
-            action.get_group_key_run_id is not None
-            and action.get_group_key_run_id != ""
-        ):
+        elif action.get_group_key_run_id:
             self.threads[action.get_group_key_run_id] = current_thread()
 
         return task.call(ctx)
@@ -286,12 +283,8 @@
             self.dispatcher_client,
             self.admin_client,
             self.client.event,
-            self.client.rest,
-            self.client.workflow_listener,
             self.durable_event_listener,
-            self.workflow_run_event_listener,
             self.worker_context,
-            self.client.config.namespace,
             validator_registry=self.validator_registry,
         )
 
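Aside from dropping the name parameter and the rest/listener wiring, the runner now initializes output before the try block that extracts a task's result, so the variable is always bound even when the task failed or was cancelled. A small self-contained illustration of that pattern (not the SDK's actual method):

    import asyncio


    def collect_result(task: "asyncio.Task[object]") -> tuple[object | None, bool]:
        errored = False
        cancelled = task.cancelled()
        output = None  # bound up front, like the `output = None` lines added above

        if not cancelled:
            try:
                output = task.result()
            except Exception:
                errored = True

        return output, errored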
hatchet_sdk/worker/runner/utils/capture_logs.py
CHANGED
@@ -63,9 +63,6 @@ def capture_logs(
 ) -> Callable[P, Awaitable[T]]:
     @functools.wraps(func)
     async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
-        if not logger:
-            raise Exception("No logger configured on client")
-
         log_stream = StringIO()
         custom_handler = CustomLogHandler(event_client, log_stream)
         custom_handler.setLevel(logging.INFO)
hatchet_sdk/worker/worker.py
CHANGED
@@ -103,7 +103,7 @@ class Worker:
         self.durable_action_queue: "Queue[Action | STOP_LOOP_TYPE]" = self.ctx.Queue()
         self.durable_event_queue: "Queue[ActionEvent]" = self.ctx.Queue()
 
-        self.loop: asyncio.AbstractEventLoop
+        self.loop: asyncio.AbstractEventLoop | None
 
         self.client = Client(config=self.config, debug=self.debug)
 
@@ -226,6 +226,9 @@
     def start(self, options: WorkerStartOptions = WorkerStartOptions()) -> None:
         self.owned_loop = self._setup_loop(options.loop)
 
+        if not self.loop:
+            raise RuntimeError("event loop not set, cannot start worker")
+
         asyncio.run_coroutine_threadsafe(self._aio_start(), self.loop)
 
         # start the loop and wait until its closed
@@ -265,27 +268,31 @@
         )
         self.durable_action_runner = self._run_action_runner(is_durable=True)
 
-
-        self.
-
+        if self.loop:
+            self.action_listener_health_check = self.loop.create_task(
+                self._check_listener_health()
+            )
 
-
+            await self.action_listener_health_check
 
     def _run_action_runner(self, is_durable: bool) -> WorkerActionRunLoopManager:
         # Retrieve the shared queue
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if self.loop:
+            return WorkerActionRunLoopManager(
+                self.name + ("_durable" if is_durable else ""),
+                self.durable_action_registry if is_durable else self.action_registry,
+                self.validator_registry,
+                1_000 if is_durable else self.slots,
+                self.config,
+                self.durable_action_queue if is_durable else self.action_queue,
+                self.durable_event_queue if is_durable else self.event_queue,
+                self.loop,
+                self.handle_kill,
+                self.client.debug,
+                self.labels,
+            )
+
+        raise RuntimeError("event loop not set, cannot start action runner")
 
     def _start_action_listener(
         self, is_durable: bool
@@ -332,7 +339,7 @@
             ):
                 logger.debug("child action listener process killed...")
                 self._status = WorkerStatus.UNHEALTHY
-                if
+                if self.loop:
                     self.loop.create_task(self.exit_gracefully())
                 break
             else:
@@ -349,11 +356,13 @@
     def _handle_exit_signal(self, signum: int, frame: FrameType | None) -> None:
         sig_name = "SIGTERM" if signum == signal.SIGTERM else "SIGINT"
         logger.info(f"received signal {sig_name}...")
-        self.loop
+        if self.loop:
+            self.loop.create_task(self.exit_gracefully())
 
     def _handle_force_quit_signal(self, signum: int, frame: FrameType | None) -> None:
         logger.info("received SIGQUIT...")
-        self.loop
+        if self.loop:
+            self.loop.create_task(self._exit_forcefully())
 
     async def _close(self) -> None:
         logger.info(f"closing worker '{self.name}'...")
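The upshot is that self.loop is now Optional and every loop access is guarded, with start() failing fast when no loop could be set up. A usage sketch under the assumption that hatchet.task and hatchet.worker are the v1 entry points for defining work and constructing this Worker; only start()'s RuntimeError behavior comes from the diff:

    from hatchet_sdk import Context, Hatchet
    from hatchet_sdk.runnables.types import EmptyModel

    hatchet = Hatchet()


    @hatchet.task(name="noop")  # assumed decorator spelling
    def noop(input: EmptyModel, ctx: Context) -> None:
        return None


    def main() -> None:
        worker = hatchet.worker("example-worker", workflows=[noop])  # assumed signature
        worker.start()  # raises RuntimeError if the event loop could not be set up


    if __name__ == "__main__":
        main()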
{hatchet_sdk-1.0.0a1.dist-info → hatchet_sdk-1.0.2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hatchet-sdk
-Version: 1.0.0a1
+Version: 1.0.2
 Summary:
 Author: Alexander Belanger
 Author-email: alexander@hatchet.run
@@ -29,6 +29,7 @@ Requires-Dist: prometheus-client (>=0.21.1,<0.22.0)
 Requires-Dist: protobuf (>=5.29.1,<6.0.0)
 Requires-Dist: pydantic (>=2.6.3,<3.0.0)
 Requires-Dist: pydantic-settings (>=2.7.1,<3.0.0)
+Requires-Dist: pytest-timeout (>=2.3.1,<3.0.0)
 Requires-Dist: python-dateutil (>=2.9.0.post0,<3.0.0)
 Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
 Requires-Dist: tenacity (>=8.4.1)