hatchet-sdk 1.11.1__py3-none-any.whl → 1.12.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of hatchet-sdk might be problematic.
- hatchet_sdk/__init__.py +2 -0
- hatchet_sdk/client.py +2 -0
- hatchet_sdk/clients/events.py +42 -3
- hatchet_sdk/clients/rest/__init__.py +5 -0
- hatchet_sdk/clients/rest/api/event_api.py +490 -0
- hatchet_sdk/clients/rest/api/filter_api.py +339 -0
- hatchet_sdk/clients/rest/api/tenant_api.py +275 -0
- hatchet_sdk/clients/rest/api/workflow_runs_api.py +310 -0
- hatchet_sdk/clients/rest/models/__init__.py +5 -0
- hatchet_sdk/clients/rest/models/create_tenant_request.py +22 -2
- hatchet_sdk/clients/rest/models/tenant.py +6 -0
- hatchet_sdk/clients/rest/models/tenant_ui_version.py +37 -0
- hatchet_sdk/clients/rest/models/update_tenant_request.py +6 -0
- hatchet_sdk/clients/rest/models/v1_event.py +42 -0
- hatchet_sdk/clients/rest/models/v1_event_triggered_run.py +94 -0
- hatchet_sdk/clients/rest/models/v1_update_filter_request.py +98 -0
- hatchet_sdk/contracts/v1/workflows_pb2.py +26 -24
- hatchet_sdk/contracts/v1/workflows_pb2.pyi +14 -2
- hatchet_sdk/features/filters.py +36 -0
- hatchet_sdk/features/runs.py +22 -3
- hatchet_sdk/features/tenant.py +32 -0
- hatchet_sdk/hatchet.py +51 -8
- hatchet_sdk/runnables/action.py +1 -1
- hatchet_sdk/runnables/types.py +22 -4
- hatchet_sdk/runnables/workflow.py +413 -188
- hatchet_sdk/waits.py +2 -2
- {hatchet_sdk-1.11.1.dist-info → hatchet_sdk-1.12.1.dist-info}/METADATA +1 -1
- {hatchet_sdk-1.11.1.dist-info → hatchet_sdk-1.12.1.dist-info}/RECORD +30 -27
- hatchet_sdk/runnables/standalone.py +0 -391
- {hatchet_sdk-1.11.1.dist-info → hatchet_sdk-1.12.1.dist-info}/WHEEL +0 -0
- {hatchet_sdk-1.11.1.dist-info → hatchet_sdk-1.12.1.dist-info}/entry_points.txt +0 -0
hatchet_sdk/hatchet.py
CHANGED
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 from datetime import timedelta
+from functools import cached_property
 from typing import Any, Callable, Type, Union, cast, overload

 from hatchet_sdk import Context, DurableContext
@@ -8,6 +9,7 @@ from hatchet_sdk.client import Client
 from hatchet_sdk.clients.dispatcher.dispatcher import DispatcherClient
 from hatchet_sdk.clients.events import EventClient
 from hatchet_sdk.clients.listeners.run_event_listener import RunEventListenerClient
+from hatchet_sdk.clients.rest.models.tenant_version import TenantVersion
 from hatchet_sdk.config import ClientConfig
 from hatchet_sdk.features.cron import CronClient
 from hatchet_sdk.features.filters import FiltersClient
@@ -21,9 +23,9 @@ from hatchet_sdk.features.workflows import WorkflowsClient
 from hatchet_sdk.labels import DesiredWorkerLabel
 from hatchet_sdk.logger import logger
 from hatchet_sdk.rate_limit import RateLimit
-from hatchet_sdk.runnables.standalone import Standalone
 from hatchet_sdk.runnables.types import (
     ConcurrencyExpression,
+    DefaultFilter,
     EmptyModel,
     R,
     StickyStrategy,
@@ -31,7 +33,7 @@ from hatchet_sdk.runnables.types import (
     TWorkflowInput,
     WorkflowConfig,
 )
-from hatchet_sdk.runnables.workflow import BaseWorkflow, Workflow
+from hatchet_sdk.runnables.workflow import BaseWorkflow, Standalone, Workflow
 from hatchet_sdk.utils.timedelta_to_expression import Duration
 from hatchet_sdk.utils.typing import CoroutineLike
 from hatchet_sdk.worker.worker import LifespanFn, Worker
@@ -58,6 +60,11 @@ class Hatchet:
             client if client else Client(config=config or ClientConfig(), debug=debug)
         )

+        if self.tenant_engine_version != TenantVersion.V1:
+            logger.warning(
+                "🚨⚠️‼️ YOU ARE USING A V0 ENGINE WITH A V1 SDK, WHICH IS NOT SUPPORTED. PLEASE UPGRADE YOUR ENGINE TO V1.🚨⚠️‼️"
+            )
+
     @property
     def cron(self) -> CronClient:
         """
@@ -156,6 +163,19 @@ class Hatchet:
         """
         return self._client.config.namespace

+    @cached_property
+    def tenant_engine_version(self) -> TenantVersion:
+        """
+        Get the version of the Hatchet engine running in your tenant.
+        """
+        try:
+            return self._client.tenant.get().version
+        except Exception:
+            ## Nothing we can do here - if this fails, it's probably
+            ## because they don't have this endpoint yet, so we need to just
+            ## assume V1 to swallow the warning.
+            return TenantVersion.V1
+
     def worker(
         self,
         name: str,
@@ -214,6 +234,7 @@ class Hatchet:
         default_priority: int = 1,
         concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
         task_defaults: TaskDefaults = TaskDefaults(),
+        default_filters: list[DefaultFilter] = [],
     ) -> Workflow[EmptyModel]: ...

     @overload
@@ -230,6 +251,7 @@ class Hatchet:
         default_priority: int = 1,
         concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
         task_defaults: TaskDefaults = TaskDefaults(),
+        default_filters: list[DefaultFilter] = [],
     ) -> Workflow[TWorkflowInput]: ...

     def workflow(
@@ -245,6 +267,7 @@ class Hatchet:
         default_priority: int = 1,
         concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
         task_defaults: TaskDefaults = TaskDefaults(),
+        default_filters: list[DefaultFilter] = [],
     ) -> Workflow[EmptyModel] | Workflow[TWorkflowInput]:
         """
         Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.
@@ -269,6 +292,8 @@ class Hatchet:

         :param task_defaults: A `TaskDefaults` object controlling the default task settings for this workflow.

+        :param default_filters: A list of filters to create with the workflow is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call once the workflow is created to create them.
+
         :returns: The created `Workflow` object, which can be used to declare tasks, run the workflow, and so on.
         """
@@ -285,6 +310,7 @@ class Hatchet:
                 or cast(Type[TWorkflowInput], EmptyModel),
                 task_defaults=task_defaults,
                 default_priority=default_priority,
+                default_filters=default_filters,
             ),
             self,
         )
@@ -309,6 +335,7 @@ class Hatchet:
         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
         backoff_factor: float | None = None,
         backoff_max_seconds: int | None = None,
+        default_filters: list[DefaultFilter] = [],
     ) -> Callable[
         [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
         Standalone[EmptyModel, R],
@@ -334,6 +361,7 @@ class Hatchet:
         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
         backoff_factor: float | None = None,
         backoff_max_seconds: int | None = None,
+        default_filters: list[DefaultFilter] = [],
     ) -> Callable[
         [Callable[[TWorkflowInput, Context], R | CoroutineLike[R]]],
         Standalone[TWorkflowInput, R],
@@ -358,6 +386,7 @@ class Hatchet:
         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
         backoff_factor: float | None = None,
         backoff_max_seconds: int | None = None,
+        default_filters: list[DefaultFilter] = [],
     ) -> (
         Callable[
             [Callable[[EmptyModel, Context], R | CoroutineLike[R]]],
@@ -403,6 +432,8 @@ class Hatchet:

         :param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

+        :param default_filters: A list of filters to create with the task is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call once the task is created to create them.
+
         :returns: A decorator which creates a `Standalone` task object.
         """

@@ -419,10 +450,10 @@ class Hatchet:
                 on_events=on_events,
                 on_crons=on_crons,
                 sticky=sticky,
-                concurrency=concurrency,
                 default_priority=default_priority,
                 input_validator=input_validator
                 or cast(Type[TWorkflowInput], EmptyModel),
+                default_filters=default_filters,
             ),
             self,
         )
@@ -468,7 +499,7 @@ class Hatchet:
         version: str | None = None,
         sticky: StickyStrategy | None = None,
         default_priority: int = 1,
-        concurrency: ConcurrencyExpression | None = None,
+        concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
         schedule_timeout: Duration = timedelta(minutes=5),
         execution_timeout: Duration = timedelta(seconds=60),
         retries: int = 0,
@@ -476,6 +507,7 @@ class Hatchet:
         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
         backoff_factor: float | None = None,
         backoff_max_seconds: int | None = None,
+        default_filters: list[DefaultFilter] = [],
     ) -> Callable[
         [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
         Standalone[EmptyModel, R],
@@ -493,7 +525,7 @@ class Hatchet:
         version: str | None = None,
         sticky: StickyStrategy | None = None,
         default_priority: int = 1,
-        concurrency: ConcurrencyExpression | None = None,
+        concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
         schedule_timeout: Duration = timedelta(minutes=5),
         execution_timeout: Duration = timedelta(seconds=60),
         retries: int = 0,
@@ -501,6 +533,7 @@ class Hatchet:
         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
         backoff_factor: float | None = None,
         backoff_max_seconds: int | None = None,
+        default_filters: list[DefaultFilter] = [],
     ) -> Callable[
         [Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]]],
         Standalone[TWorkflowInput, R],
@@ -517,7 +550,7 @@ class Hatchet:
         version: str | None = None,
         sticky: StickyStrategy | None = None,
         default_priority: int = 1,
-        concurrency: ConcurrencyExpression | None = None,
+        concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None,
         schedule_timeout: Duration = timedelta(minutes=5),
         execution_timeout: Duration = timedelta(seconds=60),
         retries: int = 0,
@@ -525,6 +558,7 @@ class Hatchet:
         desired_worker_labels: dict[str, DesiredWorkerLabel] = {},
         backoff_factor: float | None = None,
         backoff_max_seconds: int | None = None,
+        default_filters: list[DefaultFilter] = [],
     ) -> (
         Callable[
             [Callable[[EmptyModel, DurableContext], R | CoroutineLike[R]]],
@@ -570,6 +604,8 @@ class Hatchet:

         :param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

+        :param default_filters: A list of filters to create with the task is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call once the task is created to create them.
+
         :returns: A decorator which creates a `Standalone` task object.
         """

@@ -585,14 +621,21 @@ class Hatchet:
                 on_events=on_events,
                 on_crons=on_crons,
                 sticky=sticky,
-                concurrency=concurrency,
                 input_validator=input_validator
                 or cast(Type[TWorkflowInput], EmptyModel),
                 default_priority=default_priority,
+                default_filters=default_filters,
             ),
             self,
         )

+        if isinstance(concurrency, list):
+            _concurrency = concurrency
+        elif isinstance(concurrency, ConcurrencyExpression):
+            _concurrency = [concurrency]
+        else:
+            _concurrency = []
+
         task_wrapper = workflow.durable_task(
             name=inferred_name,
             schedule_timeout=schedule_timeout,
@@ -603,7 +646,7 @@ class Hatchet:
             desired_worker_labels=desired_worker_labels,
             backoff_factor=backoff_factor,
             backoff_max_seconds=backoff_max_seconds,
-            concurrency=
+            concurrency=_concurrency,
         )

         created_task = task_wrapper(func)
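The main user-facing addition above is the `default_filters` parameter on `workflow()`, `task()`, and `durable_task()`. A minimal usage sketch follows, assuming a configured Hatchet client; the workflow name, event key, and filter expression are illustrative only, and `DefaultFilter` is imported from the module where this release defines it (see hatchet_sdk/runnables/types.py below):

# Hedged sketch, not taken from the package: illustrates declaring a filter
# together with the workflow via the new `default_filters` parameter.
from hatchet_sdk import Hatchet
from hatchet_sdk.runnables.types import DefaultFilter

hatchet = Hatchet()  # assumes HATCHET_CLIENT_TOKEN / config is already set up

notify = hatchet.workflow(
    name="notify",                  # illustrative workflow name
    on_events=["user:created"],     # illustrative event key
    default_filters=[
        DefaultFilter(
            expression="true",      # illustrative filter expression
            scope="example-scope",
            payload={"source": "declared-with-workflow"},
        )
    ],
)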
hatchet_sdk/runnables/action.py
CHANGED
@@ -41,7 +41,7 @@ class ActionPayload(BaseModel):

     @model_validator(mode="after")
     def validate_filter_payload(self) -> "ActionPayload":
-        self.filter_payload = self.triggers.get("filter_payload", {})
+        self.filter_payload = self.triggers.get("filter_payload", {}) or {}

         return self

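The one-line change above guards against `triggers` carrying an explicit `None` under the "filter_payload" key: `dict.get` only falls back to its default when the key is absent. A small standalone illustration (not taken from the package):

triggers = {"filter_payload": None}

# Old behavior: the default {} is ignored because the key exists, so None leaks through.
assert triggers.get("filter_payload", {}) is None

# New behavior: `or {}` coerces the falsy None to an empty dict.
assert (triggers.get("filter_payload", {}) or {}) == {}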
hatchet_sdk/runnables/types.py
CHANGED
@@ -1,4 +1,5 @@
 import asyncio
+import json
 from enum import Enum
 from typing import Any, Callable, ParamSpec, Type, TypeGuard, TypeVar, Union

@@ -6,6 +7,7 @@ from pydantic import BaseModel, ConfigDict, Field

 from hatchet_sdk.context.context import Context, DurableContext
 from hatchet_sdk.contracts.v1.workflows_pb2 import Concurrency
+from hatchet_sdk.contracts.v1.workflows_pb2 import DefaultFilter as DefaultFilterProto
 from hatchet_sdk.utils.timedelta_to_expression import Duration
 from hatchet_sdk.utils.typing import AwaitableLike, JSONSerializableMapping

@@ -65,6 +67,21 @@ class TaskDefaults(BaseModel):
     backoff_max_seconds: int | None = None


+class DefaultFilter(BaseModel):
+    expression: str
+    scope: str
+    payload: JSONSerializableMapping = Field(default_factory=dict)
+
+    def to_proto(self) -> DefaultFilterProto:
+        payload_json = json.dumps(self.payload, default=str)
+
+        return DefaultFilterProto(
+            expression=self.expression,
+            scope=self.scope,
+            payload=payload_json,
+        )
+
+
 class WorkflowConfig(BaseModel):
     model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)

@@ -79,6 +96,7 @@ class WorkflowConfig(BaseModel):
     default_priority: int | None = None

     task_defaults: TaskDefaults = TaskDefaults()
+    default_filters: list[DefaultFilter] = Field(default_factory=list)


 class StepType(str, Enum):
@@ -93,13 +111,13 @@ TaskFunc = Union[AsyncFunc[TWorkflowInput, R], SyncFunc[TWorkflowInput, R]]


 def is_async_fn(
-    fn: TaskFunc[TWorkflowInput, R]
+    fn: TaskFunc[TWorkflowInput, R],
 ) -> TypeGuard[AsyncFunc[TWorkflowInput, R]]:
     return asyncio.iscoroutinefunction(fn)


 def is_sync_fn(
-    fn: TaskFunc[TWorkflowInput, R]
+    fn: TaskFunc[TWorkflowInput, R],
 ) -> TypeGuard[SyncFunc[TWorkflowInput, R]]:
     return not asyncio.iscoroutinefunction(fn)

@@ -112,12 +130,12 @@ DurableTaskFunc = Union[


 def is_durable_async_fn(
-    fn: Callable[..., Any]
+    fn: Callable[..., Any],
 ) -> TypeGuard[DurableAsyncFunc[TWorkflowInput, R]]:
     return asyncio.iscoroutinefunction(fn)


 def is_durable_sync_fn(
-    fn: DurableTaskFunc[TWorkflowInput, R]
+    fn: DurableTaskFunc[TWorkflowInput, R],
 ) -> TypeGuard[DurableSyncFunc[TWorkflowInput, R]]:
     return not asyncio.iscoroutinefunction(fn)
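The new `DefaultFilter` model above JSON-encodes its `payload` mapping before handing it to the generated `DefaultFilter` protobuf message. A minimal sketch of constructing one and converting it (field values are illustrative):

# Hedged sketch, not taken from the package.
from hatchet_sdk.runnables.types import DefaultFilter

f = DefaultFilter(
    expression="true",           # illustrative filter expression
    scope="example-scope",
    payload={"threshold": 0},    # serialized via json.dumps(..., default=str) in to_proto()
)

proto = f.to_proto()  # returns the workflows_pb2 DefaultFilter message used by the gRPC API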