hatchet-sdk 1.0.0__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hatchet-sdk might be problematic; refer to the registry's advisory page for more details.
- hatchet_sdk/__init__.py +32 -16
- hatchet_sdk/client.py +25 -63
- hatchet_sdk/clients/admin.py +203 -142
- hatchet_sdk/clients/dispatcher/action_listener.py +42 -42
- hatchet_sdk/clients/dispatcher/dispatcher.py +18 -16
- hatchet_sdk/clients/durable_event_listener.py +327 -0
- hatchet_sdk/clients/rest/__init__.py +12 -1
- hatchet_sdk/clients/rest/api/log_api.py +258 -0
- hatchet_sdk/clients/rest/api/task_api.py +32 -6
- hatchet_sdk/clients/rest/api/workflow_runs_api.py +626 -0
- hatchet_sdk/clients/rest/models/__init__.py +12 -1
- hatchet_sdk/clients/rest/models/v1_log_line.py +94 -0
- hatchet_sdk/clients/rest/models/v1_log_line_level.py +39 -0
- hatchet_sdk/clients/rest/models/v1_log_line_list.py +110 -0
- hatchet_sdk/clients/rest/models/v1_task_summary.py +80 -64
- hatchet_sdk/clients/rest/models/v1_trigger_workflow_run_request.py +95 -0
- hatchet_sdk/clients/rest/models/v1_workflow_run_display_name.py +98 -0
- hatchet_sdk/clients/rest/models/v1_workflow_run_display_name_list.py +114 -0
- hatchet_sdk/clients/rest/models/workflow_run_shape_item_for_workflow_run_details.py +9 -4
- hatchet_sdk/clients/rest/models/workflow_runs_metrics.py +5 -1
- hatchet_sdk/clients/run_event_listener.py +0 -1
- hatchet_sdk/clients/v1/api_client.py +81 -0
- hatchet_sdk/context/context.py +86 -159
- hatchet_sdk/contracts/dispatcher_pb2_grpc.py +1 -1
- hatchet_sdk/contracts/events_pb2.py +2 -2
- hatchet_sdk/contracts/events_pb2_grpc.py +1 -1
- hatchet_sdk/contracts/v1/dispatcher_pb2.py +36 -0
- hatchet_sdk/contracts/v1/dispatcher_pb2.pyi +38 -0
- hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py +145 -0
- hatchet_sdk/contracts/v1/shared/condition_pb2.py +39 -0
- hatchet_sdk/contracts/v1/shared/condition_pb2.pyi +72 -0
- hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py +29 -0
- hatchet_sdk/contracts/v1/workflows_pb2.py +67 -0
- hatchet_sdk/contracts/v1/workflows_pb2.pyi +228 -0
- hatchet_sdk/contracts/v1/workflows_pb2_grpc.py +234 -0
- hatchet_sdk/contracts/workflows_pb2_grpc.py +1 -1
- hatchet_sdk/features/cron.py +91 -121
- hatchet_sdk/features/logs.py +16 -0
- hatchet_sdk/features/metrics.py +75 -0
- hatchet_sdk/features/rate_limits.py +45 -0
- hatchet_sdk/features/runs.py +221 -0
- hatchet_sdk/features/scheduled.py +114 -131
- hatchet_sdk/features/workers.py +41 -0
- hatchet_sdk/features/workflows.py +55 -0
- hatchet_sdk/hatchet.py +463 -165
- hatchet_sdk/opentelemetry/instrumentor.py +8 -13
- hatchet_sdk/rate_limit.py +33 -39
- hatchet_sdk/runnables/contextvars.py +12 -0
- hatchet_sdk/runnables/standalone.py +192 -0
- hatchet_sdk/runnables/task.py +144 -0
- hatchet_sdk/runnables/types.py +138 -0
- hatchet_sdk/runnables/workflow.py +771 -0
- hatchet_sdk/utils/aio_utils.py +0 -79
- hatchet_sdk/utils/proto_enums.py +0 -7
- hatchet_sdk/utils/timedelta_to_expression.py +23 -0
- hatchet_sdk/utils/typing.py +2 -2
- hatchet_sdk/v0/clients/rest_client.py +9 -0
- hatchet_sdk/v0/worker/action_listener_process.py +18 -2
- hatchet_sdk/waits.py +120 -0
- hatchet_sdk/worker/action_listener_process.py +64 -30
- hatchet_sdk/worker/runner/run_loop_manager.py +35 -26
- hatchet_sdk/worker/runner/runner.py +72 -55
- hatchet_sdk/worker/runner/utils/capture_logs.py +3 -11
- hatchet_sdk/worker/worker.py +155 -118
- hatchet_sdk/workflow_run.py +4 -5
- {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/METADATA +1 -2
- {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/RECORD +69 -43
- {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/entry_points.txt +2 -0
- hatchet_sdk/clients/rest_client.py +0 -636
- hatchet_sdk/semver.py +0 -30
- hatchet_sdk/worker/runner/utils/error_with_traceback.py +0 -6
- hatchet_sdk/workflow.py +0 -527
- {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/WHEEL +0 -0
hatchet_sdk/workflow.py
DELETED
|
@@ -1,527 +0,0 @@
|
|
|
1
|
-
import asyncio
|
|
2
|
-
from dataclasses import dataclass, field
|
|
3
|
-
from datetime import datetime
|
|
4
|
-
from enum import Enum
|
|
5
|
-
from typing import (
|
|
6
|
-
TYPE_CHECKING,
|
|
7
|
-
Any,
|
|
8
|
-
Awaitable,
|
|
9
|
-
Callable,
|
|
10
|
-
Generic,
|
|
11
|
-
ParamSpec,
|
|
12
|
-
Type,
|
|
13
|
-
TypeGuard,
|
|
14
|
-
TypeVar,
|
|
15
|
-
Union,
|
|
16
|
-
cast,
|
|
17
|
-
)
|
|
18
|
-
|
|
19
|
-
from google.protobuf import timestamp_pb2
|
|
20
|
-
from pydantic import BaseModel, ConfigDict
|
|
21
|
-
|
|
22
|
-
from hatchet_sdk.clients.admin import (
|
|
23
|
-
ChildTriggerWorkflowOptions,
|
|
24
|
-
ChildWorkflowRunDict,
|
|
25
|
-
ScheduleTriggerWorkflowOptions,
|
|
26
|
-
)
|
|
27
|
-
from hatchet_sdk.context.context import Context
|
|
28
|
-
from hatchet_sdk.contracts.workflows_pb2 import (
|
|
29
|
-
ConcurrencyLimitStrategy as ConcurrencyLimitStrategyProto,
|
|
30
|
-
)
|
|
31
|
-
from hatchet_sdk.contracts.workflows_pb2 import (
|
|
32
|
-
CreateStepRateLimit,
|
|
33
|
-
CreateWorkflowJobOpts,
|
|
34
|
-
CreateWorkflowStepOpts,
|
|
35
|
-
CreateWorkflowVersionOpts,
|
|
36
|
-
DesiredWorkerLabels,
|
|
37
|
-
)
|
|
38
|
-
from hatchet_sdk.contracts.workflows_pb2 import StickyStrategy as StickyStrategyProto
|
|
39
|
-
from hatchet_sdk.contracts.workflows_pb2 import (
|
|
40
|
-
WorkflowConcurrencyOpts,
|
|
41
|
-
WorkflowKind,
|
|
42
|
-
WorkflowVersion,
|
|
43
|
-
)
|
|
44
|
-
from hatchet_sdk.labels import DesiredWorkerLabel
|
|
45
|
-
from hatchet_sdk.logger import logger
|
|
46
|
-
from hatchet_sdk.rate_limit import RateLimit
|
|
47
|
-
from hatchet_sdk.utils.proto_enums import convert_python_enum_to_proto, maybe_int_to_str
|
|
48
|
-
from hatchet_sdk.workflow_run import WorkflowRunRef
|
|
49
|
-
|
|
50
|
-
if TYPE_CHECKING:
    # Imported only for static type checking; avoids a circular import at runtime.
    from hatchet_sdk import Hatchet

# R: return type of a step function; P: parameter spec for decorator helpers.
R = TypeVar("R")
P = ParamSpec("P")
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
class EmptyModel(BaseModel):
    """Fallback input model for workflows that declare no input validator.

    `extra="allow"` lets arbitrary payload keys pass validation untouched.
    """

    model_config = ConfigDict(extra="allow")
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
class StickyStrategy(str, Enum):
    """Worker-stickiness strategy for workflow runs.

    The str mixin makes members compare equal to their wire values, so they
    can be passed anywhere a plain string is accepted.
    """

    # Prefer the same worker, but fall back to another one if unavailable.
    SOFT = "SOFT"
    # Require the same worker for all steps of a run.
    HARD = "HARD"
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
class ConcurrencyLimitStrategy(str, Enum):
    """What to do with runs that exceed a workflow's concurrency limit.

    String-valued so members serialize directly as their names.
    """

    # Cancel currently running workflow runs to make room for the new one.
    CANCEL_IN_PROGRESS = "CANCEL_IN_PROGRESS"
    # Discard the incoming run.
    DROP_NEWEST = "DROP_NEWEST"
    # Queue the incoming run until capacity frees up.
    QUEUE_NEWEST = "QUEUE_NEWEST"
    # Round-robin across concurrency groups.
    GROUP_ROUND_ROBIN = "GROUP_ROUND_ROBIN"
    # Cancel the incoming run instead of an in-progress one.
    CANCEL_NEWEST = "CANCEL_NEWEST"
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
class ConcurrencyExpression(BaseModel):
    """
    Defines concurrency limits for a workflow using a CEL expression.

    Args:
        expression (str): CEL expression to determine concurrency grouping. (i.e. "input.user_id")
        max_runs (int): Maximum number of concurrent workflow runs.
        limit_strategy (ConcurrencyLimitStrategy): Strategy for handling limit violations.

    Example:
        ConcurrencyExpression(
            expression="input.user_id",
            max_runs=5,
            limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS,
        )
    """

    # NOTE: pydantic models take keyword arguments only, hence the keyword
    # form in the example above.
    expression: str
    max_runs: int
    limit_strategy: ConcurrencyLimitStrategy
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
# Workflow input payloads must be pydantic models (validated via model_validate).
TWorkflowInput = TypeVar("TWorkflowInput", bound=BaseModel)
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
class WorkflowConfig(BaseModel):
    """Declarative configuration for a workflow: name, triggers, timeouts,
    stickiness, priority, concurrency, and the input validation model.

    Note: the mutable list defaults below are safe — pydantic deep-copies
    field defaults per instance.
    """

    model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)

    # Workflow name; BaseWorkflow falls back to the subclass name when empty.
    name: str = ""
    # Event keys that trigger this workflow.
    on_events: list[str] = []
    # Cron expressions that schedule this workflow.
    on_crons: list[str] = []
    version: str = ""
    # Run timeout as a duration string.
    timeout: str = "60m"
    # Presumably how long a run may sit scheduled before expiring — TODO confirm.
    schedule_timeout: str = "5m"
    sticky: StickyStrategy | None = None
    # Clamped to the range 1..3 by BaseWorkflow.validate_priority.
    default_priority: int = 1
    concurrency: ConcurrencyExpression | None = None
    # Pydantic model used to validate workflow input payloads.
    input_validator: Type[BaseModel] = EmptyModel
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
class StepType(str, Enum):
    """Role a step plays within its workflow.

    Values double as wire strings thanks to the str mixin.
    """

    # An ordinary DAG step.
    DEFAULT = "default"
    # A step that computes the concurrency group key.
    CONCURRENCY = "concurrency"
    # A step invoked only when the workflow fails.
    ON_FAILURE = "on_failure"
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
# Signatures of step implementations: each receives the workflow instance
# (untyped here) and the run Context, returning R directly (sync) or via an
# awaitable (async).
AsyncFunc = Callable[[Any, Context], Awaitable[R]]
SyncFunc = Callable[[Any, Context], R]
StepFunc = Union[AsyncFunc[R], SyncFunc[R]]
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
def is_async_fn(fn: StepFunc[R]) -> TypeGuard[AsyncFunc[R]]:
    """Narrow *fn* to an async step callable.

    True exactly when *fn* is a coroutine function, letting the type
    checker treat it as ``AsyncFunc[R]`` afterwards.
    """
    fn_is_coroutine = asyncio.iscoroutinefunction(fn)
    return fn_is_coroutine
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
def is_sync_fn(fn: StepFunc[R]) -> TypeGuard[SyncFunc[R]]:
    """Narrow *fn* to a synchronous step callable.

    Exact complement of :func:`is_async_fn`.
    """
    fn_is_coroutine = asyncio.iscoroutinefunction(fn)
    return not fn_is_coroutine
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
class Step(Generic[R]):
    """One step of a workflow: a named wrapper around a sync or async callable.

    A step is only invokable after its owning workflow has been instantiated,
    which registers the step by assigning its ``workflow`` attribute.

    Fixes vs. previous revision:
    - mutable default arguments (``parents``, ``rate_limits``,
      ``desired_worker_labels``) replaced with ``None`` sentinels so each
      instance gets a fresh container;
    - the sync-call error message pointed at a nonexistent ``acall`` method;
      the async entry point is ``aio_call``.
    """

    def __init__(
        self,
        fn: Callable[[Any, Context], R] | Callable[[Any, Context], Awaitable[R]],
        type: StepType,
        name: str = "",
        timeout: str = "60m",
        parents: list[str] | None = None,
        retries: int = 0,
        rate_limits: list[CreateStepRateLimit] | None = None,
        desired_worker_labels: dict[str, DesiredWorkerLabels] | None = None,
        backoff_factor: float | None = None,
        backoff_max_seconds: int | None = None,
        concurrency__max_runs: int | None = None,
        concurrency__limit_strategy: ConcurrencyLimitStrategy | None = None,
    ) -> None:
        self.fn = fn
        self.is_async_function = is_async_fn(fn)
        # Assigned by BaseWorkflow.__init__ when the workflow is instantiated.
        self.workflow: Union["BaseWorkflow", None] = None

        self.type = type
        self.timeout = timeout
        self.name = name
        # Normalize None sentinels to fresh per-instance containers
        # (avoids the shared-mutable-default-argument bug).
        self.parents = parents if parents is not None else []
        self.retries = retries
        self.rate_limits = rate_limits if rate_limits is not None else []
        self.desired_worker_labels = (
            desired_worker_labels if desired_worker_labels is not None else {}
        )
        self.backoff_factor = backoff_factor
        self.backoff_max_seconds = backoff_max_seconds
        self.concurrency__max_runs = concurrency__max_runs
        self.concurrency__limit_strategy = concurrency__limit_strategy

    def call(self, ctx: Context) -> R:
        """Run a synchronous step and return its result.

        Raises:
            ValueError: if the step has not been registered with a workflow.
            TypeError: if the wrapped function is async (use ``aio_call``).
        """
        if not self.is_registered:
            raise ValueError(
                "Only steps that have been registered can be called. To register this step, instantiate its corresponding workflow."
            )

        if self.is_async_function:
            raise TypeError(
                f"{self.name} is not a sync function. Use `aio_call` instead."
            )

        # The TypeGuard narrows the union so the call type-checks.
        sync_fn = self.fn
        if is_sync_fn(sync_fn):
            return sync_fn(self.workflow, ctx)

        raise TypeError(f"{self.name} is not a sync function. Use `aio_call` instead.")

    async def aio_call(self, ctx: Context) -> R:
        """Run an asynchronous step and return its awaited result.

        Raises:
            ValueError: if the step has not been registered with a workflow.
            TypeError: if the wrapped function is sync (use ``call``).
        """
        if not self.is_registered:
            raise ValueError(
                "Only steps that have been registered can be called. To register this step, instantiate its corresponding workflow."
            )

        if not self.is_async_function:
            raise TypeError(
                f"{self.name} is not an async function. Use `call` instead."
            )

        async_fn = self.fn

        if is_async_fn(async_fn):
            return await async_fn(self.workflow, ctx)

        raise TypeError(f"{self.name} is not an async function. Use `call` instead.")

    @property
    def is_registered(self) -> bool:
        """True once the owning workflow has claimed this step."""
        return self.workflow is not None
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
class Task(Generic[R, TWorkflowInput]):
    """A single-step workflow declared from a bare function.

    Wraps *fn* (which only takes a Context) in a Step registered through
    ``hatchet.step`` and builds the WorkflowConfig describing the one-step
    workflow.

    Fix vs. previous revision: mutable default arguments (``on_events``,
    ``on_crons``, ``rate_limits``, ``desired_worker_labels``) replaced with
    ``None`` sentinels normalized per instance.
    """

    def __init__(
        self,
        fn: Callable[[Context], R],
        hatchet: "Hatchet",
        name: str = "",
        on_events: list[str] | None = None,
        on_crons: list[str] | None = None,
        version: str = "",
        timeout: str = "60m",
        schedule_timeout: str = "5m",
        sticky: StickyStrategy | None = None,
        retries: int = 0,
        rate_limits: list[RateLimit] | None = None,
        desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None,
        concurrency: ConcurrencyExpression | None = None,
        on_failure: Union["Task[R, TWorkflowInput]", None] = None,
        default_priority: int = 1,
        input_validator: Type[TWorkflowInput] | None = None,
        backoff_factor: float | None = None,
        backoff_max_seconds: int | None = None,
    ) -> None:
        # Fresh containers per instance (avoids shared-mutable-default bug).
        on_events = on_events if on_events is not None else []
        on_crons = on_crons if on_crons is not None else []
        rate_limits = rate_limits if rate_limits is not None else []
        desired_worker_labels = (
            desired_worker_labels if desired_worker_labels is not None else {}
        )

        # Adapter: hatchet.step expects (workflow_instance, context) while the
        # task function only takes the context.
        def func(_: Any, context: Context) -> R:
            return fn(context)

        self.hatchet = hatchet
        self.step: Step[R] = hatchet.step(
            name=name or fn.__name__,
            timeout=timeout,
            retries=retries,
            rate_limits=rate_limits,
            desired_worker_labels=desired_worker_labels,
            backoff_factor=backoff_factor,
            backoff_max_seconds=backoff_max_seconds,
        )(func)
        self.on_failure_step = on_failure
        self.workflow_config = WorkflowConfig(
            name=name or fn.__name__,
            on_events=on_events,
            on_crons=on_crons,
            version=version,
            timeout=timeout,
            schedule_timeout=schedule_timeout,
            sticky=sticky,
            default_priority=default_priority,
            concurrency=concurrency,
            input_validator=input_validator or cast(Type[TWorkflowInput], EmptyModel),
        )
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
@dataclass
class SpawnWorkflowInput(Generic[TWorkflowInput]):
    """Parameters for spawning one child workflow run.

    Used by WorkflowDeclaration.spawn_many / aio_spawn_many to fan out runs.
    """

    # Validated input payload for the child run.
    input: TWorkflowInput
    # Optional key forwarded to the spawn call — presumably used for
    # child-run deduplication; confirm against ChildWorkflowRunDict usage.
    key: str | None = None
    # default_factory keeps the options instance per-object (no shared mutable default).
    options: ChildTriggerWorkflowOptions = field(
        default_factory=ChildTriggerWorkflowOptions
    )
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
class WorkflowDeclaration(Generic[TWorkflowInput]):
    """Typed handle for triggering, spawning, and scheduling a workflow.

    Binds a WorkflowConfig to an optional Hatchet client; methods that hit
    the API raise ValueError when no client was provided.

    NOTE(review): several methods use ``ChildTriggerWorkflowOptions()`` /
    ``ScheduleTriggerWorkflowOptions()`` directly as default argument values.
    Those instances are created once at definition time and shared across
    calls — harmless only if nothing downstream mutates them; confirm.
    """

    def __init__(self, config: WorkflowConfig, hatchet: Union["Hatchet", None]):
        self.config = config
        self.hatchet = hatchet

    def run(self, input: TWorkflowInput | None = None) -> WorkflowRunRef:
        """Trigger a run of this workflow, returning a reference to it.

        Raises:
            ValueError: if no Hatchet client was supplied at construction.
        """
        if not self.hatchet:
            raise ValueError("Hatchet client is not initialized.")

        return self.hatchet.admin.run_workflow(
            workflow_name=self.config.name, input=input.model_dump() if input else {}
        )

    def get_workflow_input(self, ctx: Context) -> TWorkflowInput:
        """Validate and return the run's input using the configured model."""
        return cast(
            TWorkflowInput,
            self.config.input_validator.model_validate(ctx.workflow_input),
        )

    async def aio_spawn_many(
        self, ctx: Context, spawn_inputs: list[SpawnWorkflowInput[TWorkflowInput]]
    ) -> list[WorkflowRunRef]:
        """Spawn many child runs of this workflow from within a running step (async)."""
        inputs = [
            ChildWorkflowRunDict(
                workflow_name=self.config.name,
                input=spawn_input.input.model_dump(),
                key=spawn_input.key,
                options=spawn_input.options,
            )
            for spawn_input in spawn_inputs
        ]
        return await ctx.aio_spawn_workflows(inputs)

    async def aio_spawn_one(
        self,
        ctx: Context,
        input: TWorkflowInput,
        key: str | None = None,
        options: ChildTriggerWorkflowOptions = ChildTriggerWorkflowOptions(),
    ) -> WorkflowRunRef:
        """Spawn a single child run of this workflow (async)."""
        return await ctx.aio_spawn_workflow(
            workflow_name=self.config.name,
            input=input.model_dump(),
            key=key,
            options=options,
        )

    def spawn_many(
        self, ctx: Context, spawn_inputs: list[SpawnWorkflowInput[TWorkflowInput]]
    ) -> list[WorkflowRunRef]:
        """Spawn many child runs of this workflow (sync mirror of aio_spawn_many)."""
        inputs = [
            ChildWorkflowRunDict(
                workflow_name=self.config.name,
                input=spawn_input.input.model_dump(),
                key=spawn_input.key,
                options=spawn_input.options,
            )
            for spawn_input in spawn_inputs
        ]

        return ctx.spawn_workflows(inputs)

    def spawn_one(
        self,
        ctx: Context,
        input: TWorkflowInput,
        key: str | None = None,
        options: ChildTriggerWorkflowOptions = ChildTriggerWorkflowOptions(),
    ) -> WorkflowRunRef:
        """Spawn a single child run of this workflow (sync mirror of aio_spawn_one)."""
        return ctx.spawn_workflow(
            workflow_name=self.config.name,
            input=input.model_dump(),
            key=key,
            options=options,
        )

    def schedule(
        self,
        schedules: list[datetime | timestamp_pb2.Timestamp],
        input: TWorkflowInput,
        options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
    ) -> WorkflowVersion:
        """Schedule future runs of this workflow at the given times.

        Raises:
            ValueError: if no Hatchet client was supplied at construction.
        """
        if not self.hatchet:
            raise ValueError("Hatchet client is not initialized.")

        return self.hatchet.admin.schedule_workflow(
            name=self.config.name,
            schedules=schedules,
            input=input.model_dump(),
            options=options,
        )

    async def aio_schedule(
        self,
        schedules: list[datetime | timestamp_pb2.Timestamp],
        input: TWorkflowInput,
        options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
    ) -> WorkflowVersion:
        """Async mirror of :meth:`schedule`.

        Raises:
            ValueError: if no Hatchet client was supplied at construction.
        """
        if not self.hatchet:
            raise ValueError("Hatchet client is not initialized.")

        return await self.hatchet.admin.aio_schedule_workflow(
            name=self.config.name,
            schedules=schedules,
            input=input.model_dump(),
            options=options,
        )
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
class BaseWorkflow:
    """
    A Hatchet workflow implementation base. This class should be inherited by all workflow implementations.

    Configuration is passed to the workflow implementation via the `config` attribute.
    """

    # NOTE(review): class-level attribute shared by every subclass that does
    # not override it, and __init__ mutates config.name in place — confirm each
    # subclass assigns its own WorkflowConfig instance.
    config: WorkflowConfig = WorkflowConfig()

    def __init__(self) -> None:
        # Default the workflow name to the subclass name when not configured.
        self.config.name = self.config.name or str(self.__class__.__name__)

        # Registering each step lets Step.call/aio_call pass this instance as
        # the first argument of the wrapped function.
        for step in self.steps:
            step.workflow = self

    def get_service_name(self, namespace: str) -> str:
        """Service name used to namespace this workflow's step actions."""
        return f"{namespace}{self.config.name.lower()}"

    def _get_steps_by_type(self, step_type: StepType) -> list[Step[Any]]:
        # NOTE(review): scans only the immediate class dict, so Step attributes
        # defined on parent classes are not discovered — confirm intended.
        return [
            attr
            for _, attr in self.__class__.__dict__.items()
            if isinstance(attr, Step) and attr.type == step_type
        ]

    @property
    def on_failure_steps(self) -> list[Step[Any]]:
        """Steps to run only when the workflow fails."""
        return self._get_steps_by_type(StepType.ON_FAILURE)

    @property
    def concurrency_actions(self) -> list[Step[Any]]:
        """Steps that compute concurrency group keys."""
        return self._get_steps_by_type(StepType.CONCURRENCY)

    @property
    def default_steps(self) -> list[Step[Any]]:
        """Ordinary DAG steps."""
        return self._get_steps_by_type(StepType.DEFAULT)

    @property
    def steps(self) -> list[Step[Any]]:
        """All steps, in default/concurrency/on-failure order."""
        return self.default_steps + self.concurrency_actions + self.on_failure_steps

    def create_action_name(self, namespace: str, step: Step[Any]) -> str:
        """Fully qualified action id: ``<service_name>:<step_name>``."""
        return self.get_service_name(namespace) + ":" + step.name

    def get_name(self, namespace: str) -> str:
        """Namespaced workflow name."""
        return namespace + self.config.name

    def validate_concurrency_actions(
        self, service_name: str
    ) -> WorkflowConcurrencyOpts | None:
        """Build concurrency options from either a concurrency step or a CEL
        expression — configuring both is an error.

        Returns None when no concurrency configuration exists.
        """
        if len(self.concurrency_actions) > 0 and self.config.concurrency:
            raise ValueError(
                "Error: Both concurrencyActions and concurrency_expression are defined. Please use only one concurrency configuration method."
            )

        if len(self.concurrency_actions) > 0:
            # Only the first concurrency step is consulted.
            action = self.concurrency_actions[0]

            return WorkflowConcurrencyOpts(
                action=service_name + ":" + action.name,
                max_runs=action.concurrency__max_runs,
                limit_strategy=maybe_int_to_str(
                    convert_python_enum_to_proto(
                        action.concurrency__limit_strategy,
                        ConcurrencyLimitStrategyProto,
                    )
                ),
            )

        if self.config.concurrency:
            return WorkflowConcurrencyOpts(
                expression=self.config.concurrency.expression,
                max_runs=self.config.concurrency.max_runs,
                limit_strategy=self.config.concurrency.limit_strategy,
            )

        return None

    def validate_on_failure_steps(
        self, name: str, service_name: str
    ) -> CreateWorkflowJobOpts | None:
        """Build the on-failure job from the first on-failure step, if any."""
        if not self.on_failure_steps:
            return None

        # Only the first on-failure step is used; additional ones are ignored.
        on_failure_step = next(iter(self.on_failure_steps))

        return CreateWorkflowJobOpts(
            name=name + "-on-failure",
            steps=[
                CreateWorkflowStepOpts(
                    readable_id=on_failure_step.name,
                    action=service_name + ":" + on_failure_step.name,
                    timeout=on_failure_step.timeout or "60s",
                    inputs="{}",
                    parents=[],
                    retries=on_failure_step.retries,
                    rate_limits=on_failure_step.rate_limits,
                    backoff_factor=on_failure_step.backoff_factor,
                    backoff_max_seconds=on_failure_step.backoff_max_seconds,
                )
            ],
        )

    def validate_priority(self, default_priority: int | None) -> int | None:
        """Clamp the default priority into [1, 3], warning when adjusted.

        NOTE(review): the truthiness check means a priority of 0 yields None
        (treated as unset) rather than being clamped to 1 — confirm intended.
        """
        validated_priority = (
            max(1, min(3, default_priority)) if default_priority else None
        )
        if validated_priority != default_priority:
            logger.warning(
                "Warning: Default Priority Must be between 1 and 3 -- inclusively. Adjusted to be within the range."
            )

        return validated_priority

    def get_create_opts(self, namespace: str) -> CreateWorkflowVersionOpts:
        """Assemble the full workflow registration payload for the API."""
        service_name = self.get_service_name(namespace)

        name = self.get_name(namespace)
        # Event triggers are namespaced the same way as the workflow name.
        event_triggers = [namespace + event for event in self.config.on_events]

        # Only DEFAULT steps become DAG steps; concurrency and on-failure
        # steps are handled separately below.
        create_step_opts = [
            CreateWorkflowStepOpts(
                readable_id=step.name,
                action=service_name + ":" + step.name,
                timeout=step.timeout or "60s",
                inputs="{}",
                parents=[x for x in step.parents],
                retries=step.retries,
                rate_limits=step.rate_limits,
                worker_labels=step.desired_worker_labels,
                backoff_factor=step.backoff_factor,
                backoff_max_seconds=step.backoff_max_seconds,
            )
            for step in self.steps
            if step.type == StepType.DEFAULT
        ]

        concurrency = self.validate_concurrency_actions(service_name)
        on_failure_job = self.validate_on_failure_steps(name, service_name)
        validated_priority = self.validate_priority(self.config.default_priority)

        return CreateWorkflowVersionOpts(
            name=name,
            kind=WorkflowKind.DAG,
            version=self.config.version,
            event_triggers=event_triggers,
            cron_triggers=self.config.on_crons,
            schedule_timeout=self.config.schedule_timeout,
            sticky=maybe_int_to_str(
                convert_python_enum_to_proto(self.config.sticky, StickyStrategyProto)
            ),
            jobs=[
                CreateWorkflowJobOpts(
                    name=name,
                    steps=create_step_opts,
                )
            ],
            on_failure_job=on_failure_job,
            concurrency=concurrency,
            default_priority=validated_priority,
        )
|
|
File without changes
|