highway-dsl 1.0.2__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of highway-dsl might be problematic.
- highway_dsl/__init__.py +13 -7
- highway_dsl/workflow_dsl.py +331 -71
- highway_dsl-1.2.0.dist-info/METADATA +481 -0
- highway_dsl-1.2.0.dist-info/RECORD +7 -0
- highway_dsl-1.0.2.dist-info/METADATA +0 -228
- highway_dsl-1.0.2.dist-info/RECORD +0 -7
- {highway_dsl-1.0.2.dist-info → highway_dsl-1.2.0.dist-info}/WHEEL +0 -0
- {highway_dsl-1.0.2.dist-info → highway_dsl-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {highway_dsl-1.0.2.dist-info → highway_dsl-1.2.0.dist-info}/top_level.txt +0 -0
highway_dsl/__init__.py CHANGED

@@ -1,15 +1,18 @@
 from .workflow_dsl import (
-    Workflow,
-    WorkflowBuilder,
-    TaskOperator,
     ConditionOperator,
-    ParallelOperator,
-    WaitOperator,
+    EmitEventOperator,
     ForEachOperator,
-    WhileOperator,
+    OperatorType,
+    ParallelOperator,
     RetryPolicy,
+    SwitchOperator,
+    TaskOperator,
     TimeoutPolicy,
-    OperatorType,
+    WaitForEventOperator,
+    WaitOperator,
+    WhileOperator,
+    Workflow,
+    WorkflowBuilder,
 )

 __all__ = [
@@ -22,6 +25,9 @@ __all__ = [
     "WaitOperator",
     "ForEachOperator",
     "WhileOperator",
+    "EmitEventOperator",
+    "WaitForEventOperator",
+    "SwitchOperator",
     "RetryPolicy",
     "TimeoutPolicy",
     "OperatorType",
highway_dsl/workflow_dsl.py CHANGED

@@ -1,11 +1,12 @@
 # workflow_dsl.py
-from …
-from …
+from abc import ABC
+from collections.abc import Callable
 from datetime import datetime, timedelta
+from enum import Enum
+from typing import Any, Optional, Union
+
 import yaml
-import …
-from abc import ABC, abstractmethod
-from pydantic import BaseModel, Field, model_validator, ConfigDict
+from pydantic import BaseModel, ConfigDict, Field, model_validator


 class OperatorType(Enum):
@@ -17,6 +18,8 @@ class OperatorType(Enum):
     SWITCH = "switch"
     TRY_CATCH = "try_catch"
     WHILE = "while"
+    EMIT_EVENT = "emit_event"
+    WAIT_FOR_EVENT = "wait_for_event"


 class RetryPolicy(BaseModel):
@@ -28,19 +31,23 @@ class RetryPolicy(BaseModel):
 class TimeoutPolicy(BaseModel):
     timeout: timedelta = Field(..., description="Timeout duration")
     kill_on_timeout: bool = Field(
-        True, description="Whether to kill the task on timeout"
+        True, description="Whether to kill the task on timeout",
     )


 class BaseOperator(BaseModel, ABC):
     task_id: str
     operator_type: OperatorType
-    dependencies: …
-    retry_policy: …
-    timeout_policy: …
-    metadata: …
+    dependencies: list[str] = Field(default_factory=list)
+    retry_policy: RetryPolicy | None = None
+    timeout_policy: TimeoutPolicy | None = None
+    metadata: dict[str, Any] = Field(default_factory=dict)
+    description: str = Field(default="", description="Task description")
+    # Phase 3: Callback hooks
+    on_success_task_id: str | None = Field(None, description="Task to run on success")
+    on_failure_task_id: str | None = Field(None, description="Task to run on failure")
     is_internal_loop_task: bool = Field(
-        default=False, exclude=True
+        default=False, exclude=True,
     )  # Mark if task is internal to a loop

     model_config = ConfigDict(use_enum_values=True, arbitrary_types_allowed=True)
@@ -48,21 +55,21 @@ class BaseOperator(BaseModel, ABC):

 class TaskOperator(BaseOperator):
     function: str
-    args: …
-    kwargs: …
-    result_key: …
+    args: list[Any] = Field(default_factory=list)
+    kwargs: dict[str, Any] = Field(default_factory=dict)
+    result_key: str | None = None
     operator_type: OperatorType = Field(OperatorType.TASK, frozen=True)


 class ConditionOperator(BaseOperator):
     condition: str
-    if_true: …
-    if_false: …
+    if_true: str | None
+    if_false: str | None
     operator_type: OperatorType = Field(OperatorType.CONDITION, frozen=True)


 class WaitOperator(BaseOperator):
-    wait_for: …
+    wait_for: timedelta | datetime | str
     operator_type: OperatorType = Field(OperatorType.WAIT, frozen=True)

     @model_validator(mode="before")
@@ -77,7 +84,7 @@ class WaitOperator(BaseOperator):
             data["wait_for"] = datetime.fromisoformat(wait_for.split(":", 1)[1])
         return data

-    def model_dump(self, **kwargs) -> …
+    def model_dump(self, **kwargs: Any) -> dict[str, Any]:
         data = super().model_dump(**kwargs)
         wait_for = data["wait_for"]
         if isinstance(wait_for, timedelta):
@@ -88,13 +95,14 @@


 class ParallelOperator(BaseOperator):
-    branches: …
+    branches: dict[str, list[str]] = Field(default_factory=dict)
+    timeout: int | None = Field(None, description="Optional timeout in seconds for branch execution")
     operator_type: OperatorType = Field(OperatorType.PARALLEL, frozen=True)


 class ForEachOperator(BaseOperator):
     items: str
-    loop_body: …
+    loop_body: list[
         Union[
             TaskOperator,
             ConditionOperator,
@@ -102,6 +110,9 @@ class ForEachOperator(BaseOperator):
             ParallelOperator,
             "ForEachOperator",
             "WhileOperator",
+            "EmitEventOperator",
+            "WaitForEventOperator",
+            "SwitchOperator",
         ]
     ] = Field(default_factory=list)
     operator_type: OperatorType = Field(OperatorType.FOREACH, frozen=True)
@@ -109,7 +120,7 @@

 class WhileOperator(BaseOperator):
     condition: str
-    loop_body: …
+    loop_body: list[
         Union[
             TaskOperator,
             ConditionOperator,
@@ -117,41 +128,109 @@ class WhileOperator(BaseOperator):
             ParallelOperator,
             ForEachOperator,
             "WhileOperator",
+            "EmitEventOperator",
+            "WaitForEventOperator",
+            "SwitchOperator",
         ]
     ] = Field(default_factory=list)
     operator_type: OperatorType = Field(OperatorType.WHILE, frozen=True)


+class EmitEventOperator(BaseOperator):
+    """Phase 2: Emit an event that other workflows can wait for."""
+    event_name: str = Field(..., description="Name of the event to emit")
+    payload: dict[str, Any] = Field(default_factory=dict, description="Event payload data")
+    operator_type: OperatorType = Field(OperatorType.EMIT_EVENT, frozen=True)
+
+
+class WaitForEventOperator(BaseOperator):
+    """Phase 2: Wait for an external event with optional timeout."""
+    event_name: str = Field(..., description="Name of the event to wait for")
+    timeout_seconds: int | None = Field(None, description="Timeout in seconds (None = wait forever)")
+    operator_type: OperatorType = Field(OperatorType.WAIT_FOR_EVENT, frozen=True)
+
+
+class SwitchOperator(BaseOperator):
+    """Phase 4: Multi-branch switch/case operator."""
+    switch_on: str = Field(..., description="Expression to evaluate for switch")
+    cases: dict[str, str] = Field(default_factory=dict, description="Map of case values to task IDs")
+    default: str | None = Field(None, description="Default task ID if no case matches")
+    operator_type: OperatorType = Field(OperatorType.SWITCH, frozen=True)
+
+
 class Workflow(BaseModel):
     name: str
-    version: str = "1.…
+    version: str = "1.1.0"
     description: str = ""
-    tasks: …
+    tasks: dict[
         str,
-        Union[
-            TaskOperator,
-            ConditionOperator,
-            WaitOperator,
-            ParallelOperator,
-            ForEachOperator,
-            WhileOperator,
-        ],
+        TaskOperator | ConditionOperator | WaitOperator | ParallelOperator | ForEachOperator | WhileOperator | EmitEventOperator | WaitForEventOperator | SwitchOperator,
     ] = Field(default_factory=dict)
-    variables: …
-    start_task: …
+    variables: dict[str, Any] = Field(default_factory=dict)
+    start_task: str | None = None
+
+    # Phase 1: Scheduling metadata
+    schedule: str | None = Field(None, description="Cron expression for scheduled execution")
+    start_date: datetime | None = Field(None, description="When the schedule becomes active")
+    catchup: bool = Field(False, description="Whether to backfill missed runs")
+    is_paused: bool = Field(False, description="Whether the workflow is paused")
+    tags: list[str] = Field(default_factory=list, description="Workflow categorization tags")
+    max_active_runs: int = Field(1, description="Maximum number of concurrent runs")
+    default_retry_policy: RetryPolicy | None = Field(None, description="Default retry policy for all tasks")
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_workflow_name_and_version(cls, data: Any) -> Any:
+        """Validate workflow name and version don't contain '__' (double underscore).
+
+        The double underscore is reserved as a separator for display purposes:
+            {workflow_name}__{version}__{step_name}
+
+        Workflow names must match: ^[a-z][a-z0-9_]*$ (lowercase, alphanumeric, single underscore)
+        Workflow versions must match: ^[a-zA-Z0-9._-]+$ (semver compatible)
+        """
+        import re
+
+        if isinstance(data, dict):
+            name = data.get("name", "")
+            version = data.get("version", "")
+
+            # Check for double underscore (reserved separator)
+            if "__" in name:
+                msg = f"Workflow name '{name}' cannot contain '__' (double underscore) - it's reserved as a separator"
+                raise ValueError(msg)
+
+            if "__" in version:
+                msg = f"Workflow version '{version}' cannot contain '__' (double underscore) - it's reserved as a separator"
+                raise ValueError(msg)
+
+            # Validate workflow name format
+            if name and not re.match(r"^[a-z][a-z0-9_]*$", name):
+                msg = f"Workflow name '{name}' must start with lowercase letter and contain only lowercase letters, digits, and single underscores"
+                raise ValueError(msg)
+
+            # Validate workflow version format (semver compatible)
+            if version and not re.match(r"^[a-zA-Z0-9._-]+$", version):
+                msg = f"Workflow version '{version}' must contain only alphanumeric characters, dots, hyphens, and underscores (semver compatible)"
+                raise ValueError(msg)
+
+        return data

     @model_validator(mode="before")
     @classmethod
     def validate_tasks(cls, data: Any) -> Any:
         if isinstance(data, dict) and "tasks" in data:
             validated_tasks = {}
-            operator_classes: …
+            operator_classes: dict[str, type[BaseOperator]] = {
                 OperatorType.TASK.value: TaskOperator,
                 OperatorType.CONDITION.value: ConditionOperator,
                 OperatorType.WAIT.value: WaitOperator,
                 OperatorType.PARALLEL.value: ParallelOperator,
                 OperatorType.FOREACH.value: ForEachOperator,
                 OperatorType.WHILE.value: WhileOperator,
+                OperatorType.EMIT_EVENT.value: EmitEventOperator,
+                OperatorType.WAIT_FOR_EVENT.value: WaitForEventOperator,
+                OperatorType.SWITCH.value: SwitchOperator,
             }
             for task_id, task_data in data["tasks"].items():
                 operator_type = task_data.get("operator_type")
@@ -159,25 +238,19 @@ class Workflow(BaseModel):
                     operator_class = operator_classes[operator_type]
                     validated_tasks[task_id] = operator_class.model_validate(task_data)
                 else:
-                    raise ValueError(f"Unknown operator type: {operator_type}")
+                    msg = f"Unknown operator type: {operator_type}"
+                    raise ValueError(msg)
             data["tasks"] = validated_tasks
         return data

     def add_task(
         self,
-        task: Union[
-            TaskOperator,
-            ConditionOperator,
-            WaitOperator,
-            ParallelOperator,
-            ForEachOperator,
-            WhileOperator,
-        ],
+        task: TaskOperator | ConditionOperator | WaitOperator | ParallelOperator | ForEachOperator | WhileOperator | EmitEventOperator | WaitForEventOperator | SwitchOperator,
     ) -> "Workflow":
         self.tasks[task.task_id] = task
         return self

-    def set_variables(self, variables: …
+    def set_variables(self, variables: dict[str, Any]) -> "Workflow":
         self.variables.update(variables)
         return self

@@ -185,6 +258,42 @@ class Workflow(BaseModel):
         self.start_task = task_id
         return self

+    # Phase 1: Scheduling methods
+    def set_schedule(self, cron: str) -> "Workflow":
+        """Set the cron schedule for this workflow."""
+        self.schedule = cron
+        return self
+
+    def set_start_date(self, start_date: datetime) -> "Workflow":
+        """Set when the schedule becomes active."""
+        self.start_date = start_date
+        return self
+
+    def set_catchup(self, enabled: bool) -> "Workflow":
+        """Set whether to backfill missed runs."""
+        self.catchup = enabled
+        return self
+
+    def set_paused(self, paused: bool) -> "Workflow":
+        """Set whether the workflow is paused."""
+        self.is_paused = paused
+        return self
+
+    def add_tags(self, *tags: str) -> "Workflow":
+        """Add tags to the workflow."""
+        self.tags.extend(tags)
+        return self
+
+    def set_max_active_runs(self, count: int) -> "Workflow":
+        """Set maximum number of concurrent runs."""
+        self.max_active_runs = count
+        return self
+
+    def set_default_retry_policy(self, policy: RetryPolicy) -> "Workflow":
+        """Set default retry policy for all tasks."""
+        self.default_retry_policy = policy
+        return self
+
     def to_yaml(self) -> str:
         data = self.model_dump(mode="json", by_alias=True, exclude_none=True)
         return yaml.dump(data, default_flow_style=False)
@@ -192,6 +301,68 @@ class Workflow(BaseModel):
     def to_json(self) -> str:
         return self.model_dump_json(indent=2)

+    def to_mermaid(self) -> str:
+        """ convert to mermaid state diagram format """
+        lines = ["stateDiagram-v2"]
+
+        all_dependencies = {dep for task in self.tasks.values() for dep in task.dependencies}
+
+        for task_id, task in self.tasks.items():
+            # Add state with description for regular tasks
+            if task.description and not isinstance(task, (ForEachOperator, WhileOperator)):
+                lines.append(f'    state "{task.description}" as {task_id}')
+
+            # Add dependencies
+            if not task.dependencies:
+                if self.start_task == task_id or not self.start_task:
+                    lines.append(f'    [*] --> {task_id}')
+            else:
+                for dep in task.dependencies:
+                    lines.append(f'    {dep} --> {task_id}')
+
+            # Add transitions for conditional operator
+            if isinstance(task, ConditionOperator):
+                if task.if_true:
+                    lines.append(f'    {task_id} --> {task.if_true} : True')
+                if task.if_false:
+                    lines.append(f'    {task_id} --> {task.if_false} : False')
+
+            # Add composite state for parallel operator
+            if isinstance(task, ParallelOperator):
+                lines.append(f'    state {task_id} {{')
+                for i, branch in enumerate(task.branches):
+                    lines.append(f'        state "Branch {i+1}" as {branch}')
+                    if i < len(task.branches) - 1:
+                        lines.append('        --')
+                lines.append('    }')
+
+            # Add composite state for foreach operator
+            if isinstance(task, ForEachOperator):
+                lines.append(f'    state {task_id} {{')
+                for sub_task in task.loop_body:
+                    if sub_task.description:
+                        lines.append(f'        state "{sub_task.description}" as {sub_task.task_id}')
+                    else:
+                        lines.append(f'        {sub_task.task_id}')
+                lines.append('    }')
+
+            # Add composite state for while operator
+            if isinstance(task, WhileOperator):
+                lines.append(f'    state {task_id} {{')
+                for sub_task in task.loop_body:
+                    if sub_task.description:
+                        lines.append(f'        state "{sub_task.description}" as {sub_task.task_id}')
+                    else:
+                        lines.append(f'        {sub_task.task_id}')
+                lines.append('    }')
+
+            # End states
+            if task_id not in all_dependencies:
+                if not (isinstance(task, ConditionOperator) and (task.if_true or task.if_false)):
+                    lines.append(f'    {task_id} --> [*]')
+
+        return "\n".join(lines)
+
     @classmethod
     def from_yaml(cls, yaml_str: str) -> "Workflow":
         data = yaml.safe_load(yaml_str)
@@ -206,38 +377,45 @@ class WorkflowBuilder:
     def __init__(
         self,
         name: str,
-        existing_workflow: …
+        existing_workflow: Workflow | None = None,
         parent: Optional["WorkflowBuilder"] = None,
-    ):
+    ) -> None:
         if existing_workflow:
             self.workflow = existing_workflow
         else:
-            self.workflow = Workflow(
-                …
+            self.workflow = Workflow(
+                name=name,
+                version="1.1.0",
+                description="",
+                tasks={},
+                variables={},
+                start_task=None,
+                schedule=None,
+                start_date=None,
+                catchup=False,
+                is_paused=False,
+                tags=[],
+                max_active_runs=1,
+                default_retry_policy=None,
+            )
+        self._current_task: str | None = None
         self.parent = parent

     def _add_task(
         self,
-        task: Union[
-            TaskOperator,
-            ConditionOperator,
-            WaitOperator,
-            ParallelOperator,
-            ForEachOperator,
-            WhileOperator,
-        ],
-        **kwargs,
+        task: TaskOperator | ConditionOperator | WaitOperator | ParallelOperator | ForEachOperator | WhileOperator | EmitEventOperator | WaitForEventOperator | SwitchOperator,
+        **kwargs: Any,
     ) -> None:
         dependencies = kwargs.get("dependencies", [])
         if self._current_task and not dependencies:
             dependencies.append(self._current_task)

-        task.dependencies = sorted(…)
+        task.dependencies = sorted(set(dependencies))

         self.workflow.add_task(task)
         self._current_task = task.task_id

-    def task(self, task_id: str, function: str, **kwargs) -> "WorkflowBuilder":
+    def task(self, task_id: str, function: str, **kwargs: Any) -> "WorkflowBuilder":
         task = TaskOperator(task_id=task_id, function=function, **kwargs)
         self._add_task(task, **kwargs)
         return self
@@ -248,7 +426,7 @@ class WorkflowBuilder:
         condition: str,
         if_true: Callable[["WorkflowBuilder"], "WorkflowBuilder"],
         if_false: Callable[["WorkflowBuilder"], "WorkflowBuilder"],
-        **kwargs,
+        **kwargs: Any,
     ) -> "WorkflowBuilder":
         true_builder = if_true(WorkflowBuilder(f"{task_id}_true", parent=self))
         false_builder = if_false(WorkflowBuilder(f"{task_id}_false", parent=self))
@@ -281,7 +459,7 @@ class WorkflowBuilder:
         return self

     def wait(
-        self, task_id: str, wait_for: …
+        self, task_id: str, wait_for: timedelta | datetime | str, **kwargs: Any,
     ) -> "WorkflowBuilder":
         task = WaitOperator(task_id=task_id, wait_for=wait_for, **kwargs)
         self._add_task(task, **kwargs)
@@ -290,13 +468,13 @@ class WorkflowBuilder:
     def parallel(
         self,
         task_id: str,
-        branches: …
-        **kwargs,
+        branches: dict[str, Callable[["WorkflowBuilder"], "WorkflowBuilder"]],
+        **kwargs: Any,
     ) -> "WorkflowBuilder":
         branch_builders = {}
         for name, branch_func in branches.items():
             branch_builder = branch_func(
-                WorkflowBuilder(f"{task_id}_{name}", parent=self)
+                WorkflowBuilder(f"{task_id}_{name}", parent=self),
             )
             branch_builders[name] = branch_builder

@@ -328,7 +506,7 @@ class WorkflowBuilder:
         task_id: str,
         items: str,
         loop_body: Callable[["WorkflowBuilder"], "WorkflowBuilder"],
-        **kwargs,
+        **kwargs: Any,
     ) -> "WorkflowBuilder":
         # Create a temporary builder for the loop body.
         temp_builder = WorkflowBuilder(f"{task_id}_loop", parent=self)
@@ -369,7 +547,7 @@ class WorkflowBuilder:
         task_id: str,
         condition: str,
         loop_body: Callable[["WorkflowBuilder"], "WorkflowBuilder"],
-        **kwargs,
+        **kwargs: Any,
     ) -> "WorkflowBuilder":
         loop_builder = loop_body(WorkflowBuilder(f"{task_id}_loop", parent=self))
         loop_tasks = list(loop_builder.workflow.tasks.values())
@@ -408,24 +586,106 @@ class WorkflowBuilder:
         backoff_factor: float = 2.0,
     ) -> "WorkflowBuilder":
         if self._current_task and isinstance(
-            self.workflow.tasks[self._current_task], TaskOperator
+            self.workflow.tasks[self._current_task], TaskOperator,
         ):
             self.workflow.tasks[self._current_task].retry_policy = RetryPolicy(
-                max_retries=max_retries, delay=delay, backoff_factor=backoff_factor
+                max_retries=max_retries, delay=delay, backoff_factor=backoff_factor,
             )
         return self

     def timeout(
-        self, timeout: timedelta, kill_on_timeout: bool = True
+        self, timeout: timedelta, kill_on_timeout: bool = True,
     ) -> "WorkflowBuilder":
         if self._current_task and isinstance(
-            self.workflow.tasks[self._current_task], TaskOperator
+            self.workflow.tasks[self._current_task], TaskOperator,
        ):
             self.workflow.tasks[self._current_task].timeout_policy = TimeoutPolicy(
-                timeout=timeout, kill_on_timeout=kill_on_timeout
+                timeout=timeout, kill_on_timeout=kill_on_timeout,
             )
         return self

+    # Phase 2: Event-based operators
+    def emit_event(self, task_id: str, event_name: str, **kwargs: Any) -> "WorkflowBuilder":
+        """Emit an event that other workflows can wait for."""
+        task = EmitEventOperator(task_id=task_id, event_name=event_name, **kwargs)
+        self._add_task(task, **kwargs)
+        return self
+
+    def wait_for_event(
+        self, task_id: str, event_name: str, timeout_seconds: int | None = None, **kwargs: Any,
+    ) -> "WorkflowBuilder":
+        """Wait for an external event with optional timeout."""
+        task = WaitForEventOperator(
+            task_id=task_id, event_name=event_name, timeout_seconds=timeout_seconds, **kwargs,
+        )
+        self._add_task(task, **kwargs)
+        return self
+
+    # Phase 3: Callback hooks (applies to current task)
+    def on_success(self, success_task_id: str) -> "WorkflowBuilder":
+        """Set the task to run when the current task succeeds."""
+        if self._current_task:
+            self.workflow.tasks[self._current_task].on_success_task_id = success_task_id
+        return self
+
+    def on_failure(self, failure_task_id: str) -> "WorkflowBuilder":
+        """Set the task to run when the current task fails."""
+        if self._current_task:
+            self.workflow.tasks[self._current_task].on_failure_task_id = failure_task_id
+        return self
+
+    # Phase 4: Switch operator
+    def switch(
+        self,
+        task_id: str,
+        switch_on: str,
+        cases: dict[str, str],
+        default: str | None = None,
+        **kwargs: Any,
+    ) -> "WorkflowBuilder":
+        """Multi-branch switch/case operator."""
+        task = SwitchOperator(
+            task_id=task_id, switch_on=switch_on, cases=cases, default=default, **kwargs,
+        )
+        self._add_task(task, **kwargs)
+        return self
+
+    # Phase 1: Scheduling methods (delegate to Workflow)
+    def set_schedule(self, cron: str) -> "WorkflowBuilder":
+        """Set the cron schedule for this workflow."""
+        self.workflow.set_schedule(cron)
+        return self
+
+    def set_start_date(self, start_date: datetime) -> "WorkflowBuilder":
+        """Set when the schedule becomes active."""
+        self.workflow.set_start_date(start_date)
+        return self
+
+    def set_catchup(self, enabled: bool) -> "WorkflowBuilder":
+        """Set whether to backfill missed runs."""
+        self.workflow.set_catchup(enabled)
+        return self
+
+    def set_paused(self, paused: bool) -> "WorkflowBuilder":
+        """Set whether the workflow is paused."""
+        self.workflow.set_paused(paused)
+        return self
+
+    def add_tags(self, *tags: str) -> "WorkflowBuilder":
+        """Add tags to the workflow."""
+        self.workflow.add_tags(*tags)
+        return self
+
+    def set_max_active_runs(self, count: int) -> "WorkflowBuilder":
+        """Set maximum number of concurrent runs."""
+        self.workflow.set_max_active_runs(count)
+        return self
+
+    def set_default_retry_policy(self, policy: RetryPolicy) -> "WorkflowBuilder":
+        """Set default retry policy for all tasks."""
+        self.workflow.set_default_retry_policy(policy)
+        return self
+
     def build(self) -> Workflow:
         if not self.workflow.start_task and self.workflow.tasks:
             self.workflow.start_task = next(iter(self.workflow.tasks.keys()))
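Taken together, the new validators and fluent methods change how a `Workflow` may be named and configured. A minimal behavioral sketch (editor's illustration, not part of the package source; it exercises only APIs defined in this diff):

```python
from highway_dsl import WorkflowBuilder

# "my__flow" contains the reserved "__" separator, so the Pydantic
# model validator raises ValueError as soon as the builder constructs
# its underlying Workflow model.
try:
    WorkflowBuilder("my__flow")
except ValueError as exc:
    print(exc)

# A conforming name (lowercase, single underscores) validates cleanly,
# and the new Phase 1 scheduling methods chain on the builder.
wf = (
    WorkflowBuilder("nightly_etl")
    .set_schedule("0 2 * * *")
    .add_tags("etl")
    .task("extract", "etl.extract")
    .build()
)
print(wf.version)  # "1.1.0", the default set in this release
```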
highway_dsl-1.2.0.dist-info/METADATA ADDED (481 lines)

Metadata-Version: 2.4
Name: highway_dsl
Version: 1.2.0
Summary: A stable domain specific language (DSL) for defining and managing data processing pipelines and workflow engines.
Author-email: Farseed Ashouri <farseed.ashouri@gmail.com>
License: MIT
Project-URL: Homepage, https://github.com/rodmena-limited/highway_dsl
Project-URL: Issues, https://github.com/rodmena-limited/highway_dsl/issues
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: pydantic>=2.12.3
Requires-Dist: pyyaml>=6.0
Provides-Extra: dev
Requires-Dist: pytest>=7.0.0; extra == "dev"
Requires-Dist: mypy>=1.0.0; extra == "dev"
Requires-Dist: types-PyYAML>=6.0.0; extra == "dev"
Requires-Dist: pytest-cov>=2.12.1; extra == "dev"
Dynamic: license-file

# Highway DSL

[![PyPI version](https://badge.fury.io/py/highway-dsl.svg)](https://badge.fury.io/py/highway-dsl)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![Python versions](https://img.shields.io/pypi/pyversions/highway-dsl.svg)](https://pypi.org/project/highway-dsl/)
[![Publish](https://github.com/rodmena-limited/highway_dsl/actions/workflows/publish.yml/badge.svg)](https://github.com/rodmena-limited/highway_dsl/actions/workflows/publish.yml)

**Highway DSL** is a Python-based domain-specific language for defining complex workflows in a clear, concise, and fluent manner. It is part of the larger **Highway** project, an advanced workflow engine capable of running complex DAG-based workflows.

## Version 1.1.0 - Feature Release

This major feature release adds **Airflow-parity** features to enable production-grade workflows:

### New Features

#### 1. **Scheduling Metadata** (Airflow Parity)
Define cron-based schedules directly in your workflow:
```python
builder = (
    WorkflowBuilder("daily_pipeline")
    .set_schedule("0 2 * * *")  # Run daily at 2 AM
    .set_start_date(datetime(2025, 1, 1))
    .set_catchup(False)
    .add_tags("production", "daily")
    .set_max_active_runs(1)
)
```

#### 2. **Event-Based Operators** (Absurd Integration)
First-class support for event-driven workflows:
```python
# Emit an event that other workflows can wait for
builder.emit_event(
    "notify_completion",
    event_name="pipeline_done",
    payload={"status": "success"}
)

# Wait for an external event
builder.wait_for_event(
    "wait_upstream",
    event_name="data_ready",
    timeout_seconds=3600
)
```

#### 3. **Callback Hooks** (Production Workflows)
Durable success/failure handlers as first-class workflow nodes:
```python
builder.task("risky_operation", "process.data")

builder.task("send_alert", "alerts.notify")
builder.on_failure("send_alert")  # Runs if risky_operation fails

builder.task("cleanup", "cleanup.resources")
builder.on_success("cleanup")  # Runs if risky_operation succeeds
```

#### 4. **Switch/Case Operator**
Multi-branch routing with cleaner syntax than nested conditions:
```python
builder.switch(
    "route_by_status",
    switch_on="{{data.status}}",
    cases={
        "approved": "approve_task",
        "rejected": "reject_task",
        "pending": "review_task"
    },
    default="unknown_handler"
)
```

#### 5. **Task Descriptions**
Document your workflow inline:
```python
builder.task(
    "process",
    "data.transform",
    description="Transform raw data into analytics format"
)
```

#### 6. **Workflow-Level Default Retry Policy**
Set a default retry policy for all tasks:
```python
builder.set_default_retry_policy(
    RetryPolicy(max_retries=3, delay=timedelta(seconds=60))
)
```

See `examples/scheduled_event_workflow.py` for a comprehensive example using all new features.

### RFC-Style Specification

For implementers and advanced users, Highway DSL v1.1.0 includes a comprehensive **3,215-line RFC-style specification** (`spec.txt`) modeled after IETF RFCs (RFC 2119, RFC 8259). This authoritative document provides:

- Complete operator specifications with execution semantics
- Integration guidance for Absurd and other runtimes
- Security considerations and best practices
- Comprehensive examples for all features
- Formal data model definitions

Access the specification at `/dsl/spec.txt` in the repository.

## Architecture Diagram

```mermaid
graph TB
    subgraph "Highway DSL v1.1.0 Features"
        A[WorkflowBuilder<br/>Fluent API] --> B[Core Operators]
        A --> C[Scheduling]
        A --> D[Events]
        A --> E[Error Handling]

        B --> B1[Task]
        B --> B2[Condition]
        B --> B3[Parallel]
        B --> B4[ForEach]
        B --> B5[While]
        B --> B6[Wait]
        B --> B7[Switch]

        C --> C1[Cron Schedules]
        C --> C2[Start Date]
        C --> C3[Catchup]
        C --> C4[Tags]

        D --> D1[EmitEvent]
        D --> D2[WaitForEvent]

        E --> E1[RetryPolicy]
        E --> E2[TimeoutPolicy]
        E --> E3[Callbacks]
    end

    subgraph "Output Formats"
        F[YAML]
        G[JSON]
    end

    subgraph "Runtime Integration"
        H[Absurd Runtime]
        I[Airflow]
        J[Temporal]
        K[Custom Engines]
    end

    A --> F
    A --> G
    F --> H
    F --> I
    F --> J
    F --> K
    G --> H
    G --> I
    G --> J
    G --> K

    style A fill:#2563eb,stroke:#1e40af,color:#fff
    style B fill:#8b5cf6,stroke:#7c3aed,color:#fff
    style C fill:#10b981,stroke:#059669,color:#fff
    style D fill:#f59e0b,stroke:#d97706,color:#fff
    style E fill:#ef4444,stroke:#dc2626,color:#fff
```

## Features

* **Fluent API:** A powerful and intuitive `WorkflowBuilder` for defining workflows programmatically.
* **Pydantic-based:** All models are built on Pydantic, providing robust data validation, serialization, and documentation.
* **Rich Operators:** A comprehensive set of operators for handling various workflow scenarios:
  * `Task` - Basic workflow steps
  * `Condition` (if/else) - Conditional branching
  * `Parallel` - Execute multiple branches simultaneously (see the sketch after this list)
  * `ForEach` - Iterate over collections with proper dependency management
  * `Wait` - Pause execution for scheduled tasks
  * `While` - Execute loops based on conditions
  * **NEW in v1.1:** `EmitEvent` - Emit events for cross-workflow coordination
  * **NEW in v1.1:** `WaitForEvent` - Wait for external events with timeout
  * **NEW in v1.1:** `Switch` - Multi-branch routing (switch/case)
* **Scheduling:** Built-in support for cron-based scheduling, start dates, and catchup configuration
* **Event-Driven:** First-class support for event emission and waiting (Absurd integration)
* **Callback Hooks:** Durable success/failure handlers as workflow nodes
* **YAML/JSON Interoperability:** Workflows can be defined in Python and exported to YAML or JSON, and vice-versa.
* **Retry and Timeout Policies:** Built-in error handling and execution time management.
* **Extensible:** The DSL is designed to be extensible with custom operators and policies.
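Most of the operators above are demonstrated later in this README, but `Parallel` is not; a minimal sketch of the builder's `parallel` method follows (editor's addition; the branch names and module paths are illustrative):

```python
from highway_dsl import WorkflowBuilder

builder = WorkflowBuilder("parallel_demo")
builder.task("prepare", "demo.prepare")

# Each branch is a callable that receives its own nested WorkflowBuilder;
# the named branches can run simultaneously once "prepare" completes.
builder.parallel(
    "fan_out",
    branches={
        "images": lambda b: b.task("resize", "media.resize_images"),
        "videos": lambda b: b.task("transcode", "media.transcode_videos"),
    },
)

workflow = builder.build()
```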
## Installation

```bash
pip install highway-dsl
```

## Quick Start

Here's a simple example of how to define a workflow using the `WorkflowBuilder`:

```python
from datetime import timedelta
from highway_dsl import WorkflowBuilder

workflow = (
    WorkflowBuilder("simple_etl")
    .task("extract", "etl.extract_data", result_key="raw_data")
    .task(
        "transform",
        "etl.transform_data",
        args=["{{raw_data}}"],
        result_key="transformed_data",
    )
    .retry(max_retries=3, delay=timedelta(seconds=10))
    .task("load", "etl.load_data", args=["{{transformed_data}}"])
    .timeout(timeout=timedelta(minutes=30))
    .wait("wait_next", timedelta(hours=24))
    .task("cleanup", "etl.cleanup")
    .build()
)

print(workflow.to_yaml())
```
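Since `Workflow.from_yaml` is the inverse of `to_yaml`, the serialized text can be loaded back into an equivalent model. A small round-trip sketch (editor's addition; it reuses the `workflow` object built above):

```python
from highway_dsl import Workflow

# Serialize the workflow, rebuild a Workflow model from the YAML text,
# and spot-check that the name and task set survive the round trip.
yaml_text = workflow.to_yaml()
restored = Workflow.from_yaml(yaml_text)
assert restored.name == workflow.name
assert set(restored.tasks) == set(workflow.tasks)
```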
## Real-World Example: E-Commerce Order Processing

```python
from highway_dsl import WorkflowBuilder, RetryPolicy
from datetime import datetime, timedelta

# Production-ready e-commerce order workflow
workflow = (
    WorkflowBuilder("order_processing")
    .set_schedule("*/5 * * * *")  # Run every 5 minutes
    .set_start_date(datetime(2025, 1, 1))
    .add_tags("production", "orders", "critical")
    .set_default_retry_policy(RetryPolicy(max_retries=3, delay=timedelta(seconds=30)))

    # Fetch pending orders
    .task("fetch_orders", "orders.get_pending", result_key="orders")

    # Process each order
    .foreach(
        "process_each_order",
        items="{{orders}}",
        loop_body=lambda b: (
            b.task("validate", "orders.validate", args=["{{item}}"])
            .task("charge_payment", "payments.charge", args=["{{item}}"],
                  result_key="payment_result")
            .task("send_failure_email", "email.send_failure",
                  args=["{{item.customer_email}}"])
            .on_failure("send_failure_email")  # Alert on payment failure
            .switch(
                "route_by_amount",
                switch_on="{{item.total}}",
                cases={
                    "high": "priority_shipping",    # > $500
                    "medium": "standard_shipping",  # $100-500
                    "low": "economy_shipping"       # < $100
                },
                default="standard_shipping"
            )
        )
    )

    # Emit completion event for analytics workflow
    .emit_event(
        "notify_analytics",
        event_name="orders_processed_{{ds}}",
        payload={"count": "{{orders.length}}", "timestamp": "{{run.started_at}}"}
    )

    .build()
)
```

This workflow demonstrates:
- Scheduled execution every 5 minutes
- Default retry policy for all tasks
- ForEach loop processing multiple orders
- Payment failure callbacks
- Switch/case routing based on order amount
- Event emission for cross-workflow coordination

## Mermaid Diagram Generation

You can generate a Mermaid state diagram of your workflow using the `to_mermaid` method:

```python
print(workflow.to_mermaid())
```

This will output a Mermaid diagram in the `stateDiagram-v2` format, which can be used with a variety of tools to visualize your workflow.
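For the `simple_etl` workflow from the Quick Start, the generated diagram begins roughly like this (editor's sketch derived from the `to_mermaid` implementation in this release; exact spacing and states depend on task descriptions and dependencies):

```mermaid
stateDiagram-v2
    [*] --> extract
    extract --> transform
    transform --> load
    load --> wait_next
    wait_next --> cleanup
    cleanup --> [*]
```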
## Bank ETL Example

A more complex example of a bank's end-of-day ETL process can be found in `examples/bank_end_of_the_day_etl_workflow.py`.

A mermaid diagram of this workflow can be found [here](docs/bank_etl.mermaid).

## Advanced Usage

### Conditional Logic

```python
from highway_dsl import WorkflowBuilder, RetryPolicy
from datetime import timedelta

builder = WorkflowBuilder("data_processing_pipeline")

builder.task("start", "workflows.tasks.initialize", result_key="init_data")
builder.task(
    "validate",
    "workflows.tasks.validate_data",
    args=["{{init_data}}"],
    result_key="validated_data",
)

builder.condition(
    "check_quality",
    condition="{{validated_data.quality_score}} > 0.8",
    if_true=lambda b: b.task(
        "high_quality_processing",
        "workflows.tasks.advanced_processing",
        args=["{{validated_data}}"],
        retry_policy=RetryPolicy(max_retries=5, delay=timedelta(seconds=10), backoff_factor=2.0),
    ),
    if_false=lambda b: b.task(
        "standard_processing",
        "workflows.tasks.basic_processing",
        args=["{{validated_data}}"],
    ),
)

workflow = builder.build()
```

### While Loops

```python
from highway_dsl import WorkflowBuilder

builder = WorkflowBuilder("qa_rework_workflow")

builder.task("start_qa", "workflows.tasks.start_qa", result_key="qa_results")

builder.while_loop(
    "qa_rework_loop",
    condition="{{qa_results.status}} == 'failed'",
    loop_body=lambda b: b.task("perform_rework", "workflows.tasks.perform_rework").task(
        "re_run_qa", "workflows.tasks.run_qa", result_key="qa_results"
    ),
)

builder.task("finalize_product", "workflows.tasks.finalize_product", dependencies=["qa_rework_loop"])

workflow = builder.build()
```

### For-Each Loops with Proper Dependency Management

Fixed bug where foreach loops were incorrectly inheriting dependencies from containing parallel operators:

```python
# This loop now properly encapsulates its internal tasks
builder.foreach(
    "process_items",
    items="{{data.items}}",
    loop_body=lambda fb: fb.task("process_item", "processor.handle_item", args=["{{item.id}}"])
    # Loop body tasks only have proper dependencies, not unwanted "grandparent" dependencies
)
```
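To verify the encapsulation on a built workflow, the generated dependency lists can be inspected directly (editor's sketch; `tasks` and `dependencies` are the model fields defined in this release):

```python
wf = builder.build()

# Each loop-body task should depend only on its parent loop operator and
# its predecessors inside the loop, never on an outer parallel branch.
for task_id, task in wf.tasks.items():
    print(task_id, "->", task.dependencies)
```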
### Retry Policies

```python
from highway_dsl import RetryPolicy
from datetime import timedelta

builder.task(
    "reliable_task",
    "service.operation",
    retry_policy=RetryPolicy(
        max_retries=5,
        delay=timedelta(seconds=10),
        backoff_factor=2.0
    )
)
```

### Timeout Policies

```python
from highway_dsl import TimeoutPolicy
from datetime import timedelta

builder.task(
    "timed_task",
    "service.operation",
    timeout_policy=TimeoutPolicy(
        timeout=timedelta(hours=1),
        kill_on_timeout=True
    )
)
```

## Version History

### Version 1.1.0 - Feature Release (Current)

**Airflow-Parity Features:**
- Scheduling metadata (cron, start_date, catchup, tags, max_active_runs)
- Workflow-level default retry policy

**Event-Driven Features:**
- EmitEventOperator for cross-workflow coordination
- WaitForEventOperator with timeout support

**Production Features:**
- Durable callback hooks (on_success, on_failure)
- SwitchOperator for multi-branch routing
- Task descriptions for documentation
- RFC-style specification document (3,215 lines)

### Version 1.0.3 - Stable Release

This is a stable release with important bug fixes and enhancements, including a critical fix for the ForEach operator dependency management issue.

## Development

To set up the development environment:

```bash
git clone https://github.com/your-username/highway.git
cd highway/dsl
python -m venv .venv
source .venv/bin/activate
pip install -e .[dev]
```

### Running Tests

```bash
pytest
```

### Type Checking

```bash
mypy .
```

## Documentation

- **README.md** (this file) - Getting started and examples
- **spec.txt** - RFC-style formal specification (3,215 lines)
- **examples/** - Comprehensive workflow examples

## License

MIT License
highway_dsl-1.2.0.dist-info/RECORD ADDED (7 lines)

highway_dsl/__init__.py,sha256=D3gFgy3Wq5-NqfmnGcN1RDw8JxGsJBGVKwVsUFDKn4A,651
highway_dsl/workflow_dsl.py,sha256=NGXTddjeVz530wwKwLvvx0bjMDDmk9WNOxcPqXxNZzs,26742
highway_dsl-1.2.0.dist-info/licenses/LICENSE,sha256=qdFq1H66BvKg67mf4-WGpFwtG2u_dNknxuJDQ1_ubaY,1072
highway_dsl-1.2.0.dist-info/METADATA,sha256=4fokdO8BWKOF3v8lnM1cl77BJ4YK_1XhSwSxXks3hbE,14242
highway_dsl-1.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
highway_dsl-1.2.0.dist-info/top_level.txt,sha256=_5uX-bbBsQ2rsi1XMr7WRyKbr6ack5GqVBcy-QjF1C8,12
highway_dsl-1.2.0.dist-info/RECORD,,
highway_dsl-1.0.2.dist-info/METADATA REMOVED (228 lines)

Metadata-Version: 2.4
Name: highway_dsl
Version: 1.0.2
Summary: A stable domain specific language (DSL) for defining and managing data processing pipelines and workflow engines.
Author-email: Farseed Ashouri <farseed.ashouri@gmail.com>
License: MIT
Project-URL: Homepage, https://github.com/rodmena-limited/highway_dsl
Project-URL: Issues, https://github.com/rodmena-limited/highway_dsl/issues
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: pydantic>=2.12.3
Requires-Dist: pyyaml>=6.0
Provides-Extra: dev
Requires-Dist: pytest>=7.0.0; extra == "dev"
Requires-Dist: mypy>=1.0.0; extra == "dev"
Requires-Dist: types-PyYAML>=6.0.0; extra == "dev"
Requires-Dist: pytest-cov>=2.12.1; extra == "dev"
Dynamic: license-file

# Highway DSL

[![PyPI version](https://badge.fury.io/py/highway-dsl.svg)](https://badge.fury.io/py/highway-dsl)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
[![Python versions](https://img.shields.io/pypi/pyversions/highway-dsl.svg)](https://pypi.org/project/highway-dsl/)

**Highway DSL** is a stable, Python-based domain-specific language for defining complex workflows in a clear, concise, and fluent manner. It is part of the larger **Highway** project, an advanced workflow engine capable of running complex DAG-based workflows.

## Version 1.0.2 - Stable Release

This is a stable release with important bug fixes and enhancements, including a critical fix for the ForEach operator dependency management issue.

## Features

* **Fluent API:** A powerful and intuitive `WorkflowBuilder` for defining workflows programmatically.
* **Pydantic-based:** All models are built on Pydantic, providing robust data validation, serialization, and documentation.
* **Rich Operators:** A comprehensive set of operators for handling various workflow scenarios:
  * `Task` - Basic workflow steps
  * `Condition` (if/else) - Conditional branching
  * `Parallel` - Execute multiple branches simultaneously
  * `ForEach` - Iterate over collections with proper dependency management
  * `Wait` - Pause execution for scheduled tasks
  * `While` - Execute loops based on conditions
* **Fixed ForEach Bug:** Proper encapsulation of loop body tasks to prevent unwanted "grandparent" dependencies from containing parallel operators.
* **YAML/JSON Interoperability:** Workflows can be defined in Python and exported to YAML or JSON, and vice-versa.
* **Retry and Timeout Policies:** Built-in error handling and execution time management.
* **Extensible:** The DSL is designed to be extensible with custom operators and policies.

## Installation

```bash
pip install highway-dsl
```

## Quick Start

Here's a simple example of how to define a workflow using the `WorkflowBuilder`:

```python
from datetime import timedelta
from highway_dsl import WorkflowBuilder

workflow = (
    WorkflowBuilder("simple_etl")
    .task("extract", "etl.extract_data", result_key="raw_data")
    .task(
        "transform",
        "etl.transform_data",
        args=["{{raw_data}}"],
        result_key="transformed_data",
    )
    .retry(max_retries=3, delay=timedelta(seconds=10))
    .task("load", "etl.load_data", args=["{{transformed_data}}"])
    .timeout(timeout=timedelta(minutes=30))
    .wait("wait_next", timedelta(hours=24))
    .task("cleanup", "etl.cleanup")
    .build()
)

print(workflow.to_yaml())
```

## Advanced Usage

### Conditional Logic

```python
from highway_dsl import WorkflowBuilder, RetryPolicy
from datetime import timedelta

builder = WorkflowBuilder("data_processing_pipeline")

builder.task("start", "workflows.tasks.initialize", result_key="init_data")
builder.task(
    "validate",
    "workflows.tasks.validate_data",
    args=["{{init_data}}"],
    result_key="validated_data",
)

builder.condition(
    "check_quality",
    condition="{{validated_data.quality_score}} > 0.8",
    if_true=lambda b: b.task(
        "high_quality_processing",
        "workflows.tasks.advanced_processing",
        args=["{{validated_data}}"],
        retry_policy=RetryPolicy(max_retries=5, delay=timedelta(seconds=10), backoff_factor=2.0),
    ),
    if_false=lambda b: b.task(
        "standard_processing",
        "workflows.tasks.basic_processing",
        args=["{{validated_data}}"],
    ),
)

workflow = builder.build()
```

### While Loops

```python
from highway_dsl import WorkflowBuilder

builder = WorkflowBuilder("qa_rework_workflow")

builder.task("start_qa", "workflows.tasks.start_qa", result_key="qa_results")

builder.while_loop(
    "qa_rework_loop",
    condition="{{qa_results.status}} == 'failed'",
    loop_body=lambda b: b.task("perform_rework", "workflows.tasks.perform_rework").task(
        "re_run_qa", "workflows.tasks.run_qa", result_key="qa_results"
    ),
)

builder.task("finalize_product", "workflows.tasks.finalize_product", dependencies=["qa_rework_loop"])

workflow = builder.build()
```

### For-Each Loops with Proper Dependency Management

Fixed bug where foreach loops were incorrectly inheriting dependencies from containing parallel operators:

```python
# This loop now properly encapsulates its internal tasks
builder.foreach(
    "process_items",
    items="{{data.items}}",
    loop_body=lambda fb: fb.task("process_item", "processor.handle_item", args=["{{item.id}}"])
    # Loop body tasks only have proper dependencies, not unwanted "grandparent" dependencies
)
```

### Retry Policies

```python
from highway_dsl import RetryPolicy
from datetime import timedelta

builder.task(
    "reliable_task",
    "service.operation",
    retry_policy=RetryPolicy(
        max_retries=5,
        delay=timedelta(seconds=10),
        backoff_factor=2.0
    )
)
```

### Timeout Policies

```python
from highway_dsl import TimeoutPolicy
from datetime import timedelta

builder.task(
    "timed_task",
    "service.operation",
    timeout_policy=TimeoutPolicy(
        timeout=timedelta(hours=1),
        kill_on_timeout=True
    )
)
```

## What's New in Version 1.0.2

### Bug Fixes
* **Fixed ForEach Operator Bug**: Resolved issue where foreach loops were incorrectly getting "grandparent" dependencies from containing parallel operators. Loop body tasks are now properly encapsulated and only depend on their parent loop operator and internal chain dependencies.

### Enhancements
* **Improved Loop Dependency Management**: While loops and ForEach loops now properly encapsulate their internal dependencies without being affected by containing parallel operators.
* **Better Error Handling**: Enhanced error handling throughout the DSL.
* **Comprehensive Test Suite**: Added functional tests for all example workflows to ensure consistency.

## Development

To set up the development environment:

```bash
git clone https://github.com/your-username/highway.git
cd highway
python -m venv .venv
source .venv/bin/activate
pip install -e .[dev]
```

### Running Tests

```bash
pytest
```

### Type Checking

```bash
mypy .
```

## License

MIT License
highway_dsl-1.0.2.dist-info/RECORD REMOVED (7 lines)

highway_dsl/__init__.py,sha256=mr1oMylxliFwu2VO2qpyM3sVQwYIoPL2P6JE-6ZuF7M,507
highway_dsl/workflow_dsl.py,sha256=bhCKDPrMaIkEI4HduKoeqd2VlZsK8wjr8RURifPufGU,14700
highway_dsl-1.0.2.dist-info/licenses/LICENSE,sha256=qdFq1H66BvKg67mf4-WGpFwtG2u_dNknxuJDQ1_ubaY,1072
highway_dsl-1.0.2.dist-info/METADATA,sha256=uCUL4xLYOkZ10TzFzxn6jZ3rjtcL_h7f5fLX5RD41Kk,7187
highway_dsl-1.0.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
highway_dsl-1.0.2.dist-info/top_level.txt,sha256=_5uX-bbBsQ2rsi1XMr7WRyKbr6ack5GqVBcy-QjF1C8,12
highway_dsl-1.0.2.dist-info/RECORD,,
The remaining files are carried over unchanged:

- {highway_dsl-1.0.2.dist-info → highway_dsl-1.2.0.dist-info}/WHEEL (file without changes)
- {highway_dsl-1.0.2.dist-info → highway_dsl-1.2.0.dist-info}/licenses/LICENSE (file without changes)
- {highway_dsl-1.0.2.dist-info → highway_dsl-1.2.0.dist-info}/top_level.txt (file without changes)