stabilize-0.9.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. stabilize/__init__.py +29 -0
  2. stabilize/cli.py +1193 -0
  3. stabilize/context/__init__.py +7 -0
  4. stabilize/context/stage_context.py +170 -0
  5. stabilize/dag/__init__.py +15 -0
  6. stabilize/dag/graph.py +215 -0
  7. stabilize/dag/topological.py +199 -0
  8. stabilize/examples/__init__.py +1 -0
  9. stabilize/examples/docker-example.py +759 -0
  10. stabilize/examples/golden-standard-expected-result.txt +1 -0
  11. stabilize/examples/golden-standard.py +488 -0
  12. stabilize/examples/http-example.py +606 -0
  13. stabilize/examples/llama-example.py +662 -0
  14. stabilize/examples/python-example.py +731 -0
  15. stabilize/examples/shell-example.py +399 -0
  16. stabilize/examples/ssh-example.py +603 -0
  17. stabilize/handlers/__init__.py +53 -0
  18. stabilize/handlers/base.py +226 -0
  19. stabilize/handlers/complete_stage.py +209 -0
  20. stabilize/handlers/complete_task.py +75 -0
  21. stabilize/handlers/complete_workflow.py +150 -0
  22. stabilize/handlers/run_task.py +369 -0
  23. stabilize/handlers/start_stage.py +262 -0
  24. stabilize/handlers/start_task.py +74 -0
  25. stabilize/handlers/start_workflow.py +136 -0
  26. stabilize/launcher.py +307 -0
  27. stabilize/migrations/01KDQ4N9QPJ6Q4MCV3V9GHWPV4_initial_schema.sql +97 -0
  28. stabilize/migrations/01KDRK3TXW4R2GERC1WBCQYJGG_rag_embeddings.sql +25 -0
  29. stabilize/migrations/__init__.py +1 -0
  30. stabilize/models/__init__.py +15 -0
  31. stabilize/models/stage.py +389 -0
  32. stabilize/models/status.py +146 -0
  33. stabilize/models/task.py +125 -0
  34. stabilize/models/workflow.py +317 -0
  35. stabilize/orchestrator.py +113 -0
  36. stabilize/persistence/__init__.py +28 -0
  37. stabilize/persistence/connection.py +185 -0
  38. stabilize/persistence/factory.py +136 -0
  39. stabilize/persistence/memory.py +214 -0
  40. stabilize/persistence/postgres.py +655 -0
  41. stabilize/persistence/sqlite.py +674 -0
  42. stabilize/persistence/store.py +235 -0
  43. stabilize/queue/__init__.py +59 -0
  44. stabilize/queue/messages.py +377 -0
  45. stabilize/queue/processor.py +312 -0
  46. stabilize/queue/queue.py +526 -0
  47. stabilize/queue/sqlite_queue.py +354 -0
  48. stabilize/rag/__init__.py +19 -0
  49. stabilize/rag/assistant.py +459 -0
  50. stabilize/rag/cache.py +294 -0
  51. stabilize/stages/__init__.py +11 -0
  52. stabilize/stages/builder.py +253 -0
  53. stabilize/tasks/__init__.py +19 -0
  54. stabilize/tasks/interface.py +335 -0
  55. stabilize/tasks/registry.py +255 -0
  56. stabilize/tasks/result.py +283 -0
  57. stabilize-0.9.2.dist-info/METADATA +301 -0
  58. stabilize-0.9.2.dist-info/RECORD +61 -0
  59. stabilize-0.9.2.dist-info/WHEEL +4 -0
  60. stabilize-0.9.2.dist-info/entry_points.txt +2 -0
  61. stabilize-0.9.2.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,389 @@
1
+ """
2
+ StageExecution model.
3
+
4
+ A stage represents a logical unit of work in a pipeline. Stages can have:
5
+ - Prerequisites (other stages that must complete first)
6
+ - Tasks (sequential work units)
7
+ - Synthetic stages (before/after stages injected by builders)
8
+
9
+ The DAG structure is represented via requisite_stage_ref_ids, which contains
10
+ the ref_ids of all stages this stage depends on.
11
+ """
12
+
13
+ from __future__ import annotations
14
+
15
+ from dataclasses import dataclass, field
16
+ from enum import Enum
17
+ from typing import TYPE_CHECKING, Any
18
+
19
+ from stabilize.models.status import (
20
+ CONTINUABLE_STATUSES,
21
+ WorkflowStatus,
22
+ )
23
+ from stabilize.models.task import TaskExecution
24
+
25
+ if TYPE_CHECKING:
26
+ from stabilize.models.workflow import Workflow
27
+
28
+
29
def _generate_stage_id() -> str:
    """Mint a fresh ULID string to serve as a stage identifier."""
    # Imported lazily so the (third-party) ulid dependency is only paid for
    # when an identifier is actually generated.
    import ulid

    return str(ulid.new())
34
+
35
+
36
class SyntheticStageOwner(Enum):
    """
    Describes where a synthetic stage sits relative to its parent.

    Builders may inject synthetic stages around a parent stage:
    STAGE_BEFORE members run ahead of the parent's own tasks, while
    STAGE_AFTER members run once the parent has completed.
    """

    STAGE_BEFORE = "STAGE_BEFORE"
    STAGE_AFTER = "STAGE_AFTER"
46
+
47
+
48
@dataclass
class StageExecution:
    """
    Represents a stage execution within a pipeline.

    The DAG structure is encoded in requisite_stage_ref_ids:
    - Empty set = initial stage (no dependencies)
    - Single ref_id = sequential dependency
    - Multiple ref_ids = join point (waits for all)

    Attributes:
        id: Unique identifier (ULID)
        ref_id: Reference identifier used for DAG relationships
        type: Stage type (e.g., "deploy", "bake", "wait")
        name: Human-readable stage name
        status: Current execution status
        context: Input parameters and runtime state (stage-scoped)
        outputs: Values available to downstream stages (pipeline-scoped)
        tasks: List of tasks to execute in this stage
        requisite_stage_ref_ids: Set of ref_ids this stage depends on (DAG edges)
        parent_stage_id: Parent stage ID for synthetic stages
        synthetic_stage_owner: STAGE_BEFORE or STAGE_AFTER for synthetic stages
        start_time: Epoch milliseconds when stage started
        end_time: Epoch milliseconds when stage completed
        start_time_expiry: If stage not started by this time, skip it
        scheduled_time: When stage is scheduled to execute
    """

    id: str = field(default_factory=_generate_stage_id)
    ref_id: str = ""
    type: str = ""
    name: str = ""
    status: WorkflowStatus = WorkflowStatus.NOT_STARTED
    context: dict[str, Any] = field(default_factory=dict)
    outputs: dict[str, Any] = field(default_factory=dict)
    tasks: list[TaskExecution] = field(default_factory=list)
    requisite_stage_ref_ids: set[str] = field(default_factory=set)
    parent_stage_id: str | None = None
    synthetic_stage_owner: SyntheticStageOwner | None = None
    start_time: int | None = None
    end_time: int | None = None
    start_time_expiry: int | None = None
    scheduled_time: int | None = None

    # Back-reference to parent execution (set after construction).
    # Kept private so the `execution` property can enforce attachment.
    _execution: Workflow | None = field(default=None, repr=False)

    @property
    def execution(self) -> Workflow:
        """Get the parent pipeline execution.

        Raises:
            ValueError: If the stage has not been attached to an execution.
        """
        if self._execution is None:
            raise ValueError("Stage is not attached to an execution")
        return self._execution

    @execution.setter
    def execution(self, value: Workflow) -> None:
        """Set the parent pipeline execution."""
        self._execution = value

    def has_execution(self) -> bool:
        """Check if this stage is attached to an execution."""
        return self._execution is not None

    # ========== DAG Navigation Methods ==========

    def is_initial(self) -> bool:
        """Check if this is an initial stage (no dependencies)."""
        return len(self.requisite_stage_ref_ids) == 0

    def is_join(self) -> bool:
        """
        Check if this is a join point (multiple dependencies).

        A join point waits for multiple upstream stages to complete.
        """
        return len(self.requisite_stage_ref_ids) > 1

    def upstream_stages(self) -> list[StageExecution]:
        """
        Get all stages directly upstream of this stage.

        Returns stages whose ref_id is in this stage's requisite_stage_ref_ids.
        Requires the stage to be attached to an execution.
        """
        return [stage for stage in self.execution.stages if stage.ref_id in self.requisite_stage_ref_ids]

    def downstream_stages(self) -> list[StageExecution]:
        """
        Get all stages directly downstream of this stage.

        Returns stages that have this stage's ref_id in their requisite_stage_ref_ids.
        """
        return [stage for stage in self.execution.stages if self.ref_id in stage.requisite_stage_ref_ids]

    def all_upstream_stages_complete(self) -> bool:
        """
        Check if all upstream stages have completed successfully.

        Returns True if all upstream stages have status in CONTINUABLE_STATUSES
        (SUCCEEDED, FAILED_CONTINUE, or SKIPPED). Vacuously True for an
        initial stage with no upstream stages.
        """
        return all(stage.status in CONTINUABLE_STATUSES for stage in self.upstream_stages())

    def any_upstream_stages_failed(self) -> bool:
        """
        Check if any upstream stages have failed with a halt status.

        Returns True if any upstream stage has TERMINAL, STOPPED, or CANCELED status.
        """
        halt_statuses = {
            WorkflowStatus.TERMINAL,
            WorkflowStatus.STOPPED,
            WorkflowStatus.CANCELED,
        }
        for upstream in self.upstream_stages():
            if upstream.status in halt_statuses:
                return True
            # Check recursively for NOT_STARTED stages: a stage that never
            # started may itself be blocked by a failure further upstream.
            if upstream.status == WorkflowStatus.NOT_STARTED and upstream.any_upstream_stages_failed():
                return True
        return False

    # ========== Synthetic Stage Methods ==========

    def synthetic_stages(self) -> list[StageExecution]:
        """Get all synthetic stages (children) of this stage."""
        return [stage for stage in self.execution.stages if stage.parent_stage_id == self.id]

    def before_stages(self) -> list[StageExecution]:
        """Get synthetic stages that run before this stage's tasks."""
        return [
            stage
            for stage in self.synthetic_stages()
            if stage.synthetic_stage_owner == SyntheticStageOwner.STAGE_BEFORE
        ]

    def after_stages(self) -> list[StageExecution]:
        """Get synthetic stages that run after this stage completes."""
        return [
            stage for stage in self.synthetic_stages() if stage.synthetic_stage_owner == SyntheticStageOwner.STAGE_AFTER
        ]

    def first_before_stages(self) -> list[StageExecution]:
        """Get initial before stages (those with no dependencies among before stages)."""
        return [stage for stage in self.before_stages() if stage.is_initial()]

    def first_after_stages(self) -> list[StageExecution]:
        """Get initial after stages (those with no dependencies among after stages)."""
        return [stage for stage in self.after_stages() if stage.is_initial()]

    def parent(self) -> StageExecution:
        """
        Get the parent stage for this synthetic stage.

        Raises:
            ValueError: If this is not a synthetic stage, or if the
                parent_stage_id does not match any stage in the execution.
        """
        if self.parent_stage_id is None:
            raise ValueError("Not a synthetic stage")
        for stage in self.execution.stages:
            if stage.id == self.parent_stage_id:
                return stage
        raise ValueError(f"Parent stage {self.parent_stage_id} not found")

    def is_synthetic(self) -> bool:
        """Check if this is a synthetic stage."""
        return self.parent_stage_id is not None

    # ========== Task Methods ==========

    def first_task(self) -> TaskExecution | None:
        """Get the first task in this stage, or None if there are no tasks."""
        return self.tasks[0] if self.tasks else None

    def next_task(self, task: TaskExecution) -> TaskExecution | None:
        """Get the task that follows the given task, or None at the end."""
        if task.is_stage_end:
            return None
        try:
            # NOTE(review): list.index relies on TaskExecution equality; if two
            # tasks compare equal this returns the successor of the first match.
            index = self.tasks.index(task)
            return self.tasks[index + 1]
        except (ValueError, IndexError):
            # Task not found, or it is already the last task in the list.
            return None

    def has_tasks(self) -> bool:
        """Check if this stage has any tasks."""
        return len(self.tasks) > 0

    # ========== Status Methods ==========

    def determine_status(self) -> WorkflowStatus:
        """Determine the stage status based on synthetic stages and tasks.

        Child statuses are checked in priority order: a single TERMINAL /
        STOPPED / CANCELED / FAILED_CONTINUE child dominates the result;
        only when every child is SUCCEEDED or SKIPPED does the stage count
        as SUCCEEDED.
        """
        synthetic_statuses = [s.status for s in self.synthetic_stages()]
        task_statuses = [t.status for t in self.tasks]
        all_statuses = synthetic_statuses + task_statuses
        after_stage_statuses = [s.status for s in self.after_stages()]

        # No tasks and no synthetic children: nothing has run yet.
        if not all_statuses:
            return WorkflowStatus.NOT_STARTED

        if WorkflowStatus.TERMINAL in all_statuses:
            # failure_status() maps TERMINAL to FAILED_CONTINUE / STOPPED /
            # TERMINAL depending on the stage's failure configuration.
            return self.failure_status()
        if WorkflowStatus.STOPPED in all_statuses:
            return WorkflowStatus.STOPPED
        if WorkflowStatus.CANCELED in all_statuses:
            return WorkflowStatus.CANCELED
        if WorkflowStatus.FAILED_CONTINUE in all_statuses:
            return WorkflowStatus.FAILED_CONTINUE
        if all(s in {WorkflowStatus.SUCCEEDED, WorkflowStatus.SKIPPED} for s in all_statuses):
            return WorkflowStatus.SUCCEEDED
        # An unstarted after-stage means the stage is still in flight.
        if WorkflowStatus.NOT_STARTED in after_stage_statuses:
            return WorkflowStatus.RUNNING

        # NOTE(review): mixes that reach this point (e.g. a RUNNING or
        # NOT_STARTED task with no pending after-stages) fall through to
        # TERMINAL — confirm this fallback is intended rather than RUNNING.
        return WorkflowStatus.TERMINAL

    def failure_status(self, default: WorkflowStatus = WorkflowStatus.TERMINAL) -> WorkflowStatus:
        """Get the appropriate failure status based on stage configuration.

        Precedence: continuePipelineOnFailure wins over failPipeline; when
        neither applies the stage maps failure to STOPPED.
        """
        if self.continue_pipeline_on_failure:
            return WorkflowStatus.FAILED_CONTINUE
        if self.should_fail_pipeline():
            return default
        return WorkflowStatus.STOPPED

    @property
    def continue_pipeline_on_failure(self) -> bool:
        """Check if pipeline should continue on stage failure (context flag, default False)."""
        return bool(self.context.get("continuePipelineOnFailure", False))

    def should_fail_pipeline(self) -> bool:
        """Check if stage failure should fail the pipeline (context flag, default True)."""
        return bool(self.context.get("failPipeline", True))

    @property
    def allow_sibling_stages_to_continue_on_failure(self) -> bool:
        """Check if sibling stages can continue on this stage's failure (context flag, default False)."""
        return bool(self.context.get("allowSiblingStagesToContinueOnFailure", False))

    # ========== Ancestor Traversal ==========

    def ancestors(self) -> list[StageExecution]:
        """
        Get all ancestor stages in dependency order.

        Includes requisite stages and parent stages; does not include self.
        Each stage is appended before its own ancestors are visited, so
        nearer ancestors appear earlier in the result.
        """
        visited: set[str] = set()
        result: list[StageExecution] = []

        def visit(stage: StageExecution) -> None:
            if stage.id in visited:
                return
            visited.add(stage.id)
            result.append(stage)

            # Visit requisite stages
            for upstream in stage.upstream_stages():
                visit(upstream)

            # Visit parent stage; a dangling parent_stage_id is ignored.
            if stage.parent_stage_id:
                try:
                    visit(stage.parent())
                except ValueError:
                    pass

        # Start with upstream stages (not self)
        for upstream in self.upstream_stages():
            visit(upstream)
        if self.parent_stage_id:
            try:
                visit(self.parent())
            except ValueError:
                pass

        return result

    # ========== Factory Methods ==========

    @classmethod
    def create(
        cls,
        type: str,
        name: str,
        ref_id: str,
        context: dict[str, Any] | None = None,
        requisite_stage_ref_ids: set[str] | None = None,
    ) -> StageExecution:
        """
        Factory method to create a new stage execution.

        Args:
            type: Stage type
            name: Human-readable name
            ref_id: Reference ID for DAG relationships
            context: Initial context/parameters
            requisite_stage_ref_ids: Dependencies (empty = initial stage)

        Returns:
            A new StageExecution instance
        """
        return cls(
            type=type,
            name=name,
            ref_id=ref_id,
            context=context or {},
            requisite_stage_ref_ids=requisite_stage_ref_ids or set(),
        )

    @classmethod
    def create_synthetic(
        cls,
        type: str,
        name: str,
        parent: StageExecution,
        owner: SyntheticStageOwner,
        context: dict[str, Any] | None = None,
    ) -> StageExecution:
        """
        Factory method to create a synthetic stage.

        Args:
            type: Stage type
            name: Human-readable name
            parent: Parent stage
            owner: STAGE_BEFORE or STAGE_AFTER
            context: Initial context/parameters

        Returns:
            A new synthetic StageExecution, attached to the parent's
            execution when the parent has one.
        """
        import ulid

        stage = cls(
            type=type,
            name=name,
            ref_id=str(ulid.new()),  # Synthetic stages get unique ref_ids
            context=context or {},
            parent_stage_id=parent.id,
            synthetic_stage_owner=owner,
        )
        if parent.has_execution():
            stage.execution = parent.execution
        return stage
@@ -0,0 +1,146 @@
1
+ """
2
+ WorkflowStatus enum.
3
+
4
+ This enum represents all possible states for executions, stages, and tasks.
5
+ Each status has two boolean properties:
6
+ - complete: Whether the entity has finished its work (successfully or not)
7
+ - halt: Whether downstream execution should be blocked
8
+ """
9
+
10
+ from enum import Enum
11
+
12
+
13
class WorkflowStatus(Enum):
    """
    Execution status for workflows, stages, and tasks.

    Each member's value is a ``(name, complete, halt)`` tuple, where
    *complete* marks that the entity finished its work (successfully or
    not) and *halt* marks that nothing downstream may run.
    """

    # Execution has not begun yet.
    NOT_STARTED = ("NOT_STARTED", False, False)

    # Actively executing; may be re-executed to continue.
    RUNNING = ("RUNNING", False, False)

    # Paused; may be resumed to continue.
    PAUSED = ("PAUSED", False, False)

    # The task finished but the pipeline should stop pending a trigger.
    SUSPENDED = ("SUSPENDED", False, False)

    # Executed successfully; the pipeline may proceed.
    SUCCEEDED = ("SUCCEEDED", True, False)

    # Failed, but the pipeline may proceed to the next task.
    FAILED_CONTINUE = ("FAILED_CONTINUE", True, False)

    # Failed terminally - the pipeline will not progress further.
    TERMINAL = ("TERMINAL", True, True)

    # Canceled - the pipeline will not progress further.
    CANCELED = ("CANCELED", True, True)

    # Completed, but signals that a decision path should be followed.
    REDIRECT = ("REDIRECT", False, False)

    # Stopped - the pipeline will not progress further.
    STOPPED = ("STOPPED", True, True)

    # Skipped; the pipeline proceeds to the next task.
    SKIPPED = ("SKIPPED", True, False)

    # Queued; must transition to NOT_STARTED before running.
    BUFFERED = ("BUFFERED", False, False)

    def __init__(self, label: str, complete: bool, halt: bool) -> None:
        # Enum calls __init__ with the unpacked value tuple for each member.
        self._name = label
        self._complete = complete
        self._halt = halt

    @property
    def is_complete(self) -> bool:
        """True once the entity has finished its work, successfully or not.

        True for: CANCELED, SUCCEEDED, STOPPED, SKIPPED, TERMINAL,
        FAILED_CONTINUE.
        """
        return self._complete

    @property
    def is_halt(self) -> bool:
        """True for abnormal completions - nothing downstream should run.

        True for: TERMINAL, CANCELED, STOPPED.
        """
        return self._halt

    @property
    def is_successful(self) -> bool:
        """True when this status counts as a successful completion."""
        return self in _SUCCESSFUL_STATUSES

    @property
    def is_failure(self) -> bool:
        """True when this status counts as a failure."""
        return self in _FAILURE_STATUSES

    @property
    def is_skipped(self) -> bool:
        """True only for the SKIPPED status."""
        return self is WorkflowStatus.SKIPPED

    def __str__(self) -> str:
        return self._name

    def __repr__(self) -> str:
        return f"WorkflowStatus.{self.name}"
99
+
100
+
101
# Status sets for quick membership testing (matching Orca's ImmutableSets)

# Every status whose work is finished, whether or not it succeeded
# (exactly the statuses with is_complete == True).
COMPLETED_STATUSES: frozenset[WorkflowStatus] = frozenset(
    {
        WorkflowStatus.CANCELED,
        WorkflowStatus.SUCCEEDED,
        WorkflowStatus.STOPPED,
        WorkflowStatus.SKIPPED,
        WorkflowStatus.TERMINAL,
        WorkflowStatus.FAILED_CONTINUE,
    }
)

# Backs WorkflowStatus.is_successful.
# NOTE(review): STOPPED is listed both here and in _FAILURE_STATUSES,
# mirroring Orca's sets - confirm the overlap is intended.
_SUCCESSFUL_STATUSES: frozenset[WorkflowStatus] = frozenset(
    {
        WorkflowStatus.SUCCEEDED,
        WorkflowStatus.STOPPED,
        WorkflowStatus.SKIPPED,
    }
)

# Backs WorkflowStatus.is_failure.
_FAILURE_STATUSES: frozenset[WorkflowStatus] = frozenset(
    {
        WorkflowStatus.TERMINAL,
        WorkflowStatus.STOPPED,
        WorkflowStatus.FAILED_CONTINUE,
    }
)

# Statuses that allow downstream stages to continue
# (used by StageExecution.all_upstream_stages_complete).
CONTINUABLE_STATUSES: frozenset[WorkflowStatus] = frozenset(
    {
        WorkflowStatus.SUCCEEDED,
        WorkflowStatus.FAILED_CONTINUE,
        WorkflowStatus.SKIPPED,
    }
)

# Statuses that indicate the entity is still actively processing.
# Note: REDIRECT and BUFFERED are also incomplete but are not listed here.
ACTIVE_STATUSES: frozenset[WorkflowStatus] = frozenset(
    {
        WorkflowStatus.NOT_STARTED,
        WorkflowStatus.RUNNING,
        WorkflowStatus.PAUSED,
        WorkflowStatus.SUSPENDED,
    }
)
@@ -0,0 +1,125 @@
1
+ """
2
+ TaskExecution model.
3
+
4
+ A task is the smallest unit of work within a stage. Each stage contains
5
+ one or more tasks that execute sequentially.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from dataclasses import dataclass, field
11
+ from typing import TYPE_CHECKING, Any
12
+
13
+ from stabilize.models.status import WorkflowStatus
14
+
15
+ if TYPE_CHECKING:
16
+ from stabilize.models.stage import StageExecution
17
+
18
+
19
def _generate_task_id() -> str:
    """Mint a fresh ULID string to serve as a task identifier."""
    # Imported lazily so the (third-party) ulid dependency is only paid for
    # when an identifier is actually generated.
    import ulid

    return str(ulid.new())
24
+
25
+
26
@dataclass
class TaskExecution:
    """
    A single unit of work inside a stage.

    Tasks are the atomic units of a pipeline. A stage runs its tasks one
    after another, and each task reports a result that may carry:
    - Status updates
    - Context modifications (stage-scoped)
    - Output values (pipeline-scoped)

    Attributes:
        id: Unique identifier for this task execution
        name: Human-readable name for this task
        implementing_class: Fully qualified class name of the task implementation
        status: Current execution status
        start_time: Epoch milliseconds when task started
        end_time: Epoch milliseconds when task completed
        stage_start: True if this is the first task in the stage
        stage_end: True if this is the last task in the stage
        loop_start: True if this task starts a loop
        loop_end: True if this task ends a loop
        task_exception_details: Exception information if task failed
    """

    id: str = field(default_factory=_generate_task_id)
    name: str = ""
    implementing_class: str = ""
    status: WorkflowStatus = WorkflowStatus.NOT_STARTED
    start_time: int | None = None
    end_time: int | None = None
    stage_start: bool = False
    stage_end: bool = False
    loop_start: bool = False
    loop_end: bool = False
    task_exception_details: dict[str, Any] = field(default_factory=dict)

    # Back-reference to the owning stage; wired up after construction.
    _stage: StageExecution | None = field(default=None, repr=False)

    @property
    def stage(self) -> StageExecution | None:
        """The stage that owns this task, or None when not yet attached."""
        return self._stage

    @stage.setter
    def stage(self, value: StageExecution) -> None:
        """Attach this task to its owning stage."""
        self._stage = value

    @property
    def is_stage_start(self) -> bool:
        """Whether this task opens its stage."""
        return self.stage_start

    @property
    def is_stage_end(self) -> bool:
        """Whether this task closes its stage."""
        return self.stage_end

    @property
    def is_loop_start(self) -> bool:
        """Whether this task opens a loop."""
        return self.loop_start

    @property
    def is_loop_end(self) -> bool:
        """Whether this task closes a loop."""
        return self.loop_end

    def set_exception_details(self, exception: dict[str, Any]) -> None:
        """Record failure details for this task under the "exception" key."""
        self.task_exception_details["exception"] = exception

    @classmethod
    def create(
        cls,
        name: str,
        implementing_class: str,
        stage_start: bool = False,
        stage_end: bool = False,
    ) -> TaskExecution:
        """
        Build a new task execution.

        Args:
            name: Human-readable task name
            implementing_class: Class name or callable reference for task
            stage_start: Whether this is the first task
            stage_end: Whether this is the last task

        Returns:
            A freshly constructed TaskExecution instance
        """
        return cls(
            name=name,
            implementing_class=implementing_class,
            stage_start=stage_start,
            stage_end=stage_end,
        )
+ )