pyworkflow_engine-0.1.7-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. dashboard/backend/app/__init__.py +1 -0
  2. dashboard/backend/app/config.py +32 -0
  3. dashboard/backend/app/controllers/__init__.py +6 -0
  4. dashboard/backend/app/controllers/run_controller.py +86 -0
  5. dashboard/backend/app/controllers/workflow_controller.py +33 -0
  6. dashboard/backend/app/dependencies/__init__.py +5 -0
  7. dashboard/backend/app/dependencies/storage.py +50 -0
  8. dashboard/backend/app/repositories/__init__.py +6 -0
  9. dashboard/backend/app/repositories/run_repository.py +80 -0
  10. dashboard/backend/app/repositories/workflow_repository.py +27 -0
  11. dashboard/backend/app/rest/__init__.py +8 -0
  12. dashboard/backend/app/rest/v1/__init__.py +12 -0
  13. dashboard/backend/app/rest/v1/health.py +33 -0
  14. dashboard/backend/app/rest/v1/runs.py +133 -0
  15. dashboard/backend/app/rest/v1/workflows.py +41 -0
  16. dashboard/backend/app/schemas/__init__.py +23 -0
  17. dashboard/backend/app/schemas/common.py +16 -0
  18. dashboard/backend/app/schemas/event.py +24 -0
  19. dashboard/backend/app/schemas/hook.py +25 -0
  20. dashboard/backend/app/schemas/run.py +54 -0
  21. dashboard/backend/app/schemas/step.py +28 -0
  22. dashboard/backend/app/schemas/workflow.py +31 -0
  23. dashboard/backend/app/server.py +87 -0
  24. dashboard/backend/app/services/__init__.py +6 -0
  25. dashboard/backend/app/services/run_service.py +240 -0
  26. dashboard/backend/app/services/workflow_service.py +155 -0
  27. dashboard/backend/main.py +18 -0
  28. docs/concepts/cancellation.mdx +362 -0
  29. docs/concepts/continue-as-new.mdx +434 -0
  30. docs/concepts/events.mdx +266 -0
  31. docs/concepts/fault-tolerance.mdx +370 -0
  32. docs/concepts/hooks.mdx +552 -0
  33. docs/concepts/limitations.mdx +167 -0
  34. docs/concepts/schedules.mdx +775 -0
  35. docs/concepts/sleep.mdx +312 -0
  36. docs/concepts/steps.mdx +301 -0
  37. docs/concepts/workflows.mdx +255 -0
  38. docs/guides/cli.mdx +942 -0
  39. docs/guides/configuration.mdx +560 -0
  40. docs/introduction.mdx +155 -0
  41. docs/quickstart.mdx +279 -0
  42. examples/__init__.py +1 -0
  43. examples/celery/__init__.py +1 -0
  44. examples/celery/durable/docker-compose.yml +55 -0
  45. examples/celery/durable/pyworkflow.config.yaml +12 -0
  46. examples/celery/durable/workflows/__init__.py +122 -0
  47. examples/celery/durable/workflows/basic.py +87 -0
  48. examples/celery/durable/workflows/batch_processing.py +102 -0
  49. examples/celery/durable/workflows/cancellation.py +273 -0
  50. examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
  51. examples/celery/durable/workflows/child_workflows.py +202 -0
  52. examples/celery/durable/workflows/continue_as_new.py +260 -0
  53. examples/celery/durable/workflows/fault_tolerance.py +210 -0
  54. examples/celery/durable/workflows/hooks.py +211 -0
  55. examples/celery/durable/workflows/idempotency.py +112 -0
  56. examples/celery/durable/workflows/long_running.py +99 -0
  57. examples/celery/durable/workflows/retries.py +101 -0
  58. examples/celery/durable/workflows/schedules.py +209 -0
  59. examples/celery/transient/01_basic_workflow.py +91 -0
  60. examples/celery/transient/02_fault_tolerance.py +257 -0
  61. examples/celery/transient/__init__.py +20 -0
  62. examples/celery/transient/pyworkflow.config.yaml +25 -0
  63. examples/local/__init__.py +1 -0
  64. examples/local/durable/01_basic_workflow.py +94 -0
  65. examples/local/durable/02_file_storage.py +132 -0
  66. examples/local/durable/03_retries.py +169 -0
  67. examples/local/durable/04_long_running.py +119 -0
  68. examples/local/durable/05_event_log.py +145 -0
  69. examples/local/durable/06_idempotency.py +148 -0
  70. examples/local/durable/07_hooks.py +334 -0
  71. examples/local/durable/08_cancellation.py +233 -0
  72. examples/local/durable/09_child_workflows.py +198 -0
  73. examples/local/durable/10_child_workflow_patterns.py +265 -0
  74. examples/local/durable/11_continue_as_new.py +249 -0
  75. examples/local/durable/12_schedules.py +198 -0
  76. examples/local/durable/__init__.py +1 -0
  77. examples/local/transient/01_quick_tasks.py +87 -0
  78. examples/local/transient/02_retries.py +130 -0
  79. examples/local/transient/03_sleep.py +141 -0
  80. examples/local/transient/__init__.py +1 -0
  81. pyworkflow/__init__.py +256 -0
  82. pyworkflow/aws/__init__.py +68 -0
  83. pyworkflow/aws/context.py +234 -0
  84. pyworkflow/aws/handler.py +184 -0
  85. pyworkflow/aws/testing.py +310 -0
  86. pyworkflow/celery/__init__.py +41 -0
  87. pyworkflow/celery/app.py +198 -0
  88. pyworkflow/celery/scheduler.py +315 -0
  89. pyworkflow/celery/tasks.py +1746 -0
  90. pyworkflow/cli/__init__.py +132 -0
  91. pyworkflow/cli/__main__.py +6 -0
  92. pyworkflow/cli/commands/__init__.py +1 -0
  93. pyworkflow/cli/commands/hooks.py +640 -0
  94. pyworkflow/cli/commands/quickstart.py +495 -0
  95. pyworkflow/cli/commands/runs.py +773 -0
  96. pyworkflow/cli/commands/scheduler.py +130 -0
  97. pyworkflow/cli/commands/schedules.py +794 -0
  98. pyworkflow/cli/commands/setup.py +703 -0
  99. pyworkflow/cli/commands/worker.py +413 -0
  100. pyworkflow/cli/commands/workflows.py +1257 -0
  101. pyworkflow/cli/output/__init__.py +1 -0
  102. pyworkflow/cli/output/formatters.py +321 -0
  103. pyworkflow/cli/output/styles.py +121 -0
  104. pyworkflow/cli/utils/__init__.py +1 -0
  105. pyworkflow/cli/utils/async_helpers.py +30 -0
  106. pyworkflow/cli/utils/config.py +130 -0
  107. pyworkflow/cli/utils/config_generator.py +344 -0
  108. pyworkflow/cli/utils/discovery.py +53 -0
  109. pyworkflow/cli/utils/docker_manager.py +651 -0
  110. pyworkflow/cli/utils/interactive.py +364 -0
  111. pyworkflow/cli/utils/storage.py +115 -0
  112. pyworkflow/config.py +329 -0
  113. pyworkflow/context/__init__.py +63 -0
  114. pyworkflow/context/aws.py +230 -0
  115. pyworkflow/context/base.py +416 -0
  116. pyworkflow/context/local.py +930 -0
  117. pyworkflow/context/mock.py +381 -0
  118. pyworkflow/core/__init__.py +0 -0
  119. pyworkflow/core/exceptions.py +353 -0
  120. pyworkflow/core/registry.py +313 -0
  121. pyworkflow/core/scheduled.py +328 -0
  122. pyworkflow/core/step.py +494 -0
  123. pyworkflow/core/workflow.py +294 -0
  124. pyworkflow/discovery.py +248 -0
  125. pyworkflow/engine/__init__.py +0 -0
  126. pyworkflow/engine/events.py +879 -0
  127. pyworkflow/engine/executor.py +682 -0
  128. pyworkflow/engine/replay.py +273 -0
  129. pyworkflow/observability/__init__.py +19 -0
  130. pyworkflow/observability/logging.py +234 -0
  131. pyworkflow/primitives/__init__.py +33 -0
  132. pyworkflow/primitives/child_handle.py +174 -0
  133. pyworkflow/primitives/child_workflow.py +372 -0
  134. pyworkflow/primitives/continue_as_new.py +101 -0
  135. pyworkflow/primitives/define_hook.py +150 -0
  136. pyworkflow/primitives/hooks.py +97 -0
  137. pyworkflow/primitives/resume_hook.py +210 -0
  138. pyworkflow/primitives/schedule.py +545 -0
  139. pyworkflow/primitives/shield.py +96 -0
  140. pyworkflow/primitives/sleep.py +100 -0
  141. pyworkflow/runtime/__init__.py +21 -0
  142. pyworkflow/runtime/base.py +179 -0
  143. pyworkflow/runtime/celery.py +310 -0
  144. pyworkflow/runtime/factory.py +101 -0
  145. pyworkflow/runtime/local.py +706 -0
  146. pyworkflow/scheduler/__init__.py +9 -0
  147. pyworkflow/scheduler/local.py +248 -0
  148. pyworkflow/serialization/__init__.py +0 -0
  149. pyworkflow/serialization/decoder.py +146 -0
  150. pyworkflow/serialization/encoder.py +162 -0
  151. pyworkflow/storage/__init__.py +54 -0
  152. pyworkflow/storage/base.py +612 -0
  153. pyworkflow/storage/config.py +185 -0
  154. pyworkflow/storage/dynamodb.py +1315 -0
  155. pyworkflow/storage/file.py +827 -0
  156. pyworkflow/storage/memory.py +549 -0
  157. pyworkflow/storage/postgres.py +1161 -0
  158. pyworkflow/storage/schemas.py +486 -0
  159. pyworkflow/storage/sqlite.py +1136 -0
  160. pyworkflow/utils/__init__.py +0 -0
  161. pyworkflow/utils/duration.py +177 -0
  162. pyworkflow/utils/schedule.py +391 -0
  163. pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
  164. pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
  165. pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
  166. pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
  167. pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
  168. pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
  169. tests/examples/__init__.py +0 -0
  170. tests/integration/__init__.py +0 -0
  171. tests/integration/test_cancellation.py +330 -0
  172. tests/integration/test_child_workflows.py +439 -0
  173. tests/integration/test_continue_as_new.py +428 -0
  174. tests/integration/test_dynamodb_storage.py +1146 -0
  175. tests/integration/test_fault_tolerance.py +369 -0
  176. tests/integration/test_schedule_storage.py +484 -0
  177. tests/unit/__init__.py +0 -0
  178. tests/unit/backends/__init__.py +1 -0
  179. tests/unit/backends/test_dynamodb_storage.py +1554 -0
  180. tests/unit/backends/test_postgres_storage.py +1281 -0
  181. tests/unit/backends/test_sqlite_storage.py +1460 -0
  182. tests/unit/conftest.py +41 -0
  183. tests/unit/test_cancellation.py +364 -0
  184. tests/unit/test_child_workflows.py +680 -0
  185. tests/unit/test_continue_as_new.py +441 -0
  186. tests/unit/test_event_limits.py +316 -0
  187. tests/unit/test_executor.py +320 -0
  188. tests/unit/test_fault_tolerance.py +334 -0
  189. tests/unit/test_hooks.py +495 -0
  190. tests/unit/test_registry.py +261 -0
  191. tests/unit/test_replay.py +420 -0
  192. tests/unit/test_schedule_schemas.py +285 -0
  193. tests/unit/test_schedule_utils.py +286 -0
  194. tests/unit/test_scheduled_workflow.py +274 -0
  195. tests/unit/test_step.py +353 -0
  196. tests/unit/test_workflow.py +243 -0
pyworkflow/storage/schemas.py
@@ -0,0 +1,486 @@
+"""
+Data models for workflow runs, steps, hooks, and related entities.
+
+These schemas define the structure of data stored in various storage backends.
+"""
+
+from dataclasses import dataclass, field
+from datetime import UTC, datetime
+from enum import Enum
+from typing import Any
+
+
+class RunStatus(Enum):
+    """Workflow run execution status."""
+
+    PENDING = "pending"
+    RUNNING = "running"
+    SUSPENDED = "suspended"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    INTERRUPTED = "interrupted"  # Recoverable infrastructure failure (worker loss)
+    CANCELLED = "cancelled"
+    CONTINUED_AS_NEW = "continued_as_new"  # Workflow continued with fresh history
+
+
+class StepStatus(Enum):
+    """Step execution status."""
+
+    PENDING = "pending"
+    RUNNING = "running"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    RETRYING = "retrying"
+    CANCELLED = "cancelled"
+
+
+class HookStatus(Enum):
+    """Hook/webhook status."""
+
+    PENDING = "pending"
+    RECEIVED = "received"
+    EXPIRED = "expired"
+    DISPOSED = "disposed"
+
+
+class OverlapPolicy(Enum):
+    """How to handle overlapping schedule executions."""
+
+    SKIP = "skip"  # Skip if previous run still active
+    BUFFER_ONE = "buffer_one"  # Buffer at most one pending execution
+    BUFFER_ALL = "buffer_all"  # Buffer all pending executions
+    CANCEL_OTHER = "cancel_other"  # Cancel previous run and start new
+    ALLOW_ALL = "allow_all"  # Allow concurrent executions
+
+
+class ScheduleStatus(Enum):
+    """Schedule lifecycle status."""
+
+    ACTIVE = "active"
+    PAUSED = "paused"
+    DELETED = "deleted"
+
+
+@dataclass
+class WorkflowRun:
+    """
+    Represents a workflow execution run.
+
+    This is the primary entity tracking workflow execution state.
+    """
+
+    run_id: str
+    workflow_name: str
+    status: RunStatus
+    created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+    updated_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+    started_at: datetime | None = None
+    completed_at: datetime | None = None
+
+    # Input/output
+    input_args: str = "{}"  # JSON serialized list
+    input_kwargs: str = "{}"  # JSON serialized dict
+    result: str | None = None  # JSON serialized result
+    error: str | None = None  # Error message if failed
+
+    # Configuration
+    idempotency_key: str | None = None
+    max_duration: str | None = None  # e.g., "1h", "30m"
+    metadata: dict[str, Any] = field(default_factory=dict)
+
+    # Recovery tracking for fault tolerance
+    recovery_attempts: int = 0  # Number of recovery attempts after worker failures
+    max_recovery_attempts: int = 3  # Maximum recovery attempts allowed
+    recover_on_worker_loss: bool = True  # Whether to auto-recover on worker failure
+
+    # Child workflow tracking
+    parent_run_id: str | None = None  # Link to parent workflow (None if root)
+    nesting_depth: int = 0  # 0=root, 1=child, 2=grandchild (max 3)
+
+    # Continue-as-new chain tracking
+    continued_from_run_id: str | None = None  # Previous run in chain
+    continued_to_run_id: str | None = None  # Next run in chain
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "run_id": self.run_id,
+            "workflow_name": self.workflow_name,
+            "status": self.status.value,
+            "created_at": self.created_at.isoformat(),
+            "updated_at": self.updated_at.isoformat(),
+            "started_at": self.started_at.isoformat() if self.started_at else None,
+            "completed_at": self.completed_at.isoformat() if self.completed_at else None,
+            "input_args": self.input_args,
+            "input_kwargs": self.input_kwargs,
+            "result": self.result,
+            "error": self.error,
+            "idempotency_key": self.idempotency_key,
+            "max_duration": self.max_duration,
+            "metadata": self.metadata,
+            "recovery_attempts": self.recovery_attempts,
+            "max_recovery_attempts": self.max_recovery_attempts,
+            "recover_on_worker_loss": self.recover_on_worker_loss,
+            "parent_run_id": self.parent_run_id,
+            "nesting_depth": self.nesting_depth,
+            "continued_from_run_id": self.continued_from_run_id,
+            "continued_to_run_id": self.continued_to_run_id,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "WorkflowRun":
+        """Create from dictionary."""
+        return cls(
+            run_id=data["run_id"],
+            workflow_name=data["workflow_name"],
+            status=RunStatus(data["status"]),
+            created_at=datetime.fromisoformat(data["created_at"]),
+            updated_at=datetime.fromisoformat(data["updated_at"]),
+            started_at=(
+                datetime.fromisoformat(data["started_at"]) if data.get("started_at") else None
+            ),
+            completed_at=(
+                datetime.fromisoformat(data["completed_at"]) if data.get("completed_at") else None
+            ),
+            input_args=data.get("input_args", "{}"),
+            input_kwargs=data.get("input_kwargs", "{}"),
+            result=data.get("result"),
+            error=data.get("error"),
+            idempotency_key=data.get("idempotency_key"),
+            max_duration=data.get("max_duration"),
+            metadata=data.get("metadata", {}),
+            recovery_attempts=data.get("recovery_attempts", 0),
+            max_recovery_attempts=data.get("max_recovery_attempts", 3),
+            recover_on_worker_loss=data.get("recover_on_worker_loss", True),
+            parent_run_id=data.get("parent_run_id"),
+            nesting_depth=data.get("nesting_depth", 0),
+            continued_from_run_id=data.get("continued_from_run_id"),
+            continued_to_run_id=data.get("continued_to_run_id"),
+        )
+
+
+@dataclass
+class StepExecution:
+    """
+    Represents a step execution within a workflow.
+
+    Steps are isolated units of work that can be retried independently.
+    """
+
+    step_id: str
+    run_id: str
+    step_name: str
+    status: StepStatus
+
+    # Execution tracking
+    attempt: int = 1
+    max_retries: int = 3
+    created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+    updated_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+    started_at: datetime | None = None
+    completed_at: datetime | None = None
+
+    # Input/output
+    input_args: str = "{}"  # JSON serialized list
+    input_kwargs: str = "{}"  # JSON serialized dict
+    result: str | None = None  # JSON serialized result
+    error: str | None = None  # Error message if failed
+
+    # Retry configuration
+    retry_after: datetime | None = None
+    retry_delay: str | None = None  # e.g., "exponential", "10s"
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "step_id": self.step_id,
+            "run_id": self.run_id,
+            "step_name": self.step_name,
+            "status": self.status.value,
+            "attempt": self.attempt,
+            "max_retries": self.max_retries,
+            "created_at": self.created_at.isoformat(),
+            "updated_at": self.updated_at.isoformat(),
+            "started_at": self.started_at.isoformat() if self.started_at else None,
+            "completed_at": self.completed_at.isoformat() if self.completed_at else None,
+            "input_args": self.input_args,
+            "input_kwargs": self.input_kwargs,
+            "result": self.result,
+            "error": self.error,
+            "retry_after": self.retry_after.isoformat() if self.retry_after else None,
+            "retry_delay": self.retry_delay,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "StepExecution":
+        """Create from dictionary."""
+        return cls(
+            step_id=data["step_id"],
+            run_id=data["run_id"],
+            step_name=data["step_name"],
+            status=StepStatus(data["status"]),
+            attempt=data.get("attempt", 1),
+            max_retries=data.get("max_retries", 3),
+            created_at=datetime.fromisoformat(data["created_at"]),
+            updated_at=datetime.fromisoformat(data["updated_at"]),
+            started_at=(
+                datetime.fromisoformat(data["started_at"]) if data.get("started_at") else None
+            ),
+            completed_at=(
+                datetime.fromisoformat(data["completed_at"]) if data.get("completed_at") else None
+            ),
+            input_args=data.get("input_args", "{}"),
+            input_kwargs=data.get("input_kwargs", "{}"),
+            result=data.get("result"),
+            error=data.get("error"),
+            retry_after=(
+                datetime.fromisoformat(data["retry_after"]) if data.get("retry_after") else None
+            ),
+            retry_delay=data.get("retry_delay"),
+        )
+
+
+@dataclass
+class Hook:
+    """
+    Represents a webhook/hook for external event integration.
+
+    Hooks allow workflows to suspend and wait for external data.
+    """
+
+    hook_id: str
+    run_id: str
+    token: str
+    url: str = ""  # Optional webhook URL
+    status: HookStatus = HookStatus.PENDING
+
+    # Timestamps
+    created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+    received_at: datetime | None = None
+    expires_at: datetime | None = None
+
+    # Data
+    payload: str | None = None  # JSON serialized payload from webhook
+    name: str | None = None  # Optional human-readable name
+    payload_schema: str | None = None  # JSON schema for payload validation (from Pydantic)
+
+    # Metadata
+    metadata: dict[str, Any] = field(default_factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "hook_id": self.hook_id,
+            "run_id": self.run_id,
+            "url": self.url,
+            "token": self.token,
+            "status": self.status.value,
+            "created_at": self.created_at.isoformat(),
+            "received_at": self.received_at.isoformat() if self.received_at else None,
+            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
+            "payload": self.payload,
+            "name": self.name,
+            "payload_schema": self.payload_schema,
+            "metadata": self.metadata,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "Hook":
+        """Create from dictionary."""
+        return cls(
+            hook_id=data["hook_id"],
+            run_id=data["run_id"],
+            token=data["token"],
+            url=data.get("url", ""),
+            status=HookStatus(data["status"]),
+            created_at=datetime.fromisoformat(data["created_at"]),
+            received_at=(
+                datetime.fromisoformat(data["received_at"]) if data.get("received_at") else None
+            ),
+            expires_at=(
+                datetime.fromisoformat(data["expires_at"]) if data.get("expires_at") else None
+            ),
+            payload=data.get("payload"),
+            name=data.get("name"),
+            payload_schema=data.get("payload_schema"),
+            metadata=data.get("metadata", {}),
+        )
+
+
+@dataclass
+class CalendarSpec:
+    """
+    Specification for calendar-based scheduling.
+
+    Defines specific times when a schedule should trigger based on
+    calendar components (hour, minute, day of week, etc.).
+    """
+
+    second: int = 0
+    minute: int = 0
+    hour: int = 0
+    day_of_month: int | None = None  # 1-31
+    month: int | None = None  # 1-12
+    day_of_week: int | None = None  # 0=Monday, 6=Sunday (ISO weekday)
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "second": self.second,
+            "minute": self.minute,
+            "hour": self.hour,
+            "day_of_month": self.day_of_month,
+            "month": self.month,
+            "day_of_week": self.day_of_week,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "CalendarSpec":
+        """Create from dictionary."""
+        return cls(
+            second=data.get("second", 0),
+            minute=data.get("minute", 0),
+            hour=data.get("hour", 0),
+            day_of_month=data.get("day_of_month"),
+            month=data.get("month"),
+            day_of_week=data.get("day_of_week"),
+        )
+
+
+@dataclass
+class ScheduleSpec:
+    """
+    Specification for when a schedule should trigger.
+
+    Supports three types of scheduling:
+    - cron: Standard cron expression (e.g., "0 9 * * *" for 9 AM daily)
+    - interval: Simple interval (e.g., "5m", "1h", "24h")
+    - calendar: List of specific calendar times
+
+    Only one of cron, interval, or calendar should be specified.
+    """
+
+    cron: str | None = None  # Cron expression
+    interval: str | None = None  # Interval string (e.g., "5m", "1h")
+    calendar: list[CalendarSpec] | None = None  # Calendar-based specs
+    timezone: str = "UTC"  # Timezone for schedule
+    start_at: datetime | None = None  # When to start scheduling
+    end_at: datetime | None = None  # When to stop scheduling
+    jitter: str | None = None  # Random delay to add (e.g., "30s")
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "cron": self.cron,
+            "interval": self.interval,
+            "calendar": [c.to_dict() for c in self.calendar] if self.calendar else None,
+            "timezone": self.timezone,
+            "start_at": self.start_at.isoformat() if self.start_at else None,
+            "end_at": self.end_at.isoformat() if self.end_at else None,
+            "jitter": self.jitter,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "ScheduleSpec":
+        """Create from dictionary."""
+        calendar = None
+        if data.get("calendar"):
+            calendar = [CalendarSpec.from_dict(c) for c in data["calendar"]]
+        return cls(
+            cron=data.get("cron"),
+            interval=data.get("interval"),
+            calendar=calendar,
+            timezone=data.get("timezone", "UTC"),
+            start_at=(datetime.fromisoformat(data["start_at"]) if data.get("start_at") else None),
+            end_at=(datetime.fromisoformat(data["end_at"]) if data.get("end_at") else None),
+            jitter=data.get("jitter"),
+        )
+
+
+@dataclass
+class Schedule:
+    """
+    Represents a workflow schedule.
+
+    Schedules define when and how often a workflow should be automatically
+    triggered. They support cron expressions, intervals, and calendar-based
+    scheduling with configurable overlap policies.
+    """
+
+    schedule_id: str
+    workflow_name: str
+    spec: ScheduleSpec
+    status: ScheduleStatus = ScheduleStatus.ACTIVE
+    args: str = "[]"  # JSON serialized list
+    kwargs: str = "{}"  # JSON serialized dict
+    overlap_policy: OverlapPolicy = OverlapPolicy.SKIP
+
+    # Timestamps
+    created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+    updated_at: datetime | None = None
+    last_run_at: datetime | None = None
+    next_run_time: datetime | None = None
+
+    # Execution tracking
+    last_run_id: str | None = None
+    running_run_ids: list[str] = field(default_factory=list)
+    buffered_count: int = 0
+
+    # Statistics
+    total_runs: int = 0
+    successful_runs: int = 0
+    failed_runs: int = 0
+    skipped_runs: int = 0
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "schedule_id": self.schedule_id,
+            "workflow_name": self.workflow_name,
+            "spec": self.spec.to_dict(),
+            "status": self.status.value,
+            "args": self.args,
+            "kwargs": self.kwargs,
+            "overlap_policy": self.overlap_policy.value,
+            "created_at": self.created_at.isoformat(),
+            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
+            "last_run_at": self.last_run_at.isoformat() if self.last_run_at else None,
+            "next_run_time": self.next_run_time.isoformat() if self.next_run_time else None,
+            "last_run_id": self.last_run_id,
+            "running_run_ids": self.running_run_ids,
+            "buffered_count": self.buffered_count,
+            "total_runs": self.total_runs,
+            "successful_runs": self.successful_runs,
+            "failed_runs": self.failed_runs,
+            "skipped_runs": self.skipped_runs,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "Schedule":
+        """Create from dictionary."""
+        return cls(
+            schedule_id=data["schedule_id"],
+            workflow_name=data["workflow_name"],
+            spec=ScheduleSpec.from_dict(data["spec"]),
+            status=ScheduleStatus(data.get("status", "active")),
+            args=data.get("args", "[]"),
+            kwargs=data.get("kwargs", "{}"),
+            overlap_policy=OverlapPolicy(data.get("overlap_policy", "skip")),
+            created_at=datetime.fromisoformat(data["created_at"]),
+            updated_at=(
+                datetime.fromisoformat(data["updated_at"]) if data.get("updated_at") else None
+            ),
+            last_run_at=(
+                datetime.fromisoformat(data["last_run_at"]) if data.get("last_run_at") else None
+            ),
+            next_run_time=(
+                datetime.fromisoformat(data["next_run_time"]) if data.get("next_run_time") else None
+            ),
+            last_run_id=data.get("last_run_id"),
+            running_run_ids=data.get("running_run_ids", []),
+            buffered_count=data.get("buffered_count", 0),
+            total_runs=data.get("total_runs", 0),
+            successful_runs=data.get("successful_runs", 0),
+            failed_runs=data.get("failed_runs", 0),
+            skipped_runs=data.get("skipped_runs", 0),
+        )
+
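
Note: the dataclasses in this file are the records the storage backends persist, and each pairs to_dict() with from_dict() so instances can round-trip through JSON-friendly dictionaries. A minimal usage sketch follows; the import path is taken from the file list above (pyworkflow/storage/schemas.py), and the run IDs, workflow names, and values are purely illustrative.

# Illustrative sketch only: identifiers below are made up; the import path is
# inferred from the package layout shown in the file list.
from pyworkflow.storage.schemas import (
    OverlapPolicy,
    RunStatus,
    Schedule,
    ScheduleSpec,
    WorkflowRun,
)

# A run record, as a backend might store and later restore it.
run = WorkflowRun(
    run_id="run-123",
    workflow_name="order_processing",
    status=RunStatus.PENDING,
    input_kwargs='{"order_id": 42}',  # inputs are kept as JSON strings
)
restored = WorkflowRun.from_dict(run.to_dict())
assert restored.status is RunStatus.PENDING

# A daily cron schedule that skips a tick while the previous run is still active.
schedule = Schedule(
    schedule_id="sched-1",
    workflow_name="order_processing",
    spec=ScheduleSpec(cron="0 9 * * *", timezone="UTC"),
    overlap_policy=OverlapPolicy.SKIP,
)
assert Schedule.from_dict(schedule.to_dict()).spec.cron == "0 9 * * *"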