horsies 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. horsies/__init__.py +115 -0
  2. horsies/core/__init__.py +0 -0
  3. horsies/core/app.py +552 -0
  4. horsies/core/banner.py +144 -0
  5. horsies/core/brokers/__init__.py +5 -0
  6. horsies/core/brokers/listener.py +444 -0
  7. horsies/core/brokers/postgres.py +864 -0
  8. horsies/core/cli.py +624 -0
  9. horsies/core/codec/serde.py +575 -0
  10. horsies/core/errors.py +535 -0
  11. horsies/core/logging.py +90 -0
  12. horsies/core/models/__init__.py +0 -0
  13. horsies/core/models/app.py +268 -0
  14. horsies/core/models/broker.py +79 -0
  15. horsies/core/models/queues.py +23 -0
  16. horsies/core/models/recovery.py +101 -0
  17. horsies/core/models/schedule.py +229 -0
  18. horsies/core/models/task_pg.py +307 -0
  19. horsies/core/models/tasks.py +332 -0
  20. horsies/core/models/workflow.py +1988 -0
  21. horsies/core/models/workflow_pg.py +245 -0
  22. horsies/core/registry/tasks.py +101 -0
  23. horsies/core/scheduler/__init__.py +26 -0
  24. horsies/core/scheduler/calculator.py +267 -0
  25. horsies/core/scheduler/service.py +569 -0
  26. horsies/core/scheduler/state.py +260 -0
  27. horsies/core/task_decorator.py +615 -0
  28. horsies/core/types/status.py +38 -0
  29. horsies/core/utils/imports.py +203 -0
  30. horsies/core/utils/loop_runner.py +44 -0
  31. horsies/core/worker/current.py +17 -0
  32. horsies/core/worker/worker.py +1967 -0
  33. horsies/core/workflows/__init__.py +23 -0
  34. horsies/core/workflows/engine.py +2344 -0
  35. horsies/core/workflows/recovery.py +501 -0
  36. horsies/core/workflows/registry.py +97 -0
  37. horsies/py.typed +0 -0
  38. horsies-0.1.0a1.dist-info/METADATA +31 -0
  39. horsies-0.1.0a1.dist-info/RECORD +42 -0
  40. horsies-0.1.0a1.dist-info/WHEEL +5 -0
  41. horsies-0.1.0a1.dist-info/entry_points.txt +2 -0
  42. horsies-0.1.0a1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,615 @@
1
+ # app/core/task_decorator.py
2
+ from __future__ import annotations
3
+ import asyncio
4
+ from typing import (
5
+ Callable,
6
+ get_origin,
7
+ get_type_hints,
8
+ get_args,
9
+ ParamSpec,
10
+ TypeVar,
11
+ Generic,
12
+ Protocol,
13
+ TYPE_CHECKING,
14
+ Optional,
15
+ Any,
16
+ )
17
+ from abc import abstractmethod
18
+ from datetime import datetime, timedelta, timezone
19
+ from pydantic import TypeAdapter, ValidationError
20
+ from horsies.core.codec.serde import serialize_task_options
21
+
22
+ if TYPE_CHECKING:
23
+ from horsies.core.app import Horsies
24
+ from horsies.core.models.tasks import TaskOptions
25
+ from horsies.core.models.tasks import TaskError, TaskResult
26
+
27
+ from horsies.core.models.tasks import TaskResult, TaskError, LibraryErrorCode
28
+ from horsies.core.models.workflow import WorkflowContextMissingIdError
29
+ from horsies.core.errors import TaskDefinitionError, ErrorCode, SourceLocation
30
+
31
+ P = ParamSpec('P')
32
+ T = TypeVar('T')
33
+ E = TypeVar('E')
34
+
35
+
36
def effective_priority(
    app: 'Horsies',
    queue_name: str,
) -> int:
    """Resolve the enqueue priority for ``queue_name`` on ``app``.

    In DEFAULT queue mode every task shares the lowest priority (100).
    Otherwise the priority is read from the matching entry in
    ``app.config.custom_queues``.

    Raises:
        LookupError: if ``queue_name`` is not among the configured custom
            queues. (Previously a bare ``StopIteration`` leaked out of
            ``next()``, which is both unclear to callers and silently
            swallowed if this ever runs inside a generator — PEP 479.)
    """
    if app.config.queue_mode.name == 'DEFAULT':
        return 100  # default priority, least important
    config = next(
        (q for q in (app.config.custom_queues or []) if q.name == queue_name),
        None,
    )
    if config is None:
        # Callers in this module wrap priority resolution in
        # `except BaseException`, so a clearer exception type is safe.
        raise LookupError(f'no custom queue configured with name {queue_name!r}')
    return config.priority
44
+
45
+
46
class TaskHandle(Generic[T]):
    """
    Handle for a submitted task, used to retrieve results.

    Both get() and get_async() always produce a TaskResult[T, TaskError]
    rather than raising:

    - success            -> TaskResult(ok=value) with value of type T
    - task-level failure -> TaskResult(err=TaskError) from task execution
    - retrieval failure  -> TaskResult(err=TaskError) with WAIT_TIMEOUT,
                            TASK_NOT_FOUND, etc.
    - broker failure     -> TaskResult(err=TaskError) with BROKER_ERROR

    Returning one unified type lets callers handle every outcome without
    try/except.
    """

    def __init__(
        self, task_id: str, app: Optional['Horsies'] = None, broker_mode: bool = False
    ):
        self.task_id = task_id
        self._app = app
        self._broker_mode = broker_mode
        # Result cache: filled by the first successful fetch, by
        # set_immediate_result(), or by a synthesized error result.
        self._cached_result: Optional[TaskResult[T, TaskError]] = None
        self._result_fetched = False

    def _error_result(
        self,
        *,
        error_code: LibraryErrorCode,
        message: str,
        data: dict[str, Any],
        exception: BaseException | None = None,
    ) -> TaskResult[T, TaskError]:
        # Synthesize an err-variant result, cache it, and mark the handle as
        # fetched so repeated calls observe the same outcome.
        failure: TaskResult[T, TaskError] = TaskResult(
            err=TaskError(
                error_code=error_code,
                message=message,
                data=data,
                exception=exception,
            )
        )
        self._cached_result = failure
        self._result_fetched = True
        return failure

    def _from_cache(self, message: str) -> TaskResult[T, TaskError]:
        # Return the cached result; if the cache is unexpectedly empty,
        # degrade to a RESULT_NOT_AVAILABLE error carrying `message`.
        cached = self._cached_result
        if cached is not None:
            return cached
        return self._error_result(
            error_code=LibraryErrorCode.RESULT_NOT_AVAILABLE,
            message=message,
            data={'task_id': self.task_id},
        )

    def get(
        self,
        timeout_ms: Optional[int] = None,
    ) -> TaskResult[T, TaskError]:
        """
        Get the task result (blocking).

        Args:
            timeout_ms: Maximum time to wait for result (milliseconds)

        Returns:
            TaskResult[T, TaskError] - always returns TaskResult, never raises for task/retrieval errors.
            Check result.is_err() and result.err.error_code for error handling.
        """
        if self._result_fetched:
            return self._from_cache('Result cache is empty after fetch')

        if not (self._broker_mode and self._app):
            # Synchronous/immediate execution: the result must already be set.
            return self._from_cache(
                'Result not available - task may not have been executed'
            )

        # Broker mode: the broker returns TaskResult for all cases.
        broker = self._app.get_broker()
        try:
            fetched = broker.get_result(self.task_id, timeout_ms)
        except Exception as exc:
            return self._error_result(
                error_code=LibraryErrorCode.BROKER_ERROR,
                message='Broker error while retrieving task result',
                data={'task_id': self.task_id},
                exception=exc,
            )
        self._cached_result = fetched
        self._result_fetched = True
        return fetched

    async def get_async(
        self,
        timeout_ms: Optional[int] = None,
    ) -> TaskResult[T, TaskError]:
        """
        Get the task result asynchronously.

        Args:
            timeout_ms: Maximum time to wait for result (milliseconds)

        Returns:
            TaskResult[T, TaskError] - always returns TaskResult, never raises for task/retrieval errors.
            Check result.is_err() and result.err.error_code for error handling.
        """
        if self._result_fetched:
            return self._from_cache('Result cache is empty after fetch')

        if not (self._broker_mode and self._app):
            # Synchronous/immediate execution: the result must already be set.
            return self._from_cache(
                'Result not available - task may not have been executed'
            )

        broker = self._app.get_broker()
        try:
            fetched = await broker.get_result_async(self.task_id, timeout_ms)
        except asyncio.CancelledError:
            # Cancellation must propagate; never convert it to an error result.
            raise
        except Exception as exc:
            return self._error_result(
                error_code=LibraryErrorCode.BROKER_ERROR,
                message='Broker error while retrieving task result',
                data={'task_id': self.task_id},
                exception=exc,
            )
        self._cached_result = fetched
        self._result_fetched = True
        return fetched

    def set_immediate_result(
        self,
        result: TaskResult[T, TaskError],
    ) -> None:
        """Internal method to set result for synchronous execution"""
        self._cached_result = result
        self._result_fetched = True
201
+
202
+
203
class TaskFunction(Protocol[P, T]):
    """
    A TaskFunction is a function that gets a @task decorator applied to it.
    Protocol extends the simple function signature to include the send and send_async methods,
    thus being able to be called with or without the @task decorator.

    The generic parameter T represents the success type in TaskResult[T, TaskError].

    This is a structural (Protocol) interface: `create_task_wrapper` returns an
    object satisfying it, and type checkers match any object with these members.
    """

    # Registered name of the task, as passed to create_task_wrapper.
    task_name: str

    @abstractmethod
    def __call__(
        self,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> TaskResult[T, TaskError]: ...  # run the task inline, returning its TaskResult

    @abstractmethod
    def send(
        self,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> 'TaskHandle[T]': ...  # enqueue for background execution (blocking enqueue)

    @abstractmethod
    async def send_async(
        self,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> 'TaskHandle[T]': ...  # async enqueue variant for async frameworks

    @abstractmethod
    def schedule(
        self,
        delay: int,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> 'TaskHandle[T]': ...  # enqueue to run after `delay` (seconds, per create_task_wrapper)
242
+
243
+
244
def create_task_wrapper(
    fn: Callable[P, TaskResult[T, TaskError]],
    app: 'Horsies',
    task_name: str,
    task_options: Optional['TaskOptions'] = None,
) -> 'TaskFunction[P, T]':
    """
    Create a task wrapper for a specific app instance.
    Called by app.task() decorator.

    At decoration time, validates that `fn` declares a return annotation of
    the exact shape TaskResult[T, E] (raising TaskDefinitionError otherwise)
    and builds pydantic TypeAdapters for T and E so results can be validated
    at call time.

    Returns an object satisfying TaskFunction[P, T]:
    - calling it runs `fn` inline with runtime validation of the TaskResult
      payload, converting unhandled exceptions into error results;
    - send()/send_async() enqueue via the app's broker;
    - schedule(delay, ...) enqueues with a future `sent_at` timestamp.
    """

    hints = get_type_hints(fn)

    # Validate that return type is TaskResult[*, TaskError]
    fn_location = SourceLocation.from_function(fn)
    return_hint = hints.get('return')
    if return_hint is None:
        raise TaskDefinitionError(
            message='task function must declare an explicit return type',
            code=ErrorCode.TASK_NO_RETURN_TYPE,
            location=fn_location,
            notes=[f"function '{fn.__name__}' has no return type annotation"],
            help_text='add return type annotation: `-> TaskResult[YourType, TaskError]`',
        )

    if get_origin(return_hint) is not TaskResult:
        raise TaskDefinitionError(
            message='task function must return TaskResult',
            code=ErrorCode.TASK_INVALID_RETURN_TYPE,
            location=fn_location,
            notes=[
                f"function '{fn.__name__}' returns `{return_hint}`",
                'tasks must return TaskResult[T, TaskError]',
            ],
            help_text='change return type to `-> TaskResult[YourType, TaskError]`',
        )

    # Extract T and E from TaskResult[T, E] for runtime validation
    type_args = get_args(return_hint)
    if len(type_args) != 2:
        raise TaskDefinitionError(
            message='TaskResult must have exactly 2 type parameters',
            code=ErrorCode.TASK_INVALID_RETURN_TYPE,
            location=fn_location,
            notes=[
                f"function '{fn.__name__}' returns `{return_hint}`",
                'expected TaskResult[T, E] with exactly 2 type parameters',
            ],
            help_text='use `-> TaskResult[YourType, TaskError]`',
        )

    # Adapters are built once here (decoration time), not per call.
    ok_type, err_type = type_args
    ok_type_adapter: TypeAdapter[Any] = TypeAdapter(ok_type)
    err_type_adapter: TypeAdapter[Any] = TypeAdapter(err_type)

    def _immediate_error_handle(
        exception: BaseException,
        message: str,
    ) -> TaskHandle[T]:
        """Create a handle that already contains an error TaskResult."""
        # broker_mode=False so get()/get_async() read the pre-set cache
        # instead of asking the broker for task id '<error>'.
        handle: TaskHandle[T] = TaskHandle('<error>', app, broker_mode=False)
        handle.set_immediate_result(
            TaskResult(
                err=TaskError(
                    exception=exception,
                    error_code=LibraryErrorCode.UNHANDLED_EXCEPTION,
                    message=message,
                    data={
                        'task_name': task_name,
                        'exception_type': type(exception).__name__,
                    },
                )
            )
        )
        return handle

    # Create a wrapper function that preserves the exact signature
    def wrapped_function(
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> TaskResult[T, TaskError]:
        # Inline execution path: run fn, validate its TaskResult payload
        # against the declared T/E, and convert exceptions to error results.
        try:
            result = fn(*args, **kwargs)

            # Runtime type validation: validate result against declared types
            try:
                if result.is_ok():
                    # Validate ok value against T
                    ok_type_adapter.validate_python(result.ok)
                else:
                    # Validate err value against E
                    err_type_adapter.validate_python(result.err)
            except ValidationError as ve:
                # Type validation failed - return error result
                variant = 'ok' if result.is_ok() else 'err'
                expected_type = ok_type if result.is_ok() else err_type
                actual_value = result.ok if result.is_ok() else result.err
                return TaskResult(
                    err=TaskError(
                        exception=ve,
                        error_code=LibraryErrorCode.RETURN_TYPE_MISMATCH,
                        message=f'Task {fn.__name__} returned TaskResult({variant}={actual_value!r}) but expected type {expected_type}',
                        data={
                            'variant': variant,
                            'expected_type': str(expected_type),
                            'actual_value': str(actual_value),
                            'validation_errors': ve.errors(),
                        },
                    )
                )

            return result

        except KeyboardInterrupt:
            # Allow KeyboardInterrupt to propagate for graceful worker shutdown
            raise
        except WorkflowContextMissingIdError as e:
            # Specific workflow-context failure gets its own error code.
            error_result: TaskResult[T, TaskError] = TaskResult(
                err=TaskError(
                    exception=e,
                    error_code=LibraryErrorCode.WORKFLOW_CTX_MISSING_ID,
                    message=str(e),
                    data={'task_name': task_name},
                )
            )
            return error_result
        except BaseException as e:
            # Catch SystemExit, GeneratorExit, Exception, etc.
            error_result: TaskResult[T, TaskError] = TaskResult(
                err=TaskError(
                    exception=e,
                    error_code=LibraryErrorCode.UNHANDLED_EXCEPTION,
                    message=f'Unhandled exception in task {fn.__name__}: {type(e).__name__}: {str(e)}',
                    data={'exception_type': type(e).__name__},
                )
            )
            return error_result

    def send(
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> TaskHandle[T]:
        """Execute task asynchronously via app's broker."""
        # Prevent import side-effects: if the worker is importing modules for
        # discovery, suppress enqueuing and return an immediate error result.
        if hasattr(app, 'are_sends_suppressed') and app.are_sends_suppressed():
            try:
                app.logger.warning(
                    'Send suppressed for %s during module import/discovery; no task enqueued',
                    task_name,
                )
            except Exception:
                # Logging failures must not break suppression (best-effort).
                pass
            suppressed_handle: TaskHandle[T] = TaskHandle('<suppressed>')
            suppressed_handle.set_immediate_result(
                TaskResult(
                    err=TaskError(
                        error_code=LibraryErrorCode.SEND_SUPPRESSED,
                        message='Task send suppressed during module import/discovery',
                        data={'task_name': task_name},
                    )
                )
            )
            return suppressed_handle
        # VALIDATION AT EXECUTION TIME
        # Re-validate queue_name to catch any configuration changes
        queue_name = task_options.queue_name if task_options else None
        try:
            validated_queue_name = app.validate_queue_name(queue_name)
            priority = effective_priority(app, validated_queue_name)
        except BaseException as e:
            # Convert validation failures into an error-carrying handle
            # rather than raising at the call site.
            return _immediate_error_handle(
                e,
                f'Task execution error for {fn.__name__}: {e}',
            )

        try:
            broker = app.get_broker()
            good_until = task_options.good_until if task_options else None

            task_options_json = None
            if task_options:
                task_options_json = serialize_task_options(task_options)

            task_id = broker.enqueue(
                task_name,
                args,
                kwargs,
                validated_queue_name,
                priority=priority,
                good_until=good_until,
                task_options=task_options_json,
            )
            # broker_mode=True: result is fetched lazily via the broker.
            return TaskHandle(task_id, app, broker_mode=True)
        except BaseException as e:
            return _immediate_error_handle(
                e,
                f'Failed to enqueue task {fn.__name__}: {e}',
            )

    async def send_async(
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> TaskHandle[T]:
        """Async variant for frameworks like FastAPI."""
        if hasattr(app, 'are_sends_suppressed') and app.are_sends_suppressed():
            try:
                app.logger.warning(
                    'Send (async) suppressed for %s during module import/discovery; no task enqueued',
                    task_name,
                )
            except Exception:
                # Logging failures must not break suppression (best-effort).
                pass
            suppressed_handle: TaskHandle[T] = TaskHandle('<suppressed>')
            suppressed_handle.set_immediate_result(
                TaskResult(
                    err=TaskError(
                        error_code=LibraryErrorCode.SEND_SUPPRESSED,
                        message='Task send suppressed during module import/discovery',
                        data={'task_name': task_name},
                    )
                )
            )
            return suppressed_handle
        # Same execution-time validation as send().
        queue_name = task_options.queue_name if task_options else None
        try:
            validated = app.validate_queue_name(queue_name)
            priority = effective_priority(app, validated)
        except BaseException as e:
            return _immediate_error_handle(
                e,
                f'Task execution error for {fn.__name__}: {e}',
            )
        try:
            broker = app.get_broker()
            good_until = task_options.good_until if task_options else None

            task_options_json = None
            if task_options:
                task_options_json = serialize_task_options(task_options)

            task_id = await broker.enqueue_async(
                task_name,
                args,
                kwargs,
                validated,
                priority=priority,
                good_until=good_until,
                task_options=task_options_json,
            )
            return TaskHandle(task_id, app, broker_mode=True)
        except BaseException as e:
            return _immediate_error_handle(
                e,
                f'Failed to enqueue task {fn.__name__}: {e}',
            )

    def schedule(
        delay: int,
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> TaskHandle[T]:
        """Execute task asynchronously after a delay."""
        if hasattr(app, 'are_sends_suppressed') and app.are_sends_suppressed():
            try:
                app.logger.warning(
                    'Schedule suppressed for %s during module import/discovery; no task enqueued',
                    task_name,
                )
            except Exception:
                # Logging failures must not break suppression (best-effort).
                pass
            suppressed_handle: TaskHandle[T] = TaskHandle('<suppressed>')
            suppressed_handle.set_immediate_result(
                TaskResult(
                    err=TaskError(
                        error_code=LibraryErrorCode.SEND_SUPPRESSED,
                        message='Task schedule suppressed during module import/discovery',
                        data={'task_name': task_name},
                    )
                )
            )
            return suppressed_handle
        # VALIDATION AT EXECUTION TIME
        # Re-validate queue_name to catch any configuration changes
        queue_name = task_options.queue_name if task_options else None
        try:
            validated_queue_name = app.validate_queue_name(queue_name)
            priority = effective_priority(app, validated_queue_name)
        except BaseException as e:
            return _immediate_error_handle(
                e,
                f'Task execution error for {fn.__name__}: {e}',
            )

        try:
            broker = app.get_broker()
            good_until = task_options.good_until if task_options else None
            # `delay` is interpreted in seconds; the broker releases the task
            # once `sent_at` (UTC) is reached.
            sent_at = datetime.now(timezone.utc) + timedelta(seconds=delay)

            task_options_json = None
            if task_options:
                task_options_json = serialize_task_options(task_options)

            task_id = broker.enqueue(
                task_name,
                args,
                kwargs,
                validated_queue_name,
                priority=priority,
                good_until=good_until,
                sent_at=sent_at,
                task_options=task_options_json,
            )
            return TaskHandle(task_id, app, broker_mode=True)
        except BaseException as e:
            return _immediate_error_handle(
                e,
                f'Failed to schedule task {fn.__name__}: {e}',
            )

    class TaskFunctionImpl:
        # Concrete object satisfying the TaskFunction protocol; closes over
        # wrapped_function / send / send_async / schedule above.
        def __init__(self) -> None:
            # Mirror the wrapped function's metadata (functools.wraps-style).
            self.__name__ = fn.__name__
            self.__doc__ = fn.__doc__
            self.__annotations__ = fn.__annotations__
            self.task_name = task_name
            # Persist the declared queue (validated at definition time) so other components
            # (e.g., scheduler) can infer a task's home queue in CUSTOM mode.
            self.task_queue_name = task_options.queue_name if task_options else None
            # Keep a reference to the original function for introspection (signature checks).
            self._original_fn = fn
            # Pre-serialize task_options so workflow engine can access retry config
            self.task_options_json: str | None = (
                serialize_task_options(task_options) if task_options else None
            )

        def __call__(
            self,
            *args: P.args,
            **kwargs: P.kwargs,
        ) -> TaskResult[T, TaskError]:
            return wrapped_function(*args, **kwargs)

        def send(
            self,
            *args: P.args,
            **kwargs: P.kwargs,
        ) -> TaskHandle[T]:
            return send(*args, **kwargs)

        async def send_async(
            self,
            *args: P.args,
            **kwargs: P.kwargs,
        ) -> TaskHandle[T]:
            return await send_async(*args, **kwargs)

        def schedule(
            self,
            delay: int,
            *args: P.args,
            **kwargs: P.kwargs,
        ) -> TaskHandle[T]:
            return schedule(delay, *args, **kwargs)

        # Copy metadata
        def __getattr__(self, name: str) -> Any:
            # Fall back to the wrapper for any attribute not set above
            # (only consulted when normal lookup fails).
            return getattr(wrapped_function, name)

    task_func = TaskFunctionImpl()

    return task_func
@@ -0,0 +1,38 @@
1
+ # core/types/status.py
2
+ """
3
+ Core types and enums used throughout the application.
4
+ This module should not import from other application modules.
5
+ """
6
+
7
+ from enum import Enum
8
+
9
+
10
class TaskStatus(Enum):
    """Lifecycle states a task moves through, from enqueue to completion."""

    # Initial state when a task is sent: waiting to become a candidate
    # for execution.
    PENDING = 'pending'
    # Reserved by a worker, but execution has not yet started.
    CLAIMED = 'claimed'
    # Currently being executed by a worker process.
    RUNNING = 'running'
    # Finished successfully.
    COMPLETED = 'completed'
    # Execution failed.
    FAILED = 'failed'
    # Cancelled before completion.
    CANCELLED = 'cancelled'
    # Placed back on the queue after a failure.
    REQUEUED = 'requeued'

    @property
    def is_terminal(self) -> bool:
        """True when this status is final and permits no further transitions."""
        return self in TASK_TERMINAL_STATES
32
+
33
+
34
# Final states: a task in one of these statuses undergoes no further
# transitions (REQUEUED is deliberately excluded - it re-enters the queue).
# Consumed by TaskStatus.is_terminal.
TASK_TERMINAL_STATES: frozenset[TaskStatus] = frozenset({
    TaskStatus.COMPLETED,
    TaskStatus.FAILED,
    TaskStatus.CANCELLED,
})