horsies 0.1.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. horsies/__init__.py +117 -0
  2. horsies/core/__init__.py +0 -0
  3. horsies/core/app.py +552 -0
  4. horsies/core/banner.py +144 -0
  5. horsies/core/brokers/__init__.py +5 -0
  6. horsies/core/brokers/listener.py +444 -0
  7. horsies/core/brokers/postgres.py +993 -0
  8. horsies/core/cli.py +624 -0
  9. horsies/core/codec/serde.py +596 -0
  10. horsies/core/errors.py +535 -0
  11. horsies/core/logging.py +90 -0
  12. horsies/core/models/__init__.py +0 -0
  13. horsies/core/models/app.py +268 -0
  14. horsies/core/models/broker.py +79 -0
  15. horsies/core/models/queues.py +23 -0
  16. horsies/core/models/recovery.py +101 -0
  17. horsies/core/models/schedule.py +229 -0
  18. horsies/core/models/task_pg.py +307 -0
  19. horsies/core/models/tasks.py +358 -0
  20. horsies/core/models/workflow.py +1990 -0
  21. horsies/core/models/workflow_pg.py +245 -0
  22. horsies/core/registry/tasks.py +101 -0
  23. horsies/core/scheduler/__init__.py +26 -0
  24. horsies/core/scheduler/calculator.py +267 -0
  25. horsies/core/scheduler/service.py +569 -0
  26. horsies/core/scheduler/state.py +260 -0
  27. horsies/core/task_decorator.py +656 -0
  28. horsies/core/types/status.py +38 -0
  29. horsies/core/utils/imports.py +203 -0
  30. horsies/core/utils/loop_runner.py +44 -0
  31. horsies/core/worker/current.py +17 -0
  32. horsies/core/worker/worker.py +1967 -0
  33. horsies/core/workflows/__init__.py +23 -0
  34. horsies/core/workflows/engine.py +2344 -0
  35. horsies/core/workflows/recovery.py +501 -0
  36. horsies/core/workflows/registry.py +97 -0
  37. horsies/py.typed +0 -0
  38. horsies-0.1.0a4.dist-info/METADATA +35 -0
  39. horsies-0.1.0a4.dist-info/RECORD +42 -0
  40. horsies-0.1.0a4.dist-info/WHEEL +5 -0
  41. horsies-0.1.0a4.dist-info/entry_points.txt +2 -0
  42. horsies-0.1.0a4.dist-info/top_level.txt +1 -0
@@ -0,0 +1,656 @@
+ # app/core/task_decorator.py
+ from __future__ import annotations
+ import asyncio
+ from typing import (
+     Callable,
+     get_origin,
+     get_type_hints,
+     get_args,
+     ParamSpec,
+     TypeVar,
+     Generic,
+     Protocol,
+     TYPE_CHECKING,
+     Optional,
+     Any,
+ )
+ from abc import abstractmethod
+ from datetime import datetime, timedelta, timezone
+ from pydantic import TypeAdapter, ValidationError
+ from horsies.core.codec.serde import serialize_task_options
+
+ if TYPE_CHECKING:
+     from horsies.core.app import Horsies
+     from horsies.core.models.tasks import TaskOptions
+     from horsies.core.models.tasks import TaskError, TaskResult
+     from horsies.core.models.tasks import TaskInfo
+
+ from horsies.core.models.tasks import TaskResult, TaskError, LibraryErrorCode
+ from horsies.core.models.workflow import WorkflowContextMissingIdError
+ from horsies.core.errors import TaskDefinitionError, ErrorCode, SourceLocation
+
+ P = ParamSpec('P')
+ T = TypeVar('T')
+ E = TypeVar('E')
+
+
+ def effective_priority(
+     app: 'Horsies',
+     queue_name: str,
+ ) -> int:
+     if app.config.queue_mode.name == 'DEFAULT':
+         return 100  # default priority, least important
+     config = next(q for q in (app.config.custom_queues or []) if q.name == queue_name)
+     return config.priority
+
+
+ class TaskHandle(Generic[T]):
+     """
+     Handle for a submitted task, used to retrieve results.
+
+     The get() and get_async() methods always return TaskResult[T, TaskError]:
+     - On success: TaskResult(ok=value) where value is of type T
+     - On task error: TaskResult(err=TaskError) from task execution
+     - On retrieval error: TaskResult(err=TaskError) with WAIT_TIMEOUT, TASK_NOT_FOUND, etc.
+     - On broker error: TaskResult(err=TaskError) with BROKER_ERROR
+
+     This unified return type enables consistent error handling without try/except.
+     """
+
+     def __init__(
+         self, task_id: str, app: Optional['Horsies'] = None, broker_mode: bool = False
+     ):
+         self.task_id = task_id
+         self._app = app
+         self._broker_mode = broker_mode
+         self._cached_result: Optional[TaskResult[T, TaskError]] = None
+         self._result_fetched = False
+
+     def _error_result(
+         self,
+         *,
+         error_code: LibraryErrorCode,
+         message: str,
+         data: dict[str, Any],
+         exception: BaseException | None = None,
+     ) -> TaskResult[T, TaskError]:
+         error_result: TaskResult[T, TaskError] = TaskResult(
+             err=TaskError(
+                 error_code=error_code,
+                 message=message,
+                 data=data,
+                 exception=exception,
+             )
+         )
+         self._cached_result = error_result
+         self._result_fetched = True
+         return error_result
+
+     def get(
+         self,
+         timeout_ms: Optional[int] = None,
+     ) -> TaskResult[T, TaskError]:
+         """
+         Get the task result (blocking).
+
+         Args:
+             timeout_ms: Maximum time to wait for result (milliseconds)
+
+         Returns:
+             TaskResult[T, TaskError] - always returns TaskResult, never raises for task/retrieval errors.
+             Check result.is_err() and result.err.error_code for error handling.
+         """
+         if self._result_fetched:
+             match self._cached_result:
+                 case None:
+                     return self._error_result(
+                         error_code=LibraryErrorCode.RESULT_NOT_AVAILABLE,
+                         message='Result cache is empty after fetch',
+                         data={'task_id': self.task_id},
+                     )
+                 case result:
+                     return result
+
+         if self._broker_mode and self._app:
+             # Fetch from app's broker - broker now returns TaskResult for all cases
+             broker = self._app.get_broker()
+             try:
+                 result = broker.get_result(self.task_id, timeout_ms)
+                 self._cached_result = result
+                 self._result_fetched = True
+                 return result
+             except Exception as exc:
+                 return self._error_result(
+                     error_code=LibraryErrorCode.BROKER_ERROR,
+                     message='Broker error while retrieving task result',
+                     data={'task_id': self.task_id},
+                     exception=exc,
+                 )
+         else:
+             # For synchronous/immediate execution, result should already be set
+             match self._cached_result:
+                 case None:
+                     return self._error_result(
+                         error_code=LibraryErrorCode.RESULT_NOT_AVAILABLE,
+                         message='Result not available - task may not have been executed',
+                         data={'task_id': self.task_id},
+                     )
+                 case result:
+                     return result
+
+     async def get_async(
+         self,
+         timeout_ms: Optional[int] = None,
+     ) -> TaskResult[T, TaskError]:
+         """
+         Get the task result asynchronously.
+
+         Args:
+             timeout_ms: Maximum time to wait for result (milliseconds)
+
+         Returns:
+             TaskResult[T, TaskError] - always returns TaskResult, never raises for task/retrieval errors.
+             Check result.is_err() and result.err.error_code for error handling.
+         """
+         if self._result_fetched:
+             match self._cached_result:
+                 case None:
+                     return self._error_result(
+                         error_code=LibraryErrorCode.RESULT_NOT_AVAILABLE,
+                         message='Result cache is empty after fetch',
+                         data={'task_id': self.task_id},
+                     )
+                 case result:
+                     return result
+
+         if self._broker_mode and self._app:
+             # Fetch from app's broker - broker now returns TaskResult for all cases
+             broker = self._app.get_broker()
+             try:
+                 result = await broker.get_result_async(self.task_id, timeout_ms)
+                 self._cached_result = result
+                 self._result_fetched = True
+                 return result
+             except asyncio.CancelledError:
+                 raise
+             except Exception as exc:
+                 return self._error_result(
+                     error_code=LibraryErrorCode.BROKER_ERROR,
+                     message='Broker error while retrieving task result',
+                     data={'task_id': self.task_id},
+                     exception=exc,
+                 )
+         else:
+             # For synchronous/immediate execution, result should already be set
+             match self._cached_result:
+                 case None:
+                     return self._error_result(
+                         error_code=LibraryErrorCode.RESULT_NOT_AVAILABLE,
+                         message='Result not available - task may not have been executed',
+                         data={'task_id': self.task_id},
+                     )
+                 case result:
+                     return result
+
+     def info(
+         self,
+         *,
+         include_result: bool = False,
+         include_failed_reason: bool = False,
+     ) -> 'TaskInfo | None':
+         """Fetch metadata for this task from the broker."""
+         if not self._broker_mode or not self._app:
+             raise RuntimeError(
+                 'TaskHandle.info() requires a broker-backed task handle '
+                 '(use .send() or .send_async())'
+             )
+
+         broker = self._app.get_broker()
+         return broker.get_task_info(
+             self.task_id,
+             include_result=include_result,
+             include_failed_reason=include_failed_reason,
+         )
+
+     async def info_async(
+         self,
+         *,
+         include_result: bool = False,
+         include_failed_reason: bool = False,
+     ) -> 'TaskInfo | None':
+         """Async version of info()."""
+         if not self._broker_mode or not self._app:
+             raise RuntimeError(
+                 'TaskHandle.info_async() requires a broker-backed task handle '
+                 '(use .send() or .send_async())'
+             )
+
+         broker = self._app.get_broker()
+         return await broker.get_task_info_async(
+             self.task_id,
+             include_result=include_result,
+             include_failed_reason=include_failed_reason,
+         )
+
+     def set_immediate_result(
+         self,
+         result: TaskResult[T, TaskError],
+     ) -> None:
+         """Internal method to set result for synchronous execution"""
+         self._cached_result = result
+         self._result_fetched = True
+
+
+ class TaskFunction(Protocol[P, T]):
+     """
+     A TaskFunction is a function that has the @task decorator applied to it.
+     The Protocol extends the plain function signature with the send() and send_async()
+     methods, so a decorated function can still be called like the undecorated one.
+
+     The generic parameter T represents the success type in TaskResult[T, TaskError].
+     """
+
+     task_name: str
+
+     @abstractmethod
+     def __call__(
+         self,
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> TaskResult[T, TaskError]: ...
+
+     @abstractmethod
+     def send(
+         self,
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> 'TaskHandle[T]': ...
+
+     @abstractmethod
+     async def send_async(
+         self,
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> 'TaskHandle[T]': ...
+
+     @abstractmethod
+     def schedule(
+         self,
+         delay: int,
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> 'TaskHandle[T]': ...
+
+
+ def create_task_wrapper(
+     fn: Callable[P, TaskResult[T, TaskError]],
+     app: 'Horsies',
+     task_name: str,
+     task_options: Optional['TaskOptions'] = None,
+ ) -> 'TaskFunction[P, T]':
+     """
+     Create a task wrapper for a specific app instance.
+     Called by app.task() decorator.
+     """
+
+     hints = get_type_hints(fn)
+
+     # Validate that return type is TaskResult[*, TaskError]
+     fn_location = SourceLocation.from_function(fn)
+     return_hint = hints.get('return')
+     if return_hint is None:
+         raise TaskDefinitionError(
+             message='task function must declare an explicit return type',
+             code=ErrorCode.TASK_NO_RETURN_TYPE,
+             location=fn_location,
+             notes=[f"function '{fn.__name__}' has no return type annotation"],
+             help_text='add return type annotation: `-> TaskResult[YourType, TaskError]`',
+         )
+
+     if get_origin(return_hint) is not TaskResult:
+         raise TaskDefinitionError(
+             message='task function must return TaskResult',
+             code=ErrorCode.TASK_INVALID_RETURN_TYPE,
+             location=fn_location,
+             notes=[
+                 f"function '{fn.__name__}' returns `{return_hint}`",
+                 'tasks must return TaskResult[T, TaskError]',
+             ],
+             help_text='change return type to `-> TaskResult[YourType, TaskError]`',
+         )
+
+     # Extract T and E from TaskResult[T, E] for runtime validation
+     type_args = get_args(return_hint)
+     if len(type_args) != 2:
+         raise TaskDefinitionError(
+             message='TaskResult must have exactly 2 type parameters',
+             code=ErrorCode.TASK_INVALID_RETURN_TYPE,
+             location=fn_location,
+             notes=[
+                 f"function '{fn.__name__}' returns `{return_hint}`",
+                 'expected TaskResult[T, E] with exactly 2 type parameters',
+             ],
+             help_text='use `-> TaskResult[YourType, TaskError]`',
+         )
+
+     ok_type, err_type = type_args
+     ok_type_adapter: TypeAdapter[Any] = TypeAdapter(ok_type)
+     err_type_adapter: TypeAdapter[Any] = TypeAdapter(err_type)
+
+     def _immediate_error_handle(
+         exception: BaseException,
+         message: str,
+     ) -> TaskHandle[T]:
+         """Create a handle that already contains an error TaskResult."""
+         handle: TaskHandle[T] = TaskHandle('<error>', app, broker_mode=False)
+         handle.set_immediate_result(
+             TaskResult(
+                 err=TaskError(
+                     exception=exception,
+                     error_code=LibraryErrorCode.UNHANDLED_EXCEPTION,
+                     message=message,
+                     data={
+                         'task_name': task_name,
+                         'exception_type': type(exception).__name__,
+                     },
+                 )
+             )
+         )
+         return handle
+
+     # Create a wrapper function that preserves the exact signature
+     def wrapped_function(
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> TaskResult[T, TaskError]:
+         try:
+             result = fn(*args, **kwargs)
+
+             # Runtime type validation: validate result against declared types
+             try:
+                 if result.is_ok():
+                     # Validate ok value against T
+                     ok_type_adapter.validate_python(result.ok)
+                 else:
+                     # Validate err value against E
+                     err_type_adapter.validate_python(result.err)
+             except ValidationError as ve:
+                 # Type validation failed - return error result
+                 variant = 'ok' if result.is_ok() else 'err'
+                 expected_type = ok_type if result.is_ok() else err_type
+                 actual_value = result.ok if result.is_ok() else result.err
+                 return TaskResult(
+                     err=TaskError(
+                         exception=ve,
+                         error_code=LibraryErrorCode.RETURN_TYPE_MISMATCH,
+                         message=f'Task {fn.__name__} returned TaskResult({variant}={actual_value!r}) but expected type {expected_type}',
+                         data={
+                             'variant': variant,
+                             'expected_type': str(expected_type),
+                             'actual_value': str(actual_value),
+                             'validation_errors': ve.errors(),
+                         },
+                     )
+                 )
+
+             return result
+
+         except KeyboardInterrupt:
+             # Allow KeyboardInterrupt to propagate for graceful worker shutdown
+             raise
+         except WorkflowContextMissingIdError as e:
+             error_result: TaskResult[T, TaskError] = TaskResult(
+                 err=TaskError(
+                     exception=e,
+                     error_code=LibraryErrorCode.WORKFLOW_CTX_MISSING_ID,
+                     message=str(e),
+                     data={'task_name': task_name},
+                 )
+             )
+             return error_result
+         except BaseException as e:
+             # Catch SystemExit, GeneratorExit, Exception, etc.
+             error_result: TaskResult[T, TaskError] = TaskResult(
+                 err=TaskError(
+                     exception=e,
+                     error_code=LibraryErrorCode.UNHANDLED_EXCEPTION,
+                     message=f'Unhandled exception in task {fn.__name__}: {type(e).__name__}: {str(e)}',
+                     data={'exception_type': type(e).__name__},
+                 )
+             )
+             return error_result
+
+     def send(
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> TaskHandle[T]:
+         """Execute task asynchronously via app's broker."""
+         # Prevent import side-effects: if the worker is importing modules for
+         # discovery, suppress enqueuing and return an immediate error result.
+         if hasattr(app, 'are_sends_suppressed') and app.are_sends_suppressed():
+             try:
+                 app.logger.warning(
+                     'Send suppressed for %s during module import/discovery; no task enqueued',
+                     task_name,
+                 )
+             except Exception:
+                 pass
+             suppressed_handle: TaskHandle[T] = TaskHandle('<suppressed>')
+             suppressed_handle.set_immediate_result(
+                 TaskResult(
+                     err=TaskError(
+                         error_code=LibraryErrorCode.SEND_SUPPRESSED,
+                         message='Task send suppressed during module import/discovery',
+                         data={'task_name': task_name},
+                     )
+                 )
+             )
+             return suppressed_handle
+         # VALIDATION AT EXECUTION TIME
+         # Re-validate queue_name to catch any configuration changes
+         queue_name = task_options.queue_name if task_options else None
+         try:
+             validated_queue_name = app.validate_queue_name(queue_name)
+             priority = effective_priority(app, validated_queue_name)
+         except BaseException as e:
+             return _immediate_error_handle(
+                 e,
+                 f'Task execution error for {fn.__name__}: {e}',
+             )
+
+         try:
+             broker = app.get_broker()
+             good_until = task_options.good_until if task_options else None
+
+             task_options_json = None
+             if task_options:
+                 task_options_json = serialize_task_options(task_options)
+
+             task_id = broker.enqueue(
+                 task_name,
+                 args,
+                 kwargs,
+                 validated_queue_name,
+                 priority=priority,
+                 good_until=good_until,
+                 task_options=task_options_json,
+             )
+             return TaskHandle(task_id, app, broker_mode=True)
+         except BaseException as e:
+             return _immediate_error_handle(
+                 e,
+                 f'Failed to enqueue task {fn.__name__}: {e}',
+             )
+
+     async def send_async(
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> TaskHandle[T]:
+         """Async variant for frameworks like FastAPI."""
+         if hasattr(app, 'are_sends_suppressed') and app.are_sends_suppressed():
+             try:
+                 app.logger.warning(
+                     'Send (async) suppressed for %s during module import/discovery; no task enqueued',
+                     task_name,
+                 )
+             except Exception:
+                 pass
+             suppressed_handle: TaskHandle[T] = TaskHandle('<suppressed>')
+             suppressed_handle.set_immediate_result(
+                 TaskResult(
+                     err=TaskError(
+                         error_code=LibraryErrorCode.SEND_SUPPRESSED,
+                         message='Task send suppressed during module import/discovery',
+                         data={'task_name': task_name},
+                     )
+                 )
+             )
+             return suppressed_handle
+         queue_name = task_options.queue_name if task_options else None
+         try:
+             validated = app.validate_queue_name(queue_name)
+             priority = effective_priority(app, validated)
+         except BaseException as e:
+             return _immediate_error_handle(
+                 e,
+                 f'Task execution error for {fn.__name__}: {e}',
+             )
+         try:
+             broker = app.get_broker()
+             good_until = task_options.good_until if task_options else None
+
+             task_options_json = None
+             if task_options:
+                 task_options_json = serialize_task_options(task_options)
+
+             task_id = await broker.enqueue_async(
+                 task_name,
+                 args,
+                 kwargs,
+                 validated,
+                 priority=priority,
+                 good_until=good_until,
+                 task_options=task_options_json,
+             )
+             return TaskHandle(task_id, app, broker_mode=True)
+         except BaseException as e:
+             return _immediate_error_handle(
+                 e,
+                 f'Failed to enqueue task {fn.__name__}: {e}',
+             )
+
+     def schedule(
+         delay: int,
+         *args: P.args,
+         **kwargs: P.kwargs,
+     ) -> TaskHandle[T]:
+         """Execute task asynchronously after a delay."""
+         if hasattr(app, 'are_sends_suppressed') and app.are_sends_suppressed():
+             try:
+                 app.logger.warning(
+                     'Schedule suppressed for %s during module import/discovery; no task enqueued',
+                     task_name,
+                 )
+             except Exception:
+                 pass
+             suppressed_handle: TaskHandle[T] = TaskHandle('<suppressed>')
+             suppressed_handle.set_immediate_result(
+                 TaskResult(
+                     err=TaskError(
+                         error_code=LibraryErrorCode.SEND_SUPPRESSED,
+                         message='Task schedule suppressed during module import/discovery',
+                         data={'task_name': task_name},
+                     )
+                 )
+             )
+             return suppressed_handle
+         # VALIDATION AT EXECUTION TIME
+         # Re-validate queue_name to catch any configuration changes
+         queue_name = task_options.queue_name if task_options else None
+         try:
+             validated_queue_name = app.validate_queue_name(queue_name)
+             priority = effective_priority(app, validated_queue_name)
+         except BaseException as e:
+             return _immediate_error_handle(
+                 e,
+                 f'Task execution error for {fn.__name__}: {e}',
+             )
+
+         try:
+             broker = app.get_broker()
+             good_until = task_options.good_until if task_options else None
+             sent_at = datetime.now(timezone.utc) + timedelta(seconds=delay)
+
+             task_options_json = None
+             if task_options:
+                 task_options_json = serialize_task_options(task_options)
+
+             task_id = broker.enqueue(
+                 task_name,
+                 args,
+                 kwargs,
+                 validated_queue_name,
+                 priority=priority,
+                 good_until=good_until,
+                 sent_at=sent_at,
+                 task_options=task_options_json,
+             )
+             return TaskHandle(task_id, app, broker_mode=True)
+         except BaseException as e:
+             return _immediate_error_handle(
+                 e,
+                 f'Failed to schedule task {fn.__name__}: {e}',
+             )
+
+     class TaskFunctionImpl:
+         def __init__(self) -> None:
+             self.__name__ = fn.__name__
+             self.__doc__ = fn.__doc__
+             self.__annotations__ = fn.__annotations__
+             self.task_name = task_name
+             # Persist the declared queue (validated at definition time) so other components
+             # (e.g., scheduler) can infer a task's home queue in CUSTOM mode.
+             self.task_queue_name = task_options.queue_name if task_options else None
+             # Keep a reference to the original function for introspection (signature checks).
+             self._original_fn = fn
+             # Pre-serialize task_options so workflow engine can access retry config
+             self.task_options_json: str | None = (
+                 serialize_task_options(task_options) if task_options else None
+             )
+
+         def __call__(
+             self,
+             *args: P.args,
+             **kwargs: P.kwargs,
+         ) -> TaskResult[T, TaskError]:
+             return wrapped_function(*args, **kwargs)
+
+         def send(
+             self,
+             *args: P.args,
+             **kwargs: P.kwargs,
+         ) -> TaskHandle[T]:
+             return send(*args, **kwargs)
+
+         async def send_async(
+             self,
+             *args: P.args,
+             **kwargs: P.kwargs,
+         ) -> TaskHandle[T]:
+             return await send_async(*args, **kwargs)
+
+         def schedule(
+             self,
+             delay: int,
+             *args: P.args,
+             **kwargs: P.kwargs,
+         ) -> TaskHandle[T]:
+             return schedule(delay, *args, **kwargs)
+
+         # Copy metadata
+         def __getattr__(self, name: str) -> Any:
+             return getattr(wrapped_function, name)
+
+     task_func = TaskFunctionImpl()
+
+     return task_func
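
A minimal usage sketch of the decorator and handle API above. It assumes a configured Horsies app (the wrapper's docstring states it is created via the app.task() decorator); the `add` task, its argument types, and the decorator's exact parameters are illustrative and not taken from the package:

from horsies.core.models.tasks import TaskResult, TaskError

@app.task()  # hypothetical decorator call; create_task_wrapper is invoked by app.task()
def add(a: int, b: int) -> TaskResult[int, TaskError]:
    # tasks must declare and return TaskResult[T, TaskError]
    return TaskResult(ok=a + b)

handle = add.send(2, 3)                # enqueue via the app's broker; returns TaskHandle[int]
result = handle.get(timeout_ms=5000)   # always a TaskResult; never raises for task/retrieval errors
if result.is_err():
    print(result.err.error_code, result.err.message)
else:
    print(result.ok)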
@@ -0,0 +1,38 @@
+ # core/types/status.py
+ """
+ Core types and enums used throughout the application.
+ This module should not import from other application modules.
+ """
+
+ from enum import Enum
+
+
+ class TaskStatus(Enum):
+     """Task execution status"""
+
+     PENDING = 'PENDING'  # It is waiting to become a candidate for execution.
+     # Default status when the task is sent.
+
+     CLAIMED = (
+         'CLAIMED'  # It has been claimed by a worker but not yet started executing.
+     )
+
+     RUNNING = 'RUNNING'  # It is being executed by a process.
+
+     COMPLETED = 'COMPLETED'  # It has been executed successfully.
+
+     FAILED = 'FAILED'  # It has failed during execution.
+     CANCELLED = 'CANCELLED'  # It has been cancelled.
+     REQUEUED = 'REQUEUED'  # It has been requeued after a failure.
+
+     @property
+     def is_terminal(self) -> bool:
+         """Whether this status represents a final state (no further transitions)."""
+         return self in TASK_TERMINAL_STATES
+
+
+ TASK_TERMINAL_STATES: frozenset[TaskStatus] = frozenset({
+     TaskStatus.COMPLETED,
+     TaskStatus.FAILED,
+     TaskStatus.CANCELLED,
+ })
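
A quick illustration of the terminal-state helper defined above, using the enum values as published (the module path is taken from the file list; the snippet itself is illustrative):

from horsies.core.types.status import TaskStatus, TASK_TERMINAL_STATES

assert TaskStatus.COMPLETED.is_terminal
assert TaskStatus.FAILED.is_terminal
assert not TaskStatus.RUNNING.is_terminal  # RUNNING is not in TASK_TERMINAL_STATES
assert TASK_TERMINAL_STATES == {TaskStatus.COMPLETED, TaskStatus.FAILED, TaskStatus.CANCELLED}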