horsies 0.1.0a2.tar.gz → 0.1.0a4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {horsies-0.1.0a2 → horsies-0.1.0a4}/PKG-INFO +1 -1
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/__init__.py +2 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/brokers/postgres.py +129 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/tasks.py +26 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/workflow.py +10 -8
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/task_decorator.py +41 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/types/status.py +7 -7
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies.egg-info/PKG-INFO +1 -1
- {horsies-0.1.0a2 → horsies-0.1.0a4}/pyproject.toml +1 -1
- {horsies-0.1.0a2 → horsies-0.1.0a4}/README.md +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/__init__.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/app.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/banner.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/brokers/__init__.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/brokers/listener.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/cli.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/codec/serde.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/errors.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/logging.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/__init__.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/app.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/broker.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/queues.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/recovery.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/schedule.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/task_pg.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/models/workflow_pg.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/registry/tasks.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/scheduler/__init__.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/scheduler/calculator.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/scheduler/service.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/scheduler/state.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/utils/imports.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/utils/loop_runner.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/worker/current.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/worker/worker.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/workflows/__init__.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/workflows/engine.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/workflows/recovery.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/core/workflows/registry.py +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies/py.typed +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies.egg-info/SOURCES.txt +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies.egg-info/dependency_links.txt +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies.egg-info/entry_points.txt +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies.egg-info/requires.txt +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/horsies.egg-info/top_level.txt +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/setup.cfg +0 -0
- {horsies-0.1.0a2 → horsies-0.1.0a4}/tests/test_issue_fixes.py +0 -0
horsies/__init__.py

@@ -14,6 +14,7 @@ from .core.models.tasks import (
     LibraryErrorCode,
     SubWorkflowError,
     RetryPolicy,
+    TaskInfo,
 )
 from .core.models.queues import QueueMode, CustomQueueConfig
 from .core.models.workflow import (

@@ -67,6 +68,7 @@ __all__ = [
     'LibraryErrorCode',
     'SubWorkflowError',
     'RetryPolicy',
+    'TaskInfo',
     'QueueMode',
     'CustomQueueConfig',
     'TaskStatus',
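The new `TaskInfo` model is re-exported from the package root next to the existing task models; a minimal import sketch (the other names are taken from the export list above):

```python
# TaskInfo now sits alongside the existing top-level exports.
from horsies import TaskInfo, TaskStatus, RetryPolicy
```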
horsies/core/brokers/postgres.py

@@ -16,6 +16,7 @@ from horsies.core.codec.serde import (
     loads_json,
     task_result_from_json,
 )
+from horsies.core.models.tasks import TaskInfo
 from horsies.core.utils.loop_runner import LoopRunner
 from horsies.core.logging import get_logger
 

@@ -854,6 +855,134 @@ class PostgresBroker:
         """
         return self._loop_runner.call(self.get_result_async, task_id, timeout_ms)
 
+    async def get_task_info_async(
+        self,
+        task_id: str,
+        *,
+        include_result: bool = False,
+        include_failed_reason: bool = False,
+    ) -> 'TaskInfo | None':
+        """Fetch metadata for a task by ID."""
+        await self._ensure_initialized()
+
+        async with self.session_factory() as session:
+            base_columns = [
+                'id',
+                'task_name',
+                'status',
+                'queue_name',
+                'priority',
+                'retry_count',
+                'max_retries',
+                'next_retry_at',
+                'sent_at',
+                'claimed_at',
+                'started_at',
+                'completed_at',
+                'failed_at',
+                'worker_hostname',
+                'worker_pid',
+                'worker_process_name',
+            ]
+            if include_result:
+                base_columns.append('result')
+            if include_failed_reason:
+                base_columns.append('failed_reason')
+
+            query = text(
+                f"""
+                SELECT {', '.join(base_columns)}
+                FROM horsies_tasks
+                WHERE id = :id
+                """
+            )
+            result = await session.execute(query, {'id': task_id})
+            row = result.fetchone()
+            if row is None:
+                return None
+
+            result_value = None
+            failed_reason = None
+
+            idx = 0
+            task_id_value = row[idx]
+            idx += 1
+            task_name = row[idx]
+            idx += 1
+            status = TaskStatus(row[idx])
+            idx += 1
+            queue_name = row[idx]
+            idx += 1
+            priority = row[idx]
+            idx += 1
+            retry_count = row[idx] or 0
+            idx += 1
+            max_retries = row[idx] or 0
+            idx += 1
+            next_retry_at = row[idx]
+            idx += 1
+            sent_at = row[idx]
+            idx += 1
+            claimed_at = row[idx]
+            idx += 1
+            started_at = row[idx]
+            idx += 1
+            completed_at = row[idx]
+            idx += 1
+            failed_at = row[idx]
+            idx += 1
+            worker_hostname = row[idx]
+            idx += 1
+            worker_pid = row[idx]
+            idx += 1
+            worker_process_name = row[idx]
+            idx += 1
+
+            if include_result:
+                raw_result = row[idx]
+                idx += 1
+                if raw_result:
+                    result_value = task_result_from_json(loads_json(raw_result))
+
+            if include_failed_reason:
+                failed_reason = row[idx]
+
+            return TaskInfo(
+                task_id=task_id_value,
+                task_name=task_name,
+                status=status,
+                queue_name=queue_name,
+                priority=priority,
+                retry_count=retry_count,
+                max_retries=max_retries,
+                next_retry_at=next_retry_at,
+                sent_at=sent_at,
+                claimed_at=claimed_at,
+                started_at=started_at,
+                completed_at=completed_at,
+                failed_at=failed_at,
+                worker_hostname=worker_hostname,
+                worker_pid=worker_pid,
+                worker_process_name=worker_process_name,
+                result=result_value,
+                failed_reason=failed_reason,
+            )
+
+    def get_task_info(
+        self,
+        task_id: str,
+        *,
+        include_result: bool = False,
+        include_failed_reason: bool = False,
+    ) -> 'TaskInfo | None':
+        """Synchronous wrapper for get_task_info_async()."""
+        return self._loop_runner.call(
+            self.get_task_info_async,
+            task_id,
+            include_result=include_result,
+            include_failed_reason=include_failed_reason,
+        )
+
     def close(self) -> None:
         """
         Synchronous cleanup (runs close_async in background loop).
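The broker exposes the lookup both synchronously and asynchronously. A minimal usage sketch, assuming an already-initialized `PostgresBroker` instance named `broker` and an existing task id (neither is shown in this diff):

```python
# `broker` is assumed to be a configured PostgresBroker; `task_id` an existing task's id.
info = broker.get_task_info(task_id, include_result=True)
if info is not None:
    print(info.status, info.retry_count, info.worker_hostname)

# From async code, the coroutine variant can be awaited directly:
# info = await broker.get_task_info_async(task_id, include_failed_reason=True)
```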
horsies/core/models/tasks.py

@@ -1,6 +1,7 @@
 # app/core/models/tasks.py
 from __future__ import annotations
 import datetime
+from dataclasses import dataclass
 from typing import (
     TYPE_CHECKING,
     Any,

@@ -20,6 +21,7 @@ from enum import Enum
 if TYPE_CHECKING:
     from horsies.core.models.workflow import SubWorkflowSummary
 
+from horsies.core.types.status import TaskStatus
 T = TypeVar('T')  # success payload
 E = TypeVar('E')  # error payload (TaskError )
 

@@ -219,6 +221,30 @@ class TaskResult(Generic[T, E]):
         raise ValueError('Result is not error - check is_err() first')
 
 
+@dataclass
+class TaskInfo:
+    """Metadata for a broker-backed task."""
+
+    task_id: str
+    task_name: str
+    status: TaskStatus
+    queue_name: str
+    priority: int
+    retry_count: int
+    max_retries: int
+    next_retry_at: datetime.datetime | None
+    sent_at: datetime.datetime | None
+    claimed_at: datetime.datetime | None
+    started_at: datetime.datetime | None
+    completed_at: datetime.datetime | None
+    failed_at: datetime.datetime | None
+    worker_hostname: str | None
+    worker_pid: int | None
+    worker_process_name: str | None
+    result: TaskResult[Any, TaskError] | None = None
+    failed_reason: str | None = None
+
+
 class RetryPolicy(BaseModel):
     """
     Retry policy configuration for tasks.
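`TaskInfo` is a plain dataclass, so it can also be constructed directly, for example in tests; a sketch with purely illustrative values:

```python
import datetime
from horsies import TaskInfo, TaskStatus

info = TaskInfo(
    task_id='0f8c-example',              # illustrative id
    task_name='send_welcome_email',      # illustrative task name
    status=TaskStatus.COMPLETED,
    queue_name='default',
    priority=0,
    retry_count=0,
    max_retries=3,
    next_retry_at=None,
    sent_at=datetime.datetime.now(datetime.timezone.utc),
    claimed_at=None,
    started_at=None,
    completed_at=None,
    failed_at=None,
    worker_hostname='worker-1',
    worker_pid=4321,
    worker_process_name='worker-1:0',
)
assert info.result is None          # result and failed_reason default to None
assert info.failed_reason is None   # unless explicitly requested from the broker
```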
horsies/core/models/workflow.py

@@ -1374,6 +1374,7 @@ class WorkflowContext(BaseModel):
 class WorkflowTaskInfo:
     """Information about a task within a workflow."""
 
+    node_id: str | None
     index: int
     name: str
     status: WorkflowTaskStatus

@@ -1700,7 +1701,7 @@ class WorkflowHandle:
         async with self.broker.session_factory() as session:
             result = await session.execute(
                 text("""
-                    SELECT task_index, task_name, status, result, started_at, completed_at
+                    SELECT node_id, task_index, task_name, status, result, started_at, completed_at
                     FROM horsies_workflow_tasks
                     WHERE workflow_id = :wf_id
                     ORDER BY task_index

@@ -1710,14 +1711,15 @@ class WorkflowHandle:
 
         return [
             WorkflowTaskInfo(
-                index=row[0],
-                name=row[1],
-                status=WorkflowTaskStatus(row[2]),
-                result=task_result_from_json(loads_json(row[3]))
-                if row[3]
+                node_id=row[0],
+                index=row[1],
+                name=row[2],
+                status=WorkflowTaskStatus(row[3]),
+                result=task_result_from_json(loads_json(row[4]))
+                if row[4]
                 else None,
-                started_at=row[4],
-                completed_at=row[5],
+                started_at=row[5],
+                completed_at=row[6],
             )
             for row in result.fetchall()
         ]
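Each workflow task row now carries its node identifier in addition to the positional index. A small sketch of consuming such entries; how the `list[WorkflowTaskInfo]` is obtained from a `WorkflowHandle` is not visible in this hunk, so `task_infos` is assumed to already hold it:

```python
# `task_infos` is assumed to be a list[WorkflowTaskInfo] returned by a WorkflowHandle query.
for t in task_infos:
    # node_id is typed `str | None`, so fall back to the positional index when absent.
    label = t.node_id or f'task-{t.index}'
    print(label, t.name, t.status)
```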
horsies/core/task_decorator.py

@@ -23,6 +23,7 @@ if TYPE_CHECKING:
     from horsies.core.app import Horsies
     from horsies.core.models.tasks import TaskOptions
     from horsies.core.models.tasks import TaskError, TaskResult
+    from horsies.core.models.tasks import TaskInfo
 
 from horsies.core.models.tasks import TaskResult, TaskError, LibraryErrorCode
 from horsies.core.models.workflow import WorkflowContextMissingIdError

@@ -191,6 +192,46 @@ class TaskHandle(Generic[T]):
             case result:
                 return result
 
+    def info(
+        self,
+        *,
+        include_result: bool = False,
+        include_failed_reason: bool = False,
+    ) -> 'TaskInfo | None':
+        """Fetch metadata for this task from the broker."""
+        if not self._broker_mode or not self._app:
+            raise RuntimeError(
+                'TaskHandle.info() requires a broker-backed task handle '
+                '(use .send() or .send_async())'
+            )
+
+        broker = self._app.get_broker()
+        return broker.get_task_info(
+            self.task_id,
+            include_result=include_result,
+            include_failed_reason=include_failed_reason,
+        )
+
+    async def info_async(
+        self,
+        *,
+        include_result: bool = False,
+        include_failed_reason: bool = False,
+    ) -> 'TaskInfo | None':
+        """Async version of info()."""
+        if not self._broker_mode or not self._app:
+            raise RuntimeError(
+                'TaskHandle.info_async() requires a broker-backed task handle '
+                '(use .send() or .send_async())'
+            )
+
+        broker = self._app.get_broker()
+        return await broker.get_task_info_async(
+            self.task_id,
+            include_result=include_result,
+            include_failed_reason=include_failed_reason,
+        )
+
     def set_immediate_result(
         self,
         result: TaskResult[T, TaskError],
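On the handle side these methods simply delegate to the broker, so callers never touch the broker directly. A minimal sketch, assuming a task function `add` registered with the horsies task decorator; the exact `.send()` signature is not shown in this diff and is assumed to mirror the task's arguments:

```python
# `add` is assumed to be a horsies task; .send() returns a broker-backed TaskHandle.
handle = add.send(2, 3)

info = handle.info(include_result=True)
if info is not None and info.status.is_terminal:
    print('finished with status', info.status, 'result:', info.result)

# Async variant:
# info = await handle.info_async(include_failed_reason=True)
```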
horsies/core/types/status.py

@@ -10,20 +10,20 @@ from enum import Enum
 class TaskStatus(Enum):
     """Task execution status"""
 
-    PENDING = '
+    PENDING = 'PENDING'  # It awaits to be a candidate for execution.
     # Default status when the task is sent.
 
     CLAIMED = (
-        '
+        'CLAIMED'  # It has been claimed by a worker but not yet started executing.
     )
 
-    RUNNING = '
+    RUNNING = 'RUNNING'  # It is being executed by a process.
 
-    COMPLETED = '
+    COMPLETED = 'COMPLETED'  # It has been executed successfully.
 
-    FAILED = '
-    CANCELLED = '
-    REQUEUED = '
+    FAILED = 'FAILED'  # It has failed to be executed.
+    CANCELLED = 'CANCELLED'  # It has been cancelled.
+    REQUEUED = 'REQUEUED'  # It has been requeued after a failure.
 
     @property
     def is_terminal(self) -> bool:
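With the enum values spelled out, converting the raw `status` column back into the enum is a plain value lookup; a small sketch (whether a given member counts as terminal is decided by `is_terminal`, whose body is not shown in this hunk):

```python
from horsies import TaskStatus

status = TaskStatus('FAILED')     # value as stored in the tasks table
assert status is TaskStatus.FAILED
print(status.is_terminal)         # True/False per the enum's own definition
```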