mageflow 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mageflow-0.0.1/LICENSE +21 -0
- mageflow-0.0.1/PKG-INFO +164 -0
- mageflow-0.0.1/README.md +125 -0
- mageflow-0.0.1/mageflow/__init__.py +30 -0
- mageflow-0.0.1/mageflow/callbacks.py +72 -0
- mageflow-0.0.1/mageflow/chain/__init__.py +0 -0
- mageflow-0.0.1/mageflow/chain/consts.py +8 -0
- mageflow-0.0.1/mageflow/chain/creator.py +73 -0
- mageflow-0.0.1/mageflow/chain/messages.py +9 -0
- mageflow-0.0.1/mageflow/chain/model.py +61 -0
- mageflow-0.0.1/mageflow/chain/workflows.py +65 -0
- mageflow-0.0.1/mageflow/client.py +140 -0
- mageflow-0.0.1/mageflow/errors.py +22 -0
- mageflow-0.0.1/mageflow/init.py +53 -0
- mageflow-0.0.1/mageflow/invokers/__init__.py +0 -0
- mageflow-0.0.1/mageflow/invokers/base.py +34 -0
- mageflow-0.0.1/mageflow/invokers/hatchet.py +82 -0
- mageflow-0.0.1/mageflow/models/__init__.py +0 -0
- mageflow-0.0.1/mageflow/models/message.py +6 -0
- mageflow-0.0.1/mageflow/signature/__init__.py +0 -0
- mageflow-0.0.1/mageflow/signature/consts.py +3 -0
- mageflow-0.0.1/mageflow/signature/creator.py +69 -0
- mageflow-0.0.1/mageflow/signature/model.py +319 -0
- mageflow-0.0.1/mageflow/signature/status.py +25 -0
- mageflow-0.0.1/mageflow/signature/types.py +6 -0
- mageflow-0.0.1/mageflow/startup.py +65 -0
- mageflow-0.0.1/mageflow/swarm/__init__.py +0 -0
- mageflow-0.0.1/mageflow/swarm/consts.py +12 -0
- mageflow-0.0.1/mageflow/swarm/creator.py +34 -0
- mageflow-0.0.1/mageflow/swarm/messages.py +7 -0
- mageflow-0.0.1/mageflow/swarm/model.py +260 -0
- mageflow-0.0.1/mageflow/swarm/workflows.py +120 -0
- mageflow-0.0.1/mageflow/task/__init__.py +0 -0
- mageflow-0.0.1/mageflow/task/model.py +19 -0
- mageflow-0.0.1/mageflow/typing_support.py +8 -0
- mageflow-0.0.1/mageflow/utils/__init__.py +0 -0
- mageflow-0.0.1/mageflow/utils/models.py +19 -0
- mageflow-0.0.1/mageflow/utils/pythonic.py +21 -0
- mageflow-0.0.1/mageflow/visualizer/__init__.py +0 -0
- mageflow-0.0.1/mageflow/visualizer/app.py +221 -0
- mageflow-0.0.1/mageflow/visualizer/assets/cytoscape_styles.py +63 -0
- mageflow-0.0.1/mageflow/visualizer/assets/styles.css +143 -0
- mageflow-0.0.1/mageflow/visualizer/builder.py +497 -0
- mageflow-0.0.1/mageflow/visualizer/data.py +65 -0
- mageflow-0.0.1/mageflow/visualizer/utils.py +72 -0
- mageflow-0.0.1/mageflow/workflows.py +128 -0
- mageflow-0.0.1/pyproject.toml +110 -0
mageflow-0.0.1/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 yedidyakfir
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
mageflow-0.0.1/PKG-INFO
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mageflow
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: Manage Graph Execution Flow - A unified interface for task orchestration across different task managers
|
|
5
|
+
License: MIT
|
|
6
|
+
License-File: LICENSE
|
|
7
|
+
Keywords: task-orchestration,workflow,task-manager,hatchet,async,redis,task-chain,task-swarm,distributed,microservices,pipeline,graph-execution,workflow-engine,task-queue,python
|
|
8
|
+
Author: yedidyakfir
|
|
9
|
+
Author-email: yedidyakfir@gmail.com
|
|
10
|
+
Requires-Python: >=3.10,<3.14
|
|
11
|
+
Classifier: Development Status :: 3 - Alpha
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
19
|
+
Classifier: Framework :: AsyncIO
|
|
20
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
21
|
+
Classifier: Topic :: System :: Distributed Computing
|
|
22
|
+
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
|
23
|
+
Classifier: Typing :: Typed
|
|
24
|
+
Classifier: Operating System :: OS Independent
|
|
25
|
+
Provides-Extra: display
|
|
26
|
+
Provides-Extra: hatchet
|
|
27
|
+
Requires-Dist: dash-bootstrap-components (>=2.0.4,<3.0.0) ; extra == "display"
|
|
28
|
+
Requires-Dist: dash-cytoscape (>=1.0.2,<2.0.0) ; extra == "display"
|
|
29
|
+
Requires-Dist: dash[async] (>=3.3.0,<4.0.0) ; extra == "display"
|
|
30
|
+
Requires-Dist: hatchet-sdk (>=1.21.1,<2.0.0) ; extra == "hatchet"
|
|
31
|
+
Requires-Dist: rapyer (>=1.1.0,<2.0.0)
|
|
32
|
+
Project-URL: Bug Tracker, https://github.com/yedidyakfir/mageflow/issues
|
|
33
|
+
Project-URL: Changelog, https://github.com/yedidyakfir/mageflow/releases
|
|
34
|
+
Project-URL: Documentation, https://yedidyakfir.github.io/mageflow/
|
|
35
|
+
Project-URL: Homepage, https://yedidyakfir.github.io/mageflow/
|
|
36
|
+
Project-URL: Repository, https://github.com/yedidyakfir/mageflow
|
|
37
|
+
Project-URL: Source Code, https://github.com/yedidyakfir/mageflow
|
|
38
|
+
Description-Content-Type: text/markdown
|
|
39
|
+
|
|
40
|
+
<div align="center">
|
|
41
|
+
<img src="logo.png" alt="MageFlow Logo" width="200"/>
|
|
42
|
+
|
|
43
|
+
[](https://www.python.org/downloads/)
|
|
44
|
+
[](https://opensource.org/licenses/MIT)
|
|
45
|
+
[](https://codecov.io/gh/yedidyakfir/mageflow)
|
|
46
|
+
[](https://badge.fury.io/py/mageflow)
|
|
47
|
+
[](https://pepy.tech/project/mageflow)
|
|
48
|
+
[](https://yedidyakfir.github.io/mageflow/)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
📚 **[Full Documentation](https://yedidyakfir.github.io/mageflow/)** | [Installation](https://yedidyakfir.github.io/mageflow/setup/) | [API Reference](https://yedidyakfir.github.io/mageflow/api/)
|
|
52
|
+
|
|
53
|
+
</div>
|
|
54
|
+
|
|
55
|
+
# MageFlow
|
|
56
|
+
|
|
57
|
+
**Ma**nage **G**raph **E**xecution Flow - A unified interface for task orchestration across different task managers.
|
|
58
|
+
|
|
59
|
+
## Why MageFlow?
|
|
60
|
+
|
|
61
|
+
Instead of spreading workflow logic throughout your codebase, MageFlow centralizes task orchestration with a clean, unified API. Switch between task managers (Hatchet, Taskiq, etc.) without rewriting your orchestration code.
|
|
62
|
+
|
|
63
|
+
## Key Features
|
|
64
|
+
|
|
65
|
+
🔗 **Task Chaining** - Sequential workflows where tasks depend on previous completions
|
|
66
|
+
🐝 **Task Swarms** - Parallel execution with intelligent coordination
|
|
67
|
+
📞 **Callback System** - Robust success/error handling
|
|
68
|
+
🎯 **Task Signatures** - Flexible task definition with validation
|
|
69
|
+
⏯️ **Lifecycle Control** - Pause, resume, and monitor task execution
|
|
70
|
+
💾 **Persistent State** - Redis-backed state management with recovery
|
|
71
|
+
|
|
72
|
+
## Installation
|
|
73
|
+
|
|
74
|
+
```bash
|
|
75
|
+
pip install mageflow[hatchet] # For Hatchet backend
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## Quick Setup
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
import asyncio
|
|
82
|
+
import redis
|
|
83
|
+
from hatchet_sdk import Hatchet, ClientConfig
|
|
84
|
+
import mageflow
|
|
85
|
+
|
|
86
|
+
# Configure backend and Redis
|
|
87
|
+
config = ClientConfig(token="your-hatchet-token")
|
|
88
|
+
redis_client = redis.asyncio.from_url("redis://localhost", decode_responses=True)
|
|
89
|
+
hatchet_client = Hatchet(config=config)
|
|
90
|
+
|
|
91
|
+
# Create MageFlow instance
|
|
92
|
+
mf = mageflow.Mageflow(hatchet_client, redis_client=redis_client)
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
## Example Usage
|
|
96
|
+
|
|
97
|
+
### Define Tasks
|
|
98
|
+
|
|
99
|
+
```python
|
|
100
|
+
from pydantic import BaseModel
|
|
101
|
+
|
|
102
|
+
class ProcessData(BaseModel):
|
|
103
|
+
data: str
|
|
104
|
+
|
|
105
|
+
@mf.task(name="process-data", input_validator=ProcessData)
|
|
106
|
+
async def process_data(msg: ProcessData):
|
|
107
|
+
return {"processed": msg.data}
|
|
108
|
+
|
|
109
|
+
@mf.task(name="send-notification")
|
|
110
|
+
async def send_notification(msg):
|
|
111
|
+
print(f"Notification sent: {msg}")
|
|
112
|
+
return {"status": "sent"}
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
### Chain Tasks
|
|
116
|
+
|
|
117
|
+
```python
|
|
118
|
+
# Sequential execution
|
|
119
|
+
workflow = await mageflow.chain([
|
|
120
|
+
process_data_task,
|
|
121
|
+
send_notification_task
|
|
122
|
+
], name="data-pipeline")
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
### Parallel Swarms
|
|
126
|
+
|
|
127
|
+
```python
|
|
128
|
+
# Parallel execution
|
|
129
|
+
swarm = await mageflow.swarm([
|
|
130
|
+
process_user_task,
|
|
131
|
+
update_cache_task,
|
|
132
|
+
send_email_task
|
|
133
|
+
], task_name="user-onboarding")
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
### Task Signatures with Callbacks
|
|
137
|
+
|
|
138
|
+
```python
|
|
139
|
+
task_signature = await mageflow.sign(
|
|
140
|
+
task_name="process-order",
|
|
141
|
+
task_identifiers={"order_id": "12345"},
|
|
142
|
+
success_callbacks=[send_confirmation_task],
|
|
143
|
+
error_callbacks=[handle_error_task]
|
|
144
|
+
)
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
## Use Cases
|
|
148
|
+
|
|
149
|
+
- **Data Pipelines** - ETL operations with error handling
|
|
150
|
+
- **Microservice Coordination** - Orchestrate distributed service calls
|
|
151
|
+
- **Batch Processing** - Parallel processing of large datasets
|
|
152
|
+
- **User Workflows** - Multi-step onboarding and registration
|
|
153
|
+
- **Content Processing** - Media processing with multiple stages
|
|
154
|
+
|
|
155
|
+
## Documentation
|
|
156
|
+
|
|
157
|
+
- [Setup Guide](docs/setup.md)
|
|
158
|
+
- [API Reference](docs/api/)
|
|
159
|
+
- [Task Lifecycle](docs/documentation/task-lifecycle.md)
|
|
160
|
+
- [Callbacks](docs/documentation/callbacks.md)
|
|
161
|
+
|
|
162
|
+
## License
|
|
163
|
+
|
|
164
|
+
MIT
|
mageflow-0.0.1/README.md
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
<div align="center">
|
|
2
|
+
<img src="logo.png" alt="MageFlow Logo" width="200"/>
|
|
3
|
+
|
|
4
|
+
[](https://www.python.org/downloads/)
|
|
5
|
+
[](https://opensource.org/licenses/MIT)
|
|
6
|
+
[](https://codecov.io/gh/yedidyakfir/mageflow)
|
|
7
|
+
[](https://badge.fury.io/py/mageflow)
|
|
8
|
+
[](https://pepy.tech/project/mageflow)
|
|
9
|
+
[](https://yedidyakfir.github.io/mageflow/)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
📚 **[Full Documentation](https://yedidyakfir.github.io/mageflow/)** | [Installation](https://yedidyakfir.github.io/mageflow/setup/) | [API Reference](https://yedidyakfir.github.io/mageflow/api/)
|
|
13
|
+
|
|
14
|
+
</div>
|
|
15
|
+
|
|
16
|
+
# MageFlow
|
|
17
|
+
|
|
18
|
+
**Ma**nage **G**raph **E**xecution Flow - A unified interface for task orchestration across different task managers.
|
|
19
|
+
|
|
20
|
+
## Why MageFlow?
|
|
21
|
+
|
|
22
|
+
Instead of spreading workflow logic throughout your codebase, MageFlow centralizes task orchestration with a clean, unified API. Switch between task managers (Hatchet, Taskiq, etc.) without rewriting your orchestration code.
|
|
23
|
+
|
|
24
|
+
## Key Features
|
|
25
|
+
|
|
26
|
+
🔗 **Task Chaining** - Sequential workflows where tasks depend on previous completions
|
|
27
|
+
🐝 **Task Swarms** - Parallel execution with intelligent coordination
|
|
28
|
+
📞 **Callback System** - Robust success/error handling
|
|
29
|
+
🎯 **Task Signatures** - Flexible task definition with validation
|
|
30
|
+
⏯️ **Lifecycle Control** - Pause, resume, and monitor task execution
|
|
31
|
+
💾 **Persistent State** - Redis-backed state management with recovery
|
|
32
|
+
|
|
33
|
+
## Installation
|
|
34
|
+
|
|
35
|
+
```bash
|
|
36
|
+
pip install mageflow[hatchet] # For Hatchet backend
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
## Quick Setup
|
|
40
|
+
|
|
41
|
+
```python
|
|
42
|
+
import asyncio
|
|
43
|
+
import redis
|
|
44
|
+
from hatchet_sdk import Hatchet, ClientConfig
|
|
45
|
+
import mageflow
|
|
46
|
+
|
|
47
|
+
# Configure backend and Redis
|
|
48
|
+
config = ClientConfig(token="your-hatchet-token")
|
|
49
|
+
redis_client = redis.asyncio.from_url("redis://localhost", decode_responses=True)
|
|
50
|
+
hatchet_client = Hatchet(config=config)
|
|
51
|
+
|
|
52
|
+
# Create MageFlow instance
|
|
53
|
+
mf = mageflow.Mageflow(hatchet_client, redis_client=redis_client)
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
## Example Usage
|
|
57
|
+
|
|
58
|
+
### Define Tasks
|
|
59
|
+
|
|
60
|
+
```python
|
|
61
|
+
from pydantic import BaseModel
|
|
62
|
+
|
|
63
|
+
class ProcessData(BaseModel):
|
|
64
|
+
data: str
|
|
65
|
+
|
|
66
|
+
@mf.task(name="process-data", input_validator=ProcessData)
|
|
67
|
+
async def process_data(msg: ProcessData):
|
|
68
|
+
return {"processed": msg.data}
|
|
69
|
+
|
|
70
|
+
@mf.task(name="send-notification")
|
|
71
|
+
async def send_notification(msg):
|
|
72
|
+
print(f"Notification sent: {msg}")
|
|
73
|
+
return {"status": "sent"}
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
### Chain Tasks
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
# Sequential execution
|
|
80
|
+
workflow = await mageflow.chain([
|
|
81
|
+
process_data_task,
|
|
82
|
+
send_notification_task
|
|
83
|
+
], name="data-pipeline")
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
### Parallel Swarms
|
|
87
|
+
|
|
88
|
+
```python
|
|
89
|
+
# Parallel execution
|
|
90
|
+
swarm = await mageflow.swarm([
|
|
91
|
+
process_user_task,
|
|
92
|
+
update_cache_task,
|
|
93
|
+
send_email_task
|
|
94
|
+
], task_name="user-onboarding")
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Task Signatures with Callbacks
|
|
98
|
+
|
|
99
|
+
```python
|
|
100
|
+
task_signature = await mageflow.sign(
|
|
101
|
+
task_name="process-order",
|
|
102
|
+
task_identifiers={"order_id": "12345"},
|
|
103
|
+
success_callbacks=[send_confirmation_task],
|
|
104
|
+
error_callbacks=[handle_error_task]
|
|
105
|
+
)
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
## Use Cases
|
|
109
|
+
|
|
110
|
+
- **Data Pipelines** - ETL operations with error handling
|
|
111
|
+
- **Microservice Coordination** - Orchestrate distributed service calls
|
|
112
|
+
- **Batch Processing** - Parallel processing of large datasets
|
|
113
|
+
- **User Workflows** - Multi-step onboarding and registration
|
|
114
|
+
- **Content Processing** - Media processing with multiple stages
|
|
115
|
+
|
|
116
|
+
## Documentation
|
|
117
|
+
|
|
118
|
+
- [Setup Guide](docs/setup.md)
|
|
119
|
+
- [API Reference](docs/api/)
|
|
120
|
+
- [Task Lifecycle](docs/documentation/task-lifecycle.md)
|
|
121
|
+
- [Callbacks](docs/documentation/callbacks.md)
|
|
122
|
+
|
|
123
|
+
## License
|
|
124
|
+
|
|
125
|
+
MIT
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"""Public API surface for the mageflow package.

Re-exports the user-facing entry points — task registration and callback
handling, signature creation and lifecycle control, and the chain/swarm
workflow builders — so callers only need ``import mageflow``.
"""
from mageflow.callbacks import register_task, handle_task_callback
from mageflow.chain.creator import chain
from mageflow.client import Mageflow
from mageflow.init import init_mageflow_hatchet_tasks
from mageflow.signature.creator import (
    sign,
    load_signature,
    resume_task,
    lock_task,
    resume,
    pause,
)
from mageflow.signature.status import TaskStatus
from mageflow.swarm.creator import swarm


# Explicit public API; anything not listed here is internal.
__all__ = [
    "load_signature",
    "resume_task",
    "lock_task",
    "resume",
    "pause",
    "sign",
    "init_mageflow_hatchet_tasks",
    "register_task",
    "handle_task_callback",
    "Mageflow",
    "chain",
    "swarm",
]
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import functools
|
|
3
|
+
import inspect
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from hatchet_sdk import Context
|
|
8
|
+
from hatchet_sdk.runnables.types import EmptyModel
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
|
|
11
|
+
from mageflow.invokers.hatchet import HatchetInvoker
|
|
12
|
+
from mageflow.utils.pythonic import flexible_call
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class AcceptParams(Enum):
    """Which arguments a wrapped task function accepts (see handle_task_callback)."""

    # Function is called with only the incoming message.
    JUST_MESSAGE = 1
    # Function gets the message plus extra args/kwargs, but not the Hatchet Context.
    NO_CTX = 2
    # Function gets the message, the Hatchet Context, and any extra args/kwargs.
    ALL = 3
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class HatchetResult(BaseModel):
    """Envelope model wrapping a task's raw return value for Hatchet."""

    # The task function's return value, unconstrained; serialized via
    # model_dump(mode="json") before being reported to the invoker.
    hatchet_results: Any
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def handle_task_callback(
    expected_params: AcceptParams = AcceptParams.NO_CTX, wrap_res: bool = True
):
    """Decorator factory wrapping a task function with the mageflow lifecycle.

    The produced wrapper is a Hatchet-compatible coroutine that:
      1. Cancels the run when the invoker says the task should not run.
      2. Marks the task started, then calls ``func`` with the argument set
         selected by ``expected_params``.
      3. Reports success (serialized via ``HatchetResult``) or error to the
         invoker, cleaning up the stored task either way.

    Args:
        expected_params: Which arguments ``func`` accepts — just the message,
            everything except the Hatchet ``Context``, or all of them.
        wrap_res: When True, return the ``HatchetResult`` wrapper; when False,
            return ``func``'s raw result.
    """

    def task_decorator(func):
        @functools.wraps(func)
        async def wrapper(message: EmptyModel, ctx: Context, *args, **kwargs):
            invoker = HatchetInvoker(message, ctx)
            if not await invoker.should_run_task():
                await ctx.aio_cancel()
                # Give cancellation time to take effect before falling through.
                await asyncio.sleep(10)
                # NOTE: This should not run, the task should cancel, but just in case
                return {"Error": "Task should have been canceled"}
            try:
                await invoker.start_task()
                if expected_params == AcceptParams.JUST_MESSAGE:
                    result = await flexible_call(func, message)
                elif expected_params == AcceptParams.NO_CTX:
                    result = await flexible_call(func, message, *args, **kwargs)
                else:
                    result = await flexible_call(func, message, ctx, *args, **kwargs)
            except (Exception, asyncio.CancelledError):
                # Report the failure and drop the stored task, then re-raise so
                # the backend also observes the error/cancellation.
                await invoker.run_error()
                await invoker.remove_task(with_error=False)
                raise
            else:
                task_results = HatchetResult(hatchet_results=result)
                dumped_results = task_results.model_dump(mode="json")
                await invoker.run_success(dumped_results["hatchet_results"])
                await invoker.remove_task(with_success=False)
                return task_results if wrap_res else result

        # Expose the wrapped function's signature instead of ``wrapper``'s —
        # presumably so Hatchet introspects the real task inputs; TODO confirm.
        wrapper.__signature__ = inspect.signature(func)
        return wrapper

    return task_decorator
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def register_task(register_name: str):
    """Decorator factory: queue the decorated function for startup registration.

    The pair ``(func, register_name)`` is appended to the global
    ``REGISTERED_TASKS`` list; actual registration happens later at startup.
    The decorated function is returned unchanged.
    """
    # Imported lazily to avoid a circular import with mageflow.startup.
    from mageflow.startup import REGISTERED_TASKS

    def _mark(task_func):
        REGISTERED_TASKS.append((task_func, register_name))
        return task_func

    return _mark
|
|
File without changes
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
from mageflow.chain.consts import ON_CHAIN_END, ON_CHAIN_ERROR
|
|
4
|
+
from mageflow.chain.messages import ChainSuccessTaskCommandMessage
|
|
5
|
+
from mageflow.chain.model import ChainTaskSignature
|
|
6
|
+
from mageflow.signature.creator import (
|
|
7
|
+
TaskSignatureConvertible,
|
|
8
|
+
resolve_signature_key,
|
|
9
|
+
)
|
|
10
|
+
from mageflow.signature.model import (
|
|
11
|
+
TaskIdentifierType,
|
|
12
|
+
TaskSignature,
|
|
13
|
+
TaskInputType,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
async def chain(
    tasks: list[TaskSignatureConvertible],
    name: str = None,
    error: TaskInputType = None,
    success: TaskInputType = None,
) -> ChainTaskSignature:
    """Create a sequential chain workflow out of *tasks*.

    Each task's success callback triggers the next task; any failure routes
    to the chain-level error handler. The persisted chain signature is
    returned and is deleted only once the whole chain finishes.

    Args:
        tasks: At least two task-signature-convertible items, run in order.
        name: Optional chain name; defaults to the first task's name.
        error: Optional callback signature invoked when the chain fails.
        success: Optional callback signature invoked on chain completion.

    Raises:
        ValueError: If fewer than two tasks are supplied.
    """
    # Validate BEFORE any side effects: previously this was only checked
    # inside _chain_task_to_previous_success, after the ChainTaskSignature
    # had already been saved — failing there orphaned a persisted signature
    # (and an empty list raised IndexError instead of the intended error).
    if len(tasks) < 2:
        raise ValueError(
            "Chained tasks must contain at least two tasks. "
            "If you want to run a single task, use `create_workflow` instead."
        )

    tasks = [await resolve_signature_key(task) for task in tasks]

    # Create a chain task that will be deleted only at the end of the chain
    first_task = tasks[0]
    chain_task_signature = ChainTaskSignature(
        task_name=f"chain-task:{name or first_task.task_name}",
        success_callbacks=[success] if success else [],
        error_callbacks=[error] if error else [],
        tasks=tasks,
    )
    await chain_task_signature.save()

    # Both chain-level hooks carry the chain's key so the end/error workflow
    # tasks can load the chain signature back from the store.
    callback_kwargs = dict(chain_task_id=chain_task_signature.key)
    on_chain_error = TaskSignature(
        task_name=ON_CHAIN_ERROR,
        task_identifiers=callback_kwargs,
        model_validators=ChainSuccessTaskCommandMessage,
    )
    on_chain_success = TaskSignature(
        task_name=ON_CHAIN_END,
        task_identifiers=callback_kwargs,
        model_validators=ChainSuccessTaskCommandMessage,
    )
    await _chain_task_to_previous_success(tasks, on_chain_error, on_chain_success)
    return chain_task_signature
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
async def _chain_task_to_previous_success(
    tasks: list[TaskSignature], error: TaskSignature, success: TaskSignature
) -> TaskIdentifierType:
    """
    Take a list of tasks and connect each one to the previous one.

    Every task gets the next task as its success callback (the final task gets
    *success*) and its own duplicate of *error* as its error callback. Returns
    the updated first task of the chain.
    """
    if len(tasks) < 2:
        raise ValueError(
            "Chained tasks must contain at least two tasks. "
            "If you want to run a single task, use `create_workflow` instead."
        )

    # One private copy of the error signature per task in the chain.
    error_copies = await error.duplicate_many(len(tasks))

    # Persist the success sentinel and every per-task error copy up front.
    await asyncio.gather(success.save(), *(copy.save() for copy in error_copies))

    # Pair each task with its successor (last task's successor is `success`)
    # and its own error copy, then wire the callbacks concurrently.
    successors = tasks[1:] + [success]
    linked = await asyncio.gather(
        *(
            task.add_callbacks(success=[nxt], errors=[err])
            for task, nxt, err in zip(tasks, successors, error_copies)
        )
    )
    return linked[0]
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
from pydantic import field_validator, Field
|
|
4
|
+
|
|
5
|
+
from mageflow.errors import MissingSignatureError
|
|
6
|
+
from mageflow.signature.model import TaskSignature, TaskIdentifierType
|
|
7
|
+
from mageflow.signature.status import SignatureStatus
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ChainTaskSignature(TaskSignature):
    """Signature for a chain workflow: a parent record holding the ordered
    member-task keys, delegating lifecycle operations to every member."""

    # Ordered keys of the chained member tasks (converted from signatures
    # by the validator below).
    tasks: list[TaskIdentifierType] = Field(default_factory=list)

    @field_validator("tasks", mode="before")
    @classmethod
    def validate_tasks(cls, v: list[TaskSignature]):
        """Coerce each incoming signature/key into a stored task key."""
        return [cls.validate_task_key(item) for item in v]

    async def workflow(self, **task_additional_params):
        """Build the runnable workflow by delegating to the chain's first task.

        Raises:
            MissingSignatureError: If the first task is no longer in the store.
        """
        first_task = await TaskSignature.get_safe(self.tasks[0])
        if first_task is None:
            raise MissingSignatureError(f"First task from chain {self.key} not found")
        return await first_task.workflow(**task_additional_params)

    async def delete_chain_tasks(self, with_errors=True, with_success=True):
        """Remove every member task (and optionally its callbacks) from the store.

        Lookup failures are tolerated: only results that resolved to real
        TaskSignature instances are removed.
        """
        signatures = await asyncio.gather(
            *[TaskSignature.get_safe(signature_id) for signature_id in self.tasks],
            return_exceptions=True,
        )
        # Drop None results and swallowed exceptions from the gather above.
        signatures = [sign for sign in signatures if isinstance(sign, TaskSignature)]
        delete_tasks = [
            signature.remove(with_errors, with_success) for signature in signatures
        ]
        await asyncio.gather(*delete_tasks)

    async def change_status(self, status: SignatureStatus):
        """Apply *status* to the chain record and to every member task.

        Member failures are suppressed (return_exceptions=True) so one broken
        task cannot block the status change of the rest.
        """
        pause_chain_tasks = [
            TaskSignature.safe_change_status(task, status) for task in self.tasks
        ]
        pause_chain = super().change_status(status)
        await asyncio.gather(pause_chain, *pause_chain_tasks, return_exceptions=True)

    async def suspend(self):
        """Suspend all member tasks, then mark the chain itself SUSPENDED."""
        await asyncio.gather(
            *[TaskSignature.suspend_from_key(task_id) for task_id in self.tasks],
            return_exceptions=True,
        )
        await super().change_status(SignatureStatus.SUSPENDED)

    async def interrupt(self):
        """Interrupt all member tasks, then mark the chain itself INTERRUPTED."""
        await asyncio.gather(
            *[TaskSignature.interrupt_from_key(task_id) for task_id in self.tasks],
            return_exceptions=True,
        )
        await super().change_status(SignatureStatus.INTERRUPTED)

    async def resume(self):
        """Resume all member tasks, then restore the chain's previous status."""
        await asyncio.gather(
            *[TaskSignature.resume_from_key(task_key) for task_key in self.tasks],
            return_exceptions=True,
        )
        # Restore whatever status the chain held before suspension/interruption.
        await super().change_status(self.task_status.last_status)
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
from hatchet_sdk import Context
|
|
4
|
+
from hatchet_sdk.runnables.types import EmptyModel
|
|
5
|
+
|
|
6
|
+
from mageflow.chain.consts import CHAIN_TASK_ID_NAME
|
|
7
|
+
from mageflow.chain.messages import ChainSuccessTaskCommandMessage
|
|
8
|
+
from mageflow.chain.model import ChainTaskSignature
|
|
9
|
+
from mageflow.invokers.hatchet import HatchetInvoker
|
|
10
|
+
from mageflow.signature.consts import TASK_ID_PARAM_NAME
|
|
11
|
+
from mageflow.signature.model import TaskSignature
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
async def chain_end_task(msg: ChainSuccessTaskCommandMessage, ctx: Context) -> None:
    """Workflow task run when a chain completes successfully.

    Loads the chain signature and the final member task from the invoker
    context, fires the chain's success callbacks with the accumulated
    results, then removes both records from the store.
    """
    try:
        task_data = HatchetInvoker(msg, ctx).task_ctx
        chain_task_id = task_data[CHAIN_TASK_ID_NAME]
        current_task_id = task_data[TASK_ID_PARAM_NAME]

        chain_task_signature, current_task = await asyncio.gather(
            ChainTaskSignature.get_safe(chain_task_id),
            TaskSignature.get_safe(current_task_id),
        )
        # NOTE(review): get_safe may return None; a missing signature would
        # surface here as an AttributeError caught by the handler below.
        ctx.log(f"Chain task done {chain_task_signature.task_name}")

        # Fire the chain's success callbacks before deletion, so a deletion
        # error cannot disturb the workflow's success path.
        await chain_task_signature.activate_success(msg.chain_results)
        ctx.log(f"Chain task success {chain_task_signature.task_name}")

        # Remove the chain record (keeping its success callbacks, already
        # consumed) and the final member task.
        await asyncio.gather(
            chain_task_signature.remove(with_success=False), current_task.remove()
        )
    except Exception as e:
        # Infrastructure-level failure; log loudly and let Hatchet see it.
        ctx.log(f"MAJOR - infrastructure error in chain end task: {e}")
        raise
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# This task needs to be added as a workflow
async def chain_error_task(msg: EmptyModel, ctx: Context) -> None:
    """Workflow task run when any member of a chain fails.

    Loads the chain signature and the failed member task from the invoker
    context, fires the chain's error callbacks with the failure message, then
    deletes every member task plus the chain and failed-task records.
    """
    try:
        task_data = HatchetInvoker(msg, ctx).task_ctx
        chain_task_id = task_data[CHAIN_TASK_ID_NAME]
        current_task_id = task_data[TASK_ID_PARAM_NAME]
        chain_packed_task, current_task = await asyncio.gather(
            ChainTaskSignature.get_safe(chain_task_id),
            TaskSignature.get_safe(current_task_id),
        )
        # NOTE(review): get_safe may return None; a missing signature would
        # surface here as an AttributeError caught by the handler below.
        ctx.log(
            f"Chain task failed {chain_packed_task.task_name} on task id - {current_task_id}"
        )

        # Fire the chain's error callbacks before any deletion, so a cleanup
        # error cannot disturb the error-handling path.
        await chain_packed_task.activate_error(msg)
        ctx.log(f"Chain task error {chain_packed_task.task_name}")

        # Clean up: remaining member tasks first, then the chain record
        # (keeping its error callbacks, already consumed) and the failed task.
        await chain_packed_task.delete_chain_tasks()
        await asyncio.gather(
            chain_packed_task.remove(with_error=False), current_task.remove()
        )
        ctx.log(f"Clean redis from chain tasks {chain_packed_task.task_name}")
    except Exception as e:
        # Infrastructure-level failure; log loudly and let Hatchet see it.
        ctx.log(f"MAJOR - infrastructure error in chain error task: {e}")
        raise
|