pyoco 0.1.0__tar.gz → 0.3.0__tar.gz

Files changed (41)
  1. pyoco-0.3.0/PKG-INFO +146 -0
  2. pyoco-0.3.0/README.md +135 -0
  3. {pyoco-0.1.0 → pyoco-0.3.0}/pyproject.toml +4 -1
  4. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/cli/main.py +122 -16
  5. pyoco-0.3.0/src/pyoco/client.py +69 -0
  6. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/core/context.py +20 -4
  7. pyoco-0.3.0/src/pyoco/core/engine.py +352 -0
  8. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/core/models.py +41 -0
  9. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/discovery/loader.py +1 -2
  10. pyoco-0.3.0/src/pyoco/server/api.py +71 -0
  11. pyoco-0.3.0/src/pyoco/server/models.py +28 -0
  12. pyoco-0.3.0/src/pyoco/server/store.py +82 -0
  13. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/trace/backend.py +1 -1
  14. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/trace/console.py +12 -4
  15. pyoco-0.3.0/src/pyoco/worker/__init__.py +0 -0
  16. pyoco-0.3.0/src/pyoco/worker/client.py +43 -0
  17. pyoco-0.3.0/src/pyoco/worker/runner.py +171 -0
  18. pyoco-0.3.0/src/pyoco.egg-info/PKG-INFO +146 -0
  19. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco.egg-info/SOURCES.txt +14 -1
  20. pyoco-0.3.0/src/pyoco.egg-info/requires.txt +4 -0
  21. pyoco-0.3.0/tests/test_cancellation.py +82 -0
  22. pyoco-0.3.0/tests/test_cli_cancellation.py +101 -0
  23. pyoco-0.3.0/tests/test_engine_state.py +68 -0
  24. pyoco-0.3.0/tests/test_integration_v030.py +134 -0
  25. pyoco-0.3.0/tests/test_state_models.py +43 -0
  26. pyoco-0.1.0/PKG-INFO +0 -7
  27. pyoco-0.1.0/src/pyoco/core/engine.py +0 -249
  28. pyoco-0.1.0/src/pyoco.egg-info/PKG-INFO +0 -7
  29. pyoco-0.1.0/src/pyoco.egg-info/requires.txt +0 -1
  30. {pyoco-0.1.0 → pyoco-0.3.0}/setup.cfg +0 -0
  31. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/__init__.py +0 -0
  32. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/cli/entry.py +0 -0
  33. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/core/base_task.py +0 -0
  34. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/dsl/__init__.py +0 -0
  35. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/dsl/syntax.py +0 -0
  36. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/schemas/config.py +0 -0
  37. /pyoco-0.1.0/README.md → /pyoco-0.3.0/src/pyoco/server/__init__.py +0 -0
  38. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco.egg-info/dependency_links.txt +0 -0
  39. {pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco.egg-info/top_level.txt +0 -0
  40. {pyoco-0.1.0 → pyoco-0.3.0}/tests/test_dsl.py +0 -0
  41. {pyoco-0.1.0 → pyoco-0.3.0}/tests/test_engine.py +0 -0
pyoco-0.3.0/PKG-INFO ADDED
@@ -0,0 +1,146 @@
+ Metadata-Version: 2.4
+ Name: pyoco
+ Version: 0.3.0
+ Summary: A workflow engine with sugar syntax
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ Requires-Dist: pyyaml>=6.0.3
+ Requires-Dist: fastapi>=0.100.0
+ Requires-Dist: uvicorn>=0.20.0
+ Requires-Dist: httpx>=0.24.0
+
+ # 🐇 Pyoco
+
+ **pyoco is a minimal, pure-Python DAG engine for defining and running simple task-based workflows.**
+
+ ## Overview
+
+ Pyoco is designed to be significantly smaller, lighter, and have fewer dependencies than full-scale workflow engines like Airflow. It is optimized for local development and single-machine execution.
+
+ You can define tasks and their dependencies entirely in Python code using decorators and a simple API. There is no need for complex configuration files or external databases.
+
+ It is ideal for small jobs, development environments, and personal projects where a full-stack workflow engine would be overkill.
+
+ ## ✨ Features
+
+ - **Pure Python**: No external services or heavy dependencies required.
+ - **Minimal DAG model**: Tasks and dependencies are defined directly in code.
+ - **Task-oriented**: Focus on "small workflows" that should be easy to read and maintain.
+ - **Friendly trace logs**: Runs can be traced step by step from the terminal with cute (or plain) logs.
+ - **Parallel Execution**: Automatically runs independent tasks in parallel.
+ - **Artifact Management**: Easily save and manage task outputs and files.
+ - **Observability**: Track execution with unique Run IDs and detailed state transitions.
+ - **Control**: Cancel running workflows gracefully with `Ctrl+C`.
+
+ ## 📦 Installation
+
+ ```bash
+ pip install pyoco
+ ```
+
+ ## 🚀 Usage
+
+ Here is a minimal example of a pure-Python workflow.
+
+ ```python
+ from pyoco import task
+ from pyoco.core.models import Flow
+ from pyoco.core.engine import Engine
+
+ @task
+ def fetch_data(ctx):
+     print("🐰 Fetching data...")
+     return {"id": 1, "value": "carrot"}
+
+ @task
+ def process_data(ctx, data):
+     print(f"🥕 Processing: {data['value']}")
+     return data['value'].upper()
+
+ @task
+ def save_result(ctx, result):
+     print(f"✨ Saved: {result}")
+
+ # Define the flow
+ flow = Flow(name="hello_pyoco")
+ flow >> fetch_data >> process_data >> save_result
+
+ # Wire inputs (explicitly for this example)
+ process_data.task.inputs = {"data": "$node.fetch_data.output"}
+ save_result.task.inputs = {"result": "$node.process_data.output"}
+
+ if __name__ == "__main__":
+     engine = Engine()
+     engine.run(flow)
+ ```
+
+ Run it:
+
+ ```bash
+ python examples/hello_pyoco.py
+ ```
+
+ Output:
+
+ ```
+ 🐇 pyoco > start flow=hello_pyoco
+ 🏃 start node=fetch_data
+ 🐰 Fetching data...
+ ✅ done node=fetch_data (0.30 ms)
+ 🏃 start node=process_data
+ 🥕 Processing: carrot
+ ✅ done node=process_data (0.23 ms)
+ 🏃 start node=save_result
+ ✨ Saved: CARROT
+ ✅ done node=save_result (0.30 ms)
+ 🥕 done flow=hello_pyoco
+ ```
+
+ See [examples/hello_pyoco.py](examples/hello_pyoco.py) for the full code.
+
+ ## 🏗️ Architecture
+
+ Pyoco is designed with a simple flow:
+
+ ```
+ +-----------+      +------------------+      +-----------------+
+ | User Code | ---> | pyoco.core.Flow  | ---> | trace/logger    |
+ | (Tasks)   |      | (Engine)         |      | (Console/File)  |
+ +-----------+      +------------------+      +-----------------+
+ ```
+
+ 1. **User Code**: You define tasks and flows using Python decorators.
+ 2. **Core Engine**: The engine resolves dependencies and executes tasks (in parallel where possible).
+ 3. **Trace**: Execution events are sent to the trace backend for logging (cute or plain).
+
+ ## 🎭 Modes
+
+ Pyoco has two output modes:
+
+ - **Cute Mode** (Default): Uses emojis and friendly messages. Best for local development and learning.
+ - **Non-Cute Mode**: Plain text logs. Best for CI/CD and production monitoring.
+
+ You can switch modes using an environment variable:
+
+ ```bash
+ export PYOCO_CUTE=0  # Disable cute mode
+ ```
+
+ Or via CLI flag:
+
+ ```bash
+ pyoco run --non-cute ...
+ ```
+
+ ## 📚 Documentation
+
+ - [Tutorials](docs/tutorial/index.md)
+ - [Roadmap](docs/roadmap.md)
+
+ ## 💖 Contributing
+
+ We love contributions! Please feel free to submit a Pull Request.
+
+ ---
+
+ *Made with 🥕 by the Pyoco Team.*
pyoco-0.3.0/README.md ADDED
@@ -0,0 +1,135 @@
+ # 🐇 Pyoco
+
+ **pyoco is a minimal, pure-Python DAG engine for defining and running simple task-based workflows.**
+
+ ## Overview
+
+ Pyoco is designed to be significantly smaller, lighter, and have fewer dependencies than full-scale workflow engines like Airflow. It is optimized for local development and single-machine execution.
+
+ You can define tasks and their dependencies entirely in Python code using decorators and a simple API. There is no need for complex configuration files or external databases.
+
+ It is ideal for small jobs, development environments, and personal projects where a full-stack workflow engine would be overkill.
+
+ ## ✨ Features
+
+ - **Pure Python**: No external services or heavy dependencies required.
+ - **Minimal DAG model**: Tasks and dependencies are defined directly in code.
+ - **Task-oriented**: Focus on "small workflows" that should be easy to read and maintain.
+ - **Friendly trace logs**: Runs can be traced step by step from the terminal with cute (or plain) logs.
+ - **Parallel Execution**: Automatically runs independent tasks in parallel.
+ - **Artifact Management**: Easily save and manage task outputs and files.
+ - **Observability**: Track execution with unique Run IDs and detailed state transitions.
+ - **Control**: Cancel running workflows gracefully with `Ctrl+C`.
+
+ ## 📦 Installation
+
+ ```bash
+ pip install pyoco
+ ```
+
+ ## 🚀 Usage
+
+ Here is a minimal example of a pure-Python workflow.
+
+ ```python
+ from pyoco import task
+ from pyoco.core.models import Flow
+ from pyoco.core.engine import Engine
+
+ @task
+ def fetch_data(ctx):
+     print("🐰 Fetching data...")
+     return {"id": 1, "value": "carrot"}
+
+ @task
+ def process_data(ctx, data):
+     print(f"🥕 Processing: {data['value']}")
+     return data['value'].upper()
+
+ @task
+ def save_result(ctx, result):
+     print(f"✨ Saved: {result}")
+
+ # Define the flow
+ flow = Flow(name="hello_pyoco")
+ flow >> fetch_data >> process_data >> save_result
+
+ # Wire inputs (explicitly for this example)
+ process_data.task.inputs = {"data": "$node.fetch_data.output"}
+ save_result.task.inputs = {"result": "$node.process_data.output"}
+
+ if __name__ == "__main__":
+     engine = Engine()
+     engine.run(flow)
+ ```
+
+ Run it:
+
+ ```bash
+ python examples/hello_pyoco.py
+ ```
+
+ Output:
+
+ ```
+ 🐇 pyoco > start flow=hello_pyoco
+ 🏃 start node=fetch_data
+ 🐰 Fetching data...
+ ✅ done node=fetch_data (0.30 ms)
+ 🏃 start node=process_data
+ 🥕 Processing: carrot
+ ✅ done node=process_data (0.23 ms)
+ 🏃 start node=save_result
+ ✨ Saved: CARROT
+ ✅ done node=save_result (0.30 ms)
+ 🥕 done flow=hello_pyoco
+ ```
+
+ See [examples/hello_pyoco.py](examples/hello_pyoco.py) for the full code.
+
+ ## 🏗️ Architecture
+
+ Pyoco is designed with a simple flow:
+
+ ```
+ +-----------+      +------------------+      +-----------------+
+ | User Code | ---> | pyoco.core.Flow  | ---> | trace/logger    |
+ | (Tasks)   |      | (Engine)         |      | (Console/File)  |
+ +-----------+      +------------------+      +-----------------+
+ ```
+
+ 1. **User Code**: You define tasks and flows using Python decorators.
+ 2. **Core Engine**: The engine resolves dependencies and executes tasks (in parallel where possible).
+ 3. **Trace**: Execution events are sent to the trace backend for logging (cute or plain).
+
+ ## 🎭 Modes
+
+ Pyoco has two output modes:
+
+ - **Cute Mode** (Default): Uses emojis and friendly messages. Best for local development and learning.
+ - **Non-Cute Mode**: Plain text logs. Best for CI/CD and production monitoring.
+
+ You can switch modes using an environment variable:
+
+ ```bash
+ export PYOCO_CUTE=0  # Disable cute mode
+ ```
+
+ Or via CLI flag:
+
+ ```bash
+ pyoco run --non-cute ...
+ ```
+
+ ## 📚 Documentation
+
+ - [Tutorials](docs/tutorial/index.md)
+ - [Roadmap](docs/roadmap.md)
+
+ ## 💖 Contributing
+
+ We love contributions! Please feel free to submit a Pull Request.
+
+ ---
+
+ *Made with 🥕 by the Pyoco Team.*
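The README above documents the `PYOCO_CUTE` switch, while the CLI diff below selects the trace style via `ConsoleTraceBackend(style="cute"/"plain")` passed to `Engine(trace_backend=...)`. A minimal sketch of wiring that up programmatically; reading `PYOCO_CUTE` by hand is an assumption here (the package may already handle the variable internally), and only the constructor arguments visible in this diff are used:

```python
import os

from pyoco.core.engine import Engine
from pyoco.trace.console import ConsoleTraceBackend

# Assumption: mirror the documented PYOCO_CUTE toggle manually; the backend and
# engine calls match those shown in src/pyoco/cli/main.py further down in this diff.
cute = os.environ.get("PYOCO_CUTE", "1") != "0"
backend = ConsoleTraceBackend(style="cute" if cute else "plain")
engine = Engine(trace_backend=backend)
```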
{pyoco-0.1.0 → pyoco-0.3.0}/pyproject.toml CHANGED
@@ -1,9 +1,12 @@
  [project]
  name = "pyoco"
- version = "0.1.0"
+ version = "0.3.0"
  description = "A workflow engine with sugar syntax"
  readme = "README.md"
  requires-python = ">=3.10"
  dependencies = [
      "pyyaml>=6.0.3",
+     "fastapi>=0.100.0",
+     "uvicorn>=0.20.0",
+     "httpx>=0.24.0",
  ]
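The three new runtime dependencies map onto the new modules in this release: FastAPI and uvicorn back `pyoco.server`, and httpx backs `pyoco.client` and the worker. For reference, the server the CLI starts below can also be launched with uvicorn directly; this is a sketch assuming the app object is importable as `pyoco.server.api:app`, the same string the CLI passes to `uvicorn.run`:

```bash
# Equivalent to `pyoco server start --host 0.0.0.0 --port 8000` (see the CLI diff below).
uvicorn pyoco.server.api:app --host 0.0.0.0 --port 8000
```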
{pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/cli/main.py CHANGED
@@ -1,11 +1,13 @@
  import argparse
  import sys
  import os
+ import signal
  from ..schemas.config import PyocoConfig
  from ..discovery.loader import TaskLoader
  from ..core.models import Flow
  from ..core.engine import Engine
  from ..trace.console import ConsoleTraceBackend
+ from ..client import Client

  def main():
      parser = argparse.ArgumentParser(description="Pyoco Workflow Engine")
@@ -20,6 +22,7 @@ def main():
      run_parser.add_argument("--non-cute", action="store_false", dest="cute", help="Use plain trace style")
      # Allow overriding params via CLI
      run_parser.add_argument("--param", action="append", help="Override params (key=value)")
+     run_parser.add_argument("--server", help="Server URL for remote execution")

      # Check command
      check_parser = subparsers.add_parser("check", help="Verify a workflow")
@@ -30,35 +33,136 @@ def main():
      list_parser = subparsers.add_parser("list-tasks", help="List available tasks")
      list_parser.add_argument("--config", required=True, help="Path to flow.yaml")

+     # Server command
+     server_parser = subparsers.add_parser("server", help="Manage Kanban Server")
+     server_subparsers = server_parser.add_subparsers(dest="server_command")
+     server_start = server_subparsers.add_parser("start", help="Start the server")
+     server_start.add_argument("--host", default="0.0.0.0", help="Host to bind")
+     server_start.add_argument("--port", type=int, default=8000, help="Port to bind")
+
+     # Worker command
+     worker_parser = subparsers.add_parser("worker", help="Manage Worker")
+     worker_subparsers = worker_parser.add_subparsers(dest="worker_command")
+     worker_start = worker_subparsers.add_parser("start", help="Start a worker")
+     worker_start.add_argument("--server", required=True, help="Server URL")
+     worker_start.add_argument("--config", required=True, help="Path to flow.yaml")
+     worker_start.add_argument("--tags", help="Comma-separated tags")
+
+     # Runs command
+     runs_parser = subparsers.add_parser("runs", help="Manage runs")
+     runs_subparsers = runs_parser.add_subparsers(dest="runs_command")
+
+     runs_list = runs_subparsers.add_parser("list", help="List runs")
+     runs_list.add_argument("--server", default="http://localhost:8000", help="Server URL")
+     runs_list.add_argument("--status", help="Filter by status")
+
+     runs_show = runs_subparsers.add_parser("show", help="Show run details")
+     runs_show.add_argument("run_id", help="Run ID")
+     runs_show.add_argument("--server", default="http://localhost:8000", help="Server URL")
+
+     runs_cancel = runs_subparsers.add_parser("cancel", help="Cancel a run")
+     runs_cancel.add_argument("run_id", help="Run ID")
+     runs_cancel.add_argument("--server", default="http://localhost:8000", help="Server URL")
+
      args = parser.parse_args()

      if not args.command:
          parser.print_help()
          sys.exit(1)

-     # Load config
-     try:
-         config = PyocoConfig.from_yaml(args.config)
-     except Exception as e:
-         print(f"Error loading config: {e}")
-         sys.exit(1)
+     # Load config only if needed
+     config = None
+     if hasattr(args, 'config') and args.config:
+         try:
+             config = PyocoConfig.from_yaml(args.config)
+         except Exception as e:
+             print(f"Error loading config: {e}")
+             sys.exit(1)

-     # Discover tasks
-     loader = TaskLoader(config)
-     loader.load()
+     # Discover tasks only if config is loaded
+     loader = None
+     if config:
+         loader = TaskLoader(config)
+         loader.load()

      if args.command == "list-tasks":
+         if not loader:
+             print("Error: Config not loaded.")
+             sys.exit(1)
          print("Available tasks:")
          for name in loader.tasks:
              print(f"  - {name}")
          return

+     if args.command == "server":
+         if args.server_command == "start":
+             import uvicorn
+             print(f"🐇 Starting Kanban Server on {args.host}:{args.port}")
+             uvicorn.run("pyoco.server.api:app", host=args.host, port=args.port, log_level="info")
+         return
+
+     if args.command == "worker":
+         if args.worker_command == "start":
+             from ..worker.runner import Worker
+             tags = args.tags.split(",") if args.tags else []
+             worker = Worker(args.server, config, tags)
+             worker.start()
+         return
+
+     if args.command == "runs":
+         client = Client(args.server)
+         try:
+             if args.runs_command == "list":
+                 runs = client.list_runs(status=args.status)
+                 print(f"🐇 Active Runs ({len(runs)}):")
+                 print(f"{'ID':<36} | {'Status':<12} | {'Flow':<15}")
+                 print("-" * 70)
+                 for r in runs:
+                     # RunContext doesn't have flow_name in core model, but store adds it.
+                     # We need to access it safely.
+                     flow_name = r.get("flow_name", "???")
+                     print(f"{r['run_id']:<36} | {r['status']:<12} | {flow_name:<15}")
+
+             elif args.runs_command == "show":
+                 run = client.get_run(args.run_id)
+                 print(f"🐇 Run: {run['run_id']}")
+                 print(f"Status: {run['status']}")
+                 print("Tasks:")
+                 for t_name, t_state in run.get("tasks", {}).items():
+                     print(f"  [{t_state}] {t_name}")
+
+             elif args.runs_command == "cancel":
+                 client.cancel_run(args.run_id)
+                 print(f"🛑 Cancellation requested for run {args.run_id}")
+         except Exception as e:
+             print(f"Error: {e}")
+         return
+
      if args.command == "run":
          flow_conf = config.flows.get(args.flow)
          if not flow_conf:
              print(f"Flow '{args.flow}' not found in config.")
              sys.exit(1)

+         # Params
+         params = flow_conf.defaults.copy()
+         if args.param:
+             for p in args.param:
+                 if "=" in p:
+                     k, v = p.split("=", 1)
+                     params[k] = v  # Simple string parsing for now
+
+         if args.server:
+             # Remote execution
+             client = Client(args.server)
+             try:
+                 run_id = client.submit_run(args.flow, params)
+                 print(f"🚀 Flow submitted! Run ID: {run_id}")
+                 print(f"📋 View status: pyoco runs show {run_id} --server {args.server}")
+             except Exception as e:
+                 print(f"Error submitting flow: {e}")
+                 sys.exit(1)
+             return
          # Build Flow from graph string
          from ..dsl.syntax import TaskWrapper
          eval_context = {name: TaskWrapper(task) for name, task in loader.tasks.items()}
@@ -76,13 +180,15 @@ def main():
          backend = ConsoleTraceBackend(style="cute" if args.cute else "plain")
          engine = Engine(trace_backend=backend)

-         # Params
-         params = flow_conf.defaults.copy()
-         if args.param:
-             for p in args.param:
-                 if "=" in p:
-                     k, v = p.split("=", 1)
-                     params[k] = v  # Simple string parsing for now
+         # Params (Moved up)
+
+         # Signal handler for cancellation
+         def signal_handler(sig, frame):
+             print("\n🛑 Ctrl+C detected. Cancelling active runs...")
+             for rid in list(engine.active_runs.keys()):
+                 engine.cancel(rid)
+
+         signal.signal(signal.SIGINT, signal_handler)

          engine.run(flow, params)

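Taken together, the new subcommands describe a small client/server workflow. A hypothetical session might look like the following; the `server`, `worker`, and `runs` flags come from the argparse definitions above, while the `--config`/`--flow` spellings on `pyoco run` and the `running` status value are assumptions, since those parts are not visible in this hunk:

```bash
# Start the Kanban server (argparse defaults above: 0.0.0.0:8000)
pyoco server start --host 127.0.0.1 --port 8000

# Start a worker that pulls runs from the server and executes flows from flow.yaml
pyoco worker start --server http://localhost:8000 --config flow.yaml --tags etl,gpu

# Submit a run remotely instead of executing it in-process (--server triggers submit_run)
pyoco run --config flow.yaml --flow hello_pyoco --server http://localhost:8000 --param limit=10

# Inspect and cancel runs
pyoco runs list --server http://localhost:8000 --status running
pyoco runs show <run_id> --server http://localhost:8000
pyoco runs cancel <run_id> --server http://localhost:8000
```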
pyoco-0.3.0/src/pyoco/client.py ADDED
@@ -0,0 +1,69 @@
+ import httpx
+ from typing import Dict, List, Optional, Any
+ from .core.models import RunStatus, TaskState
+
+ class Client:
+     def __init__(self, server_url: str, client_id: str = "cli"):
+         self.server_url = server_url.rstrip("/")
+         self.client_id = client_id
+         self.client = httpx.Client(base_url=self.server_url)
+
+     def submit_run(self, flow_name: str, params: Dict[str, Any], tags: List[str] = []) -> str:
+         resp = self.client.post("/runs", json={
+             "flow_name": flow_name,
+             "params": params,
+             "tags": tags
+         })
+         resp.raise_for_status()
+         return resp.json()["run_id"]
+
+     def list_runs(self, status: Optional[str] = None) -> List[Dict]:
+         params = {}
+         if status:
+             params["status"] = status
+         resp = self.client.get("/runs", params=params)
+         resp.raise_for_status()
+         return resp.json()
+
+     def get_run(self, run_id: str) -> Dict:
+         resp = self.client.get(f"/runs/{run_id}")
+         resp.raise_for_status()
+         return resp.json()
+
+     def cancel_run(self, run_id: str):
+         resp = self.client.post(f"/runs/{run_id}/cancel")
+         resp.raise_for_status()
+
+     def poll(self, tags: List[str] = []) -> Optional[Dict[str, Any]]:
+         try:
+             resp = self.client.post("/workers/poll", json={
+                 "worker_id": self.client_id,
+                 "tags": tags
+             })
+             resp.raise_for_status()
+             data = resp.json()
+             if data.get("run_id"):
+                 return data
+             return None
+         except Exception as e:
+             # print(f"Poll failed: {e}")
+             return None
+
+     def heartbeat(self, run_id: str, task_states: Dict[str, TaskState], run_status: RunStatus) -> bool:
+         """
+         Sends heartbeat. Returns True if cancellation is requested.
+         """
+         try:
+             # Convert Enums to values
+             states_json = {k: v.value if hasattr(v, 'value') else v for k, v in task_states.items()}
+             status_value = run_status.value if hasattr(run_status, 'value') else run_status
+
+             resp = self.client.post(f"/runs/{run_id}/heartbeat", json={
+                 "task_states": states_json,
+                 "run_status": status_value
+             })
+             resp.raise_for_status()
+             return resp.json().get("cancel_requested", False)
+         except Exception as e:
+             print(f"Heartbeat failed: {e}")
+             return False
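The same HTTP surface is usable from Python directly. A short sketch based only on the methods above; the server URL and flow name are placeholders:

```python
from pyoco.client import Client

client = Client("http://localhost:8000")

# Submit a run; "hello_pyoco" is a placeholder flow name that must exist on the server side.
run_id = client.submit_run("hello_pyoco", params={"limit": "10"})

# Inspect it; get_run returns the JSON the CLI's `runs show` prints (status plus task states).
run = client.get_run(run_id)
print(run["status"], run.get("tasks", {}))

# Ask the server to cancel; workers observe this via the cancel_requested flag in heartbeat().
client.cancel_run(run_id)
```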
{pyoco-0.1.0 → pyoco-0.3.0}/src/pyoco/core/context.py CHANGED
@@ -1,21 +1,37 @@
  import threading
- from typing import Any, Dict, Optional
+ from typing import Any, Dict, List, Optional
  from dataclasses import dataclass, field
+ from .models import RunContext

  @dataclass
  class Context:
+     """
+     Execution context passed to tasks.
+     """
      params: Dict[str, Any] = field(default_factory=dict)
-     env: Dict[str, str] = field(default_factory=dict)
      results: Dict[str, Any] = field(default_factory=dict)
      scratch: Dict[str, Any] = field(default_factory=dict)
      artifacts: Dict[str, Any] = field(default_factory=dict)
-     run_id: Optional[str] = None
-     artifact_dir: str = field(default="./artifacts")
+     env: Dict[str, str] = field(default_factory=dict)
+     artifact_dir: Optional[str] = None
+
+     # Reference to the parent run context (v0.2.0+)
+     run_context: Optional[RunContext] = None

      _lock: threading.Lock = field(default_factory=threading.Lock, repr=False)

+     @property
+     def is_cancelled(self) -> bool:
+         if self.run_context:
+             from .models import RunStatus
+             return self.run_context.status in [RunStatus.CANCELLING, RunStatus.CANCELLED]
+         return False
+
      def __post_init__(self):
          # Ensure artifact directory exists
+         if self.artifact_dir is None:
+             self.artifact_dir = "./artifacts"
+
          import pathlib
          pathlib.Path(self.artifact_dir).mkdir(parents=True, exist_ok=True)
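The new `Context.is_cancelled` property gives tasks a cooperative cancellation hook: a cancelled run (Ctrl+C locally, or `pyoco runs cancel` remotely) moves the run status to CANCELLING/CANCELLED, and long-running tasks can check the flag between chunks of work. A minimal sketch; the task body and the sleep are illustrative, and only the decorator and the property come from this diff:

```python
import time

from pyoco import task

@task
def crunch(ctx):
    # Check ctx.is_cancelled periodically so a cancelled run stops early
    # instead of running every chunk to completion.
    for chunk in range(100):
        if ctx.is_cancelled:
            print("🛑 cancelled, stopping early")
            return None
        time.sleep(0.1)  # placeholder for real work
    return "done"
```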