loom-core 0.2.0__tar.gz → 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {loom_core-0.2.0 → loom_core-1.0.0}/PKG-INFO +5 -2
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/__init__.py +7 -0
- loom_core-1.0.0/loom/__main__.py +9 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/cli/cli.py +57 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/common/errors.py +1 -1
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/context.py +3 -2
- loom_core-1.0.0/loom/core/graph.py +360 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/database/db.py +9 -4
- loom_core-1.0.0/loom/schemas/graph.py +168 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/PKG-INFO +5 -2
- {loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/SOURCES.txt +3 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/requires.txt +4 -1
- {loom_core-0.2.0 → loom_core-1.0.0}/pyproject.toml +7 -4
- {loom_core-0.2.0 → loom_core-1.0.0}/LICENSE +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/MANIFEST.in +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/QUICKSTART.md +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/README.md +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/cli/__init__.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/common/activity.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/common/config.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/common/workflow.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/__init__.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/compiled.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/engine.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/handle.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/logger.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/runner.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/state.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/worker.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/core/workflow.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/database/__init__.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/decorators/__init__.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/decorators/activity.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/decorators/workflow.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/lib/progress.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/lib/utils.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/001_setup_pragma.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/002_create_workflows.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/003.create_events.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/004.create_tasks.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/005.create_indexes.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/006_auto_update_triggers.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/down/007_create_logs.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/001_setup_pragma.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/002_create_workflows.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/003_create_events.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/004_create_tasks.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/005_create_indexes.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/006_auto_update_triggers.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/migrations/up/007_create_logs.sql +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/schemas/__init__.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/schemas/activity.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/schemas/database.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/schemas/events.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/schemas/tasks.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom/schemas/workflow.py +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/dependency_links.txt +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/entry_points.txt +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/top_level.txt +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/setup.cfg +0 -0
- {loom_core-0.2.0 → loom_core-1.0.0}/setup.py +0 -0

{loom_core-0.2.0 → loom_core-1.0.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: loom-core
-Version: 0.2.0
+Version: 1.0.0
 Summary: Durable workflow orchestration engine for Python
 Home-page: https://github.com/satadeep3927/loom
 Author: Satadeep Dasgupta
@@ -23,7 +23,10 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aiosqlite>=0.19.0
 Requires-Dist: click>=8.0.0
-Requires-Dist: rich>=
+Requires-Dist: rich>=14.3.1
+Requires-Dist: fastapi[standard]>=0.95.0
+Requires-Dist: uvicorn[standard]>=0.22.0
+Requires-Dist: pydantic>=2.0.0
 Provides-Extra: dev
 Requires-Dist: pytest>=7.0.0; extra == "dev"
 Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"

{loom_core-0.2.0 → loom_core-1.0.0}/loom/__init__.py

@@ -25,6 +25,10 @@ from loom.database.db import Database
 from loom.decorators.activity import activity
 from loom.decorators.workflow import step, workflow
 
+# Import web module and app for uvicorn compatibility
+from loom import web
+from loom.web.main import app
+
 __version__ = "0.1.0"
 
 __all__ = [
@@ -40,6 +44,9 @@ __all__ = [
     # Functions
     "start_worker",
     "run_once",
+    # Web
+    "web",
+    "app",
     # Version
     "__version__",
 ]
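
The new re-export exists, per its comment, so that uvicorn can import the app from the package root. A minimal sketch of serving the dashboard without the CLI ("loom.web.main:app" is the import string the diff itself uses; everything else here is illustrative):

# Sketch: run the dashboard app directly. "loom:app" would also work,
# but only because of the re-export added above.
import uvicorn

uvicorn.run("loom.web.main:app", host="127.0.0.1", port=8000)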

{loom_core-0.2.0 → loom_core-1.0.0}/loom/cli/cli.py

@@ -70,6 +70,63 @@ def worker(workers: int, poll_interval: float):
     pass
 
 
+@cli.command()
+@click.option(
+    "--host",
+    default="127.0.0.1",
+    help="Host to bind to",
+    show_default=True,
+)
+@click.option(
+    "--port",
+    default=8000,
+    type=int,
+    help="Port to bind to",
+    show_default=True,
+)
+@click.option(
+    "--reload",
+    is_flag=True,
+    help="Enable auto-reload for development",
+)
+def web(host: str, port: int, reload: bool):
+    """Start the Loom web dashboard.
+
+    Launches a FastAPI-based web interface for monitoring and managing
+    workflows, tasks, events, and logs with real-time Server-Sent Events.
+
+    Examples:
+        loom web                 # Start on 127.0.0.1:8000
+        loom web --host 0.0.0.0  # Bind to all interfaces
+        loom web --port 3000     # Use port 3000
+        loom web --reload        # Enable auto-reload for development
+    """
+    try:
+        import uvicorn
+
+        console.print("[bold green]Starting Loom web dashboard...[/bold green]")
+        console.print(f"[blue]Dashboard: http://{host}:{port}[/blue]")
+        console.print(f"[blue]API docs: http://{host}:{port}/docs[/blue]")
+        console.print(f"[blue]ReDoc: http://{host}:{port}/redoc[/blue]")
+
+        uvicorn.run(
+            "loom.web.main:app",
+            host=host,
+            port=port,
+            reload=reload,
+            log_level="info",
+            access_log=not reload,  # Reduce noise in dev mode
+        )
+    except ImportError:
+        console.print("[red]FastAPI and uvicorn are required for web dashboard[/red]")
+        console.print(
+            "[yellow]Install with: pip install 'loom[web]' or pip install fastapi uvicorn[standard][/yellow]"
+        )
+        sys.exit(1)
+    except KeyboardInterrupt:
+        console.print("\n[yellow]Web server stopped[/yellow]")
+
+
 @cli.command()
 def init():
     """Initialize the Loom database and migrations.
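
The docstring above covers shell usage; for programmatic checks, click ships a test runner. A hypothetical smoke test (the import path follows the package layout in the file list above):

# Invoke the new `web` command's --help through click's CliRunner,
# without binding a port.
from click.testing import CliRunner

from loom.cli.cli import cli

result = CliRunner().invoke(cli, ["web", "--help"])
assert result.exit_code == 0
assert "Start the Loom web dashboard." in result.output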

{loom_core-0.2.0 → loom_core-1.0.0}/loom/core/context.py

@@ -86,7 +86,7 @@ class WorkflowContext(Generic[InputT, StateT]):
 
     def _skip_step_events(self) -> None:
         """Skip over STEP_START and STEP_END events during replay.
-
+
         These are internal workflow management events that don't affect
         the deterministic execution logic.
         """
@@ -117,6 +117,7 @@ class WorkflowContext(Generic[InputT, StateT]):
         """
         # We're replaying if we haven't consumed all the original events yet
         return self.cursor < self._original_history_length
+
     def is_at_end_of_history(self) -> bool:
         """Check if we've consumed all events in history.
 
@@ -211,7 +212,7 @@ class WorkflowContext(Generic[InputT, StateT]):
 
         # Skip any step events first
         self._skip_step_events()
-
+
         scheduled_event = self._match_event("TIMER_SCHEDULED")
 
         if scheduled_event:
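
The three hunks above are whitespace-only, but their context documents the replay invariant this class relies on: while the cursor is still behind the recorded history length, the workflow is replaying and must serve results from history rather than re-execute side effects. A stripped-down illustration (only `cursor` and `_original_history_length` come from the diff; the rest is hypothetical):

class ReplayCursor:
    """Toy model of the replay check in WorkflowContext."""

    def __init__(self, recorded_events: list) -> None:
        self._original_history_length = len(recorded_events)
        self.cursor = 0  # advances as recorded events are consumed

    def is_replaying(self) -> bool:
        # Behind recorded history -> replaying: consume recorded events.
        # At or past the end -> live execution: append new events.
        return self.cursor < self._original_history_length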

loom_core-1.0.0/loom/core/graph.py

@@ -0,0 +1,360 @@
+import ast
+import inspect
+from typing import Dict, List, Any, Optional
+
+from ..schemas.graph import WorkflowDefinitionGraph, GraphNode, GraphEdge
+from .workflow import Workflow
+
+
+class WorkflowAnalyzer:
+    """Analyzes workflow definitions to extract structure and dependencies."""
+
+    @staticmethod
+    def analyze_workflow_definition(workflow_class: type[Workflow]) -> WorkflowDefinitionGraph:
+        """Analyze workflow class to generate definition graph.
+
+        Args:
+            workflow_class: The workflow class to analyze
+
+        Returns:
+            WorkflowDefinitionGraph: Graph representation of the workflow structure
+        """
+        graph = WorkflowDefinitionGraph(
+            nodes=[],
+            edges=[],
+            metadata={
+                "workflow_name": getattr(workflow_class, "_workflow_name", workflow_class.__name__),
+                "workflow_version": getattr(workflow_class, "_workflow_version", "1.0.0"),
+                "workflow_description": getattr(workflow_class, "_workflow_description", ""),
+            }
+        )
+
+        # Get compiled workflow to extract step information
+        try:
+            workflow_instance = workflow_class()
+            compiled = workflow_instance._compile_instance()
+        except Exception as e:
+            raise ValueError(f"Failed to compile workflow {workflow_class.__name__}: {e}")
+
+        previous_step_id = None
+
+        # Analyze each step
+        for step_info in compiled.steps:
+            step_id = f"step_{step_info['name']}"
+
+            # Add step node
+            step_node = GraphNode(
+                id=step_id,
+                type="step",
+                label=step_info["name"],
+                metadata={
+                    "description": step_info["description"],
+                    "function": step_info["fn"]
+                }
+            )
+            graph.nodes.append(step_node)
+
+            # Add sequence edge from previous step
+            if previous_step_id:
+                sequence_edge = GraphEdge(**{
+                    "from": previous_step_id,
+                    "to": step_id,
+                    "type": "sequence",
+                    "label": "then"
+                })
+                graph.edges.append(sequence_edge)
+
+            # Analyze step method for dependencies
+            step_method = getattr(workflow_instance, step_info["fn"])
+            dependencies = WorkflowAnalyzer._analyze_step_dependencies(step_method)
+
+            # Add activity nodes and edges
+            for activity_name in dependencies.get("activities", []):
+                activity_id = f"activity_{activity_name}_{step_info['name']}"
+                activity_node = GraphNode(
+                    id=activity_id,
+                    type="activity",
+                    label=activity_name,
+                    metadata={"called_from_step": step_info["name"]}
+                )
+                graph.nodes.append(activity_node)
+
+                activity_edge = GraphEdge(**{
+                    "from": step_id,
+                    "to": activity_id,
+                    "type": "calls",
+                    "label": "executes"
+                })
+                graph.edges.append(activity_edge)
+
+            # Add timer nodes
+            for i, timer_info in enumerate(dependencies.get("timers", [])):
+                timer_id = f"timer_{step_info['name']}_{i}"
+                timer_node = GraphNode(
+                    id=timer_id,
+                    type="timer",
+                    label=f"Sleep {timer_info}",
+                    metadata={"step": step_info["name"]}
+                )
+                graph.nodes.append(timer_node)
+
+                timer_edge = GraphEdge(**{
+                    "from": step_id,
+                    "to": timer_id,
+                    "type": "waits",
+                    "label": "pauses for"
+                })
+                graph.edges.append(timer_edge)
+
+            # Add state dependency edges
+            for state_key in dependencies.get("state_reads", []):
+                state_id = f"state_{state_key}"
+
+                # Add state node if not exists
+                if not any(n.id == state_id for n in graph.nodes):
+                    state_node = GraphNode(
+                        id=state_id,
+                        type="state",
+                        label=f"state.{state_key}",
+                        metadata={"key": state_key}
+                    )
+                    graph.nodes.append(state_node)
+
+                read_edge = GraphEdge(**{
+                    "from": state_id,
+                    "to": step_id,
+                    "type": "reads",
+                    "label": "reads"
+                })
+                graph.edges.append(read_edge)
+
+            for state_key in dependencies.get("state_writes", []):
+                state_id = f"state_{state_key}"
+
+                # Add state node if not exists
+                if not any(n.id == state_id for n in graph.nodes):
+                    state_node = GraphNode(
+                        id=state_id,
+                        type="state",
+                        label=f"state.{state_key}",
+                        metadata={"key": state_key}
+                    )
+                    graph.nodes.append(state_node)
+
+                write_edge = GraphEdge(**{
+                    "from": step_id,
+                    "to": state_id,
+                    "type": "writes",
+                    "label": "updates"
+                })
+                graph.edges.append(write_edge)
+
+            previous_step_id = step_id
+
+        return graph
+
+    @staticmethod
+    def _analyze_step_dependencies(method) -> Dict[str, List[str]]:
+        """Analyze step method source code to find dependencies.
+
+        Args:
+            method: The step method to analyze
+
+        Returns:
+            Dict containing lists of activities, timers, state reads/writes
+        """
+        dependencies = {
+            "activities": [],
+            "timers": [],
+            "state_reads": [],
+            "state_writes": []
+        }
+
+        try:
+            # Get source code and parse AST
+            source = inspect.getsource(method)
+
+            # Remove common indentation to make it parseable
+            import textwrap
+            source = textwrap.dedent(source)
+
+            # Remove decorators - find the first 'async def' or 'def' line
+            lines = source.split('\n')
+            def_line_idx = None
+            for i, line in enumerate(lines):
+                if 'def ' in line and ('async def' in line or line.strip().startswith('def')):
+                    def_line_idx = i
+                    break
+
+            if def_line_idx is not None:
+                # Keep only the function definition and body
+                source = '\n'.join(lines[def_line_idx:])
+
+            tree = ast.parse(source)
+
+            class DependencyVisitor(ast.NodeVisitor):
+                def visit_Call(self, node):
+                    # Only handle non-awaited calls here
+                    # Look for ctx.state.get() calls (non-awaited)
+                    if (isinstance(node.func, ast.Attribute) and
+                            isinstance(node.func.value, ast.Attribute) and
+                            isinstance(node.func.value.value, ast.Name) and
+                            node.func.value.value.id == "ctx" and
+                            node.func.value.attr == "state" and
+                            node.func.attr == "get"):
+
+                        # Extract state key from first argument
+                        if (node.args and isinstance(node.args[0], ast.Constant)):
+                            state_key = node.args[0].value
+                            dependencies["state_reads"].append(state_key)
+
+                    self.generic_visit(node)
+
+                def visit_Await(self, node):
+                    # Handle await ctx.activity(), await ctx.sleep(), await ctx.state.set()
+                    if isinstance(node.value, ast.Call):
+                        call_node = node.value
+
+                        # Check for await ctx.activity()
+                        if (isinstance(call_node.func, ast.Attribute) and
+                                isinstance(call_node.func.value, ast.Name) and
+                                call_node.func.value.id == "ctx" and
+                                call_node.func.attr == "activity"):
+
+                            if call_node.args and isinstance(call_node.args[0], ast.Name):
+                                activity_name = call_node.args[0].id
+                                dependencies["activities"].append(activity_name)
+
+                        # Check for await ctx.sleep()
+                        elif (isinstance(call_node.func, ast.Attribute) and
+                                isinstance(call_node.func.value, ast.Name) and
+                                call_node.func.value.id == "ctx" and
+                                call_node.func.attr == "sleep"):
+                            dependencies["timers"].append("sleep")
+
+                        # Check for await ctx.state.set()
+                        elif (isinstance(call_node.func, ast.Attribute) and
+                                isinstance(call_node.func.value, ast.Attribute) and
+                                isinstance(call_node.func.value.value, ast.Name) and
+                                call_node.func.value.value.id == "ctx" and
+                                call_node.func.value.attr == "state" and
+                                call_node.func.attr == "set"):
+
+                            if (call_node.args and isinstance(call_node.args[0], ast.Constant)):
+                                state_key = call_node.args[0].value
+                                dependencies["state_writes"].append(state_key)
+
+                        # Check for await ctx.state.update()
+                        elif (isinstance(call_node.func, ast.Attribute) and
+                                isinstance(call_node.func.value, ast.Attribute) and
+                                isinstance(call_node.func.value.value, ast.Name) and
+                                call_node.func.value.value.id == "ctx" and
+                                call_node.func.value.attr == "state" and
+                                call_node.func.attr == "update"):
+                            dependencies["state_writes"].append("bulk_update")
+
+                    self.generic_visit(node)
+
+                def visit_Attribute(self, node):
+                    # Look for ctx.state.get('key') reads (non-await calls)
+                    if (isinstance(node.value, ast.Attribute) and
+                            isinstance(node.value.value, ast.Name) and
+                            node.value.value.id == "ctx" and
+                            node.value.attr == "state" and
+                            node.attr == "get"):
+
+                        # This is a ctx.state.get access - we need to find the parent call
+                        # For now, we'll skip this complex case
+                        pass
+
+                    self.generic_visit(node)
+
+            visitor = DependencyVisitor()
+            visitor.visit(tree)
+
+        except Exception as e:
+            # If source analysis fails, return empty dependencies
+            print(f"Warning: Could not analyze step method: {e}")
+
+        return dependencies
+
+
+def generate_mermaid_graph(graph: WorkflowDefinitionGraph) -> str:
+    """Generate Mermaid diagram from definition graph.
+
+    Args:
+        graph: The workflow definition graph
+
+    Returns:
+        String containing Mermaid diagram syntax
+    """
+    lines = ["graph TD"]
+
+    # Add nodes with appropriate shapes
+    for node in graph.nodes:
+        if node.type == "step":
+            lines.append(f'    {node.id}["{node.label}"]')
+        elif node.type == "activity":
+            lines.append(f'    {node.id}("{node.label}")')
+        elif node.type == "timer":
+            lines.append(f'    {node.id}[["{node.label}"]]')
+        elif node.type == "state":
+            lines.append(f'    {node.id}{{{node.label}}}')
+
+    # Add edges with appropriate styles
+    for edge in graph.edges:
+        if edge.type == "sequence":
+            lines.append(f'    {edge.from_node} --> {edge.to_node}')
+        elif edge.type == "calls":
+            lines.append(f'    {edge.from_node} --> {edge.to_node}')
+        elif edge.type == "reads":
+            lines.append(f'    {edge.from_node} -.-> {edge.to_node}')
+        elif edge.type == "writes":
+            lines.append(f'    {edge.from_node} --> {edge.to_node}')
+        elif edge.type == "waits":
+            lines.append(f'    {edge.from_node} -.-> {edge.to_node}')
+
+    return "\n".join(lines)
+
+
+def generate_graphviz_dot(graph: WorkflowDefinitionGraph) -> str:
+    """Generate GraphViz DOT format from definition graph.
+
+    Args:
+        graph: The workflow definition graph
+
+    Returns:
+        String containing DOT format graph
+    """
+    lines = [
+        "digraph workflow {",
+        "    rankdir=TD;",
+        "    node [fontname=\"Arial\"]"
+    ]
+
+    # Add nodes with shapes and colors
+    for node in graph.nodes:
+        if node.type == "step":
+            lines.append(f'    {node.id} [label="{node.label}" shape=box style=filled fillcolor=lightblue];')
+        elif node.type == "activity":
+            lines.append(f'    {node.id} [label="{node.label}" shape=ellipse style=filled fillcolor=lightgreen];')
+        elif node.type == "timer":
+            lines.append(f'    {node.id} [label="{node.label}" shape=diamond style=filled fillcolor=lightyellow];')
+        elif node.type == "state":
+            lines.append(f'    {node.id} [label="{node.label}" shape=hexagon style=filled fillcolor=lightcoral];')
+
+    # Add edges with styles
+    for edge in graph.edges:
+        if edge.type == "sequence":
+            lines.append(f'    {edge.from_node} -> {edge.to_node} [style=solid];')
+        elif edge.type == "calls":
+            lines.append(f'    {edge.from_node} -> {edge.to_node} [style=solid];')
+        elif edge.type == "reads":
+            lines.append(f'    {edge.from_node} -> {edge.to_node} [style=dashed];')
+        elif edge.type == "writes":
+            lines.append(f'    {edge.from_node} -> {edge.to_node} [style=solid];')
+        elif edge.type == "waits":
+            lines.append(f'    {edge.from_node} -> {edge.to_node} [style=dotted];')
+
+    lines.append("}")
+    return "\n".join(lines)
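
Taken together, the new module is a static-analysis pass plus two renderers. A hedged usage sketch; the @workflow/@step decorator signatures are assumptions, since the diff only confirms that they are exported:

from loom import step, workflow
from loom.core.graph import WorkflowAnalyzer, generate_mermaid_graph

@workflow(name="order")  # decorator signature assumed
class OrderWorkflow:
    @step(name="validate")  # decorator signature assumed
    async def validate(self, ctx):
        # The AST pass above recognizes this as a state write.
        await ctx.state.set("order_valid", True)

graph = WorkflowAnalyzer.analyze_workflow_definition(OrderWorkflow)
print(generate_mermaid_graph(graph))  # "graph TD" Mermaid source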

{loom_core-0.2.0 → loom_core-1.0.0}/loom/database/db.py

@@ -346,7 +346,11 @@ class Database(Generic[InputT, StateT]):
         )
 
     async def workflow_failed(
-        self,
+        self,
+        workflow_id: str,
+        error: str,
+        task_id: str | None = None,
+        task_kind: str | None = None,
     ) -> None:
         """Mark a workflow as failed due to an unhandled exception.
 
@@ -436,9 +440,10 @@ class Database(Generic[InputT, StateT]):
             INSERT INTO events (workflow_id, type, payload)
             VALUES (?, 'WORKFLOW_COMPLETED', ?)
             """,
-            (
-
-
+            (
+                workflow_id,
+                json.dumps({"completed_at": datetime.now(timezone.utc).isoformat()}),
+            ),
         )
 
         # Update workflow status
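
The widened workflow_failed signature lets callers attribute a failure to the task that raised it. A hypothetical call site (`db` is an initialized Database instance; all IDs are illustrative):

await db.workflow_failed(
    workflow_id="wf_123",
    error="PaymentDeclined: card expired",
    task_id="task_456",    # new optional: the task that raised
    task_kind="activity",  # new optional: what kind of task it was
)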

loom_core-1.0.0/loom/schemas/graph.py

@@ -0,0 +1,168 @@
+from typing import Dict, List, Any
+from pydantic import BaseModel, Field
+
+
+class GraphNode(BaseModel):
+    """Represents a node in the workflow definition graph."""
+
+    id: str = Field(
+        ...,
+        description="Unique identifier for the node",
+        examples=["step_process_payment", "activity_send_email", "state_user_id"]
+    )
+    type: str = Field(
+        ...,
+        description="Type of the node",
+        examples=["step", "activity", "timer", "state"]
+    )
+    label: str = Field(
+        ...,
+        description="Display label for the node",
+        examples=["Process Payment", "send_email", "Sleep 5s", "state.user_id"]
+    )
+    metadata: Dict[str, Any] = Field(
+        default_factory=dict,
+        description="Additional metadata about the node",
+        examples=[
+            {"description": "Processes user payment", "function": "process_payment_step"},
+            {"retry_count": 3, "timeout_seconds": 30},
+            {"key": "user_id", "type": "string"}
+        ]
+    )
+
+
+class GraphEdge(BaseModel):
+    """Represents an edge (connection) in the workflow definition graph."""
+
+    from_node: str = Field(
+        ...,
+        alias="from",
+        description="Source node ID",
+        examples=["step_validate_input", "state_user_data"]
+    )
+    to_node: str = Field(
+        ...,
+        alias="to",
+        description="Target node ID",
+        examples=["step_process_payment", "activity_send_notification"]
+    )
+    type: str = Field(
+        ...,
+        description="Type of relationship",
+        examples=["sequence", "calls", "reads", "writes", "waits"]
+    )
+    label: str = Field(
+        default="",
+        description="Display label for the edge",
+        examples=["then", "executes", "reads", "updates", "pauses for"]
+    )
+
+
+class WorkflowDefinitionGraph(BaseModel):
+    """Complete workflow definition graph structure."""
+
+    nodes: List[GraphNode] = Field(
+        ...,
+        description="List of nodes in the graph"
+    )
+    edges: List[GraphEdge] = Field(
+        ...,
+        description="List of edges connecting the nodes"
+    )
+    metadata: Dict[str, Any] = Field(
+        default_factory=dict,
+        description="Workflow-level metadata",
+        examples=[
+            {
+                "workflow_name": "OrderProcessingWorkflow",
+                "workflow_version": "1.2.0",
+                "workflow_description": "Processes customer orders with payment and shipping"
+            }
+        ]
+    )
+
+    class Config:
+        json_encoders = {
+            # Add any custom encoders if needed
+        }
+        json_schema_extra = {
+            "examples": [
+                {
+                    "nodes": [
+                        {
+                            "id": "step_validate_order",
+                            "type": "step",
+                            "label": "Validate Order",
+                            "metadata": {
+                                "description": "Validates order data and inventory",
+                                "function": "validate_order_step"
+                            }
+                        },
+                        {
+                            "id": "activity_check_inventory",
+                            "type": "activity",
+                            "label": "check_inventory",
+                            "metadata": {
+                                "called_from_step": "validate_order",
+                                "retry_count": 3
+                            }
+                        },
+                        {
+                            "id": "state_order_valid",
+                            "type": "state",
+                            "label": "state.order_valid",
+                            "metadata": {
+                                "key": "order_valid"
+                            }
+                        }
+                    ],
+                    "edges": [
+                        {
+                            "from": "step_validate_order",
+                            "to": "activity_check_inventory",
+                            "type": "calls",
+                            "label": "executes"
+                        },
+                        {
+                            "from": "step_validate_order",
+                            "to": "state_order_valid",
+                            "type": "writes",
+                            "label": "updates"
+                        }
+                    ],
+                    "metadata": {
+                        "workflow_name": "OrderProcessingWorkflow",
+                        "workflow_version": "1.0.0",
+                        "workflow_description": "Handles order processing with validation and payment"
+                    }
+                }
+            ]
+        }
+
+
+class GraphFormat(BaseModel):
+    """Supported graph output formats."""
+
+    format: str = Field(
+        ...,
+        description="Output format for the graph",
+        examples=["mermaid", "dot", "json"]
+    )
+
+
+class GraphResponse(BaseModel):
+    """Response containing the generated graph."""
+
+    format: str = Field(
+        ...,
+        description="Format of the generated graph",
+        examples=["mermaid", "dot", "json"]
+    )
+    content: str = Field(
+        ...,
+        description="Generated graph content in the specified format"
+    )
+    metadata: Dict[str, Any] = Field(
+        default_factory=dict,
+        description="Additional metadata about the graph generation"
+    )
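
`from` is a Python keyword, which is why graph.py builds edges as GraphEdge(**{"from": ..., "to": ...}) and why the model declares from_node/to_node behind aliases. A short round-trip under the new pydantic>=2.0.0 pin:

# Validate by wire names, access by field names, dump back by alias.
from loom.schemas.graph import GraphEdge

edge = GraphEdge(**{"from": "step_a", "to": "step_b", "type": "sequence", "label": "then"})
assert edge.from_node == "step_a" and edge.to_node == "step_b"
print(edge.model_dump(by_alias=True))
# {'from': 'step_a', 'to': 'step_b', 'type': 'sequence', 'label': 'then'}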

{loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: loom-core
-Version: 0.2.0
+Version: 1.0.0
 Summary: Durable workflow orchestration engine for Python
 Home-page: https://github.com/satadeep3927/loom
 Author: Satadeep Dasgupta
@@ -23,7 +23,10 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aiosqlite>=0.19.0
 Requires-Dist: click>=8.0.0
-Requires-Dist: rich>=
+Requires-Dist: rich>=14.3.1
+Requires-Dist: fastapi[standard]>=0.95.0
+Requires-Dist: uvicorn[standard]>=0.22.0
+Requires-Dist: pydantic>=2.0.0
 Provides-Extra: dev
 Requires-Dist: pytest>=7.0.0; extra == "dev"
 Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"

{loom_core-0.2.0 → loom_core-1.0.0}/loom_core.egg-info/SOURCES.txt

@@ -5,6 +5,7 @@ README.md
 pyproject.toml
 setup.py
 loom/__init__.py
+loom/__main__.py
 loom/cli/__init__.py
 loom/cli/cli.py
 loom/common/activity.py
@@ -15,6 +16,7 @@ loom/core/__init__.py
 loom/core/compiled.py
 loom/core/context.py
 loom/core/engine.py
+loom/core/graph.py
 loom/core/handle.py
 loom/core/logger.py
 loom/core/runner.py
@@ -46,6 +48,7 @@ loom/schemas/__init__.py
 loom/schemas/activity.py
 loom/schemas/database.py
 loom/schemas/events.py
+loom/schemas/graph.py
 loom/schemas/tasks.py
 loom/schemas/workflow.py
 loom_core.egg-info/PKG-INFO

{loom_core-0.2.0 → loom_core-1.0.0}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "loom-core"
-version = "0.2.0"
+version = "1.0.0"
 description = "Durable workflow orchestration engine for Python"
 readme = "README.md"
 requires-python = ">=3.12"
@@ -26,7 +26,10 @@ classifiers = [
 dependencies = [
     "aiosqlite>=0.19.0",
     "click>=8.0.0",
-    "rich>=
+    "rich>=14.3.1",
+    "fastapi[standard]>=0.95.0",
+    "uvicorn[standard]>=0.22.0",
+    "pydantic>=2.0.0",
 ]
 
 [project.optional-dependencies]
@@ -83,6 +86,6 @@ addopts = "-v --cov=loom --cov-report=term-missing"
 [tool.ruff]
 line-length = 88
 target-version = "py312"
-select = ["E", "F", "I", "N", "W"]
-ignore = ["E501"]
+lint.select = ["E", "F", "I", "N", "W"]
+lint.ignore = ["E501", "F405"]
 exclude = ["tests", "examples", "__pycache__"]