loom-core 1.5.0-py3-none-any.whl → 1.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- loom/core/graph.py +62 -62
- loom/core/workflow.py +16 -0
- loom/schemas/graph.py +14 -13
- loom/schemas/state.py +26 -0
- loom/schemas/workflow.py +4 -2
- loom/web/api/graphs.py +36 -31
- loom/web/main.py +5 -6
- {loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/METADATA +1 -1
- {loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/RECORD +13 -12
- {loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/WHEEL +0 -0
- {loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/entry_points.txt +0 -0
- {loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/licenses/LICENSE +0 -0
- {loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/top_level.txt +0 -0
loom/core/graph.py
CHANGED
@@ -1,21 +1,21 @@
 import ast
 import inspect
-from typing import Dict, List
+from typing import Dict, List

-from ..schemas.graph import
+from ..schemas.graph import GraphEdge, GraphNode, WorkflowDefinitionGraph
 from .workflow import Workflow


 class WorkflowAnalyzer:
     """Analyzes workflow definitions to extract structure and dependencies."""
-
+
     @staticmethod
     def analyze_workflow_definition(workflow_class: type[Workflow]) -> WorkflowDefinitionGraph:
         """Analyze workflow class to generate definition graph.
-
+
         Args:
             workflow_class: The workflow class to analyze
-
+
         Returns:
             WorkflowDefinitionGraph: Graph representation of the workflow structure
         """
@@ -28,20 +28,20 @@ class WorkflowAnalyzer:
                 "workflow_description": getattr(workflow_class, "_workflow_description", ""),
             }
         )
-
+
         # Get compiled workflow to extract step information
         try:
             workflow_instance = workflow_class()
             compiled = workflow_instance._compile_instance()
         except Exception as e:
             raise ValueError(f"Failed to compile workflow {workflow_class.__name__}: {e}")
-
+
         previous_step_id = None
-
+
         # Analyze each step
         for step_info in compiled.steps:
             step_id = f"step_{step_info['name']}"
-
+
             # Add step node
             step_node = GraphNode(
                 id=step_id,
@@ -53,7 +53,7 @@ class WorkflowAnalyzer:
                 }
             )
             graph.nodes.append(step_node)
-
+
             # Add sequence edge from previous step
             if previous_step_id:
                 sequence_edge = GraphEdge(**{
@@ -63,11 +63,11 @@ class WorkflowAnalyzer:
                     "label": "then"
                 })
                 graph.edges.append(sequence_edge)
-
+
             # Analyze step method for dependencies
             step_method = getattr(workflow_instance, step_info["fn"])
             dependencies = WorkflowAnalyzer._analyze_step_dependencies(step_method)
-
+
             # Add activity nodes and edges
             for activity_name in dependencies.get("activities", []):
                 activity_id = f"activity_{activity_name}_{step_info['name']}"
@@ -78,7 +78,7 @@ class WorkflowAnalyzer:
                     metadata={"called_from_step": step_info["name"]}
                 )
                 graph.nodes.append(activity_node)
-
+
                 activity_edge = GraphEdge(**{
                     "from": step_id,
                     "to": activity_id,
@@ -86,7 +86,7 @@ class WorkflowAnalyzer:
                     "label": "executes"
                 })
                 graph.edges.append(activity_edge)
-
+
             # Add timer nodes
             for i, timer_info in enumerate(dependencies.get("timers", [])):
                 timer_id = f"timer_{step_info['name']}_{i}"
@@ -97,7 +97,7 @@ class WorkflowAnalyzer:
                     metadata={"step": step_info["name"]}
                 )
                 graph.nodes.append(timer_node)
-
+
                 timer_edge = GraphEdge(**{
                     "from": step_id,
                     "to": timer_id,
@@ -105,11 +105,11 @@ class WorkflowAnalyzer:
                     "label": "pauses for"
                 })
                 graph.edges.append(timer_edge)
-
+
             # Add state dependency edges
             for state_key in dependencies.get("state_reads", []):
                 state_id = f"state_{state_key}"
-
+
                 # Add state node if not exists
                 if not any(n.id == state_id for n in graph.nodes):
                     state_node = GraphNode(
@@ -119,7 +119,7 @@ class WorkflowAnalyzer:
                         metadata={"key": state_key}
                     )
                     graph.nodes.append(state_node)
-
+
                 read_edge = GraphEdge(**{
                     "from": state_id,
                     "to": step_id,
@@ -127,11 +127,11 @@ class WorkflowAnalyzer:
                     "label": "reads"
                 })
                 graph.edges.append(read_edge)
-
+
             for state_key in dependencies.get("state_writes", []):
                 state_id = f"state_{state_key}"
-
-                # Add state node if not exists
+
+                # Add state node if not exists
                 if not any(n.id == state_id for n in graph.nodes):
                     state_node = GraphNode(
                         id=state_id,
@@ -140,7 +140,7 @@ class WorkflowAnalyzer:
                         metadata={"key": state_key}
                     )
                     graph.nodes.append(state_node)
-
+
                 write_edge = GraphEdge(**{
                     "from": step_id,
                     "to": state_id,
@@ -148,18 +148,18 @@ class WorkflowAnalyzer:
                     "label": "updates"
                 })
                 graph.edges.append(write_edge)
-
+
             previous_step_id = step_id
-
+
         return graph
-
+
     @staticmethod
     def _analyze_step_dependencies(method) -> Dict[str, List[str]]:
         """Analyze step method source code to find dependencies.
-
+
         Args:
             method: The step method to analyze
-
+
         Returns:
             Dict containing lists of activities, timers, state reads/writes
         """
@@ -169,15 +169,15 @@ class WorkflowAnalyzer:
             "state_reads": [],
             "state_writes": []
         }
-
+
         try:
             # Get source code and parse AST
             source = inspect.getsource(method)
-
+
             # Remove common indentation to make it parseable
             import textwrap
             source = textwrap.dedent(source)
-
+
             # Remove decorators - find the first 'async def' or 'def' line
             lines = source.split('\n')
             def_line_idx = None
@@ -185,13 +185,13 @@ class WorkflowAnalyzer:
                 if 'def ' in line and ('async def' in line or line.strip().startswith('def')):
                     def_line_idx = i
                     break
-
+
             if def_line_idx is not None:
                 # Keep only the function definition and body
                 source = '\n'.join(lines[def_line_idx:])
-
+
             tree = ast.parse(source)
-
+
             class DependencyVisitor(ast.NodeVisitor):
                 def visit_Call(self, node):
                     # Only handle non-awaited calls here
@@ -202,36 +202,36 @@ class WorkflowAnalyzer:
                         node.func.value.value.id == "ctx" and
                        node.func.value.attr == "state" and
                        node.func.attr == "get"):
-
+
                         # Extract state key from first argument
                        if (node.args and isinstance(node.args[0], ast.Constant)):
                            state_key = node.args[0].value
                            dependencies["state_reads"].append(state_key)
-
+
                    self.generic_visit(node)
-
+
                def visit_Await(self, node):
                    # Handle await ctx.activity(), await ctx.sleep(), await ctx.state.set()
                    if isinstance(node.value, ast.Call):
                        call_node = node.value
-
-                        # Check for await ctx.activity()
+
+                        # Check for await ctx.activity()
                        if (isinstance(call_node.func, ast.Attribute) and
                            isinstance(call_node.func.value, ast.Name) and
                            call_node.func.value.id == "ctx" and
                            call_node.func.attr == "activity"):
-
+
                            if call_node.args and isinstance(call_node.args[0], ast.Name):
                                activity_name = call_node.args[0].id
                                dependencies["activities"].append(activity_name)
-
+
                        # Check for await ctx.sleep()
                        elif (isinstance(call_node.func, ast.Attribute) and
                              isinstance(call_node.func.value, ast.Name) and
                              call_node.func.value.id == "ctx" and
                              call_node.func.attr == "sleep"):
                            dependencies["timers"].append("sleep")
-
+
                        # Check for await ctx.state.set()
                        elif (isinstance(call_node.func, ast.Attribute) and
                              isinstance(call_node.func.value, ast.Attribute) and
@@ -239,11 +239,11 @@ class WorkflowAnalyzer:
                              call_node.func.value.value.id == "ctx" and
                              call_node.func.value.attr == "state" and
                              call_node.func.attr == "set"):
-
+
                            if (call_node.args and isinstance(call_node.args[0], ast.Constant)):
                                state_key = call_node.args[0].value
                                dependencies["state_writes"].append(state_key)
-
+
                        # Check for await ctx.state.update()
                        elif (isinstance(call_node.func, ast.Attribute) and
                              isinstance(call_node.func.value, ast.Attribute) and
@@ -252,44 +252,44 @@ class WorkflowAnalyzer:
                              call_node.func.value.attr == "state" and
                              call_node.func.attr == "update"):
                            dependencies["state_writes"].append("bulk_update")
-
+
                    self.generic_visit(node)
-
+
                def visit_Attribute(self, node):
-                    # Look for ctx.state.get('key') reads (non-await calls)
+                    # Look for ctx.state.get('key') reads (non-await calls)
                    if (isinstance(node.value, ast.Attribute) and
                        isinstance(node.value.value, ast.Name) and
                        node.value.value.id == "ctx" and
                        node.value.attr == "state" and
                        node.attr == "get"):
-
+
                        # This is a ctx.state.get access - we need to find the parent call
                        # For now, we'll skip this complex case
                        pass
-
+
                    self.generic_visit(node)
-
+
            visitor = DependencyVisitor()
            visitor.visit(tree)
-
+
        except Exception as e:
            # If source analysis fails, return empty dependencies
            print(f"Warning: Could not analyze step method: {e}")
-
+
        return dependencies


 def generate_mermaid_graph(graph: WorkflowDefinitionGraph) -> str:
     """Generate Mermaid diagram from definition graph.
-
+
     Args:
         graph: The workflow definition graph
-
+
     Returns:
         String containing Mermaid diagram syntax
     """
     lines = ["graph TD"]
-
+
     # Add nodes with appropriate shapes
     for node in graph.nodes:
         if node.type == "step":
@@ -300,7 +300,7 @@ def generate_mermaid_graph(graph: WorkflowDefinitionGraph) -> str:
             lines.append(f' {node.id}[["{node.label}"]]')
         elif node.type == "state":
             lines.append(f' {node.id}{{{node.label}}}')
-
+
     # Add edges with appropriate styles
     for edge in graph.edges:
         if edge.type == "sequence":
@@ -313,16 +313,16 @@ def generate_mermaid_graph(graph: WorkflowDefinitionGraph) -> str:
             lines.append(f' {edge.from_node} --> {edge.to_node}')
         elif edge.type == "waits":
             lines.append(f' {edge.from_node} -.-> {edge.to_node}')
-
+
     return "\n".join(lines)


 def generate_graphviz_dot(graph: WorkflowDefinitionGraph) -> str:
     """Generate GraphViz DOT format from definition graph.
-
+
     Args:
         graph: The workflow definition graph
-
+
     Returns:
         String containing DOT format graph
     """
@@ -331,7 +331,7 @@ def generate_graphviz_dot(graph: WorkflowDefinitionGraph) -> str:
         " rankdir=TD;",
         " node [fontname=\"Arial\"]"
     ]
-
+
     # Add nodes with shapes and colors
     for node in graph.nodes:
         if node.type == "step":
@@ -342,7 +342,7 @@ def generate_graphviz_dot(graph: WorkflowDefinitionGraph) -> str:
             lines.append(f' {node.id} [label="{node.label}" shape=diamond style=filled fillcolor=lightyellow];')
         elif node.type == "state":
             lines.append(f' {node.id} [label="{node.label}" shape=hexagon style=filled fillcolor=lightcoral];')
-
+
     # Add edges with styles
     for edge in graph.edges:
         if edge.type == "sequence":
@@ -355,6 +355,6 @@ def generate_graphviz_dot(graph: WorkflowDefinitionGraph) -> str:
             lines.append(f' {edge.from_node} -> {edge.to_node} [style=solid];')
         elif edge.type == "waits":
             lines.append(f' {edge.from_node} -> {edge.to_node} [style=dotted];')
-
+
     lines.append("}")
-    return "\n".join(lines)
+    return "\n".join(lines)
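Taken together, this module now forms a small static-analysis pipeline: WorkflowAnalyzer.analyze_workflow_definition() builds a WorkflowDefinitionGraph from a workflow class by parsing the step methods' source, and generate_mermaid_graph() / generate_graphviz_dot() turn that graph into renderable text. A minimal usage sketch based only on the helpers visible in this diff; OrderWorkflow and its module are placeholders for a user-defined workflow, not names from the package:

```python
# Sketch: render a workflow's static structure with the 1.7.0 graph helpers.
from loom.core.graph import (
    WorkflowAnalyzer,
    generate_graphviz_dot,
    generate_mermaid_graph,
)

from myapp.workflows import OrderWorkflow  # placeholder workflow class

# Static analysis only: steps, activity calls, ctx.sleep() timers, and
# ctx.state reads/writes are extracted from the step methods' source code.
graph = WorkflowAnalyzer.analyze_workflow_definition(OrderWorkflow)

print(generate_mermaid_graph(graph))  # Mermaid "graph TD ..." text
print(generate_graphviz_dot(graph))   # GraphViz DOT text
```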
loom/core/workflow.py
CHANGED
@@ -35,6 +35,22 @@ class Workflow(Generic[InputT, StateT]):
             pass
     """
 
+    @classmethod
+    async def start(cls, input: InputT):
+        """
+        Start a new workflow instance with the given input.
+
+        This method compiles the workflow definition and initializes it with the provided input.
+        It is a convenience method that combines compilation and execution initiation.
+
+        Args:
+            input (InputT): The immutable input data for the workflow
+
+        Returns:
+            The Workflow Handle.
+        """
+        return await cls.compile().start(input)
+
     @classmethod
     def compile(cls) -> CompiledWorkflow[InputT, StateT]:
         """
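The new start() classmethod is a convenience wrapper around the existing compile-then-start flow. A hedged sketch of what it changes at call sites; MyWorkflow and payload stand in for a user-defined workflow and its input and are not names from the package:

```python
# Sketch: both calls should be equivalent as of 1.7.0.
import asyncio

from myapp.workflows import MyWorkflow, payload  # placeholders


async def main() -> None:
    # 1.5.0 style: compile explicitly, then start the compiled workflow
    handle_a = await MyWorkflow.compile().start(payload)

    # 1.7.0 shortcut: compile and start in one call, returning the handle
    handle_b = await MyWorkflow.start(payload)


asyncio.run(main())
```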
loom/schemas/graph.py
CHANGED
@@ -1,10 +1,11 @@
-from typing import Dict, List
+from typing import Any, Dict, List
+
 from pydantic import BaseModel, Field


 class GraphNode(BaseModel):
     """Represents a node in the workflow definition graph."""
-
+
     id: str = Field(
         ...,
         description="Unique identifier for the node",
@@ -33,7 +34,7 @@ class GraphNode(BaseModel):

 class GraphEdge(BaseModel):
     """Represents an edge (connection) in the workflow definition graph."""
-
+
     from_node: str = Field(
         ...,
         alias="from",
@@ -41,7 +42,7 @@ class GraphEdge(BaseModel):
         examples=["step_validate_input", "state_user_data"]
     )
     to_node: str = Field(
-        ...,
+        ...,
         alias="to",
         description="Target node ID",
         examples=["step_process_payment", "activity_send_notification"]
@@ -60,7 +61,7 @@ class GraphEdge(BaseModel):

 class WorkflowDefinitionGraph(BaseModel):
     """Complete workflow definition graph structure."""
-
+
     nodes: List[GraphNode] = Field(
         ...,
         description="List of nodes in the graph"
@@ -80,7 +81,7 @@ class WorkflowDefinitionGraph(BaseModel):
             }
         ]
     )
-
+
     class Config:
         json_encoders = {
             # Add any custom encoders if needed
@@ -91,7 +92,7 @@ class WorkflowDefinitionGraph(BaseModel):
             "nodes": [
                 {
                     "id": "step_validate_order",
-                    "type": "step",
+                    "type": "step",
                     "label": "Validate Order",
                     "metadata": {
                         "description": "Validates order data and inventory",
@@ -109,7 +110,7 @@ class WorkflowDefinitionGraph(BaseModel):
                 },
                 {
                     "id": "state_order_valid",
-                    "type": "state",
+                    "type": "state",
                     "label": "state.order_valid",
                     "metadata": {
                         "key": "order_valid"
@@ -119,7 +120,7 @@ class WorkflowDefinitionGraph(BaseModel):
             "edges": [
                 {
                     "from": "step_validate_order",
-                    "to": "activity_check_inventory",
+                    "to": "activity_check_inventory",
                     "type": "calls",
                     "label": "executes"
                 },
@@ -142,17 +143,17 @@ class WorkflowDefinitionGraph(BaseModel):

 class GraphFormat(BaseModel):
     """Supported graph output formats."""
-
+
     format: str = Field(
         ...,
         description="Output format for the graph",
         examples=["mermaid", "dot", "json"]
     )
-
+

 class GraphResponse(BaseModel):
     """Response containing the generated graph."""
-
+
     format: str = Field(
         ...,
         description="Format of the generated graph",
@@ -165,4 +166,4 @@ class GraphResponse(BaseModel):
     metadata: Dict[str, Any] = Field(
         default_factory=dict,
         description="Additional metadata about the graph generation"
-    )
+    )
loom/schemas/state.py
ADDED
@@ -0,0 +1,26 @@
+from typing import TypedDict
+
+
+class State(TypedDict):
+    """Represents the mutable state of a workflow.
+
+    This schema defines the structure of the state object that is passed
+    to each step of a workflow during its execution. The state is mutable
+    and can be updated by workflow steps to reflect the current progress
+    and data of the workflow.
+
+    """
+
+    ...
+
+
+class Input(TypedDict):
+    """Represents the immutable input to a workflow.
+
+    This schema defines the structure of the input object that is provided
+    when starting a new workflow instance. The input is immutable and
+    remains constant throughout the lifecycle of the workflow.
+
+    """
+
+    ...
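State and Input are added as empty TypedDict base classes; presumably they are meant to be subclassed with concrete keys in user code and to act as bounds for the TypeVars in the next file. A sketch under that assumption, with illustrative field names that are not part of loom:

```python
# Sketch: specialising the new base TypedDicts for a concrete workflow.
from loom.schemas.state import Input, State


class OrderInput(Input):
    # Immutable data supplied when the workflow is started
    order_id: str
    amount_cents: int


class OrderState(State):
    # Mutable data updated by steps via ctx.state
    attempts: int
    order_valid: bool
```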
loom/schemas/workflow.py
CHANGED
@@ -2,11 +2,13 @@ from datetime import datetime
 from enum import Enum
 from typing import Awaitable, Callable, TypedDict, TypeVar
 
-
+from .state import Input, State
+
+StateT = TypeVar("StateT", bound=State)
 
 Func = TypeVar("Func", bound=Callable[..., Awaitable[object]])
 
-InputT = TypeVar("InputT", bound=
+InputT = TypeVar("InputT", bound=Input)
 
 ClsT = TypeVar("ClsT")
 
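With InputT bound to Input and StateT bound to State, the type parameters of Workflow(Generic[InputT, StateT]) are now expected to derive from the new TypedDicts. A typing-only sketch, reusing illustrative types like those in the previous example; the step-definition API is not shown in this diff, so the class body is omitted:

```python
# Typing sketch: the bounded TypeVars constrain Workflow's generic parameters.
from loom.core.workflow import Workflow
from loom.schemas.state import Input, State


class OrderInput(Input):
    order_id: str


class OrderState(State):
    order_valid: bool


class OrderWorkflow(Workflow[OrderInput, OrderState]):
    """Hypothetical workflow; OrderInput/OrderState satisfy the new bounds."""
```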
loom/web/api/graphs.py
CHANGED
@@ -3,15 +3,20 @@
 Provides REST endpoints for generating workflow definition graphs.
 """

-from typing import Any, Dict
-from fastapi import APIRouter, HTTPException, Query, Depends
 from enum import Enum
+from typing import Any, Dict
+
+from fastapi import APIRouter, Depends, HTTPException, Query

-from ...core.graph import WorkflowAnalyzer, generate_mermaid_graph, generate_graphviz_dot
 from ...common.workflow import workflow_registry
+from ...core.graph import (
+    WorkflowAnalyzer,
+    generate_graphviz_dot,
+    generate_mermaid_graph,
+)
 from ...database.db import Database
+from ...schemas.graph import GraphResponse, WorkflowDefinitionGraph
 from ..schemas import ErrorResponse
-from ...schemas.graph import WorkflowDefinitionGraph, GraphResponse, GraphFormat

 router = APIRouter()

@@ -34,25 +39,25 @@ class GraphFormatEnum(str, Enum):
     response_model=WorkflowDefinitionGraph,
     summary="Get workflow definition graph",
     description="""
-    Generate a static workflow definition graph showing the structure of steps,
+    Generate a static workflow definition graph showing the structure of steps,
     activities, timers, and state dependencies as defined in the workflow code.
-
-    This is similar to Airflow's DAG view - it shows the workflow structure
+
+    This is similar to Airflow's DAG view - it shows the workflow structure
     based on code analysis, not runtime execution.
-
+
     **Features:**
     - Step sequence and dependencies
-    - Activity calls within each step
+    - Activity calls within each step
     - Timer/sleep operations
     - State read/write dependencies
     - Workflow metadata
-
+
     **Node Types:**
     - `step`: Workflow steps (blue boxes)
    - `activity`: Activity calls (green circles)
    - `timer`: Sleep/delay operations (yellow diamonds)
    - `state`: State variables (red hexagons)
-
+
    **Edge Types:**
    - `sequence`: Step-to-step flow
    - `calls`: Step calls activity
@@ -73,25 +78,25 @@ async def get_workflow_definition_graph(workflow_id: str, db: Database = Depends
         workflow_info = await db.get_workflow_info(workflow_id)
         if not workflow_info:
             raise HTTPException(
-                status_code=404,
+                status_code=404,
                 detail=f"Workflow with ID '{workflow_id}' not found"
             )
-
+
         # Get workflow class using module and name from database
         workflow_class = workflow_registry(workflow_info["module"], workflow_info["name"])
-
+
         # Analyze workflow definition
         graph = WorkflowAnalyzer.analyze_workflow_definition(workflow_class)
-
+
         return graph
-
+
     except ValueError as e:
         raise HTTPException(status_code=400, detail=str(e))
     except (ModuleNotFoundError, AttributeError, TypeError) as e:
         raise HTTPException(status_code=400, detail=f"Failed to load workflow class: {str(e)}")
     except Exception as e:
         raise HTTPException(
-            status_code=500,
+            status_code=500,
             detail=f"Failed to analyze workflow: {str(e)}"
         )

@@ -102,17 +107,17 @@ async def get_workflow_definition_graph(workflow_id: str, db: Database = Depends
     summary="Render workflow definition graph",
     description="""
     Generate a workflow definition graph in various output formats for visualization.
-
+
     **Supported Formats:**
     - `json`: Structured JSON data (same as /definition endpoint)
     - `mermaid`: Mermaid diagram syntax for rendering
     - `dot`: GraphViz DOT format for advanced visualization
-
+
     **Usage Examples:**
     - Use `mermaid` format to render in web UIs or documentation
     - Use `dot` format for GraphViz tools (dot, neato, fdp, etc.)
     - Use `json` format for custom visualization libraries
-
+
     **Mermaid Example:**
     ```
     graph TD
@@ -146,13 +151,13 @@ async def render_workflow_definition_graph(
                 status_code=404,
                 detail=f"Workflow with ID '{workflow_id}' not found"
             )
-
+
         # Get workflow class using module and name from database
         workflow_class = workflow_registry(workflow_info["module"], workflow_info["name"])
-
+
         # Analyze workflow definition
         graph = WorkflowAnalyzer.analyze_workflow_definition(workflow_class)
-
+
         # Generate output based on format
         if format == GraphFormatEnum.JSON:
             content = graph.json(indent=2)
@@ -165,7 +170,7 @@ async def render_workflow_definition_graph(
                 status_code=400,
                 detail=f"Unsupported format: {format}"
             )
-
+
         return GraphResponse(
             format=format.value,
             content=content,
@@ -177,7 +182,7 @@ async def render_workflow_definition_graph(
                 **graph.metadata
             }
         )
-
+
     except ValueError as e:
         raise HTTPException(status_code=400, detail=str(e))
     except (ModuleNotFoundError, AttributeError, TypeError) as e:
@@ -192,10 +197,10 @@ async def render_workflow_definition_graph(
 @router.get(
     "/workflows/",
     response_model=Dict[str, Any],
-    summary="List workflows for graph generation",
+    summary="List workflows for graph generation",
     description="""
     Get a list of all workflows in the database that can be analyzed for graphs.
-
+
     Returns workflow IDs, names, versions, and basic metadata for each workflow.
     Use the workflow ID with the graph endpoints to generate visualizations.
     """
@@ -210,7 +215,7 @@ async def list_workflows_for_graphs(db: Database = Depends(get_db)):
             ORDER BY created_at DESC
         """
         workflows = await db.query(workflows_sql)
-
+
         workflow_list = []
         for workflow in workflows:
             workflow_list.append({
@@ -223,14 +228,14 @@ async def list_workflows_for_graphs(db: Database = Depends(get_db)):
                 "created_at": workflow["created_at"],
                 "updated_at": workflow["updated_at"]
             })
-
+
         return {
             "total_count": len(workflow_list),
             "workflows": workflow_list
         }
-
+
     except Exception as e:
         raise HTTPException(
             status_code=500,
             detail=f"Failed to list workflows: {str(e)}"
-        )
+        )
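The router above exposes graph generation over REST. The sketch below shows one way a client might call it; the base URL, the mount point, the /definition and render paths, and the id field of the listing payload are assumptions, since only the /workflows/ listing route, the format parameter, and the content field of GraphResponse are visible in this diff:

```python
# Sketch: querying the graph endpoints with httpx (paths partly assumed).
import httpx

BASE_URL = "http://localhost:8000/api/graphs"  # assumed mount point

with httpx.Client(base_url=BASE_URL) as client:
    # Listing route shown in the diff: returns total_count and workflows
    listing = client.get("/workflows/").json()

    for wf in listing["workflows"]:
        wf_id = wf["id"]  # field name assumed from "Returns workflow IDs, ..."

        # Assumed path for the structured JSON definition graph
        graph = client.get(f"/workflows/{wf_id}/definition").json()

        # Assumed path for the rendered variant; "mermaid" is a documented format
        mermaid = client.get(
            f"/workflows/{wf_id}/definition/render",
            params={"format": "mermaid"},
        ).json()["content"]
        print(mermaid)
```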
loom/web/main.py
CHANGED
@@ -5,7 +5,6 @@ for monitoring and managing Loom workflows.
 """

 import json
-import os
 from contextlib import asynccontextmanager
 from pathlib import Path
 from typing import Any
@@ -277,15 +276,15 @@ async def root(request: Request):
     """Serve React UI with API URL configuration"""
     dist_dir = Path(__file__).parent / "dist"
     index_file = dist_dir / "index.html"
-
+
     # Check if React build exists
     if not index_file.exists():
         return {"message": "Loom Dashboard API", "docs": "/docs", "note": "React UI not built"}
-
+
     # Read index.html
     with open(index_file, "r", encoding="utf-8") as f:
         html_content = f.read()
-
+
     # Inject API URL configuration
     api_url = str(request.base_url).rstrip('/')
     config_script = f"""
@@ -293,10 +292,10 @@ async def root(request: Request):
         window.__API_URL__ = "{api_url}";
     </script>
     """
-
+
     # Insert before </head> tag
     html_content = html_content.replace("</head>", f"{config_script}</head>")
-
+
     return HTMLResponse(content=html_content)


{loom_core-1.5.0.dist-info → loom_core-1.7.0.dist-info}/RECORD
CHANGED
@@ -10,13 +10,13 @@ loom/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 loom/core/compiled.py,sha256=Y2NQDMZVLee65ILgJoK2Ny_OcR1T4LJnOQWQcyg0Z7U,1178
 loom/core/context.py,sha256=i8trZfv0xdsu148tjzYGI_fNsPGfFmPY2KwXfZLICsE,10910
 loom/core/engine.py,sha256=oHz_4YviHiw1E9XHWkELEHQ5KmAQXCKBebZDOwHXCCQ,5744
-loom/core/graph.py,sha256=
+loom/core/graph.py,sha256=_1u7CjhkynDFKewFWxCkxavO_AECFQqwY7DSe865jiM,14712
 loom/core/handle.py,sha256=beOVmJit-gSrk7II4c2KuSubmr5OIPls82_vJ-pI04U,5532
 loom/core/logger.py,sha256=ZSOZydQ-o8bfhh8VBcSWZNVE-NBvEedN7LDcbAEVJL0,1829
 loom/core/runner.py,sha256=nNA-EcdJt3UEgwzKHyb22xD7aLT8kI01x5UVdLteSI8,1998
 loom/core/state.py,sha256=KynxRQfroYxUgv3Toy0c-yrPsmD5zHn7L3s_y8tkgcU,3142
 loom/core/worker.py,sha256=jpTtvM1rIToVkG4SwJu1W9Q-eL3bw8b3N8KsxxrRovA,5322
-loom/core/workflow.py,sha256=
+loom/core/workflow.py,sha256=aAnnUqnvhz8m6wyZ7m6FRb21Wg8jsQxmnIyVF4IwYfM,6283
 loom/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 loom/database/db.py,sha256=WcP0B4lVYyXTLc1obVaeULJk-5uIs7nFSoBP3YL5ax0,27551
 loom/decorators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -42,15 +42,16 @@ loom/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 loom/schemas/activity.py,sha256=0sYvmmsVhDFFPyOHkVm0tajVaAAzoq5wY6-K03nPSos,266
 loom/schemas/database.py,sha256=DlMBvacsJ9oceBHkrMiJC7uD3_rCHFe6Kwa2STFcKOw,283
 loom/schemas/events.py,sha256=Gz-R836nVXNSsp4Y54idhKs_WTvzFPmx5KlA8PunP28,2216
-loom/schemas/graph.py,sha256=
+loom/schemas/graph.py,sha256=4p6_kBsPozq_f-9l8RCZCPxWwAAQvuQsY6stfs8Zj2c,5656
+loom/schemas/state.py,sha256=7zuN2a2AYnNdcpG0mgXCVXccs2rf8-oFYJFYQR34TTA,707
 loom/schemas/tasks.py,sha256=DJbLInggIGxI-CfP1kSyK79Bz83e3sZyEKh6C2HE8q4,1341
-loom/schemas/workflow.py,sha256=
+loom/schemas/workflow.py,sha256=sM05vl7JY7OEcHF0tKW2k2XoQmh9HttyHv7JROQcPqo,708
 loom/web/__init__.py,sha256=PZC66J5iBFo4PPOkj7QR9RNab8BbQcOHyy_YbXLQr1Y,200
-loom/web/main.py,sha256=
+loom/web/main.py,sha256=7wlcxdB3LvTIaJ6MnhYOvuwmHqdV70S6l6c3kP2BLTo,10335
 loom/web/schemas.py,sha256=yHVFA9xIH-jrVcai8EWSpl8_ZeiESj9vTxOZshzBjCI,21561
 loom/web/api/__init__.py,sha256=QIKxk00wHRkU9lgPr-FNSOnoUStGpf08rZleXXM51GE,97
 loom/web/api/events.py,sha256=mSP09HaI2lbgZ6rlXUCEYcqMUywGSQ7Id-fryLYXNUo,10292
-loom/web/api/graphs.py,sha256=
+loom/web/api/graphs.py,sha256=OzYJ13mF9Dfhe_DH9zo0BpvekzDLmLCLVXs84pw0Keg,8355
 loom/web/api/logs.py,sha256=8xRparl14uiqliWrynlOBhH_KZZkxULWamqxd4tcoYs,10522
 loom/web/api/stats.py,sha256=sZkxHzrTLuYJXRFXjSWyb5pxT4IKQCaCM_5fyr58meQ,10451
 loom/web/api/tasks.py,sha256=U8Ap995pbjDW8jQAYNpzGb-9-Nd1Xr8PPHdY7L_pEAA,10383
@@ -59,9 +60,9 @@ loom/web/dist/index.html,sha256=j04yxn_fMdXOhyS1spt5ayGgvVLV4lAjcWqxvxLmaWU,507
 loom/web/dist/vite.svg,sha256=SnSK_UQ5GLsWWRyDTEAdrjPoeGGrXbrQgRw6O0qSFPs,1497
 loom/web/dist/assets/index-CKj6DvD9.css,sha256=mYQs_55713g8TdhODZm-2gRU12pK74CyqfIx58gyurc,63560
 loom/web/dist/assets/index-Vavv5UoS.js,sha256=TOGd_FVWGKJ6ovnNTQBdqBE2WwWNCy-W8E7y2HvwlaQ,841621
-loom_core-1.
-loom_core-1.
-loom_core-1.
-loom_core-1.
-loom_core-1.
-loom_core-1.
+loom_core-1.7.0.dist-info/licenses/LICENSE,sha256=8EpC-clAYRUfJQ92T3iQEIIWYjx2A3Kfk28zOd8lh7I,1095
+loom_core-1.7.0.dist-info/METADATA,sha256=oCDFh6_O01E1C-f6VEiuUvq5gqqkGqouOCV5pVaoTIQ,9013
+loom_core-1.7.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+loom_core-1.7.0.dist-info/entry_points.txt,sha256=Jx5HXHL2y7jvSjkwkH3QqF954cbSxiE6OGwL2coldyE,42
+loom_core-1.7.0.dist-info/top_level.txt,sha256=cAfRgAuCuit-cU9iBrf0bS4ovvmq-URykNd9fmYMojg,5
+loom_core-1.7.0.dist-info/RECORD,,
Files without changes (renamed with the dist-info directory only): WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt.