edda_framework-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,362 @@
+ """
+ Mermaid diagram generator for workflow visualization.
+ 
+ This module generates Mermaid flowchart syntax from workflow structure
+ extracted by the AST analyzer.
+ """
+ 
+ from typing import Any
+ 
+ 
+ class MermaidGenerator:
+     """
+     Generator for Mermaid flowchart diagrams.
+ 
+     Converts workflow structure dictionaries into Mermaid flowchart syntax.
+     """
+ 
+     def __init__(self) -> None:
+         """Initialize the Mermaid generator."""
+         self.node_counter = 0
+         self.lines: list[str] = []
+         self.compensation_nodes: list[tuple[str, str]] = []  # (from, to) pairs
+ 
+     def generate(self, workflow: dict[str, Any]) -> str:
+         """
+         Generate Mermaid flowchart from workflow structure.
+ 
+         Args:
+             workflow: Workflow dictionary from WorkflowAnalyzer
+ 
+         Returns:
+             Mermaid flowchart syntax as string
+         """
+         self.node_counter = 0
+         self.lines = ["flowchart TD"]
+         self.compensation_nodes = []
+ 
+         # Start node
+         start_id = "Start"
+         self.lines.append(f"    {start_id}([{workflow['name']}])")
+ 
+         # Generate steps
+         prev_id = self._generate_steps(workflow["steps"], start_id)
+ 
+         # End node
+         end_id = "End"
+         self.lines.append(f"    {end_id}([Complete])")
+         self.lines.append(f"    {prev_id} --> {end_id}")
+ 
+         # Add compensation paths (dashed arrows in reverse)
+         if self.compensation_nodes:
+             self.lines.append("")
+             self.lines.append("    %% Compensation paths")
+             for from_node, to_node in reversed(self.compensation_nodes):
+                 self.lines.append(f"    {from_node} -.compensation.-> {to_node}")
+ 
+         return "\n".join(self.lines)
+ 
+     def _generate_steps(self, steps: list[dict[str, Any]], prev_id: str) -> str:
+         """
+         Generate Mermaid nodes for a sequence of steps.
+ 
+         Args:
+             steps: List of step dictionaries
+             prev_id: ID of the previous node
+ 
+         Returns:
+             ID of the last node in the sequence
+         """
+         current_id = prev_id
+ 
+         for step in steps:
+             step_type = step.get("type")
+ 
+             if step_type == "activity":
+                 # Regular activity call
+                 node_id = self._next_node_id()
+                 self.lines.append(f"    {node_id}[{step['function']}]")
+                 self.lines.append(f"    {current_id} --> {node_id}")
+                 current_id = node_id
+ 
+             elif step_type == "compensation":
+                 # Compensation registration
+                 node_id = self._next_node_id()
+                 func_name = step["function"]
+                 self.lines.append(f"    {node_id}[register_compensation:<br/>{func_name}]")
+                 self.lines.append(f"    {current_id} --> {node_id}")
+                 self.lines.append(f"    style {node_id} fill:#ffe6e6")
+ 
+                 # Track compensation for reverse path
+                 self.compensation_nodes.append((current_id, node_id))
+ 
+                 current_id = node_id
+ 
+             elif step_type == "wait_event":
+                 # Event waiting
+                 node_id = self._next_node_id()
+                 event_type = step.get("event_type", "unknown")
+                 timeout = step.get("timeout")
+ 
+                 label = f"wait_event:<br/>{event_type}"
+                 if timeout:
+                     label += f"<br/>timeout: {timeout}s"
+ 
+                 self.lines.append(f"    {node_id}{{{{{label}}}}}")
+                 self.lines.append(f"    {current_id} --> {node_id}")
+                 self.lines.append(f"    style {node_id} fill:#fff4e6")
+                 current_id = node_id
+ 
+             elif step_type == "condition":
+                 # Conditional branch (if/else)
+                 current_id = self._generate_conditional(step, current_id)
+ 
+             elif step_type == "try":
+                 # Try-except block
+                 current_id = self._generate_try_except(step, current_id)
+ 
+             elif step_type == "loop":
+                 # Loop (for/while)
+                 current_id = self._generate_loop(step, current_id)
+ 
+             elif step_type == "match":
+                 # Match-case statement (Python 3.10+)
+                 current_id = self._generate_match(step, current_id)
+ 
+         return current_id
+ 
+     def _generate_conditional(self, condition: dict[str, Any], prev_id: str) -> str:
+         """
+         Generate conditional branch (if/else).
+ 
+         Args:
+             condition: Condition step dictionary
+             prev_id: Previous node ID
+ 
+         Returns:
+             ID of merge node
+         """
+         # Condition node
+         cond_id = self._next_node_id()
+         test_expr = condition.get("test", "?")
+         self.lines.append(f"    {cond_id}{{{test_expr}?}}")
+         self.lines.append(f"    {prev_id} --> {cond_id}")
+ 
+         # Process if branch
+         if_branch = condition.get("if_branch", [])
+         else_branch = condition.get("else_branch", [])
+ 
+         # Create merge node
+         merge_id = self._next_node_id()
+ 
+         if if_branch:
+             branch_start = len(self.lines)
+             if_end = self._generate_steps(if_branch, cond_id)
+             # Label the edge leaving the condition node with "Yes". The branch may
+             # end with a node or style line, so scan its lines for the first edge
+             # out of cond_id instead of assuming it is the most recent line.
+             for i in range(branch_start, len(self.lines)):
+                 if f"{cond_id} -->" in self.lines[i]:
+                     self.lines[i] = self.lines[i].replace("-->", "-->|Yes|", 1)
+                     break
+             self.lines.append(f"    {if_end} --> {merge_id}")
+         else:
+             self.lines.append(f"    {cond_id} -->|Yes| {merge_id}")
+ 
+         # Process else branch
+         if else_branch:
+             branch_start = len(self.lines)
+             else_end = self._generate_steps(else_branch, cond_id)
+             # Label the edge leaving the condition node with "No"
+             for i in range(branch_start, len(self.lines)):
+                 if f"{cond_id} -->" in self.lines[i]:
+                     self.lines[i] = self.lines[i].replace("-->", "-->|No|", 1)
+                     break
+             self.lines.append(f"    {else_end} --> {merge_id}")
+         else:
+             self.lines.append(f"    {cond_id} -->|No| {merge_id}")
+ 
+         return merge_id
+ 
+     def _generate_try_except(self, try_block: dict[str, Any], prev_id: str) -> str:
+         """
+         Generate try-except block.
+ 
+         Args:
+             try_block: Try block dictionary
+             prev_id: Previous node ID
+ 
+         Returns:
+             ID of merge node after try-except
+         """
+         # Try block marker
+         try_id = self._next_node_id()
+         self.lines.append(f"    {try_id}[try block]")
+         self.lines.append(f"    {prev_id} --> {try_id}")
+         self.lines.append(f"    style {try_id} fill:#e6f2ff")
+ 
+         # Process try body
+         try_body = try_block.get("try_body", [])
+         try_end = self._generate_steps(try_body, try_id)
+ 
+         # Merge node after try-except
+         merge_id = self._next_node_id()
+ 
+         # Success path
+         self.lines.append(f"    {try_end} -->|success| {merge_id}")
+ 
+         # Exception handlers
+         except_handlers = try_block.get("except_handlers", [])
+         for handler in except_handlers:
+             except_id = self._next_node_id()
+             exception = handler.get("exception", "Exception")
+             self.lines.append(f"    {except_id}[except {exception}]")
+             self.lines.append(f"    {try_end} -.error.-> {except_id}")
+             self.lines.append(f"    style {except_id} fill:#ffe6e6")
+ 
+             # Process except body
+             except_body = handler.get("body", [])
+             if except_body:
+                 except_end = self._generate_steps(except_body, except_id)
+                 self.lines.append(f"    {except_end} --> {merge_id}")
+             else:
+                 self.lines.append(f"    {except_id} --> {merge_id}")
+ 
+         # Finally block (if exists)
+         finally_body = try_block.get("finally_body", [])
+         if finally_body:
+             finally_id = self._next_node_id()
+             self.lines.append(f"    {finally_id}[finally]")
+             self.lines.append(f"    style {finally_id} fill:#f0f0f0")
+             self.lines.append(f"    {merge_id} --> {finally_id}")
+             merge_id = self._generate_steps(finally_body, finally_id)
+ 
+         return merge_id
+ 
+     def _generate_loop(self, loop: dict[str, Any], prev_id: str) -> str:
+         """
+         Generate loop structure (simplified).
+ 
+         Args:
+             loop: Loop dictionary
+             prev_id: Previous node ID
+ 
+         Returns:
+             ID of node after loop
+         """
+         loop_type = loop.get("loop_type", "loop")
+         loop_id = self._next_node_id()
+ 
+         if loop_type == "for":
+             target = loop.get("target", "item")
+             iter_expr = loop.get("iter", "items")
+             label = f"for {target} in {iter_expr}"
+         else:  # while
+             test = loop.get("test", "condition")
+             label = f"while {test}"
+ 
+         self.lines.append(f"    {loop_id}[{label}]")
+         self.lines.append(f"    {prev_id} --> {loop_id}")
+         self.lines.append(f"    style {loop_id} fill:#fff0f0")
+ 
+         # Process loop body (simplified: rendered inline with a dashed loop-back edge)
+         body = loop.get("body", [])
+         if body:
+             body_end = self._generate_steps(body, loop_id)
+             # Loop back
+             self.lines.append(f"    {body_end} -.loop.-> {loop_id}")
+ 
+         # Exit loop
+         exit_id = self._next_node_id()
+         self.lines.append(f"    {loop_id} -->|exit| {exit_id}")
+ 
+         return exit_id
+ 
+     def _generate_match(self, match: dict[str, Any], prev_id: str) -> str:
+         """
+         Generate match-case structure (Python 3.10+).
+ 
+         Args:
+             match: Match block dictionary
+             prev_id: Previous node ID
+ 
+         Returns:
+             ID of merge node after match
+         """
+         # Match node (hexagon shape for the subject)
+         match_id = self._next_node_id()
+         subject = match.get("subject", "value")
+ 
+         # Sanitize subject expression for Mermaid
+         subject = (
+             subject.replace('"', "'")
+             .replace("{", "(")
+             .replace("}", ")")
+             .replace("[", ".")
+             .replace("]", "")
+             .replace("'", "")
+         )
+         if len(subject) > 30:
+             subject = subject[:27] + "..."
+ 
+         self.lines.append(f"    {match_id}{{{{match {subject}}}}}")
+         self.lines.append(f"    {prev_id} --> {match_id}")
+         self.lines.append(f"    style {match_id} fill:#e8f5e9,stroke:#4caf50,stroke-width:2px")
+ 
+         # Create merge node
+         merge_id = self._next_node_id()
+ 
+         # Process each case
+         cases = match.get("cases", [])
+         for case in cases:
+             pattern = case.get("pattern", "_")
+             guard = case.get("guard")
+             body = case.get("body", [])
+ 
+             # Sanitize pattern for Mermaid
+             pattern = (
+                 pattern.replace('"', "'").replace("{", "(").replace("}", ")").replace("|", " or ")
+             )
+             if len(pattern) > 25:
+                 pattern = pattern[:22] + "..."
+ 
+             # Create label for the edge
+             if guard:
+                 # Sanitize guard
+                 guard_str = guard.replace('"', "'").replace("{", "(").replace("}", ")")
+                 if len(guard_str) > 15:
+                     guard_str = guard_str[:12] + "..."
+                 edge_label = f"case {pattern} if {guard_str}"
+             else:
+                 edge_label = f"case {pattern}"
+ 
+             # Process case body
+             if body:
+                 # Create a case-specific start node to ensure proper branching visualization
+                 case_start_id = self._next_node_id()
+                 self.lines.append(f"    {case_start_id}(( ))")
+                 self.lines.append(f"    {match_id} -->|{edge_label}| {case_start_id}")
+                 self.lines.append(
+                     f"    style {case_start_id} fill:#fff,stroke:#999,stroke-width:1px"
+                 )
+ 
+                 # Generate body steps starting from case_start_id
+                 case_end = self._generate_steps(body, case_start_id)
+ 
+                 # Connect to merge node
+                 self.lines.append(f"    {case_end} --> {merge_id}")
+             else:
+                 # Empty case body - direct connection to merge
+                 self.lines.append(f"    {match_id} -->|{edge_label}| {merge_id}")
+ 
+         return merge_id
+ 
+     def _next_node_id(self) -> str:
+         """
+         Generate next unique node ID.
+ 
+         Returns:
+             Unique node ID string
+         """
+         self.node_counter += 1
+         return f"N{self.node_counter}"
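For reference, a minimal sketch of driving the generator by hand. The input dict is hypothetical but mirrors the keys this module reads; in the package it would come from the WorkflowAnalyzer, which is not part of this diff, and MermaidGenerator is assumed to be in scope (the file's import path is not shown here):

# Hypothetical workflow structure of the shape WorkflowAnalyzer produces.
workflow = {
    "name": "order_workflow",
    "steps": [
        {"type": "activity", "function": "reserve_inventory"},
        {"type": "compensation", "function": "release_inventory"},
        {"type": "wait_event", "event_type": "payment.completed", "timeout": 60},
    ],
}
print(MermaidGenerator().generate(workflow))

This prints a flowchart along these lines:

flowchart TD
    Start([order_workflow])
    N1[reserve_inventory]
    Start --> N1
    N2[register_compensation:<br/>release_inventory]
    N1 --> N2
    style N2 fill:#ffe6e6
    N3{{wait_event:<br/>payment.completed<br/>timeout: 60s}}
    N2 --> N3
    style N3 fill:#fff4e6
    End([Complete])
    N3 --> End

    %% Compensation paths
    N1 -.compensation.-> N2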
edda/workflow.py ADDED
@@ -0,0 +1,218 @@
+ """
+ Workflow module for Edda framework.
+ 
+ This module provides the @workflow decorator for defining workflow functions
+ and managing workflow instances.
+ """
+ 
+ import functools
+ import inspect
+ from collections.abc import Callable
+ from typing import Any, TypeVar, cast
+ 
+ from edda.pydantic_utils import to_json_dict
+ 
+ F = TypeVar("F", bound=Callable[..., Any])
+ 
+ # Global reference to the replay engine (set by EddaApp during initialization)
+ _replay_engine: Any = None
+ 
+ # Global registry of all @workflow decorated workflows
+ _workflow_registry: dict[str, "Workflow"] = {}
+ 
+ 
+ def set_replay_engine(engine: Any) -> None:
+     """
+     Set the global replay engine.
+ 
+     This is called by EddaApp during initialization.
+ 
+     Args:
+         engine: ReplayEngine instance
+     """
+     global _replay_engine
+     _replay_engine = engine
+ 
+ 
+ def get_all_workflows() -> dict[str, "Workflow"]:
+     """
+     Get all registered workflow definitions.
+ 
+     Returns:
+         Dictionary mapping workflow names to Workflow instances
+     """
+     return _workflow_registry.copy()
+ 
+ 
+ class Workflow:
+     """
+     Wrapper class for workflow functions.
+ 
+     Provides methods for starting and managing workflow instances.
+     """
+ 
+     def __init__(
+         self,
+         func: Callable[..., Any],
+         event_handler: bool = False,
+         lock_timeout_seconds: int | None = None,
+     ):
+         """
+         Initialize workflow wrapper.
+ 
+         Args:
+             func: The async function to wrap as a workflow
+             event_handler: Whether to auto-register as CloudEvent handler
+             lock_timeout_seconds: Default lock timeout for this workflow (None = global default 300s)
+         """
+         self.func = func
+         self.name = func.__name__
+         self.event_handler = event_handler
+         self.lock_timeout_seconds = lock_timeout_seconds
+         functools.update_wrapper(self, func)
+ 
+         # Register in global workflow registry for auto-discovery
+         _workflow_registry[self.name] = self
+ 
+     async def start(self, lock_timeout_seconds: int | None = None, **kwargs: Any) -> str:
+         """
+         Start a new workflow instance.
+ 
+         Pydantic models in kwargs are automatically converted to JSON-compatible dicts
+         for storage. During execution, they will be restored back to Pydantic models
+         based on the workflow function's type hints.
+ 
+         Args:
+             lock_timeout_seconds: Override lock timeout for this specific execution
+                 (None = use decorator default or global default 300s)
+             **kwargs: Input parameters for the workflow (can include Pydantic models)
+ 
+         Returns:
+             Instance ID of the started workflow
+ 
+         Raises:
+             RuntimeError: If replay engine not initialized
+         """
+         if _replay_engine is None:
+             raise RuntimeError(
+                 "Replay engine not initialized. "
+                 "Ensure EddaApp is properly initialized before starting workflows."
+             )
+ 
+         # Convert Pydantic models and Enums to JSON-compatible values for storage
+         processed_kwargs = {k: to_json_dict(v) for k, v in kwargs.items()}
+ 
+         # Determine actual lock timeout (priority: runtime > decorator > global default)
+         actual_timeout = lock_timeout_seconds if lock_timeout_seconds is not None else self.lock_timeout_seconds
+ 
+         instance_id: str = await _replay_engine.start_workflow(
+             workflow_name=self.name,
+             workflow_func=self.func,
+             input_data=processed_kwargs,
+             lock_timeout_seconds=actual_timeout,
+         )
+         return instance_id
+ 
+     async def resume(self, instance_id: str, event: Any = None) -> None:
+         """
+         Resume an existing workflow instance.
+ 
+         Args:
+             instance_id: Workflow instance ID
+             event: Optional event that triggered the resume
+ 
+         Raises:
+             RuntimeError: If replay engine not initialized
+         """
+         if _replay_engine is None:
+             raise RuntimeError(
+                 "Replay engine not initialized. "
+                 "Ensure EddaApp is properly initialized before resuming workflows."
+             )
+ 
+         await _replay_engine.resume_workflow(
+             instance_id=instance_id, workflow_func=self.func, _event=event
+         )
+ 
+     async def __call__(self, *args: Any, **kwargs: Any) -> Any:
+         """
+         Direct call to the workflow function.
+ 
+         This is typically used during replay by the replay engine.
+ 
+         Args:
+             *args: Positional arguments
+             **kwargs: Keyword arguments
+ 
+         Returns:
+             Workflow result
+         """
+         return await self.func(*args, **kwargs)
+ 
+ 
+ def workflow(
+     func: F | None = None,
+     *,
+     event_handler: bool = False,
+     lock_timeout_seconds: int | None = None,
+ ) -> F | Callable[[F], F]:
+     """
+     Decorator for defining workflows.
+ 
+     Workflows are the top-level orchestration functions that coordinate
+     multiple activities. They support deterministic replay and can wait for
+     external events.
+ 
+     By default, workflows are NOT automatically registered as CloudEvent handlers.
+     Set event_handler=True to enable automatic CloudEvent handling.
+ 
+     Example:
+         >>> # Basic workflow (manual event handling)
+         >>> @workflow
+         ... async def order_workflow(ctx: WorkflowContext, order_id: str, amount: int):
+         ...     inventory = await reserve_inventory(ctx, order_id)
+         ...     payment = await process_payment(ctx, order_id, amount)
+         ...     return {"status": "completed"}
+         ...
+         >>> # Start the workflow manually
+         >>> instance_id = await order_workflow.start(order_id="123", amount=100)
+         ...
+         >>> # Workflow with automatic CloudEvent handling
+         >>> @workflow(event_handler=True)
+         ... async def auto_workflow(ctx: WorkflowContext, **kwargs):
+         ...     # This will automatically handle CloudEvents with type="auto_workflow"
+         ...     pass
+         ...
+         >>> # Workflow with custom lock timeout
+         >>> @workflow(lock_timeout_seconds=600)
+         ... async def long_running_workflow(ctx: WorkflowContext, **kwargs):
+         ...     # This workflow will use a 10-minute lock timeout instead of the default 5 minutes
+         ...     pass
+ 
+     Args:
+         func: Async function to wrap as a workflow
+         event_handler: If True, automatically register as CloudEvent handler
+         lock_timeout_seconds: Default lock timeout for this workflow (None = global default 300s)
+ 
+     Returns:
+         Decorated Workflow instance
+     """
+ 
+     def decorator(f: F) -> F:
+         # Verify the function is async
+         if not inspect.iscoroutinefunction(f):
+             raise TypeError(f"Workflow {f.__name__} must be an async function")
+ 
+         # Create the Workflow wrapper
+         workflow_wrapper = Workflow(
+             f, event_handler=event_handler, lock_timeout_seconds=lock_timeout_seconds
+         )
+         return cast(F, workflow_wrapper)
+ 
+     # Support both @workflow and @workflow(...) patterns
+     if func is None:
+         # Called with parameters: @workflow(event_handler=True, lock_timeout_seconds=600)
+         return decorator
+     else:
+         # Called without parameters: @workflow
+         return decorator(func)
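Taken together, a minimal usage sketch (assuming an EddaApp has already been initialized, which calls set_replay_engine; reserve_inventory and process_payment are illustrative activity names, not part of this diff):

from edda.workflow import workflow

@workflow(lock_timeout_seconds=600)
async def order_workflow(ctx, order_id: str, amount: int):
    # The activities below are hypothetical; in Edda each would be awaited
    # and recorded by the replay engine for deterministic replay.
    await reserve_inventory(ctx, order_id)
    await process_payment(ctx, order_id, amount)
    return {"status": "completed"}

# After EddaApp initialization:
#     instance_id = await order_workflow.start(order_id="123", amount=100)
#     await order_workflow.resume(instance_id)  # e.g. when an external event arrives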