edda-framework 0.14.1__py3-none-any.whl → 0.15.1__py3-none-any.whl
This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- edda/integrations/graph/__init__.py +58 -0
- edda/integrations/graph/context.py +81 -0
- edda/integrations/graph/exceptions.py +9 -0
- edda/integrations/graph/graph.py +385 -0
- edda/integrations/graph/nodes.py +144 -0
- edda/integrations/llamaindex/__init__.py +51 -0
- edda/integrations/llamaindex/events.py +160 -0
- edda/integrations/llamaindex/exceptions.py +15 -0
- edda/integrations/llamaindex/workflow.py +306 -0
- edda/viewer_app.py +147 -0
- {edda_framework-0.14.1.dist-info → edda_framework-0.15.1.dist-info}/METADATA +13 -1
- {edda_framework-0.14.1.dist-info → edda_framework-0.15.1.dist-info}/RECORD +15 -5
- edda_framework-0.15.1.dist-info/entry_points.txt +2 -0
- edda_framework-0.14.1.dist-info/entry_points.txt +0 -2
- {edda_framework-0.14.1.dist-info → edda_framework-0.15.1.dist-info}/WHEEL +0 -0
- {edda_framework-0.14.1.dist-info → edda_framework-0.15.1.dist-info}/licenses/LICENSE +0 -0
edda/integrations/llamaindex/__init__.py
ADDED
@@ -0,0 +1,51 @@
+"""
+LlamaIndex Workflow Integration for Edda.
+
+This module provides integration between LlamaIndex Workflow and Edda's durable
+execution framework, making workflow execution crash-recoverable and supporting
+durable wait operations.
+
+Example:
+    from llama_index.core.workflow import Workflow, step, Event, StartEvent, StopEvent
+    from edda import workflow, WorkflowContext
+    from edda.integrations.llamaindex import DurableWorkflowRunner, DurableSleepEvent
+
+    # Define events
+    class ProcessedEvent(Event):
+        data: str
+
+    # Define workflow
+    class MyWorkflow(Workflow):
+        @step
+        async def process(self, ctx: Context, ev: StartEvent) -> ProcessedEvent:
+            return ProcessedEvent(data=f"processed: {ev.input}")
+
+        @step
+        async def finalize(self, ctx: Context, ev: ProcessedEvent) -> StopEvent:
+            return StopEvent(result={"status": "done", "data": ev.data})
+
+    # Create durable runner
+    runner = DurableWorkflowRunner(MyWorkflow)
+
+    # Use in Edda workflow
+    @workflow
+    async def my_workflow(ctx: WorkflowContext, input_data: str) -> dict:
+        result = await runner.run(ctx, input=input_data)
+        return result
+
+Installation:
+    pip install 'edda-framework[llamaindex]'
+"""
+
+from .events import DurableSleepEvent, DurableWaitEvent, ResumeEvent
+from .exceptions import WorkflowExecutionError, WorkflowReplayError
+from .workflow import DurableWorkflowRunner
+
+__all__ = [
+    "DurableWorkflowRunner",
+    "DurableSleepEvent",
+    "DurableWaitEvent",
+    "ResumeEvent",
+    "WorkflowExecutionError",
+    "WorkflowReplayError",
+]
edda/integrations/llamaindex/events.py
ADDED
@@ -0,0 +1,160 @@
+"""Durable events for LlamaIndex Workflow integration.
+
+These events signal to the DurableWorkflow that a durable operation
+(sleep or wait for external event) should be performed.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+# Lazy import to avoid requiring llama-index at import time
+if TYPE_CHECKING:
+    pass
+
+
+def _import_event_class() -> type[Any]:
+    """Import Event class with helpful error message."""
+    try:
+        from llama_index.core.workflow import Event  # type: ignore[import-not-found]
+
+        return Event  # type: ignore[no-any-return]
+    except ImportError as e:
+        msg = (
+            "llama-index-core is not installed. Install with:\n"
+            "  pip install llama-index-core\n"
+            "or\n"
+            "  pip install 'edda-framework[llamaindex]'"
+        )
+        raise ImportError(msg) from e
+
+
+class DurableSleepEvent:
+    """
+    Event that signals a durable sleep operation.
+
+    When a step returns this event, the DurableWorkflow will:
+    1. Record the step completion
+    2. Call Edda's sleep() function (durable timer)
+    3. Resume with the specified resume_data after the sleep completes
+
+    Example:
+        @step
+        async def rate_limited_step(self, ctx: Context, ev: SomeEvent) -> DurableSleepEvent:
+            # Hit rate limit, need to wait
+            return DurableSleepEvent(
+                seconds=60,
+                resume_data={"retry_count": ev.retry_count + 1},
+            )
+    """
+
+    def __init__(
+        self,
+        seconds: float,
+        resume_data: dict[str, Any] | None = None,
+    ) -> None:
+        """
+        Initialize a durable sleep event.
+
+        Args:
+            seconds: Number of seconds to sleep
+            resume_data: Data to include when resuming after sleep
+        """
+        self.seconds = seconds
+        self.resume_data = resume_data or {}
+
+    def to_dict(self) -> dict[str, Any]:
+        """Serialize to dictionary."""
+        return {
+            "_type": "DurableSleepEvent",
+            "seconds": self.seconds,
+            "resume_data": self.resume_data,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> DurableSleepEvent:
+        """Deserialize from dictionary."""
+        return cls(
+            seconds=data["seconds"],
+            resume_data=data.get("resume_data", {}),
+        )
+
+
+class DurableWaitEvent:
+    """
+    Event that signals waiting for an external event.
+
+    When a step returns this event, the DurableWorkflow will:
+    1. Record the step completion
+    2. Call Edda's wait_event() function (durable event subscription)
+    3. Resume with the received event data after the event arrives
+
+    Example:
+        @step
+        async def wait_for_approval(self, ctx: Context, ev: OrderEvent) -> DurableWaitEvent:
+            return DurableWaitEvent(
+                event_type=f"approval.{ev.order_id}",
+                timeout_seconds=3600,  # 1 hour timeout
+            )
+    """
+
+    def __init__(
+        self,
+        event_type: str,
+        timeout_seconds: float | None = None,
+    ) -> None:
+        """
+        Initialize a durable wait event.
+
+        Args:
+            event_type: The event type to wait for (e.g., "payment.completed")
+            timeout_seconds: Optional timeout in seconds
+        """
+        self.event_type = event_type
+        self.timeout_seconds = timeout_seconds
+
+    def to_dict(self) -> dict[str, Any]:
+        """Serialize to dictionary."""
+        return {
+            "_type": "DurableWaitEvent",
+            "event_type": self.event_type,
+            "timeout_seconds": self.timeout_seconds,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> DurableWaitEvent:
+        """Deserialize from dictionary."""
+        return cls(
+            event_type=data["event_type"],
+            timeout_seconds=data.get("timeout_seconds"),
+        )
+
+
+class ResumeEvent:
+    """
+    Event used to resume workflow after a durable operation.
+
+    This is an internal event type used by DurableWorkflow to resume
+    execution after a DurableSleepEvent or DurableWaitEvent completes.
+    """
+
+    def __init__(self, data: dict[str, Any] | None = None) -> None:
+        """
+        Initialize a resume event.
+
+        Args:
+            data: Data from the completed operation (sleep resume_data or received event)
+        """
+        self.data = data or {}
+
+    def to_dict(self) -> dict[str, Any]:
+        """Serialize to dictionary."""
+        return {
+            "_type": "ResumeEvent",
+            "data": self.data,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> ResumeEvent:
+        """Deserialize from dictionary."""
+        return cls(data=data.get("data", {}))
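The durable event classes above persist their state as plain dictionaries via to_dict()/from_dict(), which is what gets recorded between steps. A minimal round-trip sketch, assuming edda-framework 0.15.1 is installed (llama-index-core is not needed here, since events.py only imports it lazily):

# Round-trip sketch for the durable event payloads defined above.
# Assumes edda-framework>=0.15.1 only.
from edda.integrations.llamaindex.events import DurableSleepEvent, DurableWaitEvent

sleep_ev = DurableSleepEvent(seconds=60, resume_data={"retry_count": 2})
payload = sleep_ev.to_dict()
# -> {'_type': 'DurableSleepEvent', 'seconds': 60, 'resume_data': {'retry_count': 2}}
restored = DurableSleepEvent.from_dict(payload)
assert restored.seconds == 60 and restored.resume_data == {"retry_count": 2}

wait_ev = DurableWaitEvent(event_type="approval.order-42", timeout_seconds=3600)
assert DurableWaitEvent.from_dict(wait_ev.to_dict()).event_type == "approval.order-42"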
edda/integrations/llamaindex/exceptions.py
ADDED
@@ -0,0 +1,15 @@
+"""Exceptions for LlamaIndex Workflow integration."""
+
+
+class WorkflowExecutionError(Exception):
+    """Error during workflow execution."""
+
+    def __init__(self, message: str, step_name: str | None = None) -> None:
+        self.step_name = step_name
+        super().__init__(message)
+
+
+class WorkflowReplayError(Exception):
+    """Error during workflow replay."""
+
+    pass
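WorkflowExecutionError carries the failing step's name, so callers can report which LlamaIndex step broke. A hypothetical caller-side sketch (only the exception class and its step_name attribute come from the file above; the wrapped coroutine is a placeholder):

# Hypothetical error-reporting wrapper; run_workflow is a placeholder coroutine.
from edda.integrations.llamaindex import WorkflowExecutionError

async def run_with_reporting(run_workflow):
    try:
        return await run_workflow()
    except WorkflowExecutionError as err:
        print(f"LlamaIndex step {err.step_name!r} failed: {err}")
        raise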
edda/integrations/llamaindex/workflow.py
ADDED
@@ -0,0 +1,306 @@
+"""DurableWorkflow - makes LlamaIndex Workflow execution durable via Edda.
+
+This module provides integration between LlamaIndex Workflow and Edda's durable
+execution framework, making workflow execution crash-recoverable and supporting
+durable wait operations.
+"""
+
+from __future__ import annotations
+
+import importlib
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, TypeVar
+
+from edda.activity import activity
+from edda.pydantic_utils import to_json_dict
+
+from .events import DurableSleepEvent, DurableWaitEvent, ResumeEvent
+from .exceptions import WorkflowExecutionError
+
+if TYPE_CHECKING:
+    from edda.context import WorkflowContext
+
+T = TypeVar("T")
+
+
+def _import_llamaindex_workflow() -> Any:
+    """Import llama_index.core.workflow with helpful error message."""
+    try:
+        from llama_index.core import workflow  # type: ignore[import-not-found]
+
+        return workflow
+    except ImportError as e:
+        msg = (
+            "llama-index-core is not installed. Install with:\n"
+            "  pip install llama-index-core\n"
+            "or\n"
+            "  pip install 'edda-framework[llamaindex]'"
+        )
+        raise ImportError(msg) from e
+
+
+def _serialize_event(event: Any) -> dict[str, Any]:
+    """Serialize a LlamaIndex Event to a dictionary."""
+    if isinstance(event, DurableSleepEvent):
+        return event.to_dict()
+    if isinstance(event, DurableWaitEvent):
+        return event.to_dict()
+    if isinstance(event, ResumeEvent):
+        return event.to_dict()
+
+    # For LlamaIndex events, use model_dump if available (Pydantic)
+    if hasattr(event, "model_dump"):
+        data = event.model_dump()
+    elif hasattr(event, "__dict__"):
+        data = {k: v for k, v in event.__dict__.items() if not k.startswith("_")}
+    else:
+        data = {}
+
+    return {
+        "_type": f"{event.__class__.__module__}:{event.__class__.__qualname__}",
+        "_data": to_json_dict(data),
+    }
+
+
+def _deserialize_event(data: dict[str, Any]) -> Any:
+    """Deserialize a dictionary to a LlamaIndex Event."""
+    event_type = data.get("_type", "")
+
+    # Handle our special events
+    if event_type == "DurableSleepEvent":
+        return DurableSleepEvent.from_dict(data)
+    if event_type == "DurableWaitEvent":
+        return DurableWaitEvent.from_dict(data)
+    if event_type == "ResumeEvent":
+        return ResumeEvent.from_dict(data)
+
+    # Handle LlamaIndex events
+    if ":" in event_type:
+        module_path, class_name = event_type.rsplit(":", 1)
+        module = importlib.import_module(module_path)
+        event_class = getattr(module, class_name)
+        event_data = data.get("_data", {})
+
+        # Use model_validate for Pydantic models
+        if hasattr(event_class, "model_validate"):
+            return event_class.model_validate(event_data)
+        return event_class(**event_data)
+
+    raise ValueError(f"Unknown event type: {event_type}")
+
+
+@dataclass
+class StepResult:
+    """Result of a step execution."""
+
+    event: Any
+    step_name: str
+
+
+@activity
+async def _run_step(
+    ctx: WorkflowContext,  # noqa: ARG001 - Used by @activity decorator
+    workflow_class_path: str,
+    step_name: str,
+    event_data: dict[str, Any],
+    context_data: dict[str, Any],
+) -> dict[str, Any]:
+    """
+    Execute a single workflow step as a durable activity.
+
+    This activity is the core of DurableWorkflow - it runs one step and returns
+    the serialized result event.
+    """
+    # Import the workflow class
+    module_path, class_name = workflow_class_path.rsplit(":", 1)
+    module = importlib.import_module(module_path)
+    workflow_class = getattr(module, class_name)
+
+    # Create workflow instance
+    workflow_instance = workflow_class()
+
+    # Deserialize the input event
+    input_event = _deserialize_event(event_data)
+
+    # Get the step method
+    step_method = getattr(workflow_instance, step_name, None)
+    if step_method is None:
+        raise WorkflowExecutionError(f"Step '{step_name}' not found", step_name)
+
+    # Create a minimal context for the step
+    # Note: LlamaIndex Context is created per-run, we create a simple mock
+
+    # Execute the step
+    try:
+        # The step method signature is: async def step(self, ctx, event) -> Event
+        # We need to provide a context - use a simple object with store
+        @dataclass
+        class SimpleContext:
+            store: dict[str, Any]
+
+        simple_ctx = SimpleContext(store=context_data.get("store", {}))
+        result_event = await step_method(simple_ctx, input_event)
+    except Exception as e:
+        raise WorkflowExecutionError(f"Step '{step_name}' failed: {e}", step_name) from e
+
+    # Serialize the result
+    return {
+        "event": _serialize_event(result_event),
+        "context_store": simple_ctx.store,
+    }
+
+
+class DurableWorkflowRunner:
+    """
+    Runner that executes a LlamaIndex Workflow with Edda durability.
+
+    This class wraps a LlamaIndex Workflow and executes it step-by-step,
+    recording each step as an Edda Activity for crash recovery.
+
+    Example:
+        from llama_index.core.workflow import Workflow, step, StartEvent, StopEvent
+
+        class MyWorkflow(Workflow):
+            @step
+            async def process(self, ctx: Context, ev: StartEvent) -> StopEvent:
+                return StopEvent(result="done")
+
+        runner = DurableWorkflowRunner(MyWorkflow)
+
+        @workflow
+        async def my_edda_workflow(ctx: WorkflowContext) -> str:
+            result = await runner.run(ctx, input_data="hello")
+            return result
+    """
+
+    def __init__(self, workflow_class: type) -> None:
+        """
+        Initialize DurableWorkflowRunner.
+
+        Args:
+            workflow_class: A LlamaIndex Workflow class (not instance)
+        """
+        self._workflow_class = workflow_class
+        self._class_path = f"{workflow_class.__module__}:{workflow_class.__qualname__}"
+
+        # Validate it's a Workflow subclass
+        llamaindex_workflow = _import_llamaindex_workflow()
+        if not issubclass(workflow_class, llamaindex_workflow.Workflow):
+            raise TypeError(f"Expected a Workflow subclass, got {type(workflow_class).__name__}")
+
+    async def run(
+        self,
+        ctx: WorkflowContext,
+        **kwargs: Any,
+    ) -> Any:
+        """
+        Execute the workflow durably with Edda crash recovery.
+
+        Args:
+            ctx: Edda WorkflowContext
+            **kwargs: Arguments passed to StartEvent
+
+        Returns:
+            The result from StopEvent
+        """
+        from edda.channels import sleep as edda_sleep
+        from edda.channels import wait_event as edda_wait_event
+
+        llamaindex_workflow = _import_llamaindex_workflow()
+
+        # Create a workflow instance to analyze its steps
+        workflow_instance = self._workflow_class()
+
+        # Build step registry from the workflow
+        step_registry = self._build_step_registry(workflow_instance)
+
+        # Start with StartEvent
+        start_event_class = llamaindex_workflow.StartEvent
+        current_event = start_event_class(**kwargs)
+        context_store: dict[str, Any] = {}
+
+        # Main execution loop
+        while True:
+            # Find the step that handles this event type
+            event_type = type(current_event)
+            step_name = self._find_step_for_event(step_registry, event_type)
+
+            if step_name is None:
+                # No step found - check if it's a stop event
+                if isinstance(current_event, llamaindex_workflow.StopEvent):
+                    return current_event.result
+                raise WorkflowExecutionError(f"No step found for event type: {event_type.__name__}")
+
+            # Execute the step as an activity
+            result = await _run_step(  # type: ignore[misc,call-arg]
+                ctx,  # type: ignore[arg-type]
+                self._class_path,
+                step_name,
+                _serialize_event(current_event),
+                {"store": context_store},
+            )
+
+            # Update context store
+            context_store = result.get("context_store", {})
+
+            # Deserialize the result event
+            result_event = _deserialize_event(result["event"])
+
+            # Handle special durable events
+            if isinstance(result_event, DurableSleepEvent):
+                # Durable sleep
+                await edda_sleep(ctx, int(result_event.seconds))
+                # Resume with ResumeEvent containing the sleep's resume_data
+                current_event = ResumeEvent(data=result_event.resume_data)
+
+            elif isinstance(result_event, DurableWaitEvent):
+                # Durable wait for external event
+                received = await edda_wait_event(
+                    ctx,
+                    result_event.event_type,
+                    timeout_seconds=(
+                        int(result_event.timeout_seconds) if result_event.timeout_seconds else None
+                    ),
+                )
+                # Resume with ResumeEvent containing the received data
+                current_event = ResumeEvent(data=received.data if hasattr(received, "data") else {})
+
+            elif isinstance(result_event, llamaindex_workflow.StopEvent):
+                # Workflow completed
+                return result_event.result
+
+            else:
+                # Normal event transition
+                current_event = result_event
+
+    def _build_step_registry(self, workflow_instance: Any) -> dict[str, list[type]]:
+        """Build a registry mapping step names to their input event types."""
+        registry: dict[str, list[type]] = {}
+
+        # Look for methods decorated with @step in the class
+        workflow_class = type(workflow_instance)
+        for name in dir(workflow_class):
+            if name.startswith("_"):
+                continue
+
+            # Get the raw function from the class (not bound method)
+            method = getattr(workflow_class, name, None)
+            if method is None or not callable(method):
+                continue
+
+            # Check if it's a step (has _step_config attribute from @step decorator)
+            if hasattr(method, "_step_config"):
+                step_config = method._step_config
+                # Get accepted event types directly from step config
+                if hasattr(step_config, "accepted_events"):
+                    registry[name] = list(step_config.accepted_events)
+
+        return registry
+
+    def _find_step_for_event(self, registry: dict[str, list[type]], event_type: type) -> str | None:
+        """Find the step that handles the given event type."""
+        for step_name, accepted_types in registry.items():
+            for accepted_type in accepted_types:
+                if issubclass(event_type, accepted_type):
+                    return step_name
+        return None
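Because _import_event_class() and _import_llamaindex_workflow() defer the llama-index import, the integration package imports cleanly even when the optional extra is missing; the install-hint ImportError only surfaces once a DurableWorkflowRunner is constructed, which is also where non-Workflow classes are rejected. A small sketch of that behaviour (assumption: edda-framework 0.15.1 is installed; which branch fires depends on whether llama-index-core is present):

# Sketch of DurableWorkflowRunner's construction-time checks.
# Assumes only edda-framework>=0.15.1; dict is deliberately not a Workflow subclass.
from edda.integrations.llamaindex import DurableSleepEvent, DurableWorkflowRunner

# The durable event types never touch llama-index-core at import time.
print(DurableSleepEvent(seconds=5).to_dict())

try:
    DurableWorkflowRunner(dict)
except ImportError as err:
    # llama-index-core missing: the install hint from _import_llamaindex_workflow().
    print(err)
except TypeError as err:
    # llama-index-core present: the Workflow-subclass validation in __init__ fires.
    print(err)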
edda/viewer_app.py
ADDED
@@ -0,0 +1,147 @@
+"""
+Edda Workflow Viewer - Interactive web interface for workflow visualization.
+
+This application provides an interactive web interface to visualize
+workflow instances with clickable Mermaid diagrams.
+
+Installation:
+    # Install Edda core only (viewer command available but won't run)
+    pip install edda-framework
+
+    # Install with viewer dependencies (recommended)
+    uv sync --extra viewer
+    # or
+    pip install edda-framework[viewer]
+
+Usage:
+    # View existing workflows (no module imports)
+    uv run edda-viewer
+
+    # Import demo_app workflows
+    uv run edda-viewer --import-module demo_app
+
+    # Import custom workflows
+    uv run edda-viewer -m my_workflows
+
+    # Multiple modules
+    uv run edda-viewer -m demo_app -m my_workflows
+
+    # Custom database and port
+    uv run edda-viewer --db my.db --port 9000 -m my_workflows
+
+    # Using Just (project root)
+    just viewer                              # No modules
+    just viewer-demo                         # With demo_app
+    just viewer demo.db 8080 "-m demo_app"   # Custom
+
+Alternative (direct script execution):
+    # Run the viewer script directly
+    uv run python viewer_app.py -m demo_app
+
+    # Or with auto-reload for development
+    nicegui viewer_app.py --reload
+
+Then open http://localhost:<port> in your browser.
+"""
+
+import argparse
+import os
+import sys
+
+from edda import EddaApp
+from edda.viewer_ui import start_viewer
+
+
+def parse_args() -> argparse.Namespace:
+    """Parse command line arguments."""
+    parser = argparse.ArgumentParser(
+        description="Edda Workflow Viewer - Interactive web interface for workflow visualization",
+        epilog="Example: edda-viewer --db my.db --port 9000 -m demo_app -m my_workflows",
+    )
+    parser.add_argument(
+        "--db",
+        "-d",
+        type=str,
+        default="demo.db",
+        help="Database file path (default: demo.db)",
+    )
+    parser.add_argument(
+        "--port",
+        "-p",
+        type=int,
+        default=8080,
+        help="Port number for the web server (default: 8080)",
+    )
+    parser.add_argument(
+        "--import-module",
+        "-m",
+        type=str,
+        action="append",
+        dest="import_modules",
+        help="Python module to import (can be specified multiple times). "
+        "Workflows decorated with @workflow in these modules will be available in the viewer. "
+        "Example: --import-module demo_app",
+    )
+    return parser.parse_args()
+
+
+def main() -> None:
+    """Main entry point for the Edda Viewer CLI."""
+    # Step 1: Check if nicegui is installed
+    try:
+        import nicegui  # type: ignore[import-not-found] # noqa: F401
+    except ImportError:
+        print("Error: Viewer dependencies are not installed.", file=sys.stderr)
+        print("", file=sys.stderr)
+        print("Please install viewer dependencies with one of:", file=sys.stderr)
+        print("  uv sync --extra viewer", file=sys.stderr)
+        print("  uv pip install edda-framework[viewer]", file=sys.stderr)
+        print("  pip install edda-framework[viewer]", file=sys.stderr)
+        sys.exit(1)
+
+    # Step 2: Parse arguments
+    args = parse_args()
+
+    # Step 3: Import specified modules
+    if args.import_modules:
+        print("Importing modules...")
+        for module_name in args.import_modules:
+            try:
+                __import__(module_name)
+                print(f"  ✓ Imported: {module_name}")
+            except ImportError as e:
+                print(
+                    f"  ✗ Warning: Could not import module '{module_name}': {e}",
+                    file=sys.stderr,
+                )
+                print("    Continuing without this module...", file=sys.stderr)
+        print()
+
+    # Step 4: Determine database URL (prioritize environment variable)
+    db_url = os.getenv("EDDA_DB_URL")
+    if db_url is None:
+        # Fallback to --db option (SQLite)
+        db_url = f"sqlite:///{args.db}"
+        db_display = args.db
+    else:
+        # Hide password in display
+        db_display = db_url.split("@")[-1] if "@" in db_url else db_url
+
+    # Create Edda app (for database access only)
+    edda_app = EddaApp(
+        service_name="viewer",
+        db_url=db_url,
+    )
+
+    # Step 5: Start the viewer (storage will be initialized on startup)
+    print("Starting Edda Viewer...")
+    print(f"  Database: {db_display}")
+    print(f"  Port: {args.port}")
+    print(f"  URL: http://localhost:{args.port}")
+    print()
+
+    start_viewer(edda_app, port=args.port)
+
+
+if __name__ == "__main__":
+    main()
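Per the --import-module help text, the viewer lists whatever @workflow functions get registered when the named modules are imported. A hypothetical my_workflows.py to pass as `edda-viewer -m my_workflows` (the decorator and WorkflowContext come from the edda package as used in the llamaindex __init__ example above; the workflow body is purely illustrative):

# my_workflows.py - hypothetical module for `edda-viewer -m my_workflows`.
# Importing it registers hello_workflow so the viewer can display it.
from edda import WorkflowContext, workflow


@workflow
async def hello_workflow(ctx: WorkflowContext, name: str) -> dict:
    return {"greeting": f"hello, {name}"}

Note that main() checks EDDA_DB_URL before falling back to --db, so pointing the viewer at a different database is a matter of exporting that variable before launch.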