edda-framework 0.9.0-py3-none-any.whl → 0.10.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- edda/app.py +428 -56
- edda/context.py +8 -0
- edda/outbox/relayer.py +21 -2
- edda/storage/__init__.py +8 -0
- edda/storage/notify_base.py +162 -0
- edda/storage/pg_notify.py +325 -0
- edda/storage/protocol.py +9 -1
- edda/storage/sqlalchemy_storage.py +193 -13
- edda/viewer_ui/app.py +26 -0
- edda/viewer_ui/data_service.py +4 -0
- {edda_framework-0.9.0.dist-info → edda_framework-0.10.0.dist-info}/METADATA +13 -1
- {edda_framework-0.9.0.dist-info → edda_framework-0.10.0.dist-info}/RECORD +15 -13
- {edda_framework-0.9.0.dist-info → edda_framework-0.10.0.dist-info}/WHEEL +0 -0
- {edda_framework-0.9.0.dist-info → edda_framework-0.10.0.dist-info}/entry_points.txt +0 -0
- {edda_framework-0.9.0.dist-info → edda_framework-0.10.0.dist-info}/licenses/LICENSE +0 -0
edda/outbox/relayer.py
CHANGED
@@ -48,6 +48,7 @@ class OutboxRelayer:
         max_retries: int = 3,
         batch_size: int = 10,
         max_age_hours: float | None = None,
+        wake_event: asyncio.Event | None = None,
     ):
         """
         Initialize the Outbox Relayer.
@@ -60,6 +61,8 @@ class OutboxRelayer:
             batch_size: Number of events to process per batch (default: 10)
             max_age_hours: Maximum event age in hours before expiration (default: None, disabled)
                 Events older than this are marked as 'expired' and won't be retried.
+            wake_event: Optional asyncio.Event to wake the relayer immediately when new
+                events are added. Used with PostgreSQL LISTEN/NOTIFY integration.
         """
         self.storage = storage
         self.broker_url = broker_url
@@ -67,6 +70,7 @@ class OutboxRelayer:
         self.max_retries = max_retries
         self.batch_size = batch_size
         self.max_age_hours = max_age_hours
+        self._wake_event = wake_event

         self._task: asyncio.Task[Any] | None = None
         self._running = False
@@ -120,6 +124,8 @@ class OutboxRelayer:
         Main polling loop.

         Continuously polls the database for pending events and publishes them.
+        When wake_event is provided (PostgreSQL NOTIFY integration), wakes up
+        immediately on notification, otherwise falls back to poll_interval.
         """
         while self._running:
             try:
@@ -127,8 +133,21 @@ class OutboxRelayer:
             except Exception as e:
                 logger.error(f"Error in outbox relayer poll loop: {e}")

-            # Wait before next poll
-            await asyncio.sleep(self.poll_interval)
+            # Wait before next poll (with optional NOTIFY wake)
+            if self._wake_event is not None:
+                try:
+                    await asyncio.wait_for(
+                        self._wake_event.wait(),
+                        timeout=self.poll_interval,
+                    )
+                    # Clear the event for next notification
+                    self._wake_event.clear()
+                    logger.debug("Outbox relayer woken by NOTIFY")
+                except TimeoutError:
+                    # Fallback polling timeout reached
+                    pass
+            else:
+                await asyncio.sleep(self.poll_interval)

     async def _poll_and_publish(self) -> None:
         """
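The relayer itself never touches LISTEN/NOTIFY; it only waits on the shared event. A minimal wiring sketch of how an application might connect the two pieces added in this release (the channel name "edda_outbox", the relayer's start() call, and the storage/broker_url arguments are assumptions inferred from the attributes visible in this diff, not taken from the package):

```python
import asyncio

from edda.outbox.relayer import OutboxRelayer
from edda.storage import create_notify_listener


async def run_outbox(storage, db_url: str, broker_url: str) -> None:
    wake = asyncio.Event()

    async def on_outbox_notify(_payload: str) -> None:
        # Any payload means "new outbox rows exist": wake the poll loop now.
        wake.set()

    listener = create_notify_listener(db_url)
    await listener.start()
    await listener.subscribe("edda_outbox", on_outbox_notify)  # hypothetical channel name

    relayer = OutboxRelayer(
        storage=storage,
        broker_url=broker_url,
        wake_event=wake,  # without NOTIFY, the loop still polls every poll_interval
    )
    await relayer.start()  # assumes a start() coroutine that launches the poll loop
```

On SQLite or MySQL the same wiring keeps working: create_notify_listener() returns the no-op listener, wake.set() is never called, and the relayer degrades to plain interval polling.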
edda/storage/__init__.py
CHANGED
@@ -1,9 +1,17 @@
 """Storage layer for Edda framework."""

+from edda.storage.notify_base import (
+    NoopNotifyListener,
+    NotifyProtocol,
+    create_notify_listener,
+)
 from edda.storage.protocol import StorageProtocol
 from edda.storage.sqlalchemy_storage import SQLAlchemyStorage

 __all__ = [
     "StorageProtocol",
     "SQLAlchemyStorage",
+    "NotifyProtocol",
+    "NoopNotifyListener",
+    "create_notify_listener",
 ]
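The notification API is re-exported at the package level, so application code can import it from edda.storage without reaching into the submodules. Illustrative usage (the SQLite URL is a placeholder):

```python
from edda.storage import NoopNotifyListener, NotifyProtocol, create_notify_listener

listener = create_notify_listener("sqlite+aiosqlite:///edda.db")  # placeholder URL

# NotifyProtocol is declared @runtime_checkable (see notify_base.py below),
# so even the fallback listener satisfies an isinstance() check.
assert isinstance(listener, NotifyProtocol)
assert isinstance(listener, NoopNotifyListener)
```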
edda/storage/notify_base.py
ADDED

@@ -0,0 +1,162 @@
+"""Base classes and protocols for notification systems.
+
+This module defines the NotifyProtocol interface and provides a NoopNotifyListener
+implementation for databases that don't support LISTEN/NOTIFY (SQLite, MySQL).
+"""
+
+from __future__ import annotations
+
+import logging
+from collections.abc import Awaitable, Callable
+from typing import Protocol, runtime_checkable
+
+logger = logging.getLogger(__name__)
+
+
+NotifyCallback = Callable[[str], Awaitable[None]]
+"""Type alias for notification callback functions.
+
+The callback receives the payload string from the notification.
+"""
+
+
+@runtime_checkable
+class NotifyProtocol(Protocol):
+    """Protocol for notification systems.
+
+    This protocol defines the interface for LISTEN/NOTIFY style notification
+    systems. Implementations should handle:
+    - Connection management with automatic reconnection
+    - Channel subscription/unsubscription
+    - Callback dispatch on notification receipt
+    """
+
+    async def start(self) -> None:
+        """Start the notification listener.
+
+        Establishes the connection and begins listening for notifications.
+        Should be called before any subscribe() calls.
+        """
+        ...
+
+    async def stop(self) -> None:
+        """Stop the notification listener.
+
+        Closes the connection and cleans up resources.
+        All subscriptions are automatically removed.
+        """
+        ...
+
+    async def subscribe(self, channel: str, callback: NotifyCallback) -> None:
+        """Subscribe to notifications on a channel.
+
+        Args:
+            channel: The channel name to listen on.
+            callback: Async function called when a notification arrives.
+                Receives the payload string as its argument.
+        """
+        ...
+
+    async def unsubscribe(self, channel: str) -> None:
+        """Unsubscribe from notifications on a channel.
+
+        Args:
+            channel: The channel name to stop listening on.
+        """
+        ...
+
+    async def notify(self, channel: str, payload: str) -> None:
+        """Send a notification on a channel.
+
+        Args:
+            channel: The channel name to send the notification on.
+            payload: The payload string (typically JSON, max ~7500 bytes for PostgreSQL).
+        """
+        ...
+
+    @property
+    def is_connected(self) -> bool:
+        """Check if the listener is currently connected."""
+        ...
+
+
+class NoopNotifyListener:
+    """No-op implementation of NotifyProtocol for SQLite/MySQL.
+
+    This implementation does nothing - all methods are no-ops.
+    When using SQLite or MySQL, the application falls back to polling-based
+    updates with the default intervals (no reduction in polling frequency).
+    """
+
+    def __init__(self) -> None:
+        """Initialize the no-op listener."""
+        self._connected = False
+
+    async def start(self) -> None:
+        """No-op start - does nothing."""
+        self._connected = True
+        logger.debug("NoopNotifyListener started (no-op)")
+
+    async def stop(self) -> None:
+        """No-op stop - does nothing."""
+        self._connected = False
+        logger.debug("NoopNotifyListener stopped (no-op)")
+
+    async def subscribe(self, channel: str, _callback: NotifyCallback) -> None:
+        """No-op subscribe - callbacks will never be called.
+
+        Args:
+            channel: Ignored.
+            _callback: Ignored - will never be called.
+        """
+        logger.debug(f"NoopNotifyListener: subscribe to '{channel}' (no-op)")
+
+    async def unsubscribe(self, channel: str) -> None:
+        """No-op unsubscribe.
+
+        Args:
+            channel: Ignored.
+        """
+        logger.debug(f"NoopNotifyListener: unsubscribe from '{channel}' (no-op)")
+
+    async def notify(self, channel: str, payload: str) -> None:
+        """No-op notify - does nothing.
+
+        Args:
+            channel: Ignored.
+            payload: Ignored.
+        """
+        # Intentionally silent - this is called frequently during normal operation
+        pass
+
+    @property
+    def is_connected(self) -> bool:
+        """Always returns the internal connected state."""
+        return self._connected
+
+
+def create_notify_listener(db_url: str) -> NotifyProtocol:
+    """Create appropriate notify listener based on database type.
+
+    Args:
+        db_url: Database connection URL.
+
+    Returns:
+        PostgresNotifyListener for PostgreSQL, NoopNotifyListener for others.
+
+    Example:
+        >>> listener = create_notify_listener("postgresql://localhost/db")
+        >>> await listener.start()
+        >>> await listener.subscribe("my_channel", handle_notification)
+    """
+    if db_url.startswith("postgresql"):
+        # Import here to avoid requiring asyncpg when not using PostgreSQL
+        from edda.storage.pg_notify import PostgresNotifyListener
+
+        return PostgresNotifyListener(dsn=db_url)
+    else:
+        logger.info(
+            "Database URL does not start with 'postgresql', "
+            "using NoopNotifyListener (polling-based updates)"
+        )
+        return NoopNotifyListener()
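A short sketch of the factory and callback contract defined above. The DSN and channel name are placeholders; with a non-PostgreSQL URL the same code runs against the no-op listener and the callback simply never fires:

```python
import asyncio

from edda.storage import create_notify_listener


async def main() -> None:
    # NotifyCallback: an async callable taking the payload string.
    async def on_notification(payload: str) -> None:
        print(f"notification payload: {payload}")

    listener = create_notify_listener("postgresql://localhost/edda")  # placeholder DSN
    await listener.start()
    await listener.subscribe("demo_channel", on_notification)  # placeholder channel

    await listener.notify("demo_channel", '{"hello": "world"}')
    await asyncio.sleep(0.1)  # callbacks are dispatched as separate tasks
    await listener.stop()


asyncio.run(main())
```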
edda/storage/pg_notify.py
ADDED

@@ -0,0 +1,325 @@
+"""PostgreSQL LISTEN/NOTIFY implementation using asyncpg.
+
+This module provides a dedicated listener for PostgreSQL's LISTEN/NOTIFY
+mechanism, enabling near-instant notification delivery for workflow events.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import json
+import logging
+from collections.abc import Awaitable, Callable
+from contextlib import suppress
+from typing import Any
+
+logger = logging.getLogger(__name__)
+
+
+NotifyCallback = Callable[[str], Awaitable[None]]
+
+
+class PostgresNotifyListener:
+    """PostgreSQL LISTEN/NOTIFY listener using asyncpg.
+
+    This class maintains a dedicated connection for LISTEN/NOTIFY operations.
+    It provides:
+    - Automatic reconnection on connection loss
+    - Channel subscription management
+    - Callback dispatch for notifications
+
+    Example:
+        >>> listener = PostgresNotifyListener(dsn="postgresql://localhost/db")
+        >>> await listener.start()
+        >>> await listener.subscribe("my_channel", handle_notification)
+        >>> # ... later
+        >>> await listener.stop()
+    """
+
+    def __init__(
+        self,
+        dsn: str,
+        reconnect_interval: float = 5.0,
+        max_reconnect_attempts: int | None = None,
+    ) -> None:
+        """Initialize the PostgreSQL notify listener.
+
+        Args:
+            dsn: PostgreSQL connection string (postgresql://user:pass@host/db).
+            reconnect_interval: Seconds to wait between reconnection attempts.
+            max_reconnect_attempts: Maximum number of reconnection attempts.
+                None means unlimited.
+        """
+        self._dsn = dsn
+        self._reconnect_interval = reconnect_interval
+        self._max_reconnect_attempts = max_reconnect_attempts
+
+        self._connection: Any = None  # asyncpg.Connection
+        self._callbacks: dict[str, list[NotifyCallback]] = {}
+        self._channel_handlers: dict[str, Callable[..., None]] = {}
+        self._running = False
+        self._reconnect_task: asyncio.Task[None] | None = None
+        self._lock = asyncio.Lock()
+
+    async def start(self) -> None:
+        """Start the notification listener.
+
+        Establishes the connection and begins listening for notifications.
+        Starts the automatic reconnection task.
+
+        Raises:
+            ImportError: If asyncpg is not installed.
+        """
+        if self._running:
+            logger.warning("PostgresNotifyListener already running")
+            return
+
+        self._running = True
+        await self._establish_connection()
+
+        # Start reconnection monitor
+        self._reconnect_task = asyncio.create_task(self._reconnect_loop())
+        logger.info("PostgresNotifyListener started")
+
+    async def stop(self) -> None:
+        """Stop the notification listener.
+
+        Closes the connection and stops the reconnection task.
+        """
+        self._running = False
+
+        # Cancel reconnection task
+        if self._reconnect_task is not None:
+            self._reconnect_task.cancel()
+            with suppress(asyncio.CancelledError):
+                await self._reconnect_task
+            self._reconnect_task = None
+
+        # Close connection
+        await self._close_connection()
+        self._callbacks.clear()
+        logger.info("PostgresNotifyListener stopped")
+
+    async def subscribe(self, channel: str, callback: NotifyCallback) -> None:
+        """Subscribe to notifications on a channel.
+
+        Args:
+            channel: The PostgreSQL channel name to listen on.
+            callback: Async function called when a notification arrives.
+
+        Note:
+            Channel names must be valid PostgreSQL identifiers (max 63 chars).
+            Multiple callbacks can be registered for the same channel.
+        """
+        async with self._lock:
+            is_new_channel = channel not in self._callbacks
+
+            if channel not in self._callbacks:
+                self._callbacks[channel] = []
+            self._callbacks[channel].append(callback)
+
+            # Register listener if this is a new channel and we're connected
+            if is_new_channel and self._connection is not None:
+                try:
+                    await self._connection.add_listener(
+                        channel, self._create_notification_handler(channel)
+                    )
+                    logger.debug(f"Subscribed to channel: {channel}")
+                except Exception as e:
+                    logger.error(f"Failed to LISTEN on channel {channel}: {e}")
+
+    async def unsubscribe(self, channel: str) -> None:
+        """Unsubscribe from notifications on a channel.
+
+        Args:
+            channel: The PostgreSQL channel name to stop listening on.
+        """
+        async with self._lock:
+            if channel in self._callbacks:
+                del self._callbacks[channel]
+
+            # Remove listener if we're connected
+            if self._connection is not None:
+                try:
+                    await self._connection.remove_listener(
+                        channel, self._create_notification_handler(channel)
+                    )
+                    logger.debug(f"Unsubscribed from channel: {channel}")
+                except Exception as e:
+                    logger.error(f"Failed to UNLISTEN on channel {channel}: {e}")
+
+    async def notify(self, channel: str, payload: str) -> None:
+        """Send a notification on a channel.
+
+        Args:
+            channel: The PostgreSQL channel name.
+            payload: The payload string (max ~7500 bytes recommended).
+
+        Note:
+            This uses the existing connection pool from SQLAlchemy,
+            not the dedicated listener connection.
+        """
+        if self._connection is None:
+            logger.warning("Cannot send NOTIFY: not connected")
+            return
+
+        try:
+            # Use pg_notify function to properly escape the payload
+            await self._connection.execute("SELECT pg_notify($1, $2)", channel, payload)
+        except Exception as e:
+            logger.warning(f"Failed to send NOTIFY on channel {channel}: {e}")
+
+    @property
+    def is_connected(self) -> bool:
+        """Check if the listener is currently connected."""
+        return self._connection is not None and not self._connection.is_closed()
+
+    async def _establish_connection(self) -> None:
+        """Establish connection to PostgreSQL."""
+        try:
+            import asyncpg
+        except ImportError as e:
+            raise ImportError(
+                "asyncpg is required for PostgreSQL LISTEN/NOTIFY support. "
+                "Install it with: pip install edda[postgres-notify]"
+            ) from e
+
+        try:
+            self._connection = await asyncpg.connect(self._dsn)
+
+            # Re-subscribe to all channels (this also registers listeners)
+            await self._resubscribe_all()
+
+            logger.info("PostgresNotifyListener connected to database")
+        except Exception as e:
+            logger.error(f"Failed to connect to PostgreSQL: {e}")
+            self._connection = None
+            raise
+
+    async def _close_connection(self) -> None:
+        """Close the database connection."""
+        if self._connection is not None:
+            try:
+                await self._connection.close()
+            except Exception as e:
+                logger.warning(f"Error closing connection: {e}")
+            finally:
+                self._connection = None
+
+    async def _resubscribe_all(self) -> None:
+        """Re-subscribe to all registered channels after reconnection."""
+        if self._connection is None:
+            return
+
+        for channel in self._callbacks:
+            try:
+                # Register listener for this channel
+                await self._connection.add_listener(
+                    channel, self._create_notification_handler(channel)
+                )
+                logger.debug(f"Re-subscribed to channel: {channel}")
+            except Exception as e:
+                logger.error(f"Failed to re-subscribe to channel {channel}: {e}")
+
+    def _create_notification_handler(self, channel: str) -> Callable[..., None]:
+        """Create or retrieve a notification handler for a channel.
+
+        Args:
+            channel: The channel name.
+
+        Returns:
+            A handler function that can be passed to add_listener/remove_listener.
+        """
+        if channel not in self._channel_handlers:
+
+            def handler(_connection: Any, _pid: int, ch: str, payload: str) -> None:
+                """Handle incoming notification from PostgreSQL."""
+                callbacks = self._callbacks.get(ch, [])
+                for callback in callbacks:
+                    asyncio.create_task(self._safe_callback(callback, payload, ch))
+
+            self._channel_handlers[channel] = handler
+
+        return self._channel_handlers[channel]
+
+    async def _safe_callback(self, callback: NotifyCallback, payload: str, channel: str) -> None:
+        """Execute callback with error handling."""
+        try:
+            await callback(payload)
+        except Exception as e:
+            logger.error(
+                f"Error in notification callback for channel {channel}: {e}",
+                exc_info=True,
+            )
+
+    async def _reconnect_loop(self) -> None:
+        """Monitor connection and reconnect on failure."""
+        attempt = 0
+
+        with suppress(asyncio.CancelledError):
+            while self._running:
+                await asyncio.sleep(1)  # Check every second
+
+                if self._connection is None or self._connection.is_closed():
+                    attempt += 1
+                    if (
+                        self._max_reconnect_attempts is not None
+                        and attempt > self._max_reconnect_attempts
+                    ):
+                        logger.error(
+                            f"Max reconnection attempts ({self._max_reconnect_attempts}) "
+                            "exceeded, giving up"
+                        )
+                        break
+
+                    logger.info(
+                        f"Connection lost, attempting reconnection " f"(attempt {attempt})..."
+                    )
+
+                    try:
+                        await self._close_connection()
+                        await self._establish_connection()
+                        attempt = 0  # Reset on success
+                        logger.info("Reconnection successful")
+                    except Exception as e:
+                        logger.error(
+                            f"Reconnection failed: {e}, " f"retrying in {self._reconnect_interval}s"
+                        )
+                        await asyncio.sleep(self._reconnect_interval)
+
+
+def get_notify_channel_for_message(channel: str) -> str:
+    """Convert Edda channel name to PostgreSQL NOTIFY channel.
+
+    Uses a hash to ensure valid PostgreSQL identifier (max 63 chars).
+
+    Args:
+        channel: The Edda channel name.
+
+    Returns:
+        PostgreSQL-safe channel name.
+    """
+    import hashlib
+
+    h = hashlib.sha256(channel.encode()).hexdigest()[:16]
+    return f"edda_msg_{h}"
+
+
+def make_notify_payload(data: dict[str, Any]) -> str:
+    """Create JSON payload for NOTIFY.
+
+    Args:
+        data: Dictionary to serialize as JSON.
+
+    Returns:
+        JSON string (kept under 7500 bytes for PostgreSQL safety).
+    """
+    payload = json.dumps(data, separators=(",", ":"))  # Compact JSON
+    if len(payload) > 7500:
+        logger.warning(
+            f"NOTIFY payload exceeds recommended size " f"({len(payload)} > 7500 bytes), truncating"
+        )
+        # For safety, just include essential fields
+        minimal_data = {k: v for k, v in data.items() if k in ("wf_id", "ts")}
+        payload = json.dumps(minimal_data, separators=(",", ":"))
+    return payload
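The two module-level helpers at the end of pg_notify.py deal with PostgreSQL's practical limits: channel names are hashed to stay within the 63-character identifier limit, and oversized payloads are reduced to a minimal field set. A short illustration (the channel name and payload fields are arbitrary):

```python
from edda.storage.pg_notify import get_notify_channel_for_message, make_notify_payload

# Arbitrary Edda channel names become short, identifier-safe NOTIFY channels.
print(get_notify_channel_for_message("workflow:order-processing:events"))
# -> "edda_msg_" followed by 16 hex characters of the SHA-256 digest

# Compact JSON; anything over ~7500 bytes is trimmed to the essential fields.
big = {"wf_id": "wf-123", "ts": 1700000000, "detail": "x" * 10_000}
print(len(make_notify_payload(big)))  # small again after the size warning
```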
edda/storage/protocol.py
CHANGED
@@ -262,6 +262,7 @@ class StorageProtocol(Protocol):
         instance_id_filter: str | None = None,
         started_after: datetime | None = None,
         started_before: datetime | None = None,
+        input_filters: dict[str, Any] | None = None,
     ) -> dict[str, Any]:
         """
         List workflow instances with cursor-based pagination and filtering.
@@ -277,6 +278,9 @@
             instance_id_filter: Optional instance ID filter (partial match, case-insensitive)
             started_after: Filter instances started after this datetime (inclusive)
             started_before: Filter instances started before this datetime (inclusive)
+            input_filters: Filter by input data values. Keys are JSON paths
+                (e.g., "order_id" or "customer.email"), values are expected
+                values (exact match). All filters are AND-combined.

         Returns:
             Dictionary containing:
@@ -860,7 +864,7 @@ class StorageProtocol(Protocol):
     # Workflow Resumption Methods
     # -------------------------------------------------------------------------

-    async def find_resumable_workflows(self) -> list[dict[str, Any]]:
+    async def find_resumable_workflows(self, limit: int | None = None) -> list[dict[str, Any]]:
         """
         Find workflows that are ready to be resumed.

@@ -873,6 +877,10 @@
         This allows immediate resumption after message delivery rather than
         waiting for the stale lock cleanup cycle (60+ seconds).

+        Args:
+            limit: Optional maximum number of workflows to return.
+                If None, returns all resumable workflows.
+
         Returns:
             List of resumable workflows.
             Each item contains: instance_id, workflow_name