sqlnotify 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sqlnotify/__init__.py ADDED
@@ -0,0 +1,14 @@
1
"""Public API surface for the sqlnotify package."""

from importlib.metadata import version

from .constants import PACKAGE_NAME
from .notifiers import Notifier
from .types import ChangeEvent, FilterOnParams, Operation

# Resolve the installed distribution's version once at import time.
__version__ = version(PACKAGE_NAME)

# Names exported by `from sqlnotify import *`.
__all__ = ["ChangeEvent", "FilterOnParams", "Notifier", "Operation"]
File without changes
@@ -0,0 +1,40 @@
1
+ from contextlib import asynccontextmanager
2
+
3
+ from ..notifiers.notifier import Notifier
4
+
5
+
6
@asynccontextmanager
async def sqlnotify_lifespan(notifier: Notifier):
    """
    Lifespan context manager for FastAPI.

    Automatically detects whether the notifier is using an async or sync engine
    and uses the appropriate methods (astart/astop for async, start/stop for sync).

    Examples:

        from fastapi import FastAPI  # Or any ASGI framework that supports lifespan events
        from contextlib import asynccontextmanager

        notifier = Notifier(...)

        @asynccontextmanager
        async def lifespan(app: FastAPI):
            async with sqlnotify_lifespan(notifier):
                yield

        app = FastAPI(lifespan=lifespan)
    """

    # Snapshot the mode once so startup and shutdown always take the same branch.
    is_async = notifier.async_mode

    if is_async:
        await notifier.astart()
    else:
        notifier.start()
    try:
        yield
    finally:
        # Mirror the startup path on shutdown. If the start call above raised,
        # this try/finally was never entered, so no stop call is attempted --
        # exactly matching the original per-branch try/finally behavior.
        if is_async:
            await notifier.astop()
        else:
            notifier.stop()
sqlnotify/constants.py ADDED
@@ -0,0 +1,9 @@
1
# Distribution / package name, used for metadata lookups and object naming.
PACKAGE_NAME = "sqlnotify"

# PostgreSQL NOTIFY payload limit (8000 - 1 for terminator).
MAX_SQLNOTIFY_PAYLOAD_BYTES = 7999

# PostgreSQL identifier limit (63 bytes).
# NOTE: this name contains a typo ("IDENTIFER"); it is kept as-is for
# backward compatibility with existing importers.
MAX_SQLNOTIFY_IDENTIFER_BYTES = 63

# Correctly spelled alias for MAX_SQLNOTIFY_IDENTIFER_BYTES; prefer this name.
MAX_SQLNOTIFY_IDENTIFIER_BYTES = MAX_SQLNOTIFY_IDENTIFER_BYTES

# Limit extra columns to help stay within the payload size limit. This is not
# a hard guarantee since per-column data size can vary greatly.
MAX_SQLNOTIFY_EXTRA_COLUMNS = 5

# Max retries for any event in sqlnotify.
MAX_SQLNOTIFY_EVENT_RETRIES = 3
@@ -0,0 +1,12 @@
1
"""Database dialect implementations and helpers for selecting one per engine."""

from .base import BaseDialect
from .postgresql import PostgreSQLDialect
from .sqlite import SQLiteDialect
from .utils import detect_dialect_name, get_dialect_for_engine

# Star-import surface; element order preserved from the original definition.
__all__ = [
    "BaseDialect",
    "PostgreSQLDialect",
    "SQLiteDialect",
    "get_dialect_for_engine",
    "detect_dialect_name",
]
@@ -0,0 +1,183 @@
1
+ import logging
2
+ from abc import ABC, abstractmethod
3
+ from collections.abc import Callable
4
+ from typing import Any
5
+
6
+ from sqlalchemy.engine import Engine
7
+ from sqlalchemy.ext.asyncio import AsyncEngine
8
+
9
+ from ..watcher import Watcher
10
+
11
+
12
class BaseDialect(ABC):
    """
    Abstract base class for database dialect implementations.

    Concrete dialects implement trigger creation, notification delivery, and
    overflow-table handling for payloads exceeding the backend's size limit.
    Each operation has an async and a sync variant so the owner can run
    against either an AsyncEngine or a plain Engine.
    """

    def __init__(
        self,
        async_engine: AsyncEngine | None,
        sync_engine: Engine | None,
        logger: logging.Logger | None = None,
        revoke_on_model_change: bool = True,
    ):
        """
        Args:
            async_engine (AsyncEngine | None): Async engine, or None when running sync.
            sync_engine (Engine | None): Sync engine, or None when running async.
            logger (logging.Logger | None): Optional logger; None disables logging.
            revoke_on_model_change (bool): Whether triggers should be revoked when
                the watched model definition changes.
        """
        self._async_engine = async_engine
        self._sync_engine = sync_engine
        self._logger = logger
        # Driver-specific listening connection; concrete type varies per dialect.
        self._listen_conn: Any | None = None
        self.revoke_on_model_change = revoke_on_model_change

    @property
    @abstractmethod
    def name(self) -> str:
        """
        Return the dialect name (e.g., 'postgresql', 'mysql', 'sqlite')
        """
        pass

    @abstractmethod
    async def table_exists_async(self, schema: str, table: str) -> bool:
        """
        Check if a table exists in the database asynchronously
        """
        pass

    @abstractmethod
    def table_exists_sync(self, schema: str, table: str) -> bool:
        """
        Check if a table exists in the database synchronously
        """
        pass

    @abstractmethod
    async def create_trigger_async(self, watcher: Watcher) -> None:
        """
        Create database trigger asynchronously
        """
        pass

    @abstractmethod
    def create_trigger_sync(self, watcher: Watcher) -> None:
        """
        Create database trigger synchronously
        """
        pass

    @abstractmethod
    async def listen_async(
        self,
        watchers: list[Watcher],
        running_check: Callable[[], bool],
        handle_notification: Callable[[str, str], Any],
        database_url: str,
        listener_ready: Any | None = None,
    ) -> None:
        """
        Listen for database notifications asynchronously

        Args:
            watchers (list[Watcher]): Watcher configurations to listen for
            running_check (Callable[[], bool]): Returns False when listening should stop
            handle_notification (Callable[[str, str], Any]): Callback for each notification
            database_url (str): Connection URL for the listening connection
            listener_ready (Any | None): Optional signal set once listening has started
        """
        pass

    @abstractmethod
    async def cleanup_async(self, watchers: list[Watcher]) -> None:
        """
        Remove all triggers and functions asynchronously
        """
        pass

    @abstractmethod
    def cleanup_sync(self, watchers: list[Watcher]) -> None:
        """
        Remove all triggers and functions synchronously
        """
        pass

    @abstractmethod
    async def notify_async(
        self,
        watcher: Watcher,
        payload: dict[str, Any],
        use_overflow_table: bool = False,
    ) -> None:
        """
        Send a notification asynchronously

        Args:
            watcher (Watcher): The watcher configuration
            payload (dict[str, Any]): The payload dict to send
            use_overflow_table (bool): If True, use overflow table for large payloads
        """
        pass

    @abstractmethod
    def notify_sync(
        self,
        watcher: Watcher,
        payload: dict[str, Any],
        use_overflow_table: bool = False,
    ) -> None:
        """
        Send a notification synchronously

        Args:
            watcher (Watcher): The watcher configuration
            payload (dict[str, Any]): The payload dict to send
            use_overflow_table (bool): If True, use overflow table for large payloads
        """
        pass

    @abstractmethod
    async def store_overflow_async(self, watcher: Watcher, payload_str: str) -> str:
        """
        Store large payload in overflow table asynchronously
        """
        pass

    @abstractmethod
    def store_overflow_sync(self, watcher: Watcher, payload_str: str) -> str:
        """
        Store large payload in overflow table synchronously
        """
        pass

    @abstractmethod
    async def fetch_overflow_async(self, watcher: Watcher, overflow_id: int) -> dict[str, Any] | None:
        """
        Fetch and consume overflow payload asynchronously
        """
        pass

    @abstractmethod
    def fetch_overflow_sync(self, watcher: Watcher, overflow_id: int) -> dict[str, Any] | None:
        """
        Fetch and consume overflow payload synchronously
        """
        pass

    @abstractmethod
    def build_watcher_sql(self, watcher: Watcher) -> tuple[str, str]:
        """
        Build SQL for trigger and function creation
        """
        pass

    @abstractmethod
    def create_overflow_table_sql(self, schema: str) -> str:
        """
        Generate SQL to create overflow table
        """
        pass

    async def stop_listening(self) -> None:
        """
        Clean up listening connection

        Best-effort: close errors are logged (when a logger is configured) and
        swallowed. The connection reference is always dropped -- even when
        close() raises -- so repeated calls never retry a dead connection.
        (Previously the reference was only cleared on a successful close.)
        """

        if self._listen_conn is None:
            return
        conn = self._listen_conn
        # Drop the reference first so a failing close() cannot leave a dead
        # connection behind for later calls to trip over.
        self._listen_conn = None
        try:
            await conn.close()
            if self._logger:
                self._logger.debug("Closed listen connection")
        except Exception as e:
            if self._logger:
                # Lazy %-formatting: the message is only rendered if DEBUG is enabled.
                self._logger.debug("Error closing listen connection: %s", e)