ry-pg-utils 1.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ry_pg_utils/__init__.py +0 -0
- ry_pg_utils/config.py +44 -0
- ry_pg_utils/connect.py +288 -0
- ry_pg_utils/dynamic_table.py +199 -0
- ry_pg_utils/ipc/__init__.py +0 -0
- ry_pg_utils/ipc/channels.py +14 -0
- ry_pg_utils/notify_trigger.py +346 -0
- ry_pg_utils/parse_args.py +15 -0
- ry_pg_utils/pb_types/__init__.py +0 -0
- ry_pg_utils/pb_types/database_pb2.py +38 -0
- ry_pg_utils/pb_types/database_pb2.pyi +156 -0
- ry_pg_utils/pb_types/py.typed +0 -0
- ry_pg_utils/postgres_info.py +47 -0
- ry_pg_utils/py.typed +0 -0
- ry_pg_utils/updater.py +181 -0
- ry_pg_utils-1.0.2.dist-info/METADATA +473 -0
- ry_pg_utils-1.0.2.dist-info/RECORD +20 -0
- ry_pg_utils-1.0.2.dist-info/WHEEL +5 -0
- ry_pg_utils-1.0.2.dist-info/licenses/LICENSE +21 -0
- ry_pg_utils-1.0.2.dist-info/top_level.txt +1 -0
@@ -0,0 +1,346 @@
|
|
1
|
+
import json
|
2
|
+
import threading
|
3
|
+
import time
|
4
|
+
from contextlib import contextmanager
|
5
|
+
from typing import Any, Callable, Dict, Generator, Iterator, List, Optional, Set
|
6
|
+
|
7
|
+
import psycopg2
|
8
|
+
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
|
9
|
+
from sqlalchemy import text
|
10
|
+
from sqlalchemy.engine import Engine
|
11
|
+
|
12
|
+
from ry_pg_utils.connect import get_engine
|
13
|
+
|
14
|
+
|
15
|
+
def create_notify_trigger(
    engine: Engine,
    table_name: str,
    channel_name: Optional[str] = None,
    trigger_name: Optional[str] = None,
    events: Optional[List[str]] = None,
    columns: Optional[List[str]] = None,
) -> None:
    """
    Create a notification trigger on a specified table.

    Args:
        engine: SQLAlchemy engine instance
        table_name: Name of the table to add the trigger to
        channel_name: Name of the notification channel (defaults to table_name)
        trigger_name: Name of the trigger (defaults to f"{table_name}_notify_trigger")
        events: List of events to trigger on (defaults to ['INSERT', 'UPDATE', 'DELETE'])
        columns: List of columns to include in the notification payload. If None, all columns.

    Raises:
        ValueError: If an event is not one of INSERT/UPDATE/DELETE, or a
            requested column does not exist on the table in the current schema.
    """
    if events is None:
        events = ["INSERT", "UPDATE", "DELETE"]

    # Validate events before touching the database.
    valid_events = {"INSERT", "UPDATE", "DELETE"}
    invalid_events = set(events) - valid_events
    if invalid_events:
        raise ValueError(f"Invalid events: {invalid_events}. Valid events are: {valid_events}")

    channel_name = channel_name or table_name
    trigger_name = trigger_name or f"{table_name}_notify_trigger"

    # Validate requested columns against information_schema; only the current
    # schema is consulted, so same-named tables in other schemas are ignored.
    if columns is not None:
        with engine.connect() as conn:
            result = conn.execute(
                text(
                    """
                    SELECT column_name
                    FROM information_schema.columns
                    WHERE table_name = :table_name
                    AND table_schema = current_schema()
                    """
                ),
                {"table_name": table_name},
            )
            available_columns = {row[0] for row in result}
            invalid_columns = set(columns) - available_columns
            if invalid_columns:
                raise ValueError(
                    f"Invalid columns: {invalid_columns}. "
                    f"Available columns are: {available_columns}"
                )

    # Build the JSON payload expression. The CASE picks OLD for DELETE and NEW
    # otherwise; plpgsql evaluates only the taken branch, so the record that is
    # unassigned for a given operation is never dereferenced.
    if columns:
        data_builder = (
            "json_build_object("
            + ", ".join(
                f"'{col}', CASE WHEN TG_OP = 'DELETE' THEN OLD.{col} ELSE NEW.{col} END"
                for col in columns
            )
            + ")"
        )
    else:
        data_builder = "row_to_json(CASE WHEN TG_OP = 'DELETE' THEN OLD ELSE NEW END)"

    # NOTE(review): table/trigger/column names are interpolated into the DDL
    # f-strings below (as quoted identifiers) rather than parameterized, so
    # callers must supply trusted identifiers. The :channel placeholder inside
    # the function body relies on the DBAPI driver substituting the bound value
    # client-side into the DDL text — confirm with the configured driver.
    trigger_function = f"""
    CREATE OR REPLACE FUNCTION "{trigger_name}_function"()
    RETURNS TRIGGER AS $$
    BEGIN
        PERFORM pg_notify(
            :channel,
            json_build_object(
                'table', TG_TABLE_NAME,
                'action', TG_OP,
                'data', {data_builder}
            )::text
        );
        RETURN CASE WHEN TG_OP = 'DELETE' THEN OLD ELSE NEW END;
    END;
    $$ LANGUAGE plpgsql;
    """

    drop_commands = f"""
    DROP TRIGGER IF EXISTS "{trigger_name}_insert" ON "{table_name}";
    DROP TRIGGER IF EXISTS "{trigger_name}_update" ON "{table_name}";
    DROP TRIGGER IF EXISTS "{trigger_name}_delete" ON "{table_name}";
    DROP FUNCTION IF EXISTS "{trigger_name}_function"();
    """

    # Drop then recreate inside one transaction so the triggers are replaced
    # atomically; one trigger is created per requested event.
    with engine.begin() as conn:
        conn.execute(text(drop_commands))
        conn.execute(text(trigger_function), {"channel": channel_name})
        for event in events:
            trigger_sql = f"""
            CREATE TRIGGER "{trigger_name}_{event.lower()}"
            AFTER {event} ON "{table_name}"
            FOR EACH ROW EXECUTE FUNCTION "{trigger_name}_function"();
            """
            conn.execute(text(trigger_sql))
|
115
|
+
|
116
|
+
|
117
|
+
def drop_notify_trigger(
    engine: Engine,
    table_name: str,
    trigger_name: Optional[str] = None,
) -> None:
    """
    Remove a previously installed notification trigger from a table.

    Deletes the per-event triggers (insert/update/delete) and the associated
    plpgsql function, if present, inside a single transaction.

    Args:
        engine: SQLAlchemy engine instance
        table_name: Name of the table the trigger is on
        trigger_name: Name of the trigger (defaults to f"{table_name}_notify_trigger")
    """
    if not trigger_name:
        trigger_name = f"{table_name}_notify_trigger"

    cleanup_sql = f"""
    DROP TRIGGER IF EXISTS "{trigger_name}_insert" ON "{table_name}";
    DROP TRIGGER IF EXISTS "{trigger_name}_update" ON "{table_name}";
    DROP TRIGGER IF EXISTS "{trigger_name}_delete" ON "{table_name}";
    DROP FUNCTION IF EXISTS "{trigger_name}_function"();
    """

    with engine.begin() as connection:
        connection.execute(text(cleanup_sql))
|
141
|
+
|
142
|
+
|
143
|
+
@contextmanager
def subscribe_to_notifications(
    engine: Engine,
    channel_name: str,
    callback: Optional[Callable[[Dict[str, Any]], None]] = None,
    timeout: float = 60.0,
) -> Iterator[Generator[Dict[str, Any], None, None]]:
    """
    Subscribe to PostgreSQL notifications from a channel. Clean shutdown on exit.

    If a callback is given, notifications are processed in a background thread.
    Otherwise, notifications can be consumed via the yielded generator.

    Args:
        engine: SQLAlchemy engine whose URL supplies the raw connection params.
        channel_name: Channel to LISTEN on (interpolated as a quoted identifier).
        callback: If given, invoked with each JSON-decoded notification payload.
        timeout: Maximum lifetime, in seconds, of the polling loop (measured
            from the generator's first iteration).
    """
    # LISTEN/NOTIFY needs a dedicated, autocommit psycopg2 connection rather
    # than a pooled SQLAlchemy connection.
    conn_params = engine.url.translate_connect_args()
    conn = psycopg2.connect(
        dbname=conn_params.get("database"),
        user=conn_params.get("username"),
        password=conn_params.get("password"),
        host=conn_params.get("host"),
        port=conn_params.get("port"),
    )
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    stop_flag = threading.Event()

    def notification_generator() -> Generator[Dict[str, Any], None, None]:
        # Polls until stop_flag is set or the timeout elapses. Payloads that
        # are not valid JSON are silently skipped.
        start_time = time.time()
        with conn.cursor() as cur:
            cur.execute(f'LISTEN "{channel_name}";')
            while not stop_flag.is_set() and (time.time() - start_time) < timeout:
                conn.poll()
                while conn.notifies:
                    notify = conn.notifies.pop()
                    try:
                        notification = json.loads(notify.payload)
                        if callback:
                            callback(notification)
                        else:
                            yield notification
                    except json.JSONDecodeError:
                        continue
                time.sleep(0.1)

    try:
        if callback:
            # In callback mode the generator never yields, so list() merely
            # drives the polling loop to completion on the background thread.
            thread = threading.Thread(target=lambda: list(notification_generator()), daemon=True)
            thread.start()
            # NOTE(review): the generator yielded here shares `conn` with the
            # background thread; iterating it would poll the same connection
            # from two threads concurrently — confirm this is intended.
            yield notification_generator()  # yield generator even in callback mode
            stop_flag.set()
            thread.join(timeout=1.0)
        else:
            yield notification_generator()
    finally:
        if not conn.closed:
            conn.close()
|
198
|
+
|
199
|
+
|
200
|
+
class NotificationListener:
    """
    A class to handle PostgreSQL notifications in the background.

    Example usage:
    ```python
    # Create a listener
    listener = NotificationListener(engine)

    # Create a listener for a specific table
    listener.create_listener(table_name="my_table", channel_name="my_channel")

    # Add a callback for a specific channel
    def handle_changes(notification):
        print(f"Received notification: {notification}")

    listener.add_callback("my_table", handle_changes)

    # Start listening in the background
    listener.start()

    # ... your main application code ...

    # When done, stop the listener
    listener.stop()
    ```
    """

    def __init__(self, db_name: str) -> None:
        """Store the database key and initialise thread-control state.

        Args:
            db_name: Key handed to ``get_engine`` to resolve the engine/URL.
        """
        self.db_name = db_name
        self._running = False  # guards double start()/stop()
        self._stop_flag = threading.Event()  # signals the listen loop to exit
        self._thread: Optional[threading.Thread] = None
        # channel name -> set of callbacks invoked per notification
        self._callbacks: Dict[str, Set[Callable[[Dict[str, Any]], None]]] = {}
        self._lock = threading.Lock()  # protects self._callbacks

    def add_callback(self, channel_name: str, callback: Callable[[Dict[str, Any]], None]) -> None:
        """Register *callback* for notifications on *channel_name*.

        NOTE(review): the listen loop snapshots the channel list when it
        connects; a channel added after start() is only LISTENed to after the
        loop reconnects — confirm this is acceptable for callers.
        """
        with self._lock:
            self._callbacks.setdefault(channel_name, set()).add(callback)

    def remove_callback(
        self, channel_name: str, callback: Callable[[Dict[str, Any]], None]
    ) -> None:
        """Unregister *callback*, dropping the channel entry once it is empty."""
        with self._lock:
            callbacks = self._callbacks.get(channel_name)
            if callbacks:
                callbacks.discard(callback)
                if not callbacks:
                    del self._callbacks[channel_name]

    def _process_notification(self, notify: Any) -> None:
        """Process a single notification and execute callbacks."""
        try:
            notification = json.loads(notify.payload)
            # Snapshot the callback set reference under the lock; non-JSON
            # payloads are ignored via the outer except.
            with self._lock:
                callbacks = self._callbacks.get(notify.channel, set())
            for cb in callbacks:
                try:
                    cb(notification)
                except Exception as e:  # pylint: disable=broad-exception-caught
                    # A failing callback must not take down the listen loop.
                    print(f"Error in callback for channel {notify.channel}: {e}")
        except json.JSONDecodeError:
            pass

    def _handle_connection(self, conn: Any, channels: List[str]) -> None:
        """Handle the active connection and process notifications.

        Issues LISTEN for each channel, then polls until the stop flag is set,
        draining ``conn.notifies`` and sleeping 100ms between polls.
        """
        with conn.cursor() as cur:
            for channel in channels:
                cur.execute(f'LISTEN "{channel}";')

            while not self._stop_flag.is_set():
                conn.poll()
                while conn.notifies:
                    notify = conn.notifies.pop()
                    self._process_notification(notify)
                time.sleep(0.1)

    def _create_connection(self, conn_params: Dict[str, Any]) -> Any:
        """Create and configure a new database connection."""
        conn = psycopg2.connect(
            dbname=conn_params.get("database"),
            user=conn_params.get("username"),
            password=conn_params.get("password"),
            host=conn_params.get("host"),
            port=conn_params.get("port"),
        )
        # Autocommit so LISTEN takes effect without an explicit transaction.
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        return conn

    def _listen_loop(self) -> None:
        """Main loop for listening to database notifications.

        Reconnects (after a 5s pause) on connection errors; the channel list
        is re-read from self._callbacks on each (re)connection.
        """
        engine = get_engine(self.db_name)
        conn_params = engine.url.translate_connect_args()

        while not self._stop_flag.is_set():
            conn = None
            try:
                conn = self._create_connection(conn_params)
                with self._lock:
                    channels = list(self._callbacks.keys())
                self._handle_connection(conn, channels)
            except (psycopg2.OperationalError, psycopg2.InterfaceError) as e:
                print(f"Listener connection error: {e}")
                time.sleep(5)
            finally:
                if conn and not conn.closed:
                    conn.close()

    def start(self) -> None:
        """Start the background listener thread (no-op if already running)."""
        if not self._running:
            self._stop_flag.clear()
            self._running = True
            self._thread = threading.Thread(target=self._listen_loop, daemon=True)
            self._thread.start()

    def stop(self) -> None:
        """Signal the listener to stop and join the thread (2s timeout)."""
        if self._running:
            self._stop_flag.set()
            if self._thread is not None:
                self._thread.join(timeout=2.0)
                self._thread = None
            self._running = False

    def create_listener(
        self,
        table_name: str,
        channel_name: str,
        columns: Optional[List[str]] = None,
        events: Optional[List[str]] = None,
    ) -> None:
        """Install the notify trigger for *table_name* publishing to *channel_name*."""
        engine = get_engine(self.db_name)
        create_notify_trigger(
            engine=engine,
            table_name=table_name,
            channel_name=channel_name,
            trigger_name=f"{table_name}_notify_trigger",
            events=events,
            columns=columns,
        )

    def remove_listener(self, table_name: str) -> None:
        """Drop the notify trigger previously installed for *table_name*."""
        engine = get_engine(self.db_name)
        drop_notify_trigger(
            engine=engine,
            table_name=table_name,
            trigger_name=f"{table_name}_notify_trigger",
        )
|
@@ -0,0 +1,15 @@
|
|
1
|
+
import argparse
|
2
|
+
|
3
|
+
from . import config
|
4
|
+
|
5
|
+
|
6
|
+
def add_postrgres_db_args(parser: argparse.ArgumentParser) -> None:
    """Register the standard Postgres connection options on *parser*.

    Adds a "postgres-options" argument group with host/port/db/user/password
    options, each defaulting to the value currently held in
    ``config.pg_config``, plus a ``--do-publish-db`` flag (default False).

    Args:
        parser: The argument parser to extend.
    """
    postgres_parser = parser.add_argument_group("postgres-options")
    postgres_parser.add_argument("--postgres-host", default=config.pg_config.postgres_host)
    postgres_parser.add_argument(
        "--postgres-port", type=int, default=config.pg_config.postgres_port
    )
    postgres_parser.add_argument("--postgres-db", default=config.pg_config.postgres_db)
    postgres_parser.add_argument("--postgres-user", default=config.pg_config.postgres_user)
    postgres_parser.add_argument("--postgres-password", default=config.pg_config.postgres_password)
    postgres_parser.add_argument("--do-publish-db", action="store_true", default=False)


# The public name above is misspelled ("postrgres"); keep it for backward
# compatibility and expose a correctly spelled alias for new callers.
add_postgres_db_args = add_postrgres_db_args
|
File without changes
|
@@ -0,0 +1,38 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: database.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2


# The bytes blob is the serialized FileDescriptorProto for database.proto;
# the message classes below are built from it at import time.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0e\x64\x61tabase.proto\x12\x08\x64\x61tabase\x1a\x1fgoogle/protobuf/timestamp.proto\"m\n\nPostgresPb\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0c\n\x04user\x18\x02 \x01(\t\x12\x10\n\x08password\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t\x12\x0c\n\x04port\x18\x05 \x01(\x05\x12\x11\n\tbackendId\x18\x06 \x01(\t\"f\n\x11PostgresMessagePb\x12)\n\x05utime\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12&\n\x08postgres\x18\x02 \x01(\x0b\x32\x14.database.PostgresPb\"\xb2\x01\n\x12\x44\x61tabaseSettingsPb\x12\x17\n\x0fprimaryDatabase\x18\x01 \x01(\x08\x12<\n\x08postgres\x18\x02 \x03(\x0b\x32*.database.DatabaseSettingsPb.PostgresEntry\x1a\x45\n\rPostgresEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.database.PostgresPb:\x02\x38\x01\"u\n\x10\x44\x61tabaseConfigPb\x12)\n\x05utime\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10\x64\x61tabaseSettings\x18\x02 \x01(\x0b\x32\x1c.database.DatabaseSettingsPb\"\x8e\x01\n\x16\x44\x61tabaseNotificationPb\x12)\n\x05utime\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x12\n\ntable_name\x18\x02 \x01(\t\x12\x14\n\x0c\x63hannel_name\x18\x03 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x04 \x01(\t\x12\x0f\n\x07payload\x18\x05 \x01(\tb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'database_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _DATABASESETTINGSPB_POSTGRESENTRY._options = None
  _DATABASESETTINGSPB_POSTGRESENTRY._serialized_options = b'8\001'
  _POSTGRESPB._serialized_start=61
  _POSTGRESPB._serialized_end=170
  _POSTGRESMESSAGEPB._serialized_start=172
  _POSTGRESMESSAGEPB._serialized_end=274
  _DATABASESETTINGSPB._serialized_start=277
  _DATABASESETTINGSPB._serialized_end=455
  _DATABASESETTINGSPB_POSTGRESENTRY._serialized_start=386
  _DATABASESETTINGSPB_POSTGRESENTRY._serialized_end=455
  _DATABASECONFIGPB._serialized_start=457
  _DATABASECONFIGPB._serialized_end=574
  _DATABASENOTIFICATIONPB._serialized_start=577
  _DATABASENOTIFICATIONPB._serialized_end=719
# @@protoc_insertion_point(module_scope)
|
@@ -0,0 +1,156 @@
|
|
1
|
+
"""
|
2
|
+
@generated by mypy-protobuf. Do not edit manually!
|
3
|
+
isort:skip_file
|
4
|
+
"""
|
5
|
+
|
6
|
+
import builtins
|
7
|
+
import collections.abc
|
8
|
+
import google.protobuf.descriptor
|
9
|
+
import google.protobuf.internal.containers
|
10
|
+
import google.protobuf.message
|
11
|
+
import google.protobuf.timestamp_pb2
|
12
|
+
import typing
|
13
|
+
|
14
|
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
15
|
+
|
16
|
+
@typing.final
class PostgresPb(google.protobuf.message.Message):
    """Message for Postgres"""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    DATABASE_FIELD_NUMBER: builtins.int
    USER_FIELD_NUMBER: builtins.int
    PASSWORD_FIELD_NUMBER: builtins.int
    HOST_FIELD_NUMBER: builtins.int
    PORT_FIELD_NUMBER: builtins.int
    BACKENDID_FIELD_NUMBER: builtins.int
    # Scalar proto3 fields describing one Postgres connection endpoint.
    database: builtins.str
    user: builtins.str
    password: builtins.str
    host: builtins.str
    port: builtins.int
    backendId: builtins.str
    def __init__(
        self,
        *,
        database: builtins.str = ...,
        user: builtins.str = ...,
        password: builtins.str = ...,
        host: builtins.str = ...,
        port: builtins.int = ...,
        backendId: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing.Literal["backendId", b"backendId", "database", b"database", "host", b"host", "password", b"password", "port", b"port", "user", b"user"]) -> None: ...

global___PostgresPb = PostgresPb
|
47
|
+
|
48
|
+
@typing.final
class PostgresMessagePb(google.protobuf.message.Message):
    """Timestamped wrapper carrying a PostgresPb payload."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    UTIME_FIELD_NUMBER: builtins.int
    POSTGRES_FIELD_NUMBER: builtins.int
    @property
    def utime(self) -> google.protobuf.timestamp_pb2.Timestamp: ...
    @property
    def postgres(self) -> global___PostgresPb: ...
    def __init__(
        self,
        *,
        utime: google.protobuf.timestamp_pb2.Timestamp | None = ...,
        postgres: global___PostgresPb | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing.Literal["postgres", b"postgres", "utime", b"utime"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing.Literal["postgres", b"postgres", "utime", b"utime"]) -> None: ...

global___PostgresMessagePb = PostgresMessagePb
|
68
|
+
|
69
|
+
@typing.final
class DatabaseSettingsPb(google.protobuf.message.Message):
    """Message for DatabaseSettings"""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    @typing.final
    class PostgresEntry(google.protobuf.message.Message):
        # Map-entry message backing the `postgres` map field below.
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        KEY_FIELD_NUMBER: builtins.int
        VALUE_FIELD_NUMBER: builtins.int
        key: builtins.str
        @property
        def value(self) -> global___PostgresPb: ...
        def __init__(
            self,
            *,
            key: builtins.str = ...,
            value: global___PostgresPb | None = ...,
        ) -> None: ...
        def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ...

    PRIMARYDATABASE_FIELD_NUMBER: builtins.int
    POSTGRES_FIELD_NUMBER: builtins.int
    primaryDatabase: builtins.bool
    @property
    def postgres(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___PostgresPb]: ...
    def __init__(
        self,
        *,
        primaryDatabase: builtins.bool = ...,
        postgres: collections.abc.Mapping[builtins.str, global___PostgresPb] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing.Literal["postgres", b"postgres", "primaryDatabase", b"primaryDatabase"]) -> None: ...

global___DatabaseSettingsPb = DatabaseSettingsPb
|
107
|
+
|
108
|
+
@typing.final
class DatabaseConfigPb(google.protobuf.message.Message):
    """Timestamped snapshot of a DatabaseSettingsPb."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    UTIME_FIELD_NUMBER: builtins.int
    DATABASESETTINGS_FIELD_NUMBER: builtins.int
    @property
    def utime(self) -> google.protobuf.timestamp_pb2.Timestamp: ...
    @property
    def databaseSettings(self) -> global___DatabaseSettingsPb: ...
    def __init__(
        self,
        *,
        utime: google.protobuf.timestamp_pb2.Timestamp | None = ...,
        databaseSettings: global___DatabaseSettingsPb | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing.Literal["databaseSettings", b"databaseSettings", "utime", b"utime"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing.Literal["databaseSettings", b"databaseSettings", "utime", b"utime"]) -> None: ...

global___DatabaseConfigPb = DatabaseConfigPb
|
128
|
+
|
129
|
+
@typing.final
class DatabaseNotificationPb(google.protobuf.message.Message):
    """Timestamped notification record: table, channel, action, raw payload."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    UTIME_FIELD_NUMBER: builtins.int
    TABLE_NAME_FIELD_NUMBER: builtins.int
    CHANNEL_NAME_FIELD_NUMBER: builtins.int
    ACTION_FIELD_NUMBER: builtins.int
    PAYLOAD_FIELD_NUMBER: builtins.int
    table_name: builtins.str
    channel_name: builtins.str
    action: builtins.str
    payload: builtins.str
    @property
    def utime(self) -> google.protobuf.timestamp_pb2.Timestamp: ...
    def __init__(
        self,
        *,
        utime: google.protobuf.timestamp_pb2.Timestamp | None = ...,
        table_name: builtins.str = ...,
        channel_name: builtins.str = ...,
        action: builtins.str = ...,
        payload: builtins.str = ...,
    ) -> None: ...
    def HasField(self, field_name: typing.Literal["utime", b"utime"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing.Literal["action", b"action", "channel_name", b"channel_name", "payload", b"payload", "table_name", b"table_name", "utime", b"utime"]) -> None: ...

global___DatabaseNotificationPb = DatabaseNotificationPb
|
File without changes
|
@@ -0,0 +1,47 @@
|
|
1
|
+
class PostgresInfo:
    """Store the Postgres database connection information.

    Instances compare by value, hash by value (so they can be used in sets or
    as dict keys), and mask the password when rendered as a string so they are
    safe to log.
    """

    def __init__(self, db_name: str, user: str, password: str, host: str, port: int) -> None:
        self.db_name = db_name
        self.user = user
        self.password = password
        self.host = host
        self.port = port

    @staticmethod
    def null() -> "PostgresInfo":
        """Return a null PostgresInfo object (empty fields, port 0)."""
        return PostgresInfo("", "", "", "", 0)

    def is_null(self) -> bool:
        """Return if the PostgresInfo object is null.

        An object is considered null when it equals the null() sentinel, when
        any of db_name/host/user is blank, or when the port is 0.
        """
        if self == PostgresInfo.null():
            return True
        if not self.db_name.strip() or not self.host.strip() or not self.user.strip():
            return True
        return self.port == 0

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, PostgresInfo):
            return False
        return (
            self.db_name == other.db_name
            and self.user == other.user
            and self.password == other.password
            and self.host == other.host
            and self.port == other.port
        )

    def __hash__(self) -> int:
        # Defining __eq__ alone sets __hash__ to None and makes instances
        # unhashable; restore value-based hashing consistent with __eq__.
        return hash((self.db_name, self.user, self.password, self.host, self.port))

    def __str__(self) -> str:
        # Mask the password so the rendering is safe to log.
        values = [
            f"{key}={'*' * 8 if key == 'password' else value}"
            for key, value in self.__dict__.items()
        ]
        values_string = "\n\t".join(values)
        return f"PostgresInfo({values_string})"

    def __repr__(self) -> str:
        return str(self)
|
ry_pg_utils/py.typed
ADDED
File without changes
|