trovesuite 1.0.30-py3-none-any.whl → 1.0.32-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
trovesuite/configs/database.py
@@ -1,5 +1,20 @@
  """
  Database configuration and connection management
+
+ FIXED VERSION - Removed pool reinitialization during runtime to prevent "unkeyed connection" errors.
+ Key changes:
+ - Pool created once at startup, never recreated during runtime
+ - Removed connection validation queries that consume pool connections
+ - Increased default pool size to 2 (safe for Azure Basic tier)
+ - Simplified get_db_connection() - let pool.getconn() block naturally
+ - Removed _recover_connection_pool() and _is_pool_valid() runtime checks
+
+ NOTE: Database initialization is NOT automatic for package version.
+ You must call initialize_database() explicitly in your application startup.
+ Example in FastAPI:
+ @app.on_event("startup")
+ async def startup_event():
+ initialize_database()
  """
  from contextlib import contextmanager
  from typing import Generator, Optional
@@ -12,7 +27,7 @@ from .logging import get_logger

  logger = get_logger("database")

- # Database connection pool
+ # Database connection pool - created once at startup, never replaced during runtime
  _connection_pool: Optional[psycopg2.pool.ThreadedConnectionPool] = None
  _initialization_lock = threading.Lock()

@@ -24,13 +39,11 @@ class DatabaseConfig:
  import os
  self.settings = db_settings
  self.database_url = self.settings.database_url
- # CRITICAL: Azure PostgreSQL B_Standard_B1ms has very limited connections (~50 max, practical limit ~30-40)
- # Default pool size is set to 1 to avoid connection exhaustion with multiple workers/replicas
- # Formula: connections = pool_size × workers × replicas
- # Example: 1 pool × 4 workers × 2 replicas = 8 connections (safe)
- # With old default (5): 5 × 4 × 2 = 40 connections (exceeds limit!)
+ # Safe pool size for Azure Basic tier (B_Standard_B1ms)
+ # With 2 workers and pool_size=2: 2 × 2 = 4 connections (very safe)
+ # With 4 workers and pool_size=2: 4 × 2 = 8 connections (still safe, well under 50 limit)
  # Can be overridden with DB_POOL_SIZE environment variable
- self.pool_size = int(os.getenv("DB_POOL_SIZE", "1"))
+ self.pool_size = int(os.getenv("DB_POOL_SIZE", "2"))

  def get_connection_params(self) -> dict:
  """Get database connection parameters"""
@@ -63,26 +76,12 @@ class DatabaseConfig:
  }

  def create_connection_pool(self) -> psycopg2.pool.ThreadedConnectionPool:
- """Create a connection pool for psycopg2
-
- Note: For Azure PostgreSQL B_Standard_B1ms, keep pool_size ≤ 1 per worker
- to avoid connection exhaustion. Consider using PgBouncer for higher concurrency.
- """
+ """Create a connection pool for psycopg2"""
  try:
- # For Azure PostgreSQL, use minimum pool size to avoid connection exhaustion
- # Multiple replicas/workers can quickly exhaust database connections on Basic tier
  import os
  dsn = os.getenv("DATABASE_URL", "") or str(self.database_url or "")
  is_azure = "database.azure.com" in dsn.lower()
-
- # Ensure pool size is appropriate for Azure Basic tier
  pool_size = self.pool_size
- if is_azure and pool_size > 2:
- logger.warning(
- f"⚠️ Pool size {pool_size} may be too high for Azure Basic tier. "
- f"Recommended: 1-2 connections per worker. "
- f"Set DB_POOL_SIZE=1 to avoid connection exhaustion."
- )

  pool = psycopg2.pool.ThreadedConnectionPool(
  minconn=1,
@@ -95,7 +94,6 @@ class DatabaseConfig:
  )
  return pool
  except psycopg2.OperationalError as e:
- # Check if it's a connection limit error
  error_str = str(e).lower()
  if any(keyword in error_str for keyword in ["connection", "slot", "limit", "exhausted", "too many"]):
  logger.error("⚠️ Database connection limit reached!")
@@ -105,8 +103,8 @@ class DatabaseConfig:
  logger.error(" 3. Too many Gunicorn workers (GUNICORN_WORKERS environment variable)")
  logger.error(" 4. Connections not being properly returned to pool")
  logger.error(" Solutions:")
- logger.error(" - Set DB_POOL_SIZE=1 (recommended for Azure Basic tier)")
- logger.error(" - Reduce GUNICORN_WORKERS (default: 4)")
+ logger.error(" - Set DB_POOL_SIZE=2 (current default)")
+ logger.error(" - Reduce GUNICORN_WORKERS (default: 2)")
  logger.error(" - Consider using PgBouncer for connection pooling")
  logger.error(" - Upgrade to a higher PostgreSQL tier if needed")
  logger.error(f"Failed to create database connection pool: {str(e)}")
@@ -116,7 +114,7 @@ class DatabaseConfig:
  raise

  def test_connection(self) -> bool:
- """Test database connection"""
+ """Test database connection (only used at startup)"""
  try:
  with psycopg2.connect(**self.get_connection_params()) as conn:
  with conn.cursor() as cursor:
@@ -136,190 +134,104 @@ db_config = DatabaseConfig()


  def initialize_database():
- """Initialize database connections and pool"""
+ """
+ Initialize database connections and pool.
+ This should ONLY be called at application startup.
+ Pool is created once and never recreated during runtime.
+
+ NOTE: For package version, this must be called explicitly in application startup.
+ """
  global _connection_pool

- # Close existing pool if it exists (cleanup before reinitializing)
- if _connection_pool is not None:
- try:
- _connection_pool.closeall()
- logger.info("Closed existing connection pool before reinitialization")
- except Exception as e:
- logger.warning(f"Error closing existing pool: {str(e)}")
- _connection_pool = None
-
- try:
- # Test connection first
- if not db_config.test_connection():
- raise Exception("Database connection test failed")
-
- # Create connection pool
- _connection_pool = db_config.create_connection_pool()
+ with _initialization_lock:
+ # If pool already exists, don't recreate it
+ if _connection_pool is not None:
+ logger.warning("Database pool already initialized, skipping reinitialization")
+ return

- # Verify pool was created successfully
- if _connection_pool is None:
- raise Exception("Connection pool creation returned None")
-
- logger.info("✅ Database initialization completed successfully")
-
- except Exception as e:
- logger.error(f"❌ Database initialization failed: {str(e)}")
- _connection_pool = None # Ensure pool is None on failure
- raise
-
+ try:
+ # Test connection first (only at startup)
+ if not db_config.test_connection():
+ raise Exception("Database connection test failed")

- def _is_pool_valid(pool) -> bool:
- """Check if the connection pool is valid and usable"""
- if pool is None:
- return False
- try:
- # ThreadedConnectionPool doesn't expose a direct "closed" attribute
- # Check if pool has the necessary internal structures
- if not hasattr(pool, '_pool'):
- return False
- if pool._pool is None:
- return False
- # Additional check: verify pool has connection parameters
- if not hasattr(pool, '_kwargs'):
- return False
- return True
- except (AttributeError, Exception) as e:
- logger.debug(f"Pool validation check: {str(e)}")
- return False
+ # Create connection pool (only once at startup)
+ _connection_pool = db_config.create_connection_pool()
+
+ # Verify pool was created successfully
+ if _connection_pool is None:
+ raise Exception("Connection pool creation returned None")

+ logger.info("✅ Database initialization completed successfully")

- def _recover_connection_pool() -> bool:
- """Attempt to recover the connection pool with retry logic"""
- global _connection_pool, _initialization_lock
- import time
-
- with _initialization_lock:
- # Double-check after acquiring lock
- if _connection_pool is not None and _is_pool_valid(_connection_pool):
- return True
-
- # Close invalid pool if it exists
- if _connection_pool is not None:
- try:
- _connection_pool.closeall()
- logger.info("Closed invalid connection pool")
- except Exception as e:
- logger.warning(f"Error closing invalid pool: {str(e)}")
- _connection_pool = None
-
- # Retry with exponential backoff
- max_retries = 3
- base_delay = 1 # Start with 1 second
-
- for attempt in range(1, max_retries + 1):
- try:
- logger.warning(f"Attempting to reinitialize connection pool (attempt {attempt}/{max_retries})...")
- initialize_database()
-
- if _connection_pool is not None and _is_pool_valid(_connection_pool):
- logger.info(f"✅ Connection pool reinitialized successfully (attempt {attempt})")
- return True
- else:
- logger.warning(f"Pool initialized but validation failed (attempt {attempt})")
-
- except Exception as e:
- logger.error(f"Pool reinitialization attempt {attempt} failed: {str(e)}")
- if attempt < max_retries:
- delay = base_delay * (2 ** (attempt - 1)) # Exponential backoff: 1s, 2s, 4s
- logger.info(f"Retrying in {delay} seconds...")
- time.sleep(delay)
-
- logger.error("❌ Failed to reinitialize connection pool after all retries")
- return False
+ except Exception as e:
+ logger.error(f" Database initialization failed: {str(e)}")
+ _connection_pool = None # Ensure pool is None on failure
+ raise


  def get_connection_pool() -> psycopg2.pool.ThreadedConnectionPool:
- """Get the database connection pool, with automatic reinitialization if needed"""
+ """
+ Get the database connection pool.
+ Pool must be initialized at startup. This function will raise if pool is None.
+ """
  global _connection_pool

- # Fast path: pool exists and is valid
- if _connection_pool is not None and _is_pool_valid(_connection_pool):
- return _connection_pool
-
- # Pool is None or invalid, attempt recovery
- if not _recover_connection_pool():
+ if _connection_pool is None:
  error_msg = (
- "Database connection pool is unavailable. This usually means:\n"
- "1. Database server is unreachable or down\n"
- "2. Network connectivity issues\n"
- "3. Database credentials are incorrect\n"
- "4. Connection pool exhausted or closed\n"
- "5. Database initialization failed\n"
- "Please check the startup logs and database status."
+ "Database connection pool is not initialized. "
+ "Please ensure initialize_database() was called at application startup."
  )
  logger.error(error_msg)
  raise Exception(error_msg)

- if _connection_pool is None:
- error_msg = "Connection pool recovery completed but pool is still None"
- logger.error(error_msg)
- raise Exception(error_msg)
-
  return _connection_pool


- def _validate_connection(conn) -> bool:
- """Validate if a connection is still alive"""
- try:
- # Check if connection is closed first
- if conn.closed:
- return False
-
- # Test if connection is alive with a simple query
- with conn.cursor() as cursor:
- cursor.execute("SELECT 1")
- cursor.fetchone()
- return True
- except (psycopg2.OperationalError, psycopg2.InterfaceError, psycopg2.DatabaseError) as e:
- logger.warning(f"Connection validation failed: {str(e)}")
- return False
- except Exception as e:
- logger.warning(f"Unexpected error during connection validation: {str(e)}")
- return False
-
-
  @contextmanager
  def get_db_connection():
- """Get a database connection from the pool (context manager)"""
+ """
+ Get a database connection from the pool (context manager).
+
+ This is simplified - we let pool.getconn() block naturally.
+ No retries, no validation queries, no pool recovery.
+ """
  pool = get_connection_pool()
  conn = None
  try:
+ # Get connection from pool - this will block if pool is exhausted
+ # That's the correct behavior - let backpressure happen naturally
  conn = pool.getconn()
-
- # Validate connection before using it
- if not _validate_connection(conn):
- logger.warning("Stale connection detected, getting new connection")
- pool.putconn(conn, close=True)
- conn = pool.getconn()
-
  logger.debug("Database connection acquired from pool")
  yield conn
  except Exception as e:
- logger.error(f"Database connection error: {str(e)}")
- if conn:
+ # If connection exists and isn't closed, rollback transaction
+ if conn and not conn.closed:
  try:
- # Only rollback if connection is still open
- if not conn.closed:
- conn.rollback()
- except (psycopg2.OperationalError, psycopg2.InterfaceError) as rollback_error:
- logger.warning(f"Could not rollback closed connection: {str(rollback_error)}")
+ conn.rollback()
+ except Exception as rollback_error:
+ logger.warning(f"Could not rollback transaction: {str(rollback_error)}")
+ # Re-raise the exception - don't retry here
  raise
  finally:
+ # Always return connection to pool
  if conn:
  try:
- # If connection is broken, close it instead of returning to pool
  if conn.closed:
+ # If connection is closed, tell pool to close it instead of returning
  pool.putconn(conn, close=True)
  else:
+ # Return connection to pool normally
  pool.putconn(conn)
  logger.debug("Database connection returned to pool")
  except Exception as put_error:
+ # Log error but don't fail - connection will be cleaned up by pool
  logger.error(f"Error returning connection to pool: {str(put_error)}")
+ # Try to close connection manually as last resort
+ try:
+ if not conn.closed:
+ conn.close()
+ except Exception:
+ pass


  @contextmanager
@@ -392,6 +304,7 @@ class DatabaseManager:
  cursor.execute("INSERT INTO table2 ...")
  # Auto-commits on success, auto-rollbacks on exception
  """
+ # Use get_db_connection() instead of directly accessing pool
  with get_db_connection() as conn:
  cursor = conn.cursor(cursor_factory=RealDictCursor)
  try:
@@ -415,7 +328,10 @@ class DatabaseManager:

  @staticmethod
  def health_check() -> dict:
- """Perform database health check"""
+ """
+ Perform database health check.
+ Health checks are allowed to fail - they don't attempt to repair the pool.
+ """
  try:
  with get_db_cursor() as cursor:
  cursor.execute("SELECT version(), current_database(), current_user")
@@ -451,9 +367,9 @@ class DatabaseManager:
  }


- # NOTE: Database initialization is NOT automatic
+ # NOTE: Database initialization is NOT automatic for package version
  # You must call initialize_database() explicitly in your application startup
  # Example in FastAPI:
  # @app.on_event("startup")
  # async def startup_event():
- # initialize_database()
+ # initialize_database()
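
The net effect of the database.py changes is that 1.0.32 creates the pool exactly once and leaves failure handling to the caller. Below is a minimal sketch of the startup wiring the new docstrings describe; it assumes initialize_database() and get_db_connection() are importable from trovesuite.configs.database (the import path is inferred from the RECORD listing, not stated in the diff), and the /db-ping endpoint is purely illustrative.

    # Sketch only: follows the @app.on_event("startup") pattern shown in the 1.0.32 module docstring.
    # Assumption: both helpers are importable from trovesuite.configs.database.
    from fastapi import FastAPI
    from trovesuite.configs.database import get_db_connection, initialize_database

    app = FastAPI()

    @app.on_event("startup")
    async def startup_event():
        # Create the connection pool once; 1.0.32 never recreates it at runtime.
        initialize_database()

    @app.get("/db-ping")
    def db_ping():
        # get_db_connection() blocks on pool.getconn() if the pool is busy and
        # always returns the connection to the pool when the block exits.
        with get_db_connection() as conn:
            with conn.cursor() as cursor:
                cursor.execute("SELECT 1")
                return {"db_ok": cursor.fetchone() is not None}
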
trovesuite-1.0.30.dist-info/METADATA → trovesuite-1.0.32.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: trovesuite
- Version: 1.0.30
+ Version: 1.0.32
  Summary: TroveSuite services package providing authentication, authorization, notifications, Azure Storage, and other enterprise services for TroveSuite applications
  Home-page: https://dev.azure.com/brightgclt/trovesuite/_git/packages
  Author: Bright Debrah Owusu
trovesuite-1.0.30.dist-info/RECORD → trovesuite-1.0.32.dist-info/RECORD
@@ -6,7 +6,7 @@ trovesuite/auth/auth_read_dto.py,sha256=e27JqKVPVUM83A_mYF452QCflsvGNo7aKje7q_ur
  trovesuite/auth/auth_service.py,sha256=TQOJFG0AzhPGwZBAXVxMkHxyG2wyct4Zcoq4z0cVBO4,22201
  trovesuite/auth/auth_write_dto.py,sha256=rdwI7w6-9QZGv1H0PAGrjkLBCzaMHjgPIXeLb9RmNec,234
  trovesuite/configs/__init__.py,sha256=h1mSZOaZ3kUy1ZMO_m9O9KklsxywM0RfMVZLh9h9WvQ,328
- trovesuite/configs/database.py,sha256=x6ucbF7lNLSNqeCqFgJ4NUbkkAvYifdcQbOd9kxZUPQ,18414
+ trovesuite/configs/database.py,sha256=u4aTAZD4vyByzwPkmH47rAZ8iA4nsYyMiaL634uUcAM,15008
  trovesuite/configs/logging.py,sha256=mGjR2d4urVNry9l5_aXycMMtcY2RAFIpEL35hw33KZg,9308
  trovesuite/configs/settings.py,sha256=s-RvzFI7IR8-Q_A0ZRxlTr-iDoY6MvbxM0RkbhmJOL4,6915
  trovesuite/entities/__init__.py,sha256=Dbl_03Bueyh2vOP2hykd40MmNMrl5nNHSRGP-kqwwNo,160
@@ -27,8 +27,8 @@ trovesuite/storage/storage_write_dto.py,sha256=vl1iCZ93bpFmpvkCrn587QtMtOA_TPDse
  trovesuite/utils/__init__.py,sha256=mDZuY77BphvQFYLmcWxjP5Tcq9ZZ3WXJWBKB1v6wzHU,185
  trovesuite/utils/helper.py,sha256=qpd-EWPaX3-QJA5xvxb4s9rEb9W2RKPCDXcdAKSUwSM,30858
  trovesuite/utils/templates.py,sha256=_92k4-EkqWs-h0LNJxPgorbspmp24kDngS7O3qWIFyQ,20388
- trovesuite-1.0.30.dist-info/licenses/LICENSE,sha256=EJT35ct-Q794JYPdAQy3XNczQGKkU1HzToLeK1YVw2s,1070
- trovesuite-1.0.30.dist-info/METADATA,sha256=jlg46ILbbTBMpJ7SMUVcH9lBjA_HmnFm6oejaTshjX4,21737
- trovesuite-1.0.30.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- trovesuite-1.0.30.dist-info/top_level.txt,sha256=GzKhG_-MTaxeHrIgkGkBH_nof2vroGFBrjeHKWUIwNc,11
- trovesuite-1.0.30.dist-info/RECORD,,
+ trovesuite-1.0.32.dist-info/licenses/LICENSE,sha256=EJT35ct-Q794JYPdAQy3XNczQGKkU1HzToLeK1YVw2s,1070
+ trovesuite-1.0.32.dist-info/METADATA,sha256=HoifrGTHx071bljGeM7PCOeg4taR1Anie_s5SDwaOCs,21737
+ trovesuite-1.0.32.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ trovesuite-1.0.32.dist-info/top_level.txt,sha256=GzKhG_-MTaxeHrIgkGkBH_nof2vroGFBrjeHKWUIwNc,11
+ trovesuite-1.0.32.dist-info/RECORD,,