maleo-database 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that public registry.
@@ -1,6 +1,6 @@
 from pydantic import BaseModel, ConfigDict, Field, model_validator
-from typing import Self, TypeVar
-from maleo.types.base.dict import OptionalStringToStringDict
+from typing import Self, Set, TypeVar
+from maleo.types.base.dict import OptionalStringToStringDict, StringToAnyDict
 from maleo.types.base.integer import ListOfIntegers
 from maleo.types.base.string import OptionalString
 from maleo.utils.formatters.case import to_camel
@@ -14,75 +14,95 @@ class BasePoolingConfig(BaseModel):
 PoolingConfigT = TypeVar("PoolingConfigT", bound=BasePoolingConfig)
 
 
-class PostgreSQLPoolingConfig(BasePoolingConfig):
-    """PostgreSQL-specific pooling configuration."""
+class MySQLPoolingConfig(BasePoolingConfig):
+    """MySQL-specific pooling configuration."""
 
     pool_size: int = Field(
-        default=10, ge=1, le=1000, description="Number of connections in the pool"
+        default=8, ge=1, le=500, description="Number of connections in the pool"
     )
     max_overflow: int = Field(
-        default=20, ge=0, le=500, description="Maximum number of overflow connections"
+        default=15, ge=0, le=200, description="Maximum number of overflow connections"
     )
     pool_timeout: float = Field(
-        default=30.0,
+        default=20.0,
         ge=1.0,
         le=300.0,
         description="Timeout in seconds for getting connection",
     )
     pool_recycle: int = Field(
-        default=3600, ge=60, le=86400, description="Connection recycle time in seconds"
+        default=7200, ge=60, le=86400, description="Connection recycle time in seconds"
     )
     pool_pre_ping: bool = Field(
         default=True, description="Validate connections before use"
     )
-    # Keep strategy and prepared_statement_cache_size as they're pooling-related
     strategy: PoolingStrategy = Field(
-        default=PoolingStrategy.DYNAMIC, description="Pooling strategy"
-    )
-    prepared_statement_cache_size: int = Field(
-        default=100, ge=0, le=10000, description="Prepared statement cache size"
+        default=PoolingStrategy.FIXED, description="Pooling strategy"
     )
-    pool_reset_on_return: bool = Field(
-        default=True, description="Reset connection state on return to pool"
+    # Add autocommit to pooling since it affects connection behavior in the pool
+    autocommit: bool = Field(default=False, description="Enable autocommit mode")
+    # Move connect_timeout here since it's about pool connection establishment
+    connect_timeout: float = Field(
+        default=10.0, ge=1.0, le=60.0, description="Connection timeout in seconds"
     )
 
-    @model_validator(mode="after")
-    def validate_overflow(self) -> Self:
-        if self.max_overflow > self.pool_size * 5:
-            raise ValueError("max_overflow should not exceed 5x pool_size")
-        return self
+    @property
+    def engine_kwargs_exclusions(self) -> Set[str]:
+        return {"strategy"}
 
+    @property
+    def engine_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(exclude=self.engine_kwargs_exclusions, exclude_none=True)
 
-class MySQLPoolingConfig(BasePoolingConfig):
-    """MySQL-specific pooling configuration."""
+
+class PostgreSQLPoolingConfig(BasePoolingConfig):
+    """PostgreSQL-specific pooling configuration."""
 
     pool_size: int = Field(
-        default=8, ge=1, le=500, description="Number of connections in the pool"
+        default=10, ge=1, le=1000, description="Number of connections in the pool"
     )
     max_overflow: int = Field(
-        default=15, ge=0, le=200, description="Maximum number of overflow connections"
+        default=20, ge=0, le=500, description="Maximum number of overflow connections"
     )
     pool_timeout: float = Field(
-        default=20.0,
+        default=30.0,
         ge=1.0,
         le=300.0,
         description="Timeout in seconds for getting connection",
     )
     pool_recycle: int = Field(
-        default=7200, ge=60, le=86400, description="Connection recycle time in seconds"
+        default=3600, ge=60, le=86400, description="Connection recycle time in seconds"
     )
     pool_pre_ping: bool = Field(
         default=True, description="Validate connections before use"
     )
+    # Keep strategy and prepared_statement_cache_size as they're pooling-related
     strategy: PoolingStrategy = Field(
-        default=PoolingStrategy.FIXED, description="Pooling strategy"
+        default=PoolingStrategy.DYNAMIC, description="Pooling strategy"
     )
-    # Add autocommit to pooling since it affects connection behavior in the pool
-    autocommit: bool = Field(default=False, description="Enable autocommit mode")
-    # Move connect_timeout here since it's about pool connection establishment
-    connect_timeout: float = Field(
-        default=10.0, ge=1.0, le=60.0, description="Connection timeout in seconds"
+    prepared_statement_cache_size: int = Field(
+        default=100, ge=0, le=10000, description="Prepared statement cache size"
     )
+    pool_reset_on_return: bool = Field(
+        default=True, description="Reset connection state on return to pool"
+    )
+
+    @model_validator(mode="after")
+    def validate_overflow(self) -> Self:
+        if self.max_overflow > self.pool_size * 5:
+            raise ValueError("max_overflow should not exceed 5x pool_size")
+        return self
+
+    @property
+    def engine_kwargs_exclusions(self) -> Set[str]:
+        return {
+            "strategy",
+            "prepared_statement_cache_size",
+            "pool_reset_on_return",
+        }
+
+    @property
+    def engine_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(exclude=self.engine_kwargs_exclusions, exclude_none=True)
 
 
 class SQLitePoolingConfig(BasePoolingConfig):
@@ -105,6 +125,18 @@ class SQLitePoolingConfig(BasePoolingConfig):
         default=30000, ge=1000, le=300000, description="Busy timeout in milliseconds"
     )
 
+    @property
+    def engine_kwargs_exclusions(self) -> Set[str]:
+        return {
+            "strategy",
+            "wal_mode",
+            "busy_timeout",
+        }
+
+    @property
+    def engine_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(exclude=self.engine_kwargs_exclusions, exclude_none=True)
+
 
 class SQLServerPoolingConfig(BasePoolingConfig):
     """SQL Server-specific pooling configuration."""
@@ -152,6 +184,79 @@ class SQLServerPoolingConfig(BasePoolingConfig):
             raise ValueError("max_overflow should not exceed 3x pool_size")
         return self
 
+    @property
+    def engine_kwargs_exclusions(self) -> Set[str]:
+        return {
+            "connection_timeout",
+            "command_timeout",
+            "packet_size",
+            "trust_server_certificate",
+        }
+
+    @property
+    def engine_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(exclude=self.engine_kwargs_exclusions, exclude_none=True)
+
+
+class ElasticsearchPoolingConfig(BasePoolingConfig):
+    """Elasticsearch-specific pooling configuration."""
+
+    # Connection pool settings
+    maxsize: int = Field(
+        default=25, ge=1, le=100, description="Maximum number of connections in pool"
+    )
+    connections_per_node: int = Field(
+        default=10, ge=1, le=50, description="Connections per Elasticsearch node"
+    )
+
+    # Timeout settings
+    timeout: float = Field(
+        default=10.0, ge=1.0, le=300.0, description="Request timeout in seconds"
+    )
+    max_retries: int = Field(
+        default=3, ge=0, le=10, description="Maximum number of retries"
+    )
+    retry_on_timeout: bool = Field(default=False, description="Retry on timeout")
+    retry_on_status: ListOfIntegers = Field(
+        default_factory=lambda: [502, 503, 504],
+        description="HTTP status codes to retry on",
+    )
+
+    # Connection behavior (move from connection config)
+    http_compress: bool = Field(default=True, description="Enable HTTP compression")
+    verify_certs: bool = Field(default=True, description="Verify SSL certificates")
+    ca_certs: OptionalString = Field(
+        default=None, description="Path to CA certificates"
+    )
+
+    # Advanced pool settings
+    block: bool = Field(default=False, description="Block when pool is full")
+    headers: OptionalStringToStringDict = Field(
+        default=None, description="Default headers for requests"
+    )
+    dead_timeout: float = Field(
+        default=60.0, ge=5.0, le=600.0, description="Dead node timeout in seconds"
+    )
+
+    @model_validator(mode="after")
+    def validate_overflow(self) -> Self:
+        if self.connections_per_node > self.maxsize:
+            raise ValueError("connections_per_node must not exceed maxsize")
+        return self
+
+    @property
+    def client_kwargs_exclusions(self) -> Set[str]:
+        return {
+            "connections_per_node",
+            "block",
+            "headers",
+            "dead_timeout",
+        }
+
+    @property
+    def client_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(exclude=self.client_kwargs_exclusions, exclude_none=True)
+
 
 class MongoDBPoolingConfig(BasePoolingConfig):
     """MongoDB-specific pooling configuration."""
@@ -159,30 +264,52 @@ class MongoDBPoolingConfig(BasePoolingConfig):
     model_config = ConfigDict(alias_generator=to_camel)
 
     max_pool_size: int = Field(
-        default=100, ge=1, le=500, description="Maximum number of connections in pool"
+        100,
+        ge=1,
+        le=500,
+        description="Maximum number of connections in pool",
+        alias="maxPoolSize",
     )
     min_pool_size: int = Field(
-        default=0, ge=0, le=100, description="Minimum number of connections in pool"
+        0,
+        ge=0,
+        le=100,
+        description="Minimum number of connections in pool",
+        alias="minPoolSize",
     )
     max_idle_time_ms: int = Field(
-        default=600000, ge=1000, le=3600000, description="Max idle time in milliseconds"
+        600000,
+        ge=1000,
+        le=3600000,
+        description="Max idle time in milliseconds",
+        alias="maxIdleTimeMS",
     )
     connect_timeout_ms: int = Field(
-        default=20000,
+        20000,
         ge=1000,
         le=300000,
         description="Connection timeout in milliseconds",
+        alias="connectTimeoutMS",
     )
     server_selection_timeout_ms: int = Field(
-        default=30000, ge=1000, le=300000, description="Server selection timeout"
+        30000,
+        ge=1000,
+        le=300000,
+        description="Server selection timeout",
+        alias="serverSelectionTimeoutMS",
    )
     max_connecting: int = Field(
-        default=2,
+        2,
         ge=1,
         le=10,
         description="Maximum number of concurrent connection attempts",
+        alias="maxConnecting",
     )
 
+    @property
+    def client_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(by_alias=True, exclude_none=True)
+
 
 class RedisPoolingConfig(BasePoolingConfig):
     """Redis-specific pooling configuration."""
@@ -207,49 +334,10 @@ class RedisPoolingConfig(BasePoolingConfig):
         default=True, description="Decode responses to strings"
     )
 
+    @property
+    def client_kwargs_exclusions(self) -> Set[str]:
+        return {"health_check_interval"}
 
-class ElasticsearchPoolingConfig(BasePoolingConfig):
-    """Elasticsearch-specific pooling configuration."""
-
-    # Connection pool settings
-    maxsize: int = Field(
-        default=25, ge=1, le=100, description="Maximum number of connections in pool"
-    )
-    connections_per_node: int = Field(
-        default=10, ge=1, le=50, description="Connections per Elasticsearch node"
-    )
-
-    # Timeout settings
-    timeout: float = Field(
-        default=10.0, ge=1.0, le=300.0, description="Request timeout in seconds"
-    )
-    max_retries: int = Field(
-        default=3, ge=0, le=10, description="Maximum number of retries"
-    )
-    retry_on_timeout: bool = Field(default=False, description="Retry on timeout")
-    retry_on_status: ListOfIntegers = Field(
-        default_factory=lambda: [502, 503, 504],
-        description="HTTP status codes to retry on",
-    )
-
-    # Connection behavior (move from connection config)
-    http_compress: bool = Field(default=True, description="Enable HTTP compression")
-    verify_certs: bool = Field(default=True, description="Verify SSL certificates")
-    ca_certs: OptionalString = Field(
-        default=None, description="Path to CA certificates"
-    )
-
-    # Advanced pool settings
-    block: bool = Field(default=False, description="Block when pool is full")
-    headers: OptionalStringToStringDict = Field(
-        default=None, description="Default headers for requests"
-    )
-    dead_timeout: float = Field(
-        default=60.0, ge=5.0, le=600.0, description="Dead node timeout in seconds"
-    )
-
-    @model_validator(mode="after")
-    def validate_overflow(self) -> Self:
-        if self.connections_per_node > self.maxsize:
-            raise ValueError("connections_per_node must not exceed maxsize")
-        return self
+    @property
+    def client_kwargs(self) -> StringToAnyDict:
+        return self.model_dump(exclude=self.client_kwargs_exclusions, exclude_none=True)
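The recurring engine_kwargs / engine_kwargs_exclusions pattern introduced above dumps the pooling model minus the fields that SQLAlchemy's create_engine() would not accept. A rough usage sketch under that assumption; since the import path of the pooling module is not shown in this diff, the snippet re-declares a trimmed stand-in class, and the connection URL is a placeholder:

from typing import Any, Dict, Set

from pydantic import BaseModel, Field
from sqlalchemy import create_engine


class PoolDemo(BaseModel):
    # Stand-in mirroring the SQLAlchemy-compatible subset of PostgreSQLPoolingConfig.
    pool_size: int = Field(default=10, ge=1, le=1000)
    max_overflow: int = Field(default=20, ge=0, le=500)
    pool_timeout: float = Field(default=30.0, ge=1.0, le=300.0)
    pool_recycle: int = Field(default=3600, ge=60, le=86400)
    pool_pre_ping: bool = Field(default=True)
    prepared_statement_cache_size: int = Field(default=100, ge=0, le=10000)

    @property
    def engine_kwargs_exclusions(self) -> Set[str]:
        # Drop fields create_engine() would reject as unknown keywords.
        return {"prepared_statement_cache_size"}

    @property
    def engine_kwargs(self) -> Dict[str, Any]:
        return self.model_dump(exclude=self.engine_kwargs_exclusions, exclude_none=True)


cfg = PoolDemo(pool_size=5, max_overflow=10)
# pool_size, max_overflow, pool_timeout, pool_recycle and pool_pre_ping are all
# accepted by create_engine() for the default QueuePool. Creating the engine does
# not open a connection, but the driver named in the URL must be installed.
engine = create_engine(
    "postgresql+psycopg2://user:password@localhost:5432/app", **cfg.engine_kwargs
)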
maleo/database/dtos.py ADDED
@@ -0,0 +1,6 @@
+from pydantic import BaseModel, Field
+
+
+class ConnectionCheck(BaseModel):
+    is_connected: bool = Field(..., description="Whether database is connected")
+    duration: float = Field(..., ge=0.0, description="Duration")
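The new ConnectionCheck DTO is a natural return type for a timed connectivity probe. A small sketch of one way to populate it; the model is mirrored inline so the snippet runs standalone (in the package it lives in maleo/database/dtos.py), and the check_connection helper with its probe callable is illustrative, not part of the package:

import time
from typing import Callable

from pydantic import BaseModel, Field


class ConnectionCheck(BaseModel):
    is_connected: bool = Field(..., description="Whether database is connected")
    duration: float = Field(..., ge=0.0, description="Duration")


def check_connection(probe: Callable[[], None]) -> ConnectionCheck:
    """Run a connectivity probe, recording success and elapsed time in seconds."""
    start = time.perf_counter()
    try:
        probe()  # e.g. a ping against an engine or client
        connected = True
    except Exception:
        connected = False
    return ConnectionCheck(is_connected=connected, duration=time.perf_counter() - start)


# Example with a trivial probe that always succeeds:
print(check_connection(lambda: None))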