fixturify 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fixturify/__init__.py CHANGED
@@ -1,6 +1,6 @@
  """PyTools - A collection of reusable Python utility modules."""

- __version__ = "0.1.10"
+ __version__ = "0.1.12"

  from fixturify.sql_d import sql, Phase, SqlTestConfig
  from fixturify.read_d import read
fixturify/http_d/_recorder.py CHANGED
@@ -811,10 +811,36 @@ def create_response_from_http_client(
      Returns:
          HttpResponse model
      """
+     import gzip
+     import zlib
+
      headers = dict(response.getheaders()) if hasattr(response, "getheaders") else {}
      content_type = headers.get("content-type", headers.get("Content-Type", ""))
+     content_encoding = headers.get(
+         "content-encoding", headers.get("Content-Encoding", "")
+     ).lower()

      body_bytes = response.read()
+
+     # Decompress if needed (http.client doesn't auto-decompress)
+     if body_bytes and content_encoding in ("gzip", "deflate"):
+         try:
+             if content_encoding == "gzip":
+                 body_bytes = gzip.decompress(body_bytes)
+             elif content_encoding == "deflate":
+                 # deflate can be raw or zlib-wrapped
+                 try:
+                     body_bytes = zlib.decompress(body_bytes)
+                 except zlib.error:
+                     body_bytes = zlib.decompress(body_bytes, -zlib.MAX_WBITS)
+             # Remove content-encoding header since we decompressed
+             headers = {
+                 k: v for k, v in headers.items()
+                 if k.lower() != "content-encoding"
+             }
+         except (gzip.BadGzipFile, OSError, zlib.error):
+             pass # Not actually compressed, use as-is
+
      body, body_encoding, is_binary = _serialize_body(body_bytes, content_type)

      if is_binary:
@@ -1312,18 +1338,43 @@ def create_response_from_httpcore(
      Returns:
          HttpResponse model
      """
+     import gzip
+     import zlib
+
      # Convert headers from list of tuples to dict
      headers_dict: Dict[str, str] = {}
+     content_encoding = ""
      if response.headers:
          for k, v in response.headers:
              key = k.decode() if isinstance(k, bytes) else k
              val = v.decode() if isinstance(v, bytes) else v
              headers_dict[key] = val
+             if key.lower() == "content-encoding":
+                 content_encoding = val.lower()

      content_type = headers_dict.get("content-type", headers_dict.get("Content-Type", ""))

      # Get body content
      body_bytes = response.content
+
+     # Decompress if needed (httpcore doesn't auto-decompress)
+     if body_bytes and content_encoding in ("gzip", "deflate"):
+         try:
+             if content_encoding == "gzip":
+                 body_bytes = gzip.decompress(body_bytes)
+             elif content_encoding == "deflate":
+                 try:
+                     body_bytes = zlib.decompress(body_bytes)
+                 except zlib.error:
+                     body_bytes = zlib.decompress(body_bytes, -zlib.MAX_WBITS)
+             # Remove content-encoding header since we decompressed
+             headers_dict = {
+                 k: v for k, v in headers_dict.items()
+                 if k.lower() != "content-encoding"
+             }
+         except (gzip.BadGzipFile, OSError, zlib.error):
+             pass # Not actually compressed, use as-is
+
      body, body_encoding, is_binary = _serialize_body(body_bytes, content_type)

      if is_binary:
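
Both recorder functions above gain the same decompression fallback: pick gzip or deflate from the Content-Encoding header, try a zlib-wrapped deflate stream before falling back to raw deflate, and leave the body untouched if it turns out not to be compressed. A minimal standalone sketch of that pattern (the decode_body helper is illustrative and not part of fixturify):

    import gzip
    import zlib


    def decode_body(body: bytes, content_encoding: str) -> bytes:
        """Best-effort decompression mirroring the recorder's fallback logic."""
        encoding = content_encoding.lower()
        if not body or encoding not in ("gzip", "deflate"):
            return body
        try:
            if encoding == "gzip":
                return gzip.decompress(body)
            # deflate may be zlib-wrapped or raw; try the wrapped form first
            try:
                return zlib.decompress(body)
            except zlib.error:
                return zlib.decompress(body, -zlib.MAX_WBITS)
        except (gzip.BadGzipFile, OSError, zlib.error):
            return body  # not actually compressed, keep as-is


    # e.g. decode_body(gzip.compress(b'{"ok": true}'), "gzip") == b'{"ok": true}'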
fixturify/http_d/_stubs/_httpcore.py CHANGED
@@ -9,7 +9,7 @@ import functools
  from typing import TYPE_CHECKING, Any

  from .._models import HttpRequest, HttpResponse
- from .._recorder import create_request_from_httpcore, create_response_from_httpcore
+ from .._recorder import create_request_from_httpcore

  if TYPE_CHECKING:
      from .._mock_context import HttpMockContext
@@ -72,13 +72,14 @@ def _build_response(response_model: HttpResponse) -> Any:
      """Build httpcore Response from HttpResponse model."""
      from httpcore import Response
      import gzip
-
+     import zlib
+
      # Get content encoding before filtering headers
      content_encoding = response_model.headers.get(
-         "content-encoding",
+         "content-encoding",
          response_model.headers.get("Content-Encoding", "")
      ).lower()
-
+
      # Build headers as list of tuples
      headers = [
          (k.encode("ascii"), v.encode("ascii"))
@@ -86,15 +87,21 @@ def _build_response(response_model: HttpResponse) -> Any:
          # Skip headers that cause issues
          if k.lower() not in ("transfer-encoding", "content-encoding")
      ]
-
+
      body = response_model.get_body_bytes()
-
-     # Decompress gzip body if needed (since we removed content-encoding header)
-     if content_encoding == "gzip" and body:
+
+     # Decompress body if needed (since we removed content-encoding header)
+     if body and content_encoding in ("gzip", "deflate"):
          try:
-             body = gzip.decompress(body)
-         except (gzip.BadGzipFile, OSError):
-             pass # Not actually gzip, use as-is
+             if content_encoding == "gzip":
+                 body = gzip.decompress(body)
+             elif content_encoding == "deflate":
+                 try:
+                     body = zlib.decompress(body)
+                 except zlib.error:
+                     body = zlib.decompress(body, -zlib.MAX_WBITS)
+         except (gzip.BadGzipFile, OSError, zlib.error):
+             pass # Not actually compressed, use as-is

      return Response(
          status=response_model.status,
@@ -105,16 +112,17 @@ def _build_response(response_model: HttpResponse) -> Any:

  def _serialize_response(httpcore_response: Any) -> HttpResponse:
      """Create HttpResponse from httpcore Response object.
-
-     Handles gzip decompression before serialization to ensure
+
+     Handles gzip/deflate decompression before serialization to ensure
      body is stored as readable text, not binary.
      """
      import gzip
+     import zlib
      from .._models import HttpResponse
-
+
      # Get body - should be available after read()/aread()
      body_bytes = httpcore_response.content
-
+
      # Convert headers
      headers_dict = {}
      content_encoding = ""
@@ -124,21 +132,24 @@ def _serialize_response(httpcore_response: Any) -> HttpResponse:
          headers_dict[key] = val
          if key.lower() == "content-encoding":
              content_encoding = val.lower()
-
-     # If gzip, decompress body before serialization
-     if content_encoding == "gzip" and body_bytes:
+
+     # Decompress body before serialization if needed
+     if body_bytes and content_encoding in ("gzip", "deflate"):
          try:
-             body_bytes = gzip.decompress(body_bytes)
+             if content_encoding == "gzip":
+                 body_bytes = gzip.decompress(body_bytes)
+             elif content_encoding == "deflate":
+                 try:
+                     body_bytes = zlib.decompress(body_bytes)
+                 except zlib.error:
+                     body_bytes = zlib.decompress(body_bytes, -zlib.MAX_WBITS)
              # Remove content-encoding header since we decompressed
              headers_dict = {
                  k: v for k, v in headers_dict.items()
                  if k.lower() != "content-encoding"
              }
-         except (gzip.BadGzipFile, OSError):
-             pass # Not actually gzip, use as-is
-
-     # Determine content type
-     content_type = headers_dict.get("content-type", headers_dict.get("Content-Type", ""))
+         except (gzip.BadGzipFile, OSError, zlib.error):
+             pass # Not actually compressed, use as-is

      # Try to decode as text/JSON
      body_str = None
fixturify/sql_d/__init__.py CHANGED
@@ -1,7 +1,8 @@
  """SQL module for executing SQL files before/after tests."""

  from fixturify.sql_d._config import SqlTestConfig
+ from fixturify.sql_d._connection_cache import get_connection_cache
  from fixturify.sql_d._decorator import sql
  from fixturify.sql_d._phase import Phase

- __all__ = ["sql", "Phase", "SqlTestConfig"]
+ __all__ = ["sql", "Phase", "SqlTestConfig", "get_connection_cache"]
fixturify/sql_d/_connection_cache.py ADDED
@@ -0,0 +1,235 @@
+ """Connection cache for sharing database connections across test decorators.
+
+ This module provides a singleton cache that stores database connections
+ keyed by their configuration. This avoids creating a new connection
+ for each @sql decorator invocation, significantly improving test performance.
+ """
+
+ import atexit
+ import asyncio
+ from dataclasses import dataclass
+ from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from ._config import SqlTestConfig
+
+
+ @dataclass(frozen=True)
+ class CacheKey:
+     """Immutable key for connection cache lookup."""
+
+     driver: str
+     host: str
+     database: str
+     user: str
+     port: Optional[int]
+
+     @classmethod
+     def from_config(cls, config: "SqlTestConfig") -> "CacheKey":
+         """Create cache key from SqlTestConfig."""
+         return cls(
+             driver=config.driver,
+             host=config.host,
+             database=config.database,
+             user=config.user,
+             port=config.port,
+         )
+
+
+ class ConnectionCache:
+     """
+     Singleton cache for database connections.
+
+     Maintains separate caches for sync and async connections.
+     Connections are created on first access and reused for subsequent calls.
+     All connections are closed when the process exits via atexit handler.
+
+     Async connections are tracked with their event loop - if the loop changes
+     or closes, the cached connection is invalidated automatically.
+     """
+
+     _instance: Optional["ConnectionCache"] = None
+
+     def __new__(cls) -> "ConnectionCache":
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+             cls._instance._initialized = False
+         return cls._instance
+
+     def __init__(self) -> None:
+         if self._initialized:
+             return
+         self._initialized = True
+         self._sync_connections: Dict[CacheKey, Any] = {}
+         # Store (connection, event_loop) tuple for async connections
+         self._async_connections: Dict[CacheKey, Tuple[Any, Any]] = {}
+         self._sync_close_funcs: Dict[CacheKey, Any] = {}
+         self._async_close_funcs: Dict[CacheKey, Any] = {}
+
+     def get_sync(
+         self,
+         config: "SqlTestConfig",
+         connect_func: Any,
+         connect_params: Dict[str, Any],
+     ) -> Any:
+         """
+         Get or create a sync connection.
+
+         Args:
+             config: Database configuration
+             connect_func: Function to create connection (e.g., driver.connect)
+             connect_params: Parameters to pass to connect_func
+
+         Returns:
+             Database connection object
+         """
+         key = CacheKey.from_config(config)
+
+         if key not in self._sync_connections:
+             connection = connect_func(**connect_params)
+             self._sync_connections[key] = connection
+
+         return self._sync_connections[key]
+
+     async def get_async(
+         self,
+         config: "SqlTestConfig",
+         connect_factory: Any,
+     ) -> Any:
+         """
+         Get or create an async connection.
+
+         Tracks the event loop - if the cached connection was created in a
+         different or closed loop, it's invalidated and a new one is created.
+
+         Args:
+             config: Database configuration
+             connect_factory: Callable that returns a coroutine when called
+
+         Returns:
+             Database connection object
+         """
+         key = CacheKey.from_config(config)
+         current_loop = asyncio.get_running_loop()
+
+         if key in self._async_connections:
+             connection, cached_loop = self._async_connections[key]
+             # Check if loop is the same and still running
+             if cached_loop is current_loop and not cached_loop.is_closed():
+                 return connection
+             # Loop changed or closed - remove stale connection
+             del self._async_connections[key]
+
+         # Create new connection and track with current loop
+         connection = await connect_factory()
+         self._async_connections[key] = (connection, current_loop)
+         return connection
+
+     def register_sync_closer(
+         self,
+         config: "SqlTestConfig",
+         close_func: Any,
+     ) -> None:
+         """Register a close function for a sync connection."""
+         key = CacheKey.from_config(config)
+         if key not in self._sync_close_funcs:
+             self._sync_close_funcs[key] = close_func
+
+     def register_async_closer(
+         self,
+         config: "SqlTestConfig",
+         close_func: Any,
+     ) -> None:
+         """Register a close function for an async connection."""
+         key = CacheKey.from_config(config)
+         if key not in self._async_close_funcs:
+             self._async_close_funcs[key] = close_func
+
+     def close_all_sync(self) -> None:
+         """Close all cached sync connections."""
+         for key, connection in list(self._sync_connections.items()):
+             try:
+                 if key in self._sync_close_funcs:
+                     self._sync_close_funcs[key](connection)
+                 elif hasattr(connection, "close"):
+                     connection.close()
+             except Exception:
+                 pass # Best effort cleanup
+         self._sync_connections.clear()
+         self._sync_close_funcs.clear()
+
+     def close_all_async_sync(self) -> None:
+         """
+         Close all async connections from sync context.
+
+         This is called by atexit handler which runs in sync context.
+         We need to handle async cleanup carefully.
+         """
+         for key, (connection, _loop) in list(self._async_connections.items()):
+             try:
+                 if hasattr(connection, "close"):
+                     # Some async connections have sync close()
+                     result = connection.close()
+                     # If close() returns a coroutine, we need to run it
+                     if asyncio.iscoroutine(result):
+                         try:
+                             loop = asyncio.get_event_loop()
+                             if loop.is_running():
+                                 # Can't run from within running loop
+                                 pass
+                             else:
+                                 loop.run_until_complete(result)
+                         except RuntimeError:
+                             # No event loop, create new one
+                             asyncio.run(result)
+             except Exception:
+                 pass # Best effort cleanup
+         self._async_connections.clear()
+         self._async_close_funcs.clear()
+
+     async def close_all_async(self) -> None:
+         """Close all cached async connections from async context."""
+         for key, (connection, _loop) in list(self._async_connections.items()):
+             try:
+                 if key in self._async_close_funcs:
+                     await self._async_close_funcs[key](connection)
+                 elif hasattr(connection, "close"):
+                     result = connection.close()
+                     if asyncio.iscoroutine(result):
+                         await result
+                     # Handle wait_closed for aiomysql-style connections
+                     if hasattr(connection, "wait_closed"):
+                         await connection.wait_closed()
+             except Exception:
+                 pass # Best effort cleanup
+         self._async_connections.clear()
+         self._async_close_funcs.clear()
+
+     def clear(self) -> None:
+         """Clear all connections (for testing purposes)."""
+         self.close_all_sync()
+         self.close_all_async_sync()
+
+
+ # Global cache instance
+ _cache: Optional[ConnectionCache] = None
+
+
+ def get_connection_cache() -> ConnectionCache:
+     """Get the global connection cache instance."""
+     global _cache
+     if _cache is None:
+         _cache = ConnectionCache()
+     return _cache
+
+
+ def _cleanup_connections() -> None:
+     """Cleanup handler called at process exit."""
+     global _cache
+     if _cache is not None:
+         _cache.close_all_sync()
+         _cache.close_all_async_sync()
+
+
+ # Register cleanup handler
+ atexit.register(_cleanup_connections)
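
The module docstring above states the caching contract: connections are keyed by (driver, host, database, user, port) and reused across @sql invocations, then closed at process exit. A minimal usage sketch against the exported get_connection_cache(), using a SimpleNamespace stand-in where a real SqlTestConfig would normally be passed (those five attributes are the only ones CacheKey.from_config reads):

    import sqlite3
    from types import SimpleNamespace

    from fixturify.sql_d import get_connection_cache

    # Stand-in config: only the attributes read by CacheKey.from_config.
    config = SimpleNamespace(
        driver="sqlite3", host="", database=":memory:", user="", port=None
    )

    cache = get_connection_cache()
    conn_a = cache.get_sync(config, sqlite3.connect, {"database": ":memory:"})
    conn_b = cache.get_sync(config, sqlite3.connect, {"database": ":memory:"})
    assert conn_a is conn_b  # same CacheKey, so the connection is reused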
fixturify/sql_d/_strategies/_aiomysql.py CHANGED
@@ -26,17 +26,14 @@ class AiomysqlStrategy(AsyncSqlExecutionStrategy):
          """Execute SQL using aiomysql."""
          driver = self.get_driver_module()
          params = self.build_connection_params(config)
-
-         connection = await driver.connect(**params)
-         try:
-             async with connection.cursor() as cursor:
-                 # aiomysql doesn't support multi-statement by default
-                 await self._execute_statements(cursor, sql_content)
-             await connection.commit()
-         finally:
-             connection.close()
-             if hasattr(connection, "wait_closed"):
-                 await connection.wait_closed()
+
+         connection = await self.get_cached_connection_async(
+             config, lambda: driver.connect(**params)
+         )
+         async with connection.cursor() as cursor:
+             # aiomysql doesn't support multi-statement by default
+             await self._execute_statements(cursor, sql_content)
+         await connection.commit()

      async def _execute_statements(self, cursor: Any, sql_content: str) -> None:
          """
fixturify/sql_d/_strategies/_aiosqlite.py CHANGED
@@ -22,8 +22,11 @@ class AiosqliteStrategy(AsyncSqlExecutionStrategy):
          """Execute SQL using aiosqlite."""
          driver = self.get_driver_module()
          params = self.build_connection_params(config)
-
+
          database_path = params.get("database", ":memory:")
-         async with driver.connect(database_path) as connection:
-             await connection.executescript(sql_content)
-             await connection.commit()
+
+         connection = await self.get_cached_connection_async(
+             config, lambda: driver.connect(database_path)
+         )
+         await connection.executescript(sql_content)
+         await connection.commit()
fixturify/sql_d/_strategies/_asyncpg.py CHANGED
@@ -9,7 +9,11 @@ if TYPE_CHECKING:


  class AsyncpgStrategy(AsyncSqlExecutionStrategy):
-     """Execution strategy for asyncpg (PostgreSQL async)."""
+     """Execution strategy for asyncpg (PostgreSQL async).
+
+     Uses connection pools instead of single connections to support
+     concurrent operations across multiple tests.
+     """

      driver_name = "asyncpg"
      is_async = True
@@ -23,12 +27,13 @@ class AsyncpgStrategy(AsyncSqlExecutionStrategy):
      }

      async def execute_async(self, sql_content: str, config: "SqlTestConfig") -> None:
-         """Execute SQL using asyncpg."""
+         """Execute SQL using asyncpg with connection pool."""
          driver = self.get_driver_module()
          params = self.build_connection_params(config)
-
-         connection = await driver.connect(**params)
-         try:
+
+         # Use pool instead of single connection for concurrency support
+         pool = await self.get_cached_connection_async(
+             config, lambda: driver.create_pool(**params, min_size=1, max_size=5)
+         )
+         async with pool.acquire() as connection:
              await connection.execute(sql_content)
-         finally:
-             await connection.close()
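
The new docstring explains the switch from a single cached connection to a cached pool: because the pool itself is what gets reused, concurrent tests running in the same event loop each acquire their own connection instead of sharing one. A rough sketch of what that buys, assuming asyncpg is installed and a params dict with valid connection details (this is illustrative, not an excerpt from the package):

    import asyncio

    import asyncpg  # assumed installed


    async def run_concurrently(params: dict) -> None:
        # One pool, shared by every caller in this event loop.
        pool = await asyncpg.create_pool(**params, min_size=1, max_size=5)

        async def one_statement(sql: str) -> None:
            # Each acquire() hands out a distinct connection from the pool,
            # so these statements can genuinely overlap.
            async with pool.acquire() as connection:
                await connection.execute(sql)

        await asyncio.gather(
            one_statement("SELECT 1"),
            one_statement("SELECT 2"),
        )
        await pool.close()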
fixturify/sql_d/_strategies/_base.py CHANGED
@@ -1,7 +1,7 @@
  """Base classes for SQL execution strategies."""

  from abc import ABC, abstractmethod
- from typing import Any, Dict, TYPE_CHECKING
+ from typing import Any, Callable, Dict, TYPE_CHECKING

  if TYPE_CHECKING:
      from .._config import SqlTestConfig
@@ -92,6 +92,28 @@ class SqlExecutionStrategy(ABC):
                  f"Install it with: pip install {self.driver_name}"
              ) from e

+     def get_cached_connection(
+         self,
+         config: "SqlTestConfig",
+         connect_func: Callable[..., Any],
+         connect_params: Dict[str, Any],
+     ) -> Any:
+         """
+         Get a cached connection or create a new one.
+
+         Args:
+             config: Database configuration
+             connect_func: Function to create connection
+             connect_params: Parameters for connect_func
+
+         Returns:
+             Database connection (cached or newly created)
+         """
+         from .._connection_cache import get_connection_cache
+
+         cache = get_connection_cache()
+         return cache.get_sync(config, connect_func, connect_params)
+

  class AsyncSqlExecutionStrategy(SqlExecutionStrategy):
      """
@@ -116,3 +138,29 @@ class AsyncSqlExecutionStrategy(SqlExecutionStrategy):
          raise RuntimeError(
              f"Strategy {self.driver_name} is async. Use execute_async() instead."
          )
+
+     async def get_cached_connection_async(
+         self,
+         config: "SqlTestConfig",
+         connect_factory: Any,
+     ) -> Any:
+         """
+         Get a cached async connection or create a new one.
+
+         Note: Async connections are tied to the event loop. For caching to work,
+         pytest must use session-scoped event loop:
+
+             [tool.pytest.ini_options]
+             asyncio_default_fixture_loop_scope = "session"
+
+         Args:
+             config: Database configuration
+             connect_factory: Callable that returns a coroutine when called
+
+         Returns:
+             Database connection (cached or newly created)
+         """
+         from .._connection_cache import get_connection_cache
+
+         cache = get_connection_cache()
+         return await cache.get_async(config, connect_factory)
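
The note in get_cached_connection_async matters in practice: the cache keys async connections by event loop, so a per-test (function-scoped) loop defeats the caching. A self-contained sketch of that behaviour, reusing the SimpleNamespace stand-in from the earlier example and a trivial factory in place of a real driver:

    import asyncio
    from types import SimpleNamespace

    from fixturify.sql_d import get_connection_cache

    # Stand-in config; object() stands in for a driver connection because
    # get_async simply stores whatever the factory returns.
    config = SimpleNamespace(
        driver="demo", host="", database="demo", user="", port=None
    )


    async def fake_connect() -> object:
        return object()


    async def same_loop() -> None:
        cache = get_connection_cache()
        first = await cache.get_async(config, fake_connect)
        second = await cache.get_async(config, fake_connect)
        assert first is second  # same loop, same CacheKey: reused


    asyncio.run(same_loop())

    # A second asyncio.run() is a new event loop, so the cached entry is
    # invalidated and a fresh "connection" is created the first time around -
    # hence the advice to give pytest-asyncio a session-scoped loop.
    asyncio.run(same_loop())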
fixturify/sql_d/_strategies/_mysql.py CHANGED
@@ -26,18 +26,15 @@ class MysqlConnectorStrategy(SqlExecutionStrategy):
          """Execute SQL using mysql.connector."""
          driver = self.get_driver_module()
          params = self.build_connection_params(config)
-
-         connection = driver.connect(**params)
+
+         connection = self.get_cached_connection(config, driver.connect, params)
+         cursor = connection.cursor()
          try:
-             cursor = connection.cursor()
-             try:
-                 # MySQL connector doesn't support multi-statement by default
-                 self._execute_statements(cursor, sql_content, driver)
-                 connection.commit()
-             finally:
-                 cursor.close()
+             # MySQL connector doesn't support multi-statement by default
+             self._execute_statements(cursor, sql_content, driver)
+             connection.commit()
          finally:
-             connection.close()
+             cursor.close()

      def _execute_statements(self, cursor: Any, sql_content: str, driver: Any) -> None:
          """
fixturify/sql_d/_strategies/_psycopg.py CHANGED
@@ -27,9 +27,8 @@ class PsycopgStrategy(AsyncSqlExecutionStrategy):
          driver = self.get_driver_module()
          params = self.build_connection_params(config)

-         connection = await driver.AsyncConnection.connect(**params)
-         try:
-             await connection.execute(sql_content)
-             await connection.commit()
-         finally:
-             await connection.close()
+         connection = await self.get_cached_connection_async(
+             config, lambda: driver.AsyncConnection.connect(**params)
+         )
+         await connection.execute(sql_content)
+         await connection.commit()
fixturify/sql_d/_strategies/_psycopg2.py CHANGED
@@ -26,15 +26,12 @@ class Psycopg2Strategy(SqlExecutionStrategy):
          """Execute SQL using psycopg2."""
          driver = self.get_driver_module()
          params = self.build_connection_params(config)
-
-         connection = driver.connect(**params)
+
+         connection = self.get_cached_connection(config, driver.connect, params)
+         cursor = connection.cursor()
          try:
-             cursor = connection.cursor()
-             try:
-                 # psycopg2 supports multi-statement execution
-                 cursor.execute(sql_content)
-                 connection.commit()
-             finally:
-                 cursor.close()
+             # psycopg2 supports multi-statement execution
+             cursor.execute(sql_content)
+             connection.commit()
          finally:
-             connection.close()
+             cursor.close()
fixturify/sql_d/_strategies/_sqlite.py CHANGED
@@ -1,6 +1,6 @@
  """Strategy for sqlite3 driver."""

- from typing import TYPE_CHECKING
+ from typing import Any, TYPE_CHECKING

  from ._base import SqlExecutionStrategy

@@ -22,12 +22,13 @@ class Sqlite3Strategy(SqlExecutionStrategy):
          """Execute SQL using sqlite3."""
          driver = self.get_driver_module()
          params = self.build_connection_params(config)
-
+
          database_path = params.get("database", ":memory:")
-         connection = driver.connect(database_path)
-         try:
-             # SQLite supports executescript for multi-statement execution
-             connection.executescript(sql_content)
-             connection.commit()
-         finally:
-             connection.close()
+
+         def connect_sqlite() -> Any:
+             return driver.connect(database_path)
+
+         connection = self.get_cached_connection(config, connect_sqlite, {})
+         # SQLite supports executescript for multi-statement execution
+         connection.executescript(sql_content)
+         connection.commit()
fixturify-0.1.10.dist-info/METADATA → fixturify-0.1.12.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: fixturify
- Version: 0.1.10
+ Version: 0.1.12
  Summary: A collection of convenient testing utilities for Python
  Project-URL: Homepage, https://github.com/eleven-sea/pytools
  Project-URL: Repository, https://github.com/eleven-sea/pytools
fixturify-0.1.10.dist-info/RECORD → fixturify-0.1.12.dist-info/RECORD
@@ -1,4 +1,4 @@
- fixturify/__init__.py,sha256=xzimdoGamcRB-WMmfeGc4W-HBYTWQ_M_ypiqWK16_2A,485
+ fixturify/__init__.py,sha256=mVvtOZMpeQKxYJ3TR0GtY0HjT6CKi2RgWATp0ErJX-A,485
  fixturify/_utils/__init__.py,sha256=Ago2DIAS0UgTvVRxWGisxPoawDFNAB5mbjnC0hYsmNw,334
  fixturify/_utils/_constants.py,sha256=CYkqgI-ljyMcntBUP8ZCQmx7JSsLqyNxzIwNfxm2XkA,453
  fixturify/_utils/_fixture_discovery.py,sha256=u9hfoNdbLz35-X7h6fwdGuQQ8edryGNWfg_ba5i8Nq8,5185
@@ -13,12 +13,12 @@ fixturify/http_d/_mock_context.py,sha256=cqKzlaGPDuAPNF1k1M3tVUjDU9SUlgICPe0d2YJ
  fixturify/http_d/_models.py,sha256=4hz9S_OTOE55zsCewfTabaR5fDjKJjPTuO_hLcfq8bk,6372
  fixturify/http_d/_patcher.py,sha256=KTMc9pDcr7yA82hHe4aKjP9clmwSOZTwBcsiQm1S8Cc,18700
  fixturify/http_d/_player.py,sha256=4ayKgDOans7AXX8PIfWxn2nxauQBswIUSY2jvL2eIlU,7275
- fixturify/http_d/_recorder.py,sha256=VQVCxIAxeDhXo_uZ3b1Yr7TKw5-jvUEY-ZXO_0DVr0c,40633
+ fixturify/http_d/_recorder.py,sha256=mZEu41SAvvD02tOOePy2K20-fxg88OU5VpqfnXX8vk8,42600
  fixturify/http_d/_utils.py,sha256=FJFZC8IS2MzLbPo6D0CUizcJ-LYXc0fVoSJ-04bL5aQ,5799
  fixturify/http_d/_stubs/__init__.py,sha256=sFcWbdqhnHNiYLfKfrrLt6wjGPWo3J-byTDZd2z41wk,171
  fixturify/http_d/_stubs/_aiohttp.py,sha256=c-zGo78_MHfJz_2OsSv0bGnFAR1NgGkACMuzrKd-qII,6672
  fixturify/http_d/_stubs/_connection.py,sha256=Z_g4C_WWL2ErWO26xV3rJqZU48slabtX_al-_LKmGIk,16253
- fixturify/http_d/_stubs/_httpcore.py,sha256=WR_KZGInuVqoBBRML_T6bGe5LEMjbW4ElRem5pz6HiE,9007
+ fixturify/http_d/_stubs/_httpcore.py,sha256=vBpvcaf81PVwMDbL0Gxa6uvokV9k4_gGyJybocxNBRE,9473
  fixturify/http_d/_stubs/_tornado.py,sha256=Vm-hETi01xGIjVVvYmVoIOXhzSWAukae48Tzmy0afFY,3219
  fixturify/json_assert/__init__.py,sha256=eDwsoGZPMDparbzKAJ2xXorVsEVPMcbHMXMfue3a3YM,375
  fixturify/json_assert/_actual_saver.py,sha256=_BXTI2CScIUJ7UeeU1-mIKssaDrHGldlFHDtq1o_i80,2355
@@ -49,23 +49,24 @@ fixturify/object_mapper/_serializers/_sqlmodel.py,sha256=66PHxMw32rACjaYgjLHSGu_
  fixturify/read_d/__init__.py,sha256=t3f3OxBenqMN-DeEifXJ6A2_jZ4XHo4tkby9XtpIoo0,130
  fixturify/read_d/_decorator.py,sha256=7uKRZ-WkGHxSNf87qlC7u-0PfiNrlZHbnQ-jWXrk5no,7700
  fixturify/read_d/_fixture_loader.py,sha256=9pXJxLOEcdLylZup41wXuok6n7bHYExPBqvFZP2Fya0,2857
- fixturify/sql_d/__init__.py,sha256=nqjA4Bd6h1RCq_kzSVrN2A5kuv8zHqzvYEbgBRyJhEQ,241
+ fixturify/sql_d/__init__.py,sha256=Y18FgZFI_z-HUAdQ6uHOppMDZC1W5hYGZhC3ZJZ5pB0,332
  fixturify/sql_d/_config.py,sha256=W3RL1UW9WrYWNHu2FqcOW22Q_loYLi9htXsqARALH6s,762
+ fixturify/sql_d/_connection_cache.py,sha256=SEzfcLsj3aX2Rz7cVKCatol7m4cEKFEXqKuE4gw-SGI,8031
  fixturify/sql_d/_decorator.py,sha256=9ynQlODhU5m4d29DjMQMpuMG_Fd0jikhzuMKkNDXUmA,12531
  fixturify/sql_d/_driver_registry.py,sha256=Fwxt86Gv2UbnO7V_FgzaGc1UvCzd8QV5UtHiRdahlNs,3349
  fixturify/sql_d/_executor.py,sha256=iEizcgSKrShh1CHwiVjkM1Xjs1Iiw7WESnzborAQpqY,2271
  fixturify/sql_d/_fixture_discovery.py,sha256=17m3RmJvSH8ygdZoDbkm7RIdcewe5wEyIx6X_40MxjY,1635
  fixturify/sql_d/_phase.py,sha256=zQv2YwRkXSB4Kdeqrcp5BELplWHJVgQd_0wlcjS2eqY,228
  fixturify/sql_d/_strategies/__init__.py,sha256=36IdX6pFdBgZJ1-Cn3HQfwhKgKp-bB9g4qvN3KwoncQ,323
- fixturify/sql_d/_strategies/_aiomysql.py,sha256=IYG1G-yQfugSLTic7bH_iPHfMMF3gaCAF8cZrjmedDk,2179
- fixturify/sql_d/_strategies/_aiosqlite.py,sha256=GmZA2mUtV2fJYCIroqc1ttcPC6ZzksWNQmVwC7cAUEw,916
- fixturify/sql_d/_strategies/_asyncpg.py,sha256=bRgTPoof4GBMhcp-0-1a2dpt4MwGb2NggewemqeShqs,945
- fixturify/sql_d/_strategies/_base.py,sha256=BhtbqN5tQjQzM3au_EZtWawCs1AuSbFUIsyjELrReO8,3459
- fixturify/sql_d/_strategies/_mysql.py,sha256=3l7aoP9FSfG-_sUofN8q9EGvSNjuavWoPU8lkPNQIsE,2452
- fixturify/sql_d/_strategies/_psycopg.py,sha256=3P75nMDJDwx1-JlRDnnPlCHWPXFOKI9BjEfdJChuHxA,1006
- fixturify/sql_d/_strategies/_psycopg2.py,sha256=aeQRpj1k1cTtOWLFnOEaQ7IcTZ-4oV_Z4p6YHsM-iyA,1105
+ fixturify/sql_d/_strategies/_aiomysql.py,sha256=4_E0iuRJW0cEikOi4Oh0JkskViEWI6Z6BwLezTzUnSg,2068
+ fixturify/sql_d/_strategies/_aiosqlite.py,sha256=NHsnpL2RR7oEnCKwZfhsroMPPOJtj0hhGydbbAypSpk,966
+ fixturify/sql_d/_strategies/_asyncpg.py,sha256=JdLS8ckRieEwgSWpq9omQW_Vx0gxR7_ZIsFpiwqRhG4,1228
+ fixturify/sql_d/_strategies/_base.py,sha256=KskvsxFpy9DlNpmteLaZ-dgAeJ6FuBjo4qJTYlKMNQk,4957
+ fixturify/sql_d/_strategies/_mysql.py,sha256=4MG0Pn7GWNYaakqAbFkNT3xOKKJh4DJ9JshHdbJwgts,2389
+ fixturify/sql_d/_strategies/_psycopg.py,sha256=rs86H2FnSnFSCMruRF5xjg7tdMsMlYY5Ew7k_J-gpho,1003
+ fixturify/sql_d/_strategies/_psycopg2.py,sha256=2M0fmYtLxFxJXHsgxomaEEzPVS9u0TFs6u4ldIINy08,1042
  fixturify/sql_d/_strategies/_registry.py,sha256=ddQpp9cU4SHA2OylsPpMBMarLyQKmV-4Ge4X395tg1Q,3346
- fixturify/sql_d/_strategies/_sqlite.py,sha256=auW6nV1bfE8UJlk4TW5fZkcqlnr_9q6Eia5bQjE2n3w,965
- fixturify-0.1.10.dist-info/METADATA,sha256=siIGshwAVslx6ql--TW4lmqDi6_NYGd3WxK_2Vg-wcw,2979
- fixturify-0.1.10.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- fixturify-0.1.10.dist-info/RECORD,,
+ fixturify/sql_d/_strategies/_sqlite.py,sha256=ZXZwdgzcMdbrE2dUMjbBJD4m1gLec5nAM6DThs108_g,1002
+ fixturify-0.1.12.dist-info/METADATA,sha256=hrmOdh-BmfKlt_NfJnjqF_7donwxNu3GVj4zmnyMgTY,2979
+ fixturify-0.1.12.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ fixturify-0.1.12.dist-info/RECORD,,