fixturify 0.1.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. fixturify/__init__.py +21 -0
  2. fixturify/_utils/__init__.py +7 -0
  3. fixturify/_utils/_constants.py +10 -0
  4. fixturify/_utils/_fixture_discovery.py +165 -0
  5. fixturify/_utils/_path_resolver.py +135 -0
  6. fixturify/http_d/__init__.py +80 -0
  7. fixturify/http_d/_config.py +214 -0
  8. fixturify/http_d/_decorator.py +267 -0
  9. fixturify/http_d/_exceptions.py +153 -0
  10. fixturify/http_d/_fixture_discovery.py +33 -0
  11. fixturify/http_d/_matcher.py +372 -0
  12. fixturify/http_d/_mock_context.py +154 -0
  13. fixturify/http_d/_models.py +205 -0
  14. fixturify/http_d/_patcher.py +524 -0
  15. fixturify/http_d/_player.py +222 -0
  16. fixturify/http_d/_recorder.py +1350 -0
  17. fixturify/http_d/_stubs/__init__.py +8 -0
  18. fixturify/http_d/_stubs/_aiohttp.py +220 -0
  19. fixturify/http_d/_stubs/_connection.py +478 -0
  20. fixturify/http_d/_stubs/_httpcore.py +269 -0
  21. fixturify/http_d/_stubs/_tornado.py +95 -0
  22. fixturify/http_d/_utils.py +194 -0
  23. fixturify/json_assert/__init__.py +13 -0
  24. fixturify/json_assert/_actual_saver.py +67 -0
  25. fixturify/json_assert/_assert.py +173 -0
  26. fixturify/json_assert/_comparator.py +183 -0
  27. fixturify/json_assert/_diff_formatter.py +265 -0
  28. fixturify/json_assert/_normalizer.py +83 -0
  29. fixturify/object_mapper/__init__.py +5 -0
  30. fixturify/object_mapper/_deserializers/__init__.py +19 -0
  31. fixturify/object_mapper/_deserializers/_base.py +186 -0
  32. fixturify/object_mapper/_deserializers/_dataclass.py +52 -0
  33. fixturify/object_mapper/_deserializers/_plain.py +55 -0
  34. fixturify/object_mapper/_deserializers/_pydantic_v1.py +38 -0
  35. fixturify/object_mapper/_deserializers/_pydantic_v2.py +41 -0
  36. fixturify/object_mapper/_deserializers/_sqlalchemy.py +72 -0
  37. fixturify/object_mapper/_deserializers/_sqlmodel.py +43 -0
  38. fixturify/object_mapper/_detectors/__init__.py +5 -0
  39. fixturify/object_mapper/_detectors/_type_detector.py +186 -0
  40. fixturify/object_mapper/_serializers/__init__.py +19 -0
  41. fixturify/object_mapper/_serializers/_base.py +260 -0
  42. fixturify/object_mapper/_serializers/_dataclass.py +55 -0
  43. fixturify/object_mapper/_serializers/_plain.py +49 -0
  44. fixturify/object_mapper/_serializers/_pydantic_v1.py +49 -0
  45. fixturify/object_mapper/_serializers/_pydantic_v2.py +49 -0
  46. fixturify/object_mapper/_serializers/_sqlalchemy.py +70 -0
  47. fixturify/object_mapper/_serializers/_sqlmodel.py +54 -0
  48. fixturify/object_mapper/mapper.py +256 -0
  49. fixturify/read_d/__init__.py +5 -0
  50. fixturify/read_d/_decorator.py +193 -0
  51. fixturify/read_d/_fixture_loader.py +88 -0
  52. fixturify/sql_d/__init__.py +7 -0
  53. fixturify/sql_d/_config.py +30 -0
  54. fixturify/sql_d/_decorator.py +373 -0
  55. fixturify/sql_d/_driver_registry.py +133 -0
  56. fixturify/sql_d/_executor.py +82 -0
  57. fixturify/sql_d/_fixture_discovery.py +55 -0
  58. fixturify/sql_d/_phase.py +10 -0
  59. fixturify/sql_d/_strategies/__init__.py +11 -0
  60. fixturify/sql_d/_strategies/_aiomysql.py +63 -0
  61. fixturify/sql_d/_strategies/_aiosqlite.py +29 -0
  62. fixturify/sql_d/_strategies/_asyncpg.py +34 -0
  63. fixturify/sql_d/_strategies/_base.py +118 -0
  64. fixturify/sql_d/_strategies/_mysql.py +70 -0
  65. fixturify/sql_d/_strategies/_psycopg.py +35 -0
  66. fixturify/sql_d/_strategies/_psycopg2.py +40 -0
  67. fixturify/sql_d/_strategies/_registry.py +109 -0
  68. fixturify/sql_d/_strategies/_sqlite.py +33 -0
  69. fixturify-0.1.9.dist-info/METADATA +122 -0
  70. fixturify-0.1.9.dist-info/RECORD +71 -0
  71. fixturify-0.1.9.dist-info/WHEEL +4 -0
@@ -0,0 +1,269 @@
1
+ """Stubs for httpcore HTTP client.
2
+
3
+ httpcore is the low-level transport layer used by httpx.
4
+ By patching at this level, we intercept ALL httpx traffic,
5
+ including async requests in FastAPI TestClient.
6
+ """
7
+
8
+ import functools
9
+ from typing import TYPE_CHECKING, Any
10
+
11
+ from .._models import HttpRequest, HttpResponse
12
+ from .._recorder import create_request_from_httpcore, create_response_from_httpcore
13
+
14
+ if TYPE_CHECKING:
15
+ from .._mock_context import HttpMockContext
16
+
17
+
18
def _make_request(httpcore_request: Any) -> HttpRequest:
    """Build an HttpRequest model from an httpcore Request object."""
    # httpcore may expose the method as bytes; normalize to str.
    raw_method = httpcore_request.method
    http_method = raw_method.decode("ascii") if isinstance(raw_method, bytes) else raw_method

    target_url = bytes(httpcore_request.url).decode("ascii")

    # Drain the body stream into a single bytes payload.
    payload = b"".join(httpcore_request.stream)

    # Re-wrap the consumed stream so a real request can still be sent later.
    from httpcore._models import ByteStream

    httpcore_request.stream = ByteStream(payload)

    return create_request_from_httpcore(
        method=http_method,
        url=target_url,
        content=payload,
        headers=httpcore_request.headers,
    )
43
+
44
+
45
async def _make_request_async(httpcore_request: Any) -> HttpRequest:
    """Build an HttpRequest model from an httpcore Request object (async streams)."""
    raw_method = httpcore_request.method
    http_method = raw_method.decode("ascii") if isinstance(raw_method, bytes) else raw_method

    target_url = bytes(httpcore_request.url).decode("ascii")

    # Drain the async body stream into a single bytes payload.
    payload = b"".join([chunk async for chunk in httpcore_request.stream])

    # Re-wrap the consumed stream so a real request can still be sent later.
    from httpcore._models import ByteStream

    httpcore_request.stream = ByteStream(payload)

    return create_request_from_httpcore(
        method=http_method,
        url=target_url,
        content=payload,
        headers=httpcore_request.headers,
    )
69
+
70
+
71
def _build_response(response_model: HttpResponse) -> Any:
    """Build an httpcore Response from a stored HttpResponse model."""
    import gzip

    from httpcore import Response

    # Capture the original encoding before the header is filtered out below.
    encoding = response_model.headers.get(
        "content-encoding",
        response_model.headers.get("Content-Encoding", ""),
    ).lower()

    # httpcore wants headers as (bytes, bytes) pairs; drop the ones that
    # would conflict with the already-materialized body.
    skipped = ("transfer-encoding", "content-encoding")
    header_pairs = [
        (name.encode("ascii"), value.encode("ascii"))
        for name, value in response_model.headers.items()
        if name.lower() not in skipped
    ]

    payload = response_model.get_body_bytes()

    # The content-encoding header was stripped, so serve decompressed bytes.
    if encoding == "gzip" and payload:
        try:
            payload = gzip.decompress(payload)
        except (gzip.BadGzipFile, OSError):
            pass  # body was not really gzip; keep it untouched

    return Response(
        status=response_model.status,
        headers=header_pairs,
        content=payload,
    )
104
+
105
+
106
def _serialize_response(httpcore_response: Any) -> HttpResponse:
    """Create HttpResponse from httpcore Response object.

    Handles gzip decompression before serialization to ensure the body is
    stored as readable text, not binary.

    Args:
        httpcore_response: An httpcore Response whose body has already been
            materialized via read()/aread() (so .content is populated).

    Returns:
        An HttpResponse model; undecodable binary bodies are kept on the
        private ``_body_bytes`` attribute for file storage.
    """
    import gzip

    from .._models import HttpResponse

    # Body - should be available after read()/aread()
    body_bytes = httpcore_response.content

    # Convert headers, remembering any content-encoding on the way.
    headers_dict = {}
    content_encoding = ""
    for k, v in httpcore_response.headers:
        key = k.decode() if isinstance(k, bytes) else k
        val = v.decode() if isinstance(v, bytes) else v
        headers_dict[key] = val
        if key.lower() == "content-encoding":
            content_encoding = val.lower()

    # If gzip, decompress body before serialization; only drop the
    # content-encoding header when decompression actually succeeded.
    if content_encoding == "gzip" and body_bytes:
        try:
            body_bytes = gzip.decompress(body_bytes)
        except (gzip.BadGzipFile, OSError):
            pass  # Not actually gzip, use as-is
        else:
            headers_dict = {
                k: v for k, v in headers_dict.items()
                if k.lower() != "content-encoding"
            }

    # NOTE: the previous version computed a `content_type` local here that
    # was never used; it has been removed (dead code).

    # Try to decode as UTF-8 text; fall back to binary storage.
    body_str = None
    if body_bytes:
        try:
            body_str = body_bytes.decode("utf-8")
        except UnicodeDecodeError:
            # Binary content - store raw bytes for file saving
            resp = HttpResponse(
                status=httpcore_response.status,
                headers=headers_dict,
                body=None,
            )
            resp._body_bytes = body_bytes
            return resp

    return HttpResponse(
        status=httpcore_response.status,
        headers=headers_dict,
        body=body_str,
    )
163
+
164
+
165
def _read_response_body(response: Any) -> bytes:
    """Drain the response body using httpcore's own read().

    Going through the built-in reader also populates ``response._content``,
    so later ``response.content`` accesses work.
    """
    body = response.read()
    return body
171
+
172
+
173
async def _read_response_body_async(response: Any) -> bytes:
    """Drain the response body using httpcore's own aread() (async version).

    Going through the built-in reader also populates ``response._content``,
    so later ``response.content`` accesses work.
    """
    body = await response.aread()
    return body
179
+
180
+
181
def make_sync_handler(mock_context: "HttpMockContext"):
    """
    Create patched handle_request for httpcore.ConnectionPool.

    This intercepts all synchronous httpx requests (httpcore is the
    transport layer beneath httpx).

    Args:
        mock_context: Context holding the record/playback mode, recorded
            interactions, and host-exclusion rules.

    Returns:
        A function with the ``handle_request(self, request)`` signature,
        suitable for monkey-patching onto httpcore.ConnectionPool.
    """
    import httpcore
    # Captured once so the wrapper can always delegate to unpatched behavior.
    original = httpcore.ConnectionPool.handle_request

    @functools.wraps(original)
    def handle_request(self, request):
        # Convert the httpcore request into our serializable model.
        http_request = _make_request(request)

        # Excluded hosts bypass record/playback entirely and hit the network.
        if mock_context.is_host_excluded(http_request.url):
            from .._patcher import force_reset
            # NOTE(review): force_reset presumably restores the unpatched
            # transports while the real request runs - confirm in _patcher.
            with force_reset():
                return original(self, request)

        # Playback mode - a recorded response is required (play_response
        # is expected to raise when no recording matches).
        if mock_context.mode == "playback":
            return _build_response(mock_context.play_response(http_request))

        # Record mode - replay an existing recording if one matches.
        if mock_context.can_play_response_for(http_request):
            return _build_response(mock_context.play_response(http_request))

        # Record mode - make the real request and read its body.
        from .._patcher import force_reset
        with force_reset():
            response = original(self, request)
            # Read response body inside force_reset (uses same connection)
            _read_response_body(response)

        # Record the interaction
        resp_model = _serialize_response(response)
        mock_context.record(http_request, resp_model)

        # Return new response with body as content (original has consumed stream)
        return _build_response(resp_model)

    return handle_request
224
+
225
+
226
def make_async_handler(mock_context: "HttpMockContext"):
    """
    Create patched handle_async_request for httpcore.AsyncConnectionPool.

    This intercepts all asynchronous httpx requests, including those
    made from FastAPI endpoints via TestClient.

    Args:
        mock_context: Context holding the record/playback mode, recorded
            interactions, and host-exclusion rules.

    Returns:
        A coroutine function with the ``handle_async_request(self, request)``
        signature, suitable for monkey-patching onto
        httpcore.AsyncConnectionPool.
    """
    import httpcore
    # Captured once so the wrapper can always delegate to unpatched behavior.
    original = httpcore.AsyncConnectionPool.handle_async_request

    @functools.wraps(original)
    async def handle_async_request(self, request):
        # Convert the httpcore request into our serializable model.
        http_request = await _make_request_async(request)

        # Excluded hosts bypass record/playback entirely and hit the network.
        if mock_context.is_host_excluded(http_request.url):
            from .._patcher import force_reset
            with force_reset():
                return await original(self, request)

        # Playback mode - a recorded response is required (play_response
        # is expected to raise when no recording matches).
        if mock_context.mode == "playback":
            return _build_response(mock_context.play_response(http_request))

        # Record mode - replay an existing recording if one matches.
        if mock_context.can_play_response_for(http_request):
            return _build_response(mock_context.play_response(http_request))

        # Record mode - make the real request and read its body.
        from .._patcher import force_reset
        with force_reset():
            response = await original(self, request)
            # Read response body inside force_reset (uses same connection)
            await _read_response_body_async(response)

        # Record the interaction
        resp_model = _serialize_response(response)
        mock_context.record(http_request, resp_model)

        # Return new response with body as content (original has consumed stream)
        return _build_response(resp_model)

    return handle_async_request
@@ -0,0 +1,95 @@
1
+ """Stubs for tornado HTTP client.
2
+
3
+ Tornado has its own async HTTP client that doesn't use http.client,
4
+ so it needs its own patching strategy.
5
+ """
6
+
7
+ import functools
8
+ from typing import TYPE_CHECKING, Any
9
+
10
+ from .._models import HttpResponse
11
+ from .._recorder import create_request_from_tornado, create_response_from_tornado
12
+
13
+ if TYPE_CHECKING:
14
+ from .._mock_context import HttpMockContext
15
+
16
+
17
class MockTornadoResponse:
    """Mock tornado HTTPResponse for playback.

    Mimics the attribute surface of tornado.httpclient.HTTPResponse so
    callers that inspect code/reason/body/headers keep working unchanged.
    """

    def __init__(self, request: Any, response_model: HttpResponse):
        # Imported lazily so tornado stays an optional dependency.
        from tornado.httputil import HTTPHeaders

        self.request = request
        self._response_model = response_model

        self.code = response_model.status
        self.reason = self._get_reason(response_model.status)
        self.body = response_model.get_body_bytes()
        self.headers = HTTPHeaders(response_model.headers)
        self.buffer = None
        # No redirects are simulated, so the effective URL is the requested one.
        self.effective_url = request.url
        # Played-back responses never carry a transport error.
        self.error = None
        # Timing fields zeroed: no real network round-trip happened.
        self.request_time = 0.0
        self.start_time = 0.0
        self.time_info = {}

    @staticmethod
    def _get_reason(status: int) -> str:
        """Return the standard reason phrase for *status* ("Unknown" if none)."""
        from http import HTTPStatus
        try:
            return HTTPStatus(status).phrase
        except ValueError:
            return "Unknown"

    def rethrow(self):
        """Raise error if request failed (error is initialized to None here)."""
        if self.error:
            raise self.error
49
+
50
+
51
def make_tornado_fetch_handler(mock_context: "HttpMockContext", original_fetch_impl):
    """
    Create patched fetch_impl for tornado HTTP clients.

    Works for both SimpleAsyncHTTPClient and CurlAsyncHTTPClient.
    Note: fetch_impl is NOT a coroutine - it uses callbacks.

    Args:
        mock_context: Context holding record/playback mode, recordings,
            and host-exclusion rules.
        original_fetch_impl: The client's unpatched fetch_impl, used for
            excluded hosts and in record mode.
    """

    @functools.wraps(original_fetch_impl)
    def fetch_impl(self, request, callback):
        """Patched fetch implementation."""
        # Convert the tornado request into our serializable model.
        http_request = create_request_from_tornado(request)

        # Excluded hosts bypass record/playback entirely and hit the network.
        if mock_context.is_host_excluded(http_request.url):
            from .._patcher import force_reset
            with force_reset():
                return original_fetch_impl(self, request, callback)

        # In playback mode, return recorded response
        if mock_context.mode == "playback":
            response_model = mock_context.play_response(http_request)
            mock_response = MockTornadoResponse(request, response_model)

            # Deliver via the IOLoop so the callback fires asynchronously,
            # matching tornado's contract (fetch_impl must not call the
            # callback synchronously).
            from tornado.ioloop import IOLoop
            IOLoop.current().add_callback(callback, mock_response)
            return

        # Record mode - wrap the callback to record the response
        def recording_callback(response):
            # Serialize and persist the real response before forwarding it.
            resp_model = create_response_from_tornado(response)
            mock_context.record(http_request, resp_model)

            # Call original callback
            if callback:
                callback(response)

        from .._patcher import force_reset
        with force_reset():
            return original_fetch_impl(self, request, recording_callback)

    return fetch_impl
@@ -0,0 +1,194 @@
1
+ """Utility functions for HTTP recording and playback."""
2
+
3
+ import json
4
+ import mimetypes
5
+ from typing import Any, Optional, Tuple, Union
6
+
7
+
8
+ # Content types that should be stored as separate binary files
9
+ BINARY_CONTENT_TYPES = (
10
+ "image/",
11
+ "audio/",
12
+ "video/",
13
+ "application/octet-stream",
14
+ "application/pdf",
15
+ "application/zip",
16
+ "application/gzip",
17
+ "application/x-tar",
18
+ "application/x-rar",
19
+ "application/x-7z-compressed",
20
+ "application/vnd.ms-excel",
21
+ "application/vnd.openxmlformats",
22
+ "application/msword",
23
+ "font/",
24
+ )
25
+
26
+
27
def normalize_content_type(content_type: Union[str, bytes, None]) -> str:
    """Coerce a content-type value (str, bytes, or None) to a plain string."""
    if isinstance(content_type, bytes):
        # Undecodable bytes are silently dropped rather than raising.
        return content_type.decode("utf-8", errors="ignore")
    if content_type is None:
        return ""
    return content_type
34
+
35
+
36
def is_text_content_type(content_type: Union[str, bytes]) -> bool:
    """Return True when the content type looks like textual content."""
    lowered = normalize_content_type(content_type).lower()
    # Substring match so parameters like "; charset=utf-8" don't interfere.
    for marker in (
        "text/",
        "application/json",
        "application/xml",
        "application/javascript",
        "application/x-www-form-urlencoded",
    ):
        if marker in lowered:
            return True
    return False
47
+
48
+
49
def is_json_content_type(content_type: Union[str, bytes]) -> bool:
    """Return True when the content type indicates a JSON payload."""
    return "application/json" in normalize_content_type(content_type).lower()
53
+
54
+
55
def is_binary_content_type(content_type: Union[str, bytes]) -> bool:
    """Return True when the body should be persisted as a separate binary file."""
    lowered = normalize_content_type(content_type).lower()
    return any(marker in lowered for marker in BINARY_CONTENT_TYPES)
60
+
61
+
62
def try_parse_json(content: str) -> Any:
    """Parse *content* as JSON, falling back to the original value on failure."""
    try:
        parsed = json.loads(content)
    except (json.JSONDecodeError, TypeError):
        # Not valid JSON (or not a string at all) - hand back the input.
        return content
    return parsed
68
+
69
+
70
def get_file_extension(content_type: Union[str, bytes]) -> str:
    """Map a content type to a file extension, defaulting to ".bin"."""
    known_extensions = {
        "application/json": ".json",
        "application/pdf": ".pdf",
        "application/zip": ".zip",
        "application/gzip": ".gz",
        "application/xml": ".xml",
        "text/html": ".html",
        "text/plain": ".txt",
        "text/css": ".css",
        "text/javascript": ".js",
        "image/jpeg": ".jpg",
        "image/png": ".png",
        "image/gif": ".gif",
        "image/webp": ".webp",
        "image/svg+xml": ".svg",
        "audio/mpeg": ".mp3",
        "audio/wav": ".wav",
        "video/mp4": ".mp4",
        "video/webm": ".webm",
    }

    # Strip any parameters ("; charset=...") and normalize case.
    media_type = normalize_content_type(content_type).lower().split(";")[0].strip()

    explicit = known_extensions.get(media_type)
    if explicit is not None:
        return explicit

    # Fall back to the platform mimetypes registry, then a generic suffix.
    return mimetypes.guess_extension(media_type) or ".bin"
104
+
105
+
106
def serialize_body(
    body: Optional[Union[bytes, str]],
    content_type: Union[str, bytes] = "",
) -> Tuple[Any, Optional[str], bool]:
    """
    Serialize body for storage.

    For JSON content: returns parsed JSON object (not stringified)
    For text content: returns string
    For binary content: returns raw bytes and flag to save as file

    Args:
        body: Request or response body (bytes, string, or a file-like
            object exposing read(); file-likes are drained and rewound)
        content_type: Content-Type header value (str or bytes)

    Returns:
        Tuple of (body_value, encoding, is_binary_file)
        - For text/JSON: (value, None, False)
        - For binary: (bytes, None, True)
    """
    if body is None:
        return None, None, False

    # Drain file-like objects (e.g. BytesIO), then rewind so the caller
    # can still send the original stream.
    if hasattr(body, "read"):
        file_obj = body
        body = file_obj.read()
        if hasattr(file_obj, "seek"):
            file_obj.seek(0)

    # Normalize content_type to string
    content_type = normalize_content_type(content_type)

    # Declared-binary content is always stored as a separate file.
    if is_binary_content_type(content_type):
        if isinstance(body, bytes):
            return body, None, True
        if isinstance(body, str):
            return body.encode("utf-8"), None, True
        return str(body).encode("utf-8"), None, True

    # Decode bytes to text. NOTE: the previous version branched on
    # is_text_content_type() here, but both branches were byte-identical,
    # so the dead conditional has been collapsed. Undecodable bytes are
    # treated as binary regardless of the declared content type.
    if isinstance(body, bytes):
        try:
            body_str = body.decode("utf-8")
        except UnicodeDecodeError:
            return body, None, True
    else:
        body_str = body

    # Empty body
    if not body_str:
        return None, None, False

    # For JSON content, parse and store as native JSON
    if is_json_content_type(content_type):
        return try_parse_json(body_str), None, False

    # Return as string for non-JSON text content
    return body_str, None, False
173
+
174
+
175
def normalize_headers(headers: Any) -> dict:
    """
    Normalize headers to a dict with string keys and values.

    Handles:
    - Dict with bytes keys/values
    - List of tuples
    - Dict-like objects with items() method
    """
    if headers is None:
        return {}

    pairs = headers.items() if hasattr(headers, "items") else headers

    def _as_str(value):
        # Bytes are decoded; everything else is stringified.
        return value.decode("utf-8") if isinstance(value, bytes) else str(value)

    # Later duplicates win, same as the original insertion loop.
    return {_as_str(key): _as_str(val) for key, val in pairs}
@@ -0,0 +1,13 @@
1
+ """JsonAssert module for JSON comparison assertions in testing."""
2
+
3
+ # Check for deepdiff dependency at import time
4
+ try:
5
+ import deepdiff # noqa: F401
6
+ except ImportError:
7
+ raise ImportError(
8
+ "JsonAssert requires 'deepdiff'. Install it with: pip install pytools[json-assert]"
9
+ )
10
+
11
+ from fixturify.json_assert._assert import JsonAssert
12
+
13
+ __all__ = ["JsonAssert"]
@@ -0,0 +1,67 @@
1
+ """Actual saver for saving actual JSON data on assertion failure."""
2
+
3
+ import json
4
+ from pathlib import Path
5
+ from typing import Tuple, Union
6
+
7
+ from fixturify._utils._constants import ENCODING
8
+
9
+
10
class _ActualSaver:
    """Saves actual JSON data to ACTUAL folder on assertion failure."""

    # Name of the sibling folder that mirrors the expected fixture files.
    ACTUAL_FOLDER_NAME = "ACTUAL"

    @staticmethod
    def save(
        actual_data: Union[dict, list], expected_file_path: Path
    ) -> Tuple[Path, bool]:
        """
        Save actual JSON data to ACTUAL folder, mirroring the expected file path.

        Given expected file: /path/to/fixtures/expected_user.json
        Creates actual file: /path/to/fixtures/ACTUAL/expected_user.json

        Args:
            actual_data: The normalized actual data (dict or list)
            expected_file_path: Absolute path to the expected JSON file

        Returns:
            Tuple of (path to saved file, success flag). The path is
            returned even when saving fails so it can still be included
            in error messages.

        Note:
            - Creates the ACTUAL folder if it doesn't exist
            - Overwrites any existing actual file
            - Saves JSON with indent=2 for readability
        """
        target = _ActualSaver._get_actual_path(expected_file_path)

        try:
            # Ensure the ACTUAL directory exists, then (over)write the
            # file with readable, indented JSON.
            target.parent.mkdir(parents=True, exist_ok=True)
            with open(target, "w", encoding=ENCODING) as handle:
                json.dump(actual_data, handle, indent=2, ensure_ascii=False)
        except Exception:
            # Saving is best-effort (permissions, full disk, ...); report
            # failure via the flag rather than raising mid-assertion.
            return target, False

        return target, True

    @staticmethod
    def _get_actual_path(expected_file_path: Path) -> Path:
        """
        Calculate the ACTUAL folder path based on expected file path.

        Example:
            Input: /project/tests/fixtures/expected_user.json
            Output: /project/tests/fixtures/ACTUAL/expected_user.json
        """
        return (
            expected_file_path.parent
            / _ActualSaver.ACTUAL_FOLDER_NAME
            / expected_file_path.name
        )