fixturify-0.1.9-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- fixturify/__init__.py +21 -0
- fixturify/_utils/__init__.py +7 -0
- fixturify/_utils/_constants.py +10 -0
- fixturify/_utils/_fixture_discovery.py +165 -0
- fixturify/_utils/_path_resolver.py +135 -0
- fixturify/http_d/__init__.py +80 -0
- fixturify/http_d/_config.py +214 -0
- fixturify/http_d/_decorator.py +267 -0
- fixturify/http_d/_exceptions.py +153 -0
- fixturify/http_d/_fixture_discovery.py +33 -0
- fixturify/http_d/_matcher.py +372 -0
- fixturify/http_d/_mock_context.py +154 -0
- fixturify/http_d/_models.py +205 -0
- fixturify/http_d/_patcher.py +524 -0
- fixturify/http_d/_player.py +222 -0
- fixturify/http_d/_recorder.py +1350 -0
- fixturify/http_d/_stubs/__init__.py +8 -0
- fixturify/http_d/_stubs/_aiohttp.py +220 -0
- fixturify/http_d/_stubs/_connection.py +478 -0
- fixturify/http_d/_stubs/_httpcore.py +269 -0
- fixturify/http_d/_stubs/_tornado.py +95 -0
- fixturify/http_d/_utils.py +194 -0
- fixturify/json_assert/__init__.py +13 -0
- fixturify/json_assert/_actual_saver.py +67 -0
- fixturify/json_assert/_assert.py +173 -0
- fixturify/json_assert/_comparator.py +183 -0
- fixturify/json_assert/_diff_formatter.py +265 -0
- fixturify/json_assert/_normalizer.py +83 -0
- fixturify/object_mapper/__init__.py +5 -0
- fixturify/object_mapper/_deserializers/__init__.py +19 -0
- fixturify/object_mapper/_deserializers/_base.py +186 -0
- fixturify/object_mapper/_deserializers/_dataclass.py +52 -0
- fixturify/object_mapper/_deserializers/_plain.py +55 -0
- fixturify/object_mapper/_deserializers/_pydantic_v1.py +38 -0
- fixturify/object_mapper/_deserializers/_pydantic_v2.py +41 -0
- fixturify/object_mapper/_deserializers/_sqlalchemy.py +72 -0
- fixturify/object_mapper/_deserializers/_sqlmodel.py +43 -0
- fixturify/object_mapper/_detectors/__init__.py +5 -0
- fixturify/object_mapper/_detectors/_type_detector.py +186 -0
- fixturify/object_mapper/_serializers/__init__.py +19 -0
- fixturify/object_mapper/_serializers/_base.py +260 -0
- fixturify/object_mapper/_serializers/_dataclass.py +55 -0
- fixturify/object_mapper/_serializers/_plain.py +49 -0
- fixturify/object_mapper/_serializers/_pydantic_v1.py +49 -0
- fixturify/object_mapper/_serializers/_pydantic_v2.py +49 -0
- fixturify/object_mapper/_serializers/_sqlalchemy.py +70 -0
- fixturify/object_mapper/_serializers/_sqlmodel.py +54 -0
- fixturify/object_mapper/mapper.py +256 -0
- fixturify/read_d/__init__.py +5 -0
- fixturify/read_d/_decorator.py +193 -0
- fixturify/read_d/_fixture_loader.py +88 -0
- fixturify/sql_d/__init__.py +7 -0
- fixturify/sql_d/_config.py +30 -0
- fixturify/sql_d/_decorator.py +373 -0
- fixturify/sql_d/_driver_registry.py +133 -0
- fixturify/sql_d/_executor.py +82 -0
- fixturify/sql_d/_fixture_discovery.py +55 -0
- fixturify/sql_d/_phase.py +10 -0
- fixturify/sql_d/_strategies/__init__.py +11 -0
- fixturify/sql_d/_strategies/_aiomysql.py +63 -0
- fixturify/sql_d/_strategies/_aiosqlite.py +29 -0
- fixturify/sql_d/_strategies/_asyncpg.py +34 -0
- fixturify/sql_d/_strategies/_base.py +118 -0
- fixturify/sql_d/_strategies/_mysql.py +70 -0
- fixturify/sql_d/_strategies/_psycopg.py +35 -0
- fixturify/sql_d/_strategies/_psycopg2.py +40 -0
- fixturify/sql_d/_strategies/_registry.py +109 -0
- fixturify/sql_d/_strategies/_sqlite.py +33 -0
- fixturify-0.1.9.dist-info/METADATA +122 -0
- fixturify-0.1.9.dist-info/RECORD +71 -0
- fixturify-0.1.9.dist-info/WHEEL +4 -0

fixturify/object_mapper/_detectors/_type_detector.py
@@ -0,0 +1,186 @@
+"""Type detection for determining object types."""
+
+import json
+from dataclasses import is_dataclass
+from enum import Enum, auto
+from typing import Any, Type
+
+
+class ObjectType(Enum):
+    """Enumeration of supported object types."""
+
+    JSON_STRING = auto()
+    DICT = auto()
+    COLLECTION = auto()
+    SQLMODEL = auto()
+    SQLALCHEMY = auto()
+    PYDANTIC_V2 = auto()
+    PYDANTIC_V1 = auto()
+    DATACLASS = auto()
+    PLAIN_OBJECT = auto()
+    PRIMITIVE = auto()
+
+
+class _TypeDetector:
+    """Detects the type of input data or class."""
+
+    # Primitive types that should be serialized directly
+    PRIMITIVE_TYPES = (str, int, float, bool, type(None))
+
+    @staticmethod
+    def detect(data: Any) -> ObjectType:
+        """
+        Detect the type of input data.
+
+        Detection order is critical for correct handling of hybrid types like SQLModel.
+
+        Args:
+            data: Any input data
+
+        Returns:
+            ObjectType enum value
+        """
+        # 1. Check for primitive types first
+        if isinstance(data, _TypeDetector.PRIMITIVE_TYPES):
+            if isinstance(data, str):
+                # Check if it's a valid JSON string
+                if _TypeDetector._is_json_string(data):
+                    return ObjectType.JSON_STRING
+            return ObjectType.PRIMITIVE
+
+        # 2. Check for dict
+        if isinstance(data, dict):
+            return ObjectType.DICT
+
+        # 3. Check for collections (list, tuple, set)
+        if isinstance(data, (list, tuple, set, frozenset)):
+            return ObjectType.COLLECTION
+
+        # 4. Check for SQLModel (must be before SQLAlchemy and Pydantic v2)
+        # SQLModel has both __tablename__ (from SQLAlchemy) and model_fields (from Pydantic v2)
+        if _TypeDetector._is_sqlmodel(data):
+            return ObjectType.SQLMODEL
+
+        # 5. Check for SQLAlchemy
+        if _TypeDetector._is_sqlalchemy(data):
+            return ObjectType.SQLALCHEMY
+
+        # 6. Check for Pydantic v2
+        if _TypeDetector._is_pydantic_v2(data):
+            return ObjectType.PYDANTIC_V2
+
+        # 7. Check for Pydantic v1
+        if _TypeDetector._is_pydantic_v1(data):
+            return ObjectType.PYDANTIC_V1
+
+        # 8. Check for dataclass
+        if is_dataclass(data) and not isinstance(data, type):
+            return ObjectType.DATACLASS
+
+        # 9. Fallback to plain object
+        return ObjectType.PLAIN_OBJECT
+
+    @staticmethod
+    def detect_class(cls: Type) -> ObjectType:
+        """
+        Detect the type from a class definition.
+
+        Args:
+            cls: A class type
+
+        Returns:
+            ObjectType enum value
+        """
+        # Check for SQLModel class
+        if _TypeDetector._is_sqlmodel_class(cls):
+            return ObjectType.SQLMODEL
+
+        # Check for SQLAlchemy class
+        if _TypeDetector._is_sqlalchemy_class(cls):
+            return ObjectType.SQLALCHEMY
+
+        # Check for Pydantic v2 class
+        if _TypeDetector._is_pydantic_v2_class(cls):
+            return ObjectType.PYDANTIC_V2
+
+        # Check for Pydantic v1 class
+        if _TypeDetector._is_pydantic_v1_class(cls):
+            return ObjectType.PYDANTIC_V1
+
+        # Check for dataclass
+        if is_dataclass(cls):
+            return ObjectType.DATACLASS
+
+        # Check for Enum
+        if isinstance(cls, type) and issubclass(cls, Enum):
+            return ObjectType.PRIMITIVE
+
+        # Fallback to plain object
+        return ObjectType.PLAIN_OBJECT
+
+    @staticmethod
+    def _is_json_string(data: str) -> bool:
+        """Check if a string is valid JSON.
+
+        This detects any valid JSON including primitives like "null", "true",
+        "123" and strings like '"active"'. The ObjectMapper.to_object() method
+        handles the parsed result appropriately based on target class.
+        """
+        try:
+            json.loads(data)
+            return True
+        except (json.JSONDecodeError, ValueError):
+            return False
+
+    @staticmethod
+    def _is_sqlmodel(obj: Any) -> bool:
+        """Check if object is a SQLModel instance."""
+        # SQLModel has both __tablename__ and model_fields
+        obj_type = type(obj)
+        return hasattr(obj_type, "__tablename__") and hasattr(obj_type, "model_fields")
+
+    @staticmethod
+    def _is_sqlmodel_class(cls: Type) -> bool:
+        """Check if class is a SQLModel class."""
+        return hasattr(cls, "__tablename__") and hasattr(cls, "model_fields")
+
+    @staticmethod
+    def _is_sqlalchemy(obj: Any) -> bool:
+        """Check if object is a SQLAlchemy model instance."""
+        obj_type = type(obj)
+        return hasattr(obj_type, "__tablename__") and hasattr(obj_type, "__mapper__")
+
+    @staticmethod
+    def _is_sqlalchemy_class(cls: Type) -> bool:
+        """Check if class is a SQLAlchemy model class."""
+        return hasattr(cls, "__tablename__") and hasattr(cls, "__mapper__")
+
+    @staticmethod
+    def _is_pydantic_v2(obj: Any) -> bool:
+        """Check if object is a Pydantic v2 model instance."""
+        obj_type = type(obj)
+        return hasattr(obj_type, "model_fields")
+
+    @staticmethod
+    def _is_pydantic_v2_class(cls: Type) -> bool:
+        """Check if class is a Pydantic v2 model class."""
+        return hasattr(cls, "model_fields")
+
+    @staticmethod
+    def _is_pydantic_v1(obj: Any) -> bool:
+        """Check if object is a Pydantic v1 model instance."""
+        obj_type = type(obj)
+        return (
+            hasattr(obj_type, "__fields__")
+            and hasattr(obj_type, "schema")
+            and callable(getattr(obj_type, "schema", None))
+        )
+
+    @staticmethod
+    def _is_pydantic_v1_class(cls: Type) -> bool:
+        """Check if class is a Pydantic v1 model class."""
+        return (
+            hasattr(cls, "__fields__")
+            and hasattr(cls, "schema")
+            and callable(getattr(cls, "schema", None))
+        )
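
The detection order above resolves hybrid types first (a SQLModel class would also satisfy the SQLAlchemy and Pydantic v2 probes) before falling back to DATACLASS and PLAIN_OBJECT. A minimal usage sketch, assuming the wheel is installed; _TypeDetector and ObjectType are private names taken from this diff and are imported straight from the module, for illustration only:

# Illustration only: exercises the detection order documented in detect().
from dataclasses import dataclass

from fixturify.object_mapper._detectors._type_detector import ObjectType, _TypeDetector


@dataclass
class User:
    name: str


assert _TypeDetector.detect('{"name": "Ada"}') is ObjectType.JSON_STRING  # parses as JSON
assert _TypeDetector.detect("Ada") is ObjectType.PRIMITIVE                # plain, non-JSON string
assert _TypeDetector.detect({"name": "Ada"}) is ObjectType.DICT
assert _TypeDetector.detect([1, 2, 3]) is ObjectType.COLLECTION
assert _TypeDetector.detect(User(name="Ada")) is ObjectType.DATACLASS
assert _TypeDetector.detect_class(User) is ObjectType.DATACLASS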

fixturify/object_mapper/_serializers/__init__.py
@@ -0,0 +1,19 @@
+"""Serializer implementations for different object types."""
+
+from fixturify.object_mapper._serializers._base import _BaseSerializer
+from fixturify.object_mapper._serializers._dataclass import _DataclassSerializer
+from fixturify.object_mapper._serializers._plain import _PlainObjectSerializer
+from fixturify.object_mapper._serializers._pydantic_v1 import _PydanticV1Serializer
+from fixturify.object_mapper._serializers._pydantic_v2 import _PydanticV2Serializer
+from fixturify.object_mapper._serializers._sqlalchemy import _SQLAlchemySerializer
+from fixturify.object_mapper._serializers._sqlmodel import _SQLModelSerializer
+
+__all__ = [
+    "_BaseSerializer",
+    "_DataclassSerializer",
+    "_PlainObjectSerializer",
+    "_PydanticV1Serializer",
+    "_PydanticV2Serializer",
+    "_SQLAlchemySerializer",
+    "_SQLModelSerializer",
+]

fixturify/object_mapper/_serializers/_base.py
@@ -0,0 +1,260 @@
+"""Base serializer class for all serializers."""
+
+from abc import ABC, abstractmethod
+from datetime import datetime, date, time
+from enum import Enum
+from typing import Any, Dict, TYPE_CHECKING
+from uuid import UUID
+
+from fixturify._utils._constants import MAX_DEPTH
+
+if TYPE_CHECKING:
+    pass
+
+
+class _BaseSerializer(ABC):
+    """Abstract base class for all serializers."""
+
+    # Reference path delimiter for $ref
+    REF_DELIMITER = "/"
+
+    # Class-level serializer registry (set by ObjectMapper)
+    _serializer_registry: Dict[Any, type] = {}
+
+    def __init__(self):
+        """Initialize the serializer with tracking state."""
+        self._visited: Dict[int, str] = {}  # id -> json path
+        self._current_depth: int = 0
+        self._max_depth: int = MAX_DEPTH
+
+    def reset(self):
+        """Reset the serializer state for a new serialization."""
+        self._visited.clear()
+        self._current_depth = 0
+
+    @classmethod
+    def set_serializer_registry(cls, registry: Dict[Any, type]) -> None:
+        """Set the serializer registry for nested object routing."""
+        cls._serializer_registry = registry
+
+    @abstractmethod
+    def serialize(self, obj: Any) -> dict:
+        """
+        Convert an object to a dictionary.
+
+        Args:
+            obj: The object to serialize
+
+        Returns:
+            Dictionary representation of the object
+        """
+        pass
+
+    def _serialize_with_path(self, obj: Any, path: str = "#") -> Any:
+        """
+        Serialize an object while tracking the JSON path for circular reference detection.
+
+        Args:
+            obj: The object to serialize
+            path: Current JSON path (default: root "#")
+
+        Returns:
+            Serialized value (could be dict, list, primitive, or $ref)
+        """
+        return self._serialize_value(obj, path)
+
+    def _serialize_value(self, value: Any, path: str) -> Any:
+        """
+        Serialize a single value, handling nested objects and collections.
+
+        Args:
+            value: The value to serialize
+            path: Current JSON path
+
+        Returns:
+            Serialized value
+        """
+        # Increment depth for complex types
+        self._current_depth += 1
+
+        if self._current_depth > self._max_depth:
+            raise ValueError(
+                f"Maximum serialization depth ({self._max_depth}) exceeded at path: {path}"
+            )
+
+        try:
+            # Handle None
+            if value is None:
+                return None
+
+            # Handle primitives
+            if isinstance(value, (str, int, float, bool)):
+                return value
+
+            # Handle special types
+            if isinstance(value, datetime):
+                return value.isoformat()
+
+            if isinstance(value, date):
+                return value.isoformat()
+
+            if isinstance(value, time):
+                return value.isoformat()
+
+            if isinstance(value, UUID):
+                return str(value)
+
+            if isinstance(value, Enum):
+                return value.value
+
+            # Handle bytes
+            if isinstance(value, bytes):
+                return value.decode("utf-8", errors="replace")
+
+            # Handle dict
+            if isinstance(value, dict):
+                return self._serialize_dict(value, path)
+
+            # Handle collections (list, tuple, set, frozenset)
+            if isinstance(value, (list, tuple, set, frozenset)):
+                return self._serialize_collection(value, path)
+
+            # Handle complex objects - check for circular reference
+            obj_id = id(value)
+            if obj_id in self._visited:
+                # Return a JSON reference
+                return {"$ref": self._visited[obj_id]}
+
+            # Mark as visited with current path
+            self._visited[obj_id] = path
+
+            # Route to appropriate serializer based on object type
+            return self._serialize_nested_object(value, path)
+        finally:
+            self._current_depth -= 1
+
+    def _serialize_nested_object(self, value: Any, path: str) -> dict:
+        """
+        Serialize a nested object using the appropriate type-specific serializer.
+
+        This method detects the nested object's type and routes to the correct
+        serializer, ensuring mixed-type object graphs are serialized correctly.
+
+        Args:
+            value: The nested object to serialize
+            path: Current JSON path
+
+        Returns:
+            Dictionary representation of the nested object
+        """
+        # Import here to avoid circular imports
+        from .._detectors import _TypeDetector
+
+        # Detect the nested object's type
+        obj_type = _TypeDetector.detect(value)
+
+        # If we have a registry and this type has a specific serializer, use it
+        if self._serializer_registry and obj_type in self._serializer_registry:
+            serializer_class = self._serializer_registry[obj_type]
+            # Create a new serializer but share state for circular reference detection
+            nested_serializer = serializer_class()
+            nested_serializer._visited = self._visited
+            nested_serializer._current_depth = self._current_depth
+            nested_serializer._max_depth = self._max_depth
+            return nested_serializer._serialize_object(value, path)
+
+        # Fallback to current serializer's _serialize_object
+        return self._serialize_object(value, path)
+
+    def _serialize_dict(self, d: dict, path: str) -> dict:
+        """Serialize a dictionary."""
+        result = {}
+        for key, value in d.items():
+            item_path = f"{path}/{key}"
+            result[key] = self._serialize_value(value, item_path)
+        return result
+
+    def _serialize_collection(self, collection: Any, path: str) -> list:
+        """Serialize a collection (list, tuple, set, frozenset).
+
+        For sets and frozensets, the output is sorted by serialized representation
+        to ensure deterministic output for stable tests and diffs.
+        """
+        result = []
+        for i, item in enumerate(collection):
+            item_path = f"{path}/{i}"
+            result.append(self._serialize_value(item, item_path))
+
+        # Sort sets and frozensets for deterministic output
+        if isinstance(collection, (set, frozenset)):
+            try:
+                # Try to sort by the serialized values
+                # For primitives, this sorts them directly
+                # For complex objects, this sorts by their string representation
+                result = sorted(result, key=lambda x: (type(x).__name__, str(x)))
+            except TypeError:
+                # If items are not comparable at all, leave unsorted
+                pass
+
+        return result
+
+    def _serialize_object(self, obj: Any, path: str) -> dict:
+        """
+        Serialize a complex object. Override in subclasses for specific handling.
+
+        Args:
+            obj: The object to serialize
+            path: Current JSON path
+
+        Returns:
+            Dictionary representation
+        """
+        # Default implementation - get object attributes
+        result = {}
+        for attr_name in self._get_serializable_attributes(obj):
+            value = getattr(obj, attr_name, None)
+            item_path = f"{path}/{attr_name}"
+            result[attr_name] = self._serialize_value(value, item_path)
+        return result
+
+    def _get_serializable_attributes(self, obj: Any) -> list:
+        """
+        Get list of attribute names that should be serialized.
+
+        Rules:
+        - Single underscore prefix (_attr): ARE serialized
+        - Double underscore dunder (__attr__): ARE NOT serialized
+
+        Args:
+            obj: The object to inspect
+
+        Returns:
+            List of attribute names to serialize
+        """
+        attributes = []
+        for attr_name in dir(obj):
+            # Skip dunder attributes
+            if attr_name.startswith("__") and attr_name.endswith("__"):
+                continue
+
+            # Skip methods and callables
+            attr_value = getattr(obj, attr_name, None)
+            if callable(attr_value) and not isinstance(attr_value, property):
+                continue
+
+            # Skip class-level attributes that are not instance-specific
+            if not hasattr(obj, "__dict__") or attr_name not in obj.__dict__:
+                # Also check __slots__ if present
+                if hasattr(type(obj), "__slots__"):
+                    if attr_name not in type(obj).__slots__:
+                        continue
+                else:
+                    continue
+
+            attributes.append(attr_name)
+
+        return attributes
+
+    def _is_dunder(self, name: str) -> bool:
+        """Check if a name is a dunder (double underscore) attribute."""
+        return name.startswith("__") and name.endswith("__")
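
The nested-object routing in _serialize_nested_object relies on the class-level registry that, per the comment above, is installed by ObjectMapper; the actual mapping is not part of this diff. A sketch of how such a registry could be wired, assuming it is keyed by the ObjectType values that _TypeDetector.detect() returns (illustration only, not the package's own wiring):

# Hypothetical wiring; fixturify's ObjectMapper presumably does something similar.
from fixturify.object_mapper._detectors._type_detector import ObjectType
from fixturify.object_mapper._serializers import (
    _BaseSerializer,
    _DataclassSerializer,
    _PlainObjectSerializer,
    _PydanticV2Serializer,
)

_BaseSerializer.set_serializer_registry(
    {
        ObjectType.DATACLASS: _DataclassSerializer,       # assumed mapping
        ObjectType.PYDANTIC_V2: _PydanticV2Serializer,    # assumed mapping
        ObjectType.PLAIN_OBJECT: _PlainObjectSerializer,  # assumed mapping
    }
)

# From here on, any serializer that reaches a nested object of one of these types
# instantiates the matching class and shares its _visited / depth counters with it,
# so circular references and MAX_DEPTH are tracked across the whole object graph.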

fixturify/object_mapper/_serializers/_dataclass.py
@@ -0,0 +1,55 @@
+"""Dataclass serializer."""
+
+from dataclasses import fields, is_dataclass
+from typing import Any
+
+from fixturify.object_mapper._serializers._base import _BaseSerializer
+
+
+class _DataclassSerializer(_BaseSerializer):
+    """Serializer for dataclass objects."""
+
+    def serialize(self, obj: Any) -> dict:
+        """
+        Serialize a dataclass object to a dictionary.
+
+        Args:
+            obj: A dataclass instance
+
+        Returns:
+            Dictionary representation of the dataclass
+
+        Raises:
+            TypeError: If obj is not a dataclass instance
+        """
+        if not is_dataclass(obj) or isinstance(obj, type):
+            raise TypeError(f"Expected a dataclass instance, got {type(obj)}")
+
+        self.reset()
+        return self._serialize_with_path(obj, "#")
+
+    def _serialize_object(self, obj: Any, path: str) -> dict:
+        """Serialize a dataclass object."""
+        # Check if this is a dataclass
+        if is_dataclass(obj) and not isinstance(obj, type):
+            return self._serialize_dataclass(obj, path)
+
+        # Fall back to base implementation for nested non-dataclass objects
+        return super()._serialize_object(obj, path)
+
+    def _serialize_dataclass(self, obj: Any, path: str) -> dict:
+        """Serialize a dataclass using its fields."""
+        result = {}
+
+        for field in fields(obj):
+            field_name = field.name
+
+            # Skip dunder fields (though unlikely in dataclasses)
+            if self._is_dunder(field_name):
+                continue
+
+            value = getattr(obj, field_name)
+            item_path = f"{path}/{field_name}"
+            result[field_name] = self._serialize_value(value, item_path)
+
+        return result
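
A small usage sketch for _DataclassSerializer, assuming the wheel is installed. Field values are routed back through _BaseSerializer._serialize_value, so datetimes, enums, and nested collections come out as JSON-friendly primitives:

from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from typing import List

from fixturify.object_mapper._serializers._dataclass import _DataclassSerializer


class Status(Enum):
    ACTIVE = "active"


@dataclass
class Order:
    id: int
    status: Status
    created_at: datetime
    tags: List[str] = field(default_factory=list)


order = Order(id=1, status=Status.ACTIVE, created_at=datetime(2024, 1, 1, 12, 0), tags=["new"])
print(_DataclassSerializer().serialize(order))
# expected: {'id': 1, 'status': 'active', 'created_at': '2024-01-01T12:00:00', 'tags': ['new']}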

fixturify/object_mapper/_serializers/_plain.py
@@ -0,0 +1,49 @@
+"""Plain Python object serializer."""
+
+from typing import Any
+
+from fixturify.object_mapper._serializers._base import _BaseSerializer
+
+
+class _PlainObjectSerializer(_BaseSerializer):
+    """Serializer for plain Python objects."""
+
+    def serialize(self, obj: Any) -> dict:
+        """
+        Serialize a plain Python object to a dictionary.
+
+        Args:
+            obj: A plain Python object with __dict__ or __slots__
+
+        Returns:
+            Dictionary representation of the object
+        """
+        self.reset()
+        return self._serialize_with_path(obj, "#")
+
+    def _serialize_object(self, obj: Any, path: str) -> dict:
+        """Serialize a plain Python object."""
+        result = {}
+
+        # Try to get attributes from __dict__
+        if hasattr(obj, "__dict__"):
+            for attr_name, value in obj.__dict__.items():
+                # Skip dunder attributes
+                if self._is_dunder(attr_name):
+                    continue
+                item_path = f"{path}/{attr_name}"
+                result[attr_name] = self._serialize_value(value, item_path)
+
+        # Also check __slots__ if present
+        if hasattr(type(obj), "__slots__"):
+            for slot_name in type(obj).__slots__:
+                if self._is_dunder(slot_name):
+                    continue
+                if slot_name in result:
+                    continue  # Already added from __dict__
+                if hasattr(obj, slot_name):
+                    value = getattr(obj, slot_name)
+                    item_path = f"{path}/{slot_name}"
+                    result[slot_name] = self._serialize_value(value, item_path)
+
+        return result
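
A sketch of the circular-reference handling _PlainObjectSerializer inherits from _BaseSerializer, assuming the wheel is installed: the second time an object is reached, its previously recorded JSON path is emitted as a {"$ref": ...} marker instead of recursing indefinitely:

from fixturify.object_mapper._serializers._plain import _PlainObjectSerializer


class Node:
    def __init__(self, name):
        self.name = name
        self.parent = None


root = Node("root")
child = Node("child")
child.parent = root
root.child = child  # cycle: root -> child -> root

print(_PlainObjectSerializer().serialize(root))
# expected: {'name': 'root', 'parent': None,
#            'child': {'name': 'child', 'parent': {'$ref': '#'}}}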

fixturify/object_mapper/_serializers/_pydantic_v1.py
@@ -0,0 +1,49 @@
+"""Pydantic v1 model serializer."""
+
+from typing import Any
+
+from fixturify.object_mapper._serializers._base import _BaseSerializer
+
+
+class _PydanticV1Serializer(_BaseSerializer):
+    """Serializer for Pydantic v1 models."""
+
+    def serialize(self, obj: Any) -> dict:
+        """
+        Serialize a Pydantic v1 model to a dictionary.
+
+        Args:
+            obj: A Pydantic v1 model instance
+
+        Returns:
+            Dictionary representation of the model
+        """
+        self.reset()
+        return self._serialize_with_path(obj, "#")
+
+    def _serialize_object(self, obj: Any, path: str) -> dict:
+        """Serialize a Pydantic v1 model."""
+        # Check if this is a Pydantic v1 model
+        if hasattr(type(obj), "__fields__") and hasattr(type(obj), "schema"):
+            return self._serialize_pydantic_v1(obj, path)
+
+        # Fall back to base implementation for nested non-Pydantic objects
+        return super()._serialize_object(obj, path)
+
+    def _serialize_pydantic_v1(self, obj: Any, path: str) -> dict:
+        """Serialize a Pydantic v1 model using its fields."""
+        result = {}
+
+        # Get fields from __fields__
+        model_fields = getattr(type(obj), "__fields__", {})
+
+        for field_name in model_fields:
+            # Skip dunder fields
+            if self._is_dunder(field_name):
+                continue
+
+            value = getattr(obj, field_name)
+            item_path = f"{path}/{field_name}"
+            result[field_name] = self._serialize_value(value, item_path)
+
+        return result

fixturify/object_mapper/_serializers/_pydantic_v2.py
@@ -0,0 +1,49 @@
+"""Pydantic v2 model serializer."""
+
+from typing import Any
+
+from fixturify.object_mapper._serializers._base import _BaseSerializer
+
+
+class _PydanticV2Serializer(_BaseSerializer):
+    """Serializer for Pydantic v2 models."""
+
+    def serialize(self, obj: Any) -> dict:
+        """
+        Serialize a Pydantic v2 model to a dictionary.
+
+        Args:
+            obj: A Pydantic v2 model instance
+
+        Returns:
+            Dictionary representation of the model
+        """
+        self.reset()
+        return self._serialize_with_path(obj, "#")
+
+    def _serialize_object(self, obj: Any, path: str) -> dict:
+        """Serialize a Pydantic v2 model."""
+        # Check if this is a Pydantic v2 model
+        if hasattr(type(obj), "model_fields"):
+            return self._serialize_pydantic_v2(obj, path)
+
+        # Fall back to base implementation for nested non-Pydantic objects
+        return super()._serialize_object(obj, path)
+
+    def _serialize_pydantic_v2(self, obj: Any, path: str) -> dict:
+        """Serialize a Pydantic v2 model using its model_fields."""
+        result = {}
+
+        # Get fields from model_fields
+        model_fields = getattr(type(obj), "model_fields", {})
+
+        for field_name in model_fields:
+            # Skip dunder fields
+            if self._is_dunder(field_name):
+                continue
+
+            value = getattr(obj, field_name)
+            item_path = f"{path}/{field_name}"
+            result[field_name] = self._serialize_value(value, item_path)
+
+        return result
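
A usage sketch for _PydanticV2Serializer, assuming pydantic>=2 and the wheel are installed; the v1 serializer above behaves the same way but walks the legacy __fields__ mapping instead of model_fields:

from datetime import date

from pydantic import BaseModel

from fixturify.object_mapper._serializers._pydantic_v2 import _PydanticV2Serializer


class Invoice(BaseModel):
    number: str
    issued_on: date


invoice = Invoice(number="INV-001", issued_on=date(2024, 1, 31))
print(_PydanticV2Serializer().serialize(invoice))
# expected: {'number': 'INV-001', 'issued_on': '2024-01-31'}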