compressedfhir 1.0.2__py3-none-any.whl → 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of compressedfhir might be problematic — click here for more details.
- compressedfhir/fhir/fhir_bundle.py +4 -3
- compressedfhir/fhir/fhir_bundle_entry.py +1 -1
- compressedfhir/fhir/fhir_bundle_entry_response.py +0 -1
- compressedfhir/fhir/fhir_resource.py +37 -27
- compressedfhir/fhir/fhir_resource_list.py +1 -1
- compressedfhir/utilities/compressed_dict/v1/compressed_dict.py +54 -16
- compressedfhir/utilities/compressed_dict/v1/compressed_dict_storage_mode.py +7 -7
- compressedfhir/utilities/fhir_json_encoder.py +1 -1
- compressedfhir/utilities/json_serializers/__init__.py +0 -0
- compressedfhir/utilities/json_serializers/test/__init__.py +0 -0
- compressedfhir/utilities/json_serializers/test/test_type_preservation_decoder.py +82 -0
- compressedfhir/utilities/json_serializers/test/test_type_preservation_encoder.py +59 -0
- compressedfhir/utilities/json_serializers/test/test_type_preservation_serializer.py +60 -0
- compressedfhir/utilities/json_serializers/type_preservation_decoder.py +63 -0
- compressedfhir/utilities/json_serializers/type_preservation_encoder.py +50 -0
- compressedfhir/utilities/json_serializers/type_preservation_serializer.py +55 -0
- compressedfhir/utilities/ordered_dict_to_dict_converter/__init__.py +0 -0
- compressedfhir/utilities/ordered_dict_to_dict_converter/ordered_dict_to_dict_converter.py +24 -0
- compressedfhir/utilities/string_compressor/__init__.py +0 -0
- compressedfhir/utilities/string_compressor/v1/__init__.py +0 -0
- compressedfhir/utilities/string_compressor/v1/string_compressor.py +99 -0
- compressedfhir/utilities/string_compressor/v1/test/__init__.py +0 -0
- compressedfhir/utilities/string_compressor/v1/test/test_string_compressor.py +189 -0
- {compressedfhir-1.0.2.dist-info → compressedfhir-1.0.3.dist-info}/METADATA +1 -1
- {compressedfhir-1.0.2.dist-info → compressedfhir-1.0.3.dist-info}/RECORD +28 -13
- {compressedfhir-1.0.2.dist-info → compressedfhir-1.0.3.dist-info}/WHEEL +0 -0
- {compressedfhir-1.0.2.dist-info → compressedfhir-1.0.3.dist-info}/licenses/LICENSE +0 -0
- {compressedfhir-1.0.2.dist-info → compressedfhir-1.0.3.dist-info}/top_level.txt +0 -0
|
@@ -13,6 +13,9 @@ from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode im
|
|
|
13
13
|
)
|
|
14
14
|
from compressedfhir.utilities.fhir_json_encoder import FhirJSONEncoder
|
|
15
15
|
from compressedfhir.utilities.json_helpers import FhirClientJsonHelpers
|
|
16
|
+
from compressedfhir.utilities.ordered_dict_to_dict_converter.ordered_dict_to_dict_converter import (
|
|
17
|
+
OrderedDictToDictConverter,
|
|
18
|
+
)
|
|
16
19
|
|
|
17
20
|
|
|
18
21
|
class FhirBundle:
|
|
@@ -286,6 +289,4 @@ class FhirBundle:
|
|
|
286
289
|
|
|
287
290
|
:return: Plain dictionary representation of the Bundle
|
|
288
291
|
"""
|
|
289
|
-
return
|
|
290
|
-
Dict[str, Any], json.loads(json.dumps(self.dict(), cls=FhirJSONEncoder))
|
|
291
|
-
)
|
|
292
|
+
return OrderedDictToDictConverter.convert(self.dict())
|
|
@@ -65,17 +65,6 @@ class FhirResource(CompressedDict[str, Any]):
|
|
|
65
65
|
"""Convert the resource to a JSON string."""
|
|
66
66
|
return json.dumps(obj=self.dict(), cls=FhirJSONEncoder)
|
|
67
67
|
|
|
68
|
-
@classmethod
|
|
69
|
-
def from_json(cls, json_str: str) -> "FhirResource":
|
|
70
|
-
"""
|
|
71
|
-
Create a FhirResource object from a JSON string.
|
|
72
|
-
|
|
73
|
-
:param json_str: The JSON string to convert.
|
|
74
|
-
:return: A FhirResource object.
|
|
75
|
-
"""
|
|
76
|
-
data = json.loads(json_str)
|
|
77
|
-
return cls.from_dict(data)
|
|
78
|
-
|
|
79
68
|
def __deepcopy__(self, memo: Dict[int, Any]) -> "FhirResource":
|
|
80
69
|
"""Create a copy of the resource."""
|
|
81
70
|
return FhirResource(
|
|
@@ -112,22 +101,6 @@ class FhirResource(CompressedDict[str, Any]):
|
|
|
112
101
|
|
|
113
102
|
return result
|
|
114
103
|
|
|
115
|
-
@classmethod
|
|
116
|
-
def from_dict(
|
|
117
|
-
cls,
|
|
118
|
-
d: Dict[str, Any],
|
|
119
|
-
*,
|
|
120
|
-
storage_mode: CompressedDictStorageMode = CompressedDictStorageMode.default(),
|
|
121
|
-
) -> "FhirResource":
|
|
122
|
-
"""
|
|
123
|
-
Creates a FhirResource object from a dictionary.
|
|
124
|
-
|
|
125
|
-
:param d: The dictionary to convert.
|
|
126
|
-
:param storage_mode: The storage mode for the CompressedDict.
|
|
127
|
-
:return: A FhirResource object.
|
|
128
|
-
"""
|
|
129
|
-
return cls(initial_dict=d, storage_mode=storage_mode)
|
|
130
|
-
|
|
131
104
|
def remove_nulls(self) -> None:
|
|
132
105
|
"""
|
|
133
106
|
Removes None values from the resource dictionary.
|
|
@@ -161,3 +134,40 @@ class FhirResource(CompressedDict[str, Any]):
|
|
|
161
134
|
else:
|
|
162
135
|
assert isinstance(value, FhirMeta)
|
|
163
136
|
self["meta"] = value.dict()
|
|
137
|
+
|
|
138
|
+
@classmethod
|
|
139
|
+
@override
|
|
140
|
+
def from_json(cls, json_str: str) -> "FhirResource":
|
|
141
|
+
"""
|
|
142
|
+
Creates a FhirResource object from a JSON string.
|
|
143
|
+
|
|
144
|
+
:param json_str: JSON string representing the resource.
|
|
145
|
+
:return: A FhirResource object.
|
|
146
|
+
"""
|
|
147
|
+
return cast(FhirResource, super().from_json(json_str=json_str))
|
|
148
|
+
|
|
149
|
+
@classmethod
|
|
150
|
+
@override
|
|
151
|
+
def from_dict(
|
|
152
|
+
cls,
|
|
153
|
+
d: Dict[str, Any],
|
|
154
|
+
*,
|
|
155
|
+
storage_mode: CompressedDictStorageMode = CompressedDictStorageMode.default(),
|
|
156
|
+
properties_to_cache: List[str] | None = None,
|
|
157
|
+
) -> "FhirResource":
|
|
158
|
+
"""
|
|
159
|
+
Creates a FhirResource object from a dictionary.
|
|
160
|
+
|
|
161
|
+
:param d: Dictionary representing the resource.
|
|
162
|
+
:param storage_mode: Storage mode for the CompressedDict.
|
|
163
|
+
:param properties_to_cache: List of properties to cache.
|
|
164
|
+
:return: A FhirResource object.
|
|
165
|
+
"""
|
|
166
|
+
return cast(
|
|
167
|
+
FhirResource,
|
|
168
|
+
super().from_dict(
|
|
169
|
+
d=d,
|
|
170
|
+
storage_mode=storage_mode,
|
|
171
|
+
properties_to_cache=properties_to_cache,
|
|
172
|
+
),
|
|
173
|
+
)
|
|
@@ -103,7 +103,7 @@ class FhirResourceList(BaseResourceList[FhirResource]):
|
|
|
103
103
|
if len(self) == 0:
|
|
104
104
|
return resources_by_type
|
|
105
105
|
|
|
106
|
-
|
|
106
|
+
resource: FhirResource
|
|
107
107
|
for resource in [r for r in self if r is not None]:
|
|
108
108
|
resource_type: str = resource.resource_type or "unknown"
|
|
109
109
|
if resource_type not in resources_by_type:
|
|
@@ -1,5 +1,4 @@
|
|
|
1
1
|
import copy
|
|
2
|
-
import json
|
|
3
2
|
from collections.abc import KeysView, ValuesView, ItemsView, MutableMapping
|
|
4
3
|
from contextlib import contextmanager
|
|
5
4
|
from typing import Dict, Optional, Iterator, cast, List, Any, overload, OrderedDict
|
|
@@ -14,7 +13,12 @@ from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode im
|
|
|
14
13
|
CompressedDictStorageMode,
|
|
15
14
|
CompressedDictStorageType,
|
|
16
15
|
)
|
|
17
|
-
from compressedfhir.utilities.
|
|
16
|
+
from compressedfhir.utilities.json_serializers.type_preservation_serializer import (
|
|
17
|
+
TypePreservationSerializer,
|
|
18
|
+
)
|
|
19
|
+
from compressedfhir.utilities.ordered_dict_to_dict_converter.ordered_dict_to_dict_converter import (
|
|
20
|
+
OrderedDictToDictConverter,
|
|
21
|
+
)
|
|
18
22
|
|
|
19
23
|
|
|
20
24
|
class CompressedDict[K, V](MutableMapping[K, V]):
|
|
@@ -146,7 +150,7 @@ class CompressedDict[K, V](MutableMapping[K, V]):
|
|
|
146
150
|
# For serialized modes, deserialize
|
|
147
151
|
working_dict = (
|
|
148
152
|
self._deserialize_dict(
|
|
149
|
-
|
|
153
|
+
serialized_dict_bytes=self._serialized_dict,
|
|
150
154
|
storage_type=self._storage_mode.storage_type,
|
|
151
155
|
)
|
|
152
156
|
if self._serialized_dict
|
|
@@ -172,9 +176,9 @@ class CompressedDict[K, V](MutableMapping[K, V]):
|
|
|
172
176
|
assert isinstance(dictionary, OrderedDict)
|
|
173
177
|
if storage_type == "compressed":
|
|
174
178
|
# Serialize to JSON and compress with zlib
|
|
175
|
-
json_str =
|
|
176
|
-
dictionary, separators=(",", ":")
|
|
177
|
-
)
|
|
179
|
+
json_str = TypePreservationSerializer.serialize(
|
|
180
|
+
dictionary, separators=(",", ":")
|
|
181
|
+
)
|
|
178
182
|
return zlib.compress(
|
|
179
183
|
json_str.encode("utf-8"), level=zlib.Z_BEST_COMPRESSION
|
|
180
184
|
)
|
|
@@ -195,34 +199,37 @@ class CompressedDict[K, V](MutableMapping[K, V]):
|
|
|
195
199
|
@staticmethod
|
|
196
200
|
def _deserialize_dict(
|
|
197
201
|
*,
|
|
198
|
-
|
|
202
|
+
serialized_dict_bytes: bytes,
|
|
199
203
|
storage_type: CompressedDictStorageType,
|
|
200
204
|
) -> OrderedDict[K, V]:
|
|
201
205
|
"""
|
|
202
206
|
Deserialize entire dictionary from MessagePack
|
|
203
207
|
|
|
204
208
|
Args:
|
|
205
|
-
|
|
209
|
+
serialized_dict_bytes: Serialized dictionary bytes
|
|
206
210
|
|
|
207
211
|
Returns:
|
|
208
212
|
Deserialized dictionary
|
|
209
213
|
"""
|
|
210
|
-
assert
|
|
214
|
+
assert serialized_dict_bytes is not None, "Serialized dictionary cannot be None"
|
|
215
|
+
assert isinstance(serialized_dict_bytes, bytes)
|
|
211
216
|
|
|
212
217
|
if storage_type == "compressed":
|
|
213
218
|
# Decompress and parse JSON
|
|
214
|
-
|
|
215
|
-
decoded_text =
|
|
219
|
+
decompressed_bytes: bytes = zlib.decompress(serialized_dict_bytes)
|
|
220
|
+
decoded_text: str = decompressed_bytes.decode("utf-8")
|
|
216
221
|
# noinspection PyTypeChecker
|
|
217
|
-
decompressed_dict =
|
|
222
|
+
decompressed_dict = TypePreservationSerializer.deserialize(
|
|
223
|
+
decoded_text, object_pairs_hook=OrderedDict
|
|
224
|
+
)
|
|
218
225
|
assert isinstance(decompressed_dict, OrderedDict)
|
|
219
226
|
return cast(OrderedDict[K, V], decompressed_dict)
|
|
220
227
|
|
|
221
228
|
# Decompress if needed
|
|
222
229
|
to_unpack = (
|
|
223
|
-
zlib.decompress(
|
|
230
|
+
zlib.decompress(serialized_dict_bytes)
|
|
224
231
|
if storage_type == "compressed_msgpack"
|
|
225
|
-
else
|
|
232
|
+
else serialized_dict_bytes
|
|
226
233
|
)
|
|
227
234
|
|
|
228
235
|
# Deserialize
|
|
@@ -630,6 +637,37 @@ class CompressedDict[K, V](MutableMapping[K, V]):
|
|
|
630
637
|
Returns:
|
|
631
638
|
Plain dictionary
|
|
632
639
|
"""
|
|
633
|
-
return
|
|
634
|
-
|
|
640
|
+
return OrderedDictToDictConverter.convert(self.dict())
|
|
641
|
+
|
|
642
|
+
@classmethod
|
|
643
|
+
def from_json(cls, json_str: str) -> "CompressedDict[K, V]":
|
|
644
|
+
"""
|
|
645
|
+
Create a FhirResource object from a JSON string.
|
|
646
|
+
|
|
647
|
+
:param json_str: The JSON string to convert.
|
|
648
|
+
:return: A FhirResource object.
|
|
649
|
+
"""
|
|
650
|
+
data = TypePreservationSerializer.deserialize(json_str)
|
|
651
|
+
return cls.from_dict(data)
|
|
652
|
+
|
|
653
|
+
@classmethod
|
|
654
|
+
def from_dict(
|
|
655
|
+
cls,
|
|
656
|
+
d: Dict[K, V],
|
|
657
|
+
*,
|
|
658
|
+
storage_mode: CompressedDictStorageMode = CompressedDictStorageMode.default(),
|
|
659
|
+
properties_to_cache: List[K] | None = None,
|
|
660
|
+
) -> "CompressedDict[K, V]":
|
|
661
|
+
"""
|
|
662
|
+
Creates a FhirResource object from a dictionary.
|
|
663
|
+
|
|
664
|
+
:param d: The dictionary to convert.
|
|
665
|
+
:param storage_mode: The storage mode for the CompressedDict.
|
|
666
|
+
:param properties_to_cache: Optional list of properties to cache
|
|
667
|
+
:return: A FhirResource object.
|
|
668
|
+
"""
|
|
669
|
+
return cls(
|
|
670
|
+
initial_dict=d,
|
|
671
|
+
storage_mode=storage_mode,
|
|
672
|
+
properties_to_cache=properties_to_cache,
|
|
635
673
|
)
|
|
@@ -4,13 +4,13 @@ from typing import Literal, TypeAlias
|
|
|
4
4
|
CompressedDictStorageType: TypeAlias = Literal[
|
|
5
5
|
"raw", "compressed", "msgpack", "compressed_msgpack"
|
|
6
6
|
]
|
|
7
|
-
|
|
8
|
-
CompressedDictStorageType is a type alias for the different storage types
|
|
9
|
-
raw: No compression
|
|
10
|
-
compressed: Compressed using zlib
|
|
11
|
-
msgpack: Compressed using msgpack
|
|
12
|
-
compressed_msgpack: Compressed using msgpack with zlib
|
|
13
|
-
|
|
7
|
+
###
|
|
8
|
+
# CompressedDictStorageType is a type alias for the different storage types
|
|
9
|
+
# raw: No compression
|
|
10
|
+
# compressed: Compressed using zlib
|
|
11
|
+
# msgpack: Compressed using msgpack
|
|
12
|
+
# compressed_msgpack: Compressed using msgpack with zlib
|
|
13
|
+
###
|
|
14
14
|
|
|
15
15
|
|
|
16
16
|
@dataclasses.dataclass(slots=True)
|
|
@@ -18,7 +18,7 @@ class FhirJSONEncoder(json.JSONEncoder):
|
|
|
18
18
|
def default(self, o: Any) -> Any:
|
|
19
19
|
# Existing type handlers
|
|
20
20
|
if dataclasses.is_dataclass(o):
|
|
21
|
-
return dataclasses.asdict(o) # type:ignore
|
|
21
|
+
return dataclasses.asdict(o) # type:ignore[arg-type]
|
|
22
22
|
|
|
23
23
|
if isinstance(o, Enum):
|
|
24
24
|
return o.value
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
from datetime import datetime, date
|
|
2
|
+
from decimal import Decimal
|
|
3
|
+
from typing import Type, Any, Dict
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
|
|
7
|
+
from compressedfhir.utilities.json_serializers.type_preservation_decoder import (
|
|
8
|
+
TypePreservationDecoder,
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TestCustomObject:
|
|
13
|
+
def __init__(self, name: str, value: int):
|
|
14
|
+
self.name: str = name
|
|
15
|
+
self.value: int = value
|
|
16
|
+
|
|
17
|
+
def __eq__(self, other: Any) -> bool:
|
|
18
|
+
if not isinstance(other, TestCustomObject):
|
|
19
|
+
return False
|
|
20
|
+
return self.name == other.name and self.value == other.value
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@pytest.mark.parametrize(
|
|
24
|
+
"input_type, input_dict, expected_type",
|
|
25
|
+
[
|
|
26
|
+
(
|
|
27
|
+
"datetime",
|
|
28
|
+
{"__type__": "datetime", "iso": "2023-01-01T00:00:00+00:00"},
|
|
29
|
+
datetime,
|
|
30
|
+
),
|
|
31
|
+
("date", {"__type__": "date", "iso": "2023-01-01"}, date),
|
|
32
|
+
("decimal", {"__type__": "decimal", "value": "3.14"}, Decimal),
|
|
33
|
+
("complex", {"__type__": "complex", "real": 3, "imag": 4}, complex),
|
|
34
|
+
("bytes", {"__type__": "bytes", "value": "test"}, bytes),
|
|
35
|
+
("set", {"__type__": "set", "values": [1, 2, 3]}, set),
|
|
36
|
+
],
|
|
37
|
+
)
|
|
38
|
+
def test_complex_type_decoding(
|
|
39
|
+
input_type: str, input_dict: Dict[str, Any], expected_type: Type[Any]
|
|
40
|
+
) -> None:
|
|
41
|
+
"""
|
|
42
|
+
Test decoding of various complex types
|
|
43
|
+
"""
|
|
44
|
+
decoded = TypePreservationDecoder.decode(input_dict)
|
|
45
|
+
|
|
46
|
+
assert isinstance(decoded, expected_type)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def test_custom_object_decoding() -> None:
|
|
50
|
+
"""
|
|
51
|
+
Test decoding of custom objects
|
|
52
|
+
"""
|
|
53
|
+
custom_obj_dict = {
|
|
54
|
+
"__type__": "TestCustomObject",
|
|
55
|
+
"__module__": __name__,
|
|
56
|
+
"attributes": {"name": "test", "value": 42},
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
decoded = TypePreservationDecoder.decode(custom_obj_dict)
|
|
60
|
+
|
|
61
|
+
assert isinstance(decoded, TestCustomObject)
|
|
62
|
+
assert decoded.name == "test"
|
|
63
|
+
assert decoded.value == 42
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def test_custom_decoder() -> None:
|
|
67
|
+
"""
|
|
68
|
+
Test custom decoder functionality
|
|
69
|
+
"""
|
|
70
|
+
|
|
71
|
+
def custom_decoder(data: Dict[str, Any]) -> Any:
|
|
72
|
+
if data.get("__type__") == "special_type":
|
|
73
|
+
return f"Decoded: {data['value']}"
|
|
74
|
+
return data
|
|
75
|
+
|
|
76
|
+
special_dict = {"__type__": "special_type", "value": "test"}
|
|
77
|
+
|
|
78
|
+
decoded = TypePreservationDecoder.decode(
|
|
79
|
+
special_dict, custom_decoders={"special_type": custom_decoder}
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
assert decoded == "Decoded: test"
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from datetime import datetime, timezone, date
|
|
2
|
+
from decimal import Decimal
|
|
3
|
+
from typing import Type, Any
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
|
|
7
|
+
from compressedfhir.utilities.json_serializers.type_preservation_encoder import (
|
|
8
|
+
TypePreservationEncoder,
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TestCustomObject:
|
|
13
|
+
def __init__(self, name: str, value: int):
|
|
14
|
+
self.name: str = name
|
|
15
|
+
self.value: int = value
|
|
16
|
+
|
|
17
|
+
def __eq__(self, other: Any) -> bool:
|
|
18
|
+
if not isinstance(other, TestCustomObject):
|
|
19
|
+
return False
|
|
20
|
+
return self.name == other.name and self.value == other.value
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@pytest.mark.parametrize(
|
|
24
|
+
"input_type, input_value, expected_type",
|
|
25
|
+
[
|
|
26
|
+
(datetime, datetime(2023, 1, 1, tzinfo=timezone.utc), "datetime"),
|
|
27
|
+
(date, date(2023, 1, 1), "date"),
|
|
28
|
+
(Decimal, Decimal("3.14"), "decimal"),
|
|
29
|
+
(complex, 3 + 4j, "complex"),
|
|
30
|
+
(bytes, b"test", "bytes"),
|
|
31
|
+
(set, {1, 2, 3}, "set"),
|
|
32
|
+
],
|
|
33
|
+
)
|
|
34
|
+
def test_complex_type_serialization(
|
|
35
|
+
input_type: Type[Any], input_value: Any, expected_type: str
|
|
36
|
+
) -> None:
|
|
37
|
+
"""
|
|
38
|
+
Test serialization of various complex types
|
|
39
|
+
"""
|
|
40
|
+
encoder = TypePreservationEncoder()
|
|
41
|
+
serialized = encoder.default(input_value)
|
|
42
|
+
|
|
43
|
+
assert isinstance(serialized, dict)
|
|
44
|
+
assert serialized.get("__type__") == expected_type
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# noinspection PyMethodMayBeStatic
|
|
48
|
+
def test_custom_object_serialization() -> None:
|
|
49
|
+
"""
|
|
50
|
+
Test serialization of custom objects
|
|
51
|
+
"""
|
|
52
|
+
custom_obj = TestCustomObject("test", 42)
|
|
53
|
+
encoder = TypePreservationEncoder()
|
|
54
|
+
serialized = encoder.default(custom_obj)
|
|
55
|
+
|
|
56
|
+
assert isinstance(serialized, dict)
|
|
57
|
+
assert serialized.get("__type__") == "TestCustomObject"
|
|
58
|
+
assert serialized.get("__module__") == __name__
|
|
59
|
+
assert "attributes" in serialized
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
from datetime import datetime, timezone, date
|
|
2
|
+
from decimal import Decimal
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from compressedfhir.utilities.json_serializers.type_preservation_serializer import (
|
|
6
|
+
TypePreservationSerializer,
|
|
7
|
+
)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class TestCustomObject:
|
|
11
|
+
def __init__(self, name: str, value: int):
|
|
12
|
+
self.name: str = name
|
|
13
|
+
self.value: int = value
|
|
14
|
+
|
|
15
|
+
def __eq__(self, other: Any) -> bool:
|
|
16
|
+
if not isinstance(other, TestCustomObject):
|
|
17
|
+
return False
|
|
18
|
+
return self.name == other.name and self.value == other.value
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def test_complex_data_serialization() -> None:
|
|
22
|
+
"""
|
|
23
|
+
Test serialization and deserialization of complex data
|
|
24
|
+
"""
|
|
25
|
+
complex_data = {
|
|
26
|
+
"timestamp": datetime.now(timezone.utc),
|
|
27
|
+
"today": date.today(),
|
|
28
|
+
"precise_value": Decimal("3.14159"),
|
|
29
|
+
"complex_number": 3 + 4j,
|
|
30
|
+
"byte_data": b"Hello",
|
|
31
|
+
"unique_items": {1, 2, 3},
|
|
32
|
+
"custom_obj": TestCustomObject("test", 42),
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
# Serialize
|
|
36
|
+
serialized = TypePreservationSerializer.serialize(complex_data)
|
|
37
|
+
|
|
38
|
+
# Deserialize
|
|
39
|
+
deserialized = TypePreservationSerializer.deserialize(serialized)
|
|
40
|
+
|
|
41
|
+
# Verify types
|
|
42
|
+
assert isinstance(deserialized["timestamp"], datetime)
|
|
43
|
+
assert isinstance(deserialized["today"], date)
|
|
44
|
+
assert isinstance(deserialized["precise_value"], Decimal)
|
|
45
|
+
assert isinstance(deserialized["complex_number"], complex)
|
|
46
|
+
assert isinstance(deserialized["byte_data"], bytes)
|
|
47
|
+
assert isinstance(deserialized["unique_items"], set)
|
|
48
|
+
assert isinstance(deserialized["custom_obj"], TestCustomObject)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def test_nested_complex_data() -> None:
|
|
52
|
+
"""
|
|
53
|
+
Test serialization of nested complex data
|
|
54
|
+
"""
|
|
55
|
+
nested_data = {"level1": {"level2": {"timestamp": datetime.now(timezone.utc)}}}
|
|
56
|
+
|
|
57
|
+
serialized = TypePreservationSerializer.serialize(nested_data)
|
|
58
|
+
deserialized = TypePreservationSerializer.deserialize(serialized)
|
|
59
|
+
|
|
60
|
+
assert isinstance(deserialized["level1"]["level2"]["timestamp"], datetime)
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
from datetime import datetime, date
|
|
2
|
+
from decimal import Decimal
|
|
3
|
+
from typing import Any, Dict, Callable
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class TypePreservationDecoder:
|
|
7
|
+
"""
|
|
8
|
+
Advanced JSON decoder for complex type reconstruction
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
@classmethod
|
|
12
|
+
def decode(
|
|
13
|
+
cls,
|
|
14
|
+
dct: Dict[str, Any],
|
|
15
|
+
custom_decoders: Dict[str, Callable[[Any], Any]] | None = None,
|
|
16
|
+
) -> Any:
|
|
17
|
+
"""
|
|
18
|
+
Decode complex types
|
|
19
|
+
|
|
20
|
+
Args:
|
|
21
|
+
dct: Dictionary to decode
|
|
22
|
+
custom_decoders: Optional additional custom decoders
|
|
23
|
+
|
|
24
|
+
Returns:
|
|
25
|
+
Reconstructed object or original dictionary
|
|
26
|
+
"""
|
|
27
|
+
# Default decoders for built-in types
|
|
28
|
+
default_decoders: Dict[str, Callable[[Any], Any]] = {
|
|
29
|
+
"datetime": lambda d: datetime.fromisoformat(d["iso"]),
|
|
30
|
+
"date": lambda d: date.fromisoformat(d["iso"]),
|
|
31
|
+
"decimal": lambda d: Decimal(d["value"]),
|
|
32
|
+
"complex": lambda d: complex(d["real"], d["imag"]),
|
|
33
|
+
"bytes": lambda d: d["value"].encode("latin-1"),
|
|
34
|
+
"set": lambda d: set(d["values"]),
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
# Merge custom decoders with default decoders
|
|
38
|
+
decoders = {**default_decoders, **(custom_decoders or {})}
|
|
39
|
+
|
|
40
|
+
# Check for type marker
|
|
41
|
+
if "__type__" in dct:
|
|
42
|
+
type_name = dct["__type__"]
|
|
43
|
+
|
|
44
|
+
# Handle built-in type decoders
|
|
45
|
+
if type_name in decoders:
|
|
46
|
+
return decoders[type_name](dct)
|
|
47
|
+
|
|
48
|
+
# Handle custom object reconstruction
|
|
49
|
+
if "__module__" in dct and "attributes" in dct:
|
|
50
|
+
try:
|
|
51
|
+
# Dynamically import the class
|
|
52
|
+
module = __import__(dct["__module__"], fromlist=[type_name])
|
|
53
|
+
cls_ = getattr(module, type_name)
|
|
54
|
+
|
|
55
|
+
# Create instance and set attributes
|
|
56
|
+
obj = cls_.__new__(cls_)
|
|
57
|
+
obj.__dict__.update(dct["attributes"])
|
|
58
|
+
return obj
|
|
59
|
+
except (ImportError, AttributeError) as e:
|
|
60
|
+
print(f"Could not reconstruct {type_name}: {e}")
|
|
61
|
+
return dct
|
|
62
|
+
|
|
63
|
+
return dct
|
|
@@ -0,0 +1,50 @@
|
|
|
import json
from collections.abc import Callable
from datetime import datetime, date
from decimal import Decimal
from typing import Any, Dict, Type


class TypePreservationEncoder(json.JSONEncoder):
    """
    Advanced JSON encoder for complex type serialization.

    Each supported type is written as a dict with a "__type__" marker so
    TypePreservationDecoder can rebuild the original object.
    """

    # Ordered mapping of types to their serializers.  datetime must stay
    # ahead of date: datetime is a date subclass, and the first isinstance
    # match wins.
    TYPE_MAP: Dict[Type[Any], Callable[[Any], Any]] = {
        datetime: lambda dt: {
            "__type__": "datetime",
            "iso": dt.isoformat(),
            "tzinfo": str(dt.tzinfo) if dt.tzinfo else None,
        },
        date: lambda d: {"__type__": "date", "iso": d.isoformat()},
        Decimal: lambda d: {"__type__": "decimal", "value": str(d)},
        complex: lambda c: {"__type__": "complex", "real": c.real, "imag": c.imag},
        bytes: lambda b: {"__type__": "bytes", "value": b.decode("latin-1")},
        set: lambda s: {"__type__": "set", "values": list(s)},
    }

    def default(self, obj: Any) -> Any:
        """
        Custom serialization for complex types.

        :param obj: Object json.dumps could not serialize natively.
        :return: A JSON-serializable representation of the object.
        """
        # First matching entry in TYPE_MAP wins (insertion order).
        serializer = next(
            (
                encode
                for candidate_type, encode in self.TYPE_MAP.items()
                if isinstance(obj, candidate_type)
            ),
            None,
        )
        if serializer is not None:
            return serializer(obj)

        # Custom objects: record class, module and instance state so the
        # decoder can reconstruct them.
        if hasattr(obj, "__dict__"):
            return {
                "__type__": obj.__class__.__name__,
                "__module__": obj.__class__.__module__,
                "attributes": obj.__dict__,
            }

        # Anything else: defer to the base encoder (raises TypeError).
        return super().default(obj)
|
|
@@ -0,0 +1,55 @@
|
|
|
import json
from typing import Any, Callable, Dict

from compressedfhir.utilities.json_serializers.type_preservation_decoder import (
    TypePreservationDecoder,
)
from compressedfhir.utilities.json_serializers.type_preservation_encoder import (
    TypePreservationEncoder,
)


class TypePreservationSerializer:
    """
    Comprehensive serialization and deserialization utility.

    Thin facade pairing TypePreservationEncoder (serialize) with
    TypePreservationDecoder (deserialize).
    """

    @classmethod
    def serialize(cls, data: Any, **kwargs: Any) -> str:
        """
        Serialize data with advanced type handling.

        :param data: Data to serialize.
        :param kwargs: Additional json.dumps arguments.
        :return: JSON string representation.
        """
        return json.dumps(data, cls=TypePreservationEncoder, indent=2, **kwargs)

    @classmethod
    def deserialize(
        cls,
        json_str: str,
        custom_decoders: Dict[str, Callable[[Any], Any]] | None = None,
        **kwargs: Any,
    ) -> Any:
        """
        Deserialize a JSON string with advanced type reconstruction.

        :param json_str: JSON string to deserialize.
        :param custom_decoders: Optional additional custom decoders.
        :param kwargs: Additional json.loads arguments.
        :return: Reconstructed object.
        """

        def _hook(dct: Dict[str, Any]) -> Any:
            # Delegate marker-based reconstruction to the decoder.
            return TypePreservationDecoder.decode(dct, custom_decoders)

        return json.loads(json_str, object_hook=_hook, **kwargs)
|
|
File without changes
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
from typing import OrderedDict, cast
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class OrderedDictToDictConverter:
|
|
5
|
+
@staticmethod
|
|
6
|
+
def convert[K, V](ordered_dict: OrderedDict[K, V]) -> dict[K, V]:
|
|
7
|
+
"""
|
|
8
|
+
Converts an OrderedDict to a regular dict in a recursive manner.
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
:param ordered_dict: The OrderedDict to convert
|
|
12
|
+
:return: A regular dict with the same key-value pairs
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
def _convert[T](value: T) -> T:
|
|
16
|
+
if isinstance(value, OrderedDict):
|
|
17
|
+
return cast(T, {k: _convert(v) for k, v in value.items()})
|
|
18
|
+
elif isinstance(value, dict):
|
|
19
|
+
return cast(T, {k: _convert(v) for k, v in value.items()})
|
|
20
|
+
elif isinstance(value, list):
|
|
21
|
+
return cast(T, [_convert(item) for item in value])
|
|
22
|
+
return value
|
|
23
|
+
|
|
24
|
+
return _convert(ordered_dict)
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,99 @@
|
|
|
import zlib
from typing import Union, Optional


class StringCompressor:
    """
    A utility class for compressing and decompressing strings using zlib.

    Provides methods to compress strings to bytes and decompress bytes back to strings.
    Uses UTF-8 encoding and zlib's best compression level.
    """

    @staticmethod
    def compress(text: str, encoding: str = "utf-8") -> bytes:
        """
        Compress a given string to bytes using zlib.

        Args:
            text (str): The input string to compress
            encoding (str, optional): The encoding to use. Defaults to 'utf-8'

        Returns:
            bytes: Compressed representation of the input string

        Raises:
            TypeError: If input is not a string
            zlib.error: If compression fails
        """
        if not isinstance(text, str):
            raise TypeError("Input must be a string")

        try:
            # Encode string to bytes, then compress with best compression
            return zlib.compress(text.encode(encoding), level=zlib.Z_BEST_COMPRESSION)
        except Exception as e:
            # Chain the original exception so callers can see the root cause
            raise zlib.error(f"Compression failed: {e}") from e

    @staticmethod
    def decompress(
        compressed_data: Union[bytes, bytearray], encoding: str = "utf-8"
    ) -> str:
        """
        Decompress bytes back to the original string.

        Args:
            compressed_data (Union[bytes, bytearray]): Compressed data to decompress
            encoding (str, optional): The encoding to use. Defaults to 'utf-8'

        Returns:
            str: Decompressed original string

        Raises:
            TypeError: If input is not bytes or bytearray
            zlib.error: If decompression fails
        """
        if not isinstance(compressed_data, (bytes, bytearray)):
            raise TypeError("Input must be bytes or bytearray")

        try:
            # Decompress bytes, then decode to string
            return zlib.decompress(compressed_data).decode(encoding)
        except Exception as e:
            # Chain the original exception so callers can see the root cause
            raise zlib.error(f"Decompression failed: {e}") from e

    @classmethod
    def compress_safe(
        cls, text: Optional[str], encoding: str = "utf-8"
    ) -> Optional[bytes]:
        """
        Safely compress a string, handling None input.

        Args:
            text (Optional[str]): The input string to compress
            encoding (str, optional): The encoding to use. Defaults to 'utf-8'

        Returns:
            Optional[bytes]: Compressed bytes or None if input is None
        """
        if text is None:
            return None
        return cls.compress(text, encoding)

    @classmethod
    def decompress_safe(
        cls, compressed_data: Optional[Union[bytes, bytearray]], encoding: str = "utf-8"
    ) -> Optional[str]:
        """
        Safely decompress bytes, handling None input.

        Args:
            compressed_data (Optional[Union[bytes, bytearray]]): Compressed data to decompress
            encoding (str, optional): The encoding to use. Defaults to 'utf-8'

        Returns:
            Optional[str]: Decompressed string or None if input is None
        """
        if compressed_data is None:
            return None
        return cls.decompress(compressed_data, encoding)
|
|
File without changes
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
# Basic Compression and Decompression Tests
|
|
2
|
+
from typing import Optional, Union
|
|
3
|
+
|
|
4
|
+
import pytest
|
|
5
|
+
import zlib
|
|
6
|
+
|
|
7
|
+
from compressedfhir.utilities.string_compressor.v1.string_compressor import (
|
|
8
|
+
StringCompressor,
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@pytest.mark.parametrize(
    "input_text",
    [
        "Hello, World!",
        "Python is awesome",
        "12345",
        "",  # Empty string
        "🌍🚀",  # Unicode characters
    ],
)
def test_compress_decompress_basic(input_text: str) -> None:
    """
    Round-trip each sample through compress()/decompress() and verify the
    original text is recovered losslessly.

    Deliberately no "compressed is smaller" assertion: zlib framing
    overhead means very short inputs (and the empty string) compress to
    MORE bytes than the original, so that check does not hold in general.
    (The previous version carried it as commented-out dead code.)
    """
    compressed = StringCompressor.compress(input_text)
    decompressed = StringCompressor.decompress(compressed)

    # Lossless round trip is the actual contract under test.
    assert decompressed == input_text
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# Error Handling Tests
|
|
40
|
+
def test_compress_invalid_input() -> None:
    """
    compress() must reject every non-string input with TypeError.
    """
    # Both an int and None are outside the accepted input type.
    for bad_input in (123, None):
        with pytest.raises(TypeError, match="Input must be a string"):
            StringCompressor.compress(bad_input)  # type:ignore[arg-type]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def test_decompress_invalid_input() -> None:
    """
    decompress() must reject inputs that are neither bytes nor bytearray.
    """
    for bad_input in ("not bytes", 123):
        with pytest.raises(TypeError, match="Input must be bytes or bytearray"):
            StringCompressor.decompress(bad_input)  # type:ignore[arg-type]
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# Safe Method Tests
|
|
63
|
+
@pytest.mark.parametrize("input_text", ["Test string", None, ""])
def test_compress_safe(input_text: Optional[str]) -> None:
    """
    compress_safe() must pass None through and round-trip real strings
    (including the empty string).
    """
    compressed = StringCompressor.compress_safe(input_text)

    # None in -> None out; nothing further to check.
    if input_text is None:
        assert compressed is None
        return

    assert isinstance(compressed, bytes)
    # The safe decompressor must recover the exact original text.
    assert StringCompressor.decompress_safe(compressed) == input_text
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
@pytest.mark.parametrize(
    "input_data", [b"compressed data", None, bytearray(b"another compressed data")]
)
def test_decompress_safe(input_data: Optional[Union[bytes, bytearray]]) -> None:
    """
    Test the safe decompression method.

    * ``None`` must pass straight through as ``None``.
    * The bytes/bytearray parameters are not valid zlib streams, so for
      those cases we compress a known string and feed it back wrapped in
      the SAME container type as the parameter.  This makes the
      parametrization actually exercise both accepted input types — the
      previous version ignored ``input_data`` entirely in this branch,
      so the bytes and bytearray cases were indistinguishable.
    """
    if input_data is None:
        assert StringCompressor.decompress_safe(input_data) is None
        return

    original = "Test string to compress"
    compressed = StringCompressor.compress(original)

    # Re-wrap the valid payload in the parametrized container type
    # (bytes(...) or bytearray(...)) before decompressing.
    typed_compressed = type(input_data)(compressed)
    assert StringCompressor.decompress_safe(typed_compressed) == original
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
# Encoding Tests
|
|
100
|
+
@pytest.mark.parametrize("encoding", ["utf-8", "ascii", "latin-1"])
def test_custom_encoding(encoding: str) -> None:
    """
    A round trip must be lossless when the same (non-default) encoding is
    supplied to both compress() and decompress().
    """
    sample = "Hello, World!"

    packed = StringCompressor.compress(sample, encoding=encoding)
    unpacked = StringCompressor.decompress(packed, encoding=encoding)

    assert unpacked == sample
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
# Compression Efficiency Tests
|
|
117
|
+
def test_compression_efficiency() -> None:
    """
    A long, highly repetitive input must actually shrink when compressed,
    and must still round-trip losslessly.
    """
    # 6000 chars of the same token — ideal material for zlib.
    repetitive_text = "Hello " * 1000

    packed = StringCompressor.compress(repetitive_text)

    # Compressed output must be smaller than the UTF-8 encoded original.
    assert len(packed) < len(repetitive_text.encode("utf-8"))

    # And decompression must recover the input exactly.
    assert StringCompressor.decompress(packed) == repetitive_text
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
# Edge Case Tests
|
|
140
|
+
def test_very_large_string() -> None:
    """
    A 1 MB string must survive the compress/decompress round trip intact.
    """
    one_megabyte = "A" * (1024 * 1024)

    round_tripped = StringCompressor.decompress(
        StringCompressor.compress(one_megabyte)
    )

    assert round_tripped == one_megabyte
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
# Error Scenario Tests
|
|
157
|
+
def test_decompress_corrupted_data() -> None:
    """
    Bytes that are not a valid zlib stream must raise zlib.error.
    """
    garbage = b"corrupted data"
    with pytest.raises(zlib.error):
        StringCompressor.decompress(garbage)
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
# Performance Benchmark (optional)
|
|
167
|
+
def test_compression_performance() -> None:
    """
    Smoke-check that 100 compressions and 100 decompressions finish
    within a second each.

    NOTE(review): these wall-clock thresholds are environment dependent
    and may flake on heavily loaded CI machines — consider marking this
    as a benchmark or relaxing the bounds.
    """
    import timeit

    sample = "Performance test " * 100
    packed = StringCompressor.compress(sample)

    elapsed_compress = timeit.timeit(
        lambda: StringCompressor.compress(sample), number=100
    )
    elapsed_decompress = timeit.timeit(
        lambda: StringCompressor.decompress(packed), number=100
    )

    # Generous upper bounds: 100 calls in under one second each.
    assert elapsed_compress < 1.0
    assert elapsed_decompress < 1.0
|
|
@@ -2,17 +2,17 @@ compressedfhir/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
2
2
|
compressedfhir/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
3
3
|
compressedfhir/fhir/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
4
|
compressedfhir/fhir/base_resource_list.py,sha256=hhlQLT_HFrLmVPuirTsiXQsiUadxpfSQTPS4CofthTM,4924
|
|
5
|
-
compressedfhir/fhir/fhir_bundle.py,sha256=
|
|
6
|
-
compressedfhir/fhir/fhir_bundle_entry.py,sha256=
|
|
5
|
+
compressedfhir/fhir/fhir_bundle.py,sha256=ec5A5OGgaJGwm6UO7uRaWQxNQaWyW6rH0OoXL9BWzaE,10185
|
|
6
|
+
compressedfhir/fhir/fhir_bundle_entry.py,sha256=g7msoFmRAywS2WsBMqmJGYbEeQFPcBqf0o0vVI8pLkc,8458
|
|
7
7
|
compressedfhir/fhir/fhir_bundle_entry_list.py,sha256=tjZueiviQ4ucSDNGSR9CpN-Kwv3BIBcmal3_0J1HE_E,2655
|
|
8
8
|
compressedfhir/fhir/fhir_bundle_entry_request.py,sha256=8UqJw388aDYgZCz1rvk2kmDa03vOEsmZOaJeb5CLqzw,2841
|
|
9
|
-
compressedfhir/fhir/fhir_bundle_entry_response.py,sha256=
|
|
9
|
+
compressedfhir/fhir/fhir_bundle_entry_response.py,sha256=5u-ycyWVdFyLhIUM4xf-5QioKWAc2kEOFeFcJRrR6_o,2512
|
|
10
10
|
compressedfhir/fhir/fhir_bundle_entry_search.py,sha256=uYVJxuNN3gt3Q6BZ5FhRs47x7l54Lo_H-7JdoOvkx94,2554
|
|
11
11
|
compressedfhir/fhir/fhir_identifier.py,sha256=tA_nmhBaYHu5zjJdE0IWMFEF8lrIPV3_nu-yairiIKw,2711
|
|
12
12
|
compressedfhir/fhir/fhir_link.py,sha256=jf2RrwmsPrKW3saP77y42xVqI0xwHFYXxm6YHQJk7gU,1922
|
|
13
13
|
compressedfhir/fhir/fhir_meta.py,sha256=vNI4O6SoG4hJRHyd-bJ_QnYFTfBHyR3UA6h21ByQmWo,1669
|
|
14
|
-
compressedfhir/fhir/fhir_resource.py,sha256=
|
|
15
|
-
compressedfhir/fhir/fhir_resource_list.py,sha256=
|
|
14
|
+
compressedfhir/fhir/fhir_resource.py,sha256=GIz0g8O-Nw9Av8M5wYRoRY4FS2kEk2Nb03RPSeDYUqo,5588
|
|
15
|
+
compressedfhir/fhir/fhir_resource_list.py,sha256=qlAAwWWphtFicBxPG8iriz2eOHGcrWJk5kGThmvkbPE,4480
|
|
16
16
|
compressedfhir/fhir/fhir_resource_map.py,sha256=6Zt_K8KVolS-lgT_Ztu_6YxNo8BXhweQfWO-QFriInA,6588
|
|
17
17
|
compressedfhir/fhir/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
18
|
compressedfhir/fhir/test/test_bundle_entry.py,sha256=Ki2sSu1V1WZkAM6UTCghtzjvjYYI8UcF6AXnx8FWlMI,5115
|
|
@@ -24,21 +24,36 @@ compressedfhir/fhir/test/test_fhir_resource.py,sha256=4Fl6QaqjW4CsYqkxVj2WRXITv_
|
|
|
24
24
|
compressedfhir/fhir/test/test_fhir_resource_list.py,sha256=SrSPJ1yWU4UgMUCht6JwgKh2Y5JeTS4-Wky0kWZOXH8,5664
|
|
25
25
|
compressedfhir/fhir/test/test_fhir_resource_map.py,sha256=jtQ5fq_jhmFfhHGyK5mdiwIQiO-Sfp2eG9mco_Tr9Qk,10995
|
|
26
26
|
compressedfhir/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
27
|
-
compressedfhir/utilities/fhir_json_encoder.py,sha256=
|
|
27
|
+
compressedfhir/utilities/fhir_json_encoder.py,sha256=hn-ZuDrTEdYZmILk_5_k4R72PQB_OHYXo_3eTKTO24c,1856
|
|
28
28
|
compressedfhir/utilities/json_helpers.py,sha256=lEiPapLN0p-kLu6PFm-h971ieXRxwPB2M-8FCZ2Buo8,5642
|
|
29
29
|
compressedfhir/utilities/compressed_dict/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
30
30
|
compressedfhir/utilities/compressed_dict/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
31
|
-
compressedfhir/utilities/compressed_dict/v1/compressed_dict.py,sha256=
|
|
31
|
+
compressedfhir/utilities/compressed_dict/v1/compressed_dict.py,sha256=sf8mGBdvYpjcMfVSWUVFGTiEi_pimutwCWyfKbAY2OU,21314
|
|
32
32
|
compressedfhir/utilities/compressed_dict/v1/compressed_dict_access_error.py,sha256=xuwED0KGZcQORIcZRfi--5CdXplHJ5vYLBUqpbDi344,132
|
|
33
|
-
compressedfhir/utilities/compressed_dict/v1/compressed_dict_storage_mode.py,sha256=
|
|
33
|
+
compressedfhir/utilities/compressed_dict/v1/compressed_dict_storage_mode.py,sha256=mEdtJjPX2I9DqP0Ly_VsZZWhEMNTI1psqQ8iJtUQ2oE,1412
|
|
34
34
|
compressedfhir/utilities/compressed_dict/v1/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
35
35
|
compressedfhir/utilities/compressed_dict/v1/test/test_compressed_dict.py,sha256=7AsOX1Nw7Woo9C7OzdBXMXFQhgEBAZZ8py1aHfFh-4k,11970
|
|
36
|
+
compressedfhir/utilities/json_serializers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
37
|
+
compressedfhir/utilities/json_serializers/type_preservation_decoder.py,sha256=vLGSuyN7FXlpoJwpBvCPf27RpA1DmmKQ0BO_xPOLJiw,2135
|
|
38
|
+
compressedfhir/utilities/json_serializers/type_preservation_encoder.py,sha256=f7RL67l7QtDbijCPq1ki6axrLte1vH--bi1AsN7Y3yk,1646
|
|
39
|
+
compressedfhir/utilities/json_serializers/type_preservation_serializer.py,sha256=jhut-eqVMhAYnAVA9GOH8moJBn20pqA7-MBqsW-JXeY,1488
|
|
40
|
+
compressedfhir/utilities/json_serializers/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
41
|
+
compressedfhir/utilities/json_serializers/test/test_type_preservation_decoder.py,sha256=sVdZoZ6u8luyjmBLae_6Bk8lsYTaxBNU-e-P-nWyVMk,2329
|
|
42
|
+
compressedfhir/utilities/json_serializers/test/test_type_preservation_encoder.py,sha256=O4VczBdsJF35WozZiwSdJ8638qDn01JQsai2wTXu5Vo,1737
|
|
43
|
+
compressedfhir/utilities/json_serializers/test/test_type_preservation_serializer.py,sha256=RwshpoLN-f3bXmT1QhwWANpndGMxwtyu9O-1SMMwmgQ,1985
|
|
44
|
+
compressedfhir/utilities/ordered_dict_to_dict_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
45
|
+
compressedfhir/utilities/ordered_dict_to_dict_converter/ordered_dict_to_dict_converter.py,sha256=CMerJQD7O0vMyGtUp1rKSerZA1tDZeY5GTQT3AykL4w,831
|
|
46
|
+
compressedfhir/utilities/string_compressor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
47
|
+
compressedfhir/utilities/string_compressor/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
48
|
+
compressedfhir/utilities/string_compressor/v1/string_compressor.py,sha256=28CvEJPQVKS56S9YPdVM1i-xWEuizYeyKiICWEYOV0k,3263
|
|
49
|
+
compressedfhir/utilities/string_compressor/v1/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
50
|
+
compressedfhir/utilities/string_compressor/v1/test/test_string_compressor.py,sha256=ydlJIpp-IDPcLlv4YvxMph19OndLEt3kuNQ9buNwy0Y,5473
|
|
36
51
|
compressedfhir/utilities/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
37
52
|
compressedfhir/utilities/test/test_fhir_json_encoder.py,sha256=6pbNmZp5eBWY66bHjgjm_pZVhs5HDKP8hCGnwNFzpEw,5171
|
|
38
53
|
compressedfhir/utilities/test/test_json_helpers.py,sha256=V0R9oHDQAs0m0012niEz50sHJxMSUQvA3km7kK8HgjE,3860
|
|
39
|
-
compressedfhir-1.0.
|
|
54
|
+
compressedfhir-1.0.3.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
|
|
40
55
|
tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
41
|
-
compressedfhir-1.0.
|
|
42
|
-
compressedfhir-1.0.
|
|
43
|
-
compressedfhir-1.0.
|
|
44
|
-
compressedfhir-1.0.
|
|
56
|
+
compressedfhir-1.0.3.dist-info/METADATA,sha256=4HYsDmJhx91m8f7HbhReC5tL6mkP3Z7a_ZqDbzIEgaI,3456
|
|
57
|
+
compressedfhir-1.0.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
|
|
58
|
+
compressedfhir-1.0.3.dist-info/top_level.txt,sha256=YMKdvBBdiCzFbpI9fG8BUDjaRd-f4R0qAvUoVETpoWw,21
|
|
59
|
+
compressedfhir-1.0.3.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|