compressedfhir-3.0.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. compressedfhir/__init__.py +0 -0
  2. compressedfhir/fhir/__init__.py +0 -0
  3. compressedfhir/fhir/base_resource_list.py +165 -0
  4. compressedfhir/fhir/fhir_bundle.py +295 -0
  5. compressedfhir/fhir/fhir_bundle_entry.py +240 -0
  6. compressedfhir/fhir/fhir_bundle_entry_list.py +97 -0
  7. compressedfhir/fhir/fhir_bundle_entry_request.py +73 -0
  8. compressedfhir/fhir/fhir_bundle_entry_response.py +67 -0
  9. compressedfhir/fhir/fhir_bundle_entry_search.py +75 -0
  10. compressedfhir/fhir/fhir_identifier.py +84 -0
  11. compressedfhir/fhir/fhir_link.py +63 -0
  12. compressedfhir/fhir/fhir_meta.py +47 -0
  13. compressedfhir/fhir/fhir_resource.py +170 -0
  14. compressedfhir/fhir/fhir_resource_list.py +149 -0
  15. compressedfhir/fhir/fhir_resource_map.py +193 -0
  16. compressedfhir/fhir/test/__init__.py +0 -0
  17. compressedfhir/fhir/test/test_bundle_entry.py +129 -0
  18. compressedfhir/fhir/test/test_bundle_entry_list.py +187 -0
  19. compressedfhir/fhir/test/test_bundle_entry_request.py +74 -0
  20. compressedfhir/fhir/test/test_bundle_entry_response.py +65 -0
  21. compressedfhir/fhir/test/test_fhir_bundle.py +245 -0
  22. compressedfhir/fhir/test/test_fhir_resource.py +104 -0
  23. compressedfhir/fhir/test/test_fhir_resource_list.py +160 -0
  24. compressedfhir/fhir/test/test_fhir_resource_map.py +293 -0
  25. compressedfhir/py.typed +0 -0
  26. compressedfhir/utilities/__init__.py +0 -0
  27. compressedfhir/utilities/compressed_dict/__init__.py +0 -0
  28. compressedfhir/utilities/compressed_dict/v1/__init__.py +0 -0
  29. compressedfhir/utilities/compressed_dict/v1/compressed_dict.py +701 -0
  30. compressedfhir/utilities/compressed_dict/v1/compressed_dict_access_error.py +2 -0
  31. compressedfhir/utilities/compressed_dict/v1/compressed_dict_storage_mode.py +50 -0
  32. compressedfhir/utilities/compressed_dict/v1/test/__init__.py +0 -0
  33. compressedfhir/utilities/compressed_dict/v1/test/test_compressed_dict.py +467 -0
  34. compressedfhir/utilities/fhir_json_encoder.py +71 -0
  35. compressedfhir/utilities/json_helpers.py +181 -0
  36. compressedfhir/utilities/json_serializers/__init__.py +0 -0
  37. compressedfhir/utilities/json_serializers/test/__init__.py +0 -0
  38. compressedfhir/utilities/json_serializers/test/test_type_preservation_decoder.py +165 -0
  39. compressedfhir/utilities/json_serializers/test/test_type_preservation_encoder.py +71 -0
  40. compressedfhir/utilities/json_serializers/test/test_type_preservation_serializer.py +197 -0
  41. compressedfhir/utilities/json_serializers/type_preservation_decoder.py +135 -0
  42. compressedfhir/utilities/json_serializers/type_preservation_encoder.py +55 -0
  43. compressedfhir/utilities/json_serializers/type_preservation_serializer.py +57 -0
  44. compressedfhir/utilities/ordered_dict_to_dict_converter/__init__.py +0 -0
  45. compressedfhir/utilities/ordered_dict_to_dict_converter/ordered_dict_to_dict_converter.py +24 -0
  46. compressedfhir/utilities/string_compressor/__init__.py +0 -0
  47. compressedfhir/utilities/string_compressor/v1/__init__.py +0 -0
  48. compressedfhir/utilities/string_compressor/v1/string_compressor.py +99 -0
  49. compressedfhir/utilities/string_compressor/v1/test/__init__.py +0 -0
  50. compressedfhir/utilities/string_compressor/v1/test/test_string_compressor.py +189 -0
  51. compressedfhir/utilities/test/__init__.py +0 -0
  52. compressedfhir/utilities/test/test_fhir_json_encoder.py +177 -0
  53. compressedfhir/utilities/test/test_json_helpers.py +99 -0
  54. compressedfhir-3.0.2.dist-info/METADATA +139 -0
  55. compressedfhir-3.0.2.dist-info/RECORD +59 -0
  56. compressedfhir-3.0.2.dist-info/WHEEL +5 -0
  57. compressedfhir-3.0.2.dist-info/licenses/LICENSE +201 -0
  58. compressedfhir-3.0.2.dist-info/top_level.txt +2 -0
  59. tests/__init__.py +0 -0
compressedfhir/utilities/json_serializers/type_preservation_decoder.py
@@ -0,0 +1,135 @@
+ import logging
+ from collections import OrderedDict
+ from datetime import datetime, date, time
+ from decimal import Decimal
+ from logging import Logger
+ from typing import Any, Dict, Callable, Optional, Union, cast, List
+ from zoneinfo import ZoneInfo
+
+
+ class TypePreservationDecoder:
+     """
+     Advanced JSON decoder for complex type reconstruction with nested type support
+     """
+
+     @classmethod
+     def decode(
+         cls,
+         dct: Union[str, Dict[str, Any], List[Any]],
+         custom_decoders: Optional[Dict[str, Callable[[Any], Any]]] = None,
+         use_ordered_dict: bool = True,
+     ) -> Any:
+         """
+         Decode complex types, including nested datetime fields
+
+         Args:
+             dct: Dictionary to decode
+             custom_decoders: Optional additional custom decoders
+             use_ordered_dict: Flag to control whether to use OrderedDict or not
+
+         Returns:
+             Reconstructed object or original dictionary
+         """
+         logger: Logger = logging.getLogger(__name__)
+
+         # Default decoders for built-in types with nested support
+         def datetime_decoder(d: Union[str, Dict[str, Any]]) -> datetime:
+             if isinstance(d, str):
+                 return datetime.fromisoformat(d)
+             elif isinstance(d, dict) and "iso" in d:
+                 return datetime.fromisoformat(d["iso"])
+             return cast(datetime, d)
+
+         def date_decoder(d: Union[str, Dict[str, Any]]) -> date:
+             if isinstance(d, str):
+                 return date.fromisoformat(d)
+             elif isinstance(d, dict) and "iso" in d:
+                 return date.fromisoformat(d["iso"])
+             return cast(date, d)
+
+         def time_decoder(d: Union[str, Dict[str, Any]]) -> time:
+             if isinstance(d, str):
+                 return time.fromisoformat(d)
+             elif isinstance(d, dict) and "iso" in d:
+                 # Extract ISO time string
+                 iso_time: str = d["iso"]
+
+                 # Parse time from ISO format
+                 parsed_time = time.fromisoformat(iso_time)
+
+                 # Add timezone if specified
+                 tz_info = d.get("tzinfo")
+                 if tz_info:
+                     try:
+                         tz_aware_time = parsed_time.replace(tzinfo=ZoneInfo(tz_info))
+                         return tz_aware_time
+                     except Exception as e:
+                         raise ValueError(f"Invalid timezone: {tz_info}") from e
+                 else:
+                     # If no timezone info, return naive time
+                     return parsed_time
+             return cast(time, d)
+
+         default_decoders: Dict[str, Callable[[Any], Any]] = {
+             "datetime": datetime_decoder,
+             "date": date_decoder,
+             "time": time_decoder,
+             "decimal": lambda d: Decimal(d["value"] if isinstance(d, dict) else d),
+             "complex": lambda d: complex(d["real"], d["imag"])
+             if isinstance(d, dict)
+             else d,
+             "bytes": lambda d: d["value"].encode("latin-1")
+             if isinstance(d, dict)
+             else d,
+             "set": lambda d: set(d["values"]) if isinstance(d, dict) else d,
+         }
+
+         # Merge custom decoders with default decoders
+         decoders = {**default_decoders, **(custom_decoders or {})}
+
+         # Recursively decode nested structures
+         def recursive_decode(value: Any) -> Any:
+             if isinstance(value, dict):
+                 # Check for type marker in the dictionary
+                 if "__type__" in value:
+                     type_name = value["__type__"]
+
+                     # Handle built-in type decoders
+                     if type_name in decoders:
+                         return decoders[type_name](value)
+
+                     # Handle custom object reconstruction
+                     if "__module__" in value and "attributes" in value:
+                         try:
+                             # Dynamically import the class
+                             module = __import__(
+                                 value["__module__"], fromlist=[type_name]
+                             )
+                             cls_ = getattr(module, type_name)
+
+                             # Create instance and set attributes with recursive decoding
+                             obj = cls_.__new__(cls_)
+                             obj.__dict__.update(
+                                 {
+                                     k: recursive_decode(v)
+                                     for k, v in value["attributes"].items()
+                                 }
+                             )
+                             return obj
+                         except (ImportError, AttributeError) as e:
+                             logger.error(f"Could not reconstruct {type_name}: {e}")
+                             return value
+
+                 # Recursively decode dictionary values
+                 # Conditionally use OrderedDict or regular dict
+                 dict_type = OrderedDict if use_ordered_dict else dict
+                 return dict_type((k, recursive_decode(v)) for k, v in value.items())
+
+             # Recursively decode list or tuple
+             elif isinstance(value, (list, tuple)):
+                 return type(value)(recursive_decode(item) for item in value)
+
+             return value
+
+         # Start recursive decoding
+         return recursive_decode(dct)
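
A brief usage sketch of the decoder (the payload below is an illustrative example written for this summary, not taken from the package's own tests): dictionaries carrying a "__type__" marker are rebuilt into native Python objects, and untagged dictionaries come back as OrderedDict unless use_ordered_dict=False is passed.

    from collections import OrderedDict
    from datetime import date

    from compressedfhir.utilities.json_serializers.type_preservation_decoder import (
        TypePreservationDecoder,
    )

    # Illustrative payload: a nested dict tagged as a date
    payload = {"recorded": {"__type__": "date", "iso": "2024-03-01"}}

    decoded = TypePreservationDecoder.decode(payload)
    assert decoded["recorded"] == date(2024, 3, 1)
    assert isinstance(decoded, OrderedDict)  # default use_ordered_dict=True

    plain = TypePreservationDecoder.decode(payload, use_ordered_dict=False)
    assert not isinstance(plain, OrderedDict)  # plain dicts instead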
compressedfhir/utilities/json_serializers/type_preservation_encoder.py
@@ -0,0 +1,55 @@
+ import json
+ from collections.abc import Callable
+ from datetime import datetime, date, time
+ from decimal import Decimal
+ from typing import Any, Dict, Type
+
+
+ class TypePreservationEncoder(json.JSONEncoder):
+     """
+     Advanced JSON encoder for complex type serialization
+     """
+
+     TYPE_MAP: Dict[Type[Any], Callable[[Any], Any]] = {
+         datetime: lambda dt: {
+             "__type__": "datetime",
+             "iso": dt.isoformat(),
+             "tzinfo": str(dt.tzinfo) if dt.tzinfo else None,
+         },
+         date: lambda d: {"__type__": "date", "iso": d.isoformat()},
+         time: lambda t: {
+             "__type__": "time",
+             "iso": t.isoformat(),
+             "tzinfo": str(t.tzinfo) if t.tzinfo else None,
+         },
+         Decimal: lambda d: {"__type__": "decimal", "value": str(d)},
+         complex: lambda c: {"__type__": "complex", "real": c.real, "imag": c.imag},
+         bytes: lambda b: {"__type__": "bytes", "value": b.decode("latin-1")},
+         set: lambda s: {"__type__": "set", "values": list(s)},
+     }
+
+     def default(self, obj: Any) -> Any:
+         """
+         Custom serialization for complex types
+
+         Args:
+             obj: Object to serialize
+
+         Returns:
+             Serializable representation of the object
+         """
+         # Check if the type is in our custom type map
+         for type_, serializer in self.TYPE_MAP.items():
+             if isinstance(obj, type_):
+                 return serializer(obj)
+
+         # Handle custom objects with __dict__
+         if hasattr(obj, "__dict__"):
+             return {
+                 "__type__": obj.__class__.__name__,
+                 "__module__": obj.__class__.__module__,
+                 "attributes": obj.__dict__,
+             }
+
+         # Fallback to default JSON encoder
+         return super().default(obj)
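
For orientation, a small sketch of the wire format this encoder produces (the values are illustrative): passing the encoder class to json.dumps turns unsupported types into tagged dictionaries that TypePreservationDecoder recognizes.

    import json
    from decimal import Decimal

    from compressedfhir.utilities.json_serializers.type_preservation_encoder import (
        TypePreservationEncoder,
    )

    encoded = json.dumps({"amount": Decimal("1.50")}, cls=TypePreservationEncoder)
    # -> {"amount": {"__type__": "decimal", "value": "1.50"}}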
compressedfhir/utilities/json_serializers/type_preservation_serializer.py
@@ -0,0 +1,57 @@
+ import json
+ from typing import Any, Callable, Dict
+
+ from compressedfhir.utilities.json_serializers.type_preservation_decoder import (
+     TypePreservationDecoder,
+ )
+ from compressedfhir.utilities.json_serializers.type_preservation_encoder import (
+     TypePreservationEncoder,
+ )
+
+
+ class TypePreservationSerializer:
+     """
+     Comprehensive serialization and deserialization utility
+     """
+
+     @classmethod
+     def serialize(cls, data: Any, **kwargs: Any) -> str:
+         """
+         Serialize data with advanced type handling
+
+         Args:
+             data: Data to serialize
+             kwargs: Additional JSON dumps arguments
+
+         Returns:
+             JSON string representation
+         """
+         return json.dumps(
+             data, cls=TypePreservationEncoder, separators=(",", ":"), **kwargs
+         )
+
+     @classmethod
+     def deserialize(
+         cls,
+         json_str: str,
+         custom_decoders: Dict[str, Callable[[Any], Any]] | None = None,
+         **kwargs: Any,
+     ) -> Any:
+         """
+         Deserialize JSON string with advanced type reconstruction
+
+         Args:
+             json_str: JSON string to deserialize
+             custom_decoders: Optional additional custom decoders
+             kwargs: Additional JSON loads arguments
+
+         Returns:
+             Reconstructed object
+         """
+         return json.loads(
+             json_str,
+             object_hook=lambda dct: TypePreservationDecoder.decode(
+                 dct, custom_decoders
+             ),
+             **kwargs,
+         )
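
A minimal round-trip sketch (illustrative values, not from the package's test suite): serialize emits compact JSON with type markers, and deserialize rebuilds the original Python objects through the decoder supplied as the object_hook.

    from datetime import datetime, timezone
    from decimal import Decimal

    from compressedfhir.utilities.json_serializers.type_preservation_serializer import (
        TypePreservationSerializer,
    )

    original = {
        "issued": datetime(2024, 3, 1, 12, 30, tzinfo=timezone.utc),
        "value": Decimal("98.6"),
    }
    json_str = TypePreservationSerializer.serialize(original)
    restored = TypePreservationSerializer.deserialize(json_str)

    assert restored["issued"] == original["issued"]
    assert restored["value"] == original["value"]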
compressedfhir/utilities/ordered_dict_to_dict_converter/ordered_dict_to_dict_converter.py
@@ -0,0 +1,24 @@
+ from typing import OrderedDict, cast
+
+
+ class OrderedDictToDictConverter:
+     @staticmethod
+     def convert[K, V](ordered_dict: OrderedDict[K, V]) -> dict[K, V]:
+         """
+         Converts an OrderedDict to a regular dict in a recursive manner.
+
+
+         :param ordered_dict: The OrderedDict to convert
+         :return: A regular dict with the same key-value pairs
+         """
+
+         def _convert[T](value: T) -> T:
+             if isinstance(value, OrderedDict):
+                 return cast(T, {k: _convert(v) for k, v in value.items()})
+             elif isinstance(value, dict):
+                 return cast(T, {k: _convert(v) for k, v in value.items()})
+             elif isinstance(value, list):
+                 return cast(T, [_convert(item) for item in value])
+             return value
+
+         return _convert(ordered_dict)
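
For example (an illustrative sketch), nested OrderedDict structures such as those produced by the decoder above can be flattened back into plain dicts:

    from collections import OrderedDict

    from compressedfhir.utilities.ordered_dict_to_dict_converter.ordered_dict_to_dict_converter import (
        OrderedDictToDictConverter,
    )

    nested = OrderedDict(
        [("a", OrderedDict([("b", 1)])), ("c", [OrderedDict([("d", 2)])])]
    )
    plain = OrderedDictToDictConverter.convert(nested)
    assert plain == {"a": {"b": 1}, "c": [{"d": 2}]}
    assert type(plain) is dict and type(plain["a"]) is dict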
compressedfhir/utilities/string_compressor/v1/string_compressor.py
@@ -0,0 +1,99 @@
+ import zlib
+ from typing import Union, Optional
+
+
+ class StringCompressor:
+     """
+     A utility class for compressing and decompressing strings using zlib.
+
+     Provides methods to compress strings to bytes and decompress bytes back to strings.
+     Uses UTF-8 encoding and zlib's best compression level.
+     """
+
+     @staticmethod
+     def compress(text: str, encoding: str = "utf-8") -> bytes:
+         """
+         Compress a given string to bytes using zlib.
+
+         Args:
+             text (str): The input string to compress
+             encoding (str, optional): The encoding to use. Defaults to 'utf-8'
+
+         Returns:
+             bytes: Compressed representation of the input string
+
+         Raises:
+             TypeError: If input is not a string
+             zlib.error: If compression fails
+         """
+         if not isinstance(text, str):
+             raise TypeError("Input must be a string")
+
+         try:
+             # Encode string to bytes, then compress with best compression
+             return zlib.compress(text.encode(encoding), level=zlib.Z_BEST_COMPRESSION)
+         except Exception as e:
+             raise zlib.error(f"Compression failed: {e}")
+
+     @staticmethod
+     def decompress(
+         compressed_data: Union[bytes, bytearray], encoding: str = "utf-8"
+     ) -> str:
+         """
+         Decompress bytes back to the original string.
+
+         Args:
+             compressed_data (Union[bytes, bytearray]): Compressed data to decompress
+             encoding (str, optional): The encoding to use. Defaults to 'utf-8'
+
+         Returns:
+             str: Decompressed original string
+
+         Raises:
+             TypeError: If input is not bytes or bytearray
+             zlib.error: If decompression fails
+         """
+         if not isinstance(compressed_data, (bytes, bytearray)):
+             raise TypeError("Input must be bytes or bytearray")
+
+         try:
+             # Decompress bytes, then decode to string
+             return zlib.decompress(compressed_data).decode(encoding)
+         except Exception as e:
+             raise zlib.error(f"Decompression failed: {e}")
+
+     @classmethod
+     def compress_safe(
+         cls, text: Optional[str], encoding: str = "utf-8"
+     ) -> Optional[bytes]:
+         """
+         Safely compress a string, handling None input.
+
+         Args:
+             text (Optional[str]): The input string to compress
+             encoding (str, optional): The encoding to use. Defaults to 'utf-8'
+
+         Returns:
+             Optional[bytes]: Compressed bytes or None if input is None
+         """
+         if text is None:
+             return None
+         return cls.compress(text, encoding)
+
+     @classmethod
+     def decompress_safe(
+         cls, compressed_data: Optional[Union[bytes, bytearray]], encoding: str = "utf-8"
+     ) -> Optional[str]:
+         """
+         Safely decompress bytes, handling None input.
+
+         Args:
+             compressed_data (Optional[Union[bytes, bytearray]]): Compressed data to decompress
+             encoding (str, optional): The encoding to use. Defaults to 'utf-8'
+
+         Returns:
+             Optional[str]: Decompressed string or None if input is None
+         """
+         if compressed_data is None:
+             return None
+         return cls.decompress(compressed_data, encoding)
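
A short usage sketch (the strings are illustrative): compress and decompress round-trip a string losslessly, while the *_safe variants pass None through untouched.

    from compressedfhir.utilities.string_compressor.v1.string_compressor import (
        StringCompressor,
    )

    text = "Patient/123 " * 50  # repetitive text compresses well
    compressed = StringCompressor.compress(text)
    assert StringCompressor.decompress(compressed) == text

    # The *_safe variants tolerate None, convenient for optional fields.
    assert StringCompressor.compress_safe(None) is None
    assert StringCompressor.decompress_safe(None) is None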
compressedfhir/utilities/string_compressor/v1/test/test_string_compressor.py
@@ -0,0 +1,189 @@
+ # Basic Compression and Decompression Tests
+ from typing import Optional, Union
+
+ import pytest
+ import zlib
+
+ from compressedfhir.utilities.string_compressor.v1.string_compressor import (
+     StringCompressor,
+ )
+
+
+ @pytest.mark.parametrize(
+     "input_text",
+     [
+         "Hello, World!",
+         "Python is awesome",
+         "12345",
+         "",  # Empty string
+         "🌍🚀",  # Unicode characters
+     ],
+ )
+ def test_compress_decompress_basic(input_text: str) -> None:
+     """
+     Test basic compression and decompression functionality
+     """
+     # Compress
+     compressed = StringCompressor.compress(input_text)
+
+     # Verify compression reduces size
+     # assert len(compressed) < len(input_text.encode('utf-8'))
+
+     # Decompress
+     decompressed = StringCompressor.decompress(compressed)
+
+     # Verify original text is preserved
+     assert decompressed == input_text
+
+
+ # Error Handling Tests
+ def test_compress_invalid_input() -> None:
+     """
+     Test compression with invalid input type
+     """
+     with pytest.raises(TypeError, match="Input must be a string"):
+         StringCompressor.compress(123)  # type:ignore[arg-type]
+
+     with pytest.raises(TypeError, match="Input must be a string"):
+         StringCompressor.compress(None)  # type:ignore[arg-type]
+
+
+ def test_decompress_invalid_input() -> None:
+     """
+     Test decompression with invalid input type
+     """
+     with pytest.raises(TypeError, match="Input must be bytes or bytearray"):
+         StringCompressor.decompress("not bytes")  # type:ignore[arg-type]
+
+     with pytest.raises(TypeError, match="Input must be bytes or bytearray"):
+         StringCompressor.decompress(123)  # type:ignore[arg-type]
+
+
+ # Safe Method Tests
+ @pytest.mark.parametrize("input_text", ["Test string", None, ""])
+ def test_compress_safe(input_text: Optional[str]) -> None:
+     """
+     Test safe compression method
+     """
+     compressed = StringCompressor.compress_safe(input_text)
+
+     if input_text is None:
+         assert compressed is None
+     else:
+         assert isinstance(compressed, bytes)
+         # Verify we can decompress
+         decompressed = StringCompressor.decompress_safe(compressed)
+         assert decompressed == input_text
+
+
+ @pytest.mark.parametrize(
+     "input_data", [b"compressed data", None, bytearray(b"another compressed data")]
+ )
+ def test_decompress_safe(input_data: Optional[Union[bytes, bytearray]]) -> None:
+     """
+     Test safe decompression method
+     """
+     if input_data is None:
+         decompressed = StringCompressor.decompress_safe(input_data)
+         assert decompressed is None
+     else:
+         # First compress a string
+         original = "Test string to compress"
+         compressed = StringCompressor.compress(original)
+
+         # Then decompress
+         decompressed = StringCompressor.decompress_safe(compressed)
+         assert decompressed == original
+
+
+ # Encoding Tests
+ @pytest.mark.parametrize("encoding", ["utf-8", "ascii", "latin-1"])
+ def test_custom_encoding(encoding: str) -> None:
+     """
+     Test compression and decompression with different encodings
+     """
+     input_text = "Hello, World!"
+
+     # Compress with custom encoding
+     compressed = StringCompressor.compress(input_text, encoding=encoding)
+
+     # Decompress with same encoding
+     decompressed = StringCompressor.decompress(compressed, encoding=encoding)
+
+     assert decompressed == input_text
+
+
+ # Compression Efficiency Tests
+ def test_compression_efficiency() -> None:
+     """
+     Test that compression actually reduces data size
+     """
+     # Long repetitive string for better compression
+     input_text = "Hello " * 1000
+
+     # Compress
+     compressed = StringCompressor.compress(input_text)
+
+     # Check compression ratio
+     original_size = len(input_text.encode("utf-8"))
+     compressed_size = len(compressed)
+
+     # Verify significant size reduction
+     assert compressed_size < original_size
+
+     # Verify lossless decompression
+     decompressed = StringCompressor.decompress(compressed)
+     assert decompressed == input_text
+
+
+ # Edge Case Tests
+ def test_very_large_string() -> None:
+     """
+     Test compression and decompression of a very large string
+     """
+     # Generate a large string
+     large_text = "A" * (1024 * 1024)  # 1MB of text
+
+     # Compress
+     compressed = StringCompressor.compress(large_text)
+
+     # Decompress
+     decompressed = StringCompressor.decompress(compressed)
+
+     assert decompressed == large_text
+
+
+ # Error Scenario Tests
+ def test_decompress_corrupted_data() -> None:
+     """
+     Test decompression of corrupted data
+     """
+     # Create some corrupted compressed data
+     with pytest.raises(zlib.error):
+         StringCompressor.decompress(b"corrupted data")
+
+
+ # Performance Benchmark (optional)
+ def test_compression_performance() -> None:
+     """
+     Basic performance test for compression and decompression
+     """
+     import timeit
+
+     # Test string
+     test_string = "Performance test " * 100
+
+     # Measure compression time
+     compression_time = timeit.timeit(
+         lambda: StringCompressor.compress(test_string), number=100
+     )
+
+     # Measure decompression time
+     compressed = StringCompressor.compress(test_string)
+     decompression_time = timeit.timeit(
+         lambda: StringCompressor.decompress(compressed), number=100
+     )
+
+     # Basic performance assertions (these can be adjusted)
+     assert compression_time < 1.0  # 100 compressions in less than 1 second
+     assert decompression_time < 1.0  # 100 decompressions in less than 1 second