compressedfhir 1.0.3__tar.gz → 1.0.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of compressedfhir might be problematic.
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/Makefile +18 -0
- {compressedfhir-1.0.3/compressedfhir.egg-info → compressedfhir-1.0.5}/PKG-INFO +1 -1
- compressedfhir-1.0.5/VERSION +1 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_resource.py +13 -28
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_resource_map.py +1 -1
- compressedfhir-1.0.5/compressedfhir/fhir/test/test_fhir_resource.py +104 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/v1/compressed_dict.py +34 -12
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/v1/test/test_compressed_dict.py +111 -4
- compressedfhir-1.0.5/compressedfhir/utilities/json_serializers/test/test_type_preservation_decoder.py +150 -0
- compressedfhir-1.0.5/compressedfhir/utilities/json_serializers/test/test_type_preservation_serializer.py +171 -0
- compressedfhir-1.0.5/compressedfhir/utilities/json_serializers/type_preservation_decoder.py +110 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/json_serializers/type_preservation_serializer.py +3 -1
- {compressedfhir-1.0.3 → compressedfhir-1.0.5/compressedfhir.egg-info}/PKG-INFO +1 -1
- compressedfhir-1.0.3/VERSION +0 -1
- compressedfhir-1.0.3/compressedfhir/fhir/test/test_fhir_resource.py +0 -225
- compressedfhir-1.0.3/compressedfhir/utilities/json_serializers/test/test_type_preservation_decoder.py +0 -82
- compressedfhir-1.0.3/compressedfhir/utilities/json_serializers/test/test_type_preservation_serializer.py +0 -60
- compressedfhir-1.0.3/compressedfhir/utilities/json_serializers/type_preservation_decoder.py +0 -63
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/LICENSE +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/MANIFEST.in +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/README.md +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/base_resource_list.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_bundle.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_bundle_entry.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_bundle_entry_list.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_bundle_entry_request.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_bundle_entry_response.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_bundle_entry_search.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_identifier.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_link.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_meta.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/fhir_resource_list.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_bundle_entry.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_bundle_entry_list.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_bundle_entry_request.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_bundle_entry_response.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_fhir_bundle.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_fhir_resource_list.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/fhir/test/test_fhir_resource_map.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/py.typed +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/v1/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/v1/compressed_dict_access_error.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/v1/compressed_dict_storage_mode.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/compressed_dict/v1/test/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/fhir_json_encoder.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/json_helpers.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/json_serializers/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/json_serializers/test/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/json_serializers/test/test_type_preservation_encoder.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/json_serializers/type_preservation_encoder.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/ordered_dict_to_dict_converter/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/ordered_dict_to_dict_converter/ordered_dict_to_dict_converter.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/string_compressor/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/string_compressor/v1/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/string_compressor/v1/string_compressor.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/string_compressor/v1/test/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/string_compressor/v1/test/test_string_compressor.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/test/__init__.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/test/test_fhir_json_encoder.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir/utilities/test/test_json_helpers.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir.egg-info/SOURCES.txt +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir.egg-info/dependency_links.txt +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir.egg-info/not-zip-safe +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir.egg-info/requires.txt +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/compressedfhir.egg-info/top_level.txt +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/setup.cfg +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/setup.py +0 -0
- {compressedfhir-1.0.3 → compressedfhir-1.0.5}/tests/__init__.py +0 -0
--- compressedfhir-1.0.3/Makefile
+++ compressedfhir-1.0.5/Makefile
@@ -73,3 +73,21 @@ help: ## Show this help.
 pipenv-setup:devdocker ## Run pipenv-setup to update setup.py with latest dependencies
 	docker compose run --rm dev sh -c "pipenv run pipenv install --skip-lock --categories \"pipenvsetup\" && pipenv run pipenv-setup sync --pipfile" && \
 	make run-pre-commit
+
+.PHONY: coverage
+coverage: up ## Run code coverage and generate reports
+	mkdir -p reports
+	docker compose run --rm \
+		-v $(PWD)/reports:/reports \
+		--name compressedfhir_coverage \
+		dev pytest \
+		--cov=. \
+		--cov-report=term-missing \
+		--cov-report=xml:/reports/coverage.xml \
+		--cov-report=html:/reports/htmlcov \
+		--cov-fail-under=80 \
+		tests compressedfhir
+
+.PHONY: clean-coverage
+clean-coverage: ## Remove coverage reports
+	rm -rf reports/coverage.xml reports/htmlcov .coverage
--- /dev/null
+++ compressedfhir-1.0.5/VERSION
@@ -0,0 +1 @@
+1.0.5
--- compressedfhir-1.0.3/compressedfhir/fhir/fhir_resource.py
+++ compressedfhir-1.0.5/compressedfhir/fhir/fhir_resource.py
@@ -61,14 +61,10 @@ class FhirResource(CompressedDict[str, Any]):
             else None
         )

-    def json(self) -> str:
-        """Convert the resource to a JSON string."""
-        return json.dumps(obj=self.dict(), cls=FhirJSONEncoder)
-
     def __deepcopy__(self, memo: Dict[int, Any]) -> "FhirResource":
         """Create a copy of the resource."""
         return FhirResource(
-            initial_dict=super().
+            initial_dict=super().raw_dict(),
             storage_mode=self._storage_mode,
         )

@@ -84,29 +80,6 @@ class FhirResource(CompressedDict[str, Any]):
         """
         return copy.deepcopy(self)

-    @override
-    def dict(self, *, remove_nulls: bool = True) -> OrderedDict[str, Any]:
-        """
-        Converts the FhirResource object to a dictionary.
-
-        :param remove_nulls: If True, removes None values from the dictionary.
-        :return: A dictionary representation of the FhirResource object.
-        """
-        ordered_dict = super().dict()
-        result: OrderedDict[str, Any] = copy.deepcopy(ordered_dict)
-        if remove_nulls:
-            result = FhirClientJsonHelpers.remove_empty_elements_from_ordered_dict(
-                result
-            )
-
-        return result
-
-    def remove_nulls(self) -> None:
-        """
-        Removes None values from the resource dictionary.
-        """
-        self.replace(value=self.dict(remove_nulls=True))
-
     @property
     def id(self) -> Optional[str]:
         """Get the ID from the resource dictionary."""
@@ -171,3 +144,15 @@ class FhirResource(CompressedDict[str, Any]):
                 properties_to_cache=properties_to_cache,
             ),
         )
+
+    @override
+    def json(self) -> str:
+        """Convert the resource to a JSON string."""
+
+        # working_dict preserves the python types so create a fhir friendly version
+        raw_dict: OrderedDict[str, Any] = self.raw_dict()
+
+        raw_dict = FhirClientJsonHelpers.remove_empty_elements_from_ordered_dict(
+            raw_dict
+        )
+        return json.dumps(obj=raw_dict, cls=FhirJSONEncoder)
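For context, here is a minimal usage sketch of the reworked FhirResource serialization. It is not part of the diff; it only uses the constructor, properties, and json() method exercised by the new test file further down, and the module paths from the file list above.

from compressedfhir.fhir.fhir_resource import FhirResource
from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode import (
    CompressedDictStorageMode,
)

# Build a resource the same way the new tests do.
resource = FhirResource(
    initial_dict={"resourceType": "Patient", "id": "123"},
    storage_mode=CompressedDictStorageMode(),
)

print(resource.resource_type_and_id)  # "Patient/123"
# json() now strips empty elements from raw_dict() and encodes with FhirJSONEncoder,
# instead of going through the removed dict(remove_nulls=...) override.
print(resource.json())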
--- compressedfhir-1.0.3/compressedfhir/fhir/fhir_resource_map.py
+++ compressedfhir-1.0.5/compressedfhir/fhir/fhir_resource_map.py
@@ -43,7 +43,7 @@ class FhirResourceMap:
         """
         result: OrderedDict[str, Any] = OrderedDict[str, Any]()
         for key, value in self._resource_map.items():
-            result[key] = [resource.dict(
+            result[key] = [resource.dict() for resource in value]
         return result

     def get(self, *, resource_type: str) -> Optional[FhirResourceList]:
--- /dev/null
+++ compressedfhir-1.0.5/compressedfhir/fhir/test/test_fhir_resource.py
@@ -0,0 +1,104 @@
+import json
+from typing import Dict, Any
+
+from compressedfhir.fhir.fhir_resource import FhirResource
+from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode import (
+    CompressedDictStorageMode,
+)
+
+
+class TestFhirResource:
+    def test_init_empty(self) -> None:
+        """Test initializing FhirResource with no initial dictionary."""
+        resource = FhirResource(storage_mode=CompressedDictStorageMode())
+        assert len(resource) == 0
+        assert resource.resource_type is None
+        assert resource.id is None
+        assert resource.resource_type_and_id is None
+
+    def test_init_with_data(self) -> None:
+        """Test initializing FhirResource with a dictionary."""
+        initial_data: Dict[str, Any] = {
+            "resourceType": "Patient",
+            "id": "123",
+            "name": [{"given": ["John"]}],
+        }
+        resource = FhirResource(
+            initial_dict=initial_data, storage_mode=CompressedDictStorageMode()
+        )
+
+        with resource.transaction():
+            assert resource.resource_type == "Patient"
+            assert resource.id == "123"
+            assert resource.resource_type_and_id == "Patient/123"
+            assert resource["name"][0]["given"][0] == "John"
+
+    def test_resource_type_and_id_property(self) -> None:
+        """Test resource_type_and_id property with various scenarios."""
+        # Scenario 1: Both resource type and id present
+        resource1 = FhirResource(
+            initial_dict={"resourceType": "Observation", "id": "456"},
+            storage_mode=CompressedDictStorageMode(),
+        )
+        assert resource1.resource_type_and_id == "Observation/456"
+
+        # Scenario 2: Missing resource type
+        resource2 = FhirResource(
+            initial_dict={"id": "789"}, storage_mode=CompressedDictStorageMode()
+        )
+        assert resource2.resource_type_and_id is None
+
+        # Scenario 3: Missing id
+        resource3 = FhirResource(
+            initial_dict={"resourceType": "Patient"},
+            storage_mode=CompressedDictStorageMode(),
+        )
+        assert resource3.resource_type_and_id is None
+
+    def test_equality(self) -> None:
+        """Test equality comparison between FhirResource instances."""
+        # Scenario 1: Equal resources
+        resource1 = FhirResource(
+            initial_dict={"resourceType": "Patient", "id": "123"},
+            storage_mode=CompressedDictStorageMode(),
+        )
+        resource2 = FhirResource(
+            initial_dict={"resourceType": "Patient", "id": "123"},
+            storage_mode=CompressedDictStorageMode(),
+        )
+        assert resource1 == resource2
+
+        # Scenario 2: Different resource types
+        resource3 = FhirResource(
+            initial_dict={"resourceType": "Observation", "id": "123"},
+            storage_mode=CompressedDictStorageMode(),
+        )
+        assert resource1 != resource3
+
+        # Scenario 3: Different ids
+        resource4 = FhirResource(
+            initial_dict={"resourceType": "Patient", "id": "456"},
+            storage_mode=CompressedDictStorageMode(),
+        )
+        assert resource1 != resource4
+
+        # Scenario 4: Comparing with non-FhirResource
+        assert resource1 != "Not a FhirResource"
+
+    def test_to_json(self) -> None:
+        """Test JSON serialization of FhirResource."""
+        initial_data: Dict[str, Any] = {
+            "resourceType": "Patient",
+            "id": "123",
+            "name": [{"given": ["John"]}],
+        }
+        resource = FhirResource(
+            initial_dict=initial_data, storage_mode=CompressedDictStorageMode()
+        )
+
+        json_str = resource.json()
+        parsed_json = json.loads(json_str)
+
+        assert parsed_json == initial_data
+        assert "resourceType" in parsed_json
+        assert "id" in parsed_json
--- compressedfhir-1.0.3/compressedfhir/utilities/compressed_dict/v1/compressed_dict.py
+++ compressedfhir-1.0.5/compressedfhir/utilities/compressed_dict/v1/compressed_dict.py
@@ -1,4 +1,5 @@
 import copy
+import json
 from collections.abc import KeysView, ValuesView, ItemsView, MutableMapping
 from contextlib import contextmanager
 from typing import Dict, Optional, Iterator, cast, List, Any, overload, OrderedDict
@@ -13,6 +14,7 @@ from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode im
     CompressedDictStorageMode,
     CompressedDictStorageType,
 )
+from compressedfhir.utilities.fhir_json_encoder import FhirJSONEncoder
 from compressedfhir.utilities.json_serializers.type_preservation_serializer import (
     TypePreservationSerializer,
 )
@@ -176,9 +178,7 @@ class CompressedDict[K, V](MutableMapping[K, V]):
         assert isinstance(dictionary, OrderedDict)
         if storage_type == "compressed":
             # Serialize to JSON and compress with zlib
-            json_str = TypePreservationSerializer.serialize(
-                dictionary, separators=(",", ":")
-            )
+            json_str = TypePreservationSerializer.serialize(dictionary)
             return zlib.compress(
                 json_str.encode("utf-8"), level=zlib.Z_BEST_COMPRESSION
             )
@@ -219,9 +219,7 @@ class CompressedDict[K, V](MutableMapping[K, V]):
             decompressed_bytes: bytes = zlib.decompress(serialized_dict_bytes)
             decoded_text: str = decompressed_bytes.decode("utf-8")
             # noinspection PyTypeChecker
-            decompressed_dict = TypePreservationSerializer.deserialize(
-                decoded_text, object_pairs_hook=OrderedDict
-            )
+            decompressed_dict = TypePreservationSerializer.deserialize(decoded_text)
             assert isinstance(decompressed_dict, OrderedDict)
             return cast(OrderedDict[K, V], decompressed_dict)

@@ -427,19 +425,43 @@ class CompressedDict[K, V](MutableMapping[K, V]):
         """
         return self._get_dict().items()

-    def
+    def raw_dict(self) -> OrderedDict[K, V]:
         """
-
+        Returns the raw dictionary. Deserializes if necessary.
+        Note that this dictionary preserves the python types so it is not FHIR friendly.
+        Use dict() if you want a FHIR friendly version.

         Returns:
-
+            raw dictionary
         """
         if self._working_dict:
             return self._working_dict
         else:
-            # if the working dict is None, return it but don't store it in the self._working_dict to keep memory low
+            # if the working dict is not None, return it but don't store it in the self._working_dict to keep memory low
             return self.create_working_dict()

+    def dict(self) -> OrderedDict[K, V]:
+        """
+        Convert to a FHIR friendly dictionary where the python types like datetime are converted to string versions
+
+        Returns:
+            FHIR friendly dictionary
+        """
+        return cast(
+            OrderedDict[K, V],
+            json.loads(
+                self.json(),
+                object_pairs_hook=lambda pairs: OrderedDict(pairs),
+            ),
+        )
+
+    def json(self) -> str:
+        """Convert the resource to a JSON string."""
+
+        raw_dict: OrderedDict[K, V] = self.raw_dict()
+
+        return json.dumps(obj=raw_dict, cls=FhirJSONEncoder)
+
     def __repr__(self) -> str:
         """
         String representation of the dictionary
@@ -563,7 +585,7 @@ class CompressedDict[K, V](MutableMapping[K, V]):
         """
         # Create a new instance with the same storage mode
         new_instance = CompressedDict(
-            initial_dict=copy.deepcopy(self.
+            initial_dict=copy.deepcopy(self.raw_dict()),
             storage_mode=self._storage_mode,
             properties_to_cache=self._properties_to_cache,
         )
@@ -637,7 +659,7 @@ class CompressedDict[K, V](MutableMapping[K, V]):
         Returns:
             Plain dictionary
         """
-        return OrderedDictToDictConverter.convert(self.
+        return OrderedDictToDictConverter.convert(self.raw_dict())

     @classmethod
     def from_json(cls, json_str: str) -> "CompressedDict[K, V]":
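The hunks above introduce a three-way split on CompressedDict: raw_dict() returns the stored values with Python types intact, dict() round-trips through json() to get a FHIR-friendly OrderedDict, and json() encodes with FhirJSONEncoder. A minimal sketch follows; it is not from the package itself, the module paths are taken from the file list above, and the date handling mirrors the new test further down.

from datetime import date

from compressedfhir.utilities.compressed_dict.v1.compressed_dict import CompressedDict
from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode import (
    CompressedDictStorageMode,
)

# A date value survives compressed storage because TypePreservationSerializer keeps types.
d = CompressedDict(
    initial_dict={"resourceType": "Coverage", "period": {"start": date(2021, 1, 1)}},
    storage_mode=CompressedDictStorageMode.compressed(),
    properties_to_cache=[],
)

assert isinstance(d.raw_dict()["period"]["start"], date)  # python type preserved
fhir_friendly = d.dict()  # json round-trip: the date comes back as a plain string
print(d.json())           # serialized via FhirJSONEncoder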
--- compressedfhir-1.0.3/compressedfhir/utilities/compressed_dict/v1/test/test_compressed_dict.py
+++ compressedfhir-1.0.5/compressedfhir/utilities/compressed_dict/v1/test/test_compressed_dict.py
@@ -1,3 +1,5 @@
+from datetime import datetime
+
 import pytest
 from typing import Any, cast

@@ -232,7 +234,7 @@ def test_transaction_basic_raw_storage() -> None:

     # After transaction
     assert compressed_dict._transaction_depth == 0
-    assert compressed_dict.
+    assert compressed_dict.raw_dict() == {
         "key1": "value1",
         "key2": "value2",
         "key3": "value3",
@@ -260,7 +262,7 @@ def test_transaction_nested_context() -> None:
         assert compressed_dict._transaction_depth == 1

     assert compressed_dict._transaction_depth == 0
-    assert compressed_dict.
+    assert compressed_dict.raw_dict() == {"key1": "value1", "key2": "value2"}


 def test_transaction_access_error() -> None:
@@ -309,7 +311,7 @@ def test_transaction_different_storage_modes() -> None:
     with compressed_dict.transaction() as d:
         d["key2"] = "value2"

-    assert compressed_dict.
+    assert compressed_dict.raw_dict() == {"key1": "value1", "key2": "value2"}


 def test_transaction_with_properties_to_cache() -> None:
@@ -328,7 +330,7 @@ def test_transaction_with_properties_to_cache() -> None:
     with compressed_dict.transaction() as d:
         d["key2"] = "value2"

-    assert compressed_dict.
+    assert compressed_dict.raw_dict() == {
         "key1": "value1",
         "important_prop": "cached_value",
         "key2": "value2",
@@ -358,3 +360,108 @@ def test_transaction_error_handling() -> None:
     # Verify the dictionary state remains unchanged
     with compressed_dict.transaction() as d:
         assert d.dict() == {"key1": "value1", "key2": "value2"}
+
+
+def test_nested_dict_with_datetime() -> None:
+    nested_dict = {
+        "beneficiary": {"reference": "Patient/1234567890123456703", "type": "Patient"},
+        "class": [
+            {
+                "name": "Aetna Plan",
+                "type": {
+                    "coding": [
+                        {
+                            "code": "plan",
+                            "display": "Plan",
+                            "system": "http://terminology.hl7.org/CodeSystem/coverage-class",
+                        }
+                    ]
+                },
+                "value": "AE303",
+            }
+        ],
+        "costToBeneficiary": [
+            {
+                "type": {"text": "Annual Physical Exams NMC - In Network"},
+                "valueQuantity": {
+                    "system": "http://aetna.com/Medicare/CostToBeneficiary/ValueQuantity/code",
+                    "unit": "$",
+                    "value": 50.0,
+                },
+            }
+        ],
+        "id": "3456789012345670304",
+        "identifier": [
+            {
+                "system": "https://sources.aetna.com/coverage/identifier/membershipid/59",
+                "type": {
+                    "coding": [
+                        {
+                            "code": "SN",
+                            "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+                        }
+                    ]
+                },
+                "value": "435679010300+AE303+2021-01-01",
+            },
+            {
+                "id": "uuid",
+                "system": "https://www.icanbwell.com/uuid",
+                "value": "92266603-aa8b-58c6-99bd-326fd1da1896",
+            },
+        ],
+        "meta": {
+            "security": [
+                {"code": "aetna", "system": "https://www.icanbwell.com/owner"},
+                {"code": "aetna", "system": "https://www.icanbwell.com/access"},
+                {"code": "aetna", "system": "https://www.icanbwell.com/vendor"},
+                {"code": "proa", "system": "https://www.icanbwell.com/connectionType"},
+            ],
+            "source": "http://mock-server:1080/test_patient_access_transformer/source/4_0_0/Coverage/3456789012345670304",
+        },
+        "network": "Medicare - MA/NY/NJ - Full Reciprocity",
+        "payor": [
+            {
+                "display": "Aetna",
+                "reference": "Organization/6667778889990000015",
+                "type": "Organization",
+            }
+        ],
+        "period": {
+            "end": datetime.fromisoformat("2021-12-31").date(),
+            "start": datetime.fromisoformat("2021-01-01").date(),
+        },
+        "policyHolder": {"reference": "Patient/1234567890123456703", "type": "Patient"},
+        "relationship": {
+            "coding": [
+                {
+                    "code": "self",
+                    "system": "http://terminology.hl7.org/CodeSystem/subscriber-relationship",
+                }
+            ]
+        },
+        "resourceType": "Coverage",
+        "status": "active",
+        "subscriber": {"reference": "Patient/1234567890123456703", "type": "Patient"},
+        "subscriberId": "435679010300",
+        "type": {
+            "coding": [
+                {
+                    "code": "PPO",
+                    "display": "preferred provider organization policy",
+                    "system": "http://terminology.hl7.org/CodeSystem/v3-ActCode",
+                }
+            ]
+        },
+    }
+
+    compressed_dict = CompressedDict(
+        initial_dict=nested_dict,
+        storage_mode=CompressedDictStorageMode.compressed(),
+        properties_to_cache=[],
+    )
+
+    plain_dict = compressed_dict.to_plain_dict()
+
+    assert plain_dict["period"]["start"] == nested_dict["period"]["start"]  # type: ignore[index]
+    assert plain_dict == nested_dict
--- /dev/null
+++ compressedfhir-1.0.5/compressedfhir/utilities/json_serializers/test/test_type_preservation_decoder.py
@@ -0,0 +1,150 @@
+from datetime import datetime, date
+from decimal import Decimal
+from typing import Type, Any, Dict, Optional
+import pytest
+
+from compressedfhir.utilities.json_serializers.type_preservation_decoder import (
+    TypePreservationDecoder,
+)
+
+
+class TestCustomObject:
+    def __init__(
+        self,
+        name: str,
+        value: int,
+        created_at: Optional[datetime] = None,
+        nested_data: Optional[Dict[str, Any]] = None,
+    ):
+        self.name: str = name
+        self.value: int = value
+        self.created_at: Optional[datetime] = created_at
+        self.nested_data: Optional[Dict[str, Any]] = nested_data
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, TestCustomObject):
+            return False
+        return (
+            self.name == other.name
+            and self.value == other.value
+            and self.created_at == other.created_at
+            and self.nested_data == other.nested_data
+        )
+
+
+@pytest.mark.parametrize(
+    "input_type, input_dict, expected_type",
+    [
+        (
+            "datetime",
+            {"__type__": "datetime", "iso": "2023-01-01T00:00:00+00:00"},
+            datetime,
+        ),
+        ("date", {"__type__": "date", "iso": "2023-01-01"}, date),
+        ("decimal", {"__type__": "decimal", "value": "3.14"}, Decimal),
+        ("complex", {"__type__": "complex", "real": 3, "imag": 4}, complex),
+        ("bytes", {"__type__": "bytes", "value": "test"}, bytes),
+        ("set", {"__type__": "set", "values": [1, 2, 3]}, set),
+    ],
+)
+def test_complex_type_decoding(
+    input_type: str, input_dict: Dict[str, Any], expected_type: Type[Any]
+) -> None:
+    """
+    Test decoding of various complex types
+    """
+    decoded = TypePreservationDecoder.decode(input_dict)
+    assert isinstance(decoded, expected_type)
+
+
+def test_custom_object_decoding() -> None:
+    """
+    Test decoding of custom objects
+    """
+    custom_obj_dict = {
+        "__type__": "TestCustomObject",
+        "__module__": __name__,
+        "attributes": {"name": "test", "value": 42},
+    }
+    decoded = TypePreservationDecoder.decode(custom_obj_dict)
+    assert isinstance(decoded, TestCustomObject)
+    assert decoded.name == "test"
+    assert decoded.value == 42
+
+
+def test_custom_decoder() -> None:
+    """
+    Test custom decoder functionality
+    """
+
+    def custom_decoder(data: Dict[str, Any]) -> Any:
+        if data.get("__type__") == "special_type":
+            return f"Decoded: {data['value']}"
+        return data
+
+    special_dict = {"__type__": "special_type", "value": "test"}
+    decoded = TypePreservationDecoder.decode(
+        special_dict, custom_decoders={"special_type": custom_decoder}
+    )
+    assert decoded == "Decoded: test"
+
+
+def test_nested_datetime_decoding() -> None:
+    """
+    Test decoding of nested datetime fields
+    """
+    nested_datetime_dict = {
+        "__type__": "TestCustomObject",
+        "__module__": __name__,
+        "attributes": {
+            "name": "test",
+            "value": 42,
+            "created_at": {"__type__": "datetime", "iso": "2023-06-15T10:30:00"},
+            "nested_data": {
+                "timestamp": {"__type__": "datetime", "iso": "2023-06-16T15:45:00"}
+            },
+        },
+    }
+
+    decoded: TestCustomObject = TypePreservationDecoder.decode(nested_datetime_dict)
+
+    assert isinstance(decoded, TestCustomObject)
+    assert decoded.name == "test"
+    assert decoded.value == 42
+
+    # Check nested datetime fields
+    assert hasattr(decoded, "created_at")
+    assert isinstance(decoded.created_at, datetime)
+    assert decoded.created_at.year == 2023
+    assert decoded.created_at.month == 6
+    assert decoded.created_at.day == 15
+
+    assert hasattr(decoded, "nested_data")
+    assert isinstance(decoded.nested_data, dict)
+    assert "timestamp" in decoded.nested_data
+    assert isinstance(decoded.nested_data["timestamp"], datetime)
+    assert decoded.nested_data["timestamp"].year == 2023
+    assert decoded.nested_data["timestamp"].month == 6
+    assert decoded.nested_data["timestamp"].day == 16
+
+
+def test_direct_value_decoding() -> None:
+    """
+    Test decoding of direct values without type markers
+    """
+    # Test datetime direct string
+    datetime_str = "2023-01-01T00:00:00"
+    decoded_datetime = TypePreservationDecoder.decode(datetime_str)
+    assert decoded_datetime == datetime_str
+
+    # Test list with mixed types
+    mixed_list = [
+        {"__type__": "datetime", "iso": "2023-06-15T10:30:00"},
+        42,
+        "plain string",
+    ]
+    decoded_list = TypePreservationDecoder.decode(mixed_list)
+    assert len(decoded_list) == 3
+    assert isinstance(decoded_list[0], datetime)
+    assert decoded_list[1] == 42
+    assert decoded_list[2] == "plain string"