helix.fhir.client.sdk 4.2.8__py3-none-any.whl → 4.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- helix_fhir_client_sdk/fhir_client.py +39 -11
- helix_fhir_client_sdk/fhir_merge_resources_mixin.py +193 -3
- helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py +30 -0
- helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py +1 -2
- {helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/METADATA +47 -2
- {helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/RECORD +11 -9
- tests/async/test_benchmark_compress.py +448 -0
- tests/test_fhir_client_clone.py +78 -0
- {helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/WHEEL +0 -0
- {helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/licenses/LICENSE +0 -0
- {helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/top_level.txt +0 -0
helix_fhir_client_sdk/fhir_client.py

@@ -150,9 +150,10 @@ class FhirClient(
         self._log_all_response_urls: bool = False
         """ If True, logs all response URLs and status codes. Can take a lot of memory for when there are many responses. """
 
-
+        # Default to "raw" storage mode - no in-memory compression, resources stored as plain Python dicts
+        self._storage_mode: CompressedDictStorageMode = CompressedDictStorageMode(storage_type="raw")
 
-        self._create_operation_outcome_for_error = False
+        self._create_operation_outcome_for_error: bool | None = False
 
     def action(self, action: str) -> FhirClient:
         """
@@ -455,9 +456,14 @@ class FhirClient(
 
     def compress(self, compress: bool) -> FhirClient:
         """
-        Sets the
+        Sets whether to use HTTP compression (gzip) when sending request data to the server.
 
-
+        This controls compression of the HTTP request body only, not in-memory storage.
+        Default is True (compression enabled).
+
+        To disable all compression, call: .compress(False)
+
+        :param compress: whether to compress HTTP request body (default: True)
         """
         self._compress = compress
         return self
@@ -826,17 +832,17 @@ class FhirClient(
         Whether to ask the server to include the total count in the result
 
 
-        :param include_total: whether to include total count
+        :param include_total: whether to include the total count
        """
         self._include_total = include_total
         return self
 
     def filter(self, filter_: list[BaseFilter]) -> FhirClient:
         """
-        Allows adding in
+        Allows adding in custom filters that derive from BaseFilter
 
 
-        :param filter_: list of custom filter instances that
+        :param filter_: list of custom filter instances that derive from BaseFilter.
         """
         assert isinstance(filter_, list), "This function requires a list"
         self._filters.extend(filter_)
@@ -889,6 +895,16 @@ class FhirClient(
         fhir_client._time_to_live_in_secs_for_cache = self._time_to_live_in_secs_for_cache
         fhir_client._validation_server_url = self._validation_server_url
         fhir_client._smart_merge = self._smart_merge
+        fhir_client._compress = self._compress
+        fhir_client._storage_mode = self._storage_mode
+        fhir_client._send_data_as_chunked = self._send_data_as_chunked
+        fhir_client._use_post_for_search = self._use_post_for_search
+        fhir_client._maximum_time_to_retry_on_429 = self._maximum_time_to_retry_on_429
+        fhir_client._retry_count = self._retry_count
+        fhir_client._throw_exception_on_error = self._throw_exception_on_error
+        fhir_client._trace_request_function = self._trace_request_function
+        fhir_client._log_all_response_urls = self._log_all_response_urls
+        fhir_client._create_operation_outcome_for_error = self._create_operation_outcome_for_error
         return fhir_client
 
     def set_log_all_response_urls(self, value: bool) -> FhirClient:
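The clone() additions above mean compression and storage settings now survive cloning. A minimal sketch of the expected behavior, using only builder methods that appear elsewhere in this diff (the assertion reads a private attribute purely for illustration, as the new tests/test_fhir_client_clone.py does):

```python
from helix_fhir_client_sdk.fhir_client import FhirClient

# Build a client with HTTP compression disabled, then clone it.
original = FhirClient().url("https://fhir.example.com").resource("Patient").compress(False)
copy = original.clone()

# With the new clone() assignments, the cloned client keeps the compress setting.
assert copy._compress is False
```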
@@ -911,18 +927,30 @@ class FhirClient(
 
     def set_storage_mode(self, value: CompressedDictStorageMode) -> FhirClient:
         """
-        Sets the storage mode
+        Sets the in-memory storage mode for FHIR resources.
+
+        This controls how FHIR resources are stored in memory after being received.
+        The default is "raw" (no compression - resources stored as plain Python dicts).
+
+        Available storage types:
+        - "raw": No compression, standard Python dictionaries (default)
+        - "compressed": Zlib/gzip compression in memory
+        - "msgpack": MessagePack binary serialization
+        - "compressed_msgpack": MessagePack + compression
+
+        Note: This is separate from HTTP compression (controlled by .compress()).
+        With default settings, no in-memory compression is applied.
 
-        :param value: storage mode
+        :param value: storage mode (default: raw)
         """
         self._storage_mode = value
         return self
 
-    def set_create_operation_outcome_for_error(self, value: bool) -> FhirClient:
+    def set_create_operation_outcome_for_error(self, value: bool | None) -> FhirClient:
         """
         Sets the create_operation_outcome_for_error flag
 
-        :param value: whether to create operation outcome for error
+        :param value: whether to create an operation outcome for error (True, False, or None)
         """
         self._create_operation_outcome_for_error = value
         return self
helix_fhir_client_sdk/fhir_merge_resources_mixin.py

@@ -2,9 +2,7 @@ import json
 import time
 from collections import deque
 from collections.abc import AsyncGenerator
-from typing import
-    cast,
-)
+from typing import Any, cast
 from urllib import parse
 
 import requests
@@ -45,6 +43,198 @@ from helix_fhir_client_sdk.validators.async_fhir_validator import AsyncFhirValid
 
 
 class FhirMergeResourcesMixin(FhirClientProtocol):
+    async def merge_bundle_uncompressed(
+        self,
+        id_: str | None,
+        bundle: FhirBundle,
+    ) -> FhirMergeResourceResponse:
+        """
+        Optimized variant of :meth:`merge_bundle_async` that bypasses storage-mode handling.
+        Use this method when you do not need storage-mode behavior, or features such as request/response compression.
+        :param id_: id of the resource to merge
+        :param bundle: FHIR Bundle to merge
+        :return: FhirMergeResourceResponse
+        """
+        # Initialize profiling dictionary
+        profiling: dict[str, float] = {
+            "total_time": 0.0,
+            "build_url": 0.0,
+            "get_access_token": 0.0,
+            "prepare_payload": 0.0,
+            "http_post": 0.0,
+            "parse_response": 0.0,
+            "create_response_objects": 0.0,
+        }
+
+        merge_start_time = time.time()
+
+        request_id: str | None = None
+        response_status: int = 500
+
+        # Build URL
+        build_url_start = time.time()
+        full_uri: furl = furl(self._url)
+        full_uri /= self._resource
+
+        # Prepare headers
+        headers = {"Content-Type": "application/fhir+json"}
+        headers.update(self._additional_request_headers)
+        profiling["build_url"] = time.time() - build_url_start
+
+        # Get access token
+        get_token_start = time.time()
+        access_token_result: GetAccessTokenResult = await self.get_access_token_async()
+        access_token: str | None = access_token_result.access_token
+        if access_token:
+            headers["Authorization"] = f"Bearer {access_token}"
+        profiling["get_access_token"] = time.time() - get_token_start
+
+        # Prepare JSON payload
+        prepare_payload_start = time.time()
+        first_resource: FhirResource | None = bundle.entry[0].resource
+        assert first_resource is not None
+        json_payload: str = first_resource.json() if len(bundle.entry) == 1 else bundle.json()
+
+        # Build merge URL
+        obj_id: str = id_ or "1"
+        resource_uri: furl = full_uri / parse.quote(str(obj_id), safe="") / "$merge"
+        profiling["prepare_payload"] = time.time() - prepare_payload_start
+
+        response_text: str | None = None
+        responses: list[dict[str, Any]] = []
+        errors: list[dict[str, Any]] = []
+
+        try:
+            async with RetryableAioHttpClient(
+                fn_get_session=self.create_http_session,
+                refresh_token_func=self._refresh_token_function,
+                tracer_request_func=self._trace_request_function,
+                retries=self._retry_count,
+                exclude_status_codes_from_retry=self._exclude_status_codes_from_retry,
+                use_data_streaming=self._use_data_streaming,
+                send_data_as_chunked=self._send_data_as_chunked,
+                compress=self._compress,
+                throw_exception_on_error=self._throw_exception_on_error,
+                log_all_url_results=self._log_all_response_urls,
+                access_token=self._access_token,
+                access_token_expiry_date=self._access_token_expiry_date,
+                persistent_session=self._persistent_session,
+                use_persistent_session=self._use_persistent_session,
+                close_session_on_exit=self._close_session,
+            ) as client:
+                http_post_start = time.time()
+                response: RetryableAioHttpResponse = await client.post(
+                    url=resource_uri.url,
+                    data=json_payload,
+                    headers=headers,
+                )
+                profiling["http_post"] = time.time() - http_post_start
+
+                response_status = response.status
+                request_id = response.response_headers.get("X-Request-ID", None)
+
+                parse_response_start = time.time()
+                if response.status == 200:
+                    response_text = await response.get_text_async()
+                    if response_text:
+                        try:
+                            # Parse response as plain dicts for speed
+                            parsed_response = json.loads(response_text)
+                            if isinstance(parsed_response, list):
+                                responses = parsed_response
+                            else:
+                                responses = [parsed_response]
+                        except (ValueError, json.JSONDecodeError) as e:
+                            errors.append(
+                                {
+                                    "issue": [
+                                        {
+                                            "severity": "error",
+                                            "code": "exception",
+                                            "diagnostics": f"Failed to parse response: {str(e)}",
+                                        }
+                                    ]
+                                }
+                            )
+                else:
+                    # HTTP error
+                    response_text = await response.get_text_async()
+                    errors.append(
+                        {
+                            "issue": [
+                                {
+                                    "severity": "error",
+                                    "code": "exception",
+                                    "diagnostics": response_text or f"HTTP {response.status}",
+                                }
+                            ]
+                        }
+                    )
+                profiling["parse_response"] = time.time() - parse_response_start
+
+        except requests.exceptions.HTTPError as e:
+            raise FhirSenderException(
+                request_id=request_id,
+                url=resource_uri.url,
+                headers=headers,
+                json_data=json_payload,
+                response_text=response_text,
+                response_status_code=response_status,
+                exception=e,
+                variables=FhirClientLogger.get_variables_to_log(vars(self)),
+                message=f"HttpError: {e}",
+                elapsed_time=time.time() - merge_start_time,
+            ) from e
+        except Exception as e:
+            raise FhirSenderException(
+                request_id=request_id,
+                url=resource_uri.url,
+                headers=headers,
+                json_data=json_payload,
+                response_text=response_text,
+                response_status_code=response_status,
+                exception=e,
+                variables=FhirClientLogger.get_variables_to_log(vars(self)),
+                message=f"Unknown Error: {e}",
+                elapsed_time=time.time() - merge_start_time,
+            ) from e
+
+        # Convert dict responses to proper objects using fast method
+        create_objects_start = time.time()
+        response_entries: deque[BaseFhirMergeResourceResponseEntry] = deque()
+
+        for resp_dict in responses:
+            response_entries.append(FhirMergeResourceResponseEntry.from_dict_uncompressed(resp_dict))
+
+        for error_dict in errors:
+            response_entries.append(FhirMergeResponseEntryError.from_dict(error_dict, storage_mode=self._storage_mode))
+        profiling["create_response_objects"] = time.time() - create_objects_start
+
+        profiling["total_time"] = time.time() - merge_start_time
+
+        # Log profiling information if logger is available
+        if self._logger:
+            self._logger.debug(
+                f"merge_bundle_without_storage profiling: "
+                f"total={profiling['total_time']:.3f}s, "
+                f"build_url={profiling['build_url']:.3f}s, "
+                f"get_token={profiling['get_access_token']:.3f}s, "
+                f"prepare_payload={profiling['prepare_payload']:.3f}s, "
+                f"http_post={profiling['http_post']:.3f}s, "
+                f"parse_response={profiling['parse_response']:.3f}s, "
+                f"create_objects={profiling['create_response_objects']:.3f}s"
+            )
+
+        return FhirMergeResourceResponse(
+            request_id=request_id,
+            url=resource_uri.url,
+            responses=response_entries,
+            error=None if response_status == 200 else (response_text or f"HTTP {response_status}"),
+            access_token=self._access_token,
+            status=response_status,
+            response_text=response_text,
+        )
+
     async def merge_bundle_async(
         self,
         id_: str | None,
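A hedged usage sketch of the new merge_bundle_uncompressed() method follows. How the FhirBundle itself is constructed depends on the compressedfhir version installed, so the bundle is taken as a parameter rather than built here; the printed attributes mirror the constructor arguments shown above and are an assumption about the response object's shape:

```python
from helix_fhir_client_sdk.fhir_client import FhirClient


async def merge_example(bundle) -> None:
    # bundle is assumed to be a compressedfhir FhirBundle built by the caller
    client = FhirClient().url("https://fhir.example.com").resource("Patient")
    # Bypasses storage-mode handling and parses the $merge response as plain dicts
    response = await client.merge_bundle_uncompressed(id_=None, bundle=bundle)
    print(response.status, response.request_id)
```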
helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py

@@ -75,6 +75,36 @@ class FhirMergeResourceResponseEntry(BaseFhirMergeResourceResponseEntry):
             status=data.get("status"),
         )
 
+    @classmethod
+    def from_dict_uncompressed(cls, data: dict[str, Any]) -> "FhirMergeResourceResponseEntry":
+        """
+        Creates a FhirMergeResourceResponseEntry from a dictionary without storage_mode overhead.
+        Uses FhirResource.construct for faster object creation.
+
+        :param data: Dictionary containing the response entry data
+        :return: FhirMergeResourceResponseEntry instance
+        """
+        resource_payload = data.get("resource")
+        resource_obj: FhirResource | None = (
+            FhirResource.construct(**resource_payload) if isinstance(resource_payload, dict) else None
+        )
+        return FhirMergeResourceResponseEntry(
+            created=data.get("created"),
+            updated=data.get("updated"),
+            deleted=data.get("deleted"),
+            id_=data.get("id"),
+            uuid=data.get("uuid"),
+            resource_type=data.get("resourceType"),
+            source_assigning_authority=data.get("source_assigning_authority"),
+            resource_version=data.get("resource_version"),
+            message=data.get("message"),
+            issue=data.get("issue"),
+            error=data.get("error"),
+            token=data.get("token"),
+            resource=resource_obj,
+            status=data.get("status"),
+        )
+
     @classmethod
     @override
     def from_json(
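The new from_dict_uncompressed() classmethod builds an entry straight from a plain dict. A small sketch, with keys chosen to match the fields the method reads above (the sample values are illustrative only):

```python
from helix_fhir_client_sdk.responses.merge.fhir_merge_resource_response_entry import (
    FhirMergeResourceResponseEntry,
)

# Keys match what from_dict_uncompressed reads: id, resourceType, created, updated, ...
entry_dict = {"id": "patient-1", "resourceType": "Patient", "created": True, "updated": False}
entry = FhirMergeResourceResponseEntry.from_dict_uncompressed(entry_dict)
print(entry)
```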
helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py

@@ -8,7 +8,7 @@ import async_timeout
 from aiohttp import ClientError, ClientResponse, ClientResponseError, ClientSession
 from multidict import MultiMapping
 from opentelemetry import trace
-
+
 from helix_fhir_client_sdk.function_types import (
     RefreshTokenFunction,
     RefreshTokenResult,
@@ -130,7 +130,6 @@ class RetryableAioHttpClient:
             if self.compress:
                 kwargs["compress"] = self.compress
             assert self.session is not None
-            logging.info(f"Using Session ID: {id(self.session)} for URL: {url}")
             with TRACER.start_as_current_span(FhirClientSdkOpenTelemetrySpanNames.HTTP_GET) as span:
                 span.set_attribute(
                     FhirClientSdkOpenTelemetryAttributeNames.URL,
{helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: helix.fhir.client.sdk
-Version: 4.2.
+Version: 4.2.9
 Summary: helix.fhir.client.sdk
 Home-page: https://github.com/icanbwell/helix.fhir.client.sdk
 Author: Imran Qureshi
@@ -115,4 +115,49 @@ For FHIR servers that support data streaming (e.g., b.well FHIR server), you can
 The data will be streamed in AsyncGenerators as described above.
 
 # Storage Compression
-The FHIR client SDK
+The FHIR client SDK supports two types of compression:
+
+1. **HTTP Compression** (`compress`): Compresses HTTP request body when sending data to the server. Default: **enabled**
+2. **In-Memory Storage** (`storage_mode`): Controls how FHIR resources are stored in memory. Default: **raw (no compression)**
+
+## Disabling HTTP Compression
+HTTP compression (gzip) is enabled by default for request bodies. To disable it:
+
+```python
+from helix_fhir_client_sdk.fhir_client import FhirClient
+
+# Disable HTTP compression for requests
+fhir_client = FhirClient().url("https://fhir.example.com").compress(False)
+```
+
+## In-Memory Storage Modes
+The SDK supports different storage modes for FHIR resources through the `set_storage_mode()` method.
+By default, resources are stored as raw Python dictionaries (no compression).
+
+```python
+from helix_fhir_client_sdk.fhir_client import FhirClient
+from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode import CompressedDictStorageMode
+
+# Use raw storage (default) - no compression, resources stored as plain Python dicts
+fhir_client = FhirClient().set_storage_mode(CompressedDictStorageMode(storage_type="raw"))
+
+# Use msgpack storage - stores resources in msgpack format
+fhir_client = FhirClient().set_storage_mode(CompressedDictStorageMode(storage_type="msgpack"))
+
+# Use compressed msgpack storage - stores resources in compressed msgpack format
+fhir_client = FhirClient().set_storage_mode(CompressedDictStorageMode(storage_type="compressed_msgpack"))
+```
+
+Available storage types:
+- `raw`: Default. Resources are stored as standard Python dictionaries (no compression)
+- `msgpack`: Resources are serialized using MessagePack for efficient storage
+- `compressed_msgpack`: Resources are serialized using MessagePack and then compressed
+
+## Getting Raw Python Dictionaries
+To completely bypass the `compressedfhir` library and get plain Python dictionaries:
+
+```python
+# Returns plain Python dicts, not FhirResource objects
+result = await fhir_client.get_raw_resources_async()
+resources = result["_resources"] # list[dict[str, Any]]
+```
{helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/RECORD

@@ -3,10 +3,10 @@ helix_fhir_client_sdk/dictionary_parser.py,sha256=WrGkVAxMlUvVycRVrX7UZt2oP2e_Vk
 helix_fhir_client_sdk/dictionary_writer.py,sha256=V7Bx9Z69s0LRYF6Lc6Xp0d-Gj0BnAVKA1vBuwf3JORE,1486
 helix_fhir_client_sdk/fhir_auth_mixin.py,sha256=p2QIYrCv7ZktutY7SzYizmHADahxBddRTSCqX1EKyHc,14465
 helix_fhir_client_sdk/fhir_bundle_appender.py,sha256=t1hs7p_vXKC9MUFyUnN9dTuDhRF-kw-kkgVFtGHv9QQ,11749
-helix_fhir_client_sdk/fhir_client.py,sha256
+helix_fhir_client_sdk/fhir_client.py,sha256=vSkgWVgNlRkdu7RFPkD1-4BF9N88PnRXhCTSqUJqHlI,34930
 helix_fhir_client_sdk/fhir_delete_mixin.py,sha256=1YiKddTJTUzzrRvG7WgSisXY8rfJAHPAEXTsOevrom8,6521
 helix_fhir_client_sdk/fhir_merge_mixin.py,sha256=YTUODvc2rqXhk5_kwNMFPykm1I9_omww2WUcDExMESg,15264
-helix_fhir_client_sdk/fhir_merge_resources_mixin.py,sha256=
+helix_fhir_client_sdk/fhir_merge_resources_mixin.py,sha256=GsVeqcYpDvV5oeNbwyHS5YQozAyayICNcIRTQTRcrY0,36130
 helix_fhir_client_sdk/fhir_patch_mixin.py,sha256=YGcCPStoqVxWCYOaNI8vCKSFkAyRzP0YM_UWp4UKYro,5858
 helix_fhir_client_sdk/fhir_update_mixin.py,sha256=7psQTBsGPY1izuwn3yD4MGLjLVWQjqA_15_IeaUspew,6230
 helix_fhir_client_sdk/function_types.py,sha256=x95j6ix3Xa9b276Q741xX1jguqBuFT6EBLDw35_EoVM,3916
@@ -70,7 +70,7 @@ helix_fhir_client_sdk/responses/get/test/test_get_single_response.py,sha256=cDK6
 helix_fhir_client_sdk/responses/merge/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 helix_fhir_client_sdk/responses/merge/base_fhir_merge_resource_response_entry.py,sha256=0PKxQc6sfO2RJka6AS4cX3U_yFnHRR0nsfgva91WrYk,3734
 helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response.py,sha256=jLHzg-mHYKYBMrAnsx_vnTaP0OCuHJoPEHODCHFxaXA,3226
-helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py,sha256=
+helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py,sha256=DyTCoix8MqD9_860I0-Ikl6wCiACzQFMOaEossxyTWc,4626
 helix_fhir_client_sdk/responses/merge/fhir_merge_response_entry_issue.py,sha256=e7yKvYqdcb45YWblQafyoK1scf62j8Ux80AOdrkQbF4,2048
 helix_fhir_client_sdk/responses/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 helix_fhir_client_sdk/responses/test/test_bundle_expander.py,sha256=-IMu42ZcRSif8pwDWI0SsBXDI-8gRmOAsshtNhmoROM,877
@@ -104,7 +104,7 @@ helix_fhir_client_sdk/utilities/hash_util.py,sha256=YNUy7-IC_OtC0l-T45UO9UkA-_ps
 helix_fhir_client_sdk/utilities/list_chunker.py,sha256=2h2k5CCFmOhICaugOx6UI-9dh4q5w1lVdF7WQLX0LCM,1456
 helix_fhir_client_sdk/utilities/ndjson_chunk_streaming_parser.py,sha256=3TCYfWVCEpJbqRxqlSDsGnFnraO4T9bxzYdShvu6Pos,1954
 helix_fhir_client_sdk/utilities/practitioner_generator.py,sha256=gneCAXNDNEphBY-Nc2nMQBbEWJgHcjvv3S8JQ75yiJI,3778
-helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py,sha256=
+helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py,sha256=pxB4N0nkNBo2LT59tr1WnLBqiyr0J6l2OfXVMad1WJU,20898
 helix_fhir_client_sdk/utilities/retryable_aiohttp_response.py,sha256=DvNX6WO1m2Hz6LoI5CwSPDECPd8oDsqRCVsyq_Oxf-0,3542
 helix_fhir_client_sdk/utilities/retryable_aiohttp_url_result.py,sha256=Gdmvn6qIM2JF0YOhobQUHY41fCxvYyaths_CZs0iJfo,616
 helix_fhir_client_sdk/utilities/url_checker.py,sha256=_JRSIvu7WNXh2OA79HJbEEiomGT-quGhAUGh44-9824,3580
@@ -130,9 +130,10 @@ helix_fhir_client_sdk/validators/async_fhir_validator.py,sha256=Bgiw5atbc5YzBYpk
 helix_fhir_client_sdk/validators/fhir_validator.py,sha256=HWBldSEB9yeKIcnLcV8R-LoTzwT_OMu8SchtUUBKzys,2331
 helix_fhir_client_sdk/validators/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 helix_fhir_client_sdk/validators/test/test_async_fhir_validator.py,sha256=RmSowjPUdZee5nYuYujghxWyqJ20cu7U0lJFtFT-ZBs,3285
-helix_fhir_client_sdk-4.2.
+helix_fhir_client_sdk-4.2.9.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/logger_for_test.py,sha256=UC-7F6w6fDsUIYf37aRnvUdiUUVk8qkJEUSuO17NQnI,1525
+tests/test_fhir_client_clone.py,sha256=ZjC0ctog5r8Ba0ZD94ggwbnbQMeQdxk5pTt3zTpE8rQ,2832
 tests/test_get_nested_property.py,sha256=dA7eNmPJuwzQTViORRmJkcn9RAZzxeajSxUghobHpAo,2381
 tests/async/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/async/test_async_fhir_client_fetch_response_in_chunks.py,sha256=Ql95DuOqaidFL6u553f-Bkp4Rsd662C__NA-5sw0KL0,3993
@@ -150,6 +151,7 @@ tests/async/test_async_fhir_client_patient_merge_with_validate.py,sha256=CRTfV0v
 tests/async/test_async_fhir_client_patient_update.py,sha256=Q-hNO9D4FZ_r7oa62pyBG9keN-ZtkKvlsMK0hoE-5RA,1685
 tests/async/test_async_real_fhir_server_get_patients.py,sha256=0oMnUJg1KEspJ5_4eBen8z12Ion5HXv-gKAzFAkyII0,3837
 tests/async/test_async_real_fhir_server_get_patients_error.py,sha256=_s7chLogAg0yKgGpsq1o9_dDHBrzGaRWBAo8agFTN6U,1914
+tests/async/test_benchmark_compress.py,sha256=q1gDG7qXvof-3uVAqJlZAW7uO8cR0vEeDfzl-iwIEtY,16470
 tests/async/fhir_server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/async/fhir_server/test_async_real_fhir_server_get_graph_large.py,sha256=fM2MPF47nDF2Qwj2AkdTZ2CfvgUYGN4AVIS253KC9MQ,9430
 tests/async/fhir_server/test_async_real_fhir_server_get_patients_large.py,sha256=rXRF8E8Al7XANCmef1d_WqxSA9TVQjVC7B41OZaEQlY,5583
@@ -209,7 +211,7 @@ tests_integration/test_emr_server_auth.py,sha256=2I4QUAspQN89uGf6JB2aVuYaBeDnRJz
 tests_integration/test_firely_fhir.py,sha256=ll6-plwQrKfdrEyfbw0wLTC1jB-Qei1Mj-81tYTl5eQ,697
 tests_integration/test_merge_vs_smart_merge_behavior.py,sha256=LrIuyxzw0YLaTjcRtG0jzy0M6xSv9qebmdBtMPDcacQ,3733
 tests_integration/test_staging_server_graph.py,sha256=5RfMxjhdX9o4-n_ZRvze4Sm8u8NjRijRLDpqiz8qD_0,7132
-helix_fhir_client_sdk-4.2.
-helix_fhir_client_sdk-4.2.
-helix_fhir_client_sdk-4.2.
-helix_fhir_client_sdk-4.2.
+helix_fhir_client_sdk-4.2.9.dist-info/METADATA,sha256=e-ajvH1vdbrt84TIgYqnldqWv9RzfHeuyzgSURayZyk,5841
+helix_fhir_client_sdk-4.2.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+helix_fhir_client_sdk-4.2.9.dist-info/top_level.txt,sha256=BRnDS6ceQxs-4u2jXznATObgP8G2cGAerlH0ZS4sJ6M,46
+helix_fhir_client_sdk-4.2.9.dist-info/RECORD,,
tests/async/test_benchmark_compress.py

@@ -0,0 +1,448 @@
+"""
+Benchmark tests for comparing compressed vs uncompressed FHIR client operations.
+
+These tests measure the performance of:
+- get_async() with compress=True vs compress=False
+- get_raw_resources_async() with compress=True vs compress=False
+
+=============================================================================
+HOW TO RUN THESE TESTS
+=============================================================================
+
+1. Start services using docker-compose:
+   docker-compose up -d mock-server
+
+2. First time only - rebuild dev container to include pytest-benchmark:
+   docker-compose build dev
+
+   OR install pytest-benchmark in the running container:
+   docker-compose run --rm dev pip install pytest-benchmark
+
+3. Run benchmark tests inside docker container:
+   docker-compose run --rm dev pytest tests/async/test_benchmark_compress.py -v --benchmark-only
+
+4. Or run all benchmark variations:
+   docker-compose run --rm dev pytest tests/async/test_benchmark_compress.py -v --benchmark-only --benchmark-group-by=func
+
+5. Save benchmark results for comparison:
+   docker-compose run --rm dev pytest tests/async/test_benchmark_compress.py -v --benchmark-autosave
+
+6. Compare with previous runs:
+   docker-compose run --rm dev pytest tests/async/test_benchmark_compress.py -v --benchmark-compare
+
+7. Run with more iterations for accuracy:
+   docker-compose run --rm dev pytest tests/async/test_benchmark_compress.py -v --benchmark-min-rounds=10
+
+8. To stop mock-server:
+   docker-compose down mock-server
+
+=============================================================================
+"""
+
+import asyncio
+import json
+import socket
+from typing import Any
+
+import pytest
+from mockserver_client.mockserver_client import (
+    MockServerFriendlyClient,
+    mock_request,
+    mock_response,
+    times,
+)
+
+from helix_fhir_client_sdk.fhir_client import FhirClient
+from helix_fhir_client_sdk.responses.fhir_get_response import FhirGetResponse
+
+
+def is_mock_server_running(host: str = "mock-server", port: int = 1080) -> bool:
+    """Check if mock-server is reachable."""
+    try:
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        sock.settimeout(2)
+        result = sock.connect_ex((host, port))
+        sock.close()
+        return result == 0
+    except OSError:
+        return False
+
+
+# Skip all tests if mock-server is not running
+pytestmark = pytest.mark.skipif(
+    not is_mock_server_running(), reason="Mock server not running. Start with: docker-compose up -d mock-server"
+)
+
+
+def generate_patient_resource(index: int) -> dict[str, Any]:
+    """Generate a realistic FHIR Patient resource."""
+    return {
+        "resourceType": "Patient",
+        "id": f"patient-{index}",
+        "meta": {
+            "versionId": "1",
+            "lastUpdated": "2025-01-15T10:30:00.000Z",
+            "source": "http://example.org/fhir",
+            "profile": ["http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient"],
+        },
+        "identifier": [
+            {
+                "use": "official",
+                "type": {
+                    "coding": [
+                        {
+                            "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+                            "code": "MR",
+                            "display": "Medical Record Number",
+                        }
+                    ]
+                },
+                "system": "http://hospital.example.org/mrn",
+                "value": f"MRN-{index:08d}",
+            },
+            {
+                "use": "official",
+                "type": {
+                    "coding": [
+                        {
+                            "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+                            "code": "SS",
+                            "display": "Social Security Number",
+                        }
+                    ]
+                },
+                "system": "http://hl7.org/fhir/sid/us-ssn",
+                "value": f"{100 + index:03d}-{50 + index:02d}-{1000 + index:04d}",
+            },
+        ],
+        "active": True,
+        "name": [
+            {
+                "use": "official",
+                "family": f"TestFamily{index}",
+                "given": [f"TestGiven{index}", f"MiddleName{index}"],
+                "prefix": ["Mr."],
+                "suffix": ["Jr."],
+            },
+            {
+                "use": "nickname",
+                "given": [f"Nick{index}"],
+            },
+        ],
+        "telecom": [
+            {"system": "phone", "value": f"555-{100 + index:03d}-{1000 + index:04d}", "use": "home"},
+            {"system": "phone", "value": f"555-{200 + index:03d}-{2000 + index:04d}", "use": "mobile"},
+            {"system": "email", "value": f"patient{index}@example.com", "use": "home"},
+        ],
+        "gender": "male" if index % 2 == 0 else "female",
+        "birthDate": f"{1950 + (index % 50)}-{(index % 12) + 1:02d}-{(index % 28) + 1:02d}",
+        "deceasedBoolean": False,
+        "address": [
+            {
+                "use": "home",
+                "type": "physical",
+                "line": [f"{100 + index} Main Street", f"Apt {index}"],
+                "city": "Boston",
+                "state": "MA",
+                "postalCode": f"02{100 + (index % 900):03d}",
+                "country": "USA",
+            },
+            {
+                "use": "work",
+                "type": "postal",
+                "line": [f"{200 + index} Business Ave"],
+                "city": "Cambridge",
+                "state": "MA",
+                "postalCode": f"02{200 + (index % 800):03d}",
+                "country": "USA",
+            },
+        ],
+        "maritalStatus": {
+            "coding": [
+                {
+                    "system": "http://terminology.hl7.org/CodeSystem/v3-MaritalStatus",
+                    "code": "M" if index % 2 == 0 else "S",
+                    "display": "Married" if index % 2 == 0 else "Never Married",
+                }
+            ]
+        },
+        "communication": [
+            {
+                "language": {
+                    "coding": [
+                        {
+                            "system": "urn:ietf:bcp:47",
+                            "code": "en-US",
+                            "display": "English (United States)",
+                        }
+                    ]
+                },
+                "preferred": True,
+            }
+        ],
+        "generalPractitioner": [{"reference": f"Practitioner/practitioner-{index % 10}"}],
+        "managingOrganization": {"reference": "Organization/org-1"},
+    }
+
+
+def generate_patient_bundle(count: int) -> dict[str, Any]:
+    """Generate a FHIR Bundle with multiple Patient resources."""
+    entries = []
+    for i in range(count):
+        entries.append(
+            {
+                "fullUrl": f"http://example.org/fhir/Patient/patient-{i}",
+                "resource": generate_patient_resource(i),
+                "search": {"mode": "match"},
+            }
+        )
+    return {
+        "resourceType": "Bundle",
+        "id": "bundle-search-result",
+        "type": "searchset",
+        "total": count,
+        "link": [
+            {"relation": "self", "url": f"http://example.org/fhir/Patient?_count={count}"},
+        ],
+        "entry": entries,
+    }
+
+
+@pytest.fixture(scope="module")
+def mock_server_url() -> str:
+    return "http://mock-server:1080"
+
+
+@pytest.fixture(scope="module")
+def mock_client(mock_server_url: str) -> MockServerFriendlyClient:
+    return MockServerFriendlyClient(base_url=mock_server_url)
+
+
+@pytest.fixture(scope="module")
+def setup_mock_endpoints(mock_client: MockServerFriendlyClient, mock_server_url: str) -> str:
+    """Set up mock endpoints for different payload sizes."""
+    test_name = "benchmark_compress"
+
+    mock_client.clear(f"/{test_name}/*.*")
+    mock_client.reset()
+
+    # Create payloads of different sizes for benchmarking
+    payloads = {
+        "small": generate_patient_bundle(10),  # ~10KB
+        "medium": generate_patient_bundle(100),  # ~100KB
+        "large": generate_patient_bundle(500),  # ~500KB
+    }
+
+    # Setup mock endpoints for each payload size
+    for size, bundle in payloads.items():
+        response_body = json.dumps(bundle)
+        # Endpoint for GET /Patient (returns bundle)
+        mock_client.expect(
+            request=mock_request(path=f"/{test_name}/{size}/Patient", method="GET"),
+            response=mock_response(body=response_body),
+            timing=times(10000),  # Allow many requests for benchmarking
+        )
+        # Endpoint for GET /Patient/{id} (returns single resource)
+        mock_client.expect(
+            request=mock_request(path=f"/{test_name}/{size}/Patient/{size}", method="GET"),
+            response=mock_response(body=response_body),
+            timing=times(10000),
+        )
+
+    return f"{mock_server_url}/{test_name}"
+
+
+# ============================================================================
+# Benchmark Tests for get_async()
+# ============================================================================
+
+
+def test_benchmark_get_async_compress_false_small(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_async with compress=False and a small payload (10 patients)."""
+    base_url = f"{setup_mock_endpoints}/small"
+
+    async def run_get_async() -> FhirGetResponse:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(False).get_async()
+
+    def run_sync() -> FhirGetResponse:
+        return asyncio.run(run_get_async())
+
+    result = benchmark(run_sync)
+    assert result is not None
+    assert result.get_response_text() is not None
+
+
+def test_benchmark_get_async_compress_true_small(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_async with compress=True and a small payload (10 patients)."""
+    base_url = f"{setup_mock_endpoints}/small"
+
+    async def run_get_async() -> FhirGetResponse:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(True).get_async()
+
+    def run_sync() -> FhirGetResponse:
+        return asyncio.run(run_get_async())
+
+    result = benchmark(run_sync)
+    assert result is not None
+    assert result.get_response_text() is not None
+
+
+def test_benchmark_get_async_compress_false_medium(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_async with compress=False and medium payload (100 patients)."""
+    base_url = f"{setup_mock_endpoints}/medium"
+
+    async def run_get_async() -> FhirGetResponse:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(False).get_async()
+
+    def run_sync() -> FhirGetResponse:
+        return asyncio.run(run_get_async())
+
+    result = benchmark(run_sync)
+    assert result is not None
+    assert result.get_response_text() is not None
+
+
+def test_benchmark_get_async_compress_true_medium(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_async with compress=True and medium payload (100 patients)."""
+    base_url = f"{setup_mock_endpoints}/medium"
+
+    async def run_get_async() -> FhirGetResponse:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(True).get_async()
+
+    def run_sync() -> FhirGetResponse:
+        return asyncio.run(run_get_async())
+
+    result = benchmark(run_sync)
+    assert result is not None
+    assert result.get_response_text() is not None
+
+
+def test_benchmark_get_async_compress_false_large(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_async with compress=False and a large payload (500 patients)."""
+    base_url = f"{setup_mock_endpoints}/large"
+
+    async def run_get_async() -> FhirGetResponse:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(False).get_async()
+
+    def run_sync() -> FhirGetResponse:
+        return asyncio.run(run_get_async())
+
+    result = benchmark(run_sync)
+    assert result is not None
+    assert result.get_response_text() is not None
+
+
+def test_benchmark_get_async_compress_true_large(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_async with compress=True and a large payload (500 patients)."""
+    base_url = f"{setup_mock_endpoints}/large"
+
+    async def run_get_async() -> FhirGetResponse:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(True).get_async()
+
+    def run_sync() -> FhirGetResponse:
+        return asyncio.run(run_get_async())
+
+    result = benchmark(run_sync)
+    assert result is not None
+    assert result.get_response_text() is not None
+
+
+# ============================================================================
+# Benchmark Tests for get_raw_resources_async()
+# ============================================================================
+
+
+def test_benchmark_get_raw_resources_async_compress_false_small(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_raw_resources_async with compress=False and small payload."""
+    base_url = f"{setup_mock_endpoints}/small"
+
+    async def run_get_raw() -> dict[str, Any]:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(False).get_raw_resources_async()
+
+    def run_sync() -> dict[str, Any]:
+        return asyncio.run(run_get_raw())
+
+    result = benchmark(run_sync)
+    assert result is not None
+
+
+def test_benchmark_get_raw_resources_async_compress_true_small(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_raw_resources_async with compress=True and a small payload."""
+    base_url = f"{setup_mock_endpoints}/small"
+
+    async def run_get_raw() -> dict[str, Any]:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(True).get_raw_resources_async()
+
+    def run_sync() -> dict[str, Any]:
+        return asyncio.run(run_get_raw())
+
+    result = benchmark(run_sync)
+    assert result is not None
+
+
+def test_benchmark_get_raw_resources_async_compress_false_medium(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_raw_resources_async with compress=False and medium payload."""
+    base_url = f"{setup_mock_endpoints}/medium"
+
+    async def run_get_raw() -> dict[str, Any]:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(False).get_raw_resources_async()
+
+    def run_sync() -> dict[str, Any]:
+        return asyncio.run(run_get_raw())
+
+    result = benchmark(run_sync)
+    assert result is not None
+
+
+def test_benchmark_get_raw_resources_async_compress_true_medium(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_raw_resources_async with compress=True and medium payload."""
+    base_url = f"{setup_mock_endpoints}/medium"
+
+    async def run_get_raw() -> dict[str, Any]:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(True).get_raw_resources_async()
+
+    def run_sync() -> dict[str, Any]:
+        return asyncio.run(run_get_raw())
+
+    result = benchmark(run_sync)
+    assert result is not None
+
+
+def test_benchmark_get_raw_resources_async_compress_false_large(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_raw_resources_async with compress=False and a large payload."""
+    base_url = f"{setup_mock_endpoints}/large"
+
+    async def run_get_raw() -> dict[str, Any]:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(False).get_raw_resources_async()
+
+    def run_sync() -> dict[str, Any]:
+        return asyncio.run(run_get_raw())
+
+    result = benchmark(run_sync)
+    assert result is not None
+
+
+def test_benchmark_get_raw_resources_async_compress_true_large(benchmark: Any, setup_mock_endpoints: str) -> None:
+    """Benchmark get_raw_resources_async with compress=True and a large payload."""
+    base_url = f"{setup_mock_endpoints}/large"
+
+    async def run_get_raw() -> dict[str, Any]:
+        fhir_client = FhirClient().url(base_url).resource("Patient")
+        return await fhir_client.compress(True).get_raw_resources_async()
+
+    def run_sync() -> dict[str, Any]:
+        return asyncio.run(run_get_raw())
+
+    result = benchmark(run_sync)
+    assert result is not None
tests/test_fhir_client_clone.py

@@ -0,0 +1,78 @@
+"""
+Tests for FhirClient.clone() method to ensure all properties are properly copied.
+"""
+
+from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode import (
+    CompressedDictStorageMode,
+)
+
+from helix_fhir_client_sdk.fhir_client import FhirClient
+
+
+def test_clone_preserves_compress_setting() -> None:
+    """Test that clone() preserves the _compress setting"""
+    # Create a client with compression disabled
+    fhir_client = FhirClient().url("http://example.com").compress(False)
+    assert fhir_client._compress is False
+
+    # Clone and verify compression is still disabled
+    cloned_client = fhir_client.clone()
+    assert cloned_client._compress is False
+
+    # Test with compression enabled
+    fhir_client2 = FhirClient().url("http://example.com").compress(True)
+    assert fhir_client2._compress is True
+
+    cloned_client2 = fhir_client2.clone()
+    assert cloned_client2._compress is True
+
+
+def test_clone_preserves_storage_mode() -> None:
+    """Test that clone() preserves the _storage_mode setting"""
+    # Create a client with msgpack storage mode
+    storage_mode = CompressedDictStorageMode(storage_type="msgpack")
+    fhir_client = FhirClient().url("http://example.com").set_storage_mode(storage_mode)
+    assert fhir_client._storage_mode.storage_type == "msgpack"
+
+    # Clone and verify storage mode is preserved
+    cloned_client = fhir_client.clone()
+    assert cloned_client._storage_mode.storage_type == "msgpack"
+
+
+def test_clone_preserves_additional_settings() -> None:
+    """Test that clone() preserves other compression-related settings"""
+    fhir_client = (
+        FhirClient()
+        .url("http://example.com")
+        .compress(False)
+        .send_data_as_chunked(True)
+        .use_post_for_search(True)
+        .maximum_time_to_retry_on_429(120)
+        .retry_count(5)
+        .throw_exception_on_error(False)
+        .set_log_all_response_urls(True)
+        .set_create_operation_outcome_for_error(True)
+    )
+
+    cloned_client = fhir_client.clone()
+
+    assert cloned_client._compress is False
+    assert cloned_client._send_data_as_chunked is True
+    assert cloned_client._use_post_for_search is True
+    assert cloned_client._maximum_time_to_retry_on_429 == 120
+    assert cloned_client._retry_count == 5
+    assert cloned_client._throw_exception_on_error is False
+    assert cloned_client._log_all_response_urls is True
+    assert cloned_client._create_operation_outcome_for_error is True
+
+
+def test_default_compression_is_enabled() -> None:
+    """Test that compression is enabled by default"""
+    fhir_client = FhirClient()
+    assert fhir_client._compress is True
+
+
+def test_default_storage_mode_is_raw() -> None:
+    """Test that the default storage mode is 'raw'"""
+    fhir_client = FhirClient()
+    assert fhir_client._storage_mode.storage_type == "raw"
{helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/WHEEL RENAMED
File without changes

{helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/licenses/LICENSE RENAMED
File without changes

{helix_fhir_client_sdk-4.2.8.dist-info → helix_fhir_client_sdk-4.2.9.dist-info}/top_level.txt RENAMED
File without changes