helix.fhir.client.sdk 4.2.3__py3-none-any.whl → 4.2.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- helix_fhir_client_sdk/fhir_auth_mixin.py +17 -10
- helix_fhir_client_sdk/fhir_client.py +152 -79
- helix_fhir_client_sdk/fhir_delete_mixin.py +62 -48
- helix_fhir_client_sdk/fhir_merge_mixin.py +188 -166
- helix_fhir_client_sdk/fhir_merge_resources_mixin.py +200 -15
- helix_fhir_client_sdk/fhir_patch_mixin.py +97 -84
- helix_fhir_client_sdk/fhir_update_mixin.py +71 -57
- helix_fhir_client_sdk/graph/simulated_graph_processor_mixin.py +147 -49
- helix_fhir_client_sdk/open_telemetry/__init__.py +0 -0
- helix_fhir_client_sdk/open_telemetry/attribute_names.py +7 -0
- helix_fhir_client_sdk/open_telemetry/span_names.py +12 -0
- helix_fhir_client_sdk/queue/request_queue_mixin.py +17 -12
- helix_fhir_client_sdk/responses/fhir_client_protocol.py +10 -6
- helix_fhir_client_sdk/responses/fhir_get_response.py +3 -4
- helix_fhir_client_sdk/responses/fhir_response_processor.py +73 -54
- helix_fhir_client_sdk/responses/get/fhir_get_bundle_response.py +49 -28
- helix_fhir_client_sdk/responses/get/fhir_get_error_response.py +0 -1
- helix_fhir_client_sdk/responses/get/fhir_get_list_by_resource_type_response.py +1 -1
- helix_fhir_client_sdk/responses/get/fhir_get_list_response.py +1 -1
- helix_fhir_client_sdk/responses/get/fhir_get_response_factory.py +0 -1
- helix_fhir_client_sdk/responses/get/fhir_get_single_response.py +1 -1
- helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py +30 -0
- helix_fhir_client_sdk/responses/resource_separator.py +35 -40
- helix_fhir_client_sdk/utilities/cache/request_cache.py +32 -43
- helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py +185 -154
- helix_fhir_client_sdk/utilities/retryable_aiohttp_response.py +2 -1
- helix_fhir_client_sdk/validators/async_fhir_validator.py +3 -0
- helix_fhir_client_sdk-4.2.19.dist-info/METADATA +200 -0
- {helix_fhir_client_sdk-4.2.3.dist-info → helix_fhir_client_sdk-4.2.19.dist-info}/RECORD +36 -29
- tests/async/test_benchmark_compress.py +448 -0
- tests/async/test_benchmark_merge.py +506 -0
- tests/async/test_retryable_client_session_management.py +159 -0
- tests/test_fhir_client_clone.py +155 -0
- helix_fhir_client_sdk-4.2.3.dist-info/METADATA +0 -115
- {helix_fhir_client_sdk-4.2.3.dist-info → helix_fhir_client_sdk-4.2.19.dist-info}/WHEEL +0 -0
- {helix_fhir_client_sdk-4.2.3.dist-info → helix_fhir_client_sdk-4.2.19.dist-info}/licenses/LICENSE +0 -0
- {helix_fhir_client_sdk-4.2.3.dist-info → helix_fhir_client_sdk-4.2.19.dist-info}/top_level.txt +0 -0
helix_fhir_client_sdk/graph/simulated_graph_processor_mixin.py

@@ -1,4 +1,5 @@
 import json
+import time
 from abc import ABC
 from collections.abc import AsyncGenerator
 from datetime import UTC, datetime

@@ -80,7 +81,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
         add_cached_bundles_to_result: bool = True,
         input_cache: RequestCache | None = None,
         compare_hash: bool = True,
-        make_persistent_connection: bool = False,
     ) -> AsyncGenerator[FhirGetResponse, None]:
         """
         Asynchronously simulate a FHIR $graph query with advanced processing capabilities.

@@ -120,11 +120,19 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
             add_cached_bundles_to_result: Optional flag to add cached bundles to result
             input_cache: Optional cache for resource retrieval
             compare_hash: Flag to compare resource hashes for changes
-            make_persistent_connection: Flag to make a persistent HTTP connection

         Yields:
             FhirGetResponse objects representing retrieved resources
         """
+
+        profiling: dict[str, Any] = {
+            "function": "process_simulate_graph_async",
+            "start_time": time.perf_counter(),
+            "steps": {},
+            "extend_calls": [],
+            "append_calls": [],
+        }
+
         # Validate graph definition input
         assert graph_json, "Graph JSON must be provided"
         graph_definition: GraphDefinition = GraphDefinition.from_dict(graph_json)
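The remaining changes in this file thread that profiling dict through the method: each step is timed with time.perf_counter(), durations are collected under "steps" and "extend_calls", and a "[PROFILING]" summary is logged at the end. A minimal standalone sketch of the same pattern, with illustrative step names that are not SDK code:

    # Minimal illustration of the profiling pattern added in this diff: time each
    # step with perf_counter, stash the durations in a dict, log one summary line.
    import logging
    import time

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    profiling: dict[str, float] = {"start_time": time.perf_counter()}

    step_start = time.perf_counter()
    sum(range(1_000_000))  # stand-in for "get start resources"
    profiling["get_start_resources"] = time.perf_counter() - step_start

    step_start = time.perf_counter()
    sorted(range(1_000_000, 0, -1))  # stand-in for "process graph links"
    profiling["process_graph_links"] = time.perf_counter() - step_start

    profiling["total_time"] = time.perf_counter() - profiling["start_time"]
    logger.info(
        "[PROFILING] total=%.3fs, get_start_resources=%.3fs, process_graph_links=%.3fs",
        profiling["total_time"],
        profiling["get_start_resources"],
        profiling["process_graph_links"],
    )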
@@ -134,17 +142,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
         # Parse authentication scopes for resource access control
         scope_parser: FhirScopeParser = FhirScopeParser(scopes=auth_scopes)

-        persistent_session = None
-        if make_persistent_connection:
-            # Create a persistent HTTP session for reuse across all requests in this graph traversal
-            persistent_session = self.create_http_session()
-
-            # Store the origninal create_http_session method
-            original_create_http_session = self.create_http_session
-
-            # Override create_http_session to return the persistent session
-            self.create_http_session = lambda: persistent_session  # type: ignore[method-assign]
-
         # Ensure bundle resources are not separated by default
         self.separate_bundle_resources(False)

@@ -171,6 +168,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
         cache: RequestCache = input_cache if input_cache is not None else RequestCache()
         async with cache:
             # Retrieve start resources based on graph definition
+            step_start = time.perf_counter()
             start: str = graph_definition.start
             parent_response: FhirGetResponse
             cache_hits: int

@@ -184,10 +182,18 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                 add_cached_bundles_to_result=add_cached_bundles_to_result,
                 compare_hash=compare_hash,
             )
+            profiling["steps"]["get_start_resources"] = time.perf_counter() - step_start

             # If no parent resources found, yield empty response and exit
             parent_response_resource_count = parent_response.get_resource_count()
             if parent_response_resource_count == 0:
+                profiling["total_time"] = time.perf_counter() - profiling["start_time"]
+                if logger:
+                    logger.info(
+                        f"[PROFILING] process_simulate_graph_async: total={profiling['total_time']:.3f}s, "
+                        f"get_start_resources={profiling['steps'].get('get_start_resources', 0):.3f}s, "
+                        f"no parent resources found"
+                    )
                 yield parent_response
                 return  # no resources to process

@@ -209,6 +215,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
            )

            # now process the graph links
+           step_start = time.perf_counter()
            child_responses: list[FhirGetResponse] = []
            parent_link_map: list[tuple[list[GraphDefinitionLink], FhirBundleEntryList]] = []

@@ -217,6 +224,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
            parent_link_map.append((graph_definition.link, parent_bundle_entries))

            # Process graph links in parallel
+           link_processing_count = 0
            while len(parent_link_map):
                new_parent_link_map: list[tuple[list[GraphDefinitionLink], FhirBundleEntryList]] = []

@@ -243,20 +251,38 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                    add_cached_bundles_to_result=add_cached_bundles_to_result,
                    ifModifiedSince=ifModifiedSince,
                ):
+                   # Track extend operation
+                   extend_start = time.perf_counter()
                    child_responses.extend(link_responses)
+                   extend_time = time.perf_counter() - extend_start
+                   profiling["extend_calls"].append(
+                       {"location": "child_responses.extend", "count": len(link_responses), "time": extend_time}
+                   )
+                   link_processing_count += 1

                # Update parent link map for next iteration
                parent_link_map = new_parent_link_map

+           profiling["steps"]["process_graph_links"] = time.perf_counter() - step_start
+           profiling["steps"]["link_processing_iterations"] = link_processing_count
+
            # Combine and process responses
+           step_start = time.perf_counter()
            parent_response = cast(FhirGetBundleResponse, parent_response.extend(child_responses))
-
+           extend_time = time.perf_counter() - step_start
+           profiling["steps"]["parent_response.extend"] = extend_time
+           profiling["extend_calls"].append(
+               {"location": "parent_response.extend", "count": len(child_responses), "time": extend_time}
+           )

            # Optional resource sorting
            if sort_resources:
+               step_start = time.perf_counter()
                parent_response = parent_response.sort_resources()
+               profiling["steps"]["sort_resources"] = time.perf_counter() - step_start

            # Prepare final response based on bundling preferences
+           step_start = time.perf_counter()
            full_response: FhirGetResponse
            if separate_bundle_resources:
                full_response = FhirGetListByResourceTypeResponse.from_response(other_response=parent_response)

@@ -264,10 +290,38 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                full_response = FhirGetListResponse.from_response(other_response=parent_response)
            else:
                full_response = parent_response
+           profiling["steps"]["prepare_final_response"] = time.perf_counter() - step_start

            # Set response URL
            full_response.url = url or parent_response.url

+           # Calculate profiling summary
+           profiling["total_time"] = time.perf_counter() - profiling["start_time"]
+           total_extend_time = sum(call["time"] for call in profiling["extend_calls"])
+           total_extend_count = sum(call["count"] for call in profiling["extend_calls"])
+
+           # Log profiling information
+           if logger:
+               logger.info(
+                   f"[PROFILING] process_simulate_graph_async for id={id_}: "
+                   f"total={profiling['total_time']:.3f}s, "
+                   f"get_start_resources={profiling['steps'].get('get_start_resources', 0):.3f}s, "
+                   f"process_graph_links={profiling['steps'].get('process_graph_links', 0):.3f}s, "
+                   f"parent_response.extend={profiling['steps'].get('parent_response.extend', 0):.3f}s, "
+                   f"sort_resources={profiling['steps'].get('sort_resources', 0):.3f}s, "
+                   f"prepare_final_response={profiling['steps'].get('prepare_final_response', 0):.3f}s"
+               )
+               logger.info(
+                   f"[PROFILING] process_simulate_graph_async extend operations: "
+                   f"total_calls={len(profiling['extend_calls'])}, "
+                   f"total_items={total_extend_count}, "
+                   f"total_time={total_extend_time:.3f}s"
+               )
+               for call in profiling["extend_calls"]:
+                   logger.info(
+                       f"[PROFILING] extend at {call['location']}: items={call['count']}, time={call['time']:.3f}s"
+                   )
+
            # Log cache performance
            if logger:
                logger.info(
@@ -280,13 +334,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
            # Yield the final response
            yield full_response

-       if persistent_session:
-           # Clean up: close the persistent session
-           await persistent_session.close()
-
-       if make_persistent_connection:
-           self.create_http_session = original_create_http_session  # type: ignore[method-assign]
-
    # noinspection PyUnusedLocal
    async def process_link_async_parallel_function(
        self,

@@ -297,26 +344,9 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
    ) -> list[FhirGetResponse]:
        """
        Parallel processing function for graph definition links.
-
-       This method is designed to be used with AsyncParallelProcessor to process
-       graph links concurrently, improving performance for complex FHIR resource
-       graph traversals.
-
-       Key Responsibilities:
-       - Process individual graph links in parallel
-       - Track and log processing details
-       - Handle resource retrieval for each link
-       - Manage parallel processing context
-
-       Args:
-           context: Parallel processing context information
-           row: Current GraphDefinitionLink being processed
-           parameters: Parameters for link processing
-           additional_parameters: Extra parameters for extended processing
-
-       Returns:
-           List of FhirGetResponse objects retrieved during link processing
        """
+       profiling_start = time.perf_counter()
+
        # Record the start time for performance tracking
        start_time: datetime = datetime.now()

@@ -347,11 +377,8 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
            logger=parameters.logger,
            cache=parameters.cache,
            scope_parser=parameters.scope_parser,
-           # Handle parent link map from additional parameters
            parent_link_map=(additional_parameters["parent_link_map"] if additional_parameters else []),
-           # Determine request size, default to 1 if not specified
            request_size=(additional_parameters["request_size"] if additional_parameters else 1),
-           # Track unsupported resources for ID-based search
            id_search_unsupported_resources=(
                additional_parameters["id_search_unsupported_resources"] if additional_parameters else []
            ),

@@ -367,6 +394,8 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
        # Record end time for performance tracking
        end_time: datetime = datetime.now()

+       total_time = time.perf_counter() - profiling_start
+
        # Log detailed processing information
        if parameters.logger:
            parameters.logger.debug(

@@ -378,6 +407,11 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                + f" | duration: {end_time - start_time}"
                + f" | resource_count: {len(result)}"
            )
+           parameters.logger.info(
+               f"[PROFILING] process_link_async_parallel_function for path={row.path}: "
+               f"total={total_time:.3f}s, "
+               f"results={len(result)}"
+           )

        # Return the list of retrieved responses
        return result
@@ -863,9 +897,18 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
        logger: Logger | None,
        compare_hash: bool = True,
    ) -> FhirGetResponse | None:
+       profiling_start = time.perf_counter()
+       http_request_time = 0.0
+       http_request_count = 0
+       cache_check_time = 0.0
+       cache_update_time = 0.0
+       append_time = 0.0
+
        result: FhirGetResponse | None = None
        non_cached_id_list: list[str] = []
+
        # first check to see if we can find these in the cache
+       cache_check_start = time.perf_counter()
        if ids:
            for resource_id in ids:
                cache_entry: RequestCacheEntry | None = await cache.get_async(

@@ -878,9 +921,12 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                    if logger:
                        logger.info(f"Cache entry not found for {resource_type}/{resource_id} (1by1)")
                    non_cached_id_list.append(resource_id)
+       cache_check_time = time.perf_counter() - cache_check_start

+       cache_update_start = time.perf_counter()
        for single_id in non_cached_id_list:
            result2: FhirGetResponse
+           http_start = time.perf_counter()
            async for result2 in self._get_with_session_async(
                page_number=None,
                ids=[single_id],

@@ -889,10 +935,15 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                fn_handle_streaming_chunk=None,
                resource_type=resource_type,
            ):
+               http_request_time += time.perf_counter() - http_start
+               http_request_count += 1
+
                if result2.resource_type == "OperationOutcome":
                    result2 = FhirGetErrorResponse.from_response(other_response=result2)
                if result:
+                   append_start = time.perf_counter()
                    result = result.append(result2)
+                   append_time += time.perf_counter() - append_start
                else:
                    result = result2
                if result2.successful:

@@ -926,6 +977,21 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                    )
                    if cache_updated and logger:
                        logger.info(f"Inserted {result2.status} for {resource_type}/{single_id} into cache (1by1)")
+       cache_update_time = time.perf_counter() - cache_update_start - http_request_time - append_time
+
+       total_time = time.perf_counter() - profiling_start
+       processing_time = total_time - http_request_time - cache_check_time - cache_update_time - append_time
+
+       if logger and http_request_count > 0:
+           logger.info(
+               f"[PROFILING] _get_resources_by_id_one_by_one_async for {resource_type}: "
+               f"total={total_time:.3f}s, "
+               f"http_requests={http_request_time:.3f}s ({http_request_count} calls), "
+               f"cache_check={cache_check_time:.3f}s, "
+               f"cache_update={cache_update_time:.3f}s, "
+               f"append={append_time:.3f}s, "
+               f"processing={processing_time:.3f}s"
+           )

        return result

@@ -942,6 +1008,13 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
        add_cached_bundles_to_result: bool = True,
        compare_hash: bool = True,
    ) -> tuple[FhirGetResponse, int]:
+       profiling_start = time.perf_counter()
+       http_request_time = 0.0
+       http_request_count = 0
+       cache_check_time = 0.0
+       cache_update_time = 0.0
+       append_time = 0.0
+
        assert resource_type
        if not scope_parser.scope_allows(resource_type=resource_type):
            if logger:

@@ -975,14 +1048,13 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):

        non_cached_id_list: list[str] = []
        # get any cached resources
+       cache_check_start = time.perf_counter()
        if id_list:
            for resource_id in id_list:
                cache_entry: RequestCacheEntry | None = await cache.get_async(
                    resource_type=resource_type, resource_id=resource_id
                )
                if cache_entry:
-                   # if there is an entry then it means we tried to get it in the past
-                   # so don't get it again whether we were successful or not
                    if logger:
                        logger.info(
                            f"{cache_entry.status} Returning {resource_type}/{resource_id} from cache (ByParam)"

@@ -991,6 +1063,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                    if logger:
                        logger.info(f"Cache entry not found for {resource_type}/{resource_id} (ByParam)")
                    non_cached_id_list.append(resource_id)
+       cache_check_time = time.perf_counter() - cache_check_start

        all_result: FhirGetResponse | None = None
        # either we have non-cached ids or this is a query without id but has other parameters

@@ -1002,6 +1075,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
            # call the server to get the resources
            result1: FhirGetResponse
            result: FhirGetResponse | None
+           http_start = time.perf_counter()
            async for result1 in self._get_with_session_async(
                page_number=None,
                ids=non_cached_id_list,

@@ -1010,6 +1084,8 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                fn_handle_streaming_chunk=None,
                resource_type=resource_type,
            ):
+               http_request_time += time.perf_counter() - http_start
+               http_request_count += 1
                result = result1
                # if we got a failure then check if we can get it one by one
                if (not result or result.status != 200) and len(non_cached_id_list) > 1:

@@ -1022,6 +1098,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                        f" Fetching one by one ids: {non_cached_id_list}"
                    )
                    # For some resources if search by _id doesn't work then fetch one by one.
+                   one_by_one_start = time.perf_counter()
                    result = await self._get_resources_by_id_one_by_one_async(
                        resource_type=resource_type,
                        ids=non_cached_id_list,

@@ -1030,6 +1107,9 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                        logger=logger,
                        compare_hash=compare_hash,
                    )
+                   one_by_one_time = time.perf_counter() - one_by_one_start
+                   http_request_time += one_by_one_time
+                   http_request_count += len(non_cached_id_list)
                else:
                    if logger:
                        logger.info(f"Fetched {resource_type} resources using _id for url {self._url}")

@@ -1045,11 +1125,14 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):

                # append to the response
                if all_result:
+                   append_start = time.perf_counter()
                    all_result = all_result.append(result)
+                   append_time += time.perf_counter() - append_start
                else:
                    all_result = result
        # If non_cached_id_list is not empty and resource_type does not support ?_id search then fetch it one by one
        elif len(non_cached_id_list):
+           one_by_one_start = time.perf_counter()
            all_result = await self._get_resources_by_id_one_by_one_async(
                resource_type=resource_type,
                ids=non_cached_id_list,

@@ -1058,10 +1141,13 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                logger=logger,
                compare_hash=compare_hash,
            )
+           http_request_time += time.perf_counter() - one_by_one_start
+           http_request_count += len(non_cached_id_list)

        # This list tracks the non-cached ids that were found
        found_non_cached_id_list: list[str] = []
        # Cache the fetched entries
+       cache_update_start = time.perf_counter()
        if all_result:
            non_cached_bundle_entry: FhirBundleEntry
            for non_cached_bundle_entry in all_result.get_bundle_entries():

@@ -1095,7 +1181,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                        logger.debug(f"Inserted {resource_type}/{non_cached_resource_id} into cache (ByParam)")
                    found_non_cached_id_list.append(non_cached_resource_id)

-       # now add all the non-cached ids that were NOT found to the cache too so we don't look for them again
        for non_cached_id in non_cached_id_list:
            if non_cached_id not in found_non_cached_id_list:
                cache_updated = await cache.add_async(

@@ -1110,6 +1195,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                )
                if cache_updated and logger:
                    logger.info(f"Inserted 404 for {resource_type}/{non_cached_id} into cache (ByParam)")
+       cache_update_time = time.perf_counter() - cache_update_start

        bundle_response: FhirGetBundleResponse = (
            FhirGetBundleResponse.from_response(other_response=all_result)
@@ -1151,6 +1237,21 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                storage_mode=self._storage_mode,
            )
        )
+
+       total_time = time.perf_counter() - profiling_start
+       processing_time = total_time - http_request_time - cache_check_time - cache_update_time - append_time
+
+       if logger and http_request_count > 0:
+           logger.info(
+               f"[PROFILING] _get_resources_by_parameters_async for {resource_type}: "
+               f"total={total_time:.3f}s, "
+               f"http_requests={http_request_time:.3f}s ({http_request_count} calls), "
+               f"cache_check={cache_check_time:.3f}s, "
+               f"cache_update={cache_update_time:.3f}s, "
+               f"append={append_time:.3f}s, "
+               f"processing={processing_time:.3f}s"
+           )
+
        return bundle_response, cache.cache_hits

    # noinspection PyPep8Naming

@@ -1173,7 +1274,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
        add_cached_bundles_to_result: bool = True,
        input_cache: RequestCache | None = None,
        compare_hash: bool = True,
-       make_persistent_connection: bool = False,
    ) -> FhirGetResponse:
        """
        Simulates the $graph query on the FHIR server

@@ -1196,7 +1296,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
        :param add_cached_bundles_to_result: Optional flag to add cached bundles to result
        :param input_cache: Optional cache to use for input
        :param compare_hash: Optional flag to compare hash of the resources
-       :param make_persistent_connection: Whether to make the connection persistent for reuse across all requests in this graph traversal
        :return: FhirGetResponse
        """
        if contained:

@@ -1227,7 +1326,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
                add_cached_bundles_to_result=add_cached_bundles_to_result,
                input_cache=input_cache,
                compare_hash=compare_hash,
-               make_persistent_connection=make_persistent_connection,
            )
        )
        assert result, "No result returned from simulate_graph_async"
helix_fhir_client_sdk/open_telemetry/__init__.py: file without changes

helix_fhir_client_sdk/open_telemetry/attribute_names.py

@@ -0,0 +1,7 @@
+class FhirClientSdkOpenTelemetryAttributeNames:
+    """Constants for OpenTelemetry attribute names used in the FHIR Client SDK."""
+
+    URL: str = "fhir.client_sdk.url"
+    RESOURCE: str = "fhir.client_sdk.resource"
+    JSON_DATA_COUNT: str = "fhir.client_sdk.json_data.count"
+    BATCH_SIZE: str = "fhir.client_sdk.batch.size"
helix_fhir_client_sdk/open_telemetry/span_names.py

@@ -0,0 +1,12 @@
+class FhirClientSdkOpenTelemetrySpanNames:
+    """Span names for OpenTelemetry tracing in the FHIR Client SDK."""
+
+    GET: str = "fhir.client_sdk.get"
+    GET_STREAMING: str = "fhir.client_sdk.streaming.get"
+    GET_ACCESS_TOKEN: str = "fhir.client_sdk.access_token.get"
+    HTTP_GET: str = "fhir.client_sdk.http.get"
+    HANDLE_RESPONSE: str = "fhir.client_sdk.handle_response"
+    DELETE: str = "fhir.client_sdk.delete"
+    UPDATE: str = "fhir.client_sdk.update"
+    PATCH: str = "fhir.client_sdk.patch"
+    MERGE: str = "fhir.client_sdk.merge"
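These two new modules only centralize span and attribute names; the hunks above do not show how the SDK opens its spans. As a rough illustration of how such constants are typically consumed with the opentelemetry-api package (the tracer setup and attribute values below are assumptions, not SDK code):

    # Illustrative only: using the new span/attribute name constants with
    # opentelemetry-api. The tracer wiring and the example values are assumed.
    from opentelemetry import trace

    from helix_fhir_client_sdk.open_telemetry.attribute_names import FhirClientSdkOpenTelemetryAttributeNames
    from helix_fhir_client_sdk.open_telemetry.span_names import FhirClientSdkOpenTelemetrySpanNames

    tracer = trace.get_tracer("helix.fhir.client.sdk")

    with tracer.start_as_current_span(FhirClientSdkOpenTelemetrySpanNames.GET) as span:
        # record which URL and resource type this GET targets
        span.set_attribute(FhirClientSdkOpenTelemetryAttributeNames.URL, "https://fhir.example.com/Patient")
        span.set_attribute(FhirClientSdkOpenTelemetryAttributeNames.RESOURCE, "Patient")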
helix_fhir_client_sdk/queue/request_queue_mixin.py

@@ -74,6 +74,15 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
        request_id: str | None = None
        total_results = 0  # total number of resources returned so far

+       limit_count: int | None = self._limit
+
+       # if _count parameter is present in additional_parameters then set limit_count to it
+       if additional_parameters:
+           for param in additional_parameters:
+               if self._limit is None:
+                   if param.startswith("_count="):
+                       limit_count = int(param.split("=")[1])
+
        # create url and query to request from FHIR server
        resources_json: str = ""
        full_url = await self.build_url(
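The net effect of the added block is a precedence rule: an explicit client-side limit still wins, and a _count= entry in additional_parameters only becomes the pagination cap when no limit was set. A standalone sketch of that rule, using a hypothetical helper rather than SDK code:

    # Hypothetical helper distilling the precedence used above: an explicit
    # limit wins; otherwise the first _count=<n> query parameter becomes the cap.
    def resolve_limit(explicit_limit: int | None, additional_parameters: list[str] | None) -> int | None:
        if explicit_limit is not None:
            return explicit_limit
        for param in additional_parameters or []:
            if param.startswith("_count="):
                return int(param.split("=", 1)[1])
        return None

    assert resolve_limit(10, ["_count=100"]) == 10      # explicit limit wins
    assert resolve_limit(None, ["_count=100"]) == 100   # _count used as the cap
    assert resolve_limit(None, ["_sort=_lastUpdated"]) is None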
@@ -110,7 +119,8 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
        )

        async with RetryableAioHttpClient(
-           fn_get_session=
+           fn_get_session=self._fn_create_http_session or self.create_http_session,
+           caller_managed_session=self._fn_create_http_session is not None,
            refresh_token_func=self._refresh_token_function,
            tracer_request_func=self._trace_request_function,
            retries=self._retry_count,

@@ -121,9 +131,6 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
            log_all_url_results=self._log_all_response_urls,
            access_token=self._access_token,
            access_token_expiry_date=self._access_token_expiry_date,
-           close_session_on_exit=self._close_session,
-           persistent_session=self._persistent_session,
-           use_persistent_session=self._use_persistent_session,
        ) as client:
            while next_url:
                # set access token in request if present

@@ -168,7 +175,7 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                )

                request_id = response.response_headers.get("X-Request-ID", None)
-               self._internal_logger.
+               self._internal_logger.debug(f"X-Request-ID={request_id}")

                async for r in FhirResponseProcessor.handle_response(
                    internal_logger=self._internal_logger,

@@ -198,9 +205,9 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                        total_results += resource_count

                        # Stop if limit reached
-                       if
+                       if limit_count and total_results >= limit_count:
                            self._internal_logger.info(
-                               f"Reached limit={
+                               f"Reached limit={limit_count} after collecting {total_results} "
                                f"resources, stopping pagination"
                            )
                            return

@@ -281,7 +288,8 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
        )

        async with RetryableAioHttpClient(
-           fn_get_session=
+           fn_get_session=self._fn_create_http_session or self.create_http_session,
+           caller_managed_session=self._fn_create_http_session is not None,
            refresh_token_func=self._refresh_token_function,
            tracer_request_func=self._trace_request_function,
            retries=self._retry_count,

@@ -292,9 +300,6 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
            log_all_url_results=self._log_all_response_urls,
            access_token=self._access_token,
            access_token_expiry_date=self._access_token_expiry_date,
-           close_session_on_exit=self._close_session,
-           persistent_session=self._persistent_session,
-           use_persistent_session=self._use_persistent_session,
        ) as client:
            while next_url:
                # set access token in request if present

@@ -339,7 +344,7 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                )

                request_id = response.response_headers.get("X-Request-ID", None)
-               self._internal_logger.
+               self._internal_logger.debug(f"X-Request-ID={request_id}")

                if response.status == 200:
                    response_next_url = None
helix_fhir_client_sdk/responses/fhir_client_protocol.py

@@ -1,5 +1,5 @@
 import uuid
-from collections.abc import AsyncGenerator
+from collections.abc import AsyncGenerator, Callable
 from datetime import datetime
 from logging import Logger
 from threading import Lock

@@ -89,10 +89,6 @@ class FhirClientProtocol(Protocol):
    _send_data_as_chunked: bool = False
    _last_page_lock: Lock

-   _persistent_session: ClientSession | None = None
-   _use_persistent_session: bool = False
-   _close_session: bool = True
-
    _use_post_for_search: bool = False

    _accept: str

@@ -124,12 +120,18 @@ class FhirClientProtocol(Protocol):

    _log_all_response_urls: bool

-   _storage_mode: CompressedDictStorageMode
+   _storage_mode: CompressedDictStorageMode
    """ storage mode to store the responses """

    _create_operation_outcome_for_error: bool | None
    """ whether to create OperationOutcome resource for errors """

+   _max_concurrent_requests: int | None
+   """ maximum number of concurrent requests to make to the FHIR server """
+
+   _fn_create_http_session: Callable[[], ClientSession] | None
+   """ optional callable to create HTTP sessions """
+
    async def get_access_token_async(self) -> GetAccessTokenResult: ...

    async def _send_fhir_request_async(

@@ -212,3 +214,5 @@ class FhirClientProtocol(Protocol):
    ) -> AsyncGenerator[FhirMergeResourceResponse, None]:
        # this is just here to tell Python this returns a generator
        yield None  # type: ignore[misc]
+
+   def use_http_session(self, fn_create_http_session: Callable[[], ClientSession] | None) -> "FhirClientProtocol": ...
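Together with the caller_managed_session flag added in request_queue_mixin.py, this use_http_session hook replaces the removed persistent-session plumbing: the caller supplies a session factory and keeps ownership of the session's lifetime. A rough sketch of that usage; the FhirClient builder calls below are assumptions, only the use_http_session signature comes from this diff:

    # Sketch only: supplying a caller-managed aiohttp session to the client.
    # FhirClient construction details are assumed; use_http_session itself is
    # the protocol method added in this diff.
    import aiohttp

    from helix_fhir_client_sdk.fhir_client import FhirClient


    async def fetch_with_shared_session() -> None:
        async with aiohttp.ClientSession() as session:
            client = (
                FhirClient()
                .url("https://fhir.example.com")
                .resource("Patient")
                # reuse the caller's session for every request the client makes;
                # the caller (not the SDK) closes it when the async with exits
                .use_http_session(lambda: session)
            )
            response = await client.get_async()
            print(response.status)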
helix_fhir_client_sdk/responses/fhir_get_response.py

@@ -119,7 +119,7 @@ class FhirGetResponse:
        """ Count of cache hits """
        self.results_by_url: list[RetryableAioHttpUrlResult] = results_by_url
        """ Count of errors in the response by status """
-       self.storage_mode: CompressedDictStorageMode =
+       self.storage_mode: CompressedDictStorageMode = storage_mode
        """ Storage mode for the response """

    @abstractmethod

@@ -170,9 +170,8 @@ class FhirGetResponse:
        """
        result: FhirGetResponse = self._extend(others=others)

-
-
-       result.chunk_number = latest_chunk_number[0]
+       if others and others[-1].chunk_number:
+           result.chunk_number = others[-1].chunk_number
        return result

    @abstractmethod