helix.fhir.client.sdk 4.1.67__py3-none-any.whl → 4.2.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- helix_fhir_client_sdk/dictionary_parser.py +4 -0
- helix_fhir_client_sdk/fhir_auth_mixin.py +17 -10
- helix_fhir_client_sdk/fhir_client.py +161 -61
- helix_fhir_client_sdk/fhir_delete_mixin.py +62 -45
- helix_fhir_client_sdk/fhir_merge_mixin.py +188 -163
- helix_fhir_client_sdk/fhir_merge_resources_mixin.py +200 -9
- helix_fhir_client_sdk/fhir_patch_mixin.py +97 -81
- helix_fhir_client_sdk/fhir_update_mixin.py +71 -54
- helix_fhir_client_sdk/graph/simulated_graph_processor_mixin.py +5 -174
- helix_fhir_client_sdk/open_telemetry/__init__.py +0 -0
- helix_fhir_client_sdk/open_telemetry/attribute_names.py +7 -0
- helix_fhir_client_sdk/open_telemetry/span_names.py +12 -0
- helix_fhir_client_sdk/queue/request_queue_mixin.py +46 -119
- helix_fhir_client_sdk/responses/fhir_client_protocol.py +9 -1
- helix_fhir_client_sdk/responses/fhir_response_processor.py +73 -54
- helix_fhir_client_sdk/responses/get/fhir_get_bundle_response.py +0 -2
- helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py +30 -0
- helix_fhir_client_sdk/utilities/async_parallel_processor/v1/async_parallel_processor.py +1 -24
- helix_fhir_client_sdk/utilities/cache/request_cache.py +32 -43
- helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py +184 -144
- helix_fhir_client_sdk/utilities/retryable_aiohttp_response.py +2 -1
- helix_fhir_client_sdk/utilities/url_checker.py +46 -12
- helix_fhir_client_sdk/validators/async_fhir_validator.py +3 -0
- helix_fhir_client_sdk-4.2.18.dist-info/METADATA +200 -0
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/RECORD +32 -25
- tests/async/test_benchmark_compress.py +448 -0
- tests/async/test_benchmark_merge.py +506 -0
- tests/async/test_retryable_client_session_management.py +159 -0
- tests/test_fhir_client_clone.py +155 -0
- helix_fhir_client_sdk-4.1.67.dist-info/METADATA +0 -115
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/WHEEL +0 -0
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/licenses/LICENSE +0 -0
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,5 @@
 from __future__ import annotations

-import asyncio
 import json
 import logging
 import time
@@ -70,18 +69,22 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
         :param additional_parameters: additional parameters to add to the request
         :return: response
         """
-        # TIME: Track overall method execution
-        method_start_time = time.time()
-
         assert self._url, "No FHIR server url was set"
         assert resource_type or self._resource, "No Resource was set"
         request_id: str | None = None
         total_results = 0  # total number of resources returned so far

+        limit_count: int | None = self._limit
+
+        # if _count parameter is present in additional_parameters then set limit_count to it
+        if additional_parameters:
+            for param in additional_parameters:
+                if self._limit is None:
+                    if param.startswith("_count="):
+                        limit_count = int(param.split("=")[1])
+
         # create url and query to request from FHIR server
         resources_json: str = ""
-
-        # TIME: Track URL building
         full_url = await self.build_url(
             ids=ids,
             id_above=id_above,
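The new limit handling above lets a `_count=N` value in `additional_parameters` cap pagination when no explicit `_limit` was set on the client. A minimal standalone sketch of that resolution logic; the `resolve_limit` helper name is hypothetical and not part of the SDK:

```python
# Hypothetical helper mirroring the limit resolution added above:
# an explicit limit wins; otherwise a "_count=N" query parameter caps pagination.
def resolve_limit(limit: int | None, additional_parameters: list[str] | None) -> int | None:
    limit_count = limit
    if additional_parameters and limit is None:
        for param in additional_parameters:
            if param.startswith("_count="):
                limit_count = int(param.split("=")[1])
    return limit_count


assert resolve_limit(None, ["_count=50"]) == 50
assert resolve_limit(10, ["_count=50"]) == 10  # explicit limit takes precedence
```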
@@ -104,10 +107,6 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
         last_status_code: int | None = None
         last_response_text: str | None = None
         next_url: str | None = full_url
-        page_count = 0
-        total_http_time = 0.0
-        total_response_processing_time = 0.0
-
         try:
             await FhirResponseProcessor.log_request(
                 full_url=full_url,
@@ -120,7 +119,8 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
             )

             async with RetryableAioHttpClient(
-                fn_get_session=
+                fn_get_session=self._fn_create_http_session or self.create_http_session,
+                caller_managed_session=self._fn_create_http_session is not None,
                 refresh_token_func=self._refresh_token_function,
                 tracer_request_func=self._trace_request_function,
                 retries=self._retry_count,
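The new `caller_managed_session` flag tells `RetryableAioHttpClient` whether the aiohttp session comes from a caller-supplied factory (`_fn_create_http_session`) or from the client's own `create_http_session`. A minimal sketch of that ownership pattern, assuming aiohttp only; this is not the SDK's actual `RetryableAioHttpClient` implementation:

```python
# Minimal sketch of the session-ownership pattern introduced above: if the caller
# supplies the session factory, the client treats the session as caller-managed
# and does not close it on exit.
from collections.abc import Callable

import aiohttp


class HttpClientSketch:
    def __init__(
        self,
        fn_get_session: Callable[[], aiohttp.ClientSession],
        caller_managed_session: bool,
    ) -> None:
        self._fn_get_session = fn_get_session
        self._caller_managed_session = caller_managed_session
        self._session: aiohttp.ClientSession | None = None

    async def __aenter__(self) -> "HttpClientSketch":
        self._session = self._fn_get_session()
        return self

    async def __aexit__(self, *exc: object) -> None:
        # Only close sessions this client created itself.
        if self._session and not self._caller_managed_session:
            await self._session.close()
```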
@@ -133,9 +133,6 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                 access_token_expiry_date=self._access_token_expiry_date,
             ) as client:
                 while next_url:
-                    page_count += 1
-                    page_start_time = time.time()
-
                     # set access token in request if present
                     access_token_result: GetAccessTokenResult = await self.get_access_token_async()
                     access_token: str | None = access_token_result.access_token
@@ -146,18 +143,16 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                         next_url = UrlChecker.convert_relative_url_to_absolute_url(
                             base_url=self._url, relative_url=next_url
                         )
-
-
-
+                    else:
+                        # INC-285: Preserve port from base URL when next_url is absolute
+                        # but missing the port (FHIR server bug workaround)
+                        next_url = UrlChecker.preserve_port_from_base_url(base_url=self._url, next_url=next_url)
                     response: RetryableAioHttpResponse = await self._send_fhir_request_async(
                         client=client,
                         full_url=next_url,
                         headers=headers,
                         payload=payload,
                     )
-                    http_request_duration = time.time() - http_request_start
-                    total_http_time += http_request_duration
-
                     assert isinstance(response, RetryableAioHttpResponse)

                     if response.access_token:
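`UrlChecker.preserve_port_from_base_url` itself is not shown in this diff, but the INC-285 comments describe its intent: when the server returns an absolute `next` link that drops the non-default port of the base URL, copy the port back in. A hedged sketch of what such a workaround might look like, using only `urllib.parse`; it is illustrative and not the SDK's implementation:

```python
# Sketch of a port-preserving helper (assumption: not the SDK's UrlChecker code).
from urllib.parse import urlparse, urlunparse


def preserve_port_from_base_url(base_url: str, next_url: str) -> str:
    base = urlparse(base_url)
    nxt = urlparse(next_url)
    # If the base URL has an explicit port, the next link has none, and the host
    # matches, copy the port back into the next link.
    if base.port and nxt.port is None and nxt.hostname == base.hostname:
        nxt = nxt._replace(netloc=f"{nxt.hostname}:{base.port}")
    return urlunparse(nxt)


print(preserve_port_from_base_url("http://fhir:3000/4_0_0", "http://fhir/4_0_0/Patient?page=2"))
# -> http://fhir:3000/4_0_0/Patient?page=2
```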
@@ -180,10 +175,8 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                     )

                     request_id = response.response_headers.get("X-Request-ID", None)
-                    self._internal_logger.
+                    self._internal_logger.debug(f"X-Request-ID={request_id}")

-                    # TIME: Track response processing
-                    response_processing_start = time.time()
                     async for r in FhirResponseProcessor.handle_response(
                         internal_logger=self._internal_logger,
                         access_token=access_token,
@@ -212,46 +205,17 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                         total_results += resource_count

                         # Stop if limit reached
-                        if
+                        if limit_count and total_results >= limit_count:
                             self._internal_logger.info(
-                                f"Reached limit={
+                                f"Reached limit={limit_count} after collecting {total_results} "
                                 f"resources, stopping pagination"
                             )
                             return

                         # Update next_url for the next loop iteration
                         next_url = r.next_url
-
-                    response_processing_duration = time.time() - response_processing_start
-                    total_response_processing_time += response_processing_duration
-
-                    page_duration = time.time() - page_start_time
-                    self._internal_logger.info(
-                        f"[PERF] _get_with_session_async page {page_count} completed in {page_duration:.3f}s | "
-                        f"http_request={http_request_duration:.3f}s | "
-                        f"response_processing={response_processing_duration:.3f}s | "
-                        f"resources_in_page={resource_count} | "
-                        f"total_resources={total_results}"
-                    )
-
-            # Log overall method performance
-            method_total_duration = time.time() - method_start_time
-            self._internal_logger.info(
-                f"[PERF] _get_with_session_async TOTAL completed in {method_total_duration:.3f}s | "
-                f"resource_type={resource_type or self._resource} | "
-                f"total_http_time={total_http_time:.3f}s ({total_http_time/method_total_duration*100:.1f}%) | "
-                f"total_response_processing={total_response_processing_time:.3f}s ({total_response_processing_time/method_total_duration*100:.1f}%) | "
-                f"pages={page_count} | "
-                f"total_resources={total_results}"
-            )

         except Exception as ex:
-            method_duration = time.time() - method_start_time
-            self._internal_logger.error(
-                f"[PERF] _get_with_session_async FAILED after {method_duration:.3f}s | "
-                f"resource_type={resource_type or self._resource} | "
-                f"error={str(ex)}"
-            )
             raise FhirSenderException(
                 request_id=request_id,
                 exception=ex,
@@ -324,7 +288,8 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
         )

         async with RetryableAioHttpClient(
-            fn_get_session=
+            fn_get_session=self._fn_create_http_session or self.create_http_session,
+            caller_managed_session=self._fn_create_http_session is not None,
             refresh_token_func=self._refresh_token_function,
             tracer_request_func=self._trace_request_function,
             retries=self._retry_count,
@@ -347,6 +312,10 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                     next_url = UrlChecker.convert_relative_url_to_absolute_url(
                         base_url=self._url, relative_url=next_url
                     )
+                else:
+                    # INC-285: Preserve port from base URL when next_url is absolute
+                    # but missing the port (FHIR server bug workaround)
+                    next_url = UrlChecker.preserve_port_from_base_url(base_url=self._url, next_url=next_url)
                 response: RetryableAioHttpResponse = await self._send_fhir_request_async(
                     client=client,
                     full_url=next_url,
@@ -375,7 +344,7 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
                 )

                 request_id = response.response_headers.get("X-Request-ID", None)
-                self._internal_logger.
+                self._internal_logger.debug(f"X-Request-ID={request_id}")

                 if response.status == 200:
                     response_next_url = None
@@ -460,32 +429,14 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
         :param headers: headers to send
         :param payload: payload to send
         """
-        def log_running_tasks(prefix: str = "") -> int:
-            """Log currently running asyncio tasks"""
-            all_tasks = asyncio.all_tasks()
-            running_tasks = [t for t in all_tasks if not t.done()]
-            task_count = len(running_tasks)
-            waiting_tasks = [t for t in running_tasks if t._coro.cr_await is not None]
-
-            self._internal_logger.info(
-                f"{prefix} | ASYNC TASKS: Total Running={task_count} | "
-                f"all_tasks={len(all_tasks)} | "
-            )
-            print(f"{prefix} | ASYNC TASKS: Total Running={task_count} | all_tasks={len(all_tasks)} | waiting_tasks={len(waiting_tasks)}")
-
-            return task_count
-
         if self._max_concurrent_requests_semaphore:
-            log_running_tasks(prefix="[BEFORE SEMAPHORE ACQUIRE]")
             async with self._max_concurrent_requests_semaphore:
-
-                result = await self._send_fhir_request_internal_async(
+                return await self._send_fhir_request_internal_async(
                     client=client,
                     full_url=full_url,
                     headers=headers,
                     payload=payload,
                 )
-                return result
         else:
             return await self._send_fhir_request_internal_async(
                 client=client, full_url=full_url, headers=headers, payload=payload
@@ -514,52 +465,28 @@ class RequestQueueMixin(ABC, FhirClientProtocol):
         if payload:
             assert isinstance(payload, dict)

-
-
-
-
-
+        if self._action == "$graph":
+            if self._logger:
+                self._logger.info(
+                    f"sending a post: {full_url} with client_id={self._client_id} and scopes={self._auth_scopes}"
+                )
+            logging.info(f"sending a post: {full_url} with client_id={self._client_id} and scopes={self._auth_scopes}")
+            if payload:
+                return await client.post(url=full_url, headers=headers, json=payload)
+            else:
+                raise Exception(
+                    "$graph needs a payload to define the returning response (use action_payload parameter)"
+                )
+        else:
+            if self._log_level == "DEBUG":
                 if self._logger:
                     self._logger.info(
-                        f"sending a
+                        f"sending a get: {full_url} with client_id={self._client_id} "
+                        + f"and scopes={self._auth_scopes} instance_id={self._uuid}"
                     )
-            logging.info(f"sending a post: {full_url} with client_id={self._client_id} and scopes={self._auth_scopes}")
-            if payload:
-                response = await client.post(url=full_url, headers=headers, json=payload)
                 else:
-
-                    "
+                    self._internal_logger.info(
+                        f"sending a get: {full_url} with client_id={self._client_id} "
+                        + f"and scopes={self._auth_scopes} instance_id={self._uuid}"
                     )
-
-            if self._log_level == "DEBUG":
-                if self._logger:
-                    self._logger.info(
-                        f"sending a get: {full_url} with client_id={self._client_id} "
-                        + f"and scopes={self._auth_scopes} instance_id={self._uuid}"
-                    )
-                else:
-                    self._internal_logger.info(
-                        f"sending a get: {full_url} with client_id={self._client_id} "
-                        + f"and scopes={self._auth_scopes} instance_id={self._uuid}"
-                    )
-            response = await client.get(url=full_url, headers=headers, data=payload)
-
-            # Log request duration
-            request_duration = time.time() - request_start_time
-            self._internal_logger.info(
-                f"Request completed in {request_duration:.3f}s | "
-                f"URL: {full_url} | "
-                f"Status: {response.status}"
-            )
-
-            return response
-
-        except Exception as e:
-            # Log request duration even on error
-            request_duration = time.time() - request_start_time
-            self._internal_logger.error(
-                f"Request failed after {request_duration:.3f}s | "
-                f"URL: {full_url} | "
-                f"Error: {str(e)}"
-            )
-            raise
+            return await client.get(url=full_url, headers=headers, data=payload)
@@ -1,5 +1,5 @@
 import uuid
-from collections.abc import AsyncGenerator
+from collections.abc import AsyncGenerator, Callable
 from datetime import datetime
 from logging import Logger
 from threading import Lock
@@ -126,6 +126,12 @@ class FhirClientProtocol(Protocol):
     _create_operation_outcome_for_error: bool | None
     """ whether to create OperationOutcome resource for errors """

+    _max_concurrent_requests: int | None
+    """ maximum number of concurrent requests to make to the FHIR server """
+
+    _fn_create_http_session: Callable[[], ClientSession] | None
+    """ optional callable to create HTTP sessions """
+
     async def get_access_token_async(self) -> GetAccessTokenResult: ...

     async def _send_fhir_request_async(
@@ -208,3 +214,5 @@ class FhirClientProtocol(Protocol):
     ) -> AsyncGenerator[FhirMergeResourceResponse, None]:
         # this is just here to tell Python this returns a generator
         yield None  # type: ignore[misc]
+
+    def use_http_session(self, fn_create_http_session: Callable[[], ClientSession] | None) -> "FhirClientProtocol": ...
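The new `_fn_create_http_session` attribute and `use_http_session` method let callers supply their own aiohttp session factory, in which case the session is treated as caller-managed (see tests/async/test_retryable_client_session_management.py in the file list). An illustrative call, assuming the SDK's usual fluent `FhirClient` builder; only `use_http_session` itself is taken from this diff, the rest of the chain is an assumption:

```python
# Illustrative only: handing the client a caller-owned session factory.
import aiohttp

from helix_fhir_client_sdk.fhir_client import FhirClient


def create_session() -> aiohttp.ClientSession:
    # the caller owns this session and is responsible for closing it
    return aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=60))


client = (
    FhirClient()                                  # builder chain assumed from typical usage
    .url("https://fhir.example.com/4_0_0")        # hypothetical server URL
    .resource("Patient")
    .use_http_session(create_session)             # new in this release
)
```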
@@ -11,10 +11,13 @@ from aiohttp.streams import AsyncStreamIterator
 from compressedfhir.utilities.compressed_dict.v1.compressed_dict_storage_mode import (
     CompressedDictStorageMode,
 )
+from opentelemetry import trace
+from opentelemetry.trace import Status, StatusCode

 from helix_fhir_client_sdk.function_types import (
     HandleStreamingChunkFunction,
 )
+from helix_fhir_client_sdk.open_telemetry.span_names import FhirClientSdkOpenTelemetrySpanNames
 from helix_fhir_client_sdk.responses.bundle_expander import (
     BundleExpander,
     BundleExpanderResult,
@@ -37,6 +40,8 @@ from helix_fhir_client_sdk.utilities.retryable_aiohttp_response import (
     RetryableAioHttpResponse,
 )

+TRACER = trace.get_tracer(__name__)
+

 class FhirResponseProcessor:
     """
@@ -93,60 +98,74 @@ class FhirResponseProcessor:

         :return: An async generator of FhirGetResponse objects.
         """
-        [previous implementation, original lines 96-149, not rendered in this diff view]
+        span = TRACER.start_span(FhirClientSdkOpenTelemetrySpanNames.HANDLE_RESPONSE)
+        try:
+            # if request is ok (200) then return the data
+            if response.ok:
+                async for r in FhirResponseProcessor._handle_response_200(
+                    full_url=full_url,
+                    request_id=request_id,
+                    response=response,
+                    response_headers=response_headers,
+                    fn_handle_streaming_chunk=fn_handle_streaming_chunk,
+                    access_token=access_token,
+                    resources_json=resources_json,
+                    resource=resource,
+                    id_=id_,
+                    logger=logger,
+                    use_data_streaming=use_data_streaming,
+                    chunk_size=chunk_size,
+                    extra_context_to_return=extra_context_to_return,
+                    expand_fhir_bundle=expand_fhir_bundle,
+                    url=url,
+                    separate_bundle_resources=separate_bundle_resources,
+                    storage_mode=storage_mode,
+                    create_operation_outcome_for_error=create_operation_outcome_for_error,
+                ):
+                    yield r
+            elif response.status == 404:  # not found
+                async for r in FhirResponseProcessor._handle_response_404(
+                    full_url=full_url,
+                    request_id=request_id,
+                    response=response,
+                    response_headers=response_headers,
+                    extra_context_to_return=extra_context_to_return,
+                    resource=resource,
+                    logger=logger,
+                    id_=id_,
+                    access_token=access_token,
+                    storage_mode=storage_mode,
+                    create_operation_outcome_for_error=create_operation_outcome_for_error,
+                ):
+                    yield r
+            else:  # unknown response
+                async for r in FhirResponseProcessor._handle_response_unknown(
+                    full_url=full_url,
+                    request_id=request_id,
+                    response=response,
+                    response_headers=response_headers,
+                    resource=resource,
+                    logger=logger,
+                    access_token=access_token,
+                    extra_context_to_return=extra_context_to_return,
+                    id_=id_,
+                    internal_logger=internal_logger,
+                    storage_mode=storage_mode,
+                    create_operation_outcome_for_error=create_operation_outcome_for_error,
+                ):
+                    yield r
+            # Mark span as successful
+            span.set_status(Status(StatusCode.OK))
+
+        except Exception as e:
+            # Record exception in span
+            span.record_exception(e)
+            span.set_status(Status(StatusCode.ERROR, str(e)))
+            raise
+
+        finally:
+            # Ensure span is ended after generator is exhausted or error occurs
+            span.end()

     @staticmethod
     async def _handle_response_unknown(
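`handle_response` is an async generator, so the new tracing wraps the OpenTelemetry span manually with `start_span` and closes it in `finally`, keeping the span open until the caller has finished iterating. A self-contained sketch of the same pattern; the span name here is illustrative, not one of the SDK's span names:

```python
# Standalone sketch of the manual span lifecycle used above around an async generator.
from collections.abc import AsyncGenerator

from opentelemetry import trace
from opentelemetry.trace import Status, StatusCode

TRACER = trace.get_tracer(__name__)


async def traced_items() -> AsyncGenerator[int, None]:
    span = TRACER.start_span("example.handle_response")  # illustrative span name
    try:
        for item in (1, 2, 3):
            yield item
        span.set_status(Status(StatusCode.OK))
    except Exception as e:
        span.record_exception(e)
        span.set_status(Status(StatusCode.ERROR, str(e)))
        raise
    finally:
        # runs when the generator is exhausted or closed, so the span always ends
        span.end()
```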
@@ -278,7 +278,6 @@ class FhirGetBundleResponse(FhirGetResponse):
             resource_type_plus_id_seen: set[str] = set()
             entry_request_url_seen: set[str] = set()
             i = 0
-            print(f"Initial bundle entries count: {len(self._bundle_entries)}")
             while i < len(self._bundle_entries):
                 if self._bundle_entries[i] is not None:
                     # Create a tuple of values for specified keys
@@ -302,7 +301,6 @@ class FhirGetBundleResponse(FhirGetResponse):
                     else:
                         resource_type_plus_id_seen.add(resource_type_plus_id)
                 i += 1
-            print(f"Final bundle entries count: {len(self._bundle_entries)}")
             return self
         except Exception as e:
             raise Exception(f"Could not get parse json from: {bundle}") from e
@@ -75,6 +75,36 @@ class FhirMergeResourceResponseEntry(BaseFhirMergeResourceResponseEntry):
             status=data.get("status"),
         )

+    @classmethod
+    def from_dict_uncompressed(cls, data: dict[str, Any]) -> "FhirMergeResourceResponseEntry":
+        """
+        Creates a FhirMergeResourceResponseEntry from a dictionary without storage_mode overhead.
+        Uses FhirResource.construct for faster object creation.
+
+        :param data: Dictionary containing the response entry data
+        :return: FhirMergeResourceResponseEntry instance
+        """
+        resource_payload = data.get("resource")
+        resource_obj: FhirResource | None = (
+            FhirResource.construct(**resource_payload) if isinstance(resource_payload, dict) else None
+        )
+        return FhirMergeResourceResponseEntry(
+            created=data.get("created"),
+            updated=data.get("updated"),
+            deleted=data.get("deleted"),
+            id_=data.get("id"),
+            uuid=data.get("uuid"),
+            resource_type=data.get("resourceType"),
+            source_assigning_authority=data.get("source_assigning_authority"),
+            resource_version=data.get("resource_version"),
+            message=data.get("message"),
+            issue=data.get("issue"),
+            error=data.get("error"),
+            token=data.get("token"),
+            resource=resource_obj,
+            status=data.get("status"),
+        )
+
     @classmethod
     @override
     def from_json(
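An illustrative call of the new `from_dict_uncompressed` fast path; the keys mirror the fields read in the method above, and the sample values are invented for the example:

```python
# Illustrative use of the new fast-path constructor (sample data is made up).
from helix_fhir_client_sdk.responses.merge.fhir_merge_resource_response_entry import (
    FhirMergeResourceResponseEntry,
)

entry = FhirMergeResourceResponseEntry.from_dict_uncompressed(
    {
        "id": "example-patient-1",
        "uuid": "00000000-0000-0000-0000-000000000001",
        "resourceType": "Patient",
        "created": True,
        "updated": False,
        "resource": {"resourceType": "Patient", "id": "example-patient-1"},
    }
)
```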
@@ -7,7 +7,6 @@ from typing import (
     Protocol,
     runtime_checkable,
 )
-import logging


 @dataclass(slots=True)
@@ -113,25 +112,7 @@ class AsyncParallelProcessor:
         async def process_with_semaphore_async(
             *, name: str, row1: TInput, task_index: int, total_task_count: int
         ) -> TOutput:
-            def log_tasks(prefix: str = "") -> int:
-                """Log currently running asyncio tasks"""
-                all_tasks = asyncio.all_tasks()
-                running_tasks = [t for t in all_tasks if not t.done()]
-                task_count = len(running_tasks)
-                waiting_tasks = [t for t in running_tasks if t._coro.cr_await is not None]
-
-                # Log using standard logging module
-                logger = logging.getLogger(__name__)
-                logger.info(
-                    f"{prefix} | ASYNC TASKS: Total Running={task_count} | "
-                    f"all_tasks={len(all_tasks)} | "
-                    f"waiting_tasks={len(waiting_tasks)}"
-                )
-
-                return task_count
-
             if self.semaphore is None:
-                log_tasks(prefix="[NO SEMAPHORE IN PROCESSOR]")
                 return await process_row_fn(
                     context=ParallelFunctionContext(
                         name=name,
@@ -144,10 +125,8 @@ class AsyncParallelProcessor:
                     additional_parameters=kwargs,
                 )
             else:
-                log_tasks(prefix="[BEFORE PROCESSOR SEMAPHORE ACQUIRE]")
                 async with self.semaphore:
-
-                    result = await process_row_fn(
+                    return await process_row_fn(
                         context=ParallelFunctionContext(
                             name=name,
                             log_level=log_level,
@@ -158,8 +137,6 @@ class AsyncParallelProcessor:
                         parameters=parameters,
                         additional_parameters=kwargs,
                     )
-                    log_tasks(prefix="[BEFORE PROCESSOR SEMAPHORE RELEASE]")
-                    return result

         total_task_count: int = len(rows)

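The cleanup above relies on the fact that returning from inside `async with semaphore:` still releases the semaphore, so the intermediate `result` variable and the task-count logging around acquire and release are unnecessary. A minimal, self-contained illustration of that pattern:

```python
# Minimal illustration of bounding concurrency with asyncio.Semaphore and
# returning directly from inside the async with block.
import asyncio
from collections.abc import Awaitable, Callable


async def bounded(semaphore: asyncio.Semaphore, coro_fn: Callable[[int], Awaitable[int]], arg: int) -> int:
    async with semaphore:
        # the semaphore is released on return, even without a temporary variable
        return await coro_fn(arg)


async def main() -> None:
    sem = asyncio.Semaphore(2)  # at most two tasks run concurrently

    async def work(i: int) -> int:
        await asyncio.sleep(0.01)
        return i * i

    results = await asyncio.gather(*(bounded(sem, work, i) for i in range(5)))
    print(results)  # [0, 1, 4, 9, 16]


asyncio.run(main())
```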