helix.fhir.client.sdk 4.1.67__py3-none-any.whl → 4.2.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- helix_fhir_client_sdk/dictionary_parser.py +4 -0
- helix_fhir_client_sdk/fhir_auth_mixin.py +17 -10
- helix_fhir_client_sdk/fhir_client.py +161 -61
- helix_fhir_client_sdk/fhir_delete_mixin.py +62 -45
- helix_fhir_client_sdk/fhir_merge_mixin.py +188 -163
- helix_fhir_client_sdk/fhir_merge_resources_mixin.py +200 -9
- helix_fhir_client_sdk/fhir_patch_mixin.py +97 -81
- helix_fhir_client_sdk/fhir_update_mixin.py +71 -54
- helix_fhir_client_sdk/graph/simulated_graph_processor_mixin.py +5 -174
- helix_fhir_client_sdk/open_telemetry/__init__.py +0 -0
- helix_fhir_client_sdk/open_telemetry/attribute_names.py +7 -0
- helix_fhir_client_sdk/open_telemetry/span_names.py +12 -0
- helix_fhir_client_sdk/queue/request_queue_mixin.py +46 -119
- helix_fhir_client_sdk/responses/fhir_client_protocol.py +9 -1
- helix_fhir_client_sdk/responses/fhir_response_processor.py +73 -54
- helix_fhir_client_sdk/responses/get/fhir_get_bundle_response.py +0 -2
- helix_fhir_client_sdk/responses/merge/fhir_merge_resource_response_entry.py +30 -0
- helix_fhir_client_sdk/utilities/async_parallel_processor/v1/async_parallel_processor.py +1 -24
- helix_fhir_client_sdk/utilities/cache/request_cache.py +32 -43
- helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py +184 -144
- helix_fhir_client_sdk/utilities/retryable_aiohttp_response.py +2 -1
- helix_fhir_client_sdk/utilities/url_checker.py +46 -12
- helix_fhir_client_sdk/validators/async_fhir_validator.py +3 -0
- helix_fhir_client_sdk-4.2.18.dist-info/METADATA +200 -0
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/RECORD +32 -25
- tests/async/test_benchmark_compress.py +448 -0
- tests/async/test_benchmark_merge.py +506 -0
- tests/async/test_retryable_client_session_management.py +159 -0
- tests/test_fhir_client_clone.py +155 -0
- helix_fhir_client_sdk-4.1.67.dist-info/METADATA +0 -115
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/WHEEL +0 -0
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/licenses/LICENSE +0 -0
- {helix_fhir_client_sdk-4.1.67.dist-info → helix_fhir_client_sdk-4.2.18.dist-info}/top_level.txt +0 -0
helix_fhir_client_sdk/utilities/cache/request_cache.py

@@ -1,4 +1,3 @@
-import asyncio
 from collections.abc import AsyncGenerator
 from datetime import datetime
 from types import TracebackType
@@ -11,7 +10,7 @@ from helix_fhir_client_sdk.utilities.cache.request_cache_entry import RequestCacheEntry
 
 class RequestCache:
     """
-    This is a class that caches requests to the FHIR server
+    This is a class that caches requests to the FHIR server.
     It is used to avoid multiple requests to the FHIR server when doing a large number
     of requests for the same resource.
     """
@@ -20,7 +19,6 @@ class RequestCache:
         "cache_hits",
         "cache_misses",
         "_cache",
-        "_lock",
         "_clear_cache_at_the_end",
     ]
 
@@ -33,7 +31,6 @@ class RequestCache:
         self.cache_hits: int = 0
         self.cache_misses: int = 0
         self._cache: dict[str, RequestCacheEntry] = initial_dict or {}
-        self._lock: asyncio.Lock = asyncio.Lock()
         self._clear_cache_at_the_end: bool | None = clear_cache_at_the_end
 
     async def __aenter__(self) -> "RequestCache":
@@ -42,8 +39,7 @@ class RequestCache:
         It returns the RequestCache instance.
         """
         if self._clear_cache_at_the_end:
-            async with self._lock:
-                self._cache.clear()
+            self._cache.clear()
         return self
 
     async def __aexit__(
@@ -57,8 +53,7 @@ class RequestCache:
         It clears the cache.
         """
         if self._clear_cache_at_the_end:
-            async with self._lock:
-                self._cache.clear()
+            self._cache.clear()
 
         if exc_value is not None:
             raise exc_value.with_traceback(traceback)
@@ -75,15 +70,14 @@ class RequestCache:
         """
         key: str = f"{resource_type}/{resource_id}"
 
-        async with self._lock:
-            cached_entry = self._cache.get(key)
+        cached_entry = self._cache.get(key)
 
-            if cached_entry is not None:
-                self.cache_hits += 1
-                return cached_entry
+        if cached_entry is not None:
+            self.cache_hits += 1
+            return cached_entry
 
-            self.cache_misses += 1
-            return None
+        self.cache_misses += 1
+        return None
 
     async def add_async(
         self,
@@ -111,43 +105,40 @@ class RequestCache:
         """
         key: str = f"{resource_type}/{resource_id}"
 
-        async with self._lock:
-            # Create the cache entry
-            cache_entry = RequestCacheEntry(
-                id_=resource_id,
-                resource_type=resource_type,
-                status=status,
-                bundle_entry=bundle_entry,
-                last_modified=last_modified,
-                etag=etag,
-                from_input_cache=from_input_cache,
-                raw_hash=raw_hash,
-            )
-
-            # Add to the dictionary
-            self._cache[key] = cache_entry
+        # Create the cache entry
+        cache_entry = RequestCacheEntry(
+            id_=resource_id,
+            resource_type=resource_type,
+            status=status,
+            bundle_entry=bundle_entry,
+            last_modified=last_modified,
+            etag=etag,
+            from_input_cache=from_input_cache,
+            raw_hash=raw_hash,
+        )
+
+        # Add to the dictionary
+        self._cache[key] = cache_entry
 
-            return True
+        return True
 
     async def remove_async(self, *, resource_key: str) -> bool:
         """
         This method remove the given data from the cache.
         :param resource_key: resource key contains both resourceType and resourceId. Eg: Patient/123
         """
-        async with self._lock:
-            if resource_key not in self._cache:
-                return False
+        if resource_key not in self._cache:
+            return False
 
-            del self._cache[resource_key]
+        del self._cache[resource_key]
 
-            return True
+        return True
 
     async def clear_async(self) -> None:
         """
         This method clears the cache.
         """
-        async with self._lock:
-            self._cache.clear()
+        self._cache.clear()
 
     async def get_entries_async(self) -> AsyncGenerator[RequestCacheEntry, None]:
         """
@@ -155,9 +146,8 @@ class RequestCache:
 
         :return: The keys in the cache.
         """
-        async with self._lock:
-            for entry in self._cache.values():
-                yield entry
+        for entry in self._cache.values():
+            yield entry
 
     async def get_keys_async(self) -> list[str]:
         """
@@ -165,8 +155,7 @@ class RequestCache:
 
         :return: The entries in the cache.
         """
-        async with self._lock:
-            return list(self._cache.keys())
+        return list(self._cache.keys())
 
     def __len__(self) -> int:
         """
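The RequestCache hunks above delete the internal asyncio.Lock entirely: its import, its __slots__ entry, its initialization, and every `async with self._lock:` wrapper. Dropping the lock is safe as long as the cache is only touched from one event loop and no method awaits between reading and mutating the dict, because coroutines only yield control at await points. A minimal standalone sketch of the resulting pattern (MiniRequestCache and CacheEntry are illustrative names, not the SDK's API):

import asyncio
from dataclasses import dataclass


@dataclass
class CacheEntry:
    # Hypothetical stand-in for the SDK's RequestCacheEntry
    resource_type: str
    resource_id: str
    status: int


class MiniRequestCache:
    """Caches lookups by 'ResourceType/id'. No lock is needed because all
    access happens on a single event loop and never awaits mid-operation."""

    def __init__(self) -> None:
        self.cache_hits: int = 0
        self.cache_misses: int = 0
        self._cache: dict[str, CacheEntry] = {}

    async def __aenter__(self) -> "MiniRequestCache":
        self._cache.clear()  # start with an empty cache
        return self

    async def __aexit__(self, exc_type, exc_value, traceback) -> None:
        self._cache.clear()  # drop entries when the context ends

    def get(self, resource_type: str, resource_id: str) -> CacheEntry | None:
        entry = self._cache.get(f"{resource_type}/{resource_id}")
        if entry is not None:
            self.cache_hits += 1
            return entry
        self.cache_misses += 1
        return None

    def add(self, entry: CacheEntry) -> None:
        self._cache[f"{entry.resource_type}/{entry.resource_id}"] = entry


async def main() -> None:
    async with MiniRequestCache() as cache:
        cache.add(CacheEntry("Patient", "123", status=200))
        assert cache.get("Patient", "123") is not None  # hit
        assert cache.get("Patient", "999") is None      # miss
        print(f"hits={cache.cache_hits} misses={cache.cache_misses}")  # hits=1 misses=1


asyncio.run(main())

The async context manager mirrors the SDK's clear_cache_at_the_end behavior of beginning and ending with an empty cache.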
helix_fhir_client_sdk/utilities/retryable_aiohttp_client.py

@@ -7,12 +7,15 @@ from typing import Any, cast
 import async_timeout
 from aiohttp import ClientError, ClientResponse, ClientResponseError, ClientSession
 from multidict import MultiMapping
+from opentelemetry import trace
 
 from helix_fhir_client_sdk.function_types import (
     RefreshTokenFunction,
     RefreshTokenResult,
     TraceRequestFunction,
 )
+from helix_fhir_client_sdk.open_telemetry.attribute_names import FhirClientSdkOpenTelemetryAttributeNames
+from helix_fhir_client_sdk.open_telemetry.span_names import FhirClientSdkOpenTelemetrySpanNames
 from helix_fhir_client_sdk.utilities.retryable_aiohttp_response import (
     RetryableAioHttpResponse,
 )

@@ -20,6 +23,8 @@ from helix_fhir_client_sdk.utilities.retryable_aiohttp_url_result import (
     RetryableAioHttpUrlResult,
 )
 
+TRACER = trace.get_tracer(__name__)
+
 
 class RetryableAioHttpClient:
     def __init__(
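The new module-level `TRACER = trace.get_tracer(__name__)` only needs the lightweight opentelemetry-api package; until the application installs a TracerProvider, it hands out no-op spans, so the instrumentation is essentially free for users who do not opt in. A sketch of the one-time wiring an application would do to actually export those spans (the console exporter and the span/attribute names here are illustrative choices, not the SDK's constants):

# Requires the optional opentelemetry-sdk package; the API package alone
# only provides no-op tracers.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)  # do this once, at application startup

# Tracers obtained via trace.get_tracer now record real spans instead of no-ops.
tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("http_get") as span:  # illustrative span name
    span.set_attribute("url", "https://fhir.example.com/Patient/123")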
@@ -32,6 +37,7 @@ class RetryableAioHttpClient:
         refresh_token_func: RefreshTokenFunction | None,
         tracer_request_func: TraceRequestFunction | None,
         fn_get_session: Callable[[], ClientSession] | None = None,
+        caller_managed_session: bool = False,
         exclude_status_codes_from_retry: list[int] | None = None,
         use_data_streaming: bool | None,
         compress: bool | None = False,
@@ -42,8 +48,32 @@ class RetryableAioHttpClient:
         access_token_expiry_date: datetime | None,
     ) -> None:
         """
-        RetryableClient provides a way to make HTTP calls with automatic retry and automatic refreshing of access tokens
+        RetryableClient provides a way to make HTTP calls with automatic retry and automatic refreshing of access tokens.
+
+        Session Lifecycle Management:
+        - If caller_managed_session is False (default): The SDK manages the session lifecycle.
+          The session will be automatically closed when exiting the context manager.
+        - If caller_managed_session is True: The caller is responsible for managing the session lifecycle.
+          The SDK will NOT close the session - the caller must close it themselves.
 
+        :param retries: Number of retry attempts for failed requests
+        :param timeout_in_seconds: Timeout for HTTP requests
+        :param backoff_factor: Factor for exponential backoff between retries
+        :param retry_status_codes: HTTP status codes that trigger a retry
+        :param refresh_token_func: Function to refresh authentication tokens
+        :param tracer_request_func: Function to trace/log requests
+        :param fn_get_session: Optional callable that returns a ClientSession. If None, a basic
+            ClientSession will be created internally.
+        :param caller_managed_session: If True, the caller is responsible for closing the session.
+            If False (default), the SDK will close the session on exit.
+        :param exclude_status_codes_from_retry: Status codes to exclude from retry logic
+        :param use_data_streaming: Whether to stream response data
+        :param compress: Whether to compress request data
+        :param send_data_as_chunked: Whether to use chunked transfer encoding
+        :param throw_exception_on_error: Whether to raise exceptions on HTTP errors
+        :param log_all_url_results: Whether to log all URL results
+        :param access_token: Access token for authentication
+        :param access_token_expiry_date: Expiry date of the access token
         """
         self.retries: int = retries
         self.timeout_in_seconds: float | None = timeout_in_seconds
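The docstring above spells out the new ownership contract: by default the SDK closes the session on context exit, while caller_managed_session=True leaves the session open so one connection pool can be shared across many client instances. A standalone sketch of that contract using plain aiohttp (SessionOwner is a hypothetical stand-in, not the SDK class):

import asyncio
from collections.abc import Callable

from aiohttp import ClientSession


class SessionOwner:
    """Mimics the SDK's session lifecycle rule: close the session on exit
    only when the caller has not claimed ownership of it."""

    def __init__(
        self,
        fn_get_session: Callable[[], ClientSession],
        caller_managed_session: bool = False,
    ) -> None:
        self._fn_get_session = fn_get_session
        self._caller_managed_session = caller_managed_session
        self.session: ClientSession | None = None

    async def __aenter__(self) -> "SessionOwner":
        self.session = self._fn_get_session()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        # SDK-managed mode closes here; caller-managed mode leaves it open.
        if not self._caller_managed_session and self.session is not None:
            await self.session.close()


async def main() -> None:
    shared = ClientSession()  # one pooled session, reused across clients
    try:
        async with SessionOwner(lambda: shared, caller_managed_session=True):
            pass  # shared is still open after this block...
        async with SessionOwner(lambda: shared, caller_managed_session=True):
            pass  # ...so it can be reused here
    finally:
        await shared.close()  # the caller closes it exactly once


asyncio.run(main())

The trade-off is the usual one for shared resources: whoever creates the session must be the one to close it, exactly once.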
@@ -53,6 +83,8 @@ class RetryableAioHttpClient:
         )
         self.refresh_token_func_async: RefreshTokenFunction | None = refresh_token_func
         self.trace_function_async: TraceRequestFunction | None = tracer_request_func
+        self._caller_managed_session: bool = caller_managed_session
+        # If no session factory provided, use a default one that creates a basic ClientSession
         self.fn_get_session: Callable[[], ClientSession] = (
             fn_get_session if fn_get_session is not None else lambda: ClientSession()
         )
@@ -76,7 +108,9 @@ class RetryableAioHttpClient:
         exc_val: BaseException | None,
         exc_tb: type[BaseException] | None | None,
     ) -> None:
-        if self.session is not None:
+        # Only close the session if SDK created it (fn_get_session was not provided)
+        # If the caller provided fn_get_session, they are responsible for closing the session
+        if not self._caller_managed_session and self.session is not None:
             await self.session.close()
 
     @staticmethod
@@ -110,7 +144,7 @@ class RetryableAioHttpClient:
             try:
                 if headers:
                     kwargs["headers"] = headers
-                # if there is no data then remove from kwargs so as not to confuse aiohttp
+                # if there is no data, then remove from kwargs so as not to confuse aiohttp
                 if "data" in kwargs and kwargs["data"] is None:
                     del kwargs["data"]
                 # compression and chunked can only be enabled if there is content sent
@@ -120,164 +154,101 @@ class RetryableAioHttpClient:
                 if self.compress:
                     kwargs["compress"] = self.compress
                 assert self.session is not None
-                async with async_timeout.timeout(self.timeout_in_seconds):
-                    start_time: float = time.time()
-                    response: ClientResponse = await self.session.request(
-                        method,
+                with TRACER.start_as_current_span(FhirClientSdkOpenTelemetrySpanNames.HTTP_GET) as span:
+                    span.set_attribute(
+                        FhirClientSdkOpenTelemetryAttributeNames.URL,
                         url,
-                        **kwargs,
                     )
-                    # Append the result to the list of results
-                    if self.log_all_url_results:
-                        results_by_url.append(
-                            RetryableAioHttpUrlResult(
+                    async with async_timeout.timeout(self.timeout_in_seconds):
+                        start_time: float = time.time()
+                        response: ClientResponse = await self.session.request(
+                            method,
+                            url,
+                            **kwargs,
+                        )
+                        # Append the result to the list of results
+                        if self.log_all_url_results:
+                            results_by_url.append(
+                                RetryableAioHttpUrlResult(
+                                    ok=response.ok,
+                                    url=url,
+                                    status_code=response.status,
+                                    retry_count=retry_attempts,
+                                    start_time=start_time,
+                                    end_time=time.time(),
+                                )
+                            )
+                        response_headers: dict[str, str] = {
+                            k: ",".join(response.headers.getall(k)) for k in response.headers.keys()
+                        }
+                        response_headers_multi_mapping: MultiMapping[str] = cast(MultiMapping[str], response.headers)
+
+                        if self.trace_function_async:
+                            request_headers: dict[str, str] = {
+                                k: ",".join(response.request_info.headers.getall(k))
+                                for k in response.request_info.headers.keys()
+                            }
+                            await self.trace_function_async(
                                 ok=response.ok,
                                 url=url,
                                 status_code=response.status,
+                                access_token=access_token,
+                                expiry_date=expiry_date,
                                 retry_count=retry_attempts,
                                 start_time=start_time,
                                 end_time=time.time(),
+                                request_headers=request_headers,
+                                response_headers=response_headers,
                             )
-                        )
-                    response_headers: dict[str, str] = {
-                        k: ",".join(response.headers.getall(k)) for k in response.headers.keys()
-                    }
-                    response_headers_multi_mapping: MultiMapping[str] = cast(MultiMapping[str], response.headers)
-
-                    if self.trace_function_async:
-                        request_headers: dict[str, str] = {
-                            k: ",".join(response.request_info.headers.getall(k))
-                            for k in response.request_info.headers.keys()
-                        }
-                        await self.trace_function_async(
-                            ok=response.ok,
-                            url=url,
-                            status_code=response.status,
-                            access_token=access_token,
-                            expiry_date=expiry_date,
-                            retry_count=retry_attempts,
-                            start_time=start_time,
-                            end_time=time.time(),
-                            request_headers=request_headers,
-                            response_headers=response_headers,
-                        )
 
-                    if response.ok:
-                        # If the response is successful, return the response
-                        return RetryableAioHttpResponse(
-                            ok=response.ok,
-                            status=response.status,
-                            response_headers=response_headers,
-                            response_text=(
-                                await self.get_safe_response_text_async(response=response)
-                                if not self.use_data_streaming
-                                else ""
-                            ),
-                            content=response.content,
-                            use_data_streaming=self.use_data_streaming,
-                            results_by_url=results_by_url,
-                            access_token=access_token,
-                            access_token_expiry_date=expiry_date,
-                            retry_count=retry_attempts,
-                        )
-                    elif (
-                        self.exclude_status_codes_from_retry and response.status in self.exclude_status_codes_from_retry
-                    ):
-                        return RetryableAioHttpResponse(
-                            ok=response.ok,
-                            status=response.status,
-                            response_headers=response_headers,
-                            response_text=await self.get_safe_response_text_async(response=response),
-                            content=response.content,
-                            use_data_streaming=self.use_data_streaming,
-                            results_by_url=results_by_url,
-                            access_token=access_token,
-                            access_token_expiry_date=expiry_date,
-                            retry_count=retry_attempts,
-                        )
-                    elif response.status == 400:
-                        return RetryableAioHttpResponse(
-                            ok=response.ok,
-                            status=response.status,
-                            response_headers=response_headers,
-                            response_text=await self.get_safe_response_text_async(response=response),
-                            content=response.content,
-                            use_data_streaming=self.use_data_streaming,
-                            results_by_url=results_by_url,
-                            access_token=access_token,
-                            access_token_expiry_date=expiry_date,
-                            retry_count=retry_attempts,
-                        )
-                    elif response.status in [403, 404]:
-                        return RetryableAioHttpResponse(
-                            ok=response.ok,
-                            status=response.status,
-                            response_headers=response_headers,
-                            response_text=await self.get_safe_response_text_async(response=response),
-                            content=response.content,
-                            use_data_streaming=self.use_data_streaming,
-                            results_by_url=results_by_url,
-                            access_token=access_token,
-                            access_token_expiry_date=expiry_date,
-                            retry_count=retry_attempts,
-                        )
-                    elif response.status == 429:
-                        await self._handle_429(response=response, full_url=url)
-                    elif self.retry_status_codes and response.status in self.retry_status_codes:
-                        raise ClientResponseError(
-                            status=response.status,
-                            message="Retryable status code received",
-                            headers=response_headers_multi_mapping,
-                            history=response.history,
-                            request_info=response.request_info,
-                        )
-                    elif response.status == 401 and self.refresh_token_func_async:
-                        # Call the token refresh function if status code is 401
-                        refresh_token_result: RefreshTokenResult = await self.refresh_token_func_async(
-                            current_token=access_token,
-                            expiry_date=expiry_date,
-                            url=url,
-                            status_code=response.status,
-                            retry_count=retry_attempts,
-                        )
-                        if refresh_token_result.abort_request or refresh_token_result.access_token is None:
+                        if response.ok:
+                            # If the response is successful, return the response
                             return RetryableAioHttpResponse(
-                                ok=False,
-                                status=401,
-                                response_headers={},
-                                response_text="Unauthorized",
-                                content=None,
+                                ok=response.ok,
+                                status=response.status,
+                                response_headers=response_headers,
+                                response_text=(
+                                    await self.get_safe_response_text_async(response=response)
+                                    if not self.use_data_streaming
+                                    else ""
+                                ),
+                                content=response.content,
                                 use_data_streaming=self.use_data_streaming,
                                 results_by_url=results_by_url,
                                 access_token=access_token,
                                 access_token_expiry_date=expiry_date,
                                 retry_count=retry_attempts,
                             )
-                        else:  # we got a valid token
-                            access_token = refresh_token_result.access_token
-                            expiry_date = refresh_token_result.expiry_date
-                            if not headers:
-                                headers = {}
-                            headers["Authorization"] = f"Bearer {access_token}"
-                            if retry_attempts >= self.retries:
-                                raise ClientResponseError(
-                                    status=response.status,
-                                    message="Unauthorized",
-                                    headers=response_headers_multi_mapping,
-                                    history=response.history,
-                                    request_info=response.request_info,
-                                )
-                            await asyncio.sleep(self.backoff_factor * (2 ** (retry_attempts - 1)))
-                    else:
-                        if self._throw_exception_on_error:
-                            raise ClientResponseError(
+                        elif (
+                            self.exclude_status_codes_from_retry
+                            and response.status in self.exclude_status_codes_from_retry
+                        ):
+                            return RetryableAioHttpResponse(
+                                ok=response.ok,
                                 status=response.status,
-                                message="Non-retryable status code received",
-                                headers=response_headers_multi_mapping,
-                                history=response.history,
-                                request_info=response.request_info,
+                                response_headers=response_headers,
+                                response_text=await self.get_safe_response_text_async(response=response),
+                                content=response.content,
+                                use_data_streaming=self.use_data_streaming,
+                                results_by_url=results_by_url,
+                                access_token=access_token,
+                                access_token_expiry_date=expiry_date,
+                                retry_count=retry_attempts,
                             )
-                        else:
+                        elif response.status == 400:
+                            return RetryableAioHttpResponse(
+                                ok=response.ok,
+                                status=response.status,
+                                response_headers=response_headers,
+                                response_text=await self.get_safe_response_text_async(response=response),
+                                content=response.content,
+                                use_data_streaming=self.use_data_streaming,
+                                results_by_url=results_by_url,
+                                access_token=access_token,
+                                access_token_expiry_date=expiry_date,
+                                retry_count=retry_attempts,
+                            )
+                        elif response.status in [403, 404]:
                             return RetryableAioHttpResponse(
                                 ok=response.ok,
                                 status=response.status,
@@ -290,6 +261,75 @@ class RetryableAioHttpClient:
                                 access_token_expiry_date=expiry_date,
                                 retry_count=retry_attempts,
                             )
+                        elif response.status == 429:
+                            await self._handle_429(response=response, full_url=url)
+                        elif response.status == 401 and self.refresh_token_func_async:
+                            # Call the token refresh function if status code is 401
+                            refresh_token_result: RefreshTokenResult = await self.refresh_token_func_async(
+                                current_token=access_token,
+                                expiry_date=expiry_date,
+                                url=url,
+                                status_code=response.status,
+                                retry_count=retry_attempts,
+                            )
+                            if refresh_token_result.abort_request or refresh_token_result.access_token is None:
+                                return RetryableAioHttpResponse(
+                                    ok=False,
+                                    status=401,
+                                    response_headers={},
+                                    response_text="Unauthorized",
+                                    content=None,
+                                    use_data_streaming=self.use_data_streaming,
+                                    results_by_url=results_by_url,
+                                    access_token=access_token,
+                                    access_token_expiry_date=expiry_date,
+                                    retry_count=retry_attempts,
+                                )
+                            else:  # we got a valid token
+                                access_token = refresh_token_result.access_token
+                                expiry_date = refresh_token_result.expiry_date
+                                if not headers:
+                                    headers = {}
+                                headers["Authorization"] = f"Bearer {access_token}"
+                                if retry_attempts >= self.retries:
+                                    raise ClientResponseError(
+                                        status=response.status,
+                                        message="Unauthorized",
+                                        headers=response_headers_multi_mapping,
+                                        history=response.history,
+                                        request_info=response.request_info,
+                                    )
+                                await asyncio.sleep(self.backoff_factor * (2 ** (retry_attempts - 1)))
+                        elif self.retry_status_codes and response.status in self.retry_status_codes:
+                            raise ClientResponseError(
+                                status=response.status,
+                                message="Retryable status code received",
+                                headers=response_headers_multi_mapping,
+                                history=response.history,
+                                request_info=response.request_info,
+                            )
+                        else:
+                            if self._throw_exception_on_error:
+                                raise ClientResponseError(
+                                    status=response.status,
+                                    message="Non-retryable status code received",
+                                    headers=response_headers_multi_mapping,
+                                    history=response.history,
+                                    request_info=response.request_info,
+                                )
+                            else:
+                                return RetryableAioHttpResponse(
+                                    ok=response.ok,
+                                    status=response.status,
+                                    response_headers=response_headers,
+                                    response_text=await self.get_safe_response_text_async(response=response),
+                                    content=response.content,
+                                    use_data_streaming=self.use_data_streaming,
+                                    results_by_url=results_by_url,
+                                    access_token=access_token,
+                                    access_token_expiry_date=expiry_date,
+                                    retry_count=retry_attempts,
+                                )
         except (TimeoutError, ClientError, ClientResponseError) as e:
             if retry_attempts >= self.retries:
                 if self._throw_exception_on_error:
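The refreshed-token path above backs off with `asyncio.sleep(self.backoff_factor * (2 ** (retry_attempts - 1)))`, i.e. classic exponential backoff. A quick sketch of the schedule that formula produces (the backoff_factor of 0.5 is an arbitrary example value):

def backoff_delay(backoff_factor: float, retry_attempt: int) -> float:
    # Mirrors the sleep above: factor * 2^(attempt - 1)
    return backoff_factor * (2 ** (retry_attempt - 1))


for attempt in range(1, 6):
    print(attempt, backoff_delay(0.5, attempt))
# 1 0.5
# 2 1.0
# 3 2.0
# 4 4.0
# 5 8.0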
@@ -388,7 +428,7 @@ class RetryableAioHttpClient:
         if retry_after_text:
             # noinspection PyBroadException
             try:
-                if retry_after_text.isnumeric():  # it is number of seconds
+                if retry_after_text.isnumeric():  # it is a number of seconds
                     await asyncio.sleep(int(retry_after_text))
                 else:
                     wait_till: datetime = datetime.strptime(retry_after_text, "%a, %d %b %Y %H:%M:%S GMT")
@@ -402,7 +442,7 @@ class RetryableAioHttpClient:
                     if time_diff > 0:
                         await asyncio.sleep(time_diff)
             except Exception:
-                # if there was some exception parsing the Retry-After header, sleep for 60 seconds
+                # if there was some exception, parsing the Retry-After header, sleep for 60 seconds
                 await asyncio.sleep(60)
         else:
             await asyncio.sleep(60)
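The _handle_429 hunks above accept both forms RFC 7231 allows for Retry-After: an integer count of seconds or an HTTP-date, with a fixed 60-second sleep as the fallback when parsing fails. A standalone sketch of that decision (seconds_to_wait is an illustrative helper, not the SDK's):

from datetime import datetime


def seconds_to_wait(retry_after_text: str) -> float:
    """Return how long to sleep for a Retry-After header value."""
    try:
        if retry_after_text.isnumeric():  # e.g. "120"
            return float(retry_after_text)
        # e.g. "Wed, 21 Oct 2015 07:28:00 GMT" (RFC 7231 HTTP-date)
        wait_till = datetime.strptime(retry_after_text, "%a, %d %b %Y %H:%M:%S GMT")
        return max((wait_till - datetime.utcnow()).total_seconds(), 0.0)
    except Exception:
        return 60.0  # fall back to a fixed wait, as the SDK does


print(seconds_to_wait("120"))                            # 120.0
print(seconds_to_wait("Wed, 21 Oct 2015 07:28:00 GMT"))  # 0.0 (date in the past)
print(seconds_to_wait("not-a-date"))                     # 60.0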
helix_fhir_client_sdk/utilities/retryable_aiohttp_response.py

@@ -3,6 +3,7 @@ from datetime import datetime
 from typing import Any, cast
 
 from aiohttp import StreamReader
+from multidict import CIMultiDict
 
 from helix_fhir_client_sdk.utilities.retryable_aiohttp_url_result import (
     RetryableAioHttpUrlResult,

@@ -53,7 +54,7 @@ class RetryableAioHttpResponse:
         self.status: int = status
         """ Status code of the response """
 
-        self.response_headers: dict[str, str] = response_headers
+        self.response_headers: CIMultiDict[str] = CIMultiDict(response_headers)
         """ Headers of the response """
 
         self._response_text: str = response_text
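Switching response_headers from a plain dict to a CIMultiDict makes lookups case-insensitive, which matches HTTP semantics: header names carry no defined case, and servers emit Content-Type, content-type, and other variants interchangeably. A quick illustration:

from multidict import CIMultiDict

headers = CIMultiDict({"Content-Type": "application/fhir+json"})

# All casings resolve to the same entry; a plain dict would miss these.
print(headers["content-type"])      # application/fhir+json
print(headers.get("CONTENT-TYPE"))  # application/fhir+json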
helix_fhir_client_sdk/utilities/url_checker.py

@@ -5,6 +5,52 @@ from furl import furl
 
 
 class UrlChecker:
+    @staticmethod
+    def preserve_port_from_base_url(*, base_url: str, next_url: str) -> str:
+        """
+        INC-285: Preserve the port from the base URL when the next URL has the same host
+        but is missing the port.
+
+        This fixes an issue where FHIR servers generate pagination URLs without the port,
+        causing requests to go to the default port (80 for HTTP, 443 for HTTPS) instead
+        of the correct port.
+
+        Example:
+            base_url: http://fhir-server:3000/4_0_0/Observation
+            next_url: http://fhir-server/4_0_0/Observation?_count=10&_getpagesoffset=10
+            result: http://fhir-server:3000/4_0_0/Observation?_count=10&_getpagesoffset=10
+
+        Args:
+            base_url (str): The original base URL with the correct port
+            next_url (str): The URL returned by the FHIR server (may be missing port)
+
+        Returns:
+            str: The next URL with the port preserved from the base URL if applicable
+        """
+        base_parsed = urlparse(base_url)
+        next_parsed = urlparse(next_url)
+
+        # Only apply fix if:
+        # 1. Both URLs have the same scheme
+        # 2. Both URLs have the same hostname (ignoring port)
+        # 3. Base URL has an explicit port
+        # 4. The next URL does NOT have an explicit port
+        base_hostname = base_parsed.hostname
+        next_hostname = next_parsed.hostname
+
+        if (
+            base_parsed.scheme == next_parsed.scheme
+            and base_hostname == next_hostname
+            and base_parsed.port is not None
+            and next_parsed.port is None
+        ):
+            # Reconstruct the next URL with the port from base URL
+            next_furl = furl(next_url)
+            next_furl.port = base_parsed.port
+            return str(next_furl)
+
+        return next_url
+
     @staticmethod
     def is_absolute_url(*, url: str | furl) -> bool:
         """
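The docstring's own example, run through the new helper (assumes the helix.fhir.client.sdk package is installed):

from helix_fhir_client_sdk.utilities.url_checker import UrlChecker

# Pagination URL came back without the port; restore it from the base URL.
fixed = UrlChecker.preserve_port_from_base_url(
    base_url="http://fhir-server:3000/4_0_0/Observation",
    next_url="http://fhir-server/4_0_0/Observation?_count=10&_getpagesoffset=10",
)
print(fixed)
# http://fhir-server:3000/4_0_0/Observation?_count=10&_getpagesoffset=10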
@@ -57,16 +103,4 @@ class UrlChecker:
 
         absolute_url = absolute_url.join(relative)
 
-        # # Update path
-        # if relative.path:
-        #     absolute_url.path.segments += relative.path.segments
-        #
-        # # Update query parameters
-        # if relative.query:
-        #     absolute_url.query.set(relative.query.params)
-        #
-        # # Update fragment if present
-        # if relative.fragment:
-        #     absolute_url.fragment = relative.fragment
-        #
         return cast(str, absolute_url.url)