awslabs.healthlake-mcp-server 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- awslabs/__init__.py +16 -0
- awslabs/healthlake_mcp_server/__init__.py +17 -0
- awslabs/healthlake_mcp_server/fhir_operations.py +701 -0
- awslabs/healthlake_mcp_server/main.py +77 -0
- awslabs/healthlake_mcp_server/models.py +120 -0
- awslabs/healthlake_mcp_server/server.py +665 -0
- awslabs_healthlake_mcp_server-0.0.1.dist-info/METADATA +631 -0
- awslabs_healthlake_mcp_server-0.0.1.dist-info/RECORD +12 -0
- awslabs_healthlake_mcp_server-0.0.1.dist-info/WHEEL +4 -0
- awslabs_healthlake_mcp_server-0.0.1.dist-info/entry_points.txt +2 -0
- awslabs_healthlake_mcp_server-0.0.1.dist-info/licenses/LICENSE +175 -0
- awslabs_healthlake_mcp_server-0.0.1.dist-info/licenses/NOTICE +2 -0
@@ -0,0 +1,701 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""AWS HealthLake client for FHIR operations."""
+
+# Standard library imports
+# Third-party imports
+import boto3
+import httpx
+
+# Local imports
+from . import __version__
+from botocore.auth import SigV4Auth
+from botocore.awsrequest import AWSRequest
+from botocore.config import Config
+from botocore.exceptions import ClientError, NoCredentialsError
+from loguru import logger
+from typing import Any, Dict, List, Optional, Tuple
+from urllib.parse import urljoin
+
+
+# HealthLake API limits
+MAX_SEARCH_COUNT = 100  # Maximum number of resources per search request
+DATASTORE_ID_LENGTH = 32  # AWS HealthLake datastore ID length
+
+
+def validate_datastore_id(datastore_id: str) -> str:
+    """Validate AWS HealthLake datastore ID format."""
+    if not datastore_id or len(datastore_id) != DATASTORE_ID_LENGTH:
+        raise ValueError(f'Datastore ID must be {DATASTORE_ID_LENGTH} characters')
+    return datastore_id
+
+
+class FHIRSearchError(Exception):
+    """Exception raised for FHIR search parameter errors."""
+
+    def __init__(self, message: str, invalid_params: Optional[List[str]] = None):
+        """Initialize FHIR search error with message and optional invalid parameters."""
+        self.invalid_params = invalid_params or []
+        super().__init__(message)
+
+
+class AWSAuth(httpx.Auth):
+    """Custom AWS SigV4 authentication for httpx."""
+
+    def __init__(self, credentials, region: str, service: str = 'healthlake'):
+        """Initialize AWS SigV4 authentication with credentials and region."""
+        self.credentials = credentials
+        self.region = region
+        self.service = service
+
+    def auth_flow(self, request):
+        """Apply AWS SigV4 authentication to the request."""
+        # Preserve the original Content-Length if it exists
+        original_content_length = request.headers.get('content-length')
+
+        # Use minimal headers for signing - include Content-Length if present
+        headers = {
+            'Accept': 'application/fhir+json',
+            'Content-Type': 'application/fhir+json',
+            'Host': request.url.host,
+        }
+
+        # Add Content-Length to headers for signing if present
+        if original_content_length:
+            headers['Content-Length'] = original_content_length
+
+        # For GET requests, no body
+        body = None if request.method.upper() == 'GET' else request.content
+
+        # Create AWS request for signing
+        aws_request = AWSRequest(
+            method=request.method, url=str(request.url), data=body, headers=headers
+        )
+
+        # Sign the request
+        signer = SigV4Auth(self.credentials, self.service, self.region)
+        signer.add_auth(aws_request)
+
+        # Clear existing headers and set only the signed ones
+        request.headers.clear()
+        for key, value in aws_request.headers.items():
+            request.headers[key] = value
+
+        yield request
+
+
+class HealthLakeClient:
+    """Client for AWS HealthLake FHIR operations."""
+
+    def __init__(self, region_name: Optional[str] = None):
+        """Initialize the HealthLake client."""
+        try:
+            self.session = boto3.Session()
+            self.healthlake_client = self.session.client(
+                'healthlake',
+                region_name=region_name,
+                config=Config(user_agent_extra=f'awslabs/mcp/healthlake-mcp-server/{__version__}'),
+            )
+            self.region = region_name or self.session.region_name or 'us-east-1'
+
+        except NoCredentialsError:
+            logger.error('AWS credentials not found. Please configure your credentials.')
+            raise
+
+    async def list_datastores(self, filter_status: Optional[str] = None) -> Dict[str, Any]:
+        """List HealthLake datastores."""
+        try:
+            kwargs = {}
+            if filter_status:
+                kwargs['Filter'] = {'DatastoreStatus': filter_status}
+
+            response = self.healthlake_client.list_fhir_datastores(**kwargs)
+            return response
+        except ClientError as e:
+            logger.error(f'Error listing datastores: {e}')
+            raise
+
+    async def get_datastore_details(self, datastore_id: str) -> Dict[str, Any]:
+        """Get details of a specific datastore."""
+        try:
+            response = self.healthlake_client.describe_fhir_datastore(DatastoreId=datastore_id)
+            return response
+        except ClientError as e:
+            logger.error(f'Error getting datastore details: {e}')
+            raise
+
+    def _get_fhir_endpoint(self, datastore_id: str) -> str:
+        """Get the FHIR endpoint URL for a datastore."""
+        return f'https://healthlake.{self.region}.amazonaws.com/datastore/{datastore_id}/r4/'
+
+    def _build_search_request(
+        self,
+        base_url: str,
+        resource_type: str,
+        search_params: Optional[Dict[str, Any]] = None,
+        include_params: Optional[List[str]] = None,
+        revinclude_params: Optional[List[str]] = None,
+        chained_params: Optional[Dict[str, str]] = None,
+        count: int = 100,
+        next_token: Optional[str] = None,
+    ) -> Tuple[str, Dict[str, str]]:
+        """Build search request with minimal processing."""
+        # Handle pagination first
+        if next_token:
+            return next_token, {}
+
+        # Build the search URL
+        url = f'{base_url.rstrip("/")}/{resource_type}/_search'
+
+        # Build form data with minimal processing
+        form_data = {'_count': str(count)}
+
+        # Add basic search parameters with proper encoding for FHIR modifiers
+        if search_params:
+            for key, value in search_params.items():
+                # URL-encode colons in parameter names for FHIR modifiers
+                encoded_key = key.replace(':', '%3A')
+                if isinstance(value, list):
+                    form_data[encoded_key] = ','.join(str(v) for v in value)
+                else:
+                    form_data[encoded_key] = str(value)
+
+        # Add chained parameters with proper encoding for FHIR modifiers
+        if chained_params:
+            for key, value in chained_params.items():
+                # URL-encode colons in parameter names for FHIR modifiers
+                encoded_key = key.replace(':', '%3A')
+                form_data[encoded_key] = str(value)
+
+        # Add include parameters
+        if include_params:
+            form_data['_include'] = ','.join(include_params)
+
+        # Add revinclude parameters
+        if revinclude_params:
+            form_data['_revinclude'] = ','.join(revinclude_params)
+
+        return url, form_data
+
+    def _validate_search_request(
+        self,
+        resource_type: str,
+        search_params: Optional[Dict[str, Any]] = None,
+        include_params: Optional[List[str]] = None,
+        revinclude_params: Optional[List[str]] = None,
+        chained_params: Optional[Dict[str, str]] = None,
+        count: int = 100,
+    ) -> List[str]:
+        """Minimal validation - only catch obvious errors."""
+        errors = []
+
+        # Basic sanity checks only
+        if not resource_type or not resource_type.strip():
+            errors.append('Resource type is required')
+
+        if count < 1 or count > 100:
+            errors.append('Count must be between 1 and 100')
+
+        # Basic format checks for include parameters
+        if include_params:
+            for param in include_params:
+                if ':' not in param:
+                    errors.append(
+                        f"Invalid include format: '{param}'. Expected 'ResourceType:parameter'"
+                    )
+
+        if revinclude_params:
+            for param in revinclude_params:
+                if ':' not in param:
+                    errors.append(
+                        f"Invalid revinclude format: '{param}'. Expected 'ResourceType:parameter'"
+                    )
+
+        return errors
+
+    def _process_bundle(self, bundle: Dict[str, Any]) -> Dict[str, Any]:
+        """Process FHIR Bundle response and extract pagination information."""
+        from urllib.parse import parse_qs, quote, urlparse
+
+        result = {
+            'resourceType': bundle.get('resourceType', 'Bundle'),
+            'id': bundle.get('id'),
+            'type': bundle.get('type', 'searchset'),
+            'total': bundle.get('total'),
+            'entry': bundle.get('entry', []),
+            'link': bundle.get('link', []),
+        }
+
+        # Add total field if not present (some HealthLake responses may not include it)
+        if 'total' not in result or result['total'] is None:
+            result['total'] = len(result.get('entry', []))
+
+        # Extract next URL from Bundle links and handle encoding issues
+        next_url = None
+        for link in bundle.get('link', []):
+            if link.get('relation') == 'next':
+                next_url = link.get('url', '')
+                break
+
+        # Process the next URL to handle HealthLake pagination encoding issues
+        next_token = None
+        if next_url:
+            try:
+                # Parse the URL to handle encoding issues
+                link_parse = urlparse(next_url)
+                link_qs = parse_qs(link_parse.query)
+
+                if 'page' in link_qs:
+                    # Encode the page parameter to prevent auth errors
+                    encoded_page = quote(link_qs['page'][0])
+
+                    # Reconstruct the URL with properly encoded page parameter
+                    next_link_values = {
+                        'scheme': link_parse.scheme,
+                        'hostname': link_parse.hostname,
+                        'path': link_parse.path,
+                        'count': '?_count=' + link_qs['_count'][0] if '_count' in link_qs else '',
+                        'page': '&page=' + encoded_page,
+                    }
+                    next_token = '{scheme}://{hostname}{path}{count}{page}'.format(
+                        **next_link_values
+                    )
+                else:
+                    # Fallback to original URL if no page parameter found
+                    next_token = next_url
+
+            except Exception as e:
+                logger.warning(f'Error processing next URL: {e}, using original URL')
+                next_token = next_url
+
+        result['pagination'] = {'has_next': bool(next_token), 'next_token': next_token}
+        return result
+
+    def _process_bundle_with_includes(self, bundle: Dict[str, Any]) -> Dict[str, Any]:
+        """Process bundle and organize included resources."""
+        # Separate main results from included resources
+        main_entries = []
+        included_entries = []
+
+        for entry in bundle.get('entry', []):
+            search_mode = entry.get('search', {}).get('mode', 'match')
+            if search_mode == 'match':
+                main_entries.append(entry)
+            elif search_mode == 'include':
+                included_entries.append(entry)
+
+        # Organize included resources by type and ID for easier access
+        included_by_type: Dict[str, Dict[str, Dict[str, Any]]] = {}
+        for entry in included_entries:
+            resource = entry.get('resource', {})
+            resource_type = resource.get('resourceType')
+            resource_id = resource.get('id')
+
+            if resource_type and resource_id:
+                if resource_type not in included_by_type:
+                    included_by_type[resource_type] = {}
+                included_by_type[resource_type][resource_id] = resource
+
+        # Build response
+        result = {
+            'resourceType': bundle.get('resourceType', 'Bundle'),
+            'id': bundle.get('id'),
+            'type': bundle.get('type', 'searchset'),
+            'total': bundle.get('total', len(main_entries)),  # Use main_entries count as fallback
+            'entry': main_entries,
+            'link': bundle.get('link', []),
+        }
+
+        # Add organized included resources
+        if included_by_type:
+            result['included'] = included_by_type
+
+        # Add pagination metadata
+        next_url = None
+        for link in bundle.get('link', []):
+            if link.get('relation') == 'next':
+                next_url = link.get('url', '')
+                break
+
+        result['pagination'] = {'has_next': bool(next_url), 'next_token': next_url}
+
+        return result
+
+    def _create_helpful_error_message(self, error: Exception) -> str:
+        """Create helpful error messages without over-engineering."""
+        error_str = str(error)
+
+        # Simple, actionable guidance
+        if '400' in error_str:
+            return (
+                f'HealthLake rejected the search request: {error_str}\n\n'
+                '💡 Common solutions:\n'
+                '• Check parameter names and values\n'
+                '• Try simpler search parameters\n'
+                '• Verify resource type is correct\n'
+                '• Some advanced FHIR features may not be supported'
+            )
+        elif 'validation' in error_str.lower():
+            return (
+                f'Search validation failed: {error_str}\n\n'
+                '💡 Check your search parameters format and try again.'
+            )
+        else:
+            return f'Search error: {error_str}'
+
+    async def patient_everything(
+        self,
+        datastore_id: str,
+        patient_id: str,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        count: int = 100,
+        next_token: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Retrieve all resources related to a specific patient using $patient-everything operation."""
+        try:
+            endpoint = self._get_fhir_endpoint(datastore_id)
+            auth = self._get_aws_auth()
+
+            # Ensure count is within valid range
+            count = max(1, min(count, MAX_SEARCH_COUNT))
+
+            async with httpx.AsyncClient() as client:
+                if next_token:
+                    # For pagination, use the next_token URL directly
+                    response = await client.get(next_token, auth=auth)
+                else:
+                    # Build $patient-everything URL
+                    url = urljoin(endpoint, f'Patient/{patient_id}/$everything')
+
+                    # Build query parameters
+                    params = {'_count': str(count)}
+                    if start:
+                        params['start'] = start
+                    if end:
+                        params['end'] = end
+
+                    logger.debug(f'Query params: {params}')
+
+                    response = await client.get(url, params=params, auth=auth)
+
+                response.raise_for_status()
+                fhir_bundle = response.json()
+
+                # Process the response
+                result = self._process_bundle(fhir_bundle)
+                return result
+
+        except Exception as e:
+            logger.error(f'Error in patient everything operation: {e}')
+            raise
+
+    async def search_resources(
+        self,
+        datastore_id: str,
+        resource_type: str,
+        search_params: Optional[Dict[str, str]] = None,
+        include_params: Optional[List[str]] = None,
+        revinclude_params: Optional[List[str]] = None,
+        chained_params: Optional[Dict[str, str]] = None,
+        count: int = 100,
+        next_token: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Search for FHIR resources."""
+        try:
+            endpoint = self._get_fhir_endpoint(datastore_id)
+            auth = self._get_aws_auth()
+
+            # Ensure count is within valid range
+            count = max(1, min(count, MAX_SEARCH_COUNT))
+
+            # Minimal validation
+            validation_errors = self._validate_search_request(
+                resource_type=resource_type,
+                search_params=search_params,
+                include_params=include_params,
+                revinclude_params=revinclude_params,
+                chained_params=chained_params,
+                count=count,
+            )
+
+            if validation_errors:
+                raise FHIRSearchError(f'Search validation failed: {"; ".join(validation_errors)}')
+
+            # Build request
+            url, form_data = self._build_search_request(
+                base_url=endpoint,
+                resource_type=resource_type,
+                search_params=search_params,
+                include_params=include_params,
+                revinclude_params=revinclude_params,
+                chained_params=chained_params,
+                count=count,
+                next_token=next_token,
+            )
+
+            async with httpx.AsyncClient() as client:
+                if next_token:
+                    # For pagination, use GET with the next_token URL
+                    response = await client.get(next_token, auth=auth)
+                else:
+                    # Use POST for search
+                    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+
+                    logger.debug(f'Search URL: {url}')
+                    logger.debug(f'Form data: {form_data}')
+
+                    response = await client.post(url, data=form_data, headers=headers, auth=auth)
+
+                response.raise_for_status()
+                fhir_bundle = response.json()
+
+                # Process response with appropriate handling
+                has_includes = bool(include_params or revinclude_params)
+                if has_includes:
+                    result = self._process_bundle_with_includes(fhir_bundle)
+                else:
+                    result = self._process_bundle(fhir_bundle)
+
+                return result
+
+        except FHIRSearchError:
+            # Re-raise FHIR search errors as-is
+            raise
+        except Exception as e:
+            logger.error(f'Error searching resources: {e}')
+            # Provide helpful error message
+            raise Exception(self._create_helpful_error_message(e))
+
+    async def read_resource(
+        self, datastore_id: str, resource_type: str, resource_id: str
+    ) -> Dict[str, Any]:
+        """Get a specific FHIR resource by ID."""
+        try:
+            endpoint = self._get_fhir_endpoint(datastore_id)
+            url = urljoin(endpoint, f'{resource_type}/{resource_id}')
+
+            auth = self._get_aws_auth()
+
+            async with httpx.AsyncClient() as client:
+                response = await client.get(url, auth=auth)
+                response.raise_for_status()
+                return response.json()
+
+        except Exception as e:
+            logger.error(f'Error getting resource: {e}')
+            raise
+
+    async def create_resource(
+        self, datastore_id: str, resource_type: str, resource_data: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """Create a new FHIR resource."""
+        try:
+            endpoint = self._get_fhir_endpoint(datastore_id)
+            url = urljoin(endpoint, resource_type)
+
+            # Ensure resource has correct resourceType
+            resource_data['resourceType'] = resource_type
+
+            auth = self._get_aws_auth()
+
+            async with httpx.AsyncClient() as client:
+                response = await client.post(url, json=resource_data, auth=auth)
+                response.raise_for_status()
+                return response.json()
+
+        except Exception as e:
+            logger.error(f'Error creating resource: {e}')
+            raise
+
+    async def update_resource(
+        self,
+        datastore_id: str,
+        resource_type: str,
+        resource_id: str,
+        resource_data: Dict[str, Any],
+    ) -> Dict[str, Any]:
+        """Update an existing FHIR resource."""
+        try:
+            endpoint = self._get_fhir_endpoint(datastore_id)
+            url = urljoin(endpoint, f'{resource_type}/{resource_id}')
+
+            # Ensure resource has correct resourceType and id
+            resource_data['resourceType'] = resource_type
+            resource_data['id'] = resource_id
+
+            auth = self._get_aws_auth()
+
+            async with httpx.AsyncClient() as client:
+                response = await client.put(url, json=resource_data, auth=auth)
+                response.raise_for_status()
+                return response.json()
+
+        except Exception as e:
+            logger.error(f'Error updating resource: {e}')
+            raise
+
+    async def delete_resource(
+        self, datastore_id: str, resource_type: str, resource_id: str
+    ) -> Dict[str, Any]:
+        """Delete a FHIR resource."""
+        try:
+            endpoint = self._get_fhir_endpoint(datastore_id)
+            url = urljoin(endpoint, f'{resource_type}/{resource_id}')
+
+            auth = self._get_aws_auth()
+
+            async with httpx.AsyncClient() as client:
+                response = await client.delete(url, auth=auth)
+                response.raise_for_status()
+                return {'status': 'deleted', 'resourceType': resource_type, 'id': resource_id}
+
+        except Exception as e:
+            logger.error(f'Error deleting resource: {e}')
+            raise
+
+    async def start_import_job(
+        self,
+        datastore_id: str,
+        input_data_config: Dict[str, Any],
+        job_output_data_config: Dict[str, Any],
+        data_access_role_arn: str,
+        job_name: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Start a FHIR import job."""
+        try:
+            # Validate required parameters
+            if not input_data_config.get('s3_uri'):
+                raise ValueError("input_data_config must contain 's3_uri'")
+
+            if not job_output_data_config.get('s3_configuration', {}).get('s3_uri'):
+                raise ValueError(
+                    'job_output_data_config must contain s3_configuration with s3_uri'
+                )
+
+            # Transform input_data_config to match AWS API format
+            input_config = {'S3Uri': input_data_config['s3_uri']}
+
+            # Transform job_output_data_config to match AWS API format
+            s3_config = job_output_data_config['s3_configuration']
+            output_config = {'S3Configuration': {'S3Uri': s3_config['s3_uri']}}
+
+            # Add KMS key if provided
+            if s3_config.get('kms_key_id'):
+                output_config['S3Configuration']['KmsKeyId'] = s3_config['kms_key_id']
+
+            kwargs = {
+                'DatastoreId': datastore_id,
+                'InputDataConfig': input_config,
+                'JobOutputDataConfig': output_config,
+                'DataAccessRoleArn': data_access_role_arn,
+            }
+
+            if job_name:
+                kwargs['JobName'] = job_name
+
+            response = self.healthlake_client.start_fhir_import_job(**kwargs)
+            return response
+
+        except ClientError as e:
+            error_code = e.response.get('Error', {}).get('Code', 'Unknown')
+            error_message = e.response.get('Error', {}).get('Message', str(e))
+
+            # Provide more specific error messages
+            if error_code == 'ValidationException':
+                logger.error(f'Validation error starting import job: {error_message}')
+                raise ValueError(f'Invalid parameters: {error_message}')
+            elif error_code == 'AccessDeniedException':
+                logger.error(f'Access denied starting import job: {error_message}')
+                raise PermissionError(f'Access denied: {error_message}')
+            elif error_code == 'ResourceNotFoundException':
+                logger.error(f'Resource not found starting import job: {error_message}')
+                raise ValueError(f'Datastore not found: {error_message}')
+            else:
+                logger.error(f'Error starting import job: {error_message}')
+                raise
+
+    async def start_export_job(
+        self,
+        datastore_id: str,
+        output_data_config: Dict[str, Any],
+        data_access_role_arn: str,
+        job_name: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Start a FHIR export job."""
+        try:
+            kwargs = {
+                'DatastoreId': datastore_id,
+                'OutputDataConfig': output_data_config,
+                'DataAccessRoleArn': data_access_role_arn,
+            }
+            if job_name:
+                kwargs['JobName'] = job_name
+
+            response = self.healthlake_client.start_fhir_export_job(**kwargs)
+            return response
+        except ClientError as e:
+            logger.error(f'Error starting export job: {e}')
+            raise
+
+    async def list_jobs(
+        self, datastore_id: str, job_status: Optional[str] = None, job_type: Optional[str] = None
+    ) -> Dict[str, Any]:
+        """List FHIR import/export jobs."""
+        try:
+            if job_type == 'IMPORT':
+                kwargs: Dict[str, Any] = {'DatastoreId': datastore_id}
+                if job_status:
+                    kwargs['JobStatus'] = job_status
+                response = self.healthlake_client.list_fhir_import_jobs(**kwargs)
+            elif job_type == 'EXPORT':
+                kwargs = {'DatastoreId': datastore_id}
+                if job_status:
+                    kwargs['JobStatus'] = job_status
+                response = self.healthlake_client.list_fhir_export_jobs(**kwargs)
+            else:
+                # List both import and export jobs
+                import_jobs = self.healthlake_client.list_fhir_import_jobs(
+                    DatastoreId=datastore_id
+                )
+                export_jobs = self.healthlake_client.list_fhir_export_jobs(
+                    DatastoreId=datastore_id
+                )
+                response = {
+                    'ImportJobs': import_jobs.get('ImportJobPropertiesList', []),
+                    'ExportJobs': export_jobs.get('ExportJobPropertiesList', []),
+                }
+            return response
+        except ClientError as e:
+            logger.error(f'Error listing jobs: {e}')
+            # Return error information instead of crashing
+            return {'error': True, 'message': str(e), 'ImportJobs': [], 'ExportJobs': []}
+
+    def _get_aws_auth(self):
+        """Get AWS authentication for HTTP requests."""
+        try:
+            # Get AWS credentials from the session
+            credentials = self.session.get_credentials()
+            if not credentials:
+                raise NoCredentialsError()
+
+            # Create custom AWS authentication instance
+            auth = AWSAuth(credentials=credentials, region=self.region, service='healthlake')
+
+            return auth
+
+        except Exception as e:
+            logger.error(f'Failed to get AWS authentication: {e}')
+            raise
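
For context, a minimal usage sketch of the HealthLakeClient defined above in fhir_operations.py. This is not part of the published package: the region, the placeholder datastore ID, and the printed fields are illustrative assumptions; real datastore IDs are 32 characters, and AWS credentials must already be configured for the SigV4-signed requests to succeed.

import asyncio

from awslabs.healthlake_mcp_server.fhir_operations import (
    HealthLakeClient,
    validate_datastore_id,
)


async def main() -> None:
    # Assumes configured AWS credentials; the client raises NoCredentialsError otherwise.
    client = HealthLakeClient(region_name='us-east-1')

    # Hypothetical placeholder -- substitute a real 32-character datastore ID.
    datastore_id = validate_datastore_id('0' * 32)

    # search_resources POSTs form-encoded parameters to <endpoint>/Patient/_search
    # and returns the Bundle dict produced by _process_bundle.
    bundle = await client.search_resources(
        datastore_id=datastore_id,
        resource_type='Patient',
        search_params={'active': 'true'},
        count=10,
    )
    print(f'total={bundle["total"]}')

    # Server-driven pagination: _process_bundle exposes the Bundle's "next" link as a token.
    if bundle['pagination']['has_next']:
        page2 = await client.search_resources(
            datastore_id=datastore_id,
            resource_type='Patient',
            next_token=bundle['pagination']['next_token'],
        )
        print(f'second page entries={len(page2["entry"])}')


if __name__ == '__main__':
    asyncio.run(main())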