awslabs.healthlake-mcp-server 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,665 @@
+ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """AWS HealthLake MCP Server implementation."""
+
+ # Standard library imports
+ import json
+ from datetime import datetime
+ from typing import Any, Dict, List, Sequence
+
+ # Third-party imports
+ from botocore.exceptions import ClientError, NoCredentialsError
+ from loguru import logger
+ from mcp.server import Server
+ from mcp.types import Resource, TextContent, Tool
+ from pydantic import AnyUrl
+
+ # Local imports
+ from .fhir_operations import MAX_SEARCH_COUNT, HealthLakeClient, validate_datastore_id
+ from .models import (
+     CreateResourceRequest,
+     DatastoreFilter,
+     ExportJobConfig,
+     ImportJobConfig,
+     JobFilter,
+     UpdateResourceRequest,
+ )
+
+
+ # Tool categories for read-only mode
+ READ_ONLY_TOOLS = {
+     'list_datastores',
+     'get_datastore_details',
+     'read_fhir_resource',
+     'search_fhir_resources',
+     'patient_everything',
+     'list_fhir_jobs',
+ }
+
+ WRITE_TOOLS = {
+     'create_fhir_resource',
+     'update_fhir_resource',
+     'delete_fhir_resource',
+     'start_fhir_import_job',
+     'start_fhir_export_job',
+ }
+
+
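These two sets are meant to partition the full tool surface. A quick illustrative check (not part of the published module, and assuming it runs in this module's namespace) that they stay disjoint and jointly cover every handler registered in `ToolHandler` below:

```python
# Illustrative sanity check only; READ_ONLY_TOOLS and WRITE_TOOLS from this module.
assert READ_ONLY_TOOLS.isdisjoint(WRITE_TOOLS)
assert len(READ_ONLY_TOOLS | WRITE_TOOLS) == 11  # matches the 11 handlers in ToolHandler
```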
+ class DateTimeEncoder(json.JSONEncoder):
+     """Custom JSON encoder that handles datetime objects."""
+
+     def default(self, o):
+         """Convert datetime objects to ISO format strings."""
+         if isinstance(o, datetime):
+             return o.isoformat()
+         return super().default(o)
+
+
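`DateTimeEncoder` only intercepts `datetime` values (boto3 returns fields such as `CreatedAt` as `datetime` objects, which `json.dumps` cannot serialize on its own); everything else falls through to the stock encoder. A minimal illustration, assuming the class above is in scope:

```python
import json
from datetime import datetime

# DateTimeEncoder as defined in this module.
payload = {'DatastoreName': 'demo', 'CreatedAt': datetime(2024, 1, 1, 12, 30)}
print(json.dumps(payload, cls=DateTimeEncoder))
# {"DatastoreName": "demo", "CreatedAt": "2024-01-01T12:30:00"}
```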
+ class InputValidationError(Exception):
+     """Custom validation error for input parameters."""
+
+     pass
+
+
+ def validate_count(count: int) -> int:
+     """Validate and normalize count parameter."""
+     if count < 1 or count > MAX_SEARCH_COUNT:
+         raise InputValidationError(f'Count must be between 1 and {MAX_SEARCH_COUNT}')
+     return count
+
+
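`validate_count` rejects out-of-range values rather than clamping them, so bad input surfaces to the MCP client as a validation error. A short illustration, assuming `MAX_SEARCH_COUNT` is 100 (the value the tool schemas below advertise):

```python
# Illustrative only; validate_count and InputValidationError from this module.
validate_count(50)   # returns 50 unchanged
try:
    validate_count(0)
except InputValidationError as exc:
    print(exc)       # Count must be between 1 and 100
```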
+ def create_error_response(message: str, error_type: str = 'error') -> List[TextContent]:
+     """Create standardized error response."""
+     return [
+         TextContent(
+             type='text',
+             text=json.dumps({'error': True, 'type': error_type, 'message': message}, indent=2),
+         )
+     ]
+
+
+ def create_success_response(data: Any) -> List[TextContent]:
+     """Create standardized success response."""
+     return [TextContent(type='text', text=json.dumps(data, indent=2, cls=DateTimeEncoder))]
+
+
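Every tool result flows through one of these two helpers, so clients always receive a single `TextContent` item whose `text` is a JSON document, and errors are flagged with `"error": true`. For example (illustrative, using the helper defined above):

```python
err = create_error_response('Count must be between 1 and 100', 'validation_error')
print(err[0].text)
# {
#   "error": true,
#   "type": "validation_error",
#   "message": "Count must be between 1 and 100"
# }
```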
+ class ToolHandler:
+     """Handles tool dispatch and execution."""
+
+     def __init__(self, healthlake_client: HealthLakeClient, read_only: bool = False):
+         """Initialize tool handler with HealthLake client and read-only mode support."""
+         self.client = healthlake_client
+         self.read_only = read_only
+
+         # Define all possible handlers
+         all_handlers = {
+             'list_datastores': self._handle_list_datastores,
+             'get_datastore_details': self._handle_get_datastore,
+             'create_fhir_resource': self._handle_create,
+             'read_fhir_resource': self._handle_read,
+             'update_fhir_resource': self._handle_update,
+             'delete_fhir_resource': self._handle_delete,
+             'search_fhir_resources': self._handle_search,
+             'patient_everything': self._handle_patient_everything,
+             'start_fhir_import_job': self._handle_import_job,
+             'start_fhir_export_job': self._handle_export_job,
+             'list_fhir_jobs': self._handle_list_jobs,
+         }
+
+         # Filter handlers based on read-only mode
+         if read_only:
+             self.handlers = {k: v for k, v in all_handlers.items() if k in READ_ONLY_TOOLS}
+         else:
+             self.handlers = all_handlers
+
+     async def handle_tool(self, name: str, arguments: Dict[str, Any]) -> List[TextContent]:
+         """Dispatch tool call to appropriate handler with read-only safety check."""
+         if name not in self.handlers:
+             if self.read_only and name in WRITE_TOOLS:
+                 raise ValueError(f'Tool {name} not available in read-only mode')
+             else:
+                 raise ValueError(f'Unknown tool: {name}')
+
+         handler = self.handlers[name]
+         result = await handler(arguments)
+         return create_success_response(result)
+
+     async def _handle_list_datastores(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """List datastores, optionally filtered by status."""
+         filter_obj = DatastoreFilter(**args)
+         return await self.client.list_datastores(filter_status=filter_obj.status)
+
+     async def _handle_get_datastore(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Describe a single datastore."""
+         datastore_id = validate_datastore_id(args['datastore_id'])
+         return await self.client.get_datastore_details(datastore_id=datastore_id)
+
+     async def _handle_create(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Create a FHIR resource; blocked in read-only mode."""
+         if self.read_only:
+             raise ValueError('Create operation not allowed in read-only mode')
+
+         request = CreateResourceRequest(**args)
+         return await self.client.create_resource(
+             datastore_id=request.datastore_id,
+             resource_type=request.resource_type,
+             resource_data=request.resource_data,
+         )
+
+     async def _handle_read(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Read a single FHIR resource by type and ID."""
+         datastore_id = validate_datastore_id(args['datastore_id'])
+         return await self.client.read_resource(
+             datastore_id=datastore_id,
+             resource_type=args['resource_type'],
+             resource_id=args['resource_id'],
+         )
+
+     async def _handle_update(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Update a FHIR resource; blocked in read-only mode."""
+         if self.read_only:
+             raise ValueError('Update operation not allowed in read-only mode')
+
+         request = UpdateResourceRequest(**args)
+         return await self.client.update_resource(
+             datastore_id=request.datastore_id,
+             resource_type=request.resource_type,
+             resource_id=request.resource_id,
+             resource_data=request.resource_data,
+         )
+
+     async def _handle_delete(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Delete a FHIR resource; blocked in read-only mode."""
+         if self.read_only:
+             raise ValueError('Delete operation not allowed in read-only mode')
+
+         datastore_id = validate_datastore_id(args['datastore_id'])
+         return await self.client.delete_resource(
+             datastore_id=datastore_id,
+             resource_type=args['resource_type'],
+             resource_id=args['resource_id'],
+         )
+
+     async def _handle_search(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Search FHIR resources, with pagination via next_token."""
+         datastore_id = validate_datastore_id(args['datastore_id'])
+         count = validate_count(args.get('count', 100))
+
+         return await self.client.search_resources(
+             datastore_id=datastore_id,
+             resource_type=args['resource_type'],
+             search_params=args.get('search_params', {}),
+             include_params=args.get('include_params'),
+             revinclude_params=args.get('revinclude_params'),
+             chained_params=args.get('chained_params'),
+             count=count,
+             next_token=args.get('next_token'),
+         )
+
+     async def _handle_patient_everything(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Fetch all resources for one patient ($patient-everything)."""
+         datastore_id = validate_datastore_id(args['datastore_id'])
+         count = validate_count(args.get('count', 100))
+
+         return await self.client.patient_everything(
+             datastore_id=datastore_id,
+             patient_id=args['patient_id'],
+             start=args.get('start'),
+             end=args.get('end'),
+             count=count,
+             next_token=args.get('next_token'),
+         )
+
+     async def _handle_import_job(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Start a FHIR import job; blocked in read-only mode."""
+         if self.read_only:
+             raise ValueError('Import job operation not allowed in read-only mode')
+
+         request = ImportJobConfig(**args)
+         return await self.client.start_import_job(
+             datastore_id=request.datastore_id,
+             input_data_config=request.input_data_config,
+             job_output_data_config=args['job_output_data_config'],
+             data_access_role_arn=request.data_access_role_arn,
+             job_name=request.job_name,
+         )
+
+     async def _handle_export_job(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """Start a FHIR export job; blocked in read-only mode."""
+         if self.read_only:
+             raise ValueError('Export job operation not allowed in read-only mode')
+
+         request = ExportJobConfig(**args)
+         return await self.client.start_export_job(
+             datastore_id=request.datastore_id,
+             output_data_config=request.output_data_config,
+             data_access_role_arn=request.data_access_role_arn,
+             job_name=request.job_name,
+         )
+
+     async def _handle_list_jobs(self, args: Dict[str, Any]) -> Dict[str, Any]:
+         """List import/export jobs, optionally filtered by status and type."""
+         datastore_id = validate_datastore_id(args['datastore_id'])
+         filter_obj = JobFilter(job_status=args.get('job_status'), job_type=args.get('job_type'))
+
+         return await self.client.list_jobs(
+             datastore_id=datastore_id,
+             job_status=filter_obj.job_status,
+             job_type=filter_obj.job_type,
+         )
+
+
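The dispatch table makes the handler easy to exercise in isolation. A sketch using a stubbed client (the stub, the module path, and the 32-character datastore ID are assumptions for illustration; `validate_datastore_id` lives in `fhir_operations`):

```python
import asyncio
from unittest.mock import AsyncMock

# Module path assumed from the wheel name; adjust to the actual package layout.
# from awslabs.healthlake_mcp_server.server import ToolHandler

# Hypothetical stand-in for HealthLakeClient; illustration only.
stub_client = AsyncMock()
stub_client.search_resources.return_value = {'entry': [], 'total': 0}

handler = ToolHandler(stub_client, read_only=True)
arguments = {
    'datastore_id': 'a' * 32,  # assumed to satisfy validate_datastore_id
    'resource_type': 'Patient',
    'search_params': {'name:contains': 'smi', 'birthdate': 'ge1990-01-01'},
}
result = asyncio.run(handler.handle_tool('search_fhir_resources', arguments))
print(result[0].text)  # JSON-serialized stub response
```

In read-only mode the write handlers are never registered, so a call such as `handle_tool('delete_fhir_resource', ...)` raises `ValueError` before any client method runs.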
+ def create_healthlake_server(read_only: bool = False) -> Server:
+     """Create and configure the HealthLake MCP server."""
+     server = Server('healthlake-mcp-server')
+     healthlake_client = HealthLakeClient()
+     tool_handler = ToolHandler(healthlake_client, read_only=read_only)
+
+     @server.list_tools()
+     async def handle_list_tools() -> List[Tool]:
+         """List available HealthLake tools based on mode."""
+         # Define all tools
+         all_tools = [
+             # Datastore Management (foundational operations)
+             Tool(
+                 name='list_datastores',
+                 description='List all HealthLake datastores in the account',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'filter': {
+                             'type': 'string',
+                             'description': 'Filter datastores by status (CREATING, ACTIVE, DELETING, DELETED)',
+                             'enum': ['CREATING', 'ACTIVE', 'DELETING', 'DELETED'],
+                         }
+                     },
+                 },
+             ),
+             Tool(
+                 name='get_datastore_details',
+                 description='Get detailed information about a specific HealthLake datastore',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         }
+                     },
+                     'required': ['datastore_id'],
+                 },
+             ),
+             # CRUD Operations (core functionality)
+             Tool(
+                 name='create_fhir_resource',
+                 description='Create a new FHIR resource in HealthLake',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'resource_type': {'type': 'string', 'description': 'FHIR resource type'},
+                         'resource_data': {
+                             'type': 'object',
+                             'description': 'FHIR resource data as JSON object',
+                         },
+                     },
+                     'required': ['datastore_id', 'resource_type', 'resource_data'],
+                 },
+             ),
+             Tool(
+                 name='read_fhir_resource',
+                 description='Get a specific FHIR resource by ID',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'resource_type': {'type': 'string', 'description': 'FHIR resource type'},
+                         'resource_id': {'type': 'string', 'description': 'FHIR resource ID'},
+                     },
+                     'required': ['datastore_id', 'resource_type', 'resource_id'],
+                 },
+             ),
+             Tool(
+                 name='update_fhir_resource',
+                 description='Update an existing FHIR resource in HealthLake',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'resource_type': {'type': 'string', 'description': 'FHIR resource type'},
+                         'resource_id': {'type': 'string', 'description': 'FHIR resource ID'},
+                         'resource_data': {
+                             'type': 'object',
+                             'description': 'Updated FHIR resource data as JSON object',
+                         },
+                     },
+                     'required': ['datastore_id', 'resource_type', 'resource_id', 'resource_data'],
+                 },
+             ),
+             Tool(
+                 name='delete_fhir_resource',
+                 description='Delete a FHIR resource from HealthLake',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'resource_type': {'type': 'string', 'description': 'FHIR resource type'},
+                         'resource_id': {'type': 'string', 'description': 'FHIR resource ID'},
+                     },
+                     'required': ['datastore_id', 'resource_type', 'resource_id'],
+                 },
+             ),
+             # Advanced Search Operations
+             Tool(
+                 name='search_fhir_resources',
+                 description='Search for FHIR resources in HealthLake datastore with advanced search capabilities. Returns up to 100 results per call. If pagination.has_next is true, call this tool again with the next_token to get more results.',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'resource_type': {
+                             'type': 'string',
+                             'description': 'FHIR resource type (e.g., Patient, Observation, Condition)',
+                         },
+                         'search_params': {
+                             'type': 'object',
+                             'description': "Basic FHIR search parameters. Supports modifiers (e.g., 'name:contains'), prefixes (e.g., 'birthdate': 'ge1990-01-01'), and simple chaining (e.g., 'subject:Patient')",
+                             'additionalProperties': True,
+                         },
+                         'chained_params': {
+                             'type': 'object',
+                             'description': "Advanced chained search parameters. Key format: 'param.chain' or 'param:TargetType.chain' (e.g., {'subject.name': 'Smith', 'general-practitioner:Practitioner.name': 'Johnson'})",
+                             'additionalProperties': {'type': 'string'},
+                         },
+                         'include_params': {
+                             'type': 'array',
+                             'description': "Include related resources in the response. Format: 'ResourceType:parameter' or 'ResourceType:parameter:target-type' (e.g., ['Patient:general-practitioner', 'Observation:subject:Patient'])",
+                             'items': {'type': 'string'},
+                         },
+                         'revinclude_params': {
+                             'type': 'array',
+                             'description': "Include resources that reference the found resources. Format: 'ResourceType:parameter' (e.g., ['Observation:subject', 'Condition:subject'])",
+                             'items': {'type': 'string'},
+                         },
+                         'count': {
+                             'type': 'integer',
+                             'description': 'Maximum number of results to return (1-100, default: 100)',
+                             'minimum': 1,
+                             'maximum': 100,
+                             'default': 100,
+                         },
+                         'next_token': {
+                             'type': 'string',
+                             'description': "Pagination token for retrieving the next page of results. Use the complete URL from a previous response's pagination.next_token field. When provided, other search parameters are ignored.",
+                         },
+                     },
+                     'required': ['datastore_id', 'resource_type'],
+                 },
+             ),
+             Tool(
+                 name='patient_everything',
+                 description='Retrieve all resources related to a specific patient using the FHIR $patient-everything operation',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'patient_id': {'type': 'string', 'description': 'Patient resource ID'},
+                         'start': {
+                             'type': 'string',
+                             'description': 'Start date for filtering resources (YYYY-MM-DD format)',
+                         },
+                         'end': {
+                             'type': 'string',
+                             'description': 'End date for filtering resources (YYYY-MM-DD format)',
+                         },
+                         'count': {
+                             'type': 'integer',
+                             'description': 'Maximum number of results to return (1-100, default: 100)',
+                             'minimum': 1,
+                             'maximum': 100,
+                             'default': 100,
+                         },
+                         'next_token': {
+                             'type': 'string',
+                             'description': "Pagination token for retrieving the next page of results. Use the complete URL from a previous response's pagination.next_token field.",
+                         },
+                     },
+                     'required': ['datastore_id', 'patient_id'],
+                 },
+             ),
+             # Job Management Operations
+             Tool(
+                 name='start_fhir_import_job',
+                 description='Start a FHIR import job to load data into HealthLake',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'input_data_config': {
+                             'type': 'object',
+                             'description': 'Input data configuration',
+                             'properties': {
+                                 's3_uri': {
+                                     'type': 'string',
+                                     'description': 'S3 URI containing FHIR data',
+                                 }
+                             },
+                             'required': ['s3_uri'],
+                         },
+                         'job_output_data_config': {
+                             'type': 'object',
+                             'description': 'Output data configuration (required for import jobs)',
+                             'properties': {
+                                 's3_configuration': {
+                                     'type': 'object',
+                                     'properties': {
+                                         's3_uri': {
+                                             'type': 'string',
+                                             'description': 'S3 URI for job output/logs',
+                                         },
+                                         'kms_key_id': {
+                                             'type': 'string',
+                                             'description': 'KMS key ID for encryption (optional)',
+                                         },
+                                     },
+                                     'required': ['s3_uri'],
+                                 }
+                             },
+                             'required': ['s3_configuration'],
+                         },
+                         'data_access_role_arn': {
+                             'type': 'string',
+                             'description': 'IAM role ARN for data access',
+                         },
+                         'job_name': {'type': 'string', 'description': 'Name for the import job'},
+                     },
+                     'required': [
+                         'datastore_id',
+                         'input_data_config',
+                         'job_output_data_config',
+                         'data_access_role_arn',
+                     ],
+                 },
+             ),
+             Tool(
+                 name='start_fhir_export_job',
+                 description='Start a FHIR export job to export data from HealthLake',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'output_data_config': {
+                             'type': 'object',
+                             'description': 'Output data configuration',
+                             'properties': {
+                                 's3_configuration': {
+                                     'type': 'object',
+                                     'properties': {
+                                         's3_uri': {
+                                             'type': 'string',
+                                             'description': 'S3 URI for export destination',
+                                         },
+                                         'kms_key_id': {
+                                             'type': 'string',
+                                             'description': 'KMS key ID for encryption',
+                                         },
+                                     },
+                                     'required': ['s3_uri'],
+                                 }
+                             },
+                             'required': ['s3_configuration'],
+                         },
+                         'data_access_role_arn': {
+                             'type': 'string',
+                             'description': 'IAM role ARN for data access',
+                         },
+                         'job_name': {'type': 'string', 'description': 'Name for the export job'},
+                     },
+                     'required': ['datastore_id', 'output_data_config', 'data_access_role_arn'],
+                 },
+             ),
+             Tool(
+                 name='list_fhir_jobs',
+                 description='List FHIR import/export jobs',
+                 inputSchema={
+                     'type': 'object',
+                     'properties': {
+                         'datastore_id': {
+                             'type': 'string',
+                             'description': 'HealthLake datastore ID',
+                         },
+                         'job_status': {
+                             'type': 'string',
+                             'description': 'Filter jobs by status',
+                             'enum': [
+                                 'SUBMITTED',
+                                 'IN_PROGRESS',
+                                 'COMPLETED',
+                                 'FAILED',
+                                 'STOP_REQUESTED',
+                                 'STOPPED',
+                             ],
+                         },
+                         'job_type': {
+                             'type': 'string',
+                             'description': 'Type of job to list',
+                             'enum': ['IMPORT', 'EXPORT'],
+                         },
+                     },
+                     'required': ['datastore_id'],
+                 },
+             ),
+         ]
+
+         # Filter tools based on read-only mode
+         if read_only:
+             return [tool for tool in all_tools if tool.name in READ_ONLY_TOOLS]
+         else:
+             return all_tools
+
+     @server.list_resources()
+     async def handle_list_resources() -> List[Resource]:
+         """List available HealthLake datastores as discoverable resources."""
+         try:
+             response = await healthlake_client.list_datastores()
+             return [
+                 Resource(
+                     uri=AnyUrl(f'healthlake://datastore/{ds["DatastoreId"]}'),
+                     name=f'{"✅" if ds["DatastoreStatus"] == "ACTIVE" else "⏳"} {ds.get("DatastoreName", "Unnamed")} ({ds["DatastoreStatus"]})',
+                     description=f'FHIR {ds["DatastoreTypeVersion"]} datastore\nCreated: {ds["CreatedAt"].strftime("%Y-%m-%d")}\nEndpoint: {ds["DatastoreEndpoint"]}\nID: {ds["DatastoreId"]}',
+                     mimeType='application/json',
+                 )
+                 for ds in response.get('DatastorePropertiesList', [])
+             ]
+         except Exception as e:
+             logger.error(f'Error listing datastore resources: {e}')
+             return []
+
+     @server.read_resource()
+     async def handle_read_resource(uri: AnyUrl) -> str:
+         """Read detailed datastore information."""
+         uri_str = str(uri)
+         if not uri_str.startswith('healthlake://datastore/'):
+             raise ValueError(f'Unknown resource URI: {uri_str}')
+         datastore_id = uri_str.split('/')[-1]
+         return json.dumps(
+             await healthlake_client.get_datastore_details(datastore_id),
+             indent=2,
+             cls=DateTimeEncoder,
+         )
+
+     @server.call_tool()
+     async def handle_call_tool(name: str, arguments: Dict[str, Any]) -> Sequence[TextContent]:
+         """Handle tool calls using dispatch pattern."""
+         try:
+             return await tool_handler.handle_tool(name, arguments)
+         except (InputValidationError, ValueError) as e:
+             if 'read-only mode' in str(e):
+                 logger.warning(f'Read-only mode violation attempt: {name}')
+                 return create_error_response(
+                     f'Operation {name} not available in read-only mode. '
+                     'Remove --readonly flag to enable write operations.',
+                     'read_only_violation',
+                 )
+             else:
+                 logger.warning(f'Validation error in {name}: {e}')
+                 return create_error_response(str(e), 'validation_error')
+         except ClientError as e:
+             error_code = e.response['Error']['Code']
+             logger.error(f'AWS error in {name}: {error_code}')
+             errors = {
+                 'ResourceNotFoundException': ('Resource not found', 'not_found'),
+                 'ValidationException': (
+                     f'Invalid parameters: {e.response["Error"]["Message"]}',
+                     'validation_error',
+                 ),
+             }
+             msg, typ = errors.get(error_code, ('AWS service error', 'service_error'))
+             return create_error_response(msg, typ)
+         except NoCredentialsError:
+             logger.error(f'Credentials error in {name}')
+             return create_error_response('AWS credentials not configured', 'auth_error')
+         except Exception:
+             logger.exception(f'Unexpected error in {name}')
+             return create_error_response('Internal server error', 'server_error')
+
+     return server
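The factory returns an unwired `Server`; transport setup is left to the caller. A minimal sketch of serving it over stdio, assuming the standard `mcp` SDK stdio helpers (the published package may ship its own entry point instead):

```python
import asyncio
from mcp.server.stdio import stdio_server

# create_healthlake_server as defined in this module.
async def main() -> None:
    server = create_healthlake_server(read_only=True)
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, server.create_initialization_options())

if __name__ == '__main__':
    asyncio.run(main())
```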