awslabs.dynamodb-mcp-server 2.0.1__py3-none-any.whl → 2.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of awslabs.dynamodb-mcp-server might be problematic.

awslabs/__init__.py CHANGED
@@ -14,3 +14,23 @@
 
  # This file is part of the awslabs namespace.
  # It is intentionally minimal to support PEP 420 namespace packages.
+
+ # Namespace Package Configuration
+ #
+ # This line resolves namespace conflicts when multiple packages share the 'awslabs' namespace prefix.
+ # Without this configuration, test suites fail and build issues occur because Python cannot properly
+ # resolve which package owns the 'awslabs' namespace when both 'awslabs.dynamodb-mcp-server' and
+ # 'awslabs.mysql-mcp-server' are installed in the same environment.
+ #
+ # The extend_path() function implements PEP 420 namespace packages, allowing multiple distributions
+ # to contribute modules to the same namespace. This ensures that:
+ # 1. Both DynamoDB and MySQL MCP servers can coexist in the same Python environment
+ # 2. Import statements like 'from awslabs.dynamodb_mcp_server import ...' work correctly
+ # 3. Test discovery and execution functions properly across both packages
+ # 4. Build processes complete successfully without namespace collision errors
+ #
+ # This is the standard solution for namespace packages in Python and is required for proper
+ # multi-package namespace support in the awslabs ecosystem.
+
+ # Extend namespace to include installed packages
+ __path__ = __import__('pkgutil').extend_path(__path__, __name__)
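For orientation, a minimal sketch of what this line enables once both wheels are installed; the mysql import is shown commented out because it requires the separate awslabs.mysql-mcp-server distribution:

```python
# Both subpackages resolve under the shared 'awslabs' namespace, each from its own wheel.
from awslabs.dynamodb_mcp_server import server as ddb_server
# from awslabs.mysql_mcp_server import server as mysql_server  # needs awslabs.mysql-mcp-server installed
```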
@@ -14,4 +14,4 @@
 
  """awslabs.dynamodb-mcp-server"""
 
- __version__ = '2.0.1'
+ __version__ = '2.0.2'
@@ -0,0 +1,210 @@
+ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Source Database Analysis SQL Query Resources for DynamoDB Data Modeling."""
+
+ from typing import Any, Dict
+
+
+ # SQL Query Templates for MySQL
+ mysql_analysis_queries = {
+     'performance_schema_check': {
+         'name': 'Performance Schema Status Check',
+         'description': 'Returns the status of the performance_schema system variable (ON/OFF)',
+         'sql': 'SELECT @@performance_schema;',
+         'parameters': [],
+     },
+     'query_pattern_analysis': {
+         'name': 'Query Pattern Analysis',
+         'description': 'Returns query patterns from Performance Schema with execution counts, calculated RPS, average execution time, average rows examined per execution, scan counts, execution timeframes, and SQL complexity classification',
+         'sql': """SELECT
+             -- Basic pattern information
+             DIGEST_TEXT as query_pattern,
+             COUNT_STAR as frequency,
+             -- Timing information
+             FIRST_SEEN as first_seen,
+             LAST_SEEN as last_seen
+         FROM performance_schema.events_statements_summary_by_digest
+         WHERE SCHEMA_NAME = '{target_database}'
+           AND COUNT_STAR > 0
+           AND LAST_SEEN >= DATE_SUB(NOW(), INTERVAL {pattern_analysis_days} DAY)
+           -- Exclude system and administrative queries
+           AND DIGEST_TEXT NOT LIKE 'SET%'
+           AND DIGEST_TEXT NOT LIKE 'USE %'
+           AND DIGEST_TEXT NOT LIKE 'SHOW%'
+           AND DIGEST_TEXT NOT LIKE '/* RDS Data API */%'
+           AND DIGEST_TEXT NOT LIKE '%information_schema%'
+           AND DIGEST_TEXT NOT LIKE '%performance_schema%'
+           AND DIGEST_TEXT NOT LIKE '%mysql.%'
+           AND DIGEST_TEXT NOT LIKE 'SELECT @@%'
+           AND DIGEST_TEXT NOT LIKE '%sys.%'
+           AND DIGEST_TEXT NOT LIKE 'select ?'
+           AND DIGEST_TEXT NOT LIKE '%mysql.general_log%'
+           AND DIGEST_TEXT NOT LIKE 'DESCRIBE %'
+           AND DIGEST_TEXT NOT LIKE 'EXPLAIN %'
+           AND DIGEST_TEXT NOT LIKE '%configured_database%'
+           AND DIGEST_TEXT NOT LIKE 'FLUSH %'
+           AND DIGEST_TEXT NOT LIKE 'RESET %'
+           AND DIGEST_TEXT NOT LIKE 'OPTIMIZE %'
+           AND DIGEST_TEXT NOT LIKE 'ANALYZE %'
+           AND DIGEST_TEXT NOT LIKE 'CHECK %'
+           AND DIGEST_TEXT NOT LIKE 'REPAIR %'
+           AND DIGEST_TEXT NOT LIKE '%@@default_storage_engine%'
+           AND DIGEST_TEXT NOT LIKE '%@%:=%'
+           AND DIGEST_TEXT NOT LIKE '%MD5%'
+           AND DIGEST_TEXT NOT LIKE '%SHA%'
+           AND DIGEST_TEXT NOT LIKE '%CONCAT_WS%'
+         ORDER BY frequency DESC;""",
+         'parameters': ['target_database', 'pattern_analysis_days'],
+     },
+     'table_analysis': {
+         'name': 'Table Structure Analysis',
+         'description': 'Returns table statistics including row counts, data size in MB, index size in MB, column counts, foreign key counts, and creation/modification timestamps',
+         'sql': """SELECT
+             TABLE_NAME,
+             TABLE_ROWS,
+             -- Storage sizes in MB
+             ROUND(DATA_LENGTH/1024/1024, 2) as datamb,
+             ROUND(INDEX_LENGTH/1024/1024, 2) as indexmb,
+             -- Count columns in this table
+             (SELECT COUNT(*) FROM information_schema.COLUMNS c
+              WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA AND c.TABLE_NAME = t.TABLE_NAME) as columncount,
+             -- Count foreign keys in this table
+             (SELECT COUNT(*) FROM information_schema.KEY_COLUMN_USAGE k
+              WHERE k.TABLE_SCHEMA = t.TABLE_SCHEMA AND k.TABLE_NAME = t.TABLE_NAME
+                AND k.REFERENCED_TABLE_NAME IS NOT NULL) as fkcount,
+             CREATE_TIME,
+             UPDATE_TIME
+         FROM information_schema.TABLES t
+         WHERE TABLE_SCHEMA = '{target_database}'
+         ORDER BY TABLE_ROWS DESC;""",
+         'parameters': ['target_database'],
+     },
+     'column_analysis': {
+         'name': 'Column Information Analysis',
+         'description': 'Returns all column definitions including table name, column name, data type, nullability, key type, default value, and extra attributes',
+         'sql': """SELECT
+             TABLE_NAME,
+             COLUMN_NAME,
+             COLUMN_TYPE,
+             IS_NULLABLE,
+             COLUMN_KEY,
+             COLUMN_DEFAULT,
+             EXTRA
+         FROM information_schema.COLUMNS
+         WHERE TABLE_SCHEMA = '{target_database}'
+         ORDER BY TABLE_NAME, ORDINAL_POSITION;""",
+         'parameters': ['target_database'],
+     },
+     'index_analysis': {
+         'name': 'Index Statistics Analysis',
+         'description': 'Returns index structure details including table name, index name, column name, uniqueness flag, and column position within each index',
+         'sql': """SELECT
+             TABLE_NAME,
+             INDEX_NAME,
+             COLUMN_NAME,
+             NON_UNIQUE,
+             SEQ_IN_INDEX
+         FROM information_schema.STATISTICS
+         WHERE TABLE_SCHEMA = '{target_database}'
+         ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX;""",
+         'parameters': ['target_database'],
+     },
+     'foreign_key_analysis': {
+         'name': 'Foreign Key Relationship Analysis',
+         'description': 'Returns foreign key relationships including constraint names, child/parent table and column mappings, referential action rules, and estimated relationship cardinality',
+         'sql': """SELECT
+             kcu.CONSTRAINT_NAME,
+             kcu.TABLE_NAME as child_table,
+             kcu.COLUMN_NAME as child_column,
+             kcu.REFERENCED_TABLE_NAME as parent_table,
+             kcu.REFERENCED_COLUMN_NAME as parent_column,
+             rc.UPDATE_RULE,
+             rc.DELETE_RULE,
+             -- Estimate relationship cardinality based on unique constraints
+             CASE
+                 WHEN EXISTS (
+                     SELECT 1 FROM information_schema.STATISTICS s
+                     WHERE s.TABLE_SCHEMA = '{target_database}'
+                       AND s.TABLE_NAME = kcu.TABLE_NAME
+                       AND s.COLUMN_NAME = kcu.COLUMN_NAME
+                       AND s.NON_UNIQUE = 0 -- Unique constraint exists
+                       AND (SELECT COUNT(*) FROM information_schema.KEY_COLUMN_USAGE kcu2
+                            WHERE kcu2.CONSTRAINT_NAME = s.INDEX_NAME
+                              AND kcu2.TABLE_SCHEMA = s.TABLE_SCHEMA) = 1 -- Single column constraint
+                 ) THEN '1:1 or 1:0..1'
+                 ELSE '1:Many'
+             END as estimated_cardinality
+         FROM information_schema.KEY_COLUMN_USAGE kcu
+         LEFT JOIN information_schema.REFERENTIAL_CONSTRAINTS rc
+             ON kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME
+             AND kcu.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA
+         WHERE kcu.TABLE_SCHEMA = '{target_database}'
+           AND kcu.REFERENCED_TABLE_NAME IS NOT NULL -- Only foreign key constraints
+         ORDER BY kcu.TABLE_NAME, kcu.COLUMN_NAME;""",
+         'parameters': ['target_database'],
+     },
+     'database_objects': {
+         'name': 'Database Objects Summary',
+         'description': 'Returns object counts and concatenated names grouped by object type: tables, triggers, stored procedures, and functions',
+         'sql': """SELECT
+             'Tables' as object_type,
+             COUNT(*) as count,
+             GROUP_CONCAT(TABLE_NAME) as names
+         FROM information_schema.TABLES
+         WHERE TABLE_SCHEMA = '{target_database}'
+         UNION ALL
+         SELECT
+             'Triggers' as object_type,
+             COUNT(*) as count,
+             COALESCE(GROUP_CONCAT(TRIGGER_NAME), 'None') as names
+         FROM information_schema.TRIGGERS
+         WHERE TRIGGER_SCHEMA = '{target_database}'
+         UNION ALL
+         SELECT
+             'Stored Procedures' as object_type,
+             COUNT(*) as count,
+             COALESCE(GROUP_CONCAT(ROUTINE_NAME), 'None') as names
+         FROM information_schema.ROUTINES
+         WHERE ROUTINE_SCHEMA = '{target_database}'
+           AND ROUTINE_TYPE = 'PROCEDURE'
+         UNION ALL
+         SELECT
+             'Functions' as object_type,
+             COUNT(*) as count,
+             COALESCE(GROUP_CONCAT(ROUTINE_NAME), 'None') as names
+         FROM information_schema.ROUTINES
+         WHERE ROUTINE_SCHEMA = '{target_database}'
+           AND ROUTINE_TYPE = 'FUNCTION';""",
+         'parameters': ['target_database'],
+     },
+ }
+
+
+ def get_query_resource(query_name: str, max_query_results: int, **params) -> Dict[str, Any]:
+     """Get a SQL query resource with parameters substituted."""
+     if query_name not in mysql_analysis_queries:
+         raise ValueError(f"Query '{query_name}' not found")
+
+     query_info = mysql_analysis_queries[query_name].copy()
+
+     # Substitute parameters in SQL
+     if params:
+         query_info['sql'] = query_info['sql'].format(**params)
+
+     # Apply LIMIT to all queries
+     sql = query_info['sql'].rstrip(';')
+     query_info['sql'] = f'{sql} LIMIT {max_query_results};'
+
+     return query_info
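For context, a minimal sketch of how these query resources might be consumed; the database name and analysis window below are illustrative placeholders, not values from the release:

```python
from awslabs.dynamodb_mcp_server.database_analysis_queries import get_query_resource

# 'appdb' and the 30-day window are hypothetical example values.
query = get_query_resource(
    'query_pattern_analysis',
    max_query_results=500,
    target_database='appdb',
    pattern_analysis_days=30,
)
print(query['name'])       # 'Query Pattern Analysis'
print(query['sql'][-20:])  # the rendered SQL now ends with '... LIMIT 500;'
```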
@@ -0,0 +1,383 @@
+ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Database analyzer classes for source database analysis."""
+
+ import json
+ import os
+ from awslabs.dynamodb_mcp_server.database_analysis_queries import get_query_resource
+ from awslabs.mysql_mcp_server.server import DBConnection, DummyCtx
+ from awslabs.mysql_mcp_server.server import run_query as mysql_query
+ from datetime import datetime
+ from loguru import logger
+ from typing import Any, Dict, List, Tuple
+
+
+ DEFAULT_ANALYSIS_DAYS = 30
+ DEFAULT_MAX_QUERY_RESULTS = 500
+ SECONDS_PER_DAY = 86400
+ DDL_PREFIXES = ('CREATE ', 'DROP ', 'ALTER ', 'TRUNCATE ')
+
+
+ class DatabaseAnalyzer:
+     """Base class for database analyzers."""
+
+     @staticmethod
+     def build_connection_params(source_db_type: str, **kwargs) -> Dict[str, Any]:
+         """Build connection parameters for database analysis.
+
+         Args:
+             source_db_type: Type of source database (e.g., 'mysql')
+             **kwargs: Connection parameters (aws_cluster_arn, aws_secret_arn, etc.)
+
+         Returns:
+             Dictionary of connection parameters
+
+         Raises:
+             ValueError: If database type is not supported
+         """
+         if source_db_type == 'mysql':
+             user_provided_dir = kwargs.get('output_dir')
+
+             # Validate user-provided directory
+             if not os.path.isabs(user_provided_dir):
+                 raise ValueError(f'Output directory must be an absolute path: {user_provided_dir}')
+             if not os.path.isdir(user_provided_dir) or not os.access(user_provided_dir, os.W_OK):
+                 raise ValueError(
+                     f'Output directory does not exist or is not writable: {user_provided_dir}'
+                 )
+             output_dir = user_provided_dir
+
+             return {
+                 'cluster_arn': kwargs.get('aws_cluster_arn') or os.getenv('MYSQL_CLUSTER_ARN'),
+                 'secret_arn': kwargs.get('aws_secret_arn') or os.getenv('MYSQL_SECRET_ARN'),
+                 'database': kwargs.get('database_name') or os.getenv('MYSQL_DATABASE'),
+                 'region': kwargs.get('aws_region') or os.getenv('AWS_REGION'),
+                 'max_results': kwargs.get('max_query_results')
+                 or int(os.getenv('MYSQL_MAX_QUERY_RESULTS', str(DEFAULT_MAX_QUERY_RESULTS))),
+                 'pattern_analysis_days': kwargs.get(
+                     'pattern_analysis_days', DEFAULT_ANALYSIS_DAYS
+                 ),
+                 'output_dir': output_dir,
+             }
+         raise ValueError(f'Unsupported database type: {source_db_type}')
+
+     @staticmethod
+     def validate_connection_params(
+         source_db_type: str, connection_params: Dict[str, Any]
+     ) -> Tuple[List[str], Dict[str, str]]:
+         """Validate connection parameters for database type.
+
+         Args:
+             source_db_type: Type of source database
+             connection_params: Dictionary of connection parameters
+
+         Returns:
+             Tuple of (missing_params, param_descriptions)
+         """
+         if source_db_type == 'mysql':
+             required_params = ['cluster_arn', 'secret_arn', 'database', 'region']
+             missing_params = [
+                 param
+                 for param in required_params
+                 if not connection_params.get(param)
+                 or (
+                     isinstance(connection_params[param], str)
+                     and connection_params[param].strip() == ''
+                 )
+             ]
+
+             param_descriptions = {
+                 'cluster_arn': 'AWS cluster ARN',
+                 'secret_arn': 'AWS secret ARN',
+                 'database': 'Database name',
+                 'region': 'AWS region',
+             }
+             return missing_params, param_descriptions
+         return [], {}
+
+     @staticmethod
+     def save_analysis_files(
+         results: Dict[str, Any],
+         source_db_type: str,
+         database: str,
+         pattern_analysis_days: int,
+         max_results: int,
+         output_dir: str,
+     ) -> Tuple[List[str], List[str]]:
+         """Save analysis results to JSON files.
+
+         Args:
+             results: Dictionary of query results
+             source_db_type: Type of source database
+             database: Database name
+             pattern_analysis_days: Number of days to analyze the logs for pattern analysis query
+             max_results: Maximum results per query
+             output_dir: Absolute directory path where the timestamped output analysis folder will be created
+
+         Returns:
+             Tuple of (saved_files, save_errors)
+         """
+         saved_files = []
+         save_errors = []
+
+         logger.info(f'save_analysis_files called with {len(results) if results else 0} results')
+
+         if not results:
+             logger.warning('No results to save - returning empty lists')
+             return saved_files, save_errors
+
+         timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+         analysis_folder = os.path.join(output_dir, f'database_analysis_{timestamp}')
+         logger.info(f'Creating analysis folder: {analysis_folder}')
+
+         try:
+             os.makedirs(analysis_folder, exist_ok=True)
+             logger.info(f'Created folder at: {analysis_folder}')
+         except OSError as e:
+             logger.error(f'Failed to create analysis folder: {str(e)}')
+             save_errors.append(f'Failed to create folder {analysis_folder}: {str(e)}')
+             return saved_files, save_errors
+
+         for query_name, query_result in results.items():
+             filename = os.path.join(analysis_folder, f'{query_name}_results.json')
+
+             analysis_data = query_result['data']
+             if query_name == 'query_pattern_analysis':
+                 analysis_data = DatabaseAnalyzer.filter_pattern_data(
+                     analysis_data, pattern_analysis_days
+                 )
+
+             try:
+                 with open(filename, 'w') as f:
+                     json.dump(
+                         {
+                             'query_name': query_name,
+                             'description': query_result['description'],
+                             'source_db_type': source_db_type,
+                             'database': database,
+                             'pattern_analysis_days': pattern_analysis_days,
+                             'max_query_results': max_results,
+                             'data': analysis_data,
+                         },
+                         f,
+                         indent=2,
+                         default=str,
+                     )
+                 saved_files.append(filename)
+                 logger.info(f'Saved {query_name} results to {filename}')
+             except Exception as e:
+                 logger.error(f'Failed to save {query_name}: {str(e)}')
+                 save_errors.append(f'Failed to save {query_name}: {str(e)}')
+
+         return saved_files, save_errors
+
+     @staticmethod
+     def filter_pattern_data(
+         data: List[Dict[str, Any]], pattern_analysis_days: int
+     ) -> List[Dict[str, Any]]:
+         """Filter pattern analysis data to exclude DDL statements and add RPS calculations.
+
+         Args:
+             data: List of query pattern dictionaries
+             pattern_analysis_days: Number of days in analysis period
+
+         Returns:
+             Filtered list with calculated RPS added to each pattern
+         """
+         if not data:
+             return data
+
+         total_seconds = (pattern_analysis_days or DEFAULT_ANALYSIS_DAYS) * SECONDS_PER_DAY
+         filtered_patterns = []
+
+         for pattern in data:
+             digest = pattern.get('DIGEST_TEXT', '')
+             # Skip DDL statements
+             if not any(digest.upper().startswith(prefix) for prefix in DDL_PREFIXES):
+                 pattern_with_rps = pattern.copy()
+                 count = pattern.get('COUNT_STAR', 0)
+                 pattern_with_rps['calculated_rps'] = (
+                     round(count / total_seconds, 6) if total_seconds > 0 else 0
+                 )
+                 filtered_patterns.append(pattern_with_rps)
+
+         return filtered_patterns
+
+
+ class MySQLAnalyzer(DatabaseAnalyzer):
+     """MySQL-specific database analyzer."""
+
+     SCHEMA_QUERIES = [
+         'table_analysis',
+         'column_analysis',
+         'foreign_key_analysis',
+         'index_analysis',
+     ]
+     ACCESS_PATTERN_QUERIES = ['performance_schema_check', 'query_pattern_analysis']
+
+     @staticmethod
+     def is_performance_schema_enabled(result):
+         """Check if MySQL performance schema is enabled from query result."""
+         if result and len(result) > 0:
+             performance_schema_value = str(
+                 result[0].get('', '0')
+             )  # Key is empty string by mysql package design, so checking only value here
+             return performance_schema_value == '1'
+         return False
+
+     def __init__(self, connection_params):
+         """Initialize MySQL analyzer with connection parameters."""
+         self.cluster_arn = connection_params['cluster_arn']
+         self.secret_arn = connection_params['secret_arn']
+         self.database = connection_params['database']
+         self.region = connection_params['region']
+         self.max_results = connection_params['max_results']
+         self.pattern_analysis_days = connection_params['pattern_analysis_days']
+
+     async def _run_query(self, sql, query_parameters=None):
+         """Internal method to run SQL queries against MySQL database."""
+         try:
+             # Create a new connection with current parameters
+             db_connection = DBConnection(
+                 self.cluster_arn, self.secret_arn, self.database, self.region, True
+             )
+             # Pass connection parameter directly to mysql_query
+             result = await mysql_query(sql, DummyCtx(), db_connection, query_parameters)
+             return result
+         except Exception as e:
+             logger.error(f'MySQL query execution failed - {type(e).__name__}: {str(e)}')
+             return [{'error': f'MySQL query failed: {str(e)}'}]
+
+     async def execute_query_batch(
+         self, query_names: List[str], pattern_analysis_days: int = None
+     ) -> Tuple[Dict[str, Any], List[str]]:
+         """Execute a batch of analysis queries.
+
+         Args:
+             query_names: List of query names to execute
+             pattern_analysis_days: Optional analysis period for pattern queries
+
+         Returns:
+             Tuple of (results_dict, errors_list)
+         """
+         results = {}
+         errors = []
+
+         for query_name in query_names:
+             try:
+                 # Get query with appropriate parameters
+                 if query_name == 'query_pattern_analysis' and pattern_analysis_days:
+                     query = get_query_resource(
+                         query_name,
+                         max_query_results=self.max_results,
+                         target_database=self.database,
+                         pattern_analysis_days=pattern_analysis_days,
+                     )
+                 else:
+                     query = get_query_resource(
+                         query_name,
+                         max_query_results=self.max_results,
+                         target_database=self.database,
+                     )
+
+                 result = await self._run_query(query['sql'])
+
+                 if result and isinstance(result, list) and len(result) > 0:
+                     if 'error' in result[0]:
+                         errors.append(f'{query_name}: {result[0]["error"]}')
+                     else:
+                         results[query_name] = {
+                             'description': query['description'],
+                             'data': result,
+                         }
+                 else:
+                     # Handle empty results
+                     results[query_name] = {
+                         'description': query['description'],
+                         'data': [],
+                     }
+
+             except Exception as e:
+                 errors.append(f'{query_name}: {str(e)}')
+
+         return results, errors
+
+     @classmethod
+     async def analyze(cls, connection_params: Dict[str, Any]) -> Dict[str, Any]:
+         """Execute MySQL-specific analysis workflow.
+
+         Args:
+             connection_params: Dictionary of connection parameters
+
+         Returns:
+             Dictionary containing results, errors, and performance schema status
+         """
+         analyzer = cls(connection_params)
+
+         # Execute schema analysis
+         schema_results, schema_errors = await analyzer.execute_query_batch(cls.SCHEMA_QUERIES)
+
+         # Execute performance schema check
+         (
+             performance_schema_check_results,
+             performance_schema_check_errors,
+         ) = await analyzer.execute_query_batch(['performance_schema_check'])
+
+         performance_enabled = False
+         all_results = {**schema_results}
+         all_errors = schema_errors + performance_schema_check_errors
+
+         # Check performance schema status and run pattern analysis if enabled
+         if 'performance_schema_check' in performance_schema_check_results:
+             performance_enabled = cls.is_performance_schema_enabled(
+                 performance_schema_check_results['performance_schema_check']['data']
+             )
+
+             if performance_enabled:
+                 pattern_results, pattern_errors = await analyzer.execute_query_batch(
+                     ['query_pattern_analysis'], analyzer.pattern_analysis_days
+                 )
+                 all_results.update(pattern_results)
+                 all_errors.extend(pattern_errors)
+         if not performance_enabled:
+             all_errors.append('Performance Schema disabled - skipping query_pattern_analysis')
+
+         return {
+             'results': all_results,
+             'errors': all_errors,
+             'performance_enabled': performance_enabled,
+             'performance_feature': 'Performance Schema',
+         }
+
+
+ class DatabaseAnalyzerRegistry:
+     """Registry for database-specific analyzers."""
+
+     _analyzers = {
+         'mysql': MySQLAnalyzer,
+     }
+
+     @classmethod
+     def get_analyzer(cls, source_db_type: str):
+         """Get the appropriate analyzer class for the database type."""
+         analyzer = cls._analyzers.get(source_db_type.lower())
+         if not analyzer:
+             raise ValueError(f'Unsupported database type: {source_db_type}')
+         return analyzer
+
+     @classmethod
+     def get_supported_types(cls) -> List[str]:
+         """Get list of supported database types."""
+         return list(cls._analyzers.keys())
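A hedged sketch of how these pieces compose outside the MCP tool; the ARNs, database name, and output path are placeholders, and the directory must already exist because build_connection_params validates it:

```python
import asyncio

from awslabs.dynamodb_mcp_server.database_analyzers import (
    DatabaseAnalyzer,
    DatabaseAnalyzerRegistry,
)

analyzer_cls = DatabaseAnalyzerRegistry.get_analyzer('mysql')  # -> MySQLAnalyzer

# Placeholder ARNs and path; output_dir must be an existing, writable absolute path.
params = DatabaseAnalyzer.build_connection_params(
    'mysql',
    aws_cluster_arn='arn:aws:rds:us-west-2:123456789012:cluster:example-cluster',
    aws_secret_arn='arn:aws:secretsmanager:us-west-2:123456789012:secret:example',
    database_name='appdb',
    aws_region='us-west-2',
    output_dir='/tmp/ddb-analysis',
)

missing, descriptions = DatabaseAnalyzer.validate_connection_params('mysql', params)
if not missing:
    # Runs the schema queries, the performance_schema check, and (if enabled)
    # the query pattern analysis; then writes one JSON file per query result.
    analysis = asyncio.run(analyzer_cls.analyze(params))
    files, errors = DatabaseAnalyzer.save_analysis_files(
        analysis['results'], 'mysql', params['database'],
        params['pattern_analysis_days'], params['max_results'], params['output_dir'],
    )
```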
@@ -9,6 +9,19 @@ You are an AI pair programming with a USER. Your goal is to help the USER create
 
  🔴 **CRITICAL**: You MUST limit the number of questions you ask at any given time, try to limit it to one question, or AT MOST: three related questions.
 
+ ## Initial Assessment for Requirement Gathering
+
+ **If user provides specific context, respond accordingly. Otherwise, present these options:**
+ "How would you like to gather requirements for your DynamoDB model?
+
+ **Natural Language Requirement Gathering** - We'll gather requirements through Q&A (for new or existing applications)
+ **Existing Database Analysis** - I can analyze your database to discover schema and patterns automatically using the `source_db_analyzer` tool
+
+ Which approach would you prefer?"
+
+ 🔴 **CRITICAL DATABASE ANALYSIS WORKFLOW**:
+ After running `source_db_analyzer`, you MUST IMMEDIATELY read ALL JSON files from the timestamped analysis directory (database_analysis_YYYYMMDD_HHMMSS) and proceed with DynamoDB Data Modeling using the complete analysis.
+
  ## Documentation Workflow
 
  🔴 CRITICAL FILE MANAGEMENT:
@@ -14,9 +14,17 @@
 
  #!/usr/bin/env python3
 
+
  from awslabs.dynamodb_mcp_server.common import handle_exceptions
+ from awslabs.dynamodb_mcp_server.database_analyzers import (
+     DatabaseAnalyzer,
+     DatabaseAnalyzerRegistry,
+ )
+ from loguru import logger
  from mcp.server.fastmcp import FastMCP
  from pathlib import Path
+ from pydantic import Field
+ from typing import Optional
 
 
  # Define server instructions and dependencies
@@ -29,11 +37,18 @@ Option 1(RECOMMENDED): AWS API MCP Server
  Migration guide: https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server
  Option 2(NOT RECOMMENDED): Legacy version 1.0.9
 
- Available Tool:
+ Available Tools:
  --------------
  Use the `dynamodb_data_modeling` tool to access enterprise-level DynamoDB design expertise.
  This tool provides systematic methodology for creating production-ready multi-table design with
  advanced optimizations, cost analysis, and integration patterns.
+
+ Use the `source_db_analyzer` tool to analyze existing MySQL/Aurora databases for DynamoDB Data Modeling:
+ - Extracts schema structure (tables, columns, indexes, foreign keys)
+ - Captures access patterns from Performance Schema (query patterns, RPS, frequencies)
+ - Generates timestamped analysis files (JSON format) for use with dynamodb_data_modeling
+ - Requires AWS RDS Data API and credentials in Secrets Manager
+ - Safe for production use (read-only analysis)
  """
 
 
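As context for the instructions text above, a hedged sketch of how such a string is typically passed to FastMCP; the `SERVER_INSTRUCTIONS` name is a hypothetical stand-in for the multi-line string in this hunk, since the actual constructor call is unchanged context that this diff does not show:

```python
# Hedged sketch; 'SERVER_INSTRUCTIONS' is an illustrative name, not from the release.
from mcp.server.fastmcp import FastMCP

app = FastMCP(
    'awslabs.dynamodb-mcp-server',
    instructions=SERVER_INSTRUCTIONS,  # the instructions text shown in the hunk above
)
```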
@@ -72,6 +87,156 @@ async def dynamodb_data_modeling() -> str:
      return architect_prompt
 
 
+ @app.tool()
+ @handle_exceptions
+ async def source_db_analyzer(
+     source_db_type: str = Field(description="Supported Source Database type: 'mysql'"),
+     database_name: Optional[str] = Field(
+         default=None, description='Database name to analyze (overrides MYSQL_DATABASE env var)'
+     ),
+     pattern_analysis_days: Optional[int] = Field(
+         default=30,
+         description='Number of days to analyze the logs for pattern analysis query',
+         ge=1,
+     ),
+     max_query_results: Optional[int] = Field(
+         default=None,
+         description='Maximum number of rows to include in analysis output files for schema and query log data (overrides MYSQL_MAX_QUERY_RESULTS env var)',
+         ge=1,
+     ),
+     aws_cluster_arn: Optional[str] = Field(
+         default=None, description='AWS cluster ARN (overrides MYSQL_CLUSTER_ARN env var)'
+     ),
+     aws_secret_arn: Optional[str] = Field(
+         default=None, description='AWS secret ARN (overrides MYSQL_SECRET_ARN env var)'
+     ),
+     aws_region: Optional[str] = Field(
+         default=None, description='AWS region (overrides AWS_REGION env var)'
+     ),
+     output_dir: str = Field(
+         description='Absolute directory path where the timestamped output analysis folder will be created.'
+     ),
+ ) -> str:
+     """Analyzes your source database to extract schema and access patterns for DynamoDB Data Modeling.
+
+     This tool connects to your existing relational database and examines your current database structure
+     and query patterns to help you design an optimal DynamoDB data model.
+
+     Output & Next Steps:
+     - Creates timestamped folder (database_analysis_YYYYMMDD_HHMMSS) with 4-5 JSON files:
+       * table_analysis_results.json - Table-level statistics
+       * column_analysis_results.json - Column definitions for all tables
+       * index_analysis_results.json - Index structures and compositions
+       * foreign_key_analysis_results.json - Relationship mappings
+       * query_pattern_analysis_results.json - Query patterns (only if Performance Schema enabled)
+     - Each file contains query results with metadata (database name, analysis period, descriptions)
+     - Use these files with the dynamodb_data_modeling tool to design your DynamoDB schema
+     - Analysis is read-only
+
+     Connection Requirements (MySQL/Aurora):
+     - AWS RDS Data API enabled on your Aurora MySQL cluster
+     - Database credentials stored in AWS Secrets Manager
+     - Appropriate IAM permissions to access RDS Data API and Secrets Manager
+     - For complete analysis: MySQL Performance Schema must be enabled (set performance_schema=ON)
+     - Without Performance Schema: Schema-only analysis is performed (no query pattern data)
+
+     Environment Variables (Optional):
+     You can set these instead of passing parameters:
+     - MYSQL_DATABASE: Database name to analyze
+     - MYSQL_CLUSTER_ARN: Aurora cluster ARN
+     - MYSQL_SECRET_ARN: Secrets Manager secret ARN containing DB credentials
+     - AWS_REGION: AWS region where your database is located
+     - MYSQL_MAX_QUERY_RESULTS: Maximum rows per query (default: 500)
+
+     Typical Usage:
+     1. Run this tool against your source database
+     2. Review the generated analysis files to understand your current schema and patterns
+     3. Use dynamodb_data_modeling tool with these files to design your DynamoDB tables
+     4. The analysis helps identify entity relationships, access patterns, and optimization opportunities
+
+     Returns: Analysis summary with saved file locations, query statistics, and next steps.
+     """
+     try:
+         analyzer_class = DatabaseAnalyzerRegistry.get_analyzer(source_db_type)
+     except ValueError as e:
+         supported_types = DatabaseAnalyzerRegistry.get_supported_types()
+         return f'{str(e)}. Supported types: {supported_types}'
+
+     # Build connection parameters based on database type
+     connection_params = DatabaseAnalyzer.build_connection_params(
+         source_db_type,
+         database_name=database_name,
+         pattern_analysis_days=pattern_analysis_days,
+         max_query_results=max_query_results,
+         aws_cluster_arn=aws_cluster_arn,
+         aws_secret_arn=aws_secret_arn,
+         aws_region=aws_region,
+         output_dir=output_dir,
+     )
+
+     # Validate parameters based on database type
+     missing_params, param_descriptions = DatabaseAnalyzer.validate_connection_params(
+         source_db_type, connection_params
+     )
+     if missing_params:
+         missing_descriptions = [param_descriptions[param] for param in missing_params]
+         return (
+             f'To analyze your {source_db_type} database, I need: {", ".join(missing_descriptions)}'
+         )
+
+     logger.info(
+         f'Starting database analysis for {source_db_type} database: {connection_params.get("database")}'
+     )
+
+     try:
+         analysis_result = await analyzer_class.analyze(connection_params)
+
+         # Save results to files
+         saved_files, save_errors = DatabaseAnalyzer.save_analysis_files(
+             analysis_result['results'],
+             source_db_type,
+             connection_params.get('database'),
+             connection_params.get('pattern_analysis_days'),
+             connection_params.get('max_results'),
+             connection_params.get('output_dir'),
+         )
+
+         # Generate report
+         logger.info('Generating analysis report')
+         if analysis_result['results']:
+             report = f"""Database Analysis Complete
+
+ Summary:
+ - Database: {connection_params.get('database')}
+ - Analysis Period: {connection_params.get('pattern_analysis_days')} days
+ - {analysis_result['performance_feature']}: {'Enabled' if analysis_result['performance_enabled'] else 'Disabled'}"""
+
+             if saved_files:
+                 report += f'\n\nSaved Files:\n{chr(10).join(f"- {f}" for f in saved_files)}'
+
+             if save_errors:
+                 report += f'\n\nFile Save Errors:\n{chr(10).join(f"- {e}" for e in save_errors)}'
+
+             if analysis_result['errors']:
+                 report += f'\n\nQuery Errors ({len(analysis_result["errors"])}):\n' + '\n'.join(
+                     f'{i}. {error}' for i, error in enumerate(analysis_result['errors'], 1)
+                 )
+
+         else:
+             report = (
+                 f'Database Analysis Failed\n\nAll {len(analysis_result["errors"])} queries failed:\n'
+                 + '\n'.join(
+                     f'{i}. {error}' for i, error in enumerate(analysis_result['errors'], 1)
+                 )
+             )
+
+         return report
+
+     except Exception as e:
+         logger.error(f'Analysis failed with exception: {str(e)}')
+         return f'Analysis failed: {str(e)}'
+
+
  def main():
      """Main entry point for the MCP server application."""
      app.run()
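For illustration, this is roughly how the tool might be invoked from an MCP client session; connection setup is omitted, `session` is assumed to be an initialized `mcp.ClientSession`, and all argument values are placeholders:

```python
# Hedged sketch; not from the release. All values below are placeholders.
result = await session.call_tool(
    'source_db_analyzer',
    {
        'source_db_type': 'mysql',
        'database_name': 'appdb',
        'aws_region': 'us-west-2',
        'output_dir': '/tmp/ddb-analysis',  # absolute, existing, writable
    },
)
print(result.content)  # the analysis summary report returned by the tool
```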
@@ -0,0 +1,256 @@
+ Metadata-Version: 2.4
+ Name: awslabs.dynamodb-mcp-server
+ Version: 2.0.2
+ Summary: The official MCP Server for interacting with AWS DynamoDB
+ Project-URL: homepage, https://awslabs.github.io/mcp/
+ Project-URL: docs, https://awslabs.github.io/mcp/servers/dynamodb-mcp-server/
+ Project-URL: documentation, https://awslabs.github.io/mcp/servers/dynamodb-mcp-server/
+ Project-URL: repository, https://github.com/awslabs/mcp.git
+ Project-URL: changelog, https://github.com/awslabs/mcp/blob/main/src/dynamodb-mcp-server/CHANGELOG.md
+ Author: Amazon Web Services
+ Author-email: AWSLabs MCP <203918161+awslabs-mcp@users.noreply.github.com>, Erben Mo <moerben@amazon.com>
+ License: Apache-2.0
+ License-File: LICENSE
+ License-File: NOTICE
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Python: >=3.10
+ Requires-Dist: awslabs-mysql-mcp-server==1.0.5
+ Requires-Dist: boto3==1.40.5
+ Requires-Dist: dspy-ai>=2.6.27
+ Requires-Dist: loguru==0.7.3
+ Requires-Dist: mcp[cli]==1.12.4
+ Requires-Dist: pydantic==2.11.7
+ Requires-Dist: strands-agents>=1.5.0
+ Requires-Dist: typing-extensions==4.14.1
+ Description-Content-Type: text/markdown
+
+ # AWS DynamoDB MCP Server
+
+ The official developer experience MCP Server for Amazon DynamoDB. This server provides DynamoDB expert design guidance and data modeling assistance.
+
+ ## Available MCP Tools
+
+ The DynamoDB MCP server currently contains two tools that support data modeling tasks. You can design a data model in natural language using only the `dynamodb_data_modeling` tool, or you can analyze your MySQL database and convert the analysis into a DynamoDB data model using the `source_db_analyzer` tool.
+
+ ### Design & Modeling
+
+ * `dynamodb_data_modeling` - Retrieves the complete DynamoDB Data Modeling Expert prompt
+ * `source_db_analyzer` - Executes predefined SQL queries against source databases to analyze schema and access patterns
+
+ ## Migration Notice
+
+ Starting with version 2.0.0, this server focuses exclusively on DynamoDB design and modeling guidance. All operational DynamoDB management tools (table operations, item operations, queries, backups, etc.) have been removed in favor of the [AWS API MCP Server](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server), which provides the same capability and more.
+
+ **This server does not provide:**
+
+ - ❌ Operational DynamoDB management (CRUD operations)
+ - ❌ Table creation or data migration
+ - ❌ Direct data queries or transformations
+
+ ### Recommended: AWS API MCP Server
+
+ For operational DynamoDB management (retrieving data, managing tables, etc.), use the [AWS API MCP Server](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server), which provides comprehensive DynamoDB operations. [Migration guide available here](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server).
+
+ ### Not Recommended: Legacy Version
+
+ If you must use the previous operational tools, you can pin to version 1.0.9, though this is not recommended:
+
+ ```json
+ {
+   "mcpServers": {
+     "awslabs.dynamodb-mcp-server": {
+       "command": "uvx",
+       "args": ["awslabs.dynamodb-mcp-server@1.0.9"],
+       "env": {
+         "DDB-MCP-READONLY": "true",
+         "AWS_PROFILE": "default",
+         "AWS_REGION": "us-west-2",
+         "FASTMCP_LOG_LEVEL": "ERROR"
+       },
+       "disabled": false,
+       "autoApprove": []
+     }
+   }
+ }
+ ```
+
+ ## Instructions
+
+ To design a data model in natural language, you can simply ask your AI agent to "use my DynamoDB MCP to help me design a DynamoDB data model," or something similar. If you want to analyze your MySQL query patterns, follow the additional steps below to set up connectivity and then say something like "analyze my MySQL database and then help me design a DynamoDB data model."
+
+ ## Source Database Integration
+
+ The DynamoDB MCP server includes source database integration for database analysis: the `source_db_analyzer` tool extracts the actual source database schema and access patterns, which inform the DynamoDB data model design. We recommend running this tool against a non-production database instance. It currently supports Aurora MySQL, with additional database support planned for future releases.
+
+ ### Prerequisites for MySQL Integration
+
+ 1. Aurora MySQL Cluster with MySQL username and password stored in AWS Secrets Manager
+ 2. Enable RDS Data API for your Aurora MySQL Cluster
+ 3. Enable Performance Schema for access pattern analysis (optional):
+
+     * Go to the parameter group for your DB instance and set the performance_schema value to 1. Make sure to reboot the DB instance after the change whenever you turn the Performance Schema on or off. Follow the [instructions](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_WorkingWithParamGroups.Modifying.html) to modify a DB parameter group in Amazon Aurora.
+     * After the parameter values are modified, you can run the `SHOW GLOBAL VARIABLES LIKE '%performance_schema';` command to view the value of the performance_schema parameter of the database instance. Also consider tuning the parameters below if required.
+     * `performance_schema_digests_size` [parameter](https://dev.mysql.com/doc/refman/8.0/en/performance-schema-system-variables.html#sysvar_performance_schema_digests_size) - Sets the maximum number of rows stored in the events_statements_summary_by_digest table for access pattern querying. (When you hit this limit, some logs will be lost, potentially missing important access patterns.)
+     * `performance_schema_max_digest_length` [parameter](https://dev.mysql.com/doc/refman/8.0/en/performance-schema-system-variables.html#sysvar_performance_schema_max_digest_length) - Sets the maximum byte length for each individual statement digest (access pattern) that the Performance Schema stores. (Default is 1024 bytes; complex queries might not be fully captured when you hit this limit.)
+     * Without these Performance Schema query access patterns, the DynamoDB data modeling tool recommends access patterns based on the information schema of the source database.
+
+ 4. Set up AWS credentials with access to AWS services:
+
+     * Configure AWS credentials with `aws configure` or environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN). The server will automatically use credentials from environment variables or other standard AWS credential sources.
+     * Use an AWS profile with permissions to access the RDS Data API and AWS Secrets Manager.
+
+ ### MySQL Environment Variables
+
+ Add these environment variables to the DynamoDB MCP Server configuration to enable MySQL integration:
+
+ * `MYSQL_CLUSTER_ARN`: The Resource ARN of the Aurora MySQL cluster
+ * `MYSQL_SECRET_ARN`: The ARN of the secret containing database credentials
+ * `MYSQL_DATABASE`: The name of the database to connect to
+ * `AWS_REGION`: AWS region of the Aurora MySQL cluster
+ * `MYSQL_MAX_QUERY_RESULTS`: Maximum number of rows to include in analysis output files for schema and access-pattern logs (optional, default: "500")
+
+ ### MCP configuration with MySQL Environment Variables
+
+ ```json
+ {
+   "mcpServers": {
+     "awslabs.dynamodb-mcp-server": {
+       "command": "uvx",
+       "args": ["awslabs.dynamodb-mcp-server@latest"],
+       "env": {
+         "DDB-MCP-READONLY": "true",
+         "AWS_PROFILE": "default",
+         "AWS_REGION": "us-west-2",
+         "FASTMCP_LOG_LEVEL": "ERROR",
+         "MYSQL_CLUSTER_ARN": "arn:aws:rds:$REGION:$ACCOUNT_ID:cluster:$CLUSTER_NAME",
+         "MYSQL_SECRET_ARN": "arn:aws:secretsmanager:$REGION:$ACCOUNT_ID:secret:$SECRET_NAME",
+         "MYSQL_DATABASE": "<DATABASE_NAME>",
+         "MYSQL_MAX_QUERY_RESULTS": "500"
+       },
+       "disabled": false,
+       "autoApprove": []
+     }
+   }
+ }
+ ```
+
+ ## Prerequisites
+
+ 1. Install `uv` from [Astral](https://docs.astral.sh/uv/getting-started/installation/) or the [GitHub README](https://github.com/astral-sh/uv#installation)
+ 2. Install Python using `uv python install 3.10`
+ 3. Set up AWS credentials with access to AWS services
+
+     * Consider setting up Read-only permission if you don't want the LLM to modify any resources
+
+ ## Installation
+
+ | Cursor | VS Code |
+ |:------:|:-------:|
+ | [![Install MCP Server](https://cursor.com/deeplink/mcp-install-light.svg)](https://cursor.com/en/install-mcp?name=awslabs.dynamodb-mcp-server&config=JTdCJTIyY29tbWFuZCUyMiUzQSUyMnV2eCUyMGF3c2xhYnMuZHluYW1vZGItbWNwLXNlcnZlciU0MGxhdGVzdCUyMiUyQyUyMmVudiUyMiUzQSU3QiUyMkREQi1NQ1AtUkVBRE9OTFklMjIlM0ElMjJ0cnVlJTIyJTJDJTIyQVdTX1BST0ZJTEUlMjIlM0ElMjJkZWZhdWx0JTIyJTJDJTIyQVdTX1JFR0lPTiUyMiUzQSUyMnVzLXdlc3QtMiUyMiUyQyUyMkZBU1RNQ1BfTE9HX0xFVkVMJTIyJTNBJTIyRVJST1IlMjIlN0QlMkMlMjJkaXNhYmxlZCUyMiUzQWZhbHNlJTJDJTIyYXV0b0FwcHJvdmUlMjIlM0ElNUIlNUQlN0Q%3D)| [![Install on VS Code](https://img.shields.io/badge/Install_on-VS_Code-FF9900?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=DynamoDB%20MCP%20Server&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22awslabs.dynamodb-mcp-server%40latest%22%5D%2C%22env%22%3A%7B%22DDB-MCP-READONLY%22%3A%22true%22%2C%22AWS_PROFILE%22%3A%22default%22%2C%22AWS_REGION%22%3A%22us-west-2%22%2C%22FASTMCP_LOG_LEVEL%22%3A%22ERROR%22%7D%2C%22disabled%22%3Afalse%2C%22autoApprove%22%3A%5B%5D%7D) |
+
+ Add the MCP to your favorite agentic tools (e.g., for Amazon Q Developer CLI MCP, `~/.aws/amazonq/mcp.json`):
+
+ ```json
+ {
+   "mcpServers": {
+     "awslabs.dynamodb-mcp-server": {
+       "command": "uvx",
+       "args": ["awslabs.dynamodb-mcp-server@latest"],
+       "env": {
+         "DDB-MCP-READONLY": "true",
+         "AWS_PROFILE": "default",
+         "AWS_REGION": "us-west-2",
+         "FASTMCP_LOG_LEVEL": "ERROR"
+       },
+       "disabled": false,
+       "autoApprove": []
+     }
+   }
+ }
+ ```
+
+ ### Windows Installation
+
+ For Windows users, the MCP server configuration format is slightly different:
+
+ ```json
+ {
+   "mcpServers": {
+     "awslabs.dynamodb-mcp-server": {
+       "disabled": false,
+       "timeout": 60,
+       "type": "stdio",
+       "command": "uv",
+       "args": [
+         "tool",
+         "run",
+         "--from",
+         "awslabs.dynamodb-mcp-server@latest",
+         "awslabs.dynamodb-mcp-server.exe"
+       ],
+       "env": {
+         "FASTMCP_LOG_LEVEL": "ERROR",
+         "AWS_PROFILE": "your-aws-profile",
+         "AWS_REGION": "us-west-2"
+       }
+     }
+   }
+ }
+ ```
+
+
+ or docker after a successful `docker build -t awslabs/dynamodb-mcp-server .`:
+
+ ```json
+ {
+   "mcpServers": {
+     "awslabs.dynamodb-mcp-server": {
+       "command": "docker",
+       "args": [
+         "run",
+         "--rm",
+         "--interactive",
+         "--env",
+         "FASTMCP_LOG_LEVEL=ERROR",
+         "awslabs/dynamodb-mcp-server:latest"
+       ],
+       "env": {},
+       "disabled": false,
+       "autoApprove": []
+     }
+   }
+ }
+ ```
+
+ ## Limitations & Considerations
+
+ The analysis cannot capture certain application-level patterns and business context, including:
+
+ ### Application-Level Patterns
+
+ * Queries generated dynamically in application code
+ * Caching layer behavior (Redis, Memcached)
+ * Real-time vs. analytics query differentiation
+ * Background job access patterns
+
+ ### Business Context
+
+ * Data consistency requirements
+ * Compliance and audit requirements
+ * Geographic distribution requirements
+
+ ### Recommendation
+
+ Supplement the analysis with documentation or natural language descriptions based on:
+
+ * Application code review
+ * Architecture documentation review
+ * Stakeholder interviews with the development team
+ * Load testing results analysis
+
+ There are also more complex patterns, resulting from stored procedures, triggers, and aggregations, that the tool does not currently handle consistently; we plan to improve this in future iterations.
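As a small follow-on to the workflow described above, a sketch of loading the generated analysis files for review before handing them to `dynamodb_data_modeling`; the folder name is illustrative, since the tool names it `database_analysis_YYYYMMDD_HHMMSS`:

```python
import json
from pathlib import Path

# Placeholder path; substitute the timestamped folder the analyzer actually created.
folder = Path('/tmp/ddb-analysis/database_analysis_20250101_120000')
for f in sorted(folder.glob('*_results.json')):
    payload = json.loads(f.read_text())
    # Each file carries the query description and the raw result rows.
    print(f'{f.name}: {payload["description"]} ({len(payload["data"])} rows)')
```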
@@ -0,0 +1,13 @@
+ awslabs/__init__.py,sha256=eSOf255HxkdQFKpFsCp-JsTvhFfgcK2rW2e_sJ_WjUU,1898
+ awslabs/dynamodb_mcp_server/__init__.py,sha256=l3xb-tR1nhO6wtXwU9pZ3xwEykkQfqP9rqZcUSfWb7Q,673
+ awslabs/dynamodb_mcp_server/common.py,sha256=--RWFURrGYjRjAnOuIbyX-DDrfZy1EeQXXRg4lASFB4,1677
+ awslabs/dynamodb_mcp_server/database_analysis_queries.py,sha256=iHngCENDnX5F0XBOrYVq0cA-fP4QCpmPhDNRCFtvGgA,8166
+ awslabs/dynamodb_mcp_server/database_analyzers.py,sha256=GEZubzjEiCW898LbKlDYHuvvirt_Z7ubJ8QZdkFMh-c,15046
+ awslabs/dynamodb_mcp_server/server.py,sha256=sbCebTs6AtW7nYQRNwGbpVokxSRR0VQE_rRUqFqZTPA,10181
+ awslabs/dynamodb_mcp_server/prompts/dynamodb_architect.md,sha256=ZTu64seZOwhnep7Jqi9PG5rkGwO8irkk3Lro6bdzU48,40644
+ awslabs_dynamodb_mcp_server-2.0.2.dist-info/METADATA,sha256=DNKybWJCXo8RtvISfwjCcA4KVczArYGu1aoFxLsoxLk,11868
+ awslabs_dynamodb_mcp_server-2.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ awslabs_dynamodb_mcp_server-2.0.2.dist-info/entry_points.txt,sha256=Vn6TvAN9d67Lsbkcs0UcIiOBI5xDpNBm_MOOzc1h-YU,88
+ awslabs_dynamodb_mcp_server-2.0.2.dist-info/licenses/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+ awslabs_dynamodb_mcp_server-2.0.2.dist-info/licenses/NOTICE,sha256=47UMmTFkf8rUc_JaJfdWe6NsAJQOcZNPZIL6JzU_k5U,95
+ awslabs_dynamodb_mcp_server-2.0.2.dist-info/RECORD,,
@@ -1,164 +0,0 @@
- Metadata-Version: 2.4
- Name: awslabs.dynamodb-mcp-server
- Version: 2.0.1
- Summary: The official MCP Server for interacting with AWS DynamoDB
- Project-URL: homepage, https://awslabs.github.io/mcp/
- Project-URL: docs, https://awslabs.github.io/mcp/servers/dynamodb-mcp-server/
- Project-URL: documentation, https://awslabs.github.io/mcp/servers/dynamodb-mcp-server/
- Project-URL: repository, https://github.com/awslabs/mcp.git
- Project-URL: changelog, https://github.com/awslabs/mcp/blob/main/src/dynamodb-mcp-server/CHANGELOG.md
- Author: Amazon Web Services
- Author-email: AWSLabs MCP <203918161+awslabs-mcp@users.noreply.github.com>, Erben Mo <moerben@amazon.com>
- License: Apache-2.0
- License-File: LICENSE
- License-File: NOTICE
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Operating System :: OS Independent
- Classifier: Programming Language :: Python
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Classifier: Programming Language :: Python :: 3.13
- Requires-Python: >=3.10
- Requires-Dist: boto3==1.40.5
- Requires-Dist: dspy-ai>=2.6.27
- Requires-Dist: loguru==0.7.3
- Requires-Dist: mcp[cli]==1.12.4
- Requires-Dist: pydantic==2.11.7
- Requires-Dist: strands-agents>=1.5.0
- Requires-Dist: typing-extensions==4.14.1
- Description-Content-Type: text/markdown
-
- # AWS DynamoDB MCP Server
-
- The official MCP Server for interacting with AWS DynamoDB
-
- This server provides expert DynamoDB design guidance and data modeling assistance.
-
- ## Available MCP Tools
-
- ### Design & Modeling
- - `dynamodb_data_modeling` - Retrieves the complete DynamoDB Data Modeling Expert prompt
-
- ## Migration Notice
-
- Starting with version 2.0.0, this server focuses exclusively on DynamoDB design and modeling guidance. All operational DynamoDB management tools (table operations, item operations, queries, backups, etc.) have been removed in favour of the [AWS API MCP Server](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server) which provides the same capability and more.
-
- ### Recommended: AWS API MCP Server
-
- For operational DynamoDB management, use the [AWS API MCP Server](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server) which provides comprehensive AWS service management including all DynamoDB operations. [Migration guide available here](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server).
-
- ### Not Recommended: Legacy Version
-
- If you must use the previous operational tools, you can pin to version 1.0.9, though this is not recommended:
-
- ```json
- {
-   "mcpServers": {
-     "awslabs.dynamodb-mcp-server": {
-       "command": "uvx",
-       "args": ["awslabs.dynamodb-mcp-server@1.0.9"],
-       "env": {
-         "DDB-MCP-READONLY": "true",
-         "AWS_PROFILE": "default",
-         "AWS_REGION": "us-west-2",
-         "FASTMCP_LOG_LEVEL": "ERROR"
-       },
-       "disabled": false,
-       "autoApprove": []
-     }
-   }
- }
- ```
-
- ## Instructions
-
- This MCP Server provides DynamoDB design and modeling guidance only. For operational DynamoDB management (retrieving data, managing tables, etc.), use the [AWS API MCP Server](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server) which provides comprehensive DynamoDB operations. [Migration guide available here](https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server).
-
-
- ## Prerequisites
-
- 1. Install `uv` from [Astral](https://docs.astral.sh/uv/getting-started/installation/) or the [GitHub README](https://github.com/astral-sh/uv#installation)
- 2. Install Python using `uv python install 3.10`
- 3. Set up AWS credentials with access to AWS services
- - Consider setting up Read-only permission if you don't want the LLM to modify any resources
-
- ## Installation
-
- | Cursor | VS Code |
- |:------:|:-------:|
- | [![Install MCP Server](https://cursor.com/deeplink/mcp-install-light.svg)](https://cursor.com/en/install-mcp?name=awslabs.dynamodb-mcp-server&config=JTdCJTIyY29tbWFuZCUyMiUzQSUyMnV2eCUyMGF3c2xhYnMuZHluYW1vZGItbWNwLXNlcnZlciU0MGxhdGVzdCUyMiUyQyUyMmVudiUyMiUzQSU3QiUyMkREQi1NQ1AtUkVBRE9OTFklMjIlM0ElMjJ0cnVlJTIyJTJDJTIyQVdTX1BST0ZJTEUlMjIlM0ElMjJkZWZhdWx0JTIyJTJDJTIyQVdTX1JFR0lPTiUyMiUzQSUyMnVzLXdlc3QtMiUyMiUyQyUyMkZBU1RNQ1BfTE9HX0xFVkVMJTIyJTNBJTIyRVJST1IlMjIlN0QlMkMlMjJkaXNhYmxlZCUyMiUzQWZhbHNlJTJDJTIyYXV0b0FwcHJvdmUlMjIlM0ElNUIlNUQlN0Q%3D)| [![Install on VS Code](https://img.shields.io/badge/Install_on-VS_Code-FF9900?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=DynamoDB%20MCP%20Server&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22awslabs.dynamodb-mcp-server%40latest%22%5D%2C%22env%22%3A%7B%22DDB-MCP-READONLY%22%3A%22true%22%2C%22AWS_PROFILE%22%3A%22default%22%2C%22AWS_REGION%22%3A%22us-west-2%22%2C%22FASTMCP_LOG_LEVEL%22%3A%22ERROR%22%7D%2C%22disabled%22%3Afalse%2C%22autoApprove%22%3A%5B%5D%7D) |
-
- Add the MCP to your favorite agentic tools. (e.g. for Amazon Q Developer CLI MCP, `~/.aws/amazonq/mcp.json`):
-
- ```json
- {
-   "mcpServers": {
-     "awslabs.dynamodb-mcp-server": {
-       "command": "uvx",
-       "args": ["awslabs.dynamodb-mcp-server@latest"],
-       "env": {
-         "DDB-MCP-READONLY": "true",
-         "AWS_PROFILE": "default",
-         "AWS_REGION": "us-west-2",
-         "FASTMCP_LOG_LEVEL": "ERROR"
-       },
-       "disabled": false,
-       "autoApprove": []
-     }
-   }
- }
- ```
- ### Windows Installation
-
- For Windows users, the MCP server configuration format is slightly different:
-
- ```json
- {
-   "mcpServers": {
-     "awslabs.dynamodb-mcp-server": {
-       "disabled": false,
-       "timeout": 60,
-       "type": "stdio",
-       "command": "uv",
-       "args": [
-         "tool",
-         "run",
-         "--from",
-         "awslabs.dynamodb-mcp-server@latest",
-         "awslabs.dynamodb-mcp-server.exe"
-       ],
-       "env": {
-         "FASTMCP_LOG_LEVEL": "ERROR",
-         "AWS_PROFILE": "your-aws-profile",
-         "AWS_REGION": "us-east-1"
-       }
-     }
-   }
- }
- ```
-
-
- or docker after a successful `docker build -t awslabs/dynamodb-mcp-server .`:
-
- ```json
- {
-   "mcpServers": {
-     "awslabs.dynamodb-mcp-server": {
-       "command": "docker",
-       "args": [
-         "run",
-         "--rm",
-         "--interactive",
-         "--env",
-         "FASTMCP_LOG_LEVEL=ERROR",
-         "awslabs/dynamodb-mcp-server:latest"
-       ],
-       "env": {},
-       "disabled": false,
-       "autoApprove": []
-     }
-   }
- }
- ```
@@ -1,11 +0,0 @@
- awslabs/__init__.py,sha256=WuqxdDgUZylWNmVoPKiK7qGsTB_G4UmuXIrJ-VBwDew,731
- awslabs/dynamodb_mcp_server/__init__.py,sha256=hJ7kw5_k2dM1Oyf4t2H4FTU6MFmdqfhbdtGgoodLci4,673
- awslabs/dynamodb_mcp_server/common.py,sha256=--RWFURrGYjRjAnOuIbyX-DDrfZy1EeQXXRg4lASFB4,1677
- awslabs/dynamodb_mcp_server/server.py,sha256=-g3MIL7wYG7LnOUV47mAb2lrJTvOGipTbWbBA-jvGFs,2919
- awslabs/dynamodb_mcp_server/prompts/dynamodb_architect.md,sha256=gaWjHmTu2oFiFnEKCs20Xe2JbClr6q4kP9e4_MK1Shw,39866
- awslabs_dynamodb_mcp_server-2.0.1.dist-info/METADATA,sha256=lMeU3euDvfmGDzSKS_vimIw-c-3ErSA4WoJ1b-iNvOs,6459
- awslabs_dynamodb_mcp_server-2.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- awslabs_dynamodb_mcp_server-2.0.1.dist-info/entry_points.txt,sha256=Vn6TvAN9d67Lsbkcs0UcIiOBI5xDpNBm_MOOzc1h-YU,88
- awslabs_dynamodb_mcp_server-2.0.1.dist-info/licenses/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
- awslabs_dynamodb_mcp_server-2.0.1.dist-info/licenses/NOTICE,sha256=47UMmTFkf8rUc_JaJfdWe6NsAJQOcZNPZIL6JzU_k5U,95
- awslabs_dynamodb_mcp_server-2.0.1.dist-info/RECORD,,