awslabs.dynamodb-mcp-server 1.0.9__py3-none-any.whl → 2.0.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of awslabs.dynamodb-mcp-server might be problematic.
- awslabs/__init__.py +20 -0
- awslabs/dynamodb_mcp_server/__init__.py +1 -1
- awslabs/dynamodb_mcp_server/common.py +1 -282
- awslabs/dynamodb_mcp_server/database_analysis_queries.py +210 -0
- awslabs/dynamodb_mcp_server/database_analyzers.py +383 -0
- awslabs/dynamodb_mcp_server/prompts/dynamodb_architect.md +13 -0
- awslabs/dynamodb_mcp_server/server.py +153 -858
- awslabs_dynamodb_mcp_server-2.0.2.dist-info/METADATA +256 -0
- awslabs_dynamodb_mcp_server-2.0.2.dist-info/RECORD +13 -0
- awslabs_dynamodb_mcp_server-1.0.9.dist-info/METADATA +0 -180
- awslabs_dynamodb_mcp_server-1.0.9.dist-info/RECORD +0 -11
- {awslabs_dynamodb_mcp_server-1.0.9.dist-info → awslabs_dynamodb_mcp_server-2.0.2.dist-info}/WHEEL +0 -0
- {awslabs_dynamodb_mcp_server-1.0.9.dist-info → awslabs_dynamodb_mcp_server-2.0.2.dist-info}/entry_points.txt +0 -0
- {awslabs_dynamodb_mcp_server-1.0.9.dist-info → awslabs_dynamodb_mcp_server-2.0.2.dist-info}/licenses/LICENSE +0 -0
- {awslabs_dynamodb_mcp_server-1.0.9.dist-info → awslabs_dynamodb_mcp_server-2.0.2.dist-info}/licenses/NOTICE +0 -0
awslabs/dynamodb_mcp_server/server.py

@@ -14,103 +14,42 @@
 
 #!/usr/bin/env python3
 
-
-import
-import
-
-
-    AttributeValue,
-    CreateTableInput,
-    DeleteItemInput,
-    GetItemInput,
-    GetResourcePolicyInput,
-    GlobalSecondaryIndex,
-    GlobalSecondaryIndexUpdate,
-    KeyAttributeValue,
-    KeySchemaElement,
-    OnDemandThroughput,
-    ProvisionedThroughput,
-    PutItemInput,
-    PutResourcePolicyInput,
-    QueryInput,
-    ReplicationGroupUpdate,
-    ScanInput,
-    Select,
-    SSESpecification,
-    StreamSpecification,
-    Tag,
-    TimeToLiveSpecification,
-    UpdateItemInput,
-    UpdateTableInput,
-    WarmThroughput,
-    handle_exceptions,
-    mutation_check,
+
+from awslabs.dynamodb_mcp_server.common import handle_exceptions
+from awslabs.dynamodb_mcp_server.database_analyzers import (
+    DatabaseAnalyzer,
+    DatabaseAnalyzerRegistry,
 )
-from
+from loguru import logger
 from mcp.server.fastmcp import FastMCP
 from pathlib import Path
 from pydantic import Field
-from typing import
+from typing import Optional
 
 
 # Define server instructions and dependencies
-SERVER_INSTRUCTIONS = """The official MCP Server for
-
-This server provides comprehensive DynamoDB capabilities with over 30 operational tools for managing DynamoDB tables,
-items, indexes, backups, and more, plus expert data modeling guidance through DynamoDB data modeling expert prompt
-
-IMPORTANT: DynamoDB Attribute Value Format
------------------------------------------
-When working with DynamoDB, all attribute values must be specified with their data types.
-Each attribute value is represented as a dictionary with a single key-value pair where the key
-is the data type and the value is the data itself:
-
-- S: String
-  Example: {"S": "Hello"}
-
-- N: Number (sent as a string)
-  Example: {"N": "123.45"}
-
-- B: Binary data (Base64-encoded)
-  Example: {"B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk"}
-
-- BOOL: Boolean value
-  Example: {"BOOL": true}
+SERVER_INSTRUCTIONS = """The official MCP Server for AWS DynamoDB design and modeling guidance
 
-
-  Example: {"NULL": true}
+This server provides DynamoDB design and modeling expertise.
 
-
-
-
-
-  Example: {"M": {"Name": {"S": "Joe"}, "Age": {"N": "35"}}}
-
-- SS: String Set (array of strings)
-  Example: {"SS": ["Giraffe", "Hippo", "Zebra"]}
-
-- NS: Number Set (array of strings representing numbers)
-  Example: {"NS": ["42.2", "-19", "7.5", "3.14"]}
-
-- BS: Binary Set (array of Base64-encoded binary data objects)
-  Example: {"BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="]}
-
-Common usage examples:
-- Primary key: {"userId": {"S": "user123"}}
-- Composite key: {"userId": {"S": "user123"}, "timestamp": {"N": "1612345678"}}
-- Expression attribute values: {":minScore": {"N": "100"}, ":active": {"BOOL": true}}
-- Complete item: {"userId": {"S": "user123"}, "score": {"N": "100"}, "data": {"B": "binarydata=="}}
+When users ask for dynamodb operational tasks, provide EXACTLY these two options:
+Option 1(RECOMMENDED): AWS API MCP Server
+Migration guide: https://github.com/awslabs/mcp/tree/main/src/aws-api-mcp-server
+Option 2(NOT RECOMMENDED): Legacy version 1.0.9
 
+Available Tools:
+--------------
 Use the `dynamodb_data_modeling` tool to access enterprise-level DynamoDB design expertise.
 This tool provides systematic methodology for creating production-ready multi-table design with
 advanced optimizations, cost analysis, and integration patterns.
-"""
 
-
-
-
-
-
+Use the `source_db_analyzer` tool to analyze existing MySQL/Aurora databases for DynamoDB Data Modeling:
+- Extracts schema structure (tables, columns, indexes, foreign keys)
+- Captures access patterns from Performance Schema (query patterns, RPS, frequencies)
+- Generates timestamped analysis files (JSON format) for use with dynamodb_data_modeling
+- Requires AWS RDS Data API and credentials in Secrets Manager
+- Safe for production use (read-only analysis)
+"""
 
 
 def create_server():
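Note: the removed SERVER_INSTRUCTIONS above documented DynamoDB's typed attribute-value format in detail. As a reference for reading the removed tool code in the hunks below, here is a minimal boto3 sketch of that format; the table and attribute names are illustrative assumptions, not part of the package:

    import boto3

    client = boto3.client('dynamodb', region_name='us-west-2')
    client.put_item(
        TableName='Users',  # hypothetical table name
        Item={
            'userId': {'S': 'user123'},            # S: string
            'score': {'N': '100'},                 # N: number, always sent as a string
            'active': {'BOOL': True},              # BOOL: boolean
            'tags': {'SS': ['Giraffe', 'Hippo']},  # SS: string set
        },
    )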
@@ -118,70 +57,12 @@ def create_server():
     return FastMCP(
         'awslabs.dynamodb-mcp-server',
         instructions=SERVER_INSTRUCTIONS,
-        dependencies=SERVER_DEPENDENCIES,
     )
 
 
 app = create_server()
 
 
-def get_dynamodb_client(region_name: str | None):
-    """Create a boto3 DynamoDB client using credentials from environment variables. Falls back to 'us-west-2' if no region is specified or found in environment."""
-    # Use provided region, or get from env, or fall back to us-west-2
-    region = region_name or os.getenv('AWS_REGION') or 'us-west-2'
-
-    # Configure custom user agent to identify requests from LLM/MCP
-    config = Config(user_agent_extra='MCP/DynamoDBServer')
-
-    # Create a new session to force credentials to reload
-    # so that if user changes credential, it will be reflected immediately in the next call
-    session = boto3.Session()
-
-    # boto3 will automatically load credentials from environment variables:
-    # AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN
-    return session.client('dynamodb', region_name=region, config=config)
-
-
-table_name = Field(description='Table Name or Amazon Resource Name (ARN)')
-index_name = Field(
-    default=None,
-    description='The name of a GSI',
-)
-key: Dict[str, KeyAttributeValue] = Field(
-    description='The primary key of an item. Must use DynamoDB attribute value format (see IMPORTANT note about DynamoDB Attribute Value Format).'
-)
-filter_expression: str = Field(
-    default=None,
-    description='Filter conditions expression that DynamoDB applies to filter out data',
-)
-projection_expression: str = Field(
-    default=None,
-    description='Attributes to retrieve, can include scalars, sets, or elements of a JSON document.',
-)
-expression_attribute_names: Dict[str, str] = Field(
-    default=None, description='Substitution tokens for attribute names in an expression.'
-)
-expression_attribute_values: Dict[str, AttributeValue] = Field(
-    default=None,
-    description='Values that can be substituted in an expression. Must use DynamoDB attribute value format (see IMPORTANT note about DynamoDB Attribute Value Format).',
-)
-select: Select = Field(
-    default=None,
-    description='The attributes to be returned. Valid values: ALL_ATTRIBUTES, ALL_PROJECTED_ATTRIBUTES, SPECIFIC_ATTRIBUTES, COUNT',
-)
-limit: int = Field(default=None, description='The maximum number of items to evaluate', ge=1)
-exclusive_start_key: Dict[str, KeyAttributeValue] = Field(
-    default=None,
-    description='Use the LastEvaluatedKey from the previous call. Must use DynamoDB attribute value format (see IMPORTANT note about DynamoDB Attribute Value Format).',
-)
-
-billing_mode: Literal['PROVISIONED', 'PAY_PER_REQUEST'] = Field(
-    default=None,
-    description='Specifies if billing is PAY_PER_REQUEST or by provisioned throughput',
-)
-resource_arn: str = Field(description='The Amazon Resource Name (ARN) of the DynamoDB resource')
-
-
 @app.tool()
 @handle_exceptions
 async def dynamodb_data_modeling() -> str:
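The removed get_dynamodb_client factory built a fresh boto3.Session on every call so that credential changes in the environment take effect on the next request, and tagged requests with a custom user agent. A self-contained sketch of the same pattern, standalone outside the MCP server:

    import os

    import boto3
    from botocore.config import Config

    def make_dynamodb_client(region_name=None):
        # Fall back to AWS_REGION, then us-west-2, as the removed helper did
        region = region_name or os.getenv('AWS_REGION') or 'us-west-2'
        # A custom user agent suffix identifies MCP-originated requests in logs
        config = Config(user_agent_extra='MCP/DynamoDBServer')
        # A new Session per call re-reads credentials from the environment
        return boto3.Session().client('dynamodb', region_name=region, config=config)

Version 2.0.2 drops this helper because server.py no longer calls DynamoDB APIs directly.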
@@ -208,738 +89,152 @@ async def dynamodb_data_modeling() -> str:
 
 @app.tool()
 @handle_exceptions
-
-
-
-
-        description='An AWS resource-based policy document in JSON format or dictionary.'
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Attaches a resource-based policy document (max 20 KB) to a DynamoDB table or stream. You can control permissions for both tables and their indexes through the policy."""
-    client = get_dynamodb_client(region_name)
-    # Convert policy to string if it's a dictionary
-    policy_str = json.dumps(policy) if isinstance(policy, dict) else policy
-
-    params: PutResourcePolicyInput = {'ResourceArn': resource_arn, 'Policy': policy_str}
-
-    response = client.put_resource_policy(**params)
-    return {'RevisionId': response.get('RevisionId')}
-
-
-@app.tool()
-@handle_exceptions
-async def get_resource_policy(
-    resource_arn: str = resource_arn,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns the resource-based policy document attached to a DynamoDB table or stream in JSON format."""
-    client = get_dynamodb_client(region_name)
-    params: GetResourcePolicyInput = {'ResourceArn': resource_arn}
-
-    response = client.get_resource_policy(**params)
-    return {'Policy': response.get('Policy'), 'RevisionId': response.get('RevisionId')}
-
-
-@app.tool()
-@handle_exceptions
-async def scan(
-    table_name: str = table_name,
-    index_name: str = index_name,
-    filter_expression: str = filter_expression,
-    projection_expression: str = projection_expression,
-    expression_attribute_names: Dict[str, str] = expression_attribute_names,
-    expression_attribute_values: Dict[str, AttributeValue] = expression_attribute_values,
-    select: Select = select,
-    limit: int = limit,
-    exclusive_start_key: Dict[str, KeyAttributeValue] = exclusive_start_key,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns items and attributes by scanning a table or secondary index. Reads up to Limit items or 1 MB of data, with optional FilterExpression to reduce results."""
-    client = get_dynamodb_client(region_name)
-    params: ScanInput = {'TableName': table_name}
-
-    if index_name:
-        params['IndexName'] = index_name
-    if filter_expression:
-        params['FilterExpression'] = filter_expression
-    if projection_expression:
-        params['ProjectionExpression'] = projection_expression
-    if expression_attribute_names:
-        params['ExpressionAttributeNames'] = expression_attribute_names
-    if expression_attribute_values:
-        params['ExpressionAttributeValues'] = expression_attribute_values
-    if select:
-        params['Select'] = select
-    if limit:
-        params['Limit'] = limit
-    if exclusive_start_key:
-        params['ExclusiveStartKey'] = exclusive_start_key
-    params['ReturnConsumedCapacity'] = 'TOTAL'
-
-    response = client.scan(**params)
-    return {
-        'Items': response.get('Items', []),
-        'Count': response.get('Count'),
-        'ScannedCount': response.get('ScannedCount'),
-        'LastEvaluatedKey': response.get('LastEvaluatedKey'),
-        'ConsumedCapacity': response.get('ConsumedCapacity'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-async def query(
-    table_name: str = table_name,
-    key_condition_expression: str = Field(
-        description='Key condition expression. Must perform an equality test on partition key value.'
+async def source_db_analyzer(
+    source_db_type: str = Field(description="Supported Source Database type: 'mysql'"),
+    database_name: Optional[str] = Field(
+        default=None, description='Database name to analyze (overrides MYSQL_DATABASE env var)'
     ),
-
-
-
-
-    expression_attribute_values: Dict[str, AttributeValue] = expression_attribute_values,
-    select: Select = select,
-    limit: int = limit,
-    scan_index_forward: bool = Field(
-        default=None, description='Ascending (true) or descending (false).'
+    pattern_analysis_days: Optional[int] = Field(
+        default=30,
+        description='Number of days to analyze the logs for pattern analysis query',
+        ge=1,
     ),
-
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns items from a table or index matching a partition key value, with optional sort key filtering."""
-    client = get_dynamodb_client(region_name)
-    params: QueryInput = {
-        'TableName': table_name,
-        'KeyConditionExpression': key_condition_expression,
-    }
-
-    if index_name:
-        params['IndexName'] = index_name
-    if filter_expression:
-        params['FilterExpression'] = filter_expression
-    if projection_expression:
-        params['ProjectionExpression'] = projection_expression
-    if expression_attribute_names:
-        params['ExpressionAttributeNames'] = expression_attribute_names
-    if expression_attribute_values:
-        params['ExpressionAttributeValues'] = expression_attribute_values
-    if select:
-        params['Select'] = select
-    if limit:
-        params['Limit'] = limit
-    if scan_index_forward is not None:
-        params['ScanIndexForward'] = scan_index_forward
-    if exclusive_start_key:
-        params['ExclusiveStartKey'] = exclusive_start_key
-    params['ReturnConsumedCapacity'] = 'TOTAL'
-
-    response = client.query(**params)
-    return {
-        'Items': response.get('Items', []),
-        'Count': response.get('Count'),
-        'ScannedCount': response.get('ScannedCount'),
-        'LastEvaluatedKey': response.get('LastEvaluatedKey'),
-        'ConsumedCapacity': response.get('ConsumedCapacity'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def update_item(
-    table_name: str = table_name,
-    key: Dict[str, KeyAttributeValue] = key,
-    update_expression: str = Field(
-        default=None,
-        description="""Defines the attributes to be updated, the action to be performed on them, and new value(s) for them. The following actions are available:
-* SET - Adds one or more attributes and values to an item. If any of these attributes already exist, they are replaced by the new values.
-* REMOVE - Removes one or more attributes from an item.
-* ADD - Only supports Number and Set data types. Adds a value to a number attribute or adds elements to a set.
-* DELETE - Only supports Set data type. Removes elements from a set.
-For example: 'SET a=:value1, b=:value2 DELETE :value3, :value4, :value5'""",
-    ),
-    condition_expression: str = Field(
-        default=None,
-        description='A condition that must be satisfied in order for a conditional update to succeed.',
-    ),
-    expression_attribute_names: Dict[str, str] = expression_attribute_names,
-    expression_attribute_values: Dict[str, AttributeValue] = expression_attribute_values,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Edits an existing item's attributes, or adds a new item to the table if it does not already exist."""
-    client = get_dynamodb_client(region_name)
-    params: UpdateItemInput = {'TableName': table_name, 'Key': key}
-
-    if update_expression:
-        params['UpdateExpression'] = update_expression
-    if condition_expression:
-        params['ConditionExpression'] = condition_expression
-    if expression_attribute_names:
-        params['ExpressionAttributeNames'] = expression_attribute_names
-    if expression_attribute_values:
-        params['ExpressionAttributeValues'] = expression_attribute_values
-    params['ReturnConsumedCapacity'] = 'TOTAL'
-    params['ReturnValuesOnConditionCheckFailure'] = 'ALL_OLD'
-
-    response = client.update_item(**params)
-    return {
-        'Attributes': response.get('Attributes'),
-        'ConsumedCapacity': response.get('ConsumedCapacity'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-async def get_item(
-    table_name: str = table_name,
-    key: Dict[str, KeyAttributeValue] = key,
-    expression_attribute_names: Dict[str, str] = expression_attribute_names,
-    projection_expression: str = projection_expression,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns attributes for an item with the given primary key. Uses eventually consistent reads by default, or set ConsistentRead=true for strongly consistent reads."""
-    client = get_dynamodb_client(region_name)
-    params: GetItemInput = {'TableName': table_name, 'Key': key}
-
-    if expression_attribute_names:
-        params['ExpressionAttributeNames'] = expression_attribute_names
-    if projection_expression:
-        params['ProjectionExpression'] = projection_expression
-    params['ReturnConsumedCapacity'] = 'TOTAL'
-
-    response = client.get_item(**params)
-    return {'Item': response.get('Item'), 'ConsumedCapacity': response.get('ConsumedCapacity')}
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def put_item(
-    table_name: str = table_name,
-    item: Dict[str, AttributeValue] = Field(
-        description='A map of attribute name/value pairs, one for each attribute. Must use DynamoDB attribute value format (see IMPORTANT note about DynamoDB Attribute Value Format).'
-    ),
-    condition_expression: str = Field(
-        default=None,
-        description='A condition that must be satisfied in order for a conditional put operation to succeed.',
-    ),
-    expression_attribute_names: Dict[str, str] = expression_attribute_names,
-    expression_attribute_values: Dict[str, Any] = expression_attribute_values,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Creates a new item or replaces an existing item in a table. Use condition expressions to control whether to create new items or update existing ones."""
-    client = get_dynamodb_client(region_name)
-    params: PutItemInput = {'TableName': table_name, 'Item': item}
-
-    if condition_expression:
-        params['ConditionExpression'] = condition_expression
-    if expression_attribute_names:
-        params['ExpressionAttributeNames'] = expression_attribute_names
-    if expression_attribute_values:
-        params['ExpressionAttributeValues'] = expression_attribute_values
-    params['ReturnConsumedCapacity'] = 'TOTAL'
-
-    response = client.put_item(**params)
-    return {
-        'Attributes': response.get('Attributes'),
-        'ConsumedCapacity': response.get('ConsumedCapacity'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def delete_item(
-    table_name: str = table_name,
-    key: Dict[str, KeyAttributeValue] = key,
-    condition_expression: str = Field(
+    max_query_results: Optional[int] = Field(
         default=None,
-        description='
-
-
-
-    )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        description='Maximum number of rows to include in analysis output files for schema and query log data (overrides MYSQL_MAX_QUERY_RESULTS env var)',
+        ge=1,
+    ),
+    aws_cluster_arn: Optional[str] = Field(
+        default=None, description='AWS cluster ARN (overrides MYSQL_CLUSTER_ARN env var)'
+    ),
+    aws_secret_arn: Optional[str] = Field(
+        default=None, description='AWS secret ARN (overrides MYSQL_SECRET_ARN env var)'
+    ),
+    aws_region: Optional[str] = Field(
+        default=None, description='AWS region (overrides AWS_REGION env var)'
+    ),
+    output_dir: str = Field(
+        description='Absolute directory path where the timestamped output analysis folder will be created.'
+    ),
+) -> str:
+    """Analyzes your source database to extract schema and access patterns for DynamoDB Data Modeling.
+
+    This tool connects to your existing relational database, examines your current database structure and query
+    patterns to help you design an optimal DynamoDB data model.
+
+    Output & Next Steps:
+    - Creates timestamped folder (database_analysis_YYYYMMDD_HHMMSS) with 4-5 JSON files:
+      * table_analysis_results.json - Table-level statistics
+      * column_analysis_results.json - Column definitions for all tables
+      * index_analysis_results.json - Index structures and compositions
+      * foreign_key_analysis_results.json - Relationship mappings
+      * query_pattern_analysis_results.json - Query patterns (only if Performance Schema enabled)
+    - Each file contains query results with metadata (database name, analysis period, descriptions)
+    - Use these files with the dynamodb_data_modeling tool to design your DynamoDB schema
+    - Analysis is read-only
+
+    Connection Requirements (MySQL/Aurora):
+    - AWS RDS Data API enabled on your Aurora MySQL cluster
+    - Database credentials stored in AWS Secrets Manager
+    - Appropriate IAM permissions to access RDS Data API and Secrets Manager
+    - For complete analysis: MySQL Performance Schema must be enabled (set performance_schema=ON)
+    - Without Performance Schema: Schema-only analysis is performed (no query pattern data)
+
+    Environment Variables (Optional):
+    You can set these instead of passing parameters:
+    - MYSQL_DATABASE: Database name to analyze
+    - MYSQL_CLUSTER_ARN: Aurora cluster ARN
+    - MYSQL_SECRET_ARN: Secrets Manager secret ARN containing DB credentials
+    - AWS_REGION: AWS region where your database is located
+    - MYSQL_MAX_QUERY_RESULTS: Maximum rows per query (default: 500)
+
+    Typical Usage:
+    1. Run this tool against your source database
+    2. Review the generated analysis files to understand your current schema and patterns
+    3. Use dynamodb_data_modeling tool with these files to design your DynamoDB tables
+    4. The analysis helps identify entity relationships, access patterns, and optimization opportunities
+
+    Returns: Analysis summary with saved file locations, query statistics, and next steps.
+    """
+    try:
+        analyzer_class = DatabaseAnalyzerRegistry.get_analyzer(source_db_type)
+    except ValueError as e:
+        supported_types = DatabaseAnalyzerRegistry.get_supported_types()
+        return f'{str(e)}. Supported types: {supported_types}'
+
+    # Build connection parameters based on database type
+    connection_params = DatabaseAnalyzer.build_connection_params(
+        source_db_type,
+        database_name=database_name,
+        pattern_analysis_days=pattern_analysis_days,
+        max_query_results=max_query_results,
+        aws_cluster_arn=aws_cluster_arn,
+        aws_secret_arn=aws_secret_arn,
+        aws_region=aws_region,
+        output_dir=output_dir,
     )
-    return response['TimeToLiveSpecification']
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def update_table(
-    table_name: str = table_name,
-    attribute_definitions: List[AttributeDefinition] = Field(
-        default=None,
-        description='Describe the key schema for the table and indexes. Required when adding a new GSI.',
-    ),
-    billing_mode: Literal['PROVISIONED', 'PAY_PER_REQUEST'] = billing_mode,
-    deletion_protection_enabled: bool = Field(
-        default=None, description='Indicates whether deletion protection is to be enabled'
-    ),
-    global_secondary_index_updates: List[GlobalSecondaryIndexUpdate] = Field(
-        default=None, description='List of GSIs to be added, updated or deleted.'
-    ),
-    on_demand_throughput: OnDemandThroughput = Field(
-        default=None, description='Set the max number of read and write units.'
-    ),
-    provisioned_throughput: ProvisionedThroughput = Field(
-        default=None, description='The new provisioned throughput settings.'
-    ),
-    replica_updates: List[ReplicationGroupUpdate] = Field(
-        default=None, description='A list of replica update actions (create, delete, or update).'
-    ),
-    sse_specification: SSESpecification = Field(
-        default=None, description='The new server-side encryption settings.'
-    ),
-    stream_specification: StreamSpecification = Field(
-        default=None, description='DynamoDB Streams configuration.'
-    ),
-    table_class: Literal['STANDARD', 'STANDARD_INFREQUENT_ACCESS'] = Field(
-        default=None, description='The new table class.'
-    ),
-    warm_throughput: WarmThroughput = Field(
-        default=None, description='The new warm throughput settings.'
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Modifies table settings including provisioned throughput, global secondary indexes, and DynamoDB Streams configuration. This is an asynchronous operation."""
-    client = get_dynamodb_client(region_name)
-    params: UpdateTableInput = {'TableName': table_name}
-
-    if attribute_definitions:
-        params['AttributeDefinitions'] = attribute_definitions
-    if billing_mode:
-        params['BillingMode'] = billing_mode
-    if deletion_protection_enabled is not None:
-        params['DeletionProtectionEnabled'] = deletion_protection_enabled
-    if global_secondary_index_updates:
-        params['GlobalSecondaryIndexUpdates'] = global_secondary_index_updates
-    if on_demand_throughput:
-        params['OnDemandThroughput'] = on_demand_throughput
-    if provisioned_throughput:
-        params['ProvisionedThroughput'] = provisioned_throughput
-    if replica_updates:
-        params['ReplicaUpdates'] = replica_updates
-    if sse_specification:
-        params['SSESpecification'] = sse_specification
-    if stream_specification:
-        params['StreamSpecification'] = stream_specification
-    if table_class:
-        params['TableClass'] = table_class
-    if warm_throughput:
-        params['WarmThroughput'] = warm_throughput
-
-    response = client.update_table(**params)
-    return response['TableDescription']
-
 
-
-
-
-
-
-
-
-
-        description='Max number of table names to return',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns a paginated list of table names in your account."""
-    client = get_dynamodb_client(region_name)
-    params = {}
-    if exclusive_start_table_name:
-        params['ExclusiveStartTableName'] = exclusive_start_table_name
-    if limit:
-        params['Limit'] = limit
-    response = client.list_tables(**params)
-    return {
-        'TableNames': response['TableNames'],
-        'LastEvaluatedTableName': response.get('LastEvaluatedTableName'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def create_table(
-    table_name: str = Field(
-        description='The name of the table to create.',
-    ),
-    attribute_definitions: List[AttributeDefinition] = Field(
-        description='Describe the key schema for the table and indexes.'
-    ),
-    key_schema: List[KeySchemaElement] = Field(
-        description='Specifies primary key attributes of the table.'
-    ),
-    billing_mode: Literal['PROVISIONED', 'PAY_PER_REQUEST'] = billing_mode,
-    global_secondary_indexes: List[GlobalSecondaryIndex] = Field(
-        default=None, description='GSIs to be created on the table.'
-    ),
-    provisioned_throughput: ProvisionedThroughput = Field(
-        default=None,
-        description='Provisioned throughput settings. Required if BillingMode is PROVISIONED.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Creates a new DynamoDB table with optional secondary indexes. This is an asynchronous operation."""
-    client = get_dynamodb_client(region_name)
-    params: CreateTableInput = {
-        'TableName': table_name,
-        'AttributeDefinitions': attribute_definitions,
-        'KeySchema': key_schema,
-    }
-
-    if billing_mode:
-        params['BillingMode'] = billing_mode
-    if global_secondary_indexes:
-        params['GlobalSecondaryIndexes'] = global_secondary_indexes
-    if provisioned_throughput:
-        params['ProvisionedThroughput'] = provisioned_throughput
-
-    response = client.create_table(**params)
-    return response['TableDescription']
-
-
-@app.tool()
-@handle_exceptions
-async def describe_table(
-    table_name: str = table_name,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns table information including status, creation time, key schema and indexes."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_table(TableName=table_name)
-    return response['Table']
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def create_backup(
-    table_name: str = table_name,
-    backup_name: str = Field(
-        description='Specified name for the backup.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Creates a backup of a DynamoDB table."""
-    client = get_dynamodb_client(region_name)
-    response = client.create_backup(TableName=table_name, BackupName=backup_name)
-    return response['BackupDetails']
-
-
-@app.tool()
-@handle_exceptions
-async def describe_backup(
-    backup_arn: str = Field(
-        description='The Amazon Resource Name (ARN) associated with the backup.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Describes an existing backup of a table."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_backup(BackupArn=backup_arn)
-    return response['BackupDescription']
-
-
-@app.tool()
-@handle_exceptions
-async def list_backups(
-    table_name: str = table_name,
-    backup_type: str = Field(
-        default=None,
-        description='Filter by backup type: USER (on-demand backup created by you), SYSTEM (automatically created by DynamoDB), AWS_BACKUP (created by AWS Backup), or ALL (all types).',
-        pattern='^(USER|SYSTEM|AWS_BACKUP|ALL)$',
-    ),
-    exclusive_start_backup_arn: str = Field(
-        default=None,
-        description='LastEvaluatedBackupArn from a previous paginated call.',
-    ),
-    limit: int = Field(
-        default=None, description='Maximum number of backups to return.', ge=1, le=100
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns a list of table backups."""
-    client = get_dynamodb_client(region_name)
-    params = {}
-    if backup_type:
-        params['BackupType'] = backup_type
-    if exclusive_start_backup_arn:
-        params['ExclusiveStartBackupArn'] = exclusive_start_backup_arn
-    if limit:
-        params['Limit'] = limit
-    if table_name:
-        params['TableName'] = table_name
-
-    response = client.list_backups(**params)
-    return {
-        'BackupSummaries': response.get('BackupSummaries', []),
-        'LastEvaluatedBackupArn': response.get('LastEvaluatedBackupArn'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def restore_table_from_backup(
-    backup_arn: str = Field(
-        description='The Amazon Resource Name (ARN) associated with the backup.',
-    ),
-    target_table_name: str = Field(
-        description='The name of the new table.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Creates a new table from a backup."""
-    client = get_dynamodb_client(region_name)
-    params = {'BackupArn': backup_arn, 'TargetTableName': target_table_name}
-
-    response = client.restore_table_from_backup(**params)
-    return response['TableDescription']
-
-
-@app.tool()
-@handle_exceptions
-async def describe_limits(
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns the current provisioned-capacity quotas for your AWS account and tables in a Region."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_limits()
-    return {
-        'AccountMaxReadCapacityUnits': response['AccountMaxReadCapacityUnits'],
-        'AccountMaxWriteCapacityUnits': response['AccountMaxWriteCapacityUnits'],
-        'TableMaxReadCapacityUnits': response['TableMaxReadCapacityUnits'],
-        'TableMaxWriteCapacityUnits': response['TableMaxWriteCapacityUnits'],
-    }
-
-
-@app.tool()
-@handle_exceptions
-async def describe_time_to_live(
-    table_name: str = table_name,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns the Time to Live (TTL) settings for a table."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_time_to_live(TableName=table_name)
-    return response['TimeToLiveDescription']
-
-
-@app.tool()
-@handle_exceptions
-async def describe_endpoints(
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns DynamoDB endpoints for the current region."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_endpoints()
-    return {'Endpoints': response['Endpoints']}
-
-
-@app.tool()
-@handle_exceptions
-async def describe_export(
-    export_arn: str = Field(
-        description='The Amazon Resource Name (ARN) associated with the export.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns information about a table export."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_export(ExportArn=export_arn)
-    return response['ExportDescription']
-
-
-@app.tool()
-@handle_exceptions
-async def list_exports(
-    max_results: int = Field(
-        default=None,
-        description='Maximum number of results to return per page.',
-    ),
-    next_token: str = Field(default=None, description='Token to fetch the next page of results.'),
-    table_arn: str = Field(
-        default=None,
-        description='The Amazon Resource Name (ARN) associated with the exported table.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns a list of table exports."""
-    client = get_dynamodb_client(region_name)
-    params = {}
-    if max_results:
-        params['MaxResults'] = max_results
-    if next_token:
-        params['NextToken'] = next_token
-    if table_arn:
-        params['TableArn'] = table_arn
-
-    response = client.list_exports(**params)
-    return {
-        'ExportSummaries': response.get('ExportSummaries', []),
-        'NextToken': response.get('NextToken'),
-    }
-
-
-@app.tool()
-@handle_exceptions
-async def describe_continuous_backups(
-    table_name: str = table_name,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Returns continuous backup and point in time recovery status for a table."""
-    client = get_dynamodb_client(region_name)
-    response = client.describe_continuous_backups(TableName=table_name)
-    return response['ContinuousBackupsDescription']
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def untag_resource(
-    resource_arn: str = resource_arn,
-    tag_keys: List[str] = Field(description='List of tags to remove.', min_length=1),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Removes tags from a DynamoDB resource."""
-    client = get_dynamodb_client(region_name)
-    response = client.untag_resource(ResourceArn=resource_arn, TagKeys=tag_keys)
-    return response
-
-
-@app.tool()
-@handle_exceptions
-@mutation_check
-async def tag_resource(
-    resource_arn: str = resource_arn,
-    tags: List[Tag] = Field(description='Tags to be assigned.'),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Adds tags to a DynamoDB resource."""
-    client = get_dynamodb_client(region_name)
-    response = client.tag_resource(ResourceArn=resource_arn, Tags=tags)
-    return response
+    # Validate parameters based on database type
+    missing_params, param_descriptions = DatabaseAnalyzer.validate_connection_params(
+        source_db_type, connection_params
+    )
+    if missing_params:
+        missing_descriptions = [param_descriptions[param] for param in missing_params]
+        return (
+            f'To analyze your {source_db_type} database, I need: {", ".join(missing_descriptions)}'
+        )
 
+    logger.info(
+        f'Starting database analysis for {source_db_type} database: {connection_params.get("database")}'
+    )
 
-
-
-
-
-
-
-
-    )
-
-
-
-    if next_token:
-        params['NextToken'] = next_token
+    try:
+        analysis_result = await analyzer_class.analyze(connection_params)
+
+        # Save results to files
+        saved_files, save_errors = DatabaseAnalyzer.save_analysis_files(
+            analysis_result['results'],
+            source_db_type,
+            connection_params.get('database'),
+            connection_params.get('pattern_analysis_days'),
+            connection_params.get('max_results'),
+            connection_params.get('output_dir'),
+        )
 
-
-
+        # Generate report
+        logger.info('Generating analysis report')
+        if analysis_result['results']:
+            report = f"""Database Analysis Complete
 
+Summary:
+- Database: {connection_params.get('database')}
+- Analysis Period: {connection_params.get('pattern_analysis_days')} days
+- {analysis_result['performance_feature']}: {'Enabled' if analysis_result['performance_enabled'] else 'Disabled'}"""
 
-
-
-@mutation_check
-async def delete_table(
-    table_name: str = table_name,
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """The DeleteTable operation deletes a table and all of its items. This is an asynchronous operation that puts the table into DELETING state until DynamoDB completes the deletion."""
-    client = get_dynamodb_client(region_name)
-    response = client.delete_table(TableName=table_name)
-    return response['TableDescription']
+            if saved_files:
+                report += f'\n\nSaved Files:\n{chr(10).join(f"- {f}" for f in saved_files)}'
 
+            if save_errors:
+                report += f'\n\nFile Save Errors:\n{chr(10).join(f"- {e}" for e in save_errors)}'
 
-
-
-
-
-    point_in_time_recovery_enabled: bool = Field(
-        description='Enable or disable point in time recovery.'
-    ),
-    recovery_period_in_days: int = Field(
-        default=None,
-        description='Number of days to retain point in time recovery backups.',
-    ),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Enables or disables point in time recovery for the specified table."""
-    client = get_dynamodb_client(region_name)
-    params = {
-        'TableName': table_name,
-        'PointInTimeRecoverySpecification': {
-            'PointInTimeRecoveryEnabled': point_in_time_recovery_enabled
-        },
-    }
-    if recovery_period_in_days:
-        params['PointInTimeRecoverySpecification']['RecoveryPeriodInDays'] = (
-            recovery_period_in_days
-        )
+            if analysis_result['errors']:
+                report += f'\n\nQuery Errors ({len(analysis_result["errors"])}):\n' + '\n'.join(
+                    f'{i}. {error}' for i, error in enumerate(analysis_result['errors'], 1)
+                )
 
-
-
+        else:
+            report = (
+                f'Database Analysis Failed\n\nAll {len(analysis_result["errors"])} queries failed:\n'
+                + '\n'.join(
+                    f'{i}. {error}' for i, error in enumerate(analysis_result['errors'], 1)
+                )
+            )
 
+        return report
 
-
-
-
-    next_token: str = Field(default=None, description='Token to fetch the next page of results.'),
-    region_name: str = Field(default=None, description='The aws region to run the tool'),
-) -> dict:
-    """Lists imports completed within the past 90 days."""
-    client = get_dynamodb_client(region_name)
-    params = {}
-    if next_token:
-        params['NextToken'] = next_token
-    params['PageSize'] = 25
-    response = client.list_imports(**params)
-    return {
-        'ImportSummaryList': response.get('ImportSummaryList', []),
-        'NextToken': response.get('NextToken'),
-    }
+    except Exception as e:
+        logger.error(f'Analysis failed with exception: {str(e)}')
+        return f'Analysis failed: {str(e)}'
 
 
 def main():