awslabs.dynamodb-mcp-server 2.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. awslabs/__init__.py +17 -0
  2. awslabs/dynamodb_mcp_server/__init__.py +17 -0
  3. awslabs/dynamodb_mcp_server/cdk_generator/__init__.py +19 -0
  4. awslabs/dynamodb_mcp_server/cdk_generator/generator.py +276 -0
  5. awslabs/dynamodb_mcp_server/cdk_generator/models.py +521 -0
  6. awslabs/dynamodb_mcp_server/cdk_generator/templates/README.md +57 -0
  7. awslabs/dynamodb_mcp_server/cdk_generator/templates/stack.ts.j2 +70 -0
  8. awslabs/dynamodb_mcp_server/common.py +94 -0
  9. awslabs/dynamodb_mcp_server/db_analyzer/__init__.py +30 -0
  10. awslabs/dynamodb_mcp_server/db_analyzer/analyzer_utils.py +394 -0
  11. awslabs/dynamodb_mcp_server/db_analyzer/base_plugin.py +355 -0
  12. awslabs/dynamodb_mcp_server/db_analyzer/mysql.py +450 -0
  13. awslabs/dynamodb_mcp_server/db_analyzer/plugin_registry.py +73 -0
  14. awslabs/dynamodb_mcp_server/db_analyzer/postgresql.py +215 -0
  15. awslabs/dynamodb_mcp_server/db_analyzer/sqlserver.py +255 -0
  16. awslabs/dynamodb_mcp_server/markdown_formatter.py +513 -0
  17. awslabs/dynamodb_mcp_server/model_validation_utils.py +845 -0
  18. awslabs/dynamodb_mcp_server/prompts/dynamodb_architect.md +851 -0
  19. awslabs/dynamodb_mcp_server/prompts/json_generation_guide.md +185 -0
  20. awslabs/dynamodb_mcp_server/prompts/transform_model_validation_result.md +168 -0
  21. awslabs/dynamodb_mcp_server/server.py +524 -0
  22. awslabs_dynamodb_mcp_server-2.0.10.dist-info/METADATA +306 -0
  23. awslabs_dynamodb_mcp_server-2.0.10.dist-info/RECORD +27 -0
  24. awslabs_dynamodb_mcp_server-2.0.10.dist-info/WHEEL +4 -0
  25. awslabs_dynamodb_mcp_server-2.0.10.dist-info/entry_points.txt +2 -0
  26. awslabs_dynamodb_mcp_server-2.0.10.dist-info/licenses/LICENSE +175 -0
  27. awslabs_dynamodb_mcp_server-2.0.10.dist-info/licenses/NOTICE +2 -0
awslabs/dynamodb_mcp_server/cdk_generator/models.py
@@ -0,0 +1,521 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model classes for CDK generator."""
+
+from dataclasses import dataclass, field
+from typing import List, Optional
+
+
+# AWS DynamoDB Limits
+# Source: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html
+MAX_GSI_PARTITION_KEYS = 4  # Maximum number of partition key attributes per GSI
+MAX_GSI_SORT_KEYS = 4  # Maximum number of sort key attributes per GSI
+
+
+@dataclass
+class KeyAttribute:
+    """Represents a key attribute (partition or sort key)."""
+
+    name: str
+    type: str  # 'S', 'N', or 'B'
+
+    def to_cdk_type(self) -> str:
+        """Map DynamoDB type to CDK AttributeType.
+
+        Returns:
+            CDK AttributeType string (STRING, NUMBER, or BINARY)
+
+        Raises:
+            ValueError: If type is not S, N, or B
+        """
+        mapping = {'S': 'STRING', 'N': 'NUMBER', 'B': 'BINARY'}
+        if self.type not in mapping:
+            raise ValueError(f"Invalid attribute type. type: '{self.type}', expected: S, N, or B")
+        return mapping[self.type]
+
+
+@dataclass
+class GlobalSecondaryIndex:
+    """Represents a GSI with support for multi-attribute composite keys."""
+
+    index_name: str
+    partition_keys: List[KeyAttribute]  # One or more partition key attributes
+    sort_keys: List[KeyAttribute] = field(default_factory=list)  # Zero or more sort key attributes
+    projection_type: str = 'ALL'  # 'ALL', 'KEYS_ONLY', 'INCLUDE'
+    non_key_attributes: List[str] = field(default_factory=list)  # For INCLUDE projection
+
+    def has_multi_partition_keys(self) -> bool:
+        """Return True if GSI has multiple partition key attributes."""
+        return len(self.partition_keys) > 1
+
+    def has_multi_sort_keys(self) -> bool:
+        """Return True if GSI has multiple sort key attributes."""
+        return len(self.sort_keys) > 1
+
+
+@dataclass
+class TableDefinition:
+    """Represents a DynamoDB table definition."""
+
+    table_name: str  # Logical name from JSON (used for CfnOutput, not hardcoded in CDK)
+    partition_key: KeyAttribute
+    sort_key: Optional[KeyAttribute] = None
+    global_secondary_indexes: Optional[List[GlobalSecondaryIndex]] = field(default_factory=list)
+    ttl_attribute: Optional[str] = None
+
+
+@dataclass
+class DataModel:
+    """Root data model containing all table definitions."""
+
+    tables: List[TableDefinition] = field(default_factory=list)
+
+    @staticmethod
+    def _validate_is_object(data, context: str) -> None:
+        """Validate that data is a dictionary object.
+
+        Args:
+            data: Data to validate
+            context: Context string for error messages
+
+        Raises:
+            ValueError: If data is not a dictionary
+        """
+        if not isinstance(data, dict):
+            raise ValueError(f'{context} must be an object')
+
+    @staticmethod
+    def _validate_string_field(data: dict, field_name: str, context: str) -> str:
+        """Validate that a field exists and is a string.
+
+        Args:
+            data: Dictionary containing the field
+            field_name: Name of the field to validate
+            context: Context string for error messages
+
+        Returns:
+            The string value
+
+        Raises:
+            ValueError: If field is missing or not a string
+        """
+        if field_name not in data:
+            raise ValueError(f'{context}.{field_name} must be a string')
+        if not isinstance(data[field_name], str):
+            raise ValueError(f'{context}.{field_name} must be a string')
+        return data[field_name]
+
+    @staticmethod
+    def _validate_array_field(data: dict, field_name: str, context: str) -> list:
+        """Validate that a field exists and is an array.
+
+        Args:
+            data: Dictionary containing the field
+            field_name: Name of the field to validate
+            context: Context string for error messages
+
+        Returns:
+            The list value
+
+        Raises:
+            ValueError: If field is missing or not a list
+        """
+        if field_name not in data:
+            raise ValueError(f'{context}.{field_name} must be an array')
+        if not isinstance(data[field_name], list):
+            raise ValueError(f'{context}.{field_name} must be an array')
+        return data[field_name]
+
+    @classmethod
+    def from_json(cls, data: dict) -> 'DataModel':
+        """Parse JSON dict into DataModel with validation.
+
+        Args:
+            data: Dictionary containing table definitions
+
+        Returns:
+            DataModel instance
+
+        Raises:
+            ValueError: If required fields are missing or invalid, with hierarchical context
+        """
+        if not isinstance(data, dict):
+            raise ValueError('Input must be a dictionary')
+
+        if 'tables' not in data:
+            raise ValueError('Configuration must contain a "tables" property')
+
+        if not isinstance(data['tables'], list):
+            raise ValueError('Configuration "tables" property must be an array')
+
+        tables = []
+        for i, table_data in enumerate(data['tables']):
+            table = cls._parse_table(table_data, table_index=i)
+            tables.append(table)
+
+        model = cls(tables=tables)
+        model.validate()
+        return model
+
+    @classmethod
+    def _parse_attribute_definitions(cls, attr_definitions: list, context: str) -> dict:
+        """Parse AttributeDefinitions and return a map of attribute names to types.
+
+        Args:
+            attr_definitions: List of attribute definition dictionaries
+            context: Context string for error messages (e.g., 'tables[0]')
+
+        Returns:
+            Dictionary mapping attribute names to types
+
+        Raises:
+            ValueError: If attribute definitions are invalid
+        """
+        attr_types = {}
+        for attr_index, attr_def in enumerate(attr_definitions):
+            attr_context = f'{context}.AttributeDefinitions[{attr_index}]'
+
+            cls._validate_is_object(attr_def, attr_context)
+            attr_name = cls._validate_string_field(attr_def, 'AttributeName', attr_context)
+
+            if 'AttributeType' not in attr_def:
+                raise ValueError(f"{attr_context}.AttributeType must be 'S', 'N', or 'B'")
+
+            attr_type = attr_def['AttributeType']
+
+            if attr_type not in ['S', 'N', 'B']:
+                raise ValueError(f"{attr_context}.AttributeType must be 'S', 'N', or 'B'")
+
+            attr_types[attr_name] = attr_type
+
+        return attr_types
+
+    @classmethod
+    def _parse_key_schema(cls, key_schema: list, attr_types: dict, context: str) -> tuple:
+        """Parse KeySchema and return partition and sort keys.
+
+        Args:
+            key_schema: List of key schema element dictionaries
+            attr_types: Map of attribute names to types
+            context: Context string for error messages (e.g., 'tables[0]')
+
+        Returns:
+            Tuple of (partition_key, sort_key) where sort_key may be None
+
+        Raises:
+            ValueError: If key schema is invalid
+        """
+        partition_key = None
+        sort_key = None
+
+        for key_index, key_element in enumerate(key_schema):
+            key_context = f'{context}.KeySchema[{key_index}]'
+
+            cls._validate_is_object(key_element, key_context)
+            attr_name = cls._validate_string_field(key_element, 'AttributeName', key_context)
+
+            if 'KeyType' not in key_element:
+                raise ValueError(f"{key_context}.KeyType must be 'HASH' or 'RANGE'")
+
+            key_type = key_element['KeyType']
+
+            if key_type not in ['HASH', 'RANGE']:
+                raise ValueError(f"{key_context}.KeyType must be 'HASH' or 'RANGE'")
+
+            if attr_name not in attr_types:
+                raise ValueError(
+                    f"{key_context}: AttributeName '{attr_name}' not found in AttributeDefinitions"
+                )
+
+            if key_type == 'HASH':
+                if partition_key is not None:
+                    raise ValueError(
+                        f'{context}.KeySchema must contain exactly one HASH key, found 2'
+                    )
+                partition_key = KeyAttribute(name=attr_name, type=attr_types[attr_name])
+            elif key_type == 'RANGE':
+                if sort_key is not None:
+                    raise ValueError(
+                        f'{context}.KeySchema must contain at most one RANGE key, found 2'
+                    )
+                sort_key = KeyAttribute(name=attr_name, type=attr_types[attr_name])
+
+        if partition_key is None:
+            raise ValueError(f'{context}.KeySchema must contain exactly one HASH key')
+
+        return partition_key, sort_key
+
+    @classmethod
+    def _parse_ttl_specification(cls, ttl_data: dict, context: str) -> Optional[str]:
+        """Parse TimeToLiveSpecification and return TTL attribute name if enabled.
+
+        Args:
+            ttl_data: TimeToLiveSpecification dictionary
+            context: Context string for error messages (e.g., 'tables[0]')
+
+        Returns:
+            TTL attribute name if enabled, None otherwise
+
+        Raises:
+            ValueError: If TTL specification is invalid
+        """
+        ttl_context = f'{context}.TimeToLiveSpecification'
+
+        cls._validate_is_object(ttl_data, ttl_context)
+
+        if 'Enabled' not in ttl_data:
+            raise ValueError(f'{ttl_context}.Enabled must be a boolean')
+
+        if not isinstance(ttl_data['Enabled'], bool):
+            raise ValueError(f'{ttl_context}.Enabled must be a boolean')
+
+        if ttl_data['Enabled']:
+            return cls._validate_string_field(ttl_data, 'AttributeName', ttl_context)
+
+        return None
+
+    @classmethod
+    def _parse_table(cls, table_data: dict, table_index: int) -> TableDefinition:
+        """Parse a single table definition from JSON.
+
+        Args:
+            table_data: Dictionary containing table definition
+            table_index: Index of the table in the tables array
+
+        Returns:
+            TableDefinition instance
+
+        Raises:
+            ValueError: If required fields are missing or invalid, with hierarchical context
+        """
+        context = f'tables[{table_index}]'
+
+        cls._validate_is_object(table_data, context)
+
+        table_name = cls._validate_string_field(table_data, 'TableName', context)
+        cls._validate_array_field(table_data, 'KeySchema', context)
+        attr_definitions = cls._validate_array_field(table_data, 'AttributeDefinitions', context)
+
+        attr_types = cls._parse_attribute_definitions(attr_definitions, context)
+
+        partition_key, sort_key = cls._parse_key_schema(
+            table_data['KeySchema'], attr_types, context
+        )
+
+        gsis = []
+        if 'GlobalSecondaryIndexes' in table_data:
+            gsi_list = cls._validate_array_field(table_data, 'GlobalSecondaryIndexes', context)
+
+            for gsi_index, gsi_data in enumerate(gsi_list):
+                gsi = cls._parse_gsi(gsi_data, attr_types, table_index, gsi_index)
+                gsis.append(gsi)
+
+        ttl_attribute = None
+        if 'TimeToLiveSpecification' in table_data:
+            ttl_attribute = cls._parse_ttl_specification(
+                table_data['TimeToLiveSpecification'], context
+            )
+
+        return TableDefinition(
+            table_name=table_name,
+            partition_key=partition_key,
+            sort_key=sort_key,
+            global_secondary_indexes=gsis,
+            ttl_attribute=ttl_attribute,
+        )
+
+    @classmethod
+    def _parse_gsi_key_schema(cls, key_schema: list, attr_types: dict, context: str) -> tuple:
+        """Parse GSI KeySchema and return partition and sort keys.
+
+        GSI KeySchema supports multiple HASH and RANGE entries (up to 4 each).
+
+        Args:
+            key_schema: List of key schema element dictionaries
+            attr_types: Map of attribute names to types
+            context: Context string for error messages (e.g., 'tables[0].GlobalSecondaryIndexes[0]')
+
+        Returns:
+            Tuple of (partition_keys, sort_keys) as lists
+
+        Raises:
+            ValueError: If key schema is invalid
+        """
+        partition_keys = []
+        sort_keys = []
+
+        for key_index, key_element in enumerate(key_schema):
+            key_context = f'{context}.KeySchema[{key_index}]'
+
+            cls._validate_is_object(key_element, key_context)
+            attr_name = cls._validate_string_field(key_element, 'AttributeName', key_context)
+
+            if 'KeyType' not in key_element:
+                raise ValueError(f"{key_context}.KeyType must be 'HASH' or 'RANGE'")
+
+            key_type = key_element['KeyType']
+
+            if key_type not in ['HASH', 'RANGE']:
+                raise ValueError(f"{key_context}.KeyType must be 'HASH' or 'RANGE'")
+
+            if attr_name not in attr_types:
+                raise ValueError(
+                    f"{key_context}: AttributeName '{attr_name}' not found in AttributeDefinitions"
+                )
+
+            if key_type == 'HASH':
+                partition_keys.append(KeyAttribute(name=attr_name, type=attr_types[attr_name]))
+            elif key_type == 'RANGE':
+                sort_keys.append(KeyAttribute(name=attr_name, type=attr_types[attr_name]))
+
+        if not partition_keys:
+            raise ValueError(f'{context}.KeySchema must contain at least one HASH key')
+
+        # Validate against AWS limits
+        if len(partition_keys) > MAX_GSI_PARTITION_KEYS:
+            raise ValueError(
+                f'{context}.KeySchema must contain at most {MAX_GSI_PARTITION_KEYS} HASH keys, found {len(partition_keys)}'
+            )
+
+        if len(sort_keys) > MAX_GSI_SORT_KEYS:
+            raise ValueError(
+                f'{context}.KeySchema must contain at most {MAX_GSI_SORT_KEYS} RANGE keys, found {len(sort_keys)}'
+            )
+
+        return partition_keys, sort_keys
+
+    @classmethod
+    def _parse_gsi_projection(cls, projection: dict, context: str) -> tuple:
+        """Parse GSI Projection configuration.
+
+        Args:
+            projection: Projection dictionary (may be empty)
+            context: Context string for error messages
+
+        Returns:
+            Tuple of (projection_type, non_key_attributes)
+
+        Raises:
+            ValueError: If projection configuration is invalid
+        """
+        projection_type = 'ALL'
+        non_key_attributes = []
+
+        if not projection:
+            return projection_type, non_key_attributes
+
+        if 'ProjectionType' in projection:
+            projection_type = projection['ProjectionType']
+            if projection_type not in ['ALL', 'KEYS_ONLY', 'INCLUDE']:
+                raise ValueError(
+                    f"{context}.Projection.ProjectionType must be 'ALL', 'KEYS_ONLY', or 'INCLUDE'"
+                )
+
+        if 'NonKeyAttributes' in projection:
+            non_key_attributes = projection['NonKeyAttributes']
+            if not isinstance(non_key_attributes, list):
+                raise ValueError(f'{context}.Projection.NonKeyAttributes must be an array')
+
+        # Validate NonKeyAttributes based on ProjectionType
+        if projection_type == 'INCLUDE':
+            if not non_key_attributes:
+                raise ValueError(
+                    f'{context}.Projection.NonKeyAttributes is required when ProjectionType is INCLUDE'
+                )
+            for i, attr in enumerate(non_key_attributes):
+                if not isinstance(attr, str):
+                    raise ValueError(
+                        f'{context}.Projection.NonKeyAttributes[{i}] must be a string'
+                    )
+                if not attr:
+                    raise ValueError(
+                        f'{context}.Projection.NonKeyAttributes[{i}] must not be empty'
+                    )
+        elif projection_type in ['ALL', 'KEYS_ONLY']:
+            if non_key_attributes:
+                raise ValueError(
+                    f'{context}.Projection.NonKeyAttributes is not allowed when ProjectionType is {projection_type}'
+                )
+
+        return projection_type, non_key_attributes
+
+    @classmethod
+    def _parse_gsi(
+        cls, gsi_data: dict, attr_types: dict, table_index: int, gsi_index: int
+    ) -> GlobalSecondaryIndex:
+        """Parse a GlobalSecondaryIndex definition.
+
+        Args:
+            gsi_data: Dictionary containing GSI definition
+            attr_types: Map of attribute names to types
+            table_index: Index of the parent table in the tables array
+            gsi_index: Index of the GSI in the GlobalSecondaryIndexes array
+
+        Returns:
+            GlobalSecondaryIndex instance
+
+        Raises:
+            ValueError: If required fields are missing or invalid, with hierarchical context
+        """
+        context = f'tables[{table_index}].GlobalSecondaryIndexes[{gsi_index}]'
+
+        cls._validate_is_object(gsi_data, context)
+        index_name = cls._validate_string_field(gsi_data, 'IndexName', context)
+        cls._validate_array_field(gsi_data, 'KeySchema', context)
+
+        partition_keys, sort_keys = cls._parse_gsi_key_schema(
+            gsi_data['KeySchema'], attr_types, context
+        )
+
+        projection_type, non_key_attributes = cls._parse_gsi_projection(
+            gsi_data.get('Projection', {}), context
+        )
+
+        return GlobalSecondaryIndex(
+            index_name=index_name,
+            partition_keys=partition_keys,
+            sort_keys=sort_keys,
+            projection_type=projection_type,
+            non_key_attributes=non_key_attributes,
+        )
+
+    def validate(self) -> None:
+        """Validate the data model structure.
+
+        Raises:
+            ValueError: With descriptive message identifying the specific failure
+        """
+        if not self.tables:
+            raise ValueError('Data model must contain at least one table')
+
+        # Check for duplicate table names
+        table_names = [table.table_name for table in self.tables]
+        duplicates = [name for name in table_names if table_names.count(name) > 1]
+        if duplicates:
+            unique_duplicates = list(set(duplicates))
+            raise ValueError(
+                f'Data model contains duplicate table names. table_names: {", ".join(unique_duplicates)}'
+            )
+
+        for table in self.tables:
+            # Check for duplicate GSI names within a table
+            if table.global_secondary_indexes:
+                gsi_names = [gsi.index_name for gsi in table.global_secondary_indexes]
+                duplicates = [name for name in gsi_names if gsi_names.count(name) > 1]
+                if duplicates:
+                    unique_duplicates = list(set(duplicates))
+                    raise ValueError(
+                        f"Table contains duplicate GSI names. table_name: '{table.table_name}', gsi_names: {', '.join(unique_duplicates)}"
+                    )
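
For orientation, the sketch below exercises `DataModel.from_json` with a `CreateTable`-style document of the shape the parsing code above expects. The table, attribute, and index names are made up for illustration; the field names and error behavior follow the validation code in `models.py`.

```python
from awslabs.dynamodb_mcp_server.cdk_generator.models import DataModel

# Hypothetical input document; the shape mirrors DynamoDB's CreateTable JSON.
data = {
    'tables': [
        {
            'TableName': 'Orders',
            'AttributeDefinitions': [
                {'AttributeName': 'PK', 'AttributeType': 'S'},
                {'AttributeName': 'SK', 'AttributeType': 'S'},
                {'AttributeName': 'GSI1PK', 'AttributeType': 'S'},
            ],
            'KeySchema': [
                {'AttributeName': 'PK', 'KeyType': 'HASH'},
                {'AttributeName': 'SK', 'KeyType': 'RANGE'},
            ],
            'GlobalSecondaryIndexes': [
                {
                    'IndexName': 'GSI1',
                    'KeySchema': [{'AttributeName': 'GSI1PK', 'KeyType': 'HASH'}],
                    'Projection': {'ProjectionType': 'KEYS_ONLY'},
                }
            ],
            'TimeToLiveSpecification': {'Enabled': True, 'AttributeName': 'expiresAt'},
        }
    ]
}

# from_json validates the whole document and raises ValueError with a
# hierarchical context (e.g. 'tables[0].KeySchema[1]') on the first failure.
model = DataModel.from_json(data)

print(model.tables[0].partition_key.to_cdk_type())  # -> 'STRING'
print(model.tables[0].global_secondary_indexes[0].projection_type)  # -> 'KEYS_ONLY'
print(model.tables[0].ttl_attribute)  # -> 'expiresAt'
```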
awslabs/dynamodb_mcp_server/cdk_generator/templates/README.md
@@ -0,0 +1,57 @@
+# Cost Performance DynamoDB CDK
+
+CDK app to provision your DynamoDB data model.
+
+This is part of the AWS DynamoDB MCP Server; for more details, see: https://github.com/awslabs/mcp/tree/main/src/dynamodb-mcp-server
+
+## Usage
+
+Note that the stack name is fixed, so deploying this app multiple times to the same AWS account and region updates the same stack rather than creating a new one. To deploy two instances of this stack at once, use another AWS account or region, or change the CDK app to use a different stack name.
+
+### Prerequisites
+
+- Data modeling resources created using the AWS DynamoDB MCP Server.
+- Node.js 22+
+- AWS account credentials. See the CDK documentation [here](https://docs.aws.amazon.com/cdk/v2/guide/configure-access.html) for details.
+
+### Bootstrap
+
+You only need to run the CDK bootstrap process once per account and region.
+
+```bash
+npx cdk bootstrap aws://${account}/${region}
+```
+
+### Deploy
+
+To deploy the stack, run:
+
+```bash
+npx cdk deploy
+```
+
+### Destroy
+
+To destroy the stack, run:
+
+```bash
+npx cdk destroy
+```
+
+### Example
+
+```bash
+export AWS_PROFILE=my-profile
+export AWS_REGION=us-west-2
+
+npx cdk bootstrap aws://123456789012/us-west-2
+
+npx cdk deploy
+
+npx cdk destroy
+```
+
+### Other Commands
+
+- `npx cdk synth` emits the synthesized CloudFormation template
+- `npx cdk diff` compares the stack defined in the app with the version deployed to your AWS account/region
awslabs/dynamodb_mcp_server/cdk_generator/templates/stack.ts.j2
@@ -0,0 +1,70 @@
+import * as cdk from 'aws-cdk-lib';
+import * as dynamodb from 'aws-cdk-lib/aws-dynamodb';
+import { Construct } from 'constructs';
+
+export class {{ stack_class_name }} extends cdk.Stack {
+  constructor(scope: Construct, id: string, props?: cdk.StackProps) {
+    super(scope, id, props);
+
+    {% for table in data_model.tables %}
+    this.create{{ table.table_name | to_pascal_case }}Table();
+    {% endfor %}
+  }
+  {% for table in data_model.tables %}
+
+  private create{{ table.table_name | to_pascal_case }}Table(): void {
+    const {{ table.table_name | to_camel_case }}Table = new dynamodb.TableV2(this, '{{ table.table_name }}', {
+      partitionKey: {
+        name: '{{ table.partition_key.name }}',
+        type: dynamodb.AttributeType.{{ table.partition_key.to_cdk_type() }},
+      },
+      {% if table.sort_key %}
+      sortKey: {
+        name: '{{ table.sort_key.name }}',
+        type: dynamodb.AttributeType.{{ table.sort_key.to_cdk_type() }},
+      },
+      {% endif %}
+      {% if table.global_secondary_indexes %}
+      globalSecondaryIndexes: [
+        {% for gsi in table.global_secondary_indexes %}
+        {
+          indexName: '{{ gsi.index_name }}',
+          partitionKeys: [
+            {% for pk in gsi.partition_keys %}
+            {
+              name: '{{ pk.name }}',
+              type: dynamodb.AttributeType.{{ pk.to_cdk_type() }},
+            },
+            {% endfor %}
+          ],
+          {% if gsi.sort_keys %}
+          sortKeys: [
+            {% for sk in gsi.sort_keys %}
+            {
+              name: '{{ sk.name }}',
+              type: dynamodb.AttributeType.{{ sk.to_cdk_type() }},
+            },
+            {% endfor %}
+          ],
+          {% endif %}
+          projectionType: dynamodb.ProjectionType.{{ gsi.projection_type }},
+          {% if gsi.non_key_attributes %}
+          nonKeyAttributes: [{{ gsi.non_key_attributes | map('tojson') | join(', ') }}],
+          {% endif %}
+        },
+        {% endfor %}
+      ],
+      {% endif %}
+      {% if table.ttl_attribute %}
+      timeToLiveAttribute: '{{ table.ttl_attribute }}',
+      {% endif %}
+      removalPolicy: cdk.RemovalPolicy.DESTROY,
+    });
+
+    new cdk.CfnOutput(this, '{{ table.table_name }}Name', {
+      value: {{ table.table_name | to_camel_case }}Table.tableName,
+      description: 'Physical table name for {{ table.table_name }}',
+    });
+  }
+  {% endfor %}
+}
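
The template relies on two custom Jinja2 filters, `to_pascal_case` and `to_camel_case`, which are registered by the generator module (`generator.py`, whose body is not shown in this diff). Below is a minimal rendering sketch; the filter implementations, environment settings, and stack class name are assumptions for illustration, not the package's actual wiring.

```python
import re

from jinja2 import Environment, FileSystemLoader


def _split_words(name: str) -> list:
    """Split on non-alphanumeric separators (assumed filter semantics)."""
    return [part for part in re.split(r'[^0-9a-zA-Z]+', name) if part]


def to_pascal_case(name: str) -> str:
    return ''.join(part[0].upper() + part[1:] for part in _split_words(name))


def to_camel_case(name: str) -> str:
    pascal = to_pascal_case(name)
    return pascal[0].lower() + pascal[1:] if pascal else pascal


# trim_blocks/lstrip_blocks keep the {% ... %} control tags from leaving
# stray blank lines and indentation in the emitted TypeScript.
env = Environment(loader=FileSystemLoader('templates'), trim_blocks=True, lstrip_blocks=True)
env.filters['to_pascal_case'] = to_pascal_case
env.filters['to_camel_case'] = to_camel_case

stack_ts = env.get_template('stack.ts.j2').render(
    stack_class_name='DynamoDBDataModelStack',  # hypothetical class name
    data_model=model,  # a DataModel instance, e.g. parsed as in the earlier sketch
)
print(stack_ts)
```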