awslabs.cdk-mcp-server 0.0.10417__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. awslabs/__init__.py +2 -0
  2. awslabs/cdk_mcp_server/__init__.py +8 -0
  3. awslabs/cdk_mcp_server/core/__init__.py +1 -0
  4. awslabs/cdk_mcp_server/core/resources.py +271 -0
  5. awslabs/cdk_mcp_server/core/search_utils.py +182 -0
  6. awslabs/cdk_mcp_server/core/server.py +74 -0
  7. awslabs/cdk_mcp_server/core/tools.py +324 -0
  8. awslabs/cdk_mcp_server/data/__init__.py +1 -0
  9. awslabs/cdk_mcp_server/data/cdk_nag_parser.py +331 -0
  10. awslabs/cdk_mcp_server/data/construct_descriptions.py +32 -0
  11. awslabs/cdk_mcp_server/data/genai_cdk_loader.py +423 -0
  12. awslabs/cdk_mcp_server/data/lambda_powertools_loader.py +48 -0
  13. awslabs/cdk_mcp_server/data/schema_generator.py +666 -0
  14. awslabs/cdk_mcp_server/data/solutions_constructs_parser.py +782 -0
  15. awslabs/cdk_mcp_server/server.py +7 -0
  16. awslabs/cdk_mcp_server/static/CDK_GENERAL_GUIDANCE.md +232 -0
  17. awslabs/cdk_mcp_server/static/CDK_NAG_GUIDANCE.md +192 -0
  18. awslabs/cdk_mcp_server/static/__init__.py +5 -0
  19. awslabs/cdk_mcp_server/static/bedrock/agent/actiongroups.md +137 -0
  20. awslabs/cdk_mcp_server/static/bedrock/agent/alias.md +39 -0
  21. awslabs/cdk_mcp_server/static/bedrock/agent/collaboration.md +91 -0
  22. awslabs/cdk_mcp_server/static/bedrock/agent/creation.md +149 -0
  23. awslabs/cdk_mcp_server/static/bedrock/agent/custom_orchestration.md +74 -0
  24. awslabs/cdk_mcp_server/static/bedrock/agent/overview.md +78 -0
  25. awslabs/cdk_mcp_server/static/bedrock/agent/prompt_override.md +70 -0
  26. awslabs/cdk_mcp_server/static/bedrock/bedrockguardrails.md +188 -0
  27. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/chunking.md +137 -0
  28. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/datasources.md +225 -0
  29. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/kendra.md +81 -0
  30. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/overview.md +116 -0
  31. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/parsing.md +36 -0
  32. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/transformation.md +30 -0
  33. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/aurora.md +185 -0
  34. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/creation.md +80 -0
  35. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/opensearch.md +56 -0
  36. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/pinecone.md +66 -0
  37. awslabs/cdk_mcp_server/static/bedrock/profiles.md +153 -0
  38. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/actiongroups.md +137 -0
  39. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/alias.md +39 -0
  40. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/collaboration.md +91 -0
  41. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/creation.md +149 -0
  42. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/custom_orchestration.md +74 -0
  43. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/overview.md +78 -0
  44. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/prompt_override.md +70 -0
  45. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/bedrockguardrails.md +188 -0
  46. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/chunking.md +137 -0
  47. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/datasources.md +225 -0
  48. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/kendra.md +81 -0
  49. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/overview.md +116 -0
  50. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/parsing.md +36 -0
  51. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/transformation.md +30 -0
  52. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/aurora.md +185 -0
  53. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/creation.md +80 -0
  54. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/opensearch.md +56 -0
  55. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/pinecone.md +66 -0
  56. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/profiles.md +153 -0
  57. awslabs/cdk_mcp_server/static/genai_cdk/opensearch-vectorindex/overview.md +135 -0
  58. awslabs/cdk_mcp_server/static/genai_cdk/opensearchserverless/overview.md +17 -0
  59. awslabs/cdk_mcp_server/static/lambda_powertools/bedrock.md +127 -0
  60. awslabs/cdk_mcp_server/static/lambda_powertools/cdk.md +99 -0
  61. awslabs/cdk_mcp_server/static/lambda_powertools/dependencies.md +45 -0
  62. awslabs/cdk_mcp_server/static/lambda_powertools/index.md +36 -0
  63. awslabs/cdk_mcp_server/static/lambda_powertools/insights.md +95 -0
  64. awslabs/cdk_mcp_server/static/lambda_powertools/logging.md +43 -0
  65. awslabs/cdk_mcp_server/static/lambda_powertools/metrics.md +93 -0
  66. awslabs/cdk_mcp_server/static/lambda_powertools/tracing.md +63 -0
  67. awslabs/cdk_mcp_server/static/opensearch-vectorindex/overview.md +135 -0
  68. awslabs/cdk_mcp_server/static/opensearchserverless/overview.md +17 -0
  69. awslabs_cdk_mcp_server-0.0.10417.dist-info/METADATA +14 -0
  70. awslabs_cdk_mcp_server-0.0.10417.dist-info/RECORD +72 -0
  71. awslabs_cdk_mcp_server-0.0.10417.dist-info/WHEEL +4 -0
  72. awslabs_cdk_mcp_server-0.0.10417.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,153 @@
+ # Bedrock Inference Profiles
+
+ ## System-Defined Inference Profiles
+
+ You can build a CrossRegionInferenceProfile using a system-defined inference profile. The inference profile will route requests to the Regions defined in the cross-region (system-defined) inference profile that you choose. You can find the system-defined inference profiles by navigating to your console (Amazon Bedrock -> Cross-region inference) or programmatically, for instance using [boto3](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock/client/list_inference_profiles.html).
+
+ Before creating a CrossRegionInferenceProfile, ensure that you have access to the models and Regions defined in the inference profile. For instance, if the system-defined inference profile "us.anthropic.claude-3-5-sonnet-20241022-v2:0" is available in your Region, inference requests will be routed to US East (N. Virginia) us-east-1, US East (Ohio) us-east-2, and US West (Oregon) us-west-2. You therefore need model access enabled in those Regions for the model `anthropic.claude-3-5-sonnet-20241022-v2:0`. You can then create the CrossRegionInferenceProfile as follows:
+
+ ### Examples
+
+ #### TypeScript
+
+ ```ts
+ const cris = bedrock.CrossRegionInferenceProfile.fromConfig({
+   geoRegion: bedrock.CrossRegionInferenceProfileRegion.US,
+   model: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0,
+ });
+ ```
+
+ #### Python
+
+ ```python
+ cris = bedrock.CrossRegionInferenceProfile.from_config(
+     geo_region=bedrock.CrossRegionInferenceProfileRegion.US,
+     model=bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0
+ )
+ ```
+
+ [View full documentation](https://github.com/awslabs/generative-ai-cdk-constructs/tree/main/src/cdk-lib/bedrock#system-defined-inference-profiles)
+
+ ## Application Inference Profile
+
+ You can create an application inference profile with one or more Regions to track usage and costs when invoking a model.
+
+ To create an application inference profile for one Region, specify a foundation model. Usage and costs for requests made to that Region with that model will be tracked.
+
+ To create an application inference profile for multiple Regions, specify a cross-region (system-defined) inference profile. The inference profile will route requests to the Regions defined in the cross-region (system-defined) inference profile that you choose. Usage and costs for requests made to the Regions in the inference profile will be tracked. You can find the system-defined inference profiles by navigating to your console (Amazon Bedrock -> Cross-region inference) or programmatically, for instance using [boto3](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock/client/list_inference_profiles.html):
+
+ ```python
+ import boto3
+
+ session = boto3.Session()
+ bedrock = session.client("bedrock", region_name="us-east-1")
+ bedrock.list_inference_profiles(typeEquals='SYSTEM_DEFINED')
+ ```
+
+ Before using application inference profiles, ensure that:
+
+ - You have appropriate IAM permissions
+ - You have access to the models and Regions defined in the inference profiles
+ - The required API permissions for inference profile-related actions are properly configured
+
+ Specifically, the role you are assuming needs permissions for the following actions in its IAM policy:
+
+ ```json
+ "Action": [
+   "bedrock:GetInferenceProfile",
+   "bedrock:ListInferenceProfiles",
+   "bedrock:DeleteInferenceProfile",
+   "bedrock:TagResource",
+   "bedrock:UntagResource",
+   "bedrock:ListTagsForResource"
+ ]
+ ```
+
+ You can restrict the policy to specific resources by scoping its `Resource` element:
+
+ ```json
+ "Resource": ["arn:aws:bedrock:*:*:application-inference-profile/*"]
+ ```
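+
+ In CDK, an equivalent statement can be attached to the role that manages the profiles. A minimal Python sketch using `aws_iam` from `aws-cdk-lib`, assuming `role` is an `iam.Role` defined elsewhere in your stack (hypothetical example):
+
+ ```python
+ from aws_cdk import aws_iam as iam
+
+ # 'role' is assumed to be an existing iam.Role in the stack (hypothetical)
+ role.add_to_policy(iam.PolicyStatement(
+     actions=[
+         "bedrock:GetInferenceProfile",
+         "bedrock:ListInferenceProfiles",
+         "bedrock:DeleteInferenceProfile",
+         "bedrock:TagResource",
+         "bedrock:UntagResource",
+         "bedrock:ListTagsForResource",
+     ],
+     resources=["arn:aws:bedrock:*:*:application-inference-profile/*"],
+ ))
+ ```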
+
+ ### Examples
+
+ #### TypeScript
+
+ ```ts
+ // Create an application inference profile for one Region
+ // You can use the 'bedrock.BedrockFoundationModel' or pass the arn as a string
+ const appInfProfile1 = new ApplicationInferenceProfile(this, 'myapplicationprofile', {
+   inferenceProfileName: 'claude 3 sonnet v1',
+   modelSource: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_SONNET_V1_0,
+   tags: [{ key: 'test', value: 'test' }],
+ });
+
+ // To create an application inference profile across Regions, specify the cross-region inference profile
+ const cris = bedrock.CrossRegionInferenceProfile.fromConfig({
+   geoRegion: bedrock.CrossRegionInferenceProfileRegion.US,
+   model: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0,
+ });
+
+ const appInfProfile2 = new ApplicationInferenceProfile(this, 'myapplicationprofile2', {
+   inferenceProfileName: 'claude 35 sonnet v2',
+   modelSource: cris,
+ });
+
+ // Import a Cfn L1 construct created application inference profile
+ const cfnapp = new CfnApplicationInferenceProfile(this, 'mytestaip3', {
+   inferenceProfileName: 'mytest',
+   modelSource: {
+     copyFrom: 'arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0',
+   },
+ });
+
+ const appInfProfile3 = bedrock.ApplicationInferenceProfile.fromCfnApplicationInferenceProfile(cfnapp);
+
+ // Import an inference profile through attributes
+ const appInfProfile4 = bedrock.ApplicationInferenceProfile.fromApplicationInferenceProfileAttributes(this, 'TestAIP', {
+   inferenceProfileArn: 'arn:aws:bedrock:us-east-1:XXXXX:application-inference-profile/ID',
+   inferenceProfileIdentifier: 'arn:aws:bedrock:us-east-1:XXXXXXX:application-inference-profile/ID',
+ });
+ ```
+
+ #### Python
+
+ ```python
+ # Create an application inference profile for one Region
+ # You can use the 'bedrock.BedrockFoundationModel' or pass the arn as a string
+ appInfProfile1 = bedrock.ApplicationInferenceProfile(self, 'myapplicationprofile',
+     inference_profile_name='claude 3 sonnet v1',
+     model_source=bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_SONNET_V1_0,
+     tags=[CfnTag(
+         key="key",
+         value="value"
+     )]
+ )
+
+ # To create an application inference profile across Regions, specify the cross-region inference profile
+ cris = bedrock.CrossRegionInferenceProfile.from_config(
+     geo_region=bedrock.CrossRegionInferenceProfileRegion.US,
+     model=bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0
+ )
+
+ appInfProfile2 = bedrock.ApplicationInferenceProfile(self, 'myapplicationprofile2',
+     inference_profile_name='claude 35 sonnet v2',
+     model_source=cris
+ )
+
+ # Import an inference profile through attributes
+ appInfProfile3 = bedrock.ApplicationInferenceProfile.from_application_inference_profile_attributes(self, 'TestAIP',
+     inference_profile_arn='arn:aws:bedrock:us-east-1:XXXXX:application-inference-profile/ID',
+     inference_profile_identifier='arn:aws:bedrock:us-east-1:XXXXXXX:application-inference-profile/ID',
+ )
+
+ # Import a Cfn L1 construct created application inference profile
+ cfnaip = CfnApplicationInferenceProfile(self, 'mytestaip4',
+     inference_profile_name='mytest',
+     model_source=CfnApplicationInferenceProfile.InferenceProfileModelSourceProperty(
+         copy_from='arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0'
+     ),
+ )
+
+ appInfProfile4 = bedrock.ApplicationInferenceProfile.from_cfn_application_inference_profile(cfnaip)
+ ```
+
+ [View full documentation](https://github.com/awslabs/generative-ai-cdk-constructs/tree/main/src/cdk-lib/bedrock#application-inference-profile)
@@ -0,0 +1,135 @@
+ # Amazon OpenSearch Vector Index Construct Library
+
+ ## Table of contents
+
+ - [Amazon OpenSearch Vector Index Construct Library](#amazon-opensearch-vector-index-construct-library)
+   - [Table of contents](#table-of-contents)
+   - [API](#api)
+   - [Vector Index](#vector-index)
+   - [Example](#example)
+     - [TypeScript](#typescript)
+     - [Python](#python)
+   - [Default values](#default-values)
+
+ ## API
+
+ See the [API documentation](../../../apidocs/namespaces/opensearch_vectorindex/README.md).
+
+ ## Vector Index
+
+ The `VectorIndex` resource connects to OpenSearch and creates an index suitable for use with Amazon Bedrock Knowledge Bases.
+
+ ## Example
+
+ ### TypeScript
+
+ ```ts
+ import {
+   opensearchserverless,
+   opensearch_vectorindex,
+ } from '@cdklabs/generative-ai-cdk-constructs';
+
+ const vectorStore = new opensearchserverless.VectorCollection(
+   this,
+   'VectorCollection'
+ );
+
+ new opensearch_vectorindex.VectorIndex(this, 'VectorIndex', {
+   collection: vectorStore,
+   indexName: 'bedrock-knowledge-base-default-index',
+   vectorField: 'bedrock-knowledge-base-default-vector',
+   vectorDimensions: 1536,
+   precision: 'float',
+   distanceType: 'l2',
+   mappings: [
+     {
+       mappingField: 'AMAZON_BEDROCK_TEXT_CHUNK',
+       dataType: 'text',
+       filterable: true,
+     },
+     {
+       mappingField: 'AMAZON_BEDROCK_METADATA',
+       dataType: 'text',
+       filterable: false,
+     },
+   ],
+   analyzer: {
+     characterFilters: [opensearchserverless.CharacterFilterType.ICU_NORMALIZER],
+     tokenizer: opensearchserverless.TokenizerType.KUROMOJI_TOKENIZER,
+     tokenFilters: [
+       opensearchserverless.TokenFilterType.KUROMOJI_BASEFORM,
+       opensearchserverless.TokenFilterType.JA_STOP,
+     ],
+   },
+ });
+ ```
+
+ ### Python
+
+ ```python
+ from cdklabs.generative_ai_cdk_constructs import (
+     opensearchserverless,
+     opensearch_vectorindex,
+ )
+
+ vectorCollection = opensearchserverless.VectorCollection(self, "VectorCollection")
+
+ vectorIndex = opensearch_vectorindex.VectorIndex(self, "VectorIndex",
+     vector_dimensions=1536,
+     collection=vectorCollection,
+     index_name='bedrock-knowledge-base-default-index',
+     vector_field='bedrock-knowledge-base-default-vector',
+     precision='float',
+     distance_type='l2',
+     mappings=[
+         opensearch_vectorindex.MetadataManagementFieldProps(
+             mapping_field='AMAZON_BEDROCK_TEXT_CHUNK',
+             data_type='text',
+             filterable=True
+         ),
+         opensearch_vectorindex.MetadataManagementFieldProps(
+             mapping_field='AMAZON_BEDROCK_METADATA',
+             data_type='text',
+             filterable=False
+         )
+     ],
+     analyzer=opensearchserverless.AnalyzerProps(
+         character_filters=[opensearchserverless.CharacterFilterType.ICU_NORMALIZER],
+         tokenizer=opensearchserverless.TokenizerType.KUROMOJI_TOKENIZER,
+         token_filters=[
+             opensearchserverless.TokenFilterType.KUROMOJI_BASEFORM,
+             opensearchserverless.TokenFilterType.JA_STOP,
+         ],
+     )
+ )
+ ```
+
+ ## Default values
+
+ Behind the scenes, the custom resource creates a k-NN vector field in the OpenSearch index, allowing you to perform different kinds of k-NN search. The `knn_vector` field is highly configurable and can serve many different k-NN workloads. It is created as follows:
+
+ Python
+
+ ```py
+ "properties": {
+     vector_field: {
+         "type": "knn_vector",
+         "dimension": dimensions,
+         "data_type": precision,
+         "method": {
+             "engine": "faiss",
+             "space_type": distance_type,
+             "name": "hnsw",
+             "parameters": {},
+         },
+     },
+     "id": {
+         "type": "text",
+         "fields": {"keyword": {"type": "keyword", "ignore_above": 256}},
+     },
+ },
+ ```
+
+ Users can currently configure the `vector_field`, `dimension`, `data_type`, and `distance_type` fields through the construct interface.
+
+ For details on the different settings, refer to the [k-NN plugin documentation](https://opensearch.org/docs/latest/search-plugins/knn/knn-index/).
@@ -0,0 +1,17 @@
+ # Amazon OpenSearch Serverless Construct Library
+
+ ## Table of contents
+
+ - [API](#api)
+ - [Vector Collection](#vector-collection)
+
+ ## API
+
+ See the [API documentation](../../../apidocs/namespaces/opensearchserverless/README.md).
+
+ ## Vector Collection
+
+ This resource creates an Amazon OpenSearch Serverless collection configured for `VECTORSEARCH`. It creates default encryption, network, and data policies for use with Amazon Bedrock Knowledge Bases. For encryption, it uses the default AWS owned KMS key. It allows network connections from the public internet, but access is restricted to specific IAM principals.
+
+ ### Granting Data Access
+
+ The `grantDataAccess` method grants the specified role access to read and write the data in the collection.
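+
+ For example, in Python the method is exposed (assuming the usual jsii snake_case mapping) as `grant_data_access`. A minimal sketch with a hypothetical Lambda execution role; the exact grantee type is an assumption based on the description above:
+
+ ```python
+ from aws_cdk import aws_iam as iam
+ from cdklabs.generative_ai_cdk_constructs import opensearchserverless
+
+ vector_store = opensearchserverless.VectorCollection(self, "VectorCollection")
+
+ # Hypothetical role that will read and write the collection data
+ reader_role = iam.Role(self, "CollectionAccessRole",
+     assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"))
+
+ # Grant the role data-plane access to the collection
+ vector_store.grant_data_access(reader_role)
+ ```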
@@ -0,0 +1,127 @@
+ # Bedrock Agent Integration
+
+ Use Lambda Powertools with Bedrock Agent actions:
+
+ ```python
+ from typing import List, Optional
+ from aws_lambda_powertools import Logger
+ from aws_lambda_powertools.event_handler import BedrockAgentResolver
+ from aws_lambda_powertools.event_handler.openapi.params import Query
+ from pydantic import BaseModel, Field
+
+ # Initialize Powertools
+ logger = Logger(service="agent-actions")
+ app = BedrockAgentResolver()
+
+ # Define request/response models with type hints
+ class Product(BaseModel):
+     product_id: str = Field(description="Unique product identifier")
+     name: str = Field(description="Product name")
+     price: float = Field(description="Product price in USD")
+
+ @app.get("/products", description="List all products")
+ def list_products(
+     category: Optional[str] = Query(None, description="Filter by category")
+ ) -> List[Product]:
+     """Get a list of products, optionally filtered by category"""
+     logger.info("Listing products", extra={"category": category})
+
+     # Your business logic here
+     products = get_products_from_database(category)
+
+     return products
+
+ @logger.inject_lambda_context
+ def lambda_handler(event, context):
+     """Main Lambda handler for Bedrock Agent actions"""
+     return app.resolve(event, context)
+ ```
+
+ ## Key Benefits
+
+ - **Type Safety**: Pydantic models ensure type safety and validation
+ - **OpenAPI Schema Generation**: Automatically generates OpenAPI schemas for Bedrock Agents
+ - **Structured Logging**: Integrates with Lambda Powertools logging
+ - **Parameter Validation**: Automatically validates request parameters
+ - **Documentation**: Generates documentation for your API
+
+ ## Generating OpenAPI Schema
+
+ To generate a Bedrock-compatible OpenAPI schema:
+
+ ```python
+ # Generate schema from a file
+ result = await use_mcp_tool(
+     server_name="awslabs.cdk-mcp-server",
+     tool_name="GenerateBedrockAgentSchemaFromFile",
+     arguments={
+         "lambda_code_path": "/path/to/your/agent_actions.py",
+         "output_path": "/path/to/output/schema.json"
+     }
+ )
+ ```
+
+ ## Common Schema Issues
+
+ - **OpenAPI version**: Must be exactly 3.0.0
+ - **operationId**: Each operation needs a unique operationId
+ - **Response schemas**: All responses must have properly defined schemas
+ - **Parameter descriptions**: All parameters should have descriptions
+
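+ A quick way to sanity-check a generated schema against these requirements before attaching it to an agent (a minimal sketch; the file path is assumed to match the `output_path` used above):
+
+ ```python
+ import json
+
+ # Path assumed to match the output_path passed to GenerateBedrockAgentSchemaFromFile
+ with open("/path/to/output/schema.json") as f:
+     schema = json.load(f)
+
+ assert schema.get("openapi") == "3.0.0", "Bedrock Agents require OpenAPI 3.0.0"
+
+ HTTP_METHODS = {"get", "put", "post", "delete", "options", "head", "patch", "trace"}
+ for route, path_item in schema.get("paths", {}).items():
+     for method, operation in path_item.items():
+         if method.lower() not in HTTP_METHODS:
+             continue  # skip path-level keys such as 'parameters'
+         assert "operationId" in operation, f"{method.upper()} {route} is missing an operationId"
+         assert operation.get("responses"), f"{method.upper()} {route} has no response schemas"
+ ```
+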
+ ## Best Practices
+
+ 1. **Use Pydantic models**: Define request and response models with Pydantic
+ 2. **Add descriptions**: Add descriptions to all fields and parameters
+ 3. **Use type hints**: Specify return types for all route handlers
+ 4. **Handle errors gracefully**: Return appropriate error responses
+ 5. **Log with context**: Use structured logging with business context
+ 6. **Validate inputs**: Use the validation features to ensure valid inputs
+
+ ## CDK Integration
+
+ ```typescript
+ import { bedrock } from '@cdklabs/generative-ai-cdk-constructs';
+ import { PythonFunction } from '@aws-cdk/aws-lambda-python-alpha';
+ import { Runtime, Tracing } from 'aws-cdk-lib/aws-lambda';
+ import * as path from 'path';
+
+ // Create Lambda function for Bedrock Agent actions
+ const actionFunction = new PythonFunction(this, 'AgentActionFunction', {
+   entry: path.join(__dirname, '../src/agent_actions'),
+   runtime: Runtime.PYTHON_3_13,
+   tracing: Tracing.ACTIVE,
+   environment: {
+     POWERTOOLS_SERVICE_NAME: "agent-actions",
+     LOG_LEVEL: "INFO",
+   },
+ });
+
+ // Create a Bedrock Agent
+ const agent = new bedrock.Agent(this, 'Agent', {
+   name: 'PowertoolsAgent',
+   foundationModel: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_HAIKU_V1_0,
+   shouldPrepareAgent: true,
+   userInputEnabled: true,
+   instruction: 'You are a helpful assistant that can perform product-related actions.',
+   description: 'Agent for product management',
+ });
+
+ // Add action group to the agent
+ agent.addActionGroup(
+   new bedrock.AgentActionGroup({
+     name: 'product-actions',
+     description: 'Actions for managing products',
+     executor: bedrock.ActionGroupExecutor.fromlambdaFunction(actionFunction),
+     apiSchema: bedrock.ApiSchema.fromAsset(
+       path.join(__dirname, '../schema/product_actions.json')
+     ),
+   })
+ );
+
+ // Create agent alias for deployment
+ const agentAlias = new bedrock.AgentAlias(this, 'AgentAlias', {
+   aliasName: 'latest',
+   agent: agent,
+   description: 'Latest agent alias',
+ });
+ ```
@@ -0,0 +1,99 @@
+ # CDK Integration
+
+ Integrate Lambda Powertools with CDK:
+
+ > **IMPORTANT**: When using Tracer functionality with CDK, ensure your dependency management includes the tracer extras. For Python, your package specification should use `aws-lambda-powertools[tracer]` or `aws-lambda-powertools[all]` rather than just `aws-lambda-powertools`.
+
+ ```typescript
+ import * as path from 'path';
+ import { PythonFunction } from '@aws-cdk/aws-lambda-python-alpha';
+ import { Runtime, Tracing } from 'aws-cdk-lib/aws-lambda';
+ import { RetentionDays } from 'aws-cdk-lib/aws-logs';
+
+ // Create Lambda function with Powertools
+ const paymentFunction = new PythonFunction(this, 'PaymentFunction', {
+   entry: path.join(__dirname, '../src/payment_function'), // Directory containing requirements.txt
+   runtime: Runtime.PYTHON_3_13, // Always use the latest available runtime
+
+   // Enable X-Ray tracing
+   tracing: Tracing.ACTIVE,
+
+   // Configure Powertools environment variables
+   environment: {
+     POWERTOOLS_SERVICE_NAME: "payment-service",
+     POWERTOOLS_METRICS_NAMESPACE: "PaymentService",
+     LOG_LEVEL: "INFO",
+     POWERTOOLS_LOGGER_LOG_EVENT: "true", // Log event for debugging
+   },
+
+   // Set appropriate log retention
+   logRetention: RetentionDays.ONE_WEEK,
+ });
+ ```
+
+ ## Best Practices
+
+ - **Always use language-specific function constructs** instead of the generic Function construct
+ - **Enable X-Ray tracing** by setting `tracing: Tracing.ACTIVE`
+ - **Configure Powertools environment variables** for consistent naming
+ - **Set appropriate log retention** to manage CloudWatch Logs costs
+ - **Ensure requirements.txt includes the correct extras** (e.g., `aws-lambda-powertools[tracer]`)
+
+ ## Language-Specific Function Constructs
+
+ When implementing Lambda functions with CDK, it's recommended to use language-specific constructs instead of the generic Function construct:
+
+ ### PythonFunction Benefits
+
+ - **Automatic Dependency Management**: Bundles Python dependencies from requirements.txt without manual packaging
+ - **Proper Python Runtime Configuration**: Sets up the correct Python runtime environment with appropriate file permissions
+ - **Simplified Asset Bundling**: Handles asset bundling with appropriate exclusions for Python-specific files
+ - **Poetry/Pipenv Support**: Works with modern Python dependency management tools
+ - **Layer Management**: Simplifies the creation and attachment of Lambda layers
+
+ ### NodejsFunction Benefits
+
+ - **TypeScript Support**: Automatically transpiles TypeScript to JavaScript
+ - **Dependency Bundling**: Uses esbuild to bundle only required dependencies for smaller packages
+ - **Source Map Support**: Maintains source maps for easier debugging
+ - **Minification Options**: Provides options for code minification
+ - **Tree Shaking**: Eliminates unused code from the final bundle
+
+ ## Example requirements.txt
+
+ For a Python Lambda function using Powertools with tracing:
+
+ ```
+ aws-lambda-powertools[tracer]>=2.0.0
+ ```
+
+ Or for all Powertools features:
+
+ ```
+ aws-lambda-powertools[all]>=2.0.0
+ ```
+
+ ## Combining with Lambda Insights
+
+ For comprehensive observability, combine Lambda Powertools with Lambda Insights:
+
+ ```typescript
+ import { LambdaInsightsVersion } from 'aws-cdk-lib/aws-lambda';
+
+ const myFunction = new PythonFunction(this, 'MyFunction', {
+   // ... other configuration
+
+   // Enable Lambda Insights
+   insightsVersion: LambdaInsightsVersion.VERSION_1_0_119_0,
+
+   // Configure Powertools
+   environment: {
+     POWERTOOLS_SERVICE_NAME: "my-service",
+     POWERTOOLS_METRICS_NAMESPACE: "MyService",
+     // ... other environment variables
+   },
+ });
+ ```
+
+ This approach provides both system-level metrics (Lambda Insights) and business-level metrics (Powertools) for complete observability. See the [Lambda Insights](lambda-powertools://insights) section for more details.
@@ -0,0 +1,45 @@
+ # Dependencies
+
+ When using Lambda Powertools features, use the appropriate extras syntax to ensure all required dependencies are included:
+
+ ```bash
+ # For tracing only
+ pip install "aws-lambda-powertools[tracer]"
+
+ # For validation and parser features
+ pip install "aws-lambda-powertools[validation,parser]"
+
+ # For all features
+ pip install "aws-lambda-powertools[all]"
+ ```
+
+ This approach ensures that all required dependencies (like aws_xray_sdk for tracing) are automatically included without having to specify them individually.
+
+ ## Why Extras Are Important
+
+ Since version 2.0.0 of Lambda Powertools, the package has been optimized to reduce its size by making certain dependencies optional. This means:
+
+ 1. The base package (`aws-lambda-powertools`) does not include all dependencies
+ 2. Features like Tracer require additional dependencies (e.g., `aws_xray_sdk`; see the sketch after this list)
+ 3. Using extras ensures you get the right dependencies for the features you use
+
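+ With the extra installed, the X-Ray-backed Tracer can be imported and used directly. A minimal sketch (the service name is illustrative):
+
+ ```python
+ # Requires: pip install "aws-lambda-powertools[tracer]"
+ from aws_lambda_powertools import Tracer
+
+ tracer = Tracer(service="payment-service")  # illustrative service name
+
+ @tracer.capture_lambda_handler
+ def lambda_handler(event, context):
+     # The handler invocation is recorded as a segment in AWS X-Ray
+     return {"statusCode": 200}
+ ```
+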
+ ## Available Extras
+
+ | Extra | Description | Key Dependencies |
+ |-------|-------------|-----------------|
+ | `tracer` | For X-Ray tracing | `aws_xray_sdk` |
+ | `validation` | For event validation | `pydantic` |
+ | `parser` | For event parsing | `pydantic` |
+ | `all` | All features | All dependencies |
+
+ ## In requirements.txt
+
+ For CDK deployments, make sure your dependency management system uses these extras specifications rather than just the base package:
+
+ ```
+ # For specific features
+ aws-lambda-powertools[tracer]>=2.0.0
+
+ # OR for all features
+ aws-lambda-powertools[all]>=2.0.0
+ ```
@@ -0,0 +1,36 @@
+ # AWS Lambda Powertools Guidance
+
+ This guide provides essential patterns for implementing AWS Lambda Powertools to enhance your serverless applications with observability and operational excellence.
+
+ ## Core Capabilities
+
+ AWS Lambda Powertools provides three core capabilities to improve your serverless applications (combined in the sketch after this list):
+
+ 1. **Structured Logging**: Transform text logs into JSON objects with consistent fields for better filtering and analysis
+ 2. **Tracing**: Gain visibility into request flows across distributed services with AWS X-Ray integration
+ 3. **Metrics**: Collect quantitative data about your application's behavior with CloudWatch Metrics
+
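+ A minimal sketch combining all three (service and namespace names are illustrative; Tracer requires the `[tracer]` extra described in the Dependencies section):
+
+ ```python
+ from aws_lambda_powertools import Logger, Metrics, Tracer
+ from aws_lambda_powertools.metrics import MetricUnit
+
+ logger = Logger(service="payment-service")
+ tracer = Tracer(service="payment-service")
+ metrics = Metrics(namespace="PaymentService", service="payment-service")
+
+ @tracer.capture_lambda_handler
+ @logger.inject_lambda_context
+ @metrics.log_metrics
+ def lambda_handler(event, context):
+     # Structured log entry with business context
+     logger.info("Processing request", extra={"order_id": event.get("order_id")})
+     # Custom business metric published to CloudWatch
+     metrics.add_metric(name="RequestProcessed", unit=MetricUnit.Count, value=1)
+     return {"statusCode": 200}
+ ```
+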
+ ## Table of Contents
+
+ - [Structured Logging](lambda-powertools://logging): Transform text logs into JSON objects with consistent fields
+ - [Tracing](lambda-powertools://tracing): Gain visibility into request flows across distributed services
+ - [Metrics](lambda-powertools://metrics): Collect quantitative data about your application's behavior
+ - [CDK Integration](lambda-powertools://cdk): Integrate Lambda Powertools with AWS CDK
+ - [Dependencies](lambda-powertools://dependencies): Manage Lambda Powertools dependencies correctly
+ - [Lambda Insights](lambda-powertools://insights): Enhanced monitoring with CloudWatch Lambda Insights
+ - [Bedrock Agent Integration](lambda-powertools://bedrock): Use Lambda Powertools with Amazon Bedrock Agents
+
+ ## Getting Started
+
+ To get started with Lambda Powertools, install the package with the appropriate extras for your needs:
+
+ ```bash
+ # For all features
+ pip install "aws-lambda-powertools[all]"
+
+ # For specific features
+ pip install "aws-lambda-powertools[tracer]"      # For tracing only
+ pip install "aws-lambda-powertools[validation]"  # For validation only
+ ```
+
+ Then follow the guidance in the specific sections for each capability.