awslabs.cdk-mcp-server 0.0.10417__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. awslabs/__init__.py +2 -0
  2. awslabs/cdk_mcp_server/__init__.py +8 -0
  3. awslabs/cdk_mcp_server/core/__init__.py +1 -0
  4. awslabs/cdk_mcp_server/core/resources.py +271 -0
  5. awslabs/cdk_mcp_server/core/search_utils.py +182 -0
  6. awslabs/cdk_mcp_server/core/server.py +74 -0
  7. awslabs/cdk_mcp_server/core/tools.py +324 -0
  8. awslabs/cdk_mcp_server/data/__init__.py +1 -0
  9. awslabs/cdk_mcp_server/data/cdk_nag_parser.py +331 -0
  10. awslabs/cdk_mcp_server/data/construct_descriptions.py +32 -0
  11. awslabs/cdk_mcp_server/data/genai_cdk_loader.py +423 -0
  12. awslabs/cdk_mcp_server/data/lambda_powertools_loader.py +48 -0
  13. awslabs/cdk_mcp_server/data/schema_generator.py +666 -0
  14. awslabs/cdk_mcp_server/data/solutions_constructs_parser.py +782 -0
  15. awslabs/cdk_mcp_server/server.py +7 -0
  16. awslabs/cdk_mcp_server/static/CDK_GENERAL_GUIDANCE.md +232 -0
  17. awslabs/cdk_mcp_server/static/CDK_NAG_GUIDANCE.md +192 -0
  18. awslabs/cdk_mcp_server/static/__init__.py +5 -0
  19. awslabs/cdk_mcp_server/static/bedrock/agent/actiongroups.md +137 -0
  20. awslabs/cdk_mcp_server/static/bedrock/agent/alias.md +39 -0
  21. awslabs/cdk_mcp_server/static/bedrock/agent/collaboration.md +91 -0
  22. awslabs/cdk_mcp_server/static/bedrock/agent/creation.md +149 -0
  23. awslabs/cdk_mcp_server/static/bedrock/agent/custom_orchestration.md +74 -0
  24. awslabs/cdk_mcp_server/static/bedrock/agent/overview.md +78 -0
  25. awslabs/cdk_mcp_server/static/bedrock/agent/prompt_override.md +70 -0
  26. awslabs/cdk_mcp_server/static/bedrock/bedrockguardrails.md +188 -0
  27. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/chunking.md +137 -0
  28. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/datasources.md +225 -0
  29. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/kendra.md +81 -0
  30. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/overview.md +116 -0
  31. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/parsing.md +36 -0
  32. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/transformation.md +30 -0
  33. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/aurora.md +185 -0
  34. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/creation.md +80 -0
  35. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/opensearch.md +56 -0
  36. awslabs/cdk_mcp_server/static/bedrock/knowledgebases/vector/pinecone.md +66 -0
  37. awslabs/cdk_mcp_server/static/bedrock/profiles.md +153 -0
  38. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/actiongroups.md +137 -0
  39. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/alias.md +39 -0
  40. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/collaboration.md +91 -0
  41. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/creation.md +149 -0
  42. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/custom_orchestration.md +74 -0
  43. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/overview.md +78 -0
  44. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/agent/prompt_override.md +70 -0
  45. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/bedrockguardrails.md +188 -0
  46. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/chunking.md +137 -0
  47. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/datasources.md +225 -0
  48. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/kendra.md +81 -0
  49. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/overview.md +116 -0
  50. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/parsing.md +36 -0
  51. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/transformation.md +30 -0
  52. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/aurora.md +185 -0
  53. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/creation.md +80 -0
  54. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/opensearch.md +56 -0
  55. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/knowledgebases/vector/pinecone.md +66 -0
  56. awslabs/cdk_mcp_server/static/genai_cdk/bedrock/profiles.md +153 -0
  57. awslabs/cdk_mcp_server/static/genai_cdk/opensearch-vectorindex/overview.md +135 -0
  58. awslabs/cdk_mcp_server/static/genai_cdk/opensearchserverless/overview.md +17 -0
  59. awslabs/cdk_mcp_server/static/lambda_powertools/bedrock.md +127 -0
  60. awslabs/cdk_mcp_server/static/lambda_powertools/cdk.md +99 -0
  61. awslabs/cdk_mcp_server/static/lambda_powertools/dependencies.md +45 -0
  62. awslabs/cdk_mcp_server/static/lambda_powertools/index.md +36 -0
  63. awslabs/cdk_mcp_server/static/lambda_powertools/insights.md +95 -0
  64. awslabs/cdk_mcp_server/static/lambda_powertools/logging.md +43 -0
  65. awslabs/cdk_mcp_server/static/lambda_powertools/metrics.md +93 -0
  66. awslabs/cdk_mcp_server/static/lambda_powertools/tracing.md +63 -0
  67. awslabs/cdk_mcp_server/static/opensearch-vectorindex/overview.md +135 -0
  68. awslabs/cdk_mcp_server/static/opensearchserverless/overview.md +17 -0
  69. awslabs_cdk_mcp_server-0.0.10417.dist-info/METADATA +14 -0
  70. awslabs_cdk_mcp_server-0.0.10417.dist-info/RECORD +72 -0
  71. awslabs_cdk_mcp_server-0.0.10417.dist-info/WHEEL +4 -0
  72. awslabs_cdk_mcp_server-0.0.10417.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,185 @@
1
+ #### Example of `Amazon RDS Aurora PostgreSQL`:
2
+
3
+ ##### TypeScript
4
+
5
+ ```ts
6
+ import * as s3 from 'aws-cdk-lib/aws-s3';
7
+ import { amazonaurora, bedrock } from '@cdklabs/generative-ai-cdk-constructs';
8
+
9
+ // Dimension of your vector embedding
10
+ const embeddingsModelVectorDimension = 1024;
11
+ const auroraDb = new amazonaurora.AmazonAuroraVectorStore(stack, 'AuroraDefaultVectorStore', {
12
+ embeddingsModelVectorDimension: embeddingsModelVectorDimension,
13
+ });
14
+
15
+ const kb = new bedrock.VectorKnowledgeBase(this, 'KnowledgeBase', {
16
+ vectorStore: auroraDb,
17
+ embeddingsModel: bedrock.BedrockFoundationModel.TITAN_EMBED_TEXT_V1,
18
+ instruction: 'Use this knowledge base to answer questions about books. ' + 'It contains the full text of novels.',
19
+ });
20
+
21
+ const docBucket = new s3.Bucket(this, 'DocBucket');
22
+
23
+ new bedrock.S3DataSource(this, 'DataSource', {
24
+ bucket: docBucket,
25
+ knowledgeBase: kb,
26
+ dataSourceName: 'books',
27
+ chunkingStrategy: bedrock.ChunkingStrategy.FIXED_SIZE,
28
+ });
29
+ ```
30
+
31
+ ##### Python
32
+
33
+ ```python
34
+
35
+ from aws_cdk import (
36
+ aws_s3 as s3,
37
+ aws_rds as rds,
38
+ aws_ec2 as ec2,
39
+ Stack,
40
+ ArnFormat
41
+ )
42
+ from cdklabs.generative_ai_cdk_constructs import (
43
+ bedrock,
44
+ amazonaurora,
45
+ )
46
+
47
+ # Dimension of your vector embedding
48
+ embeddings_model_vector_dimension = 1024
49
+ aurora_db = amazonaurora.AmazonAuroraVectorStore(self, 'AuroraDefaultVectorStore',
50
+ embeddings_model_vector_dimension=embeddings_model_vector_dimension
51
+ )
52
+
53
+ kb = bedrock.VectorKnowledgeBase(self, 'KnowledgeBase',
54
+ vector_store= aurora_db,
55
+ embeddings_model= bedrock.BedrockFoundationModel.TITAN_EMBED_TEXT_V1,
56
+ instruction= 'Use this knowledge base to answer questions about books. ' +
57
+ 'It contains the full text of novels.'
58
+ )
59
+
60
+ docBucket = s3.Bucket(self, 'DocBucket')
61
+
62
+ bedrock.S3DataSource(self, 'DataSource',
63
+ bucket= docBucket,
64
+ knowledge_base=kb,
65
+ data_source_name='books',
66
+ chunking_strategy= bedrock.ChunkingStrategy.FIXED_SIZE,
67
+ )
68
+
69
+ ```
70
+
71
+ #### Example of importing existing `Amazon RDS Aurora PostgreSQL` using `fromExistingAuroraVectorStore()` method.
72
+
73
+ **Note** - you need to provide `clusterIdentifier`, `databaseName`, `vpc`, `secret` and `auroraSecurityGroupId` used in deployment of your existing RDS Amazon Aurora DB, as well as `embeddingsModel` that you want to be used by a Knowledge Base for chunking:
74
+
75
+ ##### TypeScript
76
+
77
+ ```ts
78
+ import * as s3 from "aws-cdk-lib/aws-s3";
79
+ import { amazonaurora, bedrock } from '@cdklabs/generative-ai-cdk-constructs';
80
+
81
+ const auroraDb = amazonaurora.AmazonAuroraVectorStore.fromExistingAuroraVectorStore(stack, 'ExistingAuroraVectorStore', {
82
+ clusterIdentifier: 'aurora-serverless-vector-cluster',
83
+ databaseName: 'bedrock_vector_db',
84
+ schemaName: 'bedrock_integration',
85
+ tableName: 'bedrock_kb',
86
+ vectorField: 'embedding',
87
+ textField: 'chunks',
88
+ metadataField: 'metadata',
89
+ primaryKeyField: 'id',
90
+ embeddingsModel: bedrock.BedrockFoundationModel.COHERE_EMBED_ENGLISH_V3,
91
+ vpc: cdk.aws_ec2.Vpc.fromLookup(stack, 'VPC', {
92
+ vpcId: 'vpc-0c1a234567ee8bc90',
93
+ }),
94
+ auroraSecurityGroupId: 'sg-012ef345678c98a76',
95
+ secret: cdk.aws_rds.DatabaseSecret.fromSecretCompleteArn(
96
+ stack,
97
+ 'Secret',
98
+ cdk.Stack.of(stack).formatArn({
99
+ service: 'secretsmanager',
100
+ resource: 'secret',
101
+ resourceName: 'rds-db-credentials/cluster-1234567890',
102
+ region: cdk.Stack.of(stack).region,
103
+ account: cdk.Stack.of(stack).account,
104
+ arnFormat: cdk.ArnFormat.COLON_RESOURCE_NAME,
105
+ }),
106
+ ),
107
+ });
108
+
109
+ const kb = new bedrock.VectorKnowledgeBase(this, "KnowledgeBase", {
110
+ vectorStore: auroraDb,
111
+ embeddingsModel: bedrock.BedrockFoundationModel.COHERE_EMBED_ENGLISH_V3,
112
+ instruction:
113
+ "Use this knowledge base to answer questions about books. " +
114
+ "It contains the full text of novels.",
115
+ });
116
+
117
+ const docBucket = new s3.Bucket(this, "DocBucket");
118
+
119
+ new bedrock.S3DataSource(this, "DataSource", {
120
+ bucket: docBucket,
121
+ knowledgeBase: kb,
122
+ dataSourceName: "books",
123
+ chunkingStrategy: bedrock.ChunkingStrategy.FIXED_SIZE,
124
+ });
125
+ ```
126
+
127
+ ##### Python
128
+
129
+ ```python
130
+
131
+ from aws_cdk import (
132
+ aws_s3 as s3,
133
+ aws_rds as rds,
134
+ aws_ec2 as ec2,
135
+ Stack,
136
+ ArnFormat
137
+ )
138
+ from cdklabs.generative_ai_cdk_constructs import (
139
+ bedrock,
140
+ amazonaurora,
141
+ )
142
+
143
+ aurora_db = amazonaurora.AmazonAuroraVectorStore.from_existing_aurora_vector_store(
144
+ self, 'ExistingAuroraVectorStore',
145
+ cluster_identifier='aurora-serverless-vector-cluster',
146
+ database_name='bedrock_vector_db',
147
+ schema_name='bedrock_integration',
148
+ table_name='bedrock_kb',
149
+ vector_field='embedding',
150
+ text_field='chunks',
151
+ metadata_field='metadata',
152
+ primary_key_field='id',
153
+ embeddings_model=bedrock.BedrockFoundationModel.COHERE_EMBED_ENGLISH_V3,
154
+ vpc=ec2.Vpc.from_lookup(self, 'VPC', vpc_id='vpc-0c1a234567ee8bc90'),
155
+ aurora_security_group_id='sg-012ef345678c98a76',
156
+ secret=rds.DatabaseSecret.from_secret_complete_arn(
157
+ self,
158
+ 'Secret',
159
+ Stack.of(self).format_arn(
160
+ service= 'secretsmanager',
161
+ resource= 'secret',
162
+ resource_name= 'rds-db-credentials/cluster-1234567890',
163
+ region= Stack.of(self).region,
164
+ account= Stack.of(self).account,
165
+ arn_format= ArnFormat.COLON_RESOURCE_NAME
166
+ )
167
+ )
168
+ )
169
+
170
+ kb = bedrock.VectorKnowledgeBase(self, 'KnowledgeBase',
171
+ vector_store= aurora_db,
172
+ embeddings_model= bedrock.BedrockFoundationModel.COHERE_EMBED_ENGLISH_V3,
173
+ instruction= 'Use this knowledge base to answer questions about books. ' +
174
+ 'It contains the full text of novels.'
175
+ )
176
+
177
+ docBucket = s3.Bucket(self, 'DocBucket')
178
+
179
+ bedrock.S3DataSource(self, 'DataSource',
180
+ bucket= docBucket,
181
+ knowledge_base=kb,
182
+ data_source_name='books',
183
+ chunking_strategy= bedrock.ChunkingStrategy.FIXED_SIZE,
184
+ )
185
+ ```
@@ -0,0 +1,80 @@
1
+ # Vector Knowledge Base Properties
2
+
3
+ | Name | Type | Required | Description |
4
+ |---|---|---|---|
5
+ | embeddingsModel | BedrockFoundationModel | Yes | The embeddings model for the knowledge base |
6
+ | name | string | No | The name of the knowledge base |
7
+ | vectorType | VectorType | No | The vector type to store vector embeddings |
8
+ | description | string | No | The description of the knowledge base |
9
+ | instruction | string | No | Instructions for agents based on the design and type of information of the Knowledge Base that will impact how Agents interact with the Knowledge Base |
10
+ | existingRole | iam.IRole | No | Existing IAM role with a policy statement granting permission to invoke the specific embeddings model |
11
+ | indexName | string | No | The name of the vector index (only applicable if vectorStore is of type VectorCollection) |
12
+ | vectorField | string | No | The name of the field in the vector index (only applicable if vectorStore is of type VectorCollection) |
13
+ | vectorStore | VectorCollection \| PineconeVectorStore \| AmazonAuroraVectorStore \| ExistingAmazonAuroraVectorStore | No | The vector store for the knowledge base |
14
+ | vectorIndex | VectorIndex | No | The vector index for the OpenSearch Serverless backed knowledge base |
15
+ | knowledgeBaseState | string | No | Specifies whether to use the knowledge base or not when sending an InvokeAgent request |
16
+ | tags | Record<string, string> | No | Tag (KEY-VALUE) bedrock agent resource |
17
+
18
+
19
+ ### Vector Knowledge Base - Vector Type
20
+
21
+ The data type for the vectors when using a model to convert text into vector embeddings. Embeddings type may impact the availability of some embeddings models and vector stores. The following vector types are available:
22
+
23
+ - Floating point: More precise vector representation of the text, but more costly in storage.
24
+ - Binary: Not as precise vector representation of the text, but not as costly in storage as a standard floating-point (float32). Not all embedding models and vector stores support binary embeddings
25
+
26
+ See [Supported embeddings models](https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html) for information on the available models and their vector data types.
27
+
28
+ #### Example
29
+
30
+ ##### Typescript
31
+
32
+ ```ts
33
+ const app = new cdk.App();
34
+ const stack = new cdk.Stack(app, 'aws-cdk-bedrock-data-sources-integ-test');
35
+
36
+ const kb = new VectorKnowledgeBase(stack, 'MyKnowledgeBase', {
37
+ name: 'MyKnowledgeBase',
38
+ vectorType: bedrock.VectorType.BINARY,
39
+ embeddingsModel: BedrockFoundationModel.COHERE_EMBED_MULTILINGUAL_V3,
40
+ });
41
+ ```
42
+
43
+ ##### Python
44
+
45
+ ```python
46
+
47
+ from aws_cdk import (
48
+ aws_s3 as s3,
49
+ )
50
+ from cdklabs.generative_ai_cdk_constructs import (
51
+ bedrock
52
+ )
53
+
54
+ kb = bedrock.VectorKnowledgeBase(self, 'KnowledgeBase',
55
+ name= 'MyKnowledgeBase',
56
+ vector_type= bedrock.VectorType.BINARY,
57
+ embeddings_model= bedrock.BedrockFoundationModel.COHERE_EMBED_MULTILINGUAL_V3,
58
+ )
59
+ ```
60
+
61
+ ### Vector Knowledge Base - Data Sources
62
+
63
+ Data sources are the various repositories or systems from which information is extracted and ingested into the
64
+ knowledge base. These sources provide the raw content that will be processed, indexed, and made available for
65
+ querying within the knowledge base system. Data sources can include various types of systems such as document
66
+ management systems, databases, file storage systems, and content management platforms. Supported Data Sources
67
+ include Amazon S3 buckets, Web Crawlers, SharePoint sites, Salesforce instances, and Confluence spaces.
68
+
69
+ - **Amazon S3**. You can either create a new data source using the `bedrock.S3DataSource(..)` class, or using the
70
+ `kb.addS3DataSource(..)`.
71
+ - **Web Crawler**. You can either create a new data source using the `bedrock.WebCrawlerDataSource(..)` class, or using the
72
+ `kb.addWebCrawlerDataSource(..)`.
73
+ - **Confluence**. You can either create a new data source using the `bedrock.ConfluenceDataSource(..)` class, or using the
74
+ `kb.addConfluenceDataSource(..)`.
75
+ - **SharePoint**. You can either create a new data source using the `bedrock.SharePointDataSource(..)` class, or using the
76
+ `kb.addSharePointDataSource(..)`.
77
+ - **Salesforce**. You can either create a new data source using the `bedrock.SalesforceDataSource(..)` class, or using the
78
+ `kb.addSalesforceDataSource(..)`.
79
+ - **Custom**. You can either create a new data source using the `bedrock.CustomDataSource(..)` class, or using the
80
+ `kb.addCustomDataSource(..)`. This allows you to add your own custom data source to the knowledge base.
@@ -0,0 +1,56 @@
1
+
2
+ # OpenSearch Serverless Vector Store
3
+
4
+ ## Example
5
+
6
+ ### TypeScript
7
+
8
+ ```ts
9
+ import * as s3 from 'aws-cdk-lib/aws-s3';
10
+ import { bedrock } from '@cdklabs/generative-ai-cdk-constructs';
11
+
12
+ const kb = new bedrock.VectorKnowledgeBase(this, 'KnowledgeBase', {
13
+ embeddingsModel: bedrock.BedrockFoundationModel.TITAN_EMBED_TEXT_V1,
14
+ instruction: 'Use this knowledge base to answer questions about books. ' + 'It contains the full text of novels.',
15
+ });
16
+
17
+ const docBucket = new s3.Bucket(this, 'DocBucket');
18
+
19
+ new bedrock.S3DataSource(this, 'DataSource', {
20
+ bucket: docBucket,
21
+ knowledgeBase: kb,
22
+ dataSourceName: 'books',
23
+ chunkingStrategy: bedrock.ChunkingStrategy.fixedSize({
24
+ maxTokens: 500,
25
+ overlapPercentage: 20,
26
+ }),
27
+ });
28
+ ```
29
+
30
+ ### Python
31
+
32
+ ```python
33
+
34
+ from aws_cdk import (
35
+ aws_s3 as s3,
36
+ )
37
+ from cdklabs.generative_ai_cdk_constructs import (
38
+ bedrock
39
+ )
40
+
41
+ kb = bedrock.VectorKnowledgeBase(self, 'KnowledgeBase',
42
+ embeddings_model= bedrock.BedrockFoundationModel.TITAN_EMBED_TEXT_V1,
43
+ instruction= 'Use this knowledge base to answer questions about books. ' +
44
+ 'It contains the full text of novels.'
45
+ )
46
+
47
+ docBucket = s3.Bucket(self, 'DocBucket')
48
+
49
+ bedrock.S3DataSource(self, 'DataSource',
50
+ bucket= docBucket,
51
+ knowledge_base=kb,
52
+ data_source_name='books',
53
+ chunking_strategy= bedrock.ChunkingStrategy.FIXED_SIZE,
54
+ )
55
+
56
+ ```
@@ -0,0 +1,66 @@
1
+ #### Example of `Pinecone` (manual, you must have Pinecone vector store created):
2
+
3
+ ##### TypeScript
4
+
5
+ ```ts
6
+ import * as s3 from 'aws-cdk-lib/aws-s3';
7
+ import { pinecone, bedrock } from '@cdklabs/generative-ai-cdk-constructs';
8
+
9
+ const pineconeds = new pinecone.PineconeVectorStore({
10
+ connectionString: 'https://your-index-1234567.svc.gcp-starter.pinecone.io',
11
+ credentialsSecretArn: 'arn:aws:secretsmanager:your-region:123456789876:secret:your-key-name',
12
+ textField: 'question',
13
+ metadataField: 'metadata',
14
+ });
15
+
16
+ const kb = new bedrock.VectorKnowledgeBase(this, 'KnowledgeBase', {
17
+ vectorStore: pineconeds,
18
+ embeddingsModel: bedrock.BedrockFoundationModel.TITAN_EMBED_TEXT_V1,
19
+ instruction: 'Use this knowledge base to answer questions about books. ' + 'It contains the full text of novels.',
20
+ });
21
+
22
+ const docBucket = new s3.Bucket(this, 'DocBucket');
23
+
24
+ new bedrock.S3DataSource(this, 'DataSource', {
25
+ bucket: docBucket,
26
+ knowledgeBase: kb,
27
+ dataSourceName: 'books',
28
+ chunkingStrategy: bedrock.ChunkingStrategy.FIXED_SIZE,
29
+ });
30
+ ```
31
+
32
+ ##### Python
33
+
34
+ ```python
35
+
36
+ from aws_cdk import (
37
+ aws_s3 as s3,
38
+ )
39
+ from cdklabs.generative_ai_cdk_constructs import (
40
+ bedrock,
41
+ pinecone,
42
+ )
43
+
44
+ pineconevs = pinecone.PineconeVectorStore(
45
+ connection_string='https://your-index-1234567.svc.gcp-starter.pinecone.io',
46
+ credentials_secret_arn='arn:aws:secretsmanager:your-region:123456789876:secret:your-key-name',
47
+ text_field='question',
48
+ metadata_field='metadata'
49
+ )
50
+
51
+ kb = bedrock.VectorKnowledgeBase(self, 'KnowledgeBase',
52
+ vector_store= pineconevs,
53
+ embeddings_model= bedrock.BedrockFoundationModel.COHERE_EMBED_ENGLISH_V3,
54
+ instruction= 'Use this knowledge base to answer questions about books. ' +
55
+ 'It contains the full text of novels.'
56
+ )
57
+
58
+ docBucket = s3.Bucket(self, 'DocBucket')
59
+
60
+ bedrock.S3DataSource(self, 'DataSource',
61
+ bucket= docBucket,
62
+ knowledge_base=kb,
63
+ data_source_name='books',
64
+ chunking_strategy= bedrock.ChunkingStrategy.FIXED_SIZE,
65
+ )
66
+ ```
@@ -0,0 +1,153 @@
1
+ # Bedrock Inference Profiles
2
+
3
+ ## System Defined Inference Profiles
4
+
5
+ You can build a CrossRegionInferenceProfile using a system defined inference profile. The inference profile will route requests to the Regions defined in the cross region (system-defined) inference profile that you choose. You can find the system defined inference profiles by navigating to your console (Amazon Bedrock -> Cross-region inference) or programmatically, for instance using [boto3](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock/client/list_inference_profiles.html).
6
+
7
+ Before creating a CrossRegionInferenceProfile, ensure that you have access to the models and regions defined in the inference profiles. For instance, if you see the system defined inference profile "us.anthropic.claude-3-5-sonnet-20241022-v2:0" defined in your region, the table mentions that inference requests will be routed to US East (Virginia) us-east-1, US East (Ohio) us-east-2 and US West (Oregon) us-west-2. Thus, you need to have model access enabled in those regions for the model `anthropic.claude-3-5-sonnet-20241022-v2:0`. You can then create the CrossRegionInferenceProfile as follows:
8
+
9
+ ### Examples
10
+
11
+ #### TypeScript
12
+
13
+ ```ts
14
+ const cris = bedrock.CrossRegionInferenceProfile.fromConfig({
15
+ geoRegion: bedrock.CrossRegionInferenceProfileRegion.US,
16
+ model: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0,
17
+ });
18
+ ```
19
+
20
+ #### Python
21
+
22
+ ```python
23
+ cris = bedrock.CrossRegionInferenceProfile.from_config(
24
+ geo_region= bedrock.CrossRegionInferenceProfileRegion.US,
25
+ model= bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0
26
+ )
27
+ ```
28
+
29
+ [View full documentation](https://github.com/awslabs/generative-ai-cdk-constructs/tree/main/src/cdk-lib/bedrock#system-defined-inference-profiles)
30
+
31
+ ## Application Inference Profile
32
+
33
+ You can create an application inference profile with one or more Regions to track usage and costs when invoking a model.
34
+
35
+ To create an application inference profile for one Region, specify a foundation model. Usage and costs for requests made to that Region with that model will be tracked.
36
+
37
+ To create an application inference profile for multiple Regions, specify a cross region (system-defined) inference profile. The inference profile will route requests to the Regions defined in the cross region (system-defined) inference profile that you choose. Usage and costs for requests made to the Regions in the inference profile will be tracked. You can find the system defined inference profiles by navigating to your console (Amazon Bedrock -> Cross-region inference) or programmatically, for instance using [boto3](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock/client/list_inference_profiles.html):
38
+
39
+ ```
40
+ bedrock = session.client("bedrock", region_name="us-east-1")
41
+ bedrock.list_inference_profiles(typeEquals='SYSTEM_DEFINED')
42
+ ```
43
+
44
+ Before using application inference profiles, ensure that:
45
+
46
+ - You have appropriate IAM permissions
47
+ - You have access to the models and regions defined in the inference profiles
48
+ - Ensure proper configuration of the required API permissions for inference profile-related actions
49
+
50
+ Specifically the role you are assuming needs to have permissions for following actions in the IAM policy
51
+
52
+ ```
53
+ "Action": [
54
+ "bedrock:GetInferenceProfile",
55
+ "bedrock:ListInferenceProfiles",
56
+ "bedrock:DeleteInferenceProfile",
57
+ "bedrock:TagResource",
58
+ "bedrock:UntagResource",
59
+ "bedrock:ListTagsForResource"
60
+ ]
61
+ ```
62
+
63
+ You can restrict access to specific resources by specifying the "Resource" element in the IAM policy.
64
+
65
+ ```
66
+ "Resource": ["arn:aws:bedrock:*:*:application-inference-profile/*"]
67
+ ```
68
+
69
+ ### Examples
70
+
71
+ #### TypeScript
72
+
73
+ ```ts
74
+ // Create an application inference profile for one Region
75
+ // You can use the 'bedrock.BedrockFoundationModel' or pass the arn as a string
76
+ const appInfProfile1 = new ApplicationInferenceProfile(this, 'myapplicationprofile', {
77
+ inferenceProfileName: 'claude 3 sonnet v1',
78
+ modelSource: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_SONNET_V1_0,
79
+ tags: [{ key: 'test', value: 'test' }],
80
+ });
81
+
82
+ // To create an application inference profile across regions, specify the cross region inference profile
83
+ const cris = bedrock.CrossRegionInferenceProfile.fromConfig({
84
+ geoRegion: bedrock.CrossRegionInferenceProfileRegion.US,
85
+ model: bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0,
86
+ });
87
+
88
+ const appInfProfile2 = new ApplicationInferenceProfile(this, 'myapplicationprofile2', {
89
+ inferenceProfileName: 'claude 35 sonnet v2',
90
+ modelSource: cris,
91
+ });
92
+
93
+ // Import a Cfn L1 construct created application inference profile
94
+ const cfnapp = new CfnApplicationInferenceProfile(this, 'mytestaip3', {
95
+ inferenceProfileName: 'mytest',
96
+ modelSource: {
97
+ copyFrom: 'arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0',
98
+ },
99
+ });
100
+
101
+ const appInfProfile3 = bedrock.ApplicationInferenceProfile.fromCfnApplicationInferenceProfile(cfnapp);
102
+
103
+ // Import an inference profile through attributes
104
+ const appInfProfile4 = bedrock.ApplicationInferenceProfile.fromApplicationInferenceProfileAttributes(this, 'TestAIP', {
105
+ inferenceProfileArn: 'arn:aws:bedrock:us-east-1:XXXXX:application-inference-profile/ID',
106
+ inferenceProfileIdentifier: 'arn:aws:bedrock:us-east-1:XXXXXXX:application-inference-profile/ID',
107
+ });
108
+ ```
109
+
110
+ #### Python
111
+
112
+ ```python
113
+
114
+ # Create an application inference profile for one Region
115
+ # You can use the 'bedrock.BedrockFoundationModel' or pass the arn as a string
116
+ appInfProfile1 = bedrock.ApplicationInferenceProfile(self, 'myapplicationprofile',
117
+ inference_profile_name='claude 3 sonnet v1',
118
+ model_source=bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_SONNET_V1_0,
119
+ tags=[CfnTag(
120
+ key="key",
121
+ value="value"
122
+ )]
123
+ )
124
+
125
+ # To create an application inference profile across regions, specify the cross region inference profile
126
+ cris = bedrock.CrossRegionInferenceProfile.from_config(
127
+ geo_region= bedrock.CrossRegionInferenceProfileRegion.US,
128
+ model= bedrock.BedrockFoundationModel.ANTHROPIC_CLAUDE_3_5_SONNET_V2_0
129
+ )
130
+
131
+ appInfProfile2 = bedrock.ApplicationInferenceProfile(self, 'myapplicationprofile2',
132
+ inference_profile_name='claude 35 sonnet v2',
133
+ model_source=cris
134
+ )
135
+
136
+ # Import an inference profile through attributes
137
+ appInfProfile3 = bedrock.ApplicationInferenceProfile.from_application_inference_profile_attributes(self, 'TestAIP',
138
+ inference_profile_arn='arn:aws:bedrock:us-east-1:XXXXX:application-inference-profile/ID',
139
+ inference_profile_identifier='arn:aws:bedrock:us-east-1:XXXXXXX:application-inference-profile/ID',
140
+ )
141
+
142
+ # Import a Cfn L1 construct created application inference profile
143
+ cfnaip = CfnApplicationInferenceProfile(self, 'mytestaip4',
144
+ inference_profile_name='mytest',
145
+ model_source= CfnApplicationInferenceProfile.InferenceProfileModelSourceProperty(
146
+ copy_from='arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0'
147
+ ),
148
+ )
149
+
150
+ appInfProfile4 = bedrock.ApplicationInferenceProfile.from_cfn_application_inference_profile(cfnaip)
151
+ ```
152
+
153
+ [View full documentation](https://github.com/awslabs/generative-ai-cdk-constructs/tree/main/src/cdk-lib/bedrock#application-inference-profile)
@@ -0,0 +1,137 @@
1
+ # Action Groups
2
+
3
+ ## Overview
4
+
5
+ Action groups define functions your agent can call, connecting Bedrock Agents to your business logic via Lambda functions. The agent uses an OpenAPI schema to understand what your functions do and how to call them.
6
+
7
+ ```mermaid
8
+ graph LR
9
+ A[Agent] --> B[Action Group]
10
+ B --> C[Lambda Function]
11
+ B --> D[OpenAPI Schema]
12
+ ```
13
+
14
+ ## Action Group Properties
15
+
16
+ | Name | Type | Required | Description |
17
+ |---|---|---|---|
18
+ | name | string | Yes | The name of the action group |
19
+ | description | string | No | A description of the action group |
20
+ | apiSchema | ApiSchema | No | The API Schema |
21
+ | executor | ActionGroupExecutor | No | The action group executor |
22
+ | enabled | boolean | No | Specifies whether the action group is available for the agent to invoke or not when sending an InvokeAgent request. Defaults to true |
23
+ | forceDelete | boolean | No | Specifies whether to delete the resource even if it's in use. Defaults to false |
24
+ | functionSchema | CfnAgent.FunctionSchemaProperty | No | Defines functions that each define parameters that the agent needs to invoke from the user |
25
+ | parentActionGroupSignature | ParentActionGroupSignature | No | The AWS Defined signature for enabling certain capabilities in your agent. When specified, description, apiSchema, and actionGroupExecutor must be blank |
26
+
27
+ ## OpenAPI Schema Generation & Best Practices
28
+
29
+ For Action Groups, use the built-in OpenAPI schema generation tool provided by the AWS CDK MCP server:
30
+
31
+ ```typescript
32
+ // Using MCP client
33
+ const result = await use_mcp_tool({
34
+ server_name: "awslabs.cdk-mcp-server",
35
+ tool_name: "GenerateBedrockAgentSchemaFromFile",
36
+ arguments: {
37
+ lambda_code_path: "path/to/your/lambda.py",
38
+ output_path: "path/to/output/openapi.json"
39
+ }
40
+ });
41
+ ```
42
+
43
+ The tool will:
44
+
45
+ 1. Parse your Lambda function code
46
+ 2. Extract function signatures and docstrings
47
+ 3. Generate a Bedrock-compatible OpenAPI schema
48
+ 4. Save it to the specified output path
49
+
50
+ ### Key Requirements
51
+ - Ensure each operation has a unique operationId
52
+ - Define complete response schemas
53
+ - Use `fromLocalAsset` (not `fromAsset`) for API schemas
54
+ - Include detailed descriptions for all endpoints, parameters, and return values
55
+ - Use proper type annotations with Python type hints
56
+
57
+ ### Lambda Function Example
58
+
59
+ ```python
60
+ from aws_lambda_powertools.event_handler import BedrockAgentResolver
61
+ from typing import Annotated, List
62
+ from aws_lambda_powertools.event_handler.openapi.params import Query, Path
63
+ from pydantic import BaseModel, Field
64
+
65
+ app = BedrockAgentResolver()
66
+
67
+ class Product(BaseModel):
68
+ product_id: str = Field(description="Unique product identifier")
69
+ name: str = Field(description="Product name")
70
+ price: float = Field(description="Product price in USD")
71
+
72
+ @app.get("/products", description="List available products")
73
+ def list_products(
74
+ category: Annotated[str, Query(description="Product category")] = None
75
+ ) -> List[Product]:
76
+ # Your business logic here
77
+ return [Product(product_id="1", name="Product A", price=19.99)]
78
+
79
+ def lambda_handler(event, context):
80
+ return app.resolve(event, context)
81
+ ```
82
+
83
+ ## Examples
84
+
85
+ ### TypeScript
86
+
87
+ ```ts
88
+ const actionGroupFunction = new lambda_python.PythonFunction(this, 'ActionGroupFunction', {
89
+ runtime: lambda.Runtime.PYTHON_3_12,
90
+ entry: path.join(__dirname, '../lambda/action-group'),
91
+ });
92
+
93
+ // Example of proper Action Group setup with fromLocalAsset
94
+ const actionGroup = new AgentActionGroup({
95
+ name: 'query-library',
96
+ description: 'Use these functions to get information about the books in the library.',
97
+ executor: bedrock.ActionGroupExecutor.fromlambdaFunction(actionGroupFunction),
98
+ enabled: true,
99
+ apiSchema: bedrock.ApiSchema.fromLocalAsset(path.join(__dirname, 'action-group.yaml')),
100
+ });
101
+
102
+ agent.addActionGroup(actionGroup);
103
+
104
+ // Real-world example with account actions
105
+ const agentAccountActions = new AgentActionGroup({
106
+ name: "agent-account-actions",
107
+ description: "Use these functions to take actions on authenticated user's accounts",
108
+ executor: bedrock.ActionGroupExecutor.fromlambdaFunction(accountActionsLambda),
109
+ apiSchema: bedrock.ApiSchema.fromLocalAsset(
110
+ path.join(agentsLambdaDir, "account_actions", "openapi.json"),
111
+ ),
112
+ });
113
+
114
+ agent.addActionGroup(agentAccountActions);
115
+ ```
116
+
117
+ ### Python
118
+
119
+ ```python
120
+ action_group_function = PythonFunction(
121
+ self,
122
+ "LambdaFunction",
123
+ runtime=Runtime.PYTHON_3_12,
124
+ entry="./lambda",
125
+ index="app.py",
126
+ handler="lambda_handler",
127
+ )
128
+
129
+ actionGroup = bedrock.AgentActionGroup(
130
+ name="query-library",
131
+ description="Use these functions to get information about the books in the library.",
132
+ executor= bedrock.ActionGroupExecutor.fromlambda_function(action_group_function),
133
+ enabled=True,
134
+ api_schema=bedrock.ApiSchema.from_local_asset("action-group.yaml"))
135
+
136
+ agent.add_action_group(actionGroup)
137
+ ```