bedrock-agentcore-starter-toolkit 0.0.1__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of bedrock-agentcore-starter-toolkit has been flagged as possibly problematic.
Files changed (50)
  1. bedrock_agentcore_starter_toolkit/__init__.py +5 -0
  2. bedrock_agentcore_starter_toolkit/cli/cli.py +32 -0
  3. bedrock_agentcore_starter_toolkit/cli/common.py +44 -0
  4. bedrock_agentcore_starter_toolkit/cli/gateway/__init__.py +1 -0
  5. bedrock_agentcore_starter_toolkit/cli/gateway/commands.py +88 -0
  6. bedrock_agentcore_starter_toolkit/cli/runtime/__init__.py +1 -0
  7. bedrock_agentcore_starter_toolkit/cli/runtime/commands.py +651 -0
  8. bedrock_agentcore_starter_toolkit/cli/runtime/configuration_manager.py +133 -0
  9. bedrock_agentcore_starter_toolkit/notebook/__init__.py +5 -0
  10. bedrock_agentcore_starter_toolkit/notebook/runtime/__init__.py +1 -0
  11. bedrock_agentcore_starter_toolkit/notebook/runtime/bedrock_agentcore.py +239 -0
  12. bedrock_agentcore_starter_toolkit/operations/__init__.py +1 -0
  13. bedrock_agentcore_starter_toolkit/operations/gateway/README.md +277 -0
  14. bedrock_agentcore_starter_toolkit/operations/gateway/__init__.py +6 -0
  15. bedrock_agentcore_starter_toolkit/operations/gateway/client.py +456 -0
  16. bedrock_agentcore_starter_toolkit/operations/gateway/constants.py +152 -0
  17. bedrock_agentcore_starter_toolkit/operations/gateway/create_lambda.py +85 -0
  18. bedrock_agentcore_starter_toolkit/operations/gateway/create_role.py +90 -0
  19. bedrock_agentcore_starter_toolkit/operations/gateway/exceptions.py +13 -0
  20. bedrock_agentcore_starter_toolkit/operations/runtime/__init__.py +26 -0
  21. bedrock_agentcore_starter_toolkit/operations/runtime/configure.py +241 -0
  22. bedrock_agentcore_starter_toolkit/operations/runtime/create_role.py +404 -0
  23. bedrock_agentcore_starter_toolkit/operations/runtime/invoke.py +129 -0
  24. bedrock_agentcore_starter_toolkit/operations/runtime/launch.py +439 -0
  25. bedrock_agentcore_starter_toolkit/operations/runtime/models.py +79 -0
  26. bedrock_agentcore_starter_toolkit/operations/runtime/status.py +66 -0
  27. bedrock_agentcore_starter_toolkit/services/codebuild.py +332 -0
  28. bedrock_agentcore_starter_toolkit/services/ecr.py +84 -0
  29. bedrock_agentcore_starter_toolkit/services/runtime.py +473 -0
  30. bedrock_agentcore_starter_toolkit/utils/endpoints.py +32 -0
  31. bedrock_agentcore_starter_toolkit/utils/logging_config.py +72 -0
  32. bedrock_agentcore_starter_toolkit/utils/runtime/config.py +129 -0
  33. bedrock_agentcore_starter_toolkit/utils/runtime/container.py +310 -0
  34. bedrock_agentcore_starter_toolkit/utils/runtime/entrypoint.py +197 -0
  35. bedrock_agentcore_starter_toolkit/utils/runtime/logs.py +33 -0
  36. bedrock_agentcore_starter_toolkit/utils/runtime/policy_template.py +74 -0
  37. bedrock_agentcore_starter_toolkit/utils/runtime/schema.py +151 -0
  38. bedrock_agentcore_starter_toolkit/utils/runtime/templates/Dockerfile.j2 +44 -0
  39. bedrock_agentcore_starter_toolkit/utils/runtime/templates/dockerignore.template +68 -0
  40. bedrock_agentcore_starter_toolkit/utils/runtime/templates/execution_role_policy.json.j2 +98 -0
  41. bedrock_agentcore_starter_toolkit/utils/runtime/templates/execution_role_trust_policy.json.j2 +21 -0
  42. bedrock_agentcore_starter_toolkit-0.1.1.dist-info/METADATA +137 -0
  43. bedrock_agentcore_starter_toolkit-0.1.1.dist-info/RECORD +47 -0
  44. bedrock_agentcore_starter_toolkit-0.1.1.dist-info/entry_points.txt +2 -0
  45. bedrock_agentcore_starter_toolkit-0.1.1.dist-info/licenses/NOTICE.txt +190 -0
  46. bedrock_agentcore_starter_toolkit/init.py +0 -3
  47. bedrock_agentcore_starter_toolkit-0.0.1.dist-info/METADATA +0 -26
  48. bedrock_agentcore_starter_toolkit-0.0.1.dist-info/RECORD +0 -5
  49. {bedrock_agentcore_starter_toolkit-0.0.1.dist-info → bedrock_agentcore_starter_toolkit-0.1.1.dist-info}/WHEEL +0 -0
  50. /bedrock_agentcore_starter_toolkit-0.0.1.dist-info/licenses/LICENSE → /bedrock_agentcore_starter_toolkit-0.1.1.dist-info/licenses/LICENSE.txt +0 -0
bedrock_agentcore_starter_toolkit/services/codebuild.py
@@ -0,0 +1,332 @@
+ """CodeBuild service for ARM64 container builds."""
+
+ import fnmatch
+ import logging
+ import os
+ import tempfile
+ import time
+ import zipfile
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import List
+
+ import boto3
+ from botocore.exceptions import ClientError
+
+ from ..operations.runtime.create_role import get_or_create_codebuild_execution_role
+
+
+ class CodeBuildService:
+     """Service for managing CodeBuild projects and builds for ARM64."""
+
+     def __init__(self, session: boto3.Session):
+         """Initialize CodeBuild service with AWS session."""
+         self.session = session
+         self.client = session.client("codebuild")
+         self.s3_client = session.client("s3")
+         self.iam_client = session.client("iam")
+         self.logger = logging.getLogger(__name__)
+         self.source_bucket = None
+
+     def get_source_bucket_name(self, account_id: str) -> str:
+         """Get S3 bucket name for CodeBuild sources."""
+         region = self.session.region_name
+         return f"bedrock-agentcore-codebuild-sources-{account_id}-{region}"
+
+     def ensure_source_bucket(self, account_id: str) -> str:
+         """Ensure S3 bucket exists for CodeBuild sources."""
+         bucket_name = self.get_source_bucket_name(account_id)
+
+         try:
+             self.s3_client.head_bucket(Bucket=bucket_name)
+             self.logger.debug("Using existing S3 bucket: %s", bucket_name)
+         except ClientError:
+             # Create bucket
+             region = self.session.region_name
+             self.s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": region})
+
+             # Set lifecycle to cleanup old builds
+             self.s3_client.put_bucket_lifecycle_configuration(
+                 Bucket=bucket_name,
+                 LifecycleConfiguration={
+                     "Rules": [{"ID": "DeleteOldBuilds", "Status": "Enabled", "Filter": {}, "Expiration": {"Days": 7}}]
+                 },
+             )
+
+             self.logger.info("Created S3 bucket: %s", bucket_name)
+
+         return bucket_name
+
+     def upload_source(self, agent_name: str) -> str:
+         """Upload current directory to S3, respecting .dockerignore patterns."""
+         account_id = self.session.client("sts").get_caller_identity()["Account"]
+         bucket_name = self.ensure_source_bucket(account_id)
+         self.source_bucket = bucket_name
+
+         # Parse .dockerignore patterns
+         ignore_patterns = self._parse_dockerignore()
+
+         with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as temp_zip:
+             try:
+                 with zipfile.ZipFile(temp_zip.name, "w", zipfile.ZIP_DEFLATED) as zipf:
+                     for root, dirs, files in os.walk("."):
+                         # Convert to relative path
+                         rel_root = os.path.relpath(root, ".")
+                         if rel_root == ".":
+                             rel_root = ""
+
+                         # Filter directories
+                         dirs[:] = [
+                             d
+                             for d in dirs
+                             if not self._should_ignore(
+                                 os.path.join(rel_root, d) if rel_root else d, ignore_patterns, is_dir=True
+                             )
+                         ]
+
+                         for file in files:
+                             file_rel_path = os.path.join(rel_root, file) if rel_root else file
+
+                             # Skip if matches ignore pattern
+                             if self._should_ignore(file_rel_path, ignore_patterns, is_dir=False):
+                                 continue
+
+                             file_path = Path(root) / file
+                             zipf.write(file_path, file_rel_path)
+
+                 # Create agent-organized S3 key: agentname/timestamp.zip
+                 timestamp = datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")
+                 s3_key = f"{agent_name}/{timestamp}.zip"
+
+                 self.s3_client.upload_file(temp_zip.name, bucket_name, s3_key)
+
+                 self.logger.info("Uploaded source to S3: %s", s3_key)
+                 return f"s3://{bucket_name}/{s3_key}"
+
+             finally:
+                 os.unlink(temp_zip.name)
+
+     def _normalize_s3_location(self, source_location: str) -> str:
+         """Convert s3:// URL to bucket/key format for CodeBuild."""
+         return source_location.replace("s3://", "") if source_location.startswith("s3://") else source_location
+
+     def create_codebuild_execution_role(self, account_id: str, ecr_repository_arn: str, agent_name: str) -> str:
+         """Get or create CodeBuild execution role using shared role creation logic."""
+         return get_or_create_codebuild_execution_role(
+             session=self.session,
+             logger=self.logger,
+             region=self.session.region_name,
+             account_id=account_id,
+             agent_name=agent_name,
+             ecr_repository_arn=ecr_repository_arn,
+             source_bucket_name=self.get_source_bucket_name(account_id),
+         )
+
+     def create_or_update_project(
+         self, agent_name: str, ecr_repository_uri: str, execution_role: str, source_location: str
+     ) -> str:
+         """Create or update CodeBuild project for ARM64 builds."""
+         project_name = f"bedrock-agentcore-{agent_name}-builder"
+
+         buildspec = self._get_arm64_buildspec(ecr_repository_uri)
+
+         # CodeBuild expects S3 location without s3:// prefix (bucket/key format)
+         codebuild_source_location = self._normalize_s3_location(source_location)
+
+         project_config = {
+             "name": project_name,
+             "source": {
+                 "type": "S3",
+                 "location": codebuild_source_location,
+                 "buildspec": buildspec,
+             },
+             "artifacts": {
+                 "type": "NO_ARTIFACTS",
+             },
+             "environment": {
+                 "type": "ARM_CONTAINER",  # ARM64 images require ARM_CONTAINER environment type
+                 "image": "aws/codebuild/amazonlinux2-aarch64-standard:3.0",
+                 "computeType": "BUILD_GENERAL1_LARGE",  # 4 vCPUs, 7GB RAM for faster builds
+                 "privilegedMode": True,  # Required for Docker
+             },
+             "serviceRole": execution_role,
+         }
+
+         try:
+             self.client.create_project(**project_config)
+             self.logger.info("Created CodeBuild project: %s", project_name)
+         except ClientError as e:
+             if e.response["Error"]["Code"] == "ResourceAlreadyExistsException":
+                 self.client.update_project(**project_config)
+                 self.logger.info("Updated CodeBuild project: %s", project_name)
+             else:
+                 raise
+
+         return project_name
+
+     def start_build(self, project_name: str, source_location: str) -> str:
+         """Start a CodeBuild build."""
+         # CodeBuild expects S3 location without s3:// prefix (bucket/key format)
+         codebuild_source_location = self._normalize_s3_location(source_location)
+
+         response = self.client.start_build(
+             projectName=project_name,
+             sourceLocationOverride=codebuild_source_location,
+         )
+
+         return response["build"]["id"]
+
+     def wait_for_completion(self, build_id: str, timeout: int = 900):
+         """Wait for CodeBuild to complete with detailed phase tracking."""
+         self.logger.info("Starting CodeBuild monitoring...")
+
+         # Phase tracking variables
+         current_phase = None
+         phase_start_time = None
+         build_start_time = time.time()
+
+         while time.time() - build_start_time < timeout:
+             response = self.client.batch_get_builds(ids=[build_id])
+             build = response["builds"][0]
+             status = build["buildStatus"]
+             build_phase = build.get("currentPhase", "UNKNOWN")
+
+             # Track phase changes
+             if build_phase != current_phase:
+                 # Log previous phase completion (if any)
+                 if current_phase and phase_start_time:
+                     phase_duration = time.time() - phase_start_time
+                     self.logger.info("✅ %s completed in %.1fs", current_phase, phase_duration)
+
+                 # Log new phase start
+                 current_phase = build_phase
+                 phase_start_time = time.time()
+                 total_duration = phase_start_time - build_start_time
+                 self.logger.info("🔄 %s started (total: %.0fs)", current_phase, total_duration)
+
+             # Check for completion
+             if status == "SUCCEEDED":
+                 # Log final phase completion
+                 if current_phase and phase_start_time:
+                     phase_duration = time.time() - phase_start_time
+                     self.logger.info("✅ %s completed in %.1fs", current_phase, phase_duration)
+
+                 total_duration = time.time() - build_start_time
+                 minutes, seconds = divmod(int(total_duration), 60)
+                 self.logger.info("🎉 CodeBuild completed successfully in %dm %ds", minutes, seconds)
+                 return
+
+             elif status in ["FAILED", "FAULT", "STOPPED", "TIMED_OUT"]:
+                 # Log failure with phase info
+                 if current_phase:
+                     self.logger.error("❌ Build failed during %s phase", current_phase)
+                 raise RuntimeError(f"CodeBuild failed with status: {status}")
+
+             time.sleep(5)
+
+         total_duration = time.time() - build_start_time
+         minutes, seconds = divmod(int(total_duration), 60)
+         raise TimeoutError(f"CodeBuild timed out after {minutes}m {seconds}s (current phase: {current_phase})")
+
+     def _get_arm64_buildspec(self, ecr_repository_uri: str) -> str:
+         """Get optimized buildspec for ARM64 Docker."""
+         return f"""
+ version: 0.2
+ phases:
+   pre_build:
+     commands:
+       - echo Logging in to Amazon ECR...
+       - aws ecr get-login-password --region $AWS_DEFAULT_REGION |
+         docker login --username AWS --password-stdin {ecr_repository_uri}
+       - export DOCKER_BUILDKIT=1
+       - export BUILDKIT_PROGRESS=plain
+   build:
+     commands:
+       - echo Build started on `date`
+       - echo Building ARM64 Docker image with BuildKit processing...
+       - export DOCKER_BUILDKIT=1
+       - docker buildx create --name arm64builder --use || true
+       - docker buildx build --platform linux/arm64 --load -t bedrock-agentcore-arm64 .
+       - docker tag bedrock-agentcore-arm64:latest {ecr_repository_uri}:latest
+   post_build:
+     commands:
+       - echo Build completed on `date`
+       - echo Pushing ARM64 image to ECR...
+       - docker push {ecr_repository_uri}:latest
+ """
+
+     def _parse_dockerignore(self) -> List[str]:
+         """Parse .dockerignore file and return list of patterns."""
+         dockerignore_path = Path(".dockerignore")
+         patterns = []
+
+         if dockerignore_path.exists():
+             with open(dockerignore_path, "r") as f:
+                 for line in f:
+                     line = line.strip()
+                     if line and not line.startswith("#"):
+                         patterns.append(line)
+
+             self.logger.info("Using .dockerignore with %d patterns", len(patterns))
+         else:
+             # Default patterns if no .dockerignore
+             patterns = [
+                 ".git",
+                 "__pycache__",
+                 "*.pyc",
+                 ".DS_Store",
+                 "node_modules",
+                 ".venv",
+                 "venv",
+                 "*.egg-info",
+                 ".bedrock_agentcore.yaml",  # Always exclude config
+             ]
+             self.logger.info("No .dockerignore found, using default exclude patterns")
+
+         return patterns
+
+     def _should_ignore(self, path: str, patterns: List[str], is_dir: bool = False) -> bool:
+         """Check if path should be ignored based on dockerignore patterns."""
+         # Normalize path
+         if path.startswith("./"):
+             path = path[2:]
+
+         should_ignore = False  # Default state: don't ignore
+
+         for pattern in patterns:
+             # Handle negation patterns
+             if pattern.startswith("!"):
+                 if self._matches_pattern(path, pattern[1:], is_dir):
+                     should_ignore = False  # Negation pattern: don't ignore
+             else:
+                 # Regular ignore patterns
+                 if self._matches_pattern(path, pattern, is_dir):
+                     should_ignore = True  # Regular pattern: ignore
+
+         return should_ignore
+
+     def _matches_pattern(self, path: str, pattern: str, is_dir: bool) -> bool:
+         """Check if path matches a dockerignore pattern."""
+         # Directory-specific patterns
+         if pattern.endswith("/"):
+             if not is_dir:
+                 return False
+             pattern = pattern[:-1]
+
+         # Exact match
+         if path == pattern:
+             return True
+
+         # Glob pattern match
+         if fnmatch.fnmatch(path, pattern):
+             return True
+
+         # Directory prefix match
+         if is_dir and pattern in path.split("/"):
+             return True
+
+         # File in ignored directory
+         if not is_dir and any(fnmatch.fnmatch(part, pattern) for part in path.split("/")):
+             return True
+
+         return False
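
Taken together, these methods form an upload → project → build → wait pipeline. The following is a minimal sketch of how the class might be driven; the agent name "my_agent", the region, and the ECR URI/ARN values are illustrative placeholders, and the execution-role helper it relies on lives in create_role.py, which is not shown in this hunk.

import boto3

session = boto3.Session(region_name="us-west-2")
codebuild = CodeBuildService(session)

account_id = session.client("sts").get_caller_identity()["Account"]
ecr_uri = f"{account_id}.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore-my_agent"   # placeholder
ecr_arn = f"arn:aws:ecr:us-west-2:{account_id}:repository/bedrock-agentcore-my_agent"  # placeholder

# 1. Zip the working directory (honoring .dockerignore) and upload it to S3
source_location = codebuild.upload_source(agent_name="my_agent")

# 2. Ensure an execution role and an ARM64 CodeBuild project exist
role_arn = codebuild.create_codebuild_execution_role(
    account_id=account_id, ecr_repository_arn=ecr_arn, agent_name="my_agent"
)
project = codebuild.create_or_update_project(
    agent_name="my_agent",
    ecr_repository_uri=ecr_uri,
    execution_role=role_arn,
    source_location=source_location,
)

# 3. Start the build and block until it finishes (default timeout: 15 minutes)
build_id = codebuild.start_build(project, source_location)
codebuild.wait_for_completion(build_id)
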
bedrock_agentcore_starter_toolkit/services/ecr.py
@@ -0,0 +1,84 @@
+ """ECR (Elastic Container Registry) service integration."""
+
+ import base64
+
+ import boto3
+
+ from ..utils.runtime.container import ContainerRuntime
+
+
+ def get_account_id() -> str:
+     """Get AWS account ID."""
+     return boto3.client("sts").get_caller_identity()["Account"]
+
+
+ def get_region() -> str:
+     """Get AWS region."""
+     return boto3.Session().region_name or "us-west-2"
+
+
+ def create_ecr_repository(repo_name: str, region: str) -> str:
+     """Create or get existing ECR repository."""
+     ecr = boto3.client("ecr", region_name=region)
+     try:
+         response = ecr.create_repository(repositoryName=repo_name)
+         return response["repository"]["repositoryUri"]
+     except ecr.exceptions.RepositoryAlreadyExistsException:
+         response = ecr.describe_repositories(repositoryNames=[repo_name])
+         return response["repositories"][0]["repositoryUri"]
+
+
+ def get_or_create_ecr_repository(agent_name: str, region: str) -> str:
+     """Get existing ECR repository or create a new one (idempotent).
+
+     Args:
+         agent_name: Name of the agent
+         region: AWS region
+
+     Returns:
+         ECR repository URI
+     """
+     # Generate deterministic repository name based on agent name
+     repo_name = f"bedrock-agentcore-{agent_name}"
+
+     ecr = boto3.client("ecr", region_name=region)
+
+     try:
+         # Step 1: Check if repository already exists
+         response = ecr.describe_repositories(repositoryNames=[repo_name])
+         existing_repo_uri = response["repositories"][0]["repositoryUri"]
+
+         print(f"✅ Reusing existing ECR repository: {existing_repo_uri}")
+         return existing_repo_uri
+
+     except ecr.exceptions.RepositoryNotFoundException:
+         # Step 2: Repository doesn't exist, create it
+         print(f"Repository doesn't exist, creating new ECR repository: {repo_name}")
+         return create_ecr_repository(repo_name, region)
+
+
+ def deploy_to_ecr(local_tag: str, repo_name: str, region: str, container_runtime: ContainerRuntime) -> str:
+     """Build and push image to ECR."""
+     ecr = boto3.client("ecr", region_name=region)
+
+     # Get or create repository
+     ecr_uri = create_ecr_repository(repo_name, region)
+
+     # Get auth token
+     auth_data = ecr.get_authorization_token()["authorizationData"][0]
+     token = base64.b64decode(auth_data["authorizationToken"]).decode("utf-8")
+     username, password = token.split(":")
+
+     # Login to ECR
+     if not container_runtime.login(auth_data["proxyEndpoint"], username, password):
+         raise RuntimeError("Failed to login to ECR")
+
+     # Tag and push
+     ecr_tag = f"{ecr_uri}:latest"
+     if not container_runtime.tag(local_tag, ecr_tag):
+         raise RuntimeError("Failed to tag image")
+
+     if not container_runtime.push(ecr_tag):
+         raise RuntimeError("Failed to push image to ECR")
+
+     return ecr_tag
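
This module covers the local (non-CodeBuild) path: a locally built image is tagged and pushed to ECR through a ContainerRuntime wrapper. A minimal sketch of the intended flow follows; "my_agent" and the local image tag are illustrative placeholders, and ContainerRuntime's constructor arguments are not shown in this hunk, so default construction is an assumption here.

from bedrock_agentcore_starter_toolkit.utils.runtime.container import ContainerRuntime

region = get_region()
repo_uri = get_or_create_ecr_repository("my_agent", region)  # idempotent; prints whether it reused or created

runtime = ContainerRuntime()  # assumed default construction; see container.py elsewhere in this diff
ecr_tag = deploy_to_ecr(
    local_tag="bedrock-agentcore-my_agent:latest",  # placeholder local image tag
    repo_name="bedrock-agentcore-my_agent",
    region=region,
    container_runtime=runtime,
)
print(f"Pushed {ecr_tag}")
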