daita_agents-0.2.0-py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in that registry.
Files changed (69)
  1. daita/__init__.py +216 -0
  2. daita/agents/__init__.py +33 -0
  3. daita/agents/base.py +743 -0
  4. daita/agents/substrate.py +1141 -0
  5. daita/cli/__init__.py +145 -0
  6. daita/cli/__main__.py +7 -0
  7. daita/cli/ascii_art.py +44 -0
  8. daita/cli/core/__init__.py +0 -0
  9. daita/cli/core/create.py +254 -0
  10. daita/cli/core/deploy.py +473 -0
  11. daita/cli/core/deployments.py +309 -0
  12. daita/cli/core/import_detector.py +219 -0
  13. daita/cli/core/init.py +481 -0
  14. daita/cli/core/logs.py +239 -0
  15. daita/cli/core/managed_deploy.py +709 -0
  16. daita/cli/core/run.py +648 -0
  17. daita/cli/core/status.py +421 -0
  18. daita/cli/core/test.py +239 -0
  19. daita/cli/core/webhooks.py +172 -0
  20. daita/cli/main.py +588 -0
  21. daita/cli/utils.py +541 -0
  22. daita/config/__init__.py +62 -0
  23. daita/config/base.py +159 -0
  24. daita/config/settings.py +184 -0
  25. daita/core/__init__.py +262 -0
  26. daita/core/decision_tracing.py +701 -0
  27. daita/core/exceptions.py +480 -0
  28. daita/core/focus.py +251 -0
  29. daita/core/interfaces.py +76 -0
  30. daita/core/plugin_tracing.py +550 -0
  31. daita/core/relay.py +779 -0
  32. daita/core/reliability.py +381 -0
  33. daita/core/scaling.py +459 -0
  34. daita/core/tools.py +554 -0
  35. daita/core/tracing.py +770 -0
  36. daita/core/workflow.py +1144 -0
  37. daita/display/__init__.py +1 -0
  38. daita/display/console.py +160 -0
  39. daita/execution/__init__.py +58 -0
  40. daita/execution/client.py +856 -0
  41. daita/execution/exceptions.py +92 -0
  42. daita/execution/models.py +317 -0
  43. daita/llm/__init__.py +60 -0
  44. daita/llm/anthropic.py +291 -0
  45. daita/llm/base.py +530 -0
  46. daita/llm/factory.py +101 -0
  47. daita/llm/gemini.py +355 -0
  48. daita/llm/grok.py +219 -0
  49. daita/llm/mock.py +172 -0
  50. daita/llm/openai.py +220 -0
  51. daita/plugins/__init__.py +141 -0
  52. daita/plugins/base.py +37 -0
  53. daita/plugins/base_db.py +167 -0
  54. daita/plugins/elasticsearch.py +849 -0
  55. daita/plugins/mcp.py +481 -0
  56. daita/plugins/mongodb.py +520 -0
  57. daita/plugins/mysql.py +362 -0
  58. daita/plugins/postgresql.py +342 -0
  59. daita/plugins/redis_messaging.py +500 -0
  60. daita/plugins/rest.py +537 -0
  61. daita/plugins/s3.py +770 -0
  62. daita/plugins/slack.py +729 -0
  63. daita/utils/__init__.py +18 -0
  64. daita_agents-0.2.0.dist-info/METADATA +409 -0
  65. daita_agents-0.2.0.dist-info/RECORD +69 -0
  66. daita_agents-0.2.0.dist-info/WHEEL +5 -0
  67. daita_agents-0.2.0.dist-info/entry_points.txt +2 -0
  68. daita_agents-0.2.0.dist-info/licenses/LICENSE +56 -0
  69. daita_agents-0.2.0.dist-info/top_level.txt +1 -0
daita/cli/core/managed_deploy.py
@@ -0,0 +1,709 @@
+ """
+ Managed deployment for Daita CLI - No AWS credentials required.
+
+ This uploads packages to the Daita API, which handles all AWS infrastructure.
+ Users only need DAITA_API_KEY, not AWS credentials.
+ """
+ import os
+ import yaml
+ import json
+ import tarfile
+ import tempfile
+ import asyncio
+ import aiohttp
+ import aiofiles
+ import ssl
+ from pathlib import Path
+ from datetime import datetime
+ from ..utils import find_project_root
+ from .import_detector import ImportDetector
+ from ...config.settings import settings
+
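Note: deploy_to_managed_environment below is a coroutine, so a caller (for example the CLI entry point in daita/cli/main.py) has to drive it with asyncio. An illustrative invocation, not taken from the package:

    import asyncio
    from daita.cli.core.managed_deploy import deploy_to_managed_environment

    asyncio.run(deploy_to_managed_environment(environment="staging", dry_run=True, verbose=True))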
+ async def deploy_to_managed_environment(environment='production', force=False, dry_run=False, verbose=False):
+     """Deploy to Daita-managed cloud environment."""
+
+     # Find project root
+     project_root = find_project_root()
+     if not project_root:
+         raise ValueError("Not in a Daita project. Run 'daita init' first.")
+
+     # Load project config
+     config = _load_project_config(project_root)
+     if not config:
+         raise ValueError("No daita-project.yaml found")
+
+     project_name = config.get('name', 'unknown')
+
+     # Validate version is specified in YAML
+     if not config.get('version'):
+         raise ValueError("Version must be specified in daita-project.yaml")
+
+     if verbose:
+         print(f" Version: {config.get('version')}")
+
+     print(f" Deploying '{project_name}' to Daita-managed {environment}")
+
+     if dry_run:
+         print(f" Dry run - showing what would be deployed:")
+         _show_deployment_plan(project_root, config, environment)
+         return
+
+     # Check for DAITA_API_KEY (this should be caught by main() but adding as safeguard)
+     api_key = os.getenv("DAITA_API_KEY")
+     if not api_key:
+         from ..utils import show_upgrade_message
+         show_upgrade_message()
+         return
+
+     # Create deployment package
+     package_path = _create_deployment_package(project_root, config, verbose)
+
+     try:
+         # Upload package to Daita API
+         upload_result = await _upload_package_to_api(
+             package_path=package_path,
+             project_name=project_name,
+             environment=environment,
+             api_key=api_key,
+             verbose=verbose
+         )
+
+         if verbose:
+             print(f" Package uploaded: {upload_result['upload_id']}")
+             print(f" Package hash: {upload_result['package_hash'][:16]}...")
+             print(f" Package size: {upload_result['package_size_bytes'] / 1024 / 1024:.1f}MB")
+
+         # Analyze project imports to determine required layers
+         if verbose:
+             print(f" Analyzing project imports for layer optimization...")
+
+         detector = ImportDetector()
+         import_analysis = detector.analyze_project(project_root)
+
+         # Deploy uploaded package with layer information
+         deployment_id = _generate_deployment_id(project_name, environment)
+
+         deploy_result = await _deploy_package_via_api(
+             upload_id=upload_result['upload_id'],
+             deployment_id=deployment_id,
+             project_name=project_name,
+             environment=environment,
+             config=config,
+             import_analysis=import_analysis,
+             api_key=api_key,
+             verbose=verbose
+         )
+
+         print(f" Deployed to Daita-managed {environment}")
+         print(f" Deployment ID: {deployment_id}")
+
+         # Show deployed functions
+         if deploy_result.get('functions'):
+             print(f" Lambda Functions:")
+             for func in deploy_result['functions']:
+                 # Get name from either agent_name or workflow_name
+                 name = func.get('agent_name') or func.get('workflow_name', 'Unknown')
+                 if func.get('status') == 'deployed':
+                     print(f" {name}: {func['function_name']}")
+                 else:
+                     print(f" {name}: {func.get('error', 'Unknown error')}")
+
+
+     except aiohttp.ClientConnectorError:
+         print(" Cannot connect to deployment host")
+         print(" Check your internet connection and try again")
+         raise ValueError("Cannot connect to deployment host")
+     except aiohttp.ClientError as e:
+         print(" Deployment connection failed")
+         if verbose:
+             print(f" Details: {str(e)}")
+         raise ValueError("Deployment connection failed")
+     finally:
+         # Clean up temporary package
+         if package_path.exists():
+             os.unlink(package_path)
+
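The success path above assumes deploy_result["functions"] is a list of dicts that carry either agent_name or workflow_name, plus status, function_name, and optionally error. An illustrative entry (values made up):

    {"agent_name": "Report Agent", "status": "deployed", "function_name": "example-function-name"}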
+ def _get_secure_api_endpoint() -> str:
+     """Get validated API endpoint with security checks."""
+     # Use production API endpoint (can be overridden via environment)
+     endpoint = os.getenv("DAITA_API_ENDPOINT") or "https://ondk4sdyv0.execute-api.us-east-1.amazonaws.com"
+
+     try:
+         return settings.validate_endpoint(endpoint)
+     except ValueError as e:
+         raise ValueError(f"Invalid API endpoint configuration: {e}")
+
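An illustrative override of the default endpoint (hypothetical URL, and only accepted if settings.validate_endpoint allows it):

    os.environ["DAITA_API_ENDPOINT"] = "https://api.example.com"
    endpoint = _get_secure_api_endpoint()  # validated before use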
+ async def _upload_package_to_api(
+     package_path: Path,
+     project_name: str,
+     environment: str,
+     api_key: str,
+     verbose: bool = False
+ ) -> dict:
+     """Upload deployment package to Daita API with progress tracking."""
+
+     api_endpoint = _get_secure_api_endpoint()
+     package_size = package_path.stat().st_size
+
+     if verbose:
+         print(f" Uploading package to secure API endpoint...")
+         print(f" Package size: {package_size / 1024 / 1024:.1f}MB")
+
+     headers = {
+         "Authorization": f"Bearer {api_key}",
+         "User-Agent": "Daita-CLI/1.0.0"
+     }
+
+     # Create secure SSL context
+     ssl_context = ssl.create_default_context()
+     ssl_context.check_hostname = True
+     ssl_context.verify_mode = ssl.CERT_REQUIRED
+
+     # For large packages, show progress
+     if package_size > 10 * 1024 * 1024: # Show progress for packages > 10MB
+         return await _upload_with_progress(
+             package_path, project_name, environment, api_endpoint, headers, ssl_context, verbose
+         )
+     else:
+         return await _upload_standard(
+             package_path, project_name, environment, api_endpoint, headers, ssl_context, verbose
+         )
+
+
+ async def _upload_standard(
+     package_path: Path,
+     project_name: str,
+     environment: str,
+     api_endpoint: str,
+     headers: dict,
+     ssl_context: ssl.SSLContext,
+     verbose: bool
+ ) -> dict:
+     """Standard upload for smaller packages."""
+     # Prepare multipart form data
+     data = aiohttp.FormData()
+     data.add_field('project_name', project_name)
+     data.add_field('environment', environment)
+
+     # Add file
+     async with aiofiles.open(package_path, 'rb') as f:
+         file_content = await f.read()
+         data.add_field('package', file_content, filename=f"{project_name}.zip", content_type='application/zip')
+
+     # Create secure connector
+     connector = aiohttp.TCPConnector(ssl=ssl_context)
+
+     async with aiohttp.ClientSession(connector=connector) as session:
+         url = f"{api_endpoint}/api/v1/packages/upload"
+
+         async with session.post(url, data=data, headers=headers, timeout=300) as response:
+             return await _handle_upload_response(response, verbose)
+
+
+ async def _upload_with_progress(
+     package_path: Path,
+     project_name: str,
+     environment: str,
+     api_endpoint: str,
+     headers: dict,
+     ssl_context: ssl.SSLContext,
+     verbose: bool
+ ) -> dict:
+     """Upload with progress tracking for large packages."""
+     import sys
+
+     package_size = package_path.stat().st_size
+     uploaded_size = 0
+
+     class ProgressReader:
+         def __init__(self, file_obj, total_size):
+             self.file_obj = file_obj
+             self.total_size = total_size
+             self.uploaded = 0
+             self.last_progress = 0
+
+         def read(self, chunk_size):
+             chunk = self.file_obj.read(chunk_size)
+             if chunk:
+                 self.uploaded += len(chunk)
+                 progress = int((self.uploaded / self.total_size) * 100)
+
+                 # Update progress every 5%
+                 if progress >= self.last_progress + 5:
+                     self.last_progress = progress
+                     if verbose:
+                         print(f" Upload progress: {progress}% ({self.uploaded / 1024 / 1024:.1f}MB / {self.total_size / 1024 / 1024:.1f}MB)")
+                     else:
+                         # Simple progress bar
+                         bar_length = 30
+                         filled_length = int(bar_length * progress // 100)
+                         bar = '█' * filled_length + '░' * (bar_length - filled_length)
+                         print(f"\r Uploading: [{bar}] {progress}%", end='', flush=True)
+
+             return chunk
+
+     # Prepare multipart form data with progress tracking
+     data = aiohttp.FormData()
+     data.add_field('project_name', project_name)
+     data.add_field('environment', environment)
+
+     # Add file with progress tracking
+     with open(package_path, 'rb') as f:
+         progress_reader = ProgressReader(f, package_size)
+         data.add_field('package', progress_reader, filename=f"{project_name}.zip", content_type='application/zip')
+
+         # Create secure connector
+         connector = aiohttp.TCPConnector(ssl=ssl_context)
+
+         async with aiohttp.ClientSession(connector=connector) as session:
+             url = f"{api_endpoint}/api/v1/packages/upload"
+
+             async with session.post(url, data=data, headers=headers, timeout=600) as response:
+                 if not verbose:
+                     print() # New line after progress bar
+                 return await _handle_upload_response(response, verbose)
+
+
+ async def _handle_upload_response(response, verbose: bool) -> dict:
+     """Handle upload response with proper error handling."""
+     if response.status == 200:
+         result = await response.json()
+         if verbose:
+             print(f" Package uploaded successfully")
+         return result
+     elif response.status == 401:
+         error_text = await response.text()
+         print(" Authentication failed - check your DAITA_API_KEY")
+         print(" Get a new API key at daita-tech.io")
+         raise ValueError("Invalid API key")
+     elif response.status == 413:
+         print(" Package too large (max 250MB)")
+         print(" Try removing large dependencies or data files")
+         raise ValueError("Package size exceeded")
+     else:
+         error_text = await response.text()
+         print(f" Upload failed (HTTP {response.status})")
+         if verbose:
+             print(f" Details: {error_text}")
+         raise ValueError("Upload failed")
+
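For reference, the 200 branch above returns the parsed JSON unchanged; deploy_to_managed_environment expects at least these keys in it (other fields may be present, values here are illustrative):

    {"upload_id": "…", "package_hash": "…", "package_size_bytes": 4821394}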
+ async def _deploy_package_via_api(
+     upload_id: str,
+     deployment_id: str,
+     project_name: str,
+     environment: str,
+     config: dict,
+     import_analysis: dict,
+     api_key: str,
+     verbose: bool = False
+ ) -> dict:
+     """Deploy uploaded package via Daita API."""
+
+     api_endpoint = _get_secure_api_endpoint()
+
+     if verbose:
+         print(f" Deploying to secure Lambda functions...")
+
+     headers = {
+         "Authorization": f"Bearer {api_key}",
+         "Content-Type": "application/json",
+         "User-Agent": "Daita-CLI/1.0.0"
+     }
+
+     # Create secure SSL context
+     ssl_context = ssl.create_default_context()
+     ssl_context.check_hostname = True
+     ssl_context.verify_mode = ssl.CERT_REQUIRED
+
+     # Get version from YAML (required)
+     yaml_version = config.get("version")
+     if not yaml_version:
+         raise ValueError("Version must be specified in daita-project.yaml")
+
+     # Prepare deployment request with layer optimization
+     deploy_data = {
+         "upload_id": upload_id,
+         "deployment_id": deployment_id,
+         "project_name": project_name,
+         "environment": environment,
+         "version": yaml_version,
+         "agents_config": _extract_agent_configs(config),
+         "workflows_config": _extract_workflow_configs(config),
+         "schedules_config": _extract_schedules_config(config, environment, verbose),
+         "import_analysis": import_analysis,
+         "layer_requirements": _determine_layer_requirements(import_analysis, verbose)
+     }
+
+     # Create secure connector
+     connector = aiohttp.TCPConnector(ssl=ssl_context)
+
+     async with aiohttp.ClientSession(connector=connector) as session:
+         url = f"{api_endpoint}/api/v1/packages/deploy"
+
+         async with session.post(url, json=deploy_data, headers=headers, timeout=600) as response:
+             if response.status == 200:
+                 result = await response.json()
+                 if verbose:
+                     print(f" Deployment completed securely")
+                 return result
+             elif response.status == 401:
+                 print(" Authentication failed during deployment")
+                 print(" Get a new API key at daita-tech.io")
+                 raise ValueError("Invalid API key")
+             elif response.status == 404:
+                 print(" Upload not found - it may have expired")
+                 print(" Try uploading again with: daita push")
+                 raise ValueError("Upload expired")
+             else:
+                 error_text = await response.text()
+                 print(f" Deployment failed (HTTP {response.status})")
+                 if verbose:
+                     print(f" Details: {error_text}")
+                 raise ValueError("Deployment failed")
+
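Condensed sketch of the two-step call sequence that deploy_to_managed_environment performs with these helpers (placeholder variables, no error handling):

    upload = await _upload_package_to_api(package_path, "my-project", "production", api_key)
    result = await _deploy_package_via_api(upload["upload_id"], _generate_deployment_id("my-project", "production"),
                                           "my-project", "production", config, import_analysis, api_key)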
+ def _create_deployment_package(project_root: Path, config: dict, verbose: bool = False) -> Path:
+     """Create deployment package with all user project files."""
+     print(f" Creating deployment package...")
+
+     # Create temp directory
+     with tempfile.NamedTemporaryFile(suffix='.zip', delete=False) as temp_file:
+         package_path = Path(temp_file.name)
+
+     import zipfile
+     with zipfile.ZipFile(package_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+
+         # OPTIMIZATION: Framework now served via Lambda layers - no longer bundled!
+         # This reduces package size from 50MB+ to <5MB
+
+         # Directories to exclude from packaging
+         exclude_dirs = {'.daita', '__pycache__', '.git', '.pytest_cache',
+                         'venv', 'env', '.venv', 'node_modules', '.mypy_cache',
+                         'tests', 'data'} # Add common dirs to exclude
+
+         # Add all project directories (except excluded ones)
+         for item in project_root.iterdir():
+             if item.is_dir() and item.name not in exclude_dirs and not item.name.startswith('.'):
+                 _add_directory_to_zip(zipf, item, item.name)
+                 if verbose:
+                     file_count = len(list(item.rglob('*.py')))
+                     print(f" Added directory: {item.name}/ ({file_count} Python files)")
+
+         # Add project configuration (required)
+         config_file = project_root / 'daita-project.yaml'
+         if config_file.exists():
+             zipf.write(config_file, 'daita-project.yaml')
+
+         # Add requirements if they exist
+         requirements_file = project_root / 'requirements.txt'
+         if requirements_file.exists():
+             zipf.write(requirements_file, 'requirements.txt')
+
+         # Add .env file for user's API keys (even though it's in .gitignore)
+         env_file = project_root / '.env'
+         if env_file.exists():
+             zipf.write(env_file, '.env')
+             if verbose:
+                 print(f" Added .env file to package")
+
+         # Add minimal bootstrap handler (framework loaded from layers)
+         _add_bootstrap_handler(zipf)
+
+     if verbose:
+         package_size = package_path.stat().st_size
+         print(f" Package: {package_size / 1024 / 1024:.1f}MB")
+
+     return package_path
+
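The archive produced above ends up with this layout (directory names depend on the user's project; optional entries only when the files exist):

    <project dirs>/…         # every non-excluded, non-hidden top-level directory, recursively
    daita-project.yaml
    requirements.txt         # optional
    .env                     # optional
    lambda_handler.py        # bootstrap written by _add_bootstrap_handler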
+ def _add_bootstrap_handler(zipf):
+     """Add minimal bootstrap handler that loads framework from layers."""
+
+     # Create bootstrap handler that delegates to the framework layer
+     bootstrap_handler_content = '''"""
+ Bootstrap handler for Daita Lambda functions.
+
+ This handler loads the Daita framework from Lambda layers and delegates
+ execution to the universal handler. This approach dramatically reduces
+ package sizes by serving the framework from pre-built layers.
+ """
+
+ import sys
+ import os
+ import json
+ from typing import Dict, Any
+
+ def lambda_handler(event: Dict[str, Any], context) -> Dict[str, Any]:
+     """
+     Bootstrap handler that loads framework from layers and delegates execution.
+
+     The Daita framework is provided via Lambda layers:
+     - daita-framework-optimized: Core framework code (0.12MB)
+     - daita-core-dependencies: Essential dependencies (19MB)
+     """
+
+     try:
+         # Framework is available via layers - import directly
+         from cloud.lambda_handler import lambda_handler as framework_handler
+
+         # Delegate to the framework handler
+         return framework_handler(event, context)
+
+     except ImportError as e:
+         # Fallback error handling if layers aren't properly configured
+         return {
+             'statusCode': 500,
+             'body': json.dumps({
+                 'error': 'Framework layer not available',
+                 'message': str(e),
+                 'help': 'Ensure Lambda function has daita-framework-optimized and daita-core-dependencies layers attached'
+             })
+         }
+     except Exception as e:
+         # General error handling
+         return {
+             'statusCode': 500,
+             'body': json.dumps({
+                 'error': 'Execution failed',
+                 'message': str(e)
+             })
+         }
+ '''
+
+     # Add bootstrap handler to package
+     import io
+     handler_bytes = bootstrap_handler_content.encode('utf-8')
+     zipf.writestr('lambda_handler.py', handler_bytes)
+
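If the two layers named in the embedded help text (daita-framework-optimized and daita-core-dependencies) are not attached to the function, the generated handler returns an error payload along these lines (illustrative):

    {"statusCode": 500, "body": "{\"error\": \"Framework layer not available\", …}"}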
+ def _add_directory_to_zip(zipf, source_dir: Path, archive_name: str):
+     """Add directory to zip recursively."""
+     for file_path in source_dir.rglob('*'):
+         # Include .env files even though they start with '.'
+         if file_path.is_file() and (not file_path.name.startswith('.') or file_path.name == '.env'):
+             relative_path = file_path.relative_to(source_dir)
+             archive_path = f"{archive_name}/{relative_path}"
+             zipf.write(file_path, archive_path)
+
+ def _extract_agent_configs(config: dict) -> list:
+     """Extract agent configurations."""
+     agents = []
+
+     # Get agents from config
+     config_agents = config.get("agents", [])
+     for agent in config_agents:
+         agents.append({
+             "name": agent.get("name", "Unknown Agent"),
+             "type": agent.get("type", "substrate"),
+             "enabled": agent.get("enabled", True),
+             "settings": agent.get("settings", {})
+         })
+
+     # If no agents in config, scan agents directory
+     if not agents:
+         project_root = find_project_root()
+         if project_root:
+             agents_dir = project_root / "agents"
+             if agents_dir.exists():
+                 for agent_file in agents_dir.glob("*.py"):
+                     if agent_file.name != "__init__.py":
+                         agent_name = agent_file.stem.replace("_", " ").title()
+                         agents.append({
+                             "name": agent_name,
+                             "type": "substrate",
+                             "enabled": True,
+                             "file": agent_file.name
+                         })
+
+     return agents
+
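For reference, an agents section of daita-project.yaml that this extractor accepts parses (via yaml.safe_load) to something like the following; the name and settings values are illustrative:

    {"agents": [{"name": "Report Agent", "type": "substrate", "enabled": True, "settings": {}}]}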
+ def _extract_workflow_configs(config: dict) -> list:
+     """Extract workflow configurations."""
+     workflows = []
+
+     # Get workflows from config
+     config_workflows = config.get("workflows", [])
+     for workflow in config_workflows:
+         workflows.append({
+             "name": workflow.get("name", "Unknown Workflow"),
+             "type": workflow.get("type", "basic"),
+             "enabled": workflow.get("enabled", True),
+             "settings": workflow.get("settings", {})
+         })
+
+     return workflows
+
+
+ def _extract_schedules_config(config: dict, environment: str, verbose: bool = False) -> dict:
+     """Extract and validate scheduling configuration."""
+     try:
+         from ...config.scheduling import parse_schedules_from_yaml, apply_environment_overrides
+
+         # Get base schedules configuration
+         schedules_data = config.get('schedules', {})
+
+         if not schedules_data:
+             # No schedules configured
+             return {}
+
+         # Parse base schedule configuration
+         base_schedules = parse_schedules_from_yaml(schedules_data)
+
+         # Apply environment-specific overrides
+         environments_config = config.get('environments', {})
+         final_schedules = apply_environment_overrides(
+             base_schedules, environments_config, environment
+         )
+
+         # Validate against available agents and workflows
+         available_agents = [agent.get('name') for agent in config.get('agents', [])]
+         available_workflows = [workflow.get('name') for workflow in config.get('workflows', [])]
+
+         final_schedules.validate(available_agents, available_workflows)
+
+         # Convert to serializable format
+         schedules_dict = {
+             'agents': {},
+             'workflows': {}
+         }
+
+         for agent_name, schedule_config in final_schedules.agents.items():
+             schedules_dict['agents'][agent_name] = {
+                 'cron': schedule_config.cron,
+                 'data': schedule_config.data,
+                 'enabled': schedule_config.enabled,
+                 'timezone': schedule_config.timezone,
+                 'description': schedule_config.description
+             }
+
+         for workflow_name, schedule_config in final_schedules.workflows.items():
+             schedules_dict['workflows'][workflow_name] = {
+                 'cron': schedule_config.cron,
+                 'data': schedule_config.data,
+                 'enabled': schedule_config.enabled,
+                 'timezone': schedule_config.timezone,
+                 'description': schedule_config.description
+             }
+
+         if verbose and not final_schedules.is_empty():
+             agent_count = len(final_schedules.agents)
+             workflow_count = len(final_schedules.workflows)
+             print(f" Schedules: {agent_count} agents, {workflow_count} workflows")
+
+             for agent_name, schedule in final_schedules.agents.items():
+                 if schedule.enabled:
+                     print(f" Agent {agent_name}: {schedule.cron}")
+
+             for workflow_name, schedule in final_schedules.workflows.items():
+                 if schedule.enabled:
+                     print(f" Workflow {workflow_name}: {schedule.cron}")
+
+         return schedules_dict
+
+     except ImportError:
+         # Schedule dependencies not available - skip scheduling
+         if verbose:
+             print(" Scheduling: Not available (missing croniter dependency)")
+         return {}
+
+     except Exception as e:
+         print(f" Schedule validation failed: {str(e)}")
+         print(" Check your schedule configuration in daita-project.yaml")
+         raise ValueError(f"Invalid schedule configuration: {e}")
+
+
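The field names serialized above (cron, data, enabled, timezone, description) suggest a schedules block roughly like the sketch below once parsed; the exact nesting is defined by daita/config/scheduling, which does not appear in the file list above, so this build would take the ImportError fallback:

    {"schedules": {"agents": {"Report Agent": {"cron": "0 9 * * 1-5", "timezone": "UTC", "enabled": True}}}}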
+ def _determine_layer_requirements(import_analysis: dict, verbose: bool = False) -> dict:
+     """Determine layer requirements without exposing internal ARNs."""
+     layer_requirements = {
+         'needs_framework': True, # Always needed
+         'needs_core_dependencies': True, # Always needed
+         'needs_common_dependencies': False,
+         'common_packages_needed': [],
+         'optimization_summary': {}
+     }
+
+     # Check if common dependencies layer is needed
+     required_layer_types = import_analysis.get('required_layers', {})
+     if 'common_dependencies' in required_layer_types:
+         layer_requirements['needs_common_dependencies'] = True
+         packages = required_layer_types['common_dependencies']
+         layer_requirements['common_packages_needed'] = packages
+         if verbose:
+             print(f" Common dependencies needed for: {', '.join(packages)}")
+
+     # Add optimization summary (no internal details)
+     total_imports = import_analysis.get('total_imports', 0)
+     common_packages = len(layer_requirements['common_packages_needed'])
+
+     layer_requirements['optimization_summary'] = {
+         'total_imports_detected': total_imports,
+         'packages_optimized_by_layers': common_packages,
+         'optimization_enabled': common_packages > 0
+     }
+
+     if verbose:
+         print(f" Framework layer: Required")
+         print(f" Core dependencies layer: Required")
+         if layer_requirements['needs_common_dependencies']:
+             print(f" Optimization: {common_packages}/{total_imports} packages served by layers")
+         else:
+             print(f" No additional layer optimization needed")
+
+     return layer_requirements
+
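The import_analysis consumed here comes from ImportDetector.analyze_project; only required_layers and total_imports are read. An illustrative input (package names made up):

    {"total_imports": 14, "required_layers": {"common_dependencies": ["pandas", "requests"]}}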
+ def _generate_deployment_id(project_name: str, environment: str) -> str:
+     """Generate deployment ID."""
+     import uuid
+
+     # Generate a proper UUID for deployment_id
+     return str(uuid.uuid4())
+
+ def _show_deployment_plan(project_root: Path, config: dict, environment: str):
+     """Show deployment plan."""
+     print(f"")
+     print(f"Project: {config.get('name')}")
+     print(f"Environment: Daita-managed {environment}")
+     print(f"")
+
+     # Show agents
+     agents = _extract_agent_configs(config)
+     if agents:
+         print(f"Agents ({len(agents)}):")
+         for agent in agents:
+             print(f" {agent['name']}")
+
+     # Show workflows
+     workflows = _extract_workflow_configs(config)
+     if workflows:
+         print(f"Workflows ({len(workflows)}):")
+         for workflow in workflows:
+             print(f" {workflow['name']}")
+
+     # Show schedules
+     try:
+         schedules = _extract_schedules_config(config, environment, verbose=False)
+         if schedules.get('agents') or schedules.get('workflows'):
+             print(f"Schedules:")
+             for agent_name, schedule in schedules.get('agents', {}).items():
+                 if schedule.get('enabled', True):
+                     print(f" Agent {agent_name}: {schedule['cron']}")
+             for workflow_name, schedule in schedules.get('workflows', {}).items():
+                 if schedule.get('enabled', True):
+                     print(f" Workflow {workflow_name}: {schedule['cron']}")
+     except Exception:
+         # Skip schedule display if there are issues
+         pass
+
+     print(f"")
+     print(f"Deployment Details:")
+     print(f" Infrastructure: Daita-managed AWS Lambda + EventBridge")
+     print(f" Package Upload: Via Daita API")
+     print(f" Authentication: DAITA_API_KEY only")
+     print(f" No AWS credentials required")
+
+ def _load_project_config(project_root: Path):
+     """Load project configuration."""
+     config_file = project_root / 'daita-project.yaml'
+     if not config_file.exists():
+         return None
+
+     with open(config_file, 'r') as f:
+         return yaml.safe_load(f)