daita_agents-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- daita/__init__.py +216 -0
- daita/agents/__init__.py +33 -0
- daita/agents/base.py +743 -0
- daita/agents/substrate.py +1141 -0
- daita/cli/__init__.py +145 -0
- daita/cli/__main__.py +7 -0
- daita/cli/ascii_art.py +44 -0
- daita/cli/core/__init__.py +0 -0
- daita/cli/core/create.py +254 -0
- daita/cli/core/deploy.py +473 -0
- daita/cli/core/deployments.py +309 -0
- daita/cli/core/import_detector.py +219 -0
- daita/cli/core/init.py +481 -0
- daita/cli/core/logs.py +239 -0
- daita/cli/core/managed_deploy.py +709 -0
- daita/cli/core/run.py +648 -0
- daita/cli/core/status.py +421 -0
- daita/cli/core/test.py +239 -0
- daita/cli/core/webhooks.py +172 -0
- daita/cli/main.py +588 -0
- daita/cli/utils.py +541 -0
- daita/config/__init__.py +62 -0
- daita/config/base.py +159 -0
- daita/config/settings.py +184 -0
- daita/core/__init__.py +262 -0
- daita/core/decision_tracing.py +701 -0
- daita/core/exceptions.py +480 -0
- daita/core/focus.py +251 -0
- daita/core/interfaces.py +76 -0
- daita/core/plugin_tracing.py +550 -0
- daita/core/relay.py +779 -0
- daita/core/reliability.py +381 -0
- daita/core/scaling.py +459 -0
- daita/core/tools.py +554 -0
- daita/core/tracing.py +770 -0
- daita/core/workflow.py +1144 -0
- daita/display/__init__.py +1 -0
- daita/display/console.py +160 -0
- daita/execution/__init__.py +58 -0
- daita/execution/client.py +856 -0
- daita/execution/exceptions.py +92 -0
- daita/execution/models.py +317 -0
- daita/llm/__init__.py +60 -0
- daita/llm/anthropic.py +291 -0
- daita/llm/base.py +530 -0
- daita/llm/factory.py +101 -0
- daita/llm/gemini.py +355 -0
- daita/llm/grok.py +219 -0
- daita/llm/mock.py +172 -0
- daita/llm/openai.py +220 -0
- daita/plugins/__init__.py +141 -0
- daita/plugins/base.py +37 -0
- daita/plugins/base_db.py +167 -0
- daita/plugins/elasticsearch.py +849 -0
- daita/plugins/mcp.py +481 -0
- daita/plugins/mongodb.py +520 -0
- daita/plugins/mysql.py +362 -0
- daita/plugins/postgresql.py +342 -0
- daita/plugins/redis_messaging.py +500 -0
- daita/plugins/rest.py +537 -0
- daita/plugins/s3.py +770 -0
- daita/plugins/slack.py +729 -0
- daita/utils/__init__.py +18 -0
- daita_agents-0.2.0.dist-info/METADATA +409 -0
- daita_agents-0.2.0.dist-info/RECORD +69 -0
- daita_agents-0.2.0.dist-info/WHEEL +5 -0
- daita_agents-0.2.0.dist-info/entry_points.txt +2 -0
- daita_agents-0.2.0.dist-info/licenses/LICENSE +56 -0
- daita_agents-0.2.0.dist-info/top_level.txt +1 -0
daita/cli/core/deploy.py
ADDED
@@ -0,0 +1,473 @@
"""
Deployment wrapper for Daita CLI with managed cloud infrastructure.
Users only need DAITA_API_KEY - no AWS credentials required.

ARCHITECTURE: This module serves as compatibility layer that delegates
to managed_deploy.py. The CLI uses a single deployment path via 'push' command.
No duplicate deployment systems - architecture is already consolidated.
"""
import os
import yaml
import json
import tarfile
import tempfile
import asyncio
import aiohttp
from pathlib import Path
from datetime import datetime
from .managed_deploy import deploy_to_managed_environment
from ..utils import find_project_root

async def deploy_to_environment(environment='production', force=False, dry_run=False, verbose=False):
    """Deploy to Daita-managed environment - no AWS credentials required."""

    # Use the new managed deployment system
    await deploy_to_managed_environment(
        environment=environment,
        force=force,
        dry_run=dry_run,
        verbose=verbose
    )
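
# Illustrative usage, not part of the released file: deploy_to_environment is
# an async entry point, so a caller outside an event loop would drive it with
# asyncio.run, using the parameters from the signature above, e.g.:
#
#     asyncio.run(deploy_to_environment(environment="staging", dry_run=True, verbose=True))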

async def _register_deployment_with_dashboard(
    deployment_id: str,
    project_name: str,
    environment: str,
    config: dict,
    verbose: bool = False
):
    """Register deployment with Daita Dashboard API."""
    try:
        # Get API key
        api_key = os.getenv("DAITA_API_KEY")
        if not api_key:
            from ..utils import show_upgrade_message
            show_upgrade_message()
            return

        # Get dashboard API endpoint
        api_endpoint = os.getenv("DAITA_DASHBOARD_API_OVERRIDE") or os.getenv("DAITA_API_ENDPOINT")
        if not api_endpoint:
            raise ValueError("DAITA_DASHBOARD_API_OVERRIDE or DAITA_API_ENDPOINT environment variable required")

        # Prepare deployment data
        deployment_data = {
            "deployment_id": deployment_id,
            "project_name": project_name,
            "environment": environment,
            "version": config.get("version", "1.0.0"),
            "deployed_at": datetime.utcnow().isoformat(),
            "agents": _extract_agent_configs(config),
            "workflows": _extract_workflow_configs(config),
            "deployment_info": {
                "cli_version": "0.1.0",
                "deployed_from": "daita_cli",
                "project_type": config.get("type", "basic"),
                "environments": list(config.get("environments", {}).keys())
            }
        }

        if verbose:
            print(f" Registering deployment with dashboard...")
            print(f" API endpoint: {api_endpoint}")

        # Make API request
        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
            "User-Agent": "Daita-CLI/0.1.0"
        }

        async with aiohttp.ClientSession() as session:
            url = f"{api_endpoint}/api/v1/deployments"

            async with session.post(url, json=deployment_data, headers=headers) as response:
                if response.status == 201:
                    print(f" Deployment registered with dashboard")
                    if verbose:
                        response_data = await response.json()
                        print(f" Response: {response_data.get('message', 'Success')}")

                elif response.status == 401:
                    print(f" Dashboard authentication failed - check your DAITA_API_KEY")
                    if verbose:
                        error_text = await response.text()
                        print(f" Error: {error_text}")

                else:
                    error_text = await response.text()
                    print(f" Dashboard registration failed (HTTP {response.status})")
                    if verbose:
                        print(f" Error: {error_text}")
                    print(f" Deployment will still work, just won't appear in dashboard")

    except asyncio.TimeoutError:
        print(f" Dashboard registration timed out")
        print(f" Deployment successful, but dashboard connection failed")

    except Exception as e:
        if verbose:
            print(f" Dashboard registration failed: {str(e)}")
        else:
            print(f" Dashboard registration failed")
        print(f" Deployment successful, but won't appear in dashboard")
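
# Sketch of the request the function above issues, reconstructed from its own
# code; the endpoint path, headers, and payload keys are as written there,
# while the concrete values are hypothetical:
#
#     POST {api_endpoint}/api/v1/deployments
#     Authorization: Bearer <DAITA_API_KEY>
#     Content-Type: application/json
#
#     {
#       "deployment_id": "myproject_production_20250101_000000_ab12cd34",
#       "project_name": "myproject",
#       "environment": "production",
#       "version": "1.0.0",
#       "deployed_at": "2025-01-01T00:00:00",
#       "agents": [...],
#       "workflows": [...],
#       "deployment_info": {...}
#     }
#
# The asyncio.TimeoutError branch relies on aiohttp's default session timeout,
# since the ClientSession is created without an explicit one.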

def _extract_agent_configs(config: dict) -> list:
    """Extract agent configurations from project config."""
    agents = []

    # Get agents from config
    config_agents = config.get("agents", [])
    for agent in config_agents:
        agent_config = {
            "name": agent.get("name", "Unknown Agent"),
            "type": agent.get("type", "substrate"),
            "enabled": agent.get("enabled", True),
            "settings": agent.get("settings", {})
        }
        agents.append(agent_config)

    # If no agents in config, scan agents directory
    if not agents:
        project_root = find_project_root()
        if project_root:
            agents_dir = project_root / "agents"
            if agents_dir.exists():
                for agent_file in agents_dir.glob("*.py"):
                    if agent_file.name != "__init__.py":
                        agent_name = agent_file.stem.replace("_", " ").title()
                        agents.append({
                            "name": agent_name,
                            "type": "substrate",
                            "enabled": True,
                            "file": agent_file.name
                        })

    return agents
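
# Example of the directory fallback above, under a hypothetical layout: with
# no 'agents' key in config and a file agents/data_cleaner.py on disk, the
# scan yields
#
#     {"name": "Data Cleaner", "type": "substrate", "enabled": True, "file": "data_cleaner.py"}
#
# because the stem "data_cleaner" is underscore-split and title-cased.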

def _extract_workflow_configs(config: dict) -> list:
    """Extract workflow configurations from project config."""
    workflows = []

    # Get workflows from config
    config_workflows = config.get("workflows", [])
    for workflow in config_workflows:
        workflow_config = {
            "name": workflow.get("name", "Unknown Workflow"),
            "type": workflow.get("type", "basic"),
            "enabled": workflow.get("enabled", True),
            "agents": workflow.get("agents", []),
            "settings": workflow.get("settings", {})
        }
        workflows.append(workflow_config)

    # If no workflows in config, scan workflows directory
    if not workflows:
        project_root = find_project_root()
        if project_root:
            workflows_dir = project_root / "workflows"
            if workflows_dir.exists():
                for workflow_file in workflows_dir.glob("*.py"):
                    if workflow_file.name != "__init__.py":
                        workflow_name = workflow_file.stem.replace("_", " ").title()
                        workflows.append({
                            "name": workflow_name,
                            "type": "basic",
                            "enabled": True,
                            "file": workflow_file.name
                        })

    return workflows

def _generate_deployment_id(project_name: str, environment: str) -> str:
    """Generate a unique deployment ID."""
    import hashlib
    import uuid

    # Create a deterministic but unique ID
    timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
    unique_suffix = str(uuid.uuid4())[:8]

    # Clean project name for ID
    clean_name = "".join(c for c in project_name if c.isalnum() or c in "_-").lower()

    return f"{clean_name}_{environment}_{timestamp}_{unique_suffix}"
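
# Worked example with hypothetical inputs: _generate_deployment_id("My App!", "staging")
# keeps only alphanumerics, '_' and '-' ("MyApp"), lowercases, then appends a
# UTC timestamp and the first 8 characters of a uuid4, producing something like
#
#     "myapp_staging_20250101_120000_1a2b3c4d"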

def _create_deployment_package(project_root, config, verbose):
    """Create a deployment package (tar.gz)."""
    print(f" Creating deployment package...")

    # Create temp directory
    with tempfile.TemporaryDirectory() as temp_dir:
        package_dir = Path(temp_dir) / 'package'
        package_dir.mkdir()

        # Copy project files
        files_to_include = [
            'agents/',
            'workflows/',
            'daita-project.yaml',
            'requirements.txt'
        ]

        for file_pattern in files_to_include:
            _copy_files(project_root, package_dir, file_pattern, verbose)

        # Create package info
        package_info = {
            'name': config.get('name'),
            'version': config.get('version', '1.0.0'),
            'created_at': datetime.utcnow().isoformat(),
            'agents': [agent['name'] for agent in config.get('agents', [])],
            'workflows': [wf['name'] for wf in config.get('workflows', [])]
        }

        info_file = package_dir / 'package-info.json'
        with open(info_file, 'w') as f:
            json.dump(package_info, f, indent=2)

        # Create tar.gz
        package_path = project_root / '.daita' / f'deploy-{int(datetime.utcnow().timestamp())}.tar.gz'
        package_path.parent.mkdir(exist_ok=True)

        with tarfile.open(package_path, 'w:gz') as tar:
            tar.add(package_dir, arcname='.')

        if verbose:
            print(f" Package: {package_path}")
            print(f" Size: {package_path.stat().st_size} bytes")

        return package_path
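
# Resulting archive layout, following the copy list and package-info written
# above (directories contribute only their .py files via _copy_files; exact
# contents depend on the project):
#
#     ./agents/*.py
#     ./workflows/*.py
#     ./daita-project.yaml
#     ./requirements.txt
#     ./package-info.json
#
# The archive itself is written to <project_root>/.daita/deploy-<unix_ts>.tar.gz.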

async def _deploy_to_staging(package_path, config, verbose):
    """Deploy to staging environment using AWS Lambda."""
    print(f" Deploying to staging using AWS Lambda...")

    try:
        from ...cloud.lambda_deploy import LambdaDeployer

        # Find project root for Lambda packaging
        project_root = find_project_root()
        if not project_root:
            raise ValueError("Could not find project root")

        # Generate deployment ID
        deployment_id = _generate_deployment_id(config.get('name', 'unknown'), 'staging')

        # Deploy to Lambda
        deployer = LambdaDeployer()
        result = await deployer.deploy_agent(project_root, config, deployment_id, 'staging')

        print(f" Lambda functions deployed")
        print(f" API Gateway endpoints created")
        print(f" Agents ready for cloud execution")

        if verbose:
            for func in result['functions']:
                if func.get('status') == 'deployed':
                    print(f" {func['name']}: {func['function_name']}")
                    if 'api_endpoint' in func:
                        print(f" API: {func['api_endpoint'].get('endpoint_url', 'N/A')}")
                else:
                    print(f" {func['name']}: {func.get('error', 'Unknown error')}")

        print(f" Agents will automatically report traces to dashboard")
        return result

    except Exception as e:
        print(f" Lambda deployment failed: {e}")
        if verbose:
            import traceback
            print(f" Error details: {traceback.format_exc()}")
        raise
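
# Shape of `result` as consumed above, inferred from the subscripting rather
# than from LambdaDeployer's documentation: a dict with a 'functions' list
# whose items carry 'name' and 'status', plus 'function_name' and optionally
# 'api_endpoint': {'endpoint_url': ...} when deployed, or 'error' on failure.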

async def _deploy_to_production(package_path, config, force, verbose):
    """Deploy to production environment using AWS Lambda."""
    if not force:
        # Safety check for production
        confirm = input(f" Deploy to PRODUCTION? Type 'yes' to confirm: ")
        if confirm != 'yes':
            print(f" Deployment cancelled")
            return

    print(f" Deploying to production using AWS Lambda...")

    try:
        from ...cloud.lambda_deploy import LambdaDeployer

        # Find project root for Lambda packaging
        project_root = find_project_root()
        if not project_root:
            raise ValueError("Could not find project root")

        # Generate deployment ID
        deployment_id = _generate_deployment_id(config.get('name', 'unknown'), 'production')

        # Deploy to Lambda
        deployer = LambdaDeployer()
        result = await deployer.deploy_agent(project_root, config, deployment_id, 'production')

        print(f" Lambda functions deployed to production")
        print(f" API Gateway endpoints configured")
        print(f" Production environment variables set")
        print(f" Agents ready for production workloads")

        if verbose:
            for func in result['functions']:
                if func.get('status') == 'deployed':
                    print(f" {func['name']}: {func['function_name']}")
                    if 'api_endpoint' in func:
                        print(f" API: {func['api_endpoint'].get('endpoint_url', 'N/A')}")
                else:
                    print(f" {func['name']}: {func.get('error', 'Unknown error')}")

        print(f" All traces will be sent to dashboard API")
        return result

    except Exception as e:
        print(f" Production deployment failed: {e}")
        if verbose:
            import traceback
            print(f" Error details: {traceback.format_exc()}")
        raise

async def _deploy_to_custom(package_path, config, environment, verbose):
    """Deploy to custom environment."""
    print(f" Deploying to {environment}...")

    # Load environment config
    env_config = config.get('environments', {}).get(environment, {})
    if not env_config:
        print(f" No configuration found for environment '{environment}'")
        print(f" Add it to daita-project.yaml under 'environments'")

    # Basic deployment
    await asyncio.sleep(1)
    print(f" Deployed to {environment}")
    print(f" Set DAITA_ENVIRONMENT=production on {environment} for API-only operation tracking")
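
# A hypothetical 'environments' stanza in daita-project.yaml that the lookup
# above would find; the keys inside each environment are illustrative, since
# the code only checks that the mapping is non-empty:
#
#     environments:
#       staging:
#         region: us-east-1
#       production:
#         region: us-east-1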

def _show_deployment_plan(project_root, config, environment):
    """Show what would be deployed (dry run)."""
    print(f"")
    print(f"Project: {config.get('name')}")
    print(f"Environment: {environment}")
    print(f"")

    # Show agents
    agents = config.get('agents', [])
    if agents:
        print(f"Agents ({len(agents)}):")
        for agent in agents:
            print(f" {agent['name']}")
    else:
        # Scan agents directory
        agents_dir = project_root / 'agents'
        if agents_dir.exists():
            agent_files = [f for f in agents_dir.glob('*.py') if f.name != '__init__.py']
            print(f"Agents ({len(agent_files)}):")
            for agent_file in agent_files:
                agent_name = agent_file.stem.replace('_', ' ').title()
                print(f" {agent_name}")

    # Show workflows
    workflows = config.get('workflows', [])
    if workflows:
        print(f"Workflows ({len(workflows)}):")
        for workflow in workflows:
            print(f" {workflow['name']}")
    else:
        # Scan workflows directory
        workflows_dir = project_root / 'workflows'
        if workflows_dir.exists():
            workflow_files = [f for f in workflows_dir.glob('*.py') if f.name != '__init__.py']
            if workflow_files:
                print(f"Workflows ({len(workflow_files)}):")
                for workflow_file in workflow_files:
                    workflow_name = workflow_file.stem.replace('_', ' ').title()
                    print(f" {workflow_name}")

    # Show files
    print(f"")
    print(f"Files to deploy:")
    for file_path in ['agents/', 'workflows/', 'daita-project.yaml', 'requirements.txt']:
        full_path = project_root / file_path
        if full_path.exists():
            if full_path.is_dir():
                count = len(list(full_path.glob('*.py')))
                print(f" {file_path} ({count} files)")
            else:
                print(f" {file_path}")

    # Show dashboard integration info
    print(f"")
    print(f"Dashboard Integration:")
    api_key = os.getenv("DAITA_API_KEY")
    if api_key:
        print(f" DAITA_API_KEY configured")
        print(f" Deployment will be tracked in dashboard")
    else:
        print(f" No DAITA_API_KEY - set environment variable for dashboard integration")

def _copy_files(src_dir, dest_dir, pattern, verbose):
    """Copy files matching pattern."""
    src_path = src_dir / pattern

    if src_path.is_file():
        # Single file
        dest_file = dest_dir / pattern
        dest_file.parent.mkdir(parents=True, exist_ok=True)
        dest_file.write_bytes(src_path.read_bytes())
        if verbose:
            print(f" Copied: {pattern}")

    elif src_path.is_dir():
        # Directory
        for file_path in src_path.rglob('*.py'):
            rel_path = file_path.relative_to(src_dir)
            dest_file = dest_dir / rel_path
            dest_file.parent.mkdir(parents=True, exist_ok=True)
            dest_file.write_bytes(file_path.read_bytes())
            if verbose:
                print(f" Copied: {rel_path}")

def _save_deployment_record(project_root, environment, config, deployment_id):
    """Save deployment record for history."""
    deployments_file = project_root / '.daita' / 'deployments.json'

    # Load existing deployments
    if deployments_file.exists():
        with open(deployments_file, 'r') as f:
            deployments = json.load(f)
    else:
        deployments = []

    # Add new deployment
    deployment = {
        'deployment_id': deployment_id,
        'environment': environment,
        'timestamp': datetime.utcnow().isoformat(),
        'project_name': config.get('name'),
        'version': config.get('version', '1.0.0'),
        'agents': [agent['name'] for agent in config.get('agents', [])],
        'workflows': [wf['name'] for wf in config.get('workflows', [])],
        'dashboard_registered': bool(os.getenv("DAITA_API_KEY"))
    }

    deployments.append(deployment)

    # Keep only last 50 deployments
    deployments = deployments[-50:]

    # Save
    with open(deployments_file, 'w') as f:
        json.dump(deployments, f, indent=2)

def _load_project_config(project_root):
    """Load project configuration."""
    config_file = project_root / 'daita-project.yaml'
    if not config_file.exists():
        return None

    with open(config_file, 'r') as f:
        return yaml.safe_load(f)
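
For reference, the private helpers compose into a flow like the sketch below. This is an illustration assuming a project with daita-project.yaml at its root, not a code path the module wires up itself; the public deploy_to_environment entry point bypasses these helpers and delegates to managed_deploy.py, as the module docstring states.

    from daita.cli.utils import find_project_root
    from daita.cli.core.deploy import (
        _load_project_config,
        _generate_deployment_id,
        _create_deployment_package,
    )

    root = find_project_root()            # locate the project root
    config = _load_project_config(root)   # parsed daita-project.yaml, or None
    deployment_id = _generate_deployment_id(config.get('name', 'unknown'), 'staging')
    package = _create_deployment_package(root, config, verbose=True)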