daita-agents 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of daita-agents has been flagged as potentially problematic.

Files changed (69)
  1. daita/__init__.py +208 -0
  2. daita/agents/__init__.py +33 -0
  3. daita/agents/base.py +722 -0
  4. daita/agents/substrate.py +895 -0
  5. daita/cli/__init__.py +145 -0
  6. daita/cli/__main__.py +7 -0
  7. daita/cli/ascii_art.py +44 -0
  8. daita/cli/core/__init__.py +0 -0
  9. daita/cli/core/create.py +254 -0
  10. daita/cli/core/deploy.py +473 -0
  11. daita/cli/core/deployments.py +309 -0
  12. daita/cli/core/import_detector.py +219 -0
  13. daita/cli/core/init.py +382 -0
  14. daita/cli/core/logs.py +239 -0
  15. daita/cli/core/managed_deploy.py +709 -0
  16. daita/cli/core/run.py +648 -0
  17. daita/cli/core/status.py +421 -0
  18. daita/cli/core/test.py +239 -0
  19. daita/cli/core/webhooks.py +172 -0
  20. daita/cli/main.py +588 -0
  21. daita/cli/utils.py +541 -0
  22. daita/config/__init__.py +62 -0
  23. daita/config/base.py +159 -0
  24. daita/config/settings.py +184 -0
  25. daita/core/__init__.py +262 -0
  26. daita/core/decision_tracing.py +701 -0
  27. daita/core/exceptions.py +480 -0
  28. daita/core/focus.py +251 -0
  29. daita/core/interfaces.py +76 -0
  30. daita/core/plugin_tracing.py +550 -0
  31. daita/core/relay.py +695 -0
  32. daita/core/reliability.py +381 -0
  33. daita/core/scaling.py +444 -0
  34. daita/core/tools.py +402 -0
  35. daita/core/tracing.py +770 -0
  36. daita/core/workflow.py +1084 -0
  37. daita/display/__init__.py +1 -0
  38. daita/display/console.py +160 -0
  39. daita/execution/__init__.py +58 -0
  40. daita/execution/client.py +856 -0
  41. daita/execution/exceptions.py +92 -0
  42. daita/execution/models.py +317 -0
  43. daita/llm/__init__.py +60 -0
  44. daita/llm/anthropic.py +166 -0
  45. daita/llm/base.py +373 -0
  46. daita/llm/factory.py +101 -0
  47. daita/llm/gemini.py +152 -0
  48. daita/llm/grok.py +114 -0
  49. daita/llm/mock.py +135 -0
  50. daita/llm/openai.py +109 -0
  51. daita/plugins/__init__.py +141 -0
  52. daita/plugins/base.py +37 -0
  53. daita/plugins/base_db.py +167 -0
  54. daita/plugins/elasticsearch.py +844 -0
  55. daita/plugins/mcp.py +481 -0
  56. daita/plugins/mongodb.py +510 -0
  57. daita/plugins/mysql.py +351 -0
  58. daita/plugins/postgresql.py +331 -0
  59. daita/plugins/redis_messaging.py +500 -0
  60. daita/plugins/rest.py +529 -0
  61. daita/plugins/s3.py +761 -0
  62. daita/plugins/slack.py +729 -0
  63. daita/utils/__init__.py +18 -0
  64. daita_agents-0.1.0.dist-info/METADATA +350 -0
  65. daita_agents-0.1.0.dist-info/RECORD +69 -0
  66. daita_agents-0.1.0.dist-info/WHEEL +5 -0
  67. daita_agents-0.1.0.dist-info/entry_points.txt +2 -0
  68. daita_agents-0.1.0.dist-info/licenses/LICENSE +56 -0
  69. daita_agents-0.1.0.dist-info/top_level.txt +1 -0
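
For orientation, the wheel ships a single top-level `daita` package plus an `entry_points.txt`, so installing it is expected to register a `daita` console command. Below is a minimal sketch of installing this exact release and building an agent the way the starter template in `daita/cli/core/init.py` does (the `SubstrateAgent` import and keyword arguments are taken from that file; the provider, model, and environment variable values are illustrative, not defaults confirmed by this diff):

```python
# Illustrative sketch -- assumes `pip install daita-agents==0.1.0` has been run
# and that OPENAI_API_KEY is set in the environment.
import os

from daita import SubstrateAgent  # import form used by the starter template

# Mirrors "Option 2" from the generated agents/my_agent.py: explicit LLM config.
agent = SubstrateAgent(
    name="My Agent",
    llm_provider="openai",
    model="gpt-4",
    api_key=os.getenv("OPENAI_API_KEY"),
)
```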
daita/cli/core/init.py ADDED
@@ -0,0 +1,382 @@
+ """
+ Simple project initialization for Daita CLI.
+ Creates minimal, universal project template like create-react-app.
+ """
+ import os
+ import yaml
+ from pathlib import Path
+ from datetime import datetime
+ from ..utils import find_project_root
+
+ async def initialize_project(project_name=None, project_type='basic', template=None, force=False, verbose=False):
+     """Initialize a new Daita project with minimal template."""
+
+     # Get project name
+     if not project_name:
+         project_name = input("Project name: ").strip()
+         if not project_name:
+             project_name = Path.cwd().name
+
+     # Determine project directory
+     project_dir = Path.cwd() / project_name
+
+     # Check if directory exists
+     if project_dir.exists() and not force:
+         if any(project_dir.iterdir()):  # Directory not empty
+             confirm = input(f"Directory '{project_name}' exists and is not empty. Continue? (y/N): ")
+             if confirm.lower() != 'y':
+                 print(" Initialization cancelled")
+                 return
+
+     # Create project directory
+     project_dir.mkdir(exist_ok=True)
+
+     print(f" Creating Daita project: {project_name}")
+     print(f" Location: {project_dir}")
+
+     # Create minimal project structure
+     _create_project_structure(project_dir, verbose)
+     _create_project_config(project_dir, project_name, verbose)
+     _create_starter_files(project_dir, project_name, verbose)
+     _create_supporting_files(project_dir, project_name, verbose)
+
+     # Import freemium utilities
+     try:
+         from ..utils import get_freemium_success_message
+         print("")
+         print(get_freemium_success_message(project_name))
+         print("")
+         print(" Development setup:")
+         print(f" export OPENAI_API_KEY=your_key_here # Configure LLM")
+         print(f" pip install -r requirements.txt # Install dependencies")
+         print(f" python agents/my_agent.py # Test example agent")
+     except ImportError:
+         # Fallback to original message if utils not available
+         print(f"")
+         print(f"Project created successfully")
+         print(f"")
+         print(f"Get started:")
+         print(f" cd {project_name}")
+         print(f" export OPENAI_API_KEY=your_key_here")
+         print(f" pip install -r requirements.txt")
+         print(f" python agents/my_agent.py # Test the example agent")
+         print(f" daita create agent new_agent # Create a new agent")
+         print(f" daita test # Test all components")
+         print(f" daita test --watch # Watch for changes while developing")
+
+ def _create_project_structure(project_dir, verbose):
+     """Create minimal directory structure."""
+     directories = [
+         '.daita',
+         'agents',
+         'workflows',
+         'data',
+         'tests'
+     ]
+
+     for dir_name in directories:
+         dir_path = project_dir / dir_name
+         dir_path.mkdir(exist_ok=True)
+
+         # Create __init__.py for Python packages
+         if dir_name in ['agents', 'workflows', 'tests']:
+             init_file = dir_path / '__init__.py'
+             init_file.write_text('"""Daita project components."""\n')
+
+         if verbose:
+             print(f" Created: {dir_name}/")
+
+ def _create_project_config(project_dir, project_name, verbose):
+     """Create minimal daita-project.yaml configuration."""
+
+     config = {
+         'name': project_name,
+         'version': '1.0.0',
+         'description': f'A Daita AI agent project',
+         'created_at': datetime.utcnow().isoformat(),
+
+         # Project components (will be populated as user creates them)
+         'agents': [],
+         'workflows': []
+     }
+
+     config_file = project_dir / 'daita-project.yaml'
+     with open(config_file, 'w') as f:
+         yaml.dump(config, f, default_flow_style=False, sort_keys=False)
+
+     if verbose:
+         print(f" Created: daita-project.yaml")
+
+ def _create_starter_files(project_dir, project_name, verbose):
+     """Create minimal starter agent and workflow files."""
+
+     # Simple starter agent
+     starter_agent = '''"""
+ My Agent
+
+ A simple starter agent. Replace this with your own logic.
+ """
+ from daita import SubstrateAgent
+
+ def create_agent():
+     """Create the agent instance using direct SubstrateAgent pattern."""
+     # Option 1: Simple instantiation (uses defaults)
+     agent = SubstrateAgent(name="My Agent")
+
+     # Option 2: Direct LLM configuration (uncomment and modify as needed)
+     # import os
+     # agent = SubstrateAgent(
+     #     name="My Agent",
+     #     llm_provider="openai",
+     #     model="gpt-4",
+     #     api_key=os.getenv("OPENAI_API_KEY")
+     # )
+
+     # Optional: Add plugins
+     # from daita.plugins import postgresql
+     # agent.add_plugin(postgresql(host="localhost", database="mydb"))
+
+     return agent
+
+ if __name__ == "__main__":
+     import asyncio
+
+     async def main():
+         agent = create_agent()
+         result = await agent.process("test_task", "Hello, world!")
+         print(result)
+
+     asyncio.run(main())
+ '''
+
+     # Simple starter workflow
+     starter_workflow = '''"""
+ My Workflow
+
+ A simple starter workflow. Replace this with your own logic.
+ """
+ from daita import SubstrateAgent, Workflow
+
+ def create_workflow():
+     """Create the workflow instance using direct Workflow pattern."""
+     workflow = Workflow("My Workflow")
+
+     # Add your agents here
+     # agent = SubstrateAgent(name="Agent")
+     # workflow.add_agent("agent", agent)
+
+     return workflow
+
+ async def run_workflow(data=None):
+     """Run the workflow with direct pattern."""
+     workflow = create_workflow()
+
+     try:
+         await workflow.start()
+
+         # Your workflow logic here
+         result = f"Workflow processed: {data}"
+
+         return {
+             'status': 'success',
+             'result': result
+         }
+
+     finally:
+         await workflow.stop()
+
+ if __name__ == "__main__":
+     import asyncio
+
+     async def main():
+         result = await run_workflow("Hello, workflow!")
+         print(result)
+
+     asyncio.run(main())
+ '''
+
+     # Write starter files
+     (project_dir / 'agents' / 'my_agent.py').write_text(starter_agent)
+     (project_dir / 'workflows' / 'my_workflow.py').write_text(starter_workflow)
+
+     if verbose:
+         print(f" Created: agents/my_agent.py")
+         print(f" Created: workflows/my_workflow.py")
+
+ def _create_supporting_files(project_dir, project_name, verbose):
+     """Create supporting files (requirements, README, etc.)."""
+
+     # Minimal requirements.txt
+     requirements = '''# Daita Agents Framework
+ daita-agents>=0.1.0
+
+ # LLM provider (choose one)
+ openai>=1.0.0
+
+ # Development
+ pytest>=7.0.0
+ pytest-asyncio>=0.21.0
+ '''
+
+     # Simple .gitignore
+     gitignore = '''# Python
+ __pycache__/
+ *.py[cod]
+ *.so
+ .Python
+ build/
+ dist/
+ *.egg-info/
+
+ # Virtual environments
+ .env
+ .venv
+ venv/
+
+ # IDE
+ .vscode/
+ .idea/
+
+ # OS
+ .DS_Store
+
+ # API keys
+ .env.local
+ '''
+
+     # README with freemium messaging
+     readme = f'''# {project_name}
+
+ A Daita AI agent project.
+
+ ## Quick Setup
+
+ 1. Install dependencies:
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+ 2. Set your LLM API key:
+ ```bash
+ export OPENAI_API_KEY=your_key_here
+ ```
+
+ ## Free Local Development
+
+ Build and test your agents locally - completely free:
+
+ ```bash
+ # Test the example agent
+ python agents/my_agent.py
+
+ # Test all components
+ daita test
+
+ # Watch for changes while developing
+ daita test --watch
+
+ # Create new components
+ daita create agent my_new_agent
+ daita create workflow my_new_workflow
+ ```
+
+ ## Production Cloud Hosting
+
+ Ready to deploy to the cloud? Get 24/7 hosting, monitoring, and insights:
+
+ ```bash
+ # Get your API key at daita-tech.io
+ export DAITA_API_KEY='your-key-here'
+
+ # Deploy to cloud
+ daita push # Deploy to production
+
+ # Monitor your deployments
+ daita status # Deployment status
+ daita logs # View execution logs
+ ```
+
+ ## Project Structure
+
+ ```
+ {project_name}/
+ ├── agents/              # Your AI agents (free to create & test)
+ │   └── my_agent.py
+ ├── workflows/           # Your workflows (free to create & test)
+ │   └── my_workflow.py
+ ├── data/                # Data files
+ ├── tests/               # Tests
+ └── daita-project.yaml   # Project config
+ ```
+
+ ## Command Reference
+
+ **Free Commands (Local Development):**
+ - `daita test` - Test all agents and workflows
+ - `daita test --watch` - Development mode with auto-reload
+ - `daita create agent <name>` - Create new agent
+ - `daita create workflow <name>` - Create new workflow
+
+ **Premium Commands (Cloud Hosting):**
+ - `daita push <env>` - Deploy to cloud
+ - `daita status` - Monitor deployments
+ - `daita logs <env>` - View execution logs
+ - `daita run <agent>` - Execute remotely
+
+ ## Learn More
+
+ - [Get API Key](https://daita-tech.io) - Start your free trial
+ - [Documentation](https://docs.daita-tech.io)
+ '''
+
+     # Simple test file
+     test_file = '''"""
+ Basic test for your agents and workflows.
+ """
+ import pytest
+ import asyncio
+
+ # Example test - replace with your own
+ @pytest.mark.asyncio
+ async def test_my_agent():
+     """Test the example agent."""
+     from agents.my_agent import create_agent
+
+     agent = create_agent()
+     result = await agent.process("test data")
+
+     assert result["status"] == "success"
+     assert "test data" in result["result"]
+
+ @pytest.mark.asyncio
+ async def test_my_workflow():
+     """Test the example workflow."""
+     from workflows.my_workflow import create_workflow
+
+     workflow = create_workflow()
+     result = await workflow.run("test data")
+
+     assert result["status"] == "success"
+
+ if __name__ == "__main__":
+     # Run tests directly
+     asyncio.run(test_my_agent())
+     asyncio.run(test_my_workflow())
+     print(" All tests passed!")
+ '''
+
+     # Write all supporting files
+     (project_dir / 'requirements.txt').write_text(requirements)
+     (project_dir / '.gitignore').write_text(gitignore)
+     (project_dir / 'README.md').write_text(readme)
+     (project_dir / 'tests' / 'test_basic.py').write_text(test_file)
+
+     # Create empty data directory with placeholder
+     (project_dir / 'data' / '.gitkeep').write_text('')
+
+     if verbose:
+         print(f" Created: requirements.txt")
+         print(f" Created: .gitignore")
+         print(f" Created: README.md")
+         print(f" Created: tests/test_basic.py")
+
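
`initialize_project` is a coroutine, so callers outside the CLI have to drive it with an event loop, and when `project_name` is omitted it falls back to `input()`. A minimal sketch of invoking it programmatically (hypothetical driver; assumes daita-agents 0.1.0 is installed so that the `daita.cli` package and its relative imports resolve):

```python
# Hypothetical driver -- not part of the package; shown only to illustrate the
# call signature of initialize_project() from the diff above.
import asyncio

from daita.cli.core.init import initialize_project

# Scaffold a project named "demo" in the current directory without prompting.
# force=True skips the "directory exists" confirmation; verbose=True prints
# each created file and directory.
asyncio.run(initialize_project(project_name="demo", force=True, verbose=True))
```

Non-interactive callers should always pass `project_name`, since the fallback prompt would otherwise block.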
daita/cli/core/logs.py ADDED
@@ -0,0 +1,239 @@
+ """
+ Simple logs display for Daita CLI.
+ Shows deployment history and logs like git log from cloud API.
+ """
+ import os
+ import aiohttp
+ from datetime import datetime
+ from ..utils import find_project_root
+
+
+ async def show_deployment_logs(environment=None, limit=10, follow=False, verbose=False):
+     """Show deployment logs (like git log) from cloud API."""
+
+     # Check API key first
+     api_key = os.getenv('DAITA_API_KEY')
+     if not api_key:
+         from ..utils import show_upgrade_message
+         show_upgrade_message()
+         return
+
+     # Get current project name for display
+     project_root = find_project_root()
+     current_project = None
+     if project_root:
+         import yaml
+         config_file = project_root / 'daita-project.yaml'
+         if config_file.exists():
+             try:
+                 with open(config_file, 'r') as f:
+                     config = yaml.safe_load(f)
+                     current_project = config.get('name')
+             except:
+                 current_project = None
+
+     # Load deployments from cloud API
+     deployments, api_error = await _load_cloud_deployments(environment, limit)
+
+     if api_error:
+         print(f" Failed to fetch deployments: {api_error}")
+         if verbose:
+             print(f" API Key: {api_key[:20]}...")
+             print(f" Endpoint: {os.getenv('DAITA_API_ENDPOINT', 'https://api.daita.ai')}")
+         return
+
+     if not deployments:
+         if current_project:
+             if environment:
+                 print(f" No deployments found ({current_project}, {environment})")
+             else:
+                 print(f" No deployments found ({current_project})")
+             print(" Run 'daita push staging' to create your first deployment")
+         else:
+             print(" No deployments found")
+             if environment:
+                 print(f" No deployments found for environment: {environment}")
+             else:
+                 print(" Run 'daita push staging' to create your first deployment")
+         return
+
+     # Filter by environment if specified (additional client-side filtering)
+     if environment:
+         deployments = [d for d in deployments if d['environment'] == environment]
+         if not deployments:
+             print(f" No deployments found for environment: {environment}")
+             return
+
+     # Take most recent deployments (API already returns newest first)
+     deployments = deployments[:limit]
+
+     # Find the most recent active deployment
+     latest_active_deployment = None
+     for deployment in deployments:
+         if deployment.get('status') == 'active':
+             latest_active_deployment = deployment
+             break
+
+     # Build header with scope indication
+     header_parts = []
+     if current_project:
+         header_parts.append(current_project)
+     if environment:
+         header_parts.append(environment)
+
+     if header_parts:
+         scope_info = f" ({', '.join(header_parts)})"
+     elif current_project is None:
+         scope_info = " (Organization)"
+     else:
+         scope_info = ""
+
+     print(f" Deployment History{scope_info}")
+     print("")
+
+     for i, deployment in enumerate(deployments):
+         is_current = (latest_active_deployment and
+                       deployment.get('deployment_id') == latest_active_deployment.get('deployment_id'))
+         _show_deployment(deployment, verbose, is_latest=is_current)
+         if i < len(deployments) - 1:
+             print("")
+
+     if follow:
+         print(f"\n Following logs... (Press Ctrl+C to stop)")
+         try:
+             import asyncio
+             while True:
+                 await asyncio.sleep(5)
+                 # Check for new deployments
+                 new_deployments, error = await _load_cloud_deployments(environment, limit)
+                 if error:
+                     continue  # Skip this check if API fails
+
+                 if len(new_deployments) > len(deployments):
+                     latest = new_deployments[-1]
+                     print(f"\n New deployment:")
+                     _show_deployment(latest, verbose, is_latest=True)
+                     deployments = new_deployments
+         except KeyboardInterrupt:
+             print(f"\n Stopped following logs")
+
+ def _show_deployment(deployment, verbose, is_latest=False):
+     """Show a single deployment entry."""
+     env = deployment['environment']
+     timestamp = deployment['deployed_at']
+     version = deployment.get('version', '1.0.0')
+     project = deployment.get('project_name', 'Unknown')
+
+     # Format timestamp
+     if timestamp:
+         # Handle ISO format from API
+         if 'T' in timestamp:
+             dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
+             time_str = dt.strftime('%Y-%m-%d %H:%M:%S')
+         else:
+             time_str = timestamp
+     else:
+         time_str = 'Unknown time'
+
+     # Status indicator
+     status = "●" if is_latest else "○"
+     current = " (current)" if is_latest else ""
+
+     print(f"{status} {env}: {project} v{version}{current}")
+     print(f" {time_str}")
+
+     if verbose:
+         # Show detailed info
+         agents = deployment.get('agents_config', [])
+         workflows = deployment.get('workflows_config', [])
+
+         if agents:
+             agent_names = [agent.get('name', 'unknown') for agent in agents]
+             print(f" Agents: {', '.join(agent_names)}")
+         if workflows:
+             workflow_names = [wf.get('name', 'unknown') for wf in workflows]
+             print(f" Workflows: {', '.join(workflow_names)}")
+
+         # Show deployment ID
+         deploy_id = deployment.get('deployment_id', 'N/A')
+         if len(deploy_id) > 8:
+             deploy_id = deploy_id[:8]  # Show first 8 characters
+         print(f" ID: {deploy_id}")
+
+         # Show status
+         status = deployment.get('status', 'unknown')
+         print(f" Status: {status}")
+
+ async def _load_cloud_deployments(environment=None, limit=10):
+     """Load deployment history from cloud API."""
+     try:
+         api_key = os.getenv('DAITA_API_KEY')
+         if not api_key:
+             return [], "API key not found"
+
+         # Get current project name to filter deployments when in project directory
+         project_root = find_project_root()
+         if project_root:
+             import yaml
+             config_file = project_root / 'daita-project.yaml'
+             current_project = None
+             if config_file.exists():
+                 try:
+                     with open(config_file, 'r') as f:
+                         config = yaml.safe_load(f)
+                         current_project = config.get('name')
+                 except:
+                     current_project = None
+         else:
+             current_project = None
+
+         api_endpoint = os.getenv('DAITA_API_ENDPOINT', 'https://ondk4sdyv0.execute-api.us-east-1.amazonaws.com')
+
+         headers = {
+             "Authorization": f"Bearer {api_key}",
+             "User-Agent": "Daita-CLI/1.0.0"
+         }
+
+         async with aiohttp.ClientSession() as session:
+             # Build URL with filters
+             url = f"{api_endpoint}/api/v1/deployments/api-key"
+             params = {}
+             if environment:
+                 params['environment'] = environment
+             if current_project:
+                 params['project_name'] = current_project
+             if limit and limit != 10:
+                 params['per_page'] = min(limit, 100)  # API limit
+
+             async with session.get(url, headers=headers, params=params, timeout=30) as response:
+                 if response.status == 200:
+                     data = await response.json()
+
+                     # Handle paginated response from API (like in status.py)
+                     if isinstance(data, dict) and 'deployments' in data:
+                         deployments = data['deployments']
+                     else:
+                         deployments = data if isinstance(data, list) else []
+
+                     # Return deployments in API format (no conversion needed)
+                     return deployments, None
+                 elif response.status == 401:
+                     return [], "Invalid API key"
+                 elif response.status == 403:
+                     return [], "Access denied - check API key permissions"
+                 else:
+                     error_text = await response.text()
+                     return [], f"API error {response.status}: {error_text[:100]}"
+     except Exception as e:
+         if "timeout" in str(e).lower():
+             return [], "Request timeout - check your connection"
+         elif "dns" in str(e).lower() or "name" in str(e).lower():
+             return [], f"Cannot resolve API endpoint - check DAITA_API_ENDPOINT setting"
+         else:
+             return [], f"Network error: {str(e)}"
+
+ def _get_deployment_id(deployment):
+     """Generate short deployment ID."""
+     import hashlib
+     content = f"{deployment['deployed_at']}{deployment['environment']}"
+     return hashlib.md5(content.encode()).hexdigest()[:8]
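
`show_deployment_logs` follows the same pattern: it is a coroutine that reads `DAITA_API_KEY` (and an optional `DAITA_API_ENDPOINT` override) from the environment and prints directly to stdout. A minimal sketch of calling it outside the CLI (hypothetical driver; assumes the package is installed and a valid API key is exported, otherwise the function prints an upgrade message and returns without contacting the API):

```python
# Hypothetical driver -- illustrates the call signature of show_deployment_logs()
# from the diff above; it is not shipped with the package.
import asyncio

from daita.cli.core.logs import show_deployment_logs

# Show the five most recent staging deployments with per-deployment detail.
# The endpoint defaults to the execute-api URL hard-coded in
# _load_cloud_deployments() unless DAITA_API_ENDPOINT is set.
asyncio.run(show_deployment_logs(environment="staging", limit=5, verbose=True))
```

Passing `follow=True` instead enters the five-second polling loop shown above and only returns on Ctrl+C.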