diagram-to-iac 1.3.0__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -16,113 +16,104 @@ logger = logging.getLogger(__name__)
 class ConfigLoader:
     """
     Central configuration management for the diagram-to-iac project.
-    Loads configuration from YAML files and provides environment variable override capability.
+    Loads configuration from central config.yaml file with environment variable override capability.
     """

-    def __init__(self, app_config_path: Optional[str] = None, system_config_path: Optional[str] = None):
+    def __init__(self, config_path: Optional[str] = None):
         """
-        Initialize ConfigLoader with optional custom config paths.
+        Initialize ConfigLoader with optional custom config path.

         Args:
-            app_config_path: Path to application config file (default: src/diagram_to_iac/config.yaml)
-            system_config_path: Path to system config file (default: config/system.yaml)
+            config_path: Path to central config file (default: src/diagram_to_iac/config.yaml)
         """
         self.logger = logging.getLogger(self.__class__.__name__)

-        # Set default paths
-        self.base_path = Path(__file__).parent.parent.parent.parent  # diagram-to-iac root
-        self.app_config_path = Path(app_config_path) if app_config_path else self.base_path / "src" / "diagram_to_iac" / "config.yaml"
-        self.system_config_path = Path(system_config_path) if system_config_path else self.base_path / "config" / "system.yaml"
+        # Set default paths (container-safe)
+        if config_path:
+            self.config_path = Path(config_path)
+        else:
+            # Try multiple locations for central config
+            possible_paths = [
+                Path.cwd() / "src" / "diagram_to_iac" / "config.yaml",  # Development
+                Path(__file__).parent.parent / "config.yaml",  # Package location
+                Path("/workspace/src/diagram_to_iac/config.yaml"),  # Container workspace
+                Path("/workspace/config.yaml"),  # Container root
+            ]
+            self.config_path = None
+            for path in possible_paths:
+                if path.exists():
+                    self.config_path = path
+                    break
+            # Default to package location if none found (will create defaults)
+            if not self.config_path:
+                self.config_path = Path(__file__).parent.parent / "config.yaml"

-        # Cache for loaded configs
-        self._app_config = None
-        self._system_config = None
-        self._merged_config = None
+        # Cache for loaded config
+        self._config = None

     @lru_cache(maxsize=1)
     def get_config(self) -> Dict[str, Any]:
         """
-        Get the complete merged configuration with environment variable overrides.
+        Get the complete configuration with environment variable overrides.

         Returns:
-            Merged configuration dictionary
+            Configuration dictionary
         """
-        if self._merged_config is None:
-            self._merged_config = self._load_and_merge_configs()
-        return self._merged_config
+        if self._config is None:
+            self._config = self._load_config()
+        return self._config

-    def _load_and_merge_configs(self) -> Dict[str, Any]:
+    def _load_config(self) -> Dict[str, Any]:
         """
-        Load and merge all configuration sources.
+        Load configuration from central config file.

         Returns:
-            Merged configuration dictionary
+            Configuration dictionary with environment overrides applied
         """
-        # Load base configs
-        app_config = self._load_app_config()
-        system_config = self._load_system_config()
-
-        # Start with system config as base, overlay app config
-        merged = self._deep_merge(system_config, app_config)
+        # Load base config
+        config = self._load_config_file()

         # Apply environment variable overrides
-        merged = self._apply_env_overrides(merged)
+        config = self._apply_env_overrides(config)

-        self.logger.debug("Configuration loaded and merged successfully")
-        return merged
-
-    def _load_app_config(self) -> Dict[str, Any]:
-        """Load application configuration from YAML file."""
-        if self._app_config is None:
-            try:
-                if self.app_config_path.exists():
-                    with open(self.app_config_path, 'r') as f:
-                        self._app_config = yaml.safe_load(f) or {}
-                    self.logger.debug(f"Loaded app config from {self.app_config_path}")
-                else:
-                    self.logger.warning(f"App config file not found: {self.app_config_path}")
-                    self._app_config = {}
-            except Exception as e:
-                self.logger.error(f"Failed to load app config: {e}")
-                self._app_config = {}
-        return self._app_config
+        self.logger.debug("Configuration loaded successfully")
+        return config

-    def _load_system_config(self) -> Dict[str, Any]:
-        """Load system configuration from YAML file."""
-        if self._system_config is None:
-            try:
-                if self.system_config_path.exists():
-                    with open(self.system_config_path, 'r') as f:
-                        self._system_config = yaml.safe_load(f) or {}
-                    self.logger.debug(f"Loaded system config from {self.system_config_path}")
-                else:
-                    self.logger.warning(f"System config file not found: {self.system_config_path}")
-                    self._system_config = {}
-            except Exception as e:
-                self.logger.error(f"Failed to load system config: {e}")
-                self._system_config = {}
-        return self._system_config
-
-    def _deep_merge(self, base: Dict[str, Any], overlay: Dict[str, Any]) -> Dict[str, Any]:
-        """
-        Deep merge two dictionaries, with overlay taking precedence.
-
-        Args:
-            base: Base dictionary
-            overlay: Dictionary to overlay on base
-
-        Returns:
-            Merged dictionary
-        """
-        result = base.copy()
-
-        for key, value in overlay.items():
-            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
-                result[key] = self._deep_merge(result[key], value)
+    def _load_config_file(self) -> Dict[str, Any]:
+        """Load configuration from central YAML file."""
+        try:
+            if self.config_path.exists():
+                with open(self.config_path, 'r') as f:
+                    config = yaml.safe_load(f) or {}
+                self.logger.debug(f"Loaded config from {self.config_path}")
             else:
-                result[key] = value
-
-        return result
+                self.logger.warning(f"Config file not found at {self.config_path}, using built-in defaults")
+                config = self._get_default_config()
+        except Exception as e:
+            self.logger.error(f"Failed to load config: {e}")
+            config = self._get_default_config()
+        return config
+
+    def _get_default_config(self) -> Dict[str, Any]:
+        """Get default configuration when config file is not available."""
+        return {
+            'system': {
+                'workspace_base': '/workspace',
+                'log_level': 'INFO'
+            },
+            'network': {
+                'api_timeout': 10,
+                'shell_timeout': 30,
+                'terraform_timeout': 300,
+                'github_timeout': 15,
+                'git_timeout': 300
+            },
+            'ai': {
+                'default_model': 'gpt-4o-mini',
+                'default_temperature': 0.1,
+                'max_tokens': 1000
+            }
+        }

     def _apply_env_overrides(self, config: Dict[str, Any]) -> Dict[str, Any]:
         """
@@ -246,10 +237,8 @@ class ConfigLoader:
         return default

     def reload(self) -> None:
-        """Reload configuration from files (clears cache)."""
-        self._app_config = None
-        self._system_config = None
-        self._merged_config = None
+        """Reload configuration from file (clears cache)."""
+        self._config = None
         self.get_config.cache_clear()
         self.logger.debug("Configuration cache cleared, will reload on next access")

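The net effect of the hunk above is that the old two-file merge (system config overlaid with the app config) collapses into a single central config.yaml with built-in defaults. A minimal usage sketch, assuming the import path `diagram_to_iac.core.config_loader` (the module name is inferred from the relative imports later in this diff, not shown explicitly here):

```python
from diagram_to_iac.core.config_loader import ConfigLoader, get_config_value  # path inferred

# Explicit path (useful in tests); omitting config_path lets the loader probe
# cwd, the package directory, and the /workspace locations in that order.
loader = ConfigLoader(config_path="/workspace/src/diagram_to_iac/config.yaml")
config = loader.get_config()

# Nested keys mirror the built-in defaults shown above.
terraform_timeout = config.get("network", {}).get("terraform_timeout", 300)

# Dotted-path helper used by the memory and registry modules later in this diff.
workspace = get_config_value("system.workspace_base", "/workspace")

loader.reload()  # clears the cache; the next get_config() re-reads the file
```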
@@ -4,6 +4,13 @@ import json
 import os
 from pathlib import Path

+try:
+    from .config_loader import get_config_value
+except ImportError:
+    # Fallback for tests or standalone usage
+    def get_config_value(path: str, default: Any = None) -> Any:
+        return default
+
 # Abstract base for memory implementations
 class MemoryInterface(ABC):
     """Abstract interface for agent memory systems."""
@@ -94,11 +101,21 @@ class PersistentFileMemory(MemoryInterface):

     def __init__(self, file_path: Optional[str] = None):
         if file_path is None:
-            # Default to data/db directory
-            base_dir = Path(__file__).parent.parent.parent.parent
-            data_dir = base_dir / "data" / "db"
-            data_dir.mkdir(parents=True, exist_ok=True)
-            file_path = data_dir / "agent_memory.json"
+            # Get workspace base from config with fallback
+            workspace_base = get_config_value("system.workspace_base", "/workspace")
+
+            # Default to workspace or /tmp directory for container safety
+            try:
+                # Try workspace first (from config)
+                base_dir = Path(workspace_base) if Path(workspace_base).exists() else Path.cwd()
+                data_dir = base_dir / "data" / "db"
+                data_dir.mkdir(parents=True, exist_ok=True)
+                file_path = data_dir / "agent_memory.json"
+            except (PermissionError, OSError):
+                # Fallback to /tmp for container environments
+                data_dir = Path("/tmp") / "diagram_to_iac" / "data" / "db"
+                data_dir.mkdir(parents=True, exist_ok=True)
+                file_path = data_dir / "agent_memory.json"

         self.file_path = Path(file_path)
         self._state: Dict[str, Any] = {}
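As shown above, the default memory file now prefers the configured workspace (or the current directory) and falls back to /tmp only on permission errors. A small sketch of exercising both paths, assuming the class is importable as below (the module path is not confirmed by this diff):

```python
from diagram_to_iac.core.memory import PersistentFileMemory  # module path assumed

# Default resolution: <workspace_base>/data/db/agent_memory.json when the
# workspace (or cwd) is writable, /tmp/diagram_to_iac/data/db/ otherwise.
memory = PersistentFileMemory()
print(memory.file_path)

# An explicit path still wins, e.g. for isolated test runs.
scratch = PersistentFileMemory(file_path="/tmp/test_agent_memory.json")
```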
@@ -33,6 +33,8 @@ from enum import Enum

 from pydantic import BaseModel, Field, field_validator

+from .config_loader import get_config_value
+

 class RunStatus(str, Enum):
     """Status values for deployment runs."""
@@ -126,9 +128,27 @@ class RunRegistry:
         if registry_path:
             self.registry_path = Path(registry_path)
         else:
-            # Default to data/state/issue_registry.json
-            base_path = Path(__file__).parent.parent.parent.parent
-            self.registry_path = base_path / "data" / "state" / "issue_registry.json"
+            # Get workspace base from config (with fallback)
+            workspace_base = get_config_value("system.workspace_base", "/workspace")
+
+            # Try multiple locations for registry file
+            possible_paths = [
+                Path.cwd() / "data" / "state" / "issue_registry.json",  # Development
+                Path(workspace_base) / "data" / "state" / "issue_registry.json",  # Container workspace
+                Path("/tmp/diagram_to_iac/data/state/issue_registry.json"),  # Container fallback
+            ]
+            self.registry_path = None
+            for path in possible_paths:
+                try:
+                    path.parent.mkdir(parents=True, exist_ok=True)
+                    self.registry_path = path
+                    break
+                except (PermissionError, OSError):
+                    continue
+
+            # Final fallback if all locations fail
+            if not self.registry_path:
+                self.registry_path = Path("/tmp/issue_registry.json")

         # Ensure the directory exists
         self.registry_path.parent.mkdir(parents=True, exist_ok=True)
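The registry-path probing above is the same container-safe pattern again: try each candidate location, keep the first whose parent directory can be created, and fall back to a fixed /tmp path. A standalone sketch of that idea; the helper name and the call below are illustrative, not part of the package:

```python
from pathlib import Path
from typing import Iterable

def first_writable(candidates: Iterable[Path], fallback: Path) -> Path:
    """Return the first candidate whose parent directory can be created."""
    for path in candidates:
        try:
            path.parent.mkdir(parents=True, exist_ok=True)
            return path
        except (PermissionError, OSError):
            continue
    return fallback

registry_path = first_writable(
    [
        Path.cwd() / "data" / "state" / "issue_registry.json",        # development
        Path("/workspace/data/state/issue_registry.json"),            # container workspace
        Path("/tmp/diagram_to_iac/data/state/issue_registry.json"),   # container fallback
    ],
    Path("/tmp/issue_registry.json"),
)
```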
@@ -0,0 +1,123 @@
+"""
+Configuration loader for diagram-to-iac tests.
+
+This module provides utilities to load test configuration from the main config.yaml file,
+making test repository URLs and other test settings configurable and centralized.
+"""
+
+import os
+import yaml
+from pathlib import Path
+from typing import Dict, Any
+
+
+def load_test_config() -> Dict[str, Any]:
+    """
+    Load test configuration from the main config.yaml file.
+
+    Returns:
+        Dict containing test configuration settings
+    """
+    # Find the config file relative to this module
+    # The config.yaml is at src/diagram_to_iac/config.yaml
+    # This file is at src/diagram_to_iac/core/test_config.py
+    # So we need to go up one directory
+    current_dir = Path(__file__).parent
+    config_path = current_dir.parent / "config.yaml"
+
+    if not config_path.exists():
+        raise FileNotFoundError(f"Config file not found at {config_path}")
+
+    with open(config_path, 'r') as f:
+        config = yaml.safe_load(f)
+
+    return config.get('test', {})
+
+
+def get_test_repo_url() -> str:
+    """
+    Get the test repository URL from configuration.
+
+    Returns:
+        The configured test repository URL
+    """
+    config = load_test_config()
+    return config.get('github', {}).get('test_repo_url', 'https://github.com/amartyamandal/test_iac_agent_private.git')
+
+
+def get_test_repo_owner() -> str:
+    """
+    Get the test repository owner from configuration.
+
+    Returns:
+        The configured test repository owner
+    """
+    config = load_test_config()
+    return config.get('github', {}).get('test_repo_owner', 'amartyamandal')
+
+
+def get_test_repo_name() -> str:
+    """
+    Get the test repository name from configuration.
+
+    Returns:
+        The configured test repository name
+    """
+    config = load_test_config()
+    return config.get('github', {}).get('test_repo_name', 'test_iac_agent_private')
+
+
+def get_public_test_repo_url() -> str:
+    """
+    Get the public test repository URL from configuration.
+
+    Returns:
+        The configured public test repository URL
+    """
+    config = load_test_config()
+    return config.get('github', {}).get('public_test_repo_url', 'https://github.com/amartyamandal/test_iac_agent_public.git')
+
+
+def should_skip_integration_tests() -> bool:
+    """
+    Check if integration tests should be skipped when no GitHub token is available.
+
+    Returns:
+        True if integration tests should be skipped without a token
+    """
+    config = load_test_config()
+    return config.get('settings', {}).get('skip_integration_tests_without_token', True)
+
+
+def should_use_real_github_api() -> bool:
+    """
+    Check if tests should use real GitHub API calls.
+
+    Returns:
+        True if real GitHub API calls should be used
+    """
+    config = load_test_config()
+    return config.get('settings', {}).get('use_real_github_api', False)
+
+
+def should_mock_network_calls() -> bool:
+    """
+    Check if network calls should be mocked by default.
+
+    Returns:
+        True if network calls should be mocked
+    """
+    config = load_test_config()
+    return config.get('settings', {}).get('mock_network_calls', True)
+
+
+# Convenience function for backwards compatibility
+def get_test_github_repo() -> str:
+    """
+    Get the test GitHub repository URL.
+    Alias for get_test_repo_url() for backwards compatibility.
+
+    Returns:
+        The configured test repository URL
+    """
+    return get_test_repo_url()
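A brief sketch of how a test module might consume these new helpers with pytest (the `GITHUB_TOKEN` variable and the marker name are illustrative; the import path follows the `src/diagram_to_iac/core/test_config.py` location stated in the module docstring):

```python
import os
import pytest

from diagram_to_iac.core.test_config import (
    get_test_repo_url,
    should_skip_integration_tests,
)

# Skip integration tests when the config says so and no token is available.
requires_github = pytest.mark.skipif(
    should_skip_integration_tests() and not os.environ.get("GITHUB_TOKEN"),
    reason="integration tests are skipped without a GitHub token",
)

@requires_github
def test_clone_configured_repo():
    repo_url = get_test_repo_url()
    assert repo_url.startswith("https://github.com/")
```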
@@ -0,0 +1,240 @@
+# Issue Frontmatter Template for R2D Umbrella Issues
+# =====================================================
+# This template provides the structured metadata format for GitHub issues
+# created by the DevOps-in-a-Box R2D Action.
+
+# Standard R2D Issue Metadata
+r2d_metadata:
+  # Core identifiers
+  run_key: "{run_key}"
+  repo_url: "{repo_url}"
+  commit_sha: "{commit_sha}"
+  job_name: "{job_name}"
+
+  # Timestamps
+  created_at: "{created_at}"
+  updated_at: "{updated_at}"
+
+  # Status tracking
+  status: "{status}"
+  wait_reason: "{wait_reason}"
+
+  # Associated resources
+  umbrella_issue_id: "{umbrella_issue_id}"
+  linked_pr: "{linked_pr}"
+  branch_name: "{branch_name}"
+  thread_id: "{thread_id}"
+
+  # Artifacts and outputs
+  artifacts_path: "{artifacts_path}"
+  terraform_summary: "{terraform_summary}"
+
+  # Retry information
+  retry_count: {retry_count}
+  predecessor_run: "{predecessor_run}"
+
+  # Agent status summary
+  agent_statuses:
+    supervisor:
+      status: "{supervisor_status}"
+      last_updated: "{supervisor_last_updated}"
+    git_agent:
+      status: "{git_agent_status}"
+      last_updated: "{git_agent_last_updated}"
+    terraform_agent:
+      status: "{terraform_agent_status}"
+      last_updated: "{terraform_agent_last_updated}"
+    shell_agent:
+      status: "{shell_agent_status}"
+      last_updated: "{shell_agent_last_updated}"
+
+# Issue Template Configuration
+issue_template:
+  title: "🚀 R2D Deployment: {repo_name} ({short_sha})"
+  labels:
+    - "r2d-deployment"
+    - "automated"
+    - "infrastructure"
+  assignees:
+    - "github-copilot"  # Auto-assign to GitHub Copilot for AI assistance
+
+  # Issue body sections
+  body_sections:
+    header:
+      title: "## 🤖 DevOps-in-a-Box: Automated R2D Deployment"
+      content: |
+        This is an automated deployment issue created by the DevOps-in-a-Box R2D Action.
+
+        **Repository:** {repo_url}
+        **Commit:** `{commit_sha}`
+        **Job:** {job_name}
+        **Started:** {created_at}
+
+    status:
+      title: "## 📊 Deployment Status"
+      content: |
+        - **Overall Status:** {status}
+        - **Current Phase:** {current_phase}
+        - **Progress:** {progress_percentage}%
+
+        {status_details}
+
+    agents:
+      title: "## 🤖 Agent Status"
+      content: |
+        | Agent | Status | Last Updated | Notes |
+        |-------|--------|--------------|-------|
+        | Supervisor | {supervisor_status} | {supervisor_last_updated} | {supervisor_notes} |
+        | Git Agent | {git_agent_status} | {git_agent_last_updated} | {git_agent_notes} |
+        | Terraform Agent | {terraform_agent_status} | {terraform_agent_last_updated} | {terraform_agent_notes} |
+        | Shell Agent | {shell_agent_status} | {shell_agent_last_updated} | {shell_agent_notes} |
+
+    resources:
+      title: "## 🔗 Related Resources"
+      content: |
+        - **Branch:** {branch_name}
+        - **Thread ID:** {thread_id}
+        - **Artifacts:** {artifacts_path}
+        {linked_resources}
+
+    terraform:
+      title: "## ⚡ Terraform Operations"
+      content: |
+        ```
+        {terraform_summary}
+        ```
+
+    logs:
+      title: "## 📋 Deployment Logs"
+      content: |
+        <details>
+        <summary>Click to view detailed logs</summary>
+
+        ```
+        {deployment_logs}
+        ```
+        </details>
+
+    commands:
+      title: "## 🔧 Available Commands"
+      content: |
+        You can control this deployment by commenting on this issue:
+
+        - `retry` - Retry the current step
+        - `cancel` - Cancel the deployment
+        - `status` - Get current status
+        - `logs` - Show recent logs
+        - `help` - Show all available commands
+
+    footer:
+      title: "## 🤖 Automation Info"
+      content: |
+        This issue is managed by the DevOps-in-a-Box R2D Action.
+
+        **Run Key:** `{run_key}`
+        **Retry Count:** {retry_count}
+        **Predecessor:** {predecessor_run}
+
+        ---
+        *Powered by [DevOps-in-a-Box](https://github.com/amartyamandal/diagram-to-iac)*
+
+# Status messages for different phases
+status_messages:
+  created: "🆕 Deployment created and queued"
+  in_progress: "🚀 Deployment in progress"
+  waiting_for_pat: "⏳ Waiting for Personal Access Token configuration"
+  waiting_for_pr: "🔄 Waiting for Pull Request review and merge"
+  completed: "✅ Deployment completed successfully"
+  failed: "❌ Deployment failed"
+  cancelled: "🛑 Deployment cancelled"
+
+# Progress indicators
+progress_indicators:
+  created: 10
+  in_progress: 50
+  waiting_for_pat: 30
+  waiting_for_pr: 80
+  completed: 100
+  failed: 0
+  cancelled: 0
+
+# Command responses
+command_responses:
+  retry: |
+    🔄 **Retry Requested**
+
+    The deployment will be retried from the current step.
+    This may take a few minutes to start.
+
+  cancel: |
+    🛑 **Cancellation Requested**
+
+    The deployment has been marked for cancellation.
+    Any running operations will be stopped gracefully.
+
+  status: |
+    📊 **Current Status**
+
+    - **Phase:** {current_phase}
+    - **Progress:** {progress_percentage}%
+    - **Last Updated:** {last_updated}
+
+    {detailed_status}
+
+  help: |
+    🔧 **Available Commands**
+
+    Comment on this issue with any of these commands:
+
+    - `retry` - Retry the current step
+    - `cancel` - Cancel the deployment
+    - `status` - Get current status
+    - `logs` - Show recent logs
+    - `help` - Show this help message
+
+    **Note:** Only repository members can execute commands.
+
+# Error templates
+error_templates:
+  terraform_auth_error: |
+    ❌ **Terraform Authentication Error**
+
+    The deployment failed because Terraform Cloud authentication is not properly configured.
+
+    **Required Action:**
+    1. Ensure your `TFE_TOKEN` secret is set in repository settings
+    2. Verify the token has access to the required workspace
+    3. Comment `retry` to restart the deployment
+
+  git_auth_error: |
+    ❌ **Git Authentication Error**
+
+    The deployment failed because Git operations require authentication.
+
+    **Required Action:**
+    1. Ensure your `GITHUB_TOKEN` has sufficient permissions
+    2. Verify repository access settings
+    3. Comment `retry` to restart the deployment
+
+  missing_terraform_files: |
+    ❌ **Missing Terraform Files**
+
+    The deployment failed because no Terraform files were found in the repository.
+
+    **Required Action:**
+    1. Ensure your repository contains `.tf` files
+    2. Check that files are in the expected directory structure
+    3. Update the repository and create a new issue
+
+  policy_violation: |
+    ❌ **Security Policy Violation**
+
+    The deployment was blocked by security policies.
+
+    **Details:**
+    {policy_details}
+
+    **Required Action:**
+    1. Review and fix the security issues listed above
+    2. Update your Terraform code
+    3. Create a new deployment issue
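The placeholders in this template are plain `{name}` fields, so rendering needs nothing beyond `yaml.safe_load` and `str.format`. A minimal sketch of filling one section; the file name and the sample values are illustrative, not taken from the package:

```python
import yaml

# File name is illustrative; the diff does not show where this template is installed.
with open("issue_frontmatter_template.yaml") as f:
    template = yaml.safe_load(f)

title = template["issue_template"]["title"].format(
    repo_name="test_iac_agent_private", short_sha="abc1234"
)

header = template["issue_template"]["body_sections"]["header"]["content"].format(
    repo_url="https://github.com/amartyamandal/test_iac_agent_private.git",
    commit_sha="abc1234def5678",
    job_name="r2d-deploy",
    created_at="2025-01-01T00:00:00Z",
)

print(title)
print(header)
```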
@@ -3,8 +3,14 @@ import os
 from openai import OpenAI
 from anthropic import Anthropic
 import requests
-import google.generativeai as genai
-import googleapiclient.discovery
+try:
+    import google.generativeai as genai
+except ImportError:
+    genai = None
+try:
+    import googleapiclient.discovery
+except ImportError:
+    googleapiclient = None
 from concurrent.futures import ThreadPoolExecutor, TimeoutError

 # Import centralized configuration
@@ -45,6 +51,10 @@ def test_openai_api():

 def test_gemini_api():
     try:
+        if genai is None:
+            print("❌ Gemini API error: google-generativeai package not installed.")
+            return False
+
         google_api_key = os.environ.get("GOOGLE_API_KEY")
         if not google_api_key:
             print("❌ Gemini API error: GOOGLE_API_KEY environment variable not set.")
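The guarded import added here is a generic optional-dependency idiom: bind the module name to `None` when the package is missing and check before use. A tiny self-contained sketch of the same pattern:

```python
try:
    import google.generativeai as genai  # optional dependency
except ImportError:
    genai = None

def gemini_available() -> bool:
    """True only when the optional SDK is installed."""
    return genai is not None
```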