tetra-rp 0.6.0__py3-none-any.whl → 0.24.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. tetra_rp/__init__.py +109 -19
  2. tetra_rp/cli/commands/__init__.py +1 -0
  3. tetra_rp/cli/commands/apps.py +143 -0
  4. tetra_rp/cli/commands/build.py +1082 -0
  5. tetra_rp/cli/commands/build_utils/__init__.py +1 -0
  6. tetra_rp/cli/commands/build_utils/handler_generator.py +176 -0
  7. tetra_rp/cli/commands/build_utils/lb_handler_generator.py +309 -0
  8. tetra_rp/cli/commands/build_utils/manifest.py +430 -0
  9. tetra_rp/cli/commands/build_utils/mothership_handler_generator.py +75 -0
  10. tetra_rp/cli/commands/build_utils/scanner.py +596 -0
  11. tetra_rp/cli/commands/deploy.py +580 -0
  12. tetra_rp/cli/commands/init.py +123 -0
  13. tetra_rp/cli/commands/resource.py +108 -0
  14. tetra_rp/cli/commands/run.py +296 -0
  15. tetra_rp/cli/commands/test_mothership.py +458 -0
  16. tetra_rp/cli/commands/undeploy.py +533 -0
  17. tetra_rp/cli/main.py +97 -0
  18. tetra_rp/cli/utils/__init__.py +1 -0
  19. tetra_rp/cli/utils/app.py +15 -0
  20. tetra_rp/cli/utils/conda.py +127 -0
  21. tetra_rp/cli/utils/deployment.py +530 -0
  22. tetra_rp/cli/utils/ignore.py +143 -0
  23. tetra_rp/cli/utils/skeleton.py +184 -0
  24. tetra_rp/cli/utils/skeleton_template/.env.example +4 -0
  25. tetra_rp/cli/utils/skeleton_template/.flashignore +40 -0
  26. tetra_rp/cli/utils/skeleton_template/.gitignore +44 -0
  27. tetra_rp/cli/utils/skeleton_template/README.md +263 -0
  28. tetra_rp/cli/utils/skeleton_template/main.py +44 -0
  29. tetra_rp/cli/utils/skeleton_template/mothership.py +55 -0
  30. tetra_rp/cli/utils/skeleton_template/pyproject.toml +58 -0
  31. tetra_rp/cli/utils/skeleton_template/requirements.txt +1 -0
  32. tetra_rp/cli/utils/skeleton_template/workers/__init__.py +0 -0
  33. tetra_rp/cli/utils/skeleton_template/workers/cpu/__init__.py +19 -0
  34. tetra_rp/cli/utils/skeleton_template/workers/cpu/endpoint.py +36 -0
  35. tetra_rp/cli/utils/skeleton_template/workers/gpu/__init__.py +19 -0
  36. tetra_rp/cli/utils/skeleton_template/workers/gpu/endpoint.py +61 -0
  37. tetra_rp/client.py +136 -33
  38. tetra_rp/config.py +29 -0
  39. tetra_rp/core/api/runpod.py +591 -39
  40. tetra_rp/core/deployment.py +232 -0
  41. tetra_rp/core/discovery.py +425 -0
  42. tetra_rp/core/exceptions.py +50 -0
  43. tetra_rp/core/resources/__init__.py +27 -9
  44. tetra_rp/core/resources/app.py +738 -0
  45. tetra_rp/core/resources/base.py +139 -4
  46. tetra_rp/core/resources/constants.py +21 -0
  47. tetra_rp/core/resources/cpu.py +115 -13
  48. tetra_rp/core/resources/gpu.py +182 -16
  49. tetra_rp/core/resources/live_serverless.py +153 -16
  50. tetra_rp/core/resources/load_balancer_sls_resource.py +440 -0
  51. tetra_rp/core/resources/network_volume.py +126 -31
  52. tetra_rp/core/resources/resource_manager.py +436 -35
  53. tetra_rp/core/resources/serverless.py +537 -120
  54. tetra_rp/core/resources/serverless_cpu.py +201 -0
  55. tetra_rp/core/resources/template.py +1 -59
  56. tetra_rp/core/utils/constants.py +10 -0
  57. tetra_rp/core/utils/file_lock.py +260 -0
  58. tetra_rp/core/utils/http.py +67 -0
  59. tetra_rp/core/utils/lru_cache.py +75 -0
  60. tetra_rp/core/utils/singleton.py +36 -1
  61. tetra_rp/core/validation.py +44 -0
  62. tetra_rp/execute_class.py +301 -0
  63. tetra_rp/protos/remote_execution.py +98 -9
  64. tetra_rp/runtime/__init__.py +1 -0
  65. tetra_rp/runtime/circuit_breaker.py +274 -0
  66. tetra_rp/runtime/config.py +12 -0
  67. tetra_rp/runtime/exceptions.py +49 -0
  68. tetra_rp/runtime/generic_handler.py +206 -0
  69. tetra_rp/runtime/lb_handler.py +189 -0
  70. tetra_rp/runtime/load_balancer.py +160 -0
  71. tetra_rp/runtime/manifest_fetcher.py +192 -0
  72. tetra_rp/runtime/metrics.py +325 -0
  73. tetra_rp/runtime/models.py +73 -0
  74. tetra_rp/runtime/mothership_provisioner.py +512 -0
  75. tetra_rp/runtime/production_wrapper.py +266 -0
  76. tetra_rp/runtime/reliability_config.py +149 -0
  77. tetra_rp/runtime/retry_manager.py +118 -0
  78. tetra_rp/runtime/serialization.py +124 -0
  79. tetra_rp/runtime/service_registry.py +346 -0
  80. tetra_rp/runtime/state_manager_client.py +248 -0
  81. tetra_rp/stubs/live_serverless.py +35 -17
  82. tetra_rp/stubs/load_balancer_sls.py +357 -0
  83. tetra_rp/stubs/registry.py +145 -19
  84. {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/METADATA +398 -60
  85. tetra_rp-0.24.0.dist-info/RECORD +99 -0
  86. {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/WHEEL +1 -1
  87. tetra_rp-0.24.0.dist-info/entry_points.txt +2 -0
  88. tetra_rp/core/pool/cluster_manager.py +0 -177
  89. tetra_rp/core/pool/dataclass.py +0 -18
  90. tetra_rp/core/pool/ex.py +0 -38
  91. tetra_rp/core/pool/job.py +0 -22
  92. tetra_rp/core/pool/worker.py +0 -19
  93. tetra_rp/core/resources/utils.py +0 -50
  94. tetra_rp/core/utils/json.py +0 -33
  95. tetra_rp-0.6.0.dist-info/RECORD +0 -39
  96. /tetra_rp/{core/pool → cli}/__init__.py +0 -0
  97. {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ b/tetra_rp/core/deployment.py
@@ -0,0 +1,232 @@
+"""Background deployment orchestrator with progress tracking."""
+
+import asyncio
+import logging
+import threading
+from dataclasses import dataclass
+from datetime import datetime
+from enum import Enum
+from typing import List
+
+from rich.console import Console
+from rich.progress import Progress, SpinnerColumn, TextColumn
+
+from .resources.base import DeployableResource
+from .resources.resource_manager import ResourceManager
+
+log = logging.getLogger(__name__)
+console = Console()
+
+
+class DeploymentStatus(Enum):
+    """Status of a resource deployment."""
+
+    PENDING = "pending"
+    CHECKING = "checking"
+    CACHED = "cached"
+    DEPLOYING = "deploying"
+    SUCCESS = "success"
+    FAILED = "failed"
+
+
+@dataclass
+class DeploymentResult:
+    """Result of a resource deployment operation."""
+
+    resource: DeployableResource
+    status: DeploymentStatus
+    duration: float
+    error: str = ""
+    endpoint_id: str = ""
+
+
+class DeploymentOrchestrator:
+    """Orchestrates parallel resource deployment with progress tracking."""
+
+    def __init__(self, max_concurrent: int = 3):
+        """Initialize deployment orchestrator.
+
+        Args:
+            max_concurrent: Maximum number of concurrent deployments
+        """
+        self.max_concurrent = max_concurrent
+        self.manager = ResourceManager()
+        self.results: List[DeploymentResult] = []
+
+    def deploy_all_background(self, resources: List[DeployableResource]) -> None:
+        """Deploy all resources in background thread.
+
+        This method spawns a background thread to deploy resources without
+        blocking the main thread. Progress is logged to console.
+
+        Args:
+            resources: List of resources to deploy
+        """
+        if not resources:
+            console.print("[dim]No resources to deploy[/dim]")
+            return
+
+        def run_async_deployment():
+            """Run async deployment in background thread."""
+            try:
+                # Create new event loop for this thread
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+
+                # Run deployment silently in background
+                loop.run_until_complete(self.deploy_all(resources, show_progress=False))
+
+            except Exception as e:
+                log.error(f"Background deployment failed: {e}")
+            finally:
+                loop.close()
+
+        # Start background thread
+        thread = threading.Thread(target=run_async_deployment, daemon=True)
+        thread.start()
+
+        console.print(
+            f"[dim]Auto-provisioning {len(resources)} resource(s) in background...[/dim]"
+        )
+
+    async def deploy_all(
+        self, resources: List[DeployableResource], show_progress: bool = True
+    ) -> List[DeploymentResult]:
+        """Deploy all resources in parallel with progress tracking.
+
+        Args:
+            resources: List of resources to deploy
+            show_progress: Whether to show progress indicator and summary (default: True)
+
+        Returns:
+            List of deployment results
+        """
+        if not resources:
+            return []
+
+        # Create semaphore for concurrency control
+        semaphore = asyncio.Semaphore(self.max_concurrent)
+
+        # Create deployment tasks
+        deploy_tasks = [
+            self._deploy_with_semaphore(resource, semaphore) for resource in resources
+        ]
+
+        # Deploy with progress indication
+        if show_progress:
+            with Progress(
+                SpinnerColumn(),
+                TextColumn("[progress.description]{task.description}"),
+                console=console,
+            ) as progress:
+                task_id = progress.add_task(
+                    f"Provisioning {len(resources)} resource(s)...",
+                    total=None,
+                )
+
+                # Wait for all deployments
+                self.results = await asyncio.gather(
+                    *deploy_tasks, return_exceptions=False
+                )
+
+                progress.update(
+                    task_id,
+                    description=f"[green]✓ Provisioned {len(resources)} resource(s)",
+                )
+                progress.stop_task(task_id)
+
+            # Display summary
+            self._display_summary()
+        else:
+            # Silent deployment for background provisioning
+            self.results = await asyncio.gather(*deploy_tasks, return_exceptions=False)
+
+        return self.results
+
+    async def _deploy_with_semaphore(
+        self, resource: DeployableResource, semaphore: asyncio.Semaphore
+    ) -> DeploymentResult:
+        """Deploy single resource with semaphore control.
+
+        Args:
+            resource: Resource to deploy
+            semaphore: Semaphore for concurrency limiting
+
+        Returns:
+            Deployment result
+        """
+        start_time = datetime.now()
+        resource_name = getattr(resource, "name", "Unknown")
+
+        async with semaphore:
+            try:
+                # Quick check if already deployed
+                if resource.is_deployed():
+                    duration = (datetime.now() - start_time).total_seconds()
+                    return DeploymentResult(
+                        resource=resource,
+                        status=DeploymentStatus.CACHED,
+                        duration=duration,
+                        endpoint_id=getattr(resource, "id", "N/A"),
+                    )
+
+                # Deploy resource
+                deployed = await self.manager.get_or_deploy_resource(resource)
+                duration = (datetime.now() - start_time).total_seconds()
+
+                return DeploymentResult(
+                    resource=deployed,
+                    status=DeploymentStatus.SUCCESS,
+                    duration=duration,
+                    endpoint_id=getattr(deployed, "id", "N/A"),
+                )
+
+            except Exception as e:
+                duration = (datetime.now() - start_time).total_seconds()
+                log.error(f"Failed to deploy {resource_name}: {e}")
+
+                return DeploymentResult(
+                    resource=resource,
+                    status=DeploymentStatus.FAILED,
+                    duration=duration,
+                    error=str(e),
+                )
+
+    def _display_summary(self):
+        """Display deployment summary."""
+        if not self.results:
+            return
+
+        # Count statuses
+        cached = sum(1 for r in self.results if r.status == DeploymentStatus.CACHED)
+        deployed = sum(1 for r in self.results if r.status == DeploymentStatus.SUCCESS)
+        failed = sum(1 for r in self.results if r.status == DeploymentStatus.FAILED)
+        total_time = sum(r.duration for r in self.results)
+
+        # Build summary message
+        parts = []
+        if cached > 0:
+            parts.append(f"{cached} cached")
+        if deployed > 0:
+            parts.append(f"{deployed} deployed")
+        if failed > 0:
+            parts.append(f"{failed} failed")
+
+        status_text = ", ".join(parts)
+
+        console.print()
+        if failed > 0:
+            console.print(
+                f"[yellow]⚠[/yellow] Provisioning completed: {len(self.results)} resources "
+                f"({status_text}) in {total_time:.1f}s"
+            )
+            console.print(
+                "[yellow]Note:[/yellow] Failed resources will deploy on-demand when first called"
+            )
+        else:
+            console.print(
+                f"[green]✓[/green] Provisioning completed: {len(self.results)} resources "
+                f"({status_text}) in {total_time:.1f}s"
+            )
+
+        console.print()
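
For orientation, a minimal usage sketch of the new orchestrator. It relies only on what the hunk above shows (module path `tetra_rp/core/deployment.py`, `deploy_all`, `deploy_all_background`); the `provision` helper and the empty resource list are illustrative placeholders, not part of the package.

```python
import asyncio

from tetra_rp.core.deployment import DeploymentOrchestrator, DeploymentStatus


async def provision(resources):
    """Deploy DeployableResource objects (at most 3 at a time) and report failures."""
    orchestrator = DeploymentOrchestrator(max_concurrent=3)
    results = await orchestrator.deploy_all(resources, show_progress=True)
    for result in results:
        if result.status is DeploymentStatus.FAILED:
            print(f"Deployment failed: {result.error}")
    return results


if __name__ == "__main__":
    # Pass real DeployableResource instances here; an empty list is a no-op.
    asyncio.run(provision([]))

    # Fire-and-forget variant (spawns a daemon thread and returns immediately):
    # DeploymentOrchestrator().deploy_all_background([])
```
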
--- /dev/null
+++ b/tetra_rp/core/discovery.py
@@ -0,0 +1,425 @@
+"""Resource discovery for auto-provisioning during flash run startup."""
+
+import ast
+import importlib.util
+import logging
+import sys
+from pathlib import Path
+from typing import Dict, List, Set
+
+from .resources.base import DeployableResource
+
+log = logging.getLogger(__name__)
+
+
+class ResourceDiscovery:
+    """Discovers DeployableResource instances by parsing @remote decorators."""
+
+    def __init__(self, entry_point: str, max_depth: int = 2):
+        """Initialize resource discovery.
+
+        Args:
+            entry_point: Path to entry point file (e.g., "main.py")
+            max_depth: Maximum depth for recursive module scanning
+        """
+        self.entry_point = Path(entry_point)
+        self.max_depth = max_depth
+        self._cache: Dict[str, List[DeployableResource]] = {}
+        self._scanned_modules: Set[str] = set()
+
+    def discover(self) -> List[DeployableResource]:
+        """Discover all DeployableResource instances in entry point and imports.
+
+        Returns:
+            List of discovered deployable resources
+        """
+        if str(self.entry_point) in self._cache:
+            return self._cache[str(self.entry_point)]
+
+        resources = []
+
+        try:
+            # Parse entry point to find @remote decorators
+            resource_var_names = self._find_resource_config_vars(self.entry_point)
+
+            # Import entry point module to resolve variables (if any found)
+            if resource_var_names:
+                module = self._import_module(self.entry_point)
+
+                if module:
+                    # Resolve variable names to actual DeployableResource objects
+                    for var_name in resource_var_names:
+                        resource = self._resolve_resource_variable(module, var_name)
+                        if resource:
+                            resources.append(resource)
+                            log.debug(
+                                f"Discovered resource: {var_name} -> {resource.__class__.__name__}"
+                            )
+                else:
+                    log.warning(f"Failed to import {self.entry_point}")
+
+            log.info(f"[Discovery] After entry point: {len(resources)} resource(s)")
+
+            # Recursively scan imported modules (static imports)
+            imported_resources = self._scan_imports(self.entry_point, depth=1)
+            resources.extend(imported_resources)
+
+            log.info(f"[Discovery] After static imports: {len(resources)} resource(s)")
+
+            # Fallback: Scan project directory for Python files with @remote decorators
+            # This handles dynamic imports (importlib.util) that AST parsing misses
+            if not resources:
+                log.debug(
+                    "No resources found via static imports, scanning project directory"
+                )
+                directory_resources = self._scan_project_directory()
+                resources.extend(directory_resources)
+                log.info(
+                    f"[Discovery] After directory scan: {len(resources)} resource(s)"
+                )
+
+            log.info(f"[Discovery] Total: {len(resources)} resource(s) discovered")
+            for res in resources:
+                res_name = getattr(res, "name", "Unknown")
+                res_type = res.__class__.__name__
+                log.info(f"[Discovery] • {res_name} ({res_type})")
+
+            # Cache results
+            self._cache[str(self.entry_point)] = resources
+
+        except Exception as e:
+            log.error(f"Error discovering resources in {self.entry_point}: {e}")
+
+        return resources
+
+    def _find_resource_config_vars(self, file_path: Path) -> Set[str]:
+        """Find variable names used in @remote decorators via AST parsing.
+
+        Args:
+            file_path: Path to Python file to parse
+
+        Returns:
+            Set of variable names referenced in @remote decorators
+        """
+        var_names = set()
+
+        try:
+            tree = ast.parse(file_path.read_text(encoding="utf-8"))
+
+            for node in ast.walk(tree):
+                if isinstance(
+                    node, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)
+                ):
+                    for decorator in node.decorator_list:
+                        if self._is_remote_decorator(decorator):
+                            # Extract resource_config variable name
+                            var_name = self._extract_resource_config_var(decorator)
+                            if var_name:
+                                var_names.add(var_name)
+
+        except Exception as e:
+            log.warning(f"Failed to parse {file_path}: {e}")
+
+        return var_names
+
+    def _is_remote_decorator(self, decorator: ast.expr) -> bool:
+        """Check if decorator is @remote.
+
+        Args:
+            decorator: AST decorator node
+
+        Returns:
+            True if decorator is @remote
+        """
+        if isinstance(decorator, ast.Call):
+            func_name = None
+            if isinstance(decorator.func, ast.Name):
+                func_name = decorator.func.id
+            elif isinstance(decorator.func, ast.Attribute):
+                func_name = decorator.func.attr
+
+            return func_name == "remote"
+
+        return False
+
+    def _extract_resource_config_var(self, decorator: ast.Call) -> str:
+        """Extract resource_config variable name from @remote decorator.
+
+        Handles both:
+        - @remote(resource_config=my_config)
+        - @remote(my_config) (positional argument)
+
+        Args:
+            decorator: AST Call node for @remote decorator
+
+        Returns:
+            Variable name or empty string
+        """
+        # Check keyword argument: resource_config=var_name
+        for keyword in decorator.keywords:
+            if keyword.arg == "resource_config":
+                if isinstance(keyword.value, ast.Name):
+                    return keyword.value.id
+
+        # Check positional argument: @remote(var_name)
+        if decorator.args and isinstance(decorator.args[0], ast.Name):
+            return decorator.args[0].id
+
+        return ""
+
+    def _import_module(self, file_path: Path):
+        """Import a Python module from file path.
+
+        Args:
+            file_path: Path to Python file
+
+        Returns:
+            Imported module or None if import fails
+        """
+        try:
+            # Create module spec
+            module_name = file_path.stem
+            spec = importlib.util.spec_from_file_location(module_name, file_path)
+
+            if not spec or not spec.loader:
+                return None
+
+            # Load module
+            module = importlib.util.module_from_spec(spec)
+            sys.modules[module_name] = module
+            spec.loader.exec_module(module)
+
+            self._scanned_modules.add(module_name)
+
+            return module
+
+        except Exception as e:
+            log.warning(f"Failed to import {file_path}: {e}")
+            return None
+
+    def _resolve_resource_variable(self, module, var_name: str) -> DeployableResource:
+        """Resolve variable name to DeployableResource instance.
+
+        Args:
+            module: Imported module
+            var_name: Variable name to resolve
+
+        Returns:
+            DeployableResource instance or None
+        """
+        try:
+            obj = getattr(module, var_name, None)
+
+            if obj and isinstance(obj, DeployableResource):
+                return obj
+
+            log.warning(
+                f"Resource '{var_name}' failed to resolve to DeployableResource "
+                f"(found type: {type(obj).__name__}). "
+                f"Check that '{var_name}' is defined as a ServerlessResource or other DeployableResource type."
+            )
+
+        except Exception as e:
+            log.warning(f"Failed to resolve variable '{var_name}': {e}")
+
+        return None
+
+    def _scan_imports(self, file_path: Path, depth: int) -> List[DeployableResource]:
+        """Recursively scan imported modules for resources.
+
+        Args:
+            file_path: Path to Python file
+            depth: Current recursion depth
+
+        Returns:
+            List of discovered resources from imports
+        """
+        if depth > self.max_depth:
+            return []
+
+        resources = []
+
+        try:
+            tree = ast.parse(file_path.read_text(encoding="utf-8"))
+
+            # Find import statements
+            for node in ast.walk(tree):
+                if isinstance(node, ast.Import):
+                    for alias in node.names:
+                        module_name = alias.name
+                        if module_name not in self._scanned_modules:
+                            imported_resources = self._scan_imported_module(
+                                module_name, depth
+                            )
+                            resources.extend(imported_resources)
+
+                elif isinstance(node, ast.ImportFrom):
+                    if node.module and node.module not in self._scanned_modules:
+                        imported_resources = self._scan_imported_module(
+                            node.module, depth
+                        )
+                        resources.extend(imported_resources)
+
+        except Exception as e:
+            log.debug(f"Failed to scan imports in {file_path}: {e}")
+
+        return resources
+
+    def _scan_imported_module(
+        self, module_name: str, depth: int
+    ) -> List[DeployableResource]:
+        """Scan an imported module for resources.
+
+        Args:
+            module_name: Name of module to scan
+            depth: Current recursion depth
+
+        Returns:
+            List of discovered resources
+        """
+        resources = []
+
+        try:
+            # Try to find module file
+            module_path = self._resolve_module_path(module_name)
+
+            if not module_path or not module_path.exists():
+                return []
+
+            # Mark as scanned to avoid cycles
+            self._scanned_modules.add(module_name)
+
+            # Find resources in this module
+            resource_vars = self._find_resource_config_vars(module_path)
+
+            if resource_vars:
+                # Import module and resolve variables
+                module = self._import_module(module_path)
+                if module:
+                    for var_name in resource_vars:
+                        resource = self._resolve_resource_variable(module, var_name)
+                        if resource:
+                            resources.append(resource)
+
+            # Recursively scan imports
+            imported_resources = self._scan_imports(module_path, depth + 1)
+            resources.extend(imported_resources)
+
+        except Exception as e:
+            log.debug(f"Failed to scan imported module '{module_name}': {e}")
+
+        return resources
+
+    def _resolve_module_path(self, module_name: str) -> Path:
+        """Resolve module name to file path.
+
+        Args:
+            module_name: Name of module (e.g., "workers.gpu")
+
+        Returns:
+            Path to module file or None
+        """
+        try:
+            # Handle relative imports from entry point directory
+            parts = module_name.split(".")
+            current_dir = self.entry_point.parent
+
+            # Try as relative path first
+            module_path = current_dir.joinpath(*parts)
+
+            # Check for .py file
+            if module_path.with_suffix(".py").exists():
+                return module_path.with_suffix(".py")
+
+            # Check for package (__init__.py)
+            if (module_path / "__init__.py").exists():
+                return module_path / "__init__.py"
+
+        except Exception as e:
+            log.debug(f"Failed to resolve module path for '{module_name}': {e}")
+
+        return None
+
+    def _scan_project_directory(self) -> List[DeployableResource]:
+        """Scan project directory for Python files with @remote decorators.
+
+        This is a fallback for projects that use dynamic imports (importlib.util)
+        which cannot be detected via static AST import scanning.
+
+        Returns:
+            List of discovered resources
+        """
+        resources = []
+        project_root = self.entry_point.parent
+
+        try:
+            # Find all Python files in project (excluding common ignore patterns)
+            python_files = []
+            for pattern in ["**/*.py"]:
+                for file_path in project_root.glob(pattern):
+                    # Skip entry point (already processed)
+                    if file_path == self.entry_point:
+                        continue
+
+                    # Skip common directories
+                    rel_path = str(file_path.relative_to(project_root))
+                    if any(
+                        skip in rel_path
+                        for skip in [
+                            ".venv/",
+                            "venv/",
+                            "__pycache__/",
+                            ".git/",
+                            "site-packages/",
+                            ".pytest_cache/",
+                            "build/",
+                            "dist/",
+                            ".tox/",
+                            "node_modules/",
+                            ".flash/",
+                        ]
+                    ):
+                        continue
+
+                    python_files.append(file_path)
+
+            log.debug(f"Scanning {len(python_files)} Python files in {project_root}")
+
+            # Check each file for @remote decorators
+            for file_path in python_files:
+                try:
+                    # Quick check: does file contain "@remote"?
+                    content = file_path.read_text(encoding="utf-8")
+                    if "@remote" not in content:
+                        continue
+
+                    # Find resource config variables via AST
+                    resource_vars = self._find_resource_config_vars(file_path)
+                    if not resource_vars:
+                        continue
+
+                    # Import module and resolve variables
+                    module = self._import_module(file_path)
+                    if module:
+                        for var_name in resource_vars:
+                            resource = self._resolve_resource_variable(module, var_name)
+                            if resource:
+                                resources.append(resource)
+                                log.debug(
+                                    f"Discovered resource in {file_path.relative_to(project_root)}: "
+                                    f"{var_name} -> {resource.__class__.__name__}"
+                                )
+
+                except Exception as e:
+                    log.debug(f"Failed to scan {file_path}: {e}")
+                    continue
+
+        except Exception as e:
+            log.warning(f"Failed to scan project directory: {e}")
+
+        return resources
+
+    def clear_cache(self):
+        """Clear discovery cache (for reload mode)."""
+        self._cache.clear()
+        self._scanned_modules.clear()
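
Taken together with `deployment.py`, this module drives the auto-provisioning flow at `flash run` startup. Below is a hedged sketch of that flow using only calls visible in the hunks above; the `"main.py"` entry point is a placeholder.

```python
from tetra_rp.core.deployment import DeploymentOrchestrator
from tetra_rp.core.discovery import ResourceDiscovery

# Scan the entry point (and up to 2 levels of static imports) for variables
# passed to @remote, falling back to a whole-project scan if nothing is found.
discovery = ResourceDiscovery(entry_point="main.py", max_depth=2)
resources = discovery.discover()

# Provision whatever was found without blocking startup.
DeploymentOrchestrator().deploy_all_background(resources)

# In reload/watch mode, clear the cache so edited files are re-scanned:
# discovery.clear_cache()
```
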
--- /dev/null
+++ b/tetra_rp/core/exceptions.py
@@ -0,0 +1,50 @@
+"""Custom exceptions for tetra_rp.
+
+Provides clear, actionable error messages for common failure scenarios.
+"""
+
+
+class RunpodAPIKeyError(Exception):
+    """Raised when RUNPOD_API_KEY environment variable is missing or invalid.
+
+    This exception provides helpful guidance on how to obtain and configure
+    the API key required for remote execution and deployment features.
+    """
+
+    def __init__(self, message: str | None = None):
+        """Initialize with optional custom message.
+
+        Args:
+            message: Optional custom error message. If not provided, uses default.
+        """
+        if message is None:
+            message = self._default_message()
+        super().__init__(message)
+
+    @staticmethod
+    def _default_message() -> str:
+        """Generate default error message with setup instructions.
+
+        Returns:
+            Formatted error message with actionable steps.
+        """
+        return """RUNPOD_API_KEY environment variable is required but not set.
+
+To use Flash remote execution features, you need a Runpod API key.
+
+Get your API key:
+https://docs.runpod.io/get-started/api-keys
+
+Set your API key using one of these methods:
+
+1. Environment variable:
+export RUNPOD_API_KEY=your_api_key_here
+
+2. In your project's .env file:
+echo "RUNPOD_API_KEY=your_api_key_here" >> .env
+
+3. In your shell profile (~/.bashrc, ~/.zshrc):
+echo 'export RUNPOD_API_KEY=your_api_key_here' >> ~/.bashrc
+
+Note: If you created a .env file, make sure it's in your current directory
+or project root where Flash can find it."""
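
A short sketch of how a caller might surface this error; the `require_api_key` helper is illustrative and not part of the diff.

```python
import os

from tetra_rp.core.exceptions import RunpodAPIKeyError


def require_api_key() -> str:
    """Return the Runpod API key or raise with the setup instructions above."""
    api_key = os.environ.get("RUNPOD_API_KEY")
    if not api_key:
        raise RunpodAPIKeyError()
    return api_key
```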