gitarsenal-cli 1.9.96 → 1.9.97
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.venv_status.json +1 -1
- package/README.md +29 -0
- package/bin/gitarsenal.js +220 -127
- package/kill_claude/requirements.txt +1 -1
- package/lib/dependencies.js +130 -4
- package/lib/e2b-sandbox.js +158 -0
- package/lib/execAsync.js +12 -0
- package/lib/sandbox.js +97 -113
- package/package.json +2 -1
- package/python/__pycache__/credentials_manager.cpython-312.pyc +0 -0
- package/python/__pycache__/e2b_sandbox_agent.cpython-313.pyc +0 -0
- package/python/__pycache__/fetch_modal_tokens.cpython-312.pyc +0 -0
- package/python/credentials_manager.py +2 -1
- package/python/e2b_sandbox_agent.py +787 -0
- package/python/fetch_modal_tokens.py +47 -25
- package/python/requirements.txt +2 -1
- package/python/test_enhanced_sandbox_script.py +1429 -0
- package/python/test_modalSandboxScript.py +41 -5
- package/scripts/setup_e2b.js +162 -0
- package/kill_claude/.claude/settings.local.json +0 -9
- package/kill_claude/__pycache__/bash_output_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/bash_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/claude_code_agent.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/edit_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/exit_plan_mode_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/glob_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/grep_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/kill_bash_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/ls_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/multiedit_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/notebook_edit_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/read_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/task_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/todo_write_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/web_fetch_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/web_search_tool.cpython-313.pyc +0 -0
- package/kill_claude/__pycache__/write_tool.cpython-313.pyc +0 -0
|
@@ -0,0 +1,1429 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
import time
|
|
4
|
+
import subprocess
|
|
5
|
+
import json
|
|
6
|
+
import re
|
|
7
|
+
import datetime
|
|
8
|
+
import getpass
|
|
9
|
+
import secrets
|
|
10
|
+
import string
|
|
11
|
+
import argparse
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
import modal
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def generate_random_password(length=16):
    """Generate a random password for SSH access.

    Args:
        length: Number of characters in the password (default 16).

    Returns:
        A cryptographically random string drawn from letters, digits,
        and a small set of punctuation characters.
    """
    # Use the secrets module (not random) since this guards SSH access.
    pool = string.ascii_letters + string.digits + "!@#$%^&*"
    chars = [secrets.choice(pool) for _ in range(length)]
    return ''.join(chars)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_stored_credentials():
    """Load stored credentials from ~/.gitarsenal/credentials.json"""
    import json
    from pathlib import Path

    try:
        cred_path = Path.home() / ".gitarsenal" / "credentials.json"
        # Missing file is the normal first-run case: return an empty mapping.
        if not cred_path.exists():
            return {}
        with open(cred_path, 'r') as fh:
            return json.load(fh)
    except Exception as e:
        # Best-effort loader: a corrupt/unreadable file degrades to empty.
        print(f"⚠️ Error loading stored credentials: {e}")
        return {}
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def parse_analysis_data(analysis_data_json):
    """Parse analysis data JSON string and return structured data.

    Args:
        analysis_data_json: JSON-encoded string from the analysis endpoint,
            or a falsy value (None / empty string).

    Returns:
        The decoded object on success; None when the input is empty or
        cannot be parsed (a warning is printed in that case).
    """
    if not analysis_data_json:
        return None

    # json is imported at module level; no need for a function-local import.
    try:
        return json.loads(analysis_data_json)
    except json.JSONDecodeError as e:
        print(f"⚠️ Error parsing analysis data: {e}")
        return None
    except Exception as e:
        # e.g. TypeError for a non-string argument — keep best-effort behavior.
        print(f"⚠️ Unexpected error parsing analysis data: {e}")
        return None
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def select_base_image_from_analysis(analysis_data):
    """Select the appropriate Docker base image based on CUDA recommendations from analysis.

    Args:
        analysis_data: Dict from the repository analysis endpoint. May contain
            a 'cudaRecommendation' mapping with 'recommendedCudaVersion' and
            'dockerImage' keys (either may be missing, empty, None, or the
            literal string 'null').

    Returns:
        Tuple of (docker_image_name, python_version) strings. Falls back to
        the default CUDA 12.4.1 devel image with Python 3.11 when no usable
        recommendation is found.
    """
    # Default fallback image
    default_image = "nvidia/cuda:12.4.1-devel-ubuntu22.04"
    default_python = "3.11"

    if not analysis_data:
        print("🐳 No analysis data available, using default CUDA image")
        return default_image, default_python

    # Extract CUDA recommendation
    cuda_rec = analysis_data.get('cudaRecommendation', {})
    if not cuda_rec:
        print("🐳 No CUDA recommendation found, using default CUDA image")
        return default_image, default_python

    recommended_cuda = cuda_rec.get('recommendedCudaVersion', '')
    docker_image = cuda_rec.get('dockerImage', '')

    # Handle null values from analysis (the endpoint may emit the string 'null')
    if recommended_cuda == 'null' or recommended_cuda is None:
        recommended_cuda = ''
    if docker_image == 'null' or docker_image is None:
        docker_image = ''

    # Check if analysis returned no useful CUDA information
    if not recommended_cuda and not docker_image:
        print("⚠️ Repository analysis did not detect specific CUDA requirements")
        print("🐳 Will use default CUDA 12.4.1 image for broad compatibility")

    print(f"🔍 CUDA Analysis Results:")
    print(f" - Recommended CUDA Version: {recommended_cuda or 'None'}")
    print(f" - Recommended Docker Image: {docker_image or 'None'}")
    print(f" - Full CUDA Recommendation: {cuda_rec}")

    # If a specific docker image is recommended, use it
    if docker_image and docker_image.strip():
        print(f"🔍 Validating recommended Docker image: {docker_image}")
        # Check if it's a pytorch image or other specialized image
        if 'pytorch' in docker_image.lower():
            print(f"🐳 Using PyTorch-optimized image: {docker_image}")
            return docker_image, "3.11"  # PyTorch images usually have Python pre-installed
        elif 'tensorflow' in docker_image.lower():
            print(f"🐳 Using TensorFlow-optimized image: {docker_image}")
            return docker_image, "3.11"
        elif 'nvidia/cuda' in docker_image.lower():
            # Instead of regex validation, use a whitelist of known good images.
            # This prevents using images that match the pattern but don't exist.
            known_good_images = {
                # CUDA 12.x images
                'nvidia/cuda:12.4.1-devel-ubuntu22.04',
                'nvidia/cuda:12.4.1-runtime-ubuntu22.04',
                'nvidia/cuda:12.3.2-devel-ubuntu22.04',
                'nvidia/cuda:12.3.2-runtime-ubuntu22.04',
                'nvidia/cuda:12.2.2-devel-ubuntu22.04',
                'nvidia/cuda:12.2.2-runtime-ubuntu22.04',
                'nvidia/cuda:12.1.0-devel-ubuntu22.04',
                'nvidia/cuda:12.1.0-runtime-ubuntu22.04',
                'nvidia/cuda:12.0.1-devel-ubuntu22.04',
                # CUDA 11.x images
                'nvidia/cuda:11.8.0-devel-ubuntu22.04',
                'nvidia/cuda:11.8.0-runtime-ubuntu22.04',
                'nvidia/cuda:11.8.0-cudnn8-devel-ubuntu22.04',
                'nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04',
                'nvidia/cuda:11.7.1-devel-ubuntu22.04',
                'nvidia/cuda:11.7.1-runtime-ubuntu22.04',
                'nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04',
                'nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04',
                'nvidia/cuda:11.6.2-devel-ubuntu20.04',
                'nvidia/cuda:11.6.2-runtime-ubuntu20.04',
            }

            if docker_image.lower() in known_good_images:
                print(f"🐳 Using verified NVIDIA CUDA image: {docker_image}")
                return docker_image, "3.11"
            else:
                print(f"⚠️ NVIDIA CUDA image not in whitelist: {docker_image}")
                print(f"🔄 Falling back to CUDA version mapping...")
                # Continue to version mapping below
        else:
            print(f"🐳 Using custom recommended image: {docker_image}")
            return docker_image, "3.11"

    # Map CUDA versions to appropriate base images
    cuda_image_mapping = {
        '12.4': 'nvidia/cuda:12.4.1-devel-ubuntu22.04',
        '12.3': 'nvidia/cuda:12.3.2-devel-ubuntu22.04',
        '12.2': 'nvidia/cuda:12.2.2-devel-ubuntu22.04',
        '12.1': 'nvidia/cuda:12.1.0-devel-ubuntu22.04',
        '12.0': 'nvidia/cuda:12.0.1-devel-ubuntu22.04',
        '11.8': 'nvidia/cuda:11.8.0-devel-ubuntu22.04',
        '11.7': 'nvidia/cuda:11.7.1-devel-ubuntu22.04',
        '11.6': 'nvidia/cuda:11.6.2-devel-ubuntu20.04',
        # Add some runtime variants for better compatibility
        '11.8-runtime': 'nvidia/cuda:11.8.0-runtime-ubuntu22.04',
        '11.7-runtime': 'nvidia/cuda:11.7.1-runtime-ubuntu22.04',
        '12.1-runtime': 'nvidia/cuda:12.1.0-runtime-ubuntu22.04',
    }

    # Extract major.minor version from recommended CUDA version
    if recommended_cuda and recommended_cuda.strip():
        # Handle versions like "12.4", "CUDA 12.4", "12.4.0", "11.8-runtime", etc.
        # (re is imported at module level.)
        version_match = re.search(r'(\d+\.\d+)', recommended_cuda)
        if version_match:
            cuda_version = version_match.group(1)

            # First try exact match
            if cuda_version in cuda_image_mapping:
                selected_image = cuda_image_mapping[cuda_version]
                print(f"🐳 Selected CUDA {cuda_version} image: {selected_image}")
                return selected_image, "3.11"

            # Try with -runtime suffix if original recommended image was runtime
            if (docker_image and 'runtime' in docker_image.lower()) or 'runtime' in recommended_cuda.lower():
                runtime_key = f"{cuda_version}-runtime"
                if runtime_key in cuda_image_mapping:
                    selected_image = cuda_image_mapping[runtime_key]
                    print(f"🐳 Selected CUDA {cuda_version} runtime image: {selected_image}")
                    return selected_image, "3.11"

            # If no exact match, try to find the closest version within the same
            # major release. Compare the major component exactly — a prefix test
            # (startswith) would wrongly match e.g. major "1" against "12.x".
            available_versions = [v for v in cuda_image_mapping.keys() if not v.endswith('-runtime')]
            available_versions.sort(reverse=True)  # Sort descending to prefer newer versions
            requested_major = cuda_version.split('.')[0]

            for available_version in available_versions:
                if available_version.split('.')[0] == requested_major:  # Same major version
                    selected_image = cuda_image_mapping[available_version]
                    print(f"🐳 Selected closest CUDA {available_version} image for requested {cuda_version}: {selected_image}")
                    return selected_image, "3.11"

            print(f"⚠️ CUDA version {cuda_version} not in mapping, using default")
        else:
            print(f"⚠️ Could not parse CUDA version from: {recommended_cuda}")

    print(f"🐳 Using default CUDA image: {default_image}")
    return default_image, default_python
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
# Global SSH container function (must be at global scope for Modal)
|
|
201
|
+
def ssh_container_function(ssh_password=None, repo_url=None, repo_name=None, setup_commands=None, openai_api_key=None, anthropic_api_key=None, stored_credentials=None):
    """Start SSH container with password authentication and intelligent repository setup using Agent.

    Runs inside the Modal container: sets the root password, exports API
    keys and stored credentials into the environment, starts sshd, optionally
    runs the Claude agent to clone/set up ``repo_url``, then opens a Modal
    TCP tunnel on port 22 and blocks forever in a keep-alive loop.

    NOTE(review): ``repo_name`` and ``setup_commands`` are accepted but never
    used in this body — presumably kept for backwards compatibility.
    """
    import subprocess
    import time
    import os
    import json
    import sys

    # Set root password (ssh_password is interpolated into a shell command;
    # assumed to come from generate_random_password, i.e. no shell metachars).
    subprocess.run(["bash", "-c", f"echo 'root:{ssh_password}' | chpasswd"], check=True)

    # Set OpenAI API key if provided
    if openai_api_key:
        os.environ['OPENAI_API_KEY'] = openai_api_key
    else:
        print("⚠️ No OpenAI API key provided to container")

    # Set up stored credentials in container environment
    if stored_credentials:
        print(f"🔐 Setting up {len(stored_credentials)} stored credentials in container...")
        for key, value in stored_credentials.items():
            # Set each credential as an environment variable
            # (normalize names like "my-key" / "my key" -> "MY_KEY")
            env_var_name = key.upper().replace('-', '_').replace(' ', '_')
            os.environ[env_var_name] = value
            print(f"✅ Set {env_var_name} in container environment")

        # Also save credentials to a file in the container for easy access
        credentials_dir = "/root/.gitarsenal"
        os.makedirs(credentials_dir, exist_ok=True)
        credentials_file = os.path.join(credentials_dir, "credentials.json")
        with open(credentials_file, 'w') as f:
            json.dump(stored_credentials, f, indent=2)
        print(f"✅ Saved credentials to {credentials_file}")

        # Print available credentials for user reference (values masked)
        print("\n🔐 AVAILABLE CREDENTIALS IN CONTAINER:")
        print("="*50)
        for key, value in stored_credentials.items():
            masked_value = value[:8] + "..." if len(value) > 8 else "***"
            env_var_name = key.upper().replace('-', '_').replace(' ', '_')
            print(f" {key} -> {env_var_name} = {masked_value}")
        print("="*50)
        print("💡 These credentials are available as environment variables and in /root/.gitarsenal/credentials.json")

    # Start SSH service
    subprocess.run(["service", "ssh", "start"], check=True)

    # Use Agent for intelligent repository setup
    if repo_url:
        print("🤖 Using Agent for intelligent repository setup...")

        # Set up environment variables for the Agent
        if openai_api_key:
            os.environ['OPENAI_API_KEY'] = openai_api_key
        if anthropic_api_key:
            os.environ['ANTHROPIC_API_KEY'] = anthropic_api_key

        # Set up Anthropic API key from stored credentials
        if stored_credentials:
            # Look for Anthropic API key in various possible names
            for key_name in ['ANTHROPIC_API_KEY', 'anthropic_api_key', 'anthropic-api-key']:
                if key_name in stored_credentials:
                    anthropic_api_key = stored_credentials[key_name]
                    os.environ['ANTHROPIC_API_KEY'] = anthropic_api_key
                    print(f"✅ Set Anthropic API key from stored credentials")
                    break

        if not anthropic_api_key:
            print("⚠️ No Anthropic API key found in stored credentials")
            print("💡 Agent will require an Anthropic API key for operation")

        try:
            print("🔧 Running Agent for repository setup...")

            print("\n" + "="*80)
            print("🤖 AGENT REPOSITORY SETUP")
            print("="*80)
            print(f"Repository: {repo_url}")
            print(f"Working Directory: /root")
            if stored_credentials:
                print(f"Available Credentials: {len(stored_credentials)} items")
            print("="*80 + "\n")

            # Call Agent directly as subprocess with real-time output
            claude_prompt = f"clone, setup and run {repo_url}. At the end of the setup process, print the ordered list of every shell command that actually ran successfully (exclude any commands that returned non-zero exit codes). Show each command exactly as executed, one per line."
            # print(f"🚀 Executing the task: \"{claude_prompt}\"")
            print("\n" + "="*60)
            print("🎉 AGENT OUTPUT (LIVE)")
            print("="*60)

            # Use Popen for real-time output streaming with optimizations
            import sys
            import select
            import fcntl
            import os as os_module

            process = subprocess.Popen(
                ["python", "-u", "/python/kill_claude/claude_code_agent.py", claude_prompt],  # -u for unbuffered output
                cwd="/root",
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,  # Keep separate for better handling
                text=True,
                bufsize=0,  # Unbuffered for fastest output
                universal_newlines=True,
                env=dict(os.environ, PYTHONUNBUFFERED='1')  # Force unbuffered Python output
            )

            # Make stdout and stderr non-blocking for faster reading
            # (required so chunked read() below never stalls the loop)
            def make_non_blocking(fd):
                flags = fcntl.fcntl(fd, fcntl.F_GETFL)
                fcntl.fcntl(fd, fcntl.F_SETFL, flags | os_module.O_NONBLOCK)

            make_non_blocking(process.stdout)
            make_non_blocking(process.stderr)

            # Stream output in real-time with robust error handling
            try:
                stdout_buffer = ""
                stderr_buffer = ""

                while process.poll() is None:
                    try:
                        # Use select for efficient I/O multiplexing with error handling
                        ready, _, _ = select.select([process.stdout, process.stderr], [], [], 0.1)  # 100ms timeout

                        for stream in ready:
                            try:
                                if stream == process.stdout:
                                    chunk = stream.read(1024)  # Read in chunks for efficiency
                                    if chunk is not None and chunk:
                                        stdout_buffer += chunk
                                        # Process complete lines immediately
                                        while '\n' in stdout_buffer:
                                            line, stdout_buffer = stdout_buffer.split('\n', 1)
                                            # Add subtle color to agent output for better readability
                                            print(line, flush=True)  # Use default terminal colors
                                elif stream == process.stderr:
                                    chunk = stream.read(1024)
                                    if chunk is not None and chunk:
                                        stderr_buffer += chunk
                                        # Process complete lines immediately
                                        while '\n' in stderr_buffer:
                                            line, stderr_buffer = stderr_buffer.split('\n', 1)
                                            print(f"STDERR: {line}", flush=True)
                            except (BlockingIOError, OSError, ValueError):
                                # Handle various I/O errors gracefully
                                continue
                    except (select.error, OSError):
                        # If select fails, fall back to simple polling
                        time.sleep(0.1)
                        continue

                # Process any remaining output after process ends
                try:
                    # Read any remaining data from streams
                    remaining_stdout = process.stdout.read()
                    remaining_stderr = process.stderr.read()

                    if remaining_stdout:
                        stdout_buffer += remaining_stdout
                    if remaining_stderr:
                        stderr_buffer += remaining_stderr

                    # Output remaining buffered content
                    if stdout_buffer.strip():
                        print(stdout_buffer.strip(), flush=True)
                    if stderr_buffer.strip():
                        print(f"STDERR: {stderr_buffer.strip()}", flush=True)
                except (OSError, ValueError):
                    # Handle cases where streams are already closed
                    pass

                # Get final return code
                return_code = process.returncode

                print("\n" + "="*60)
                if return_code == 0:
                    print("✅ Agent completed successfully!")
                else:
                    print(f"⚠️ Agent exited with code: {return_code}")
                print("="*60)

            except subprocess.TimeoutExpired:
                # NOTE(review): no timeout is actually passed to Popen/communicate
                # above, so this handler looks unreachable — confirm intent.
                print("\n⚠️ Agent timed out after 10 minutes")
                process.kill()
                process.wait()
            except Exception as stream_error:
                # NOTE(review): stream_error is swallowed without logging.
                pass

                # Fallback to simple readline approach
                try:
                    # Restart the process with simpler streaming
                    if process.poll() is None:
                        process.kill()
                        process.wait()

                    fallback_process = subprocess.Popen(
                        ["python", "-u", "/python/kill_claude/claude_code_agent.py", claude_prompt],
                        cwd="/root",
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                        text=True,
                        bufsize=1,
                        universal_newlines=True
                    )

                    # Simple line-by-line reading
                    while True:
                        line = fallback_process.stdout.readline()
                        if line == '' and fallback_process.poll() is not None:
                            break
                        if line:
                            print(line.rstrip(), flush=True)

                    return_code = fallback_process.returncode

                    print("\n" + "="*60)
                    if return_code == 0:
                        print("✅ Agent completed successfully!")
                    else:
                        print(f"⚠️ Agent exited with code: {return_code}")
                    print("="*60)

                except Exception as fallback_error:
                    print(f"\n❌ Fallback streaming also failed: {fallback_error}")
                    print("⚠️ Agent may have completed, but output streaming failed")
                    return_code = 1

        except Exception as e:
            # Setup failure is non-fatal: the SSH tunnel below is still opened.
            print(f"❌ Error during repository setup: {e}")
            print("⚠️ Proceeding without setup...")
            import traceback
            traceback.print_exc()
    else:
        print("⚠️ No repository URL provided, skipping setup")

    print("🔌 Creating SSH tunnel on port 22...")
    # Create SSH tunnel (unencrypted TCP forward; SSH provides the encryption)
    with modal.forward(22, unencrypted=True) as tunnel:
        host, port = tunnel.tcp_socket

        print("\n" + "=" * 80)
        print("🎉 SSH CONTAINER IS READY!")
        print("=" * 80)
        print(f"🌐 SSH Host: {host}")
        print(f"🔌 SSH Port: {port}")
        print(f"👤 Username: root")
        print(f"🔐 Password: {ssh_password}")
        print()
        print("🔗 CONNECT USING THIS COMMAND:")
        print(f"ssh -p {port} root@{host}")
        print("=" * 80)

        print("🔄 Starting keep-alive loop...")
        # Keep the container running (loops forever; Modal's timeout ends it)
        iteration = 0
        while True:
            iteration += 1
            if iteration % 10 == 1:  # Print every 5 minutes (10 * 30 seconds = 5 minutes)
                print(f"💓 Container alive (iteration {iteration})")

            time.sleep(30)
            # Check if SSH service is still running
            try:
                subprocess.run(["service", "ssh", "status"], check=True,
                               capture_output=True)
            except subprocess.CalledProcessError:
                print("⚠️ SSH service stopped, restarting...")
                subprocess.run(["service", "ssh", "start"], check=True)
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
# Create Modal SSH container with GPU support and intelligent repository setup using Agent
|
|
474
|
+
def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_commands=None,
|
|
475
|
+
volume_name=None, timeout_minutes=60, ssh_password=None, interactive=False, gpu_count=1, analysis_data=None):
|
|
476
|
+
"""Create a Modal SSH container with GPU support and intelligent repository setup.
|
|
477
|
+
|
|
478
|
+
When repo_url is provided, uses Agent for intelligent repository setup.
|
|
479
|
+
The setup_commands parameter is maintained for backwards compatibility but ignored when using Agent.
|
|
480
|
+
Args:
|
|
481
|
+
analysis_data: Repository analysis data from best_gpu endpoint for dynamic image selection
|
|
482
|
+
"""
|
|
483
|
+
|
|
484
|
+
# Use interactive mode if specified
|
|
485
|
+
if interactive:
|
|
486
|
+
# If GPU type is not specified, use default
|
|
487
|
+
if not gpu_type:
|
|
488
|
+
gpu_type = "A10G"
|
|
489
|
+
print(f"✅ Using default GPU type: {gpu_type}")
|
|
490
|
+
else:
|
|
491
|
+
print(f"✅ Using provided GPU type: {gpu_type}")
|
|
492
|
+
|
|
493
|
+
# If repo URL is not specified, prompt for it
|
|
494
|
+
if not repo_url:
|
|
495
|
+
try:
|
|
496
|
+
repo_url = input("? Enter GitHub repository URL: ").strip()
|
|
497
|
+
if not repo_url:
|
|
498
|
+
print("❌ Repository URL is required.")
|
|
499
|
+
return None
|
|
500
|
+
except KeyboardInterrupt:
|
|
501
|
+
print("\n🛑 Setup cancelled.")
|
|
502
|
+
return None
|
|
503
|
+
|
|
504
|
+
# If volume name is not specified, ask about persistent volume
|
|
505
|
+
if not volume_name:
|
|
506
|
+
try:
|
|
507
|
+
use_volume = input("? Use persistent volume for faster installs? (Y/n): ").strip().lower()
|
|
508
|
+
if use_volume in ('', 'y', 'yes'):
|
|
509
|
+
volume_name = input("? Enter volume name: ").strip()
|
|
510
|
+
if not volume_name:
|
|
511
|
+
volume_name = "gitarsenal-volume"
|
|
512
|
+
print(f"Using default volume name: {volume_name}")
|
|
513
|
+
except KeyboardInterrupt:
|
|
514
|
+
print("\n🛑 Setup cancelled.")
|
|
515
|
+
sys.exit(1)
|
|
516
|
+
|
|
517
|
+
# Check if Modal is authenticated
|
|
518
|
+
try:
|
|
519
|
+
modal_token_id = os.environ.get("MODAL_TOKEN_ID")
|
|
520
|
+
modal_token = os.environ.get("MODAL_TOKEN")
|
|
521
|
+
openai_api_key = os.environ.get("OPENAI_API_KEY")
|
|
522
|
+
# Try to access Modal token to check authentication
|
|
523
|
+
try:
|
|
524
|
+
# Check if token is set in environment
|
|
525
|
+
modal_token_id = os.environ.get("MODAL_TOKEN_ID")
|
|
526
|
+
if not modal_token_id:
|
|
527
|
+
print("⚠️ MODAL_TOKEN_ID not found in environment.")
|
|
528
|
+
# Try to get from MODAL_TOKEN
|
|
529
|
+
modal_token = os.environ.get("MODAL_TOKEN")
|
|
530
|
+
if modal_token:
|
|
531
|
+
print("✅ Found token in environment variable")
|
|
532
|
+
os.environ["MODAL_TOKEN_ID"] = modal_token
|
|
533
|
+
modal_token_id = modal_token
|
|
534
|
+
print(f"✅ Set token (length: {len(modal_token)})")
|
|
535
|
+
except Exception as e:
|
|
536
|
+
print(f"⚠️ Error checking Modal token: {e}")
|
|
537
|
+
# Try to use the token from environment
|
|
538
|
+
modal_token_id = os.environ.get("MODAL_TOKEN_ID")
|
|
539
|
+
modal_token = os.environ.get("MODAL_TOKEN")
|
|
540
|
+
if modal_token_id:
|
|
541
|
+
print(f"🔄 Using token from environment (length: {len(modal_token_id)})")
|
|
542
|
+
elif modal_token:
|
|
543
|
+
print(f"🔄 Using token from environment (length: {len(modal_token)})")
|
|
544
|
+
os.environ["MODAL_TOKEN_ID"] = modal_token
|
|
545
|
+
modal_token_id = modal_token
|
|
546
|
+
else:
|
|
547
|
+
print("❌ No Modal token available. Cannot proceed.")
|
|
548
|
+
return None
|
|
549
|
+
|
|
550
|
+
# Set it in both environment variables
|
|
551
|
+
os.environ["MODAL_TOKEN_ID"] = modal_token_id
|
|
552
|
+
os.environ["MODAL_TOKEN"] = modal_token_id
|
|
553
|
+
print("✅ Set both token and id environment variables")
|
|
554
|
+
except Exception as e:
|
|
555
|
+
print(f"⚠️ Error checking Modal authentication: {e}")
|
|
556
|
+
print("Continuing anyway, but Modal operations may fail")
|
|
557
|
+
|
|
558
|
+
# Generate a unique app name with timestamp to avoid conflicts
|
|
559
|
+
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
|
|
560
|
+
app_name = f"ssh-container-{timestamp}"
|
|
561
|
+
|
|
562
|
+
gpu_configs = {
|
|
563
|
+
'T4': {'gpu': 't4', 'memory': 16},
|
|
564
|
+
'L4': {'gpu': 'l4', 'memory': 24},
|
|
565
|
+
'A10G': {'gpu': 'a10g', 'memory': 24},
|
|
566
|
+
'A100-40GB': {'gpu': 'a100', 'memory': 40},
|
|
567
|
+
'A100-80GB': {'gpu': 'a100-80gb', 'memory': 80},
|
|
568
|
+
'L40S': {'gpu': 'l40s', 'memory': 48},
|
|
569
|
+
'H100': {'gpu': 'h100', 'memory': 80},
|
|
570
|
+
'H200': {'gpu': 'h200', 'memory': 141},
|
|
571
|
+
'B200': {'gpu': 'b200', 'memory': 96}
|
|
572
|
+
}
|
|
573
|
+
|
|
574
|
+
if gpu_type not in gpu_configs:
|
|
575
|
+
print(f"⚠️ Unknown GPU type: {gpu_type}. Using A10G as default.")
|
|
576
|
+
gpu_type = 'A10G'
|
|
577
|
+
|
|
578
|
+
gpu_spec = gpu_configs[gpu_type]
|
|
579
|
+
|
|
580
|
+
# Configure GPU string for Modal (support multiple GPUs)
|
|
581
|
+
if gpu_count > 1:
|
|
582
|
+
modal_gpu_spec = f"{gpu_spec['gpu']}:{gpu_count}"
|
|
583
|
+
total_memory = gpu_spec['memory'] * gpu_count
|
|
584
|
+
print(f"🚀 Creating SSH container with {gpu_count}x {gpu_spec['gpu']} GPUs ({total_memory}GB total VRAM)")
|
|
585
|
+
else:
|
|
586
|
+
modal_gpu_spec = gpu_spec['gpu']
|
|
587
|
+
print(f"🚀 Creating SSH container with {gpu_spec['gpu']} GPU ({gpu_spec['memory']}GB VRAM)")
|
|
588
|
+
|
|
589
|
+
# Store the modal GPU specification for the decorator
|
|
590
|
+
gpu_spec['modal_gpu'] = modal_gpu_spec
|
|
591
|
+
|
|
592
|
+
# Generate or use provided SSH password
|
|
593
|
+
if not ssh_password:
|
|
594
|
+
ssh_password = generate_random_password()
|
|
595
|
+
print(f"🔐 Generated SSH password: {ssh_password}")
|
|
596
|
+
|
|
597
|
+
# Setup volume if specified
|
|
598
|
+
volume = None
|
|
599
|
+
volume_mount_path = "/persistent"
|
|
600
|
+
|
|
601
|
+
if volume_name:
|
|
602
|
+
print(f"📦 Setting up volume: {volume_name}")
|
|
603
|
+
try:
|
|
604
|
+
volume = modal.Volume.from_name(volume_name, create_if_missing=True)
|
|
605
|
+
print(f"✅ Volume '{volume_name}' ready for use")
|
|
606
|
+
except Exception as e:
|
|
607
|
+
print(f"⚠️ Could not setup volume '{volume_name}': {e}")
|
|
608
|
+
print("⚠️ Continuing without persistent volume")
|
|
609
|
+
volume = None
|
|
610
|
+
else:
|
|
611
|
+
# Create a default volume for this session
|
|
612
|
+
default_volume_name = f"ssh-vol-{timestamp}"
|
|
613
|
+
print(f"📦 Creating default volume: {default_volume_name}")
|
|
614
|
+
try:
|
|
615
|
+
volume = modal.Volume.from_name(default_volume_name, create_if_missing=True)
|
|
616
|
+
volume_name = default_volume_name
|
|
617
|
+
print(f"✅ Default volume '{default_volume_name}' created")
|
|
618
|
+
except Exception as e:
|
|
619
|
+
print(f"⚠️ Could not create default volume: {e}")
|
|
620
|
+
print("⚠️ Continuing without persistent volume")
|
|
621
|
+
volume = None
|
|
622
|
+
|
|
623
|
+
modal_token = os.environ.get("MODAL_TOKEN_ID")
|
|
624
|
+
|
|
625
|
+
# Create SSH-enabled image
|
|
626
|
+
print("📦 Building SSH-enabled image...")
|
|
627
|
+
|
|
628
|
+
# Get the current directory path for mounting local Python sources
|
|
629
|
+
current_dir = os.path.dirname(os.path.abspath(__file__))
|
|
630
|
+
# Get the gitarsenal-cli root directory for kill_claude files
|
|
631
|
+
gitarsenal_root = os.path.dirname(current_dir)
|
|
632
|
+
|
|
633
|
+
# Select base image dynamically based on analysis data
|
|
634
|
+
base_image_name, python_version = select_base_image_from_analysis(analysis_data)
|
|
635
|
+
print(f"🐳 Building image with: {base_image_name} (Python {python_version})")
|
|
636
|
+
|
|
637
|
+
try:
|
|
638
|
+
base_image = modal.Image.from_registry(base_image_name, add_python=python_version)
|
|
639
|
+
except Exception as e:
|
|
640
|
+
print(f"⚠️ Failed to load recommended image {base_image_name}: {e}")
|
|
641
|
+
print("🐳 Falling back to default CUDA image")
|
|
642
|
+
base_image = modal.Image.from_registry("nvidia/cuda:12.4.1-devel-ubuntu22.04", add_python="3.11")
|
|
643
|
+
|
|
644
|
+
# Build the SSH image with the chosen base
|
|
645
|
+
ssh_image = (
|
|
646
|
+
base_image
|
|
647
|
+
# Set timezone and debconf to non-interactive mode to prevent prompts
|
|
648
|
+
.env({"DEBIAN_FRONTEND": "noninteractive", "TZ": "UTC"})
|
|
649
|
+
.run_commands(
|
|
650
|
+
# Configure timezone and package management non-interactively
|
|
651
|
+
"ln -snf /usr/share/zoneinfo/UTC /etc/localtime",
|
|
652
|
+
"echo UTC > /etc/timezone",
|
|
653
|
+
# Configure dpkg to avoid interactive prompts
|
|
654
|
+
"echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections"
|
|
655
|
+
)
|
|
656
|
+
.apt_install(
|
|
657
|
+
"openssh-server", "sudo", "curl", "wget", "vim", "htop", "git",
|
|
658
|
+
"python3", "python3-pip", "unzip", "ca-certificates"
|
|
659
|
+
)
|
|
660
|
+
)
|
|
661
|
+
|
|
662
|
+
# Add Python packages using the appropriate method
|
|
663
|
+
ssh_image = ssh_image.uv_pip_install("uv", "modal", "gitingest", "requests", "openai", "anthropic", "exa-py")
|
|
664
|
+
|
|
665
|
+
# Add the rest of the configuration
|
|
666
|
+
ssh_image = ssh_image.run_commands(
|
|
667
|
+
# Create SSH directory
|
|
668
|
+
"mkdir -p /var/run/sshd",
|
|
669
|
+
"mkdir -p /root/.ssh",
|
|
670
|
+
"chmod 700 /root/.ssh",
|
|
671
|
+
|
|
672
|
+
"ssh-keygen -A",
|
|
673
|
+
"sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config",
|
|
674
|
+
"sed -i 's/#PasswordAuthentication yes/PasswordAuthentication yes/' /etc/ssh/sshd_config",
|
|
675
|
+
"echo 'export PATH=/usr/local/cuda/bin:$PATH' >> /root/.bashrc",
|
|
676
|
+
|
|
677
|
+
# Install Bun (JavaScript runtime and package manager)
|
|
678
|
+
"curl -fsSL https://bun.com/install | bash",
|
|
679
|
+
"echo 'export PATH=/root/.bun/bin:$PATH' >> /root/.bashrc",
|
|
680
|
+
|
|
681
|
+
# Install Pixi (modern package management tool)
|
|
682
|
+
"curl -fsSL https://pixi.sh/install.sh | sh",
|
|
683
|
+
"echo 'export PATH=/root/.pixi/bin:$PATH' >> /root/.bashrc",
|
|
684
|
+
|
|
685
|
+
# Create base directories (subdirectories will be created automatically when mounting)
|
|
686
|
+
"mkdir -p /python",
|
|
687
|
+
).add_local_dir(current_dir, "/python", ignore=lambda p: not p.name.endswith('.py')).add_local_dir(os.path.join(gitarsenal_root, "kill_claude"), "/python/kill_claude")
|
|
688
|
+
print("✅ SSH image built successfully")
|
|
689
|
+
|
|
690
|
+
# Configure volumes if available
|
|
691
|
+
volumes_config = {}
|
|
692
|
+
if volume:
|
|
693
|
+
volumes_config[volume_mount_path] = volume
|
|
694
|
+
|
|
695
|
+
# Create app with image passed directly (THIS IS THE KEY CHANGE)
|
|
696
|
+
print("🔍 Testing app creation...")
|
|
697
|
+
app = modal.App(app_name, image=ssh_image) # Pass image here
|
|
698
|
+
print("✅ Created app successfully")
|
|
699
|
+
|
|
700
|
+
# Apply the decorator to the global SSH container function
|
|
701
|
+
decorated_ssh_function = app.function(
|
|
702
|
+
timeout=timeout_minutes * 60, # Convert to seconds
|
|
703
|
+
gpu=gpu_spec['modal_gpu'], # Use the user-selected GPU type and count
|
|
704
|
+
volumes=volumes_config if volumes_config else None,
|
|
705
|
+
)(ssh_container_function)
|
|
706
|
+
|
|
707
|
+
# Run the container
|
|
708
|
+
try:
|
|
709
|
+
print("⏳ Starting container... This may take 1-2 minutes...")
|
|
710
|
+
|
|
711
|
+
# Start the container and wait for it to complete (blocking)
|
|
712
|
+
with modal.enable_output():
|
|
713
|
+
with app.run():
|
|
714
|
+
# Get the API key from environment
|
|
715
|
+
openai_api_key = os.environ.get("OPENAI_API_KEY")
|
|
716
|
+
anthropic_api_key = os.environ.get("ANTHROPIC_API_KEY")
|
|
717
|
+
|
|
718
|
+
# Get stored credentials from local file
|
|
719
|
+
stored_credentials = get_stored_credentials()
|
|
720
|
+
if stored_credentials:
|
|
721
|
+
print(f"🔐 Found {len(stored_credentials)} stored credentials to send to container")
|
|
722
|
+
else:
|
|
723
|
+
print("⚠️ No stored credentials found")
|
|
724
|
+
|
|
725
|
+
# Use spawn() to get a FunctionCall handle, then wait for it
|
|
726
|
+
print("🚀 Spawning SSH container...")
|
|
727
|
+
try:
|
|
728
|
+
function_call = decorated_ssh_function.spawn(ssh_password, repo_url, repo_name, setup_commands, openai_api_key, anthropic_api_key, stored_credentials)
|
|
729
|
+
print(f"✅ Container spawned with call ID: {function_call.object_id}")
|
|
730
|
+
print(f"🔍 Function call status: {function_call}")
|
|
731
|
+
except Exception as spawn_error:
|
|
732
|
+
print(f"❌ Error during spawn: {spawn_error}")
|
|
733
|
+
raise
|
|
734
|
+
|
|
735
|
+
try:
|
|
736
|
+
# Wait for the function to start and print connection info (with timeout)
|
|
737
|
+
print("⏳ Waiting for container to initialize...")
|
|
738
|
+
try:
|
|
739
|
+
print("\n⏳ Monitoring container (press Ctrl+C to stop monitoring)...")
|
|
740
|
+
result = function_call.get() # Wait indefinitely
|
|
741
|
+
print(f"🔚 Container function completed with result: {result}")
|
|
742
|
+
except KeyboardInterrupt:
|
|
743
|
+
print("\n🛑 Stopped monitoring. Container is still running remotely.")
|
|
744
|
+
print("💡 Use Modal's web UI or CLI to stop the container when done.")
|
|
745
|
+
print("🔒 Keeping tokens active since container is still running.")
|
|
746
|
+
return {
|
|
747
|
+
"app_name": app_name,
|
|
748
|
+
"ssh_password": ssh_password,
|
|
749
|
+
"volume_name": volume_name,
|
|
750
|
+
"status": "monitoring_stopped",
|
|
751
|
+
"function_call_id": function_call.object_id
|
|
752
|
+
}
|
|
753
|
+
|
|
754
|
+
except KeyboardInterrupt:
|
|
755
|
+
print("\n🛑 Interrupted by user. Container may still be running remotely.")
|
|
756
|
+
print("💡 Use Modal's web UI or CLI to check running containers.")
|
|
757
|
+
print("🔒 Keeping tokens active since container may still be running.")
|
|
758
|
+
return {
|
|
759
|
+
"app_name": app_name,
|
|
760
|
+
"ssh_password": ssh_password,
|
|
761
|
+
"volume_name": volume_name,
|
|
762
|
+
"status": "interrupted",
|
|
763
|
+
"function_call_id": function_call.object_id
|
|
764
|
+
}
|
|
765
|
+
except Exception as e:
|
|
766
|
+
print(f"⚠️ Container execution error: {e}")
|
|
767
|
+
print("💡 Container may still be accessible via SSH if it started successfully.")
|
|
768
|
+
print("🧹 Cleaning up tokens due to execution error.")
|
|
769
|
+
cleanup_modal_token()
|
|
770
|
+
raise
|
|
771
|
+
|
|
772
|
+
# Only clean up tokens if container actually completed normally
|
|
773
|
+
print("🧹 Container completed normally, cleaning up tokens.")
|
|
774
|
+
cleanup_modal_token()
|
|
775
|
+
|
|
776
|
+
return {
|
|
777
|
+
"app_name": app_name,
|
|
778
|
+
"ssh_password": ssh_password,
|
|
779
|
+
"volume_name": volume_name
|
|
780
|
+
}
|
|
781
|
+
except Exception as e:
|
|
782
|
+
print(f"❌ Error running container: {e}")
|
|
783
|
+
return None
|
|
784
|
+
|
|
785
|
+
|
|
786
|
+
def cleanup_security_tokens():
    """Delete all security tokens and API keys after SSH container is started.

    Removes the Modal tokens and LLM API keys from this process's environment
    and deletes the on-disk credential files so secrets do not outlive the
    session.  Best-effort: failures are reported on stdout, never raised.
    """
    print("🧹 Cleaning up security tokens and API keys...")

    # Drop credential environment variables.  ANTHROPIC_API_KEY is included
    # because the launcher exports it alongside OPENAI_API_KEY; pop() with a
    # default is a no-op when the variable is absent.
    env_vars = [
        "MODAL_TOKEN_ID", "MODAL_TOKEN", "MODAL_TOKEN_SECRET",
        "OPENAI_API_KEY", "ANTHROPIC_API_KEY",
    ]
    for var in env_vars:
        os.environ.pop(var, None)

    # Delete on-disk credential files independently, so a failure removing
    # one file does not prevent removal of the others.
    home_dir = os.path.expanduser("~")
    credential_files = [
        os.path.join(home_dir, ".modal.toml"),            # Modal CLI token file
        os.path.join(home_dir, ".gitarsenal", "openai_key"),  # cached OpenAI key
    ]
    for path in credential_files:
        try:
            if os.path.exists(path):
                os.remove(path)
        except Exception as e:
            print(f"❌ Error during security cleanup: {e}")
|
|
814
|
+
|
|
815
|
+
# Retained so older call sites keep working after the cleanup was broadened.
def cleanup_modal_token():
    """Backward-compatible alias; delegates to cleanup_security_tokens()."""
    cleanup_security_tokens()
|
|
819
|
+
|
|
820
|
+
|
|
821
|
+
def show_usage_examples():
    """Print CLI usage examples, grouped by topic.

    Covers authentication commands, container-creation examples, the
    available GPU types, and notes on authentication / GPU / repository
    setup behavior.  Writes to stdout and returns None.

    NOTE: the argument parser defines --repo-url (there is no --repo flag),
    so every example below uses the real flag name.
    """
    print("Usage Examples\n")

    print("🔐 Authentication Commands")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --register                    # Register new account                    │")
    print("│ gitarsenal --login                       # Login to existing account               │")
    print("│ gitarsenal --logout                      # Logout from account                     │")
    print("│ gitarsenal --user-info                   # Show current user information           │")
    print("│ gitarsenal --change-password             # Change password                         │")
    print("│ gitarsenal --delete-account              # Delete account                          │")
    print("│ gitarsenal --store-api-key openai        # Store OpenAI API key                    │")
    print("│ gitarsenal --auth                        # Interactive auth management             │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("Basic Container Creation with Agent")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --gpu A10G --repo-url https://github.com/username/repo.git              │")
    print("│   # Agent will intelligently clone and setup the repository                        │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("With Persistent Storage")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --gpu A10G --repo-url https://github.com/username/repo.git \\            │")
    print("│            --volume-name my-persistent-volume                                      │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("With Multiple GPUs")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --gpu A100-80GB --gpu-count 4 \\                                         │")
    print("│            --repo-url https://github.com/username/repo.git                         │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("Intelligent Repository Setup (default)")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --gpu A10G --repo-url https://github.com/username/repo.git              │")
    print("│   # Agent analyzes repo and sets up environment automatically                      │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("With Manual Setup Commands (Advanced)")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --gpu A10G --setup-commands \"pip install torch\" \"python train.py\"       │")
    print("│   # Only use when not providing --repo-url (bypasses Agent)                        │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("Development Mode (Skip Authentication)")
    print("┌────────────────────────────────────────────────────────────────────────────────────┐")
    print("│ gitarsenal --skip-auth --gpu A10G --repo-url https://github.com/username/repo.git  │")
    print("└────────────────────────────────────────────────────────────────────────────────────┘\n")

    print("Available GPU Options:")
    print("  T4, L4, A10G, A100-40GB, A100-80GB, L40S, H100, H200, B200")
    print("  Use --gpu-count to specify multiple GPUs (1-8)")
    print()
    print("Authentication Behavior:")
    print("  • First time: Interactive registration/login required")
    print("  • Subsequent runs: Automatic login with stored session")
    print("  • Use --skip-auth for development (bypasses auth)")
    print()
    print("GPU Selection Behavior:")
    print("  • With --gpu: Uses specified GPU without prompting")
    print("  • Without --gpu: Shows interactive GPU selection menu")
    print()
    print("Repository Setup Behavior:")
    print("  • With --repo-url: Agent intelligently clones and sets up repository")
    print("  • Without --repo-url: Manual container setup (no automatic repository setup)")
    print("  • Legacy --setup-commands: Only used when --repo-url not provided")
    print()
    print("Examples:")
    print("  # Intelligent repository setup (recommended):")
    print("  gitarsenal --gpu A10G --repo-url https://github.com/username/repo.git")
    print()
    print("  # Development mode (skip authentication):")
    print("  gitarsenal --skip-auth --gpu A10G --repo-url https://github.com/username/repo.git")
    print()
    print("  # Manual setup (advanced users):")
    print("  gitarsenal --gpu A10G --setup-commands \"pip install torch\" \"python train.py\"")
|
|
899
|
+
|
|
900
|
+
|
|
901
|
+
def _check_authentication(auth_manager):
|
|
902
|
+
"""Check if user is authenticated, prompt for login if not"""
|
|
903
|
+
if auth_manager.is_authenticated():
|
|
904
|
+
user = auth_manager.get_current_user()
|
|
905
|
+
print(f"✅ Authenticated as: {user['username']}")
|
|
906
|
+
return True
|
|
907
|
+
|
|
908
|
+
print("\n🔐 Authentication required")
|
|
909
|
+
return auth_manager.interactive_auth_flow()
|
|
910
|
+
|
|
911
|
+
|
|
912
|
+
def _handle_auth_commands(auth_manager, args):
    """Handle authentication-related commands.

    Dispatches exactly one of the auth flags on ``args`` (login, register,
    logout, user_info, change_password, delete_account, store_api_key, auth)
    per call. Credentials are read interactively via input()/getpass;
    outcomes are reported on stdout. Returns None.
    """
    # --- one-shot commands driven by a single CLI flag -------------------
    if args.login:
        print("\n🔐 LOGIN")
        username = input("Username: ").strip()
        password = getpass.getpass("Password: ").strip()
        if auth_manager.login_user(username, password):
            print("✅ Login successful!")
        else:
            print("❌ Login failed.")

    elif args.register:
        print("\n🔐 REGISTRATION")
        username = input("Username (min 3 characters): ").strip()
        email = input("Email: ").strip()
        password = getpass.getpass("Password (min 8 characters): ").strip()
        confirm_password = getpass.getpass("Confirm password: ").strip()

        if password != confirm_password:
            print("❌ Passwords do not match.")
            return

        if auth_manager.register_user(username, email, password):
            print("✅ Registration successful!")
            # Auto-login after registration
            if auth_manager.login_user(username, password):
                print("✅ Auto-login successful!")
        else:
            print("❌ Registration failed.")

    elif args.logout:
        auth_manager.logout_user()

    elif args.user_info:
        auth_manager.show_user_info()

    elif args.change_password:
        # Requires an active session before prompting for passwords.
        if not auth_manager.is_authenticated():
            print("❌ Not logged in. Please login first.")
            return

        current_password = getpass.getpass("Current password: ").strip()
        new_password = getpass.getpass("New password (min 8 characters): ").strip()
        confirm_password = getpass.getpass("Confirm new password: ").strip()

        if new_password != confirm_password:
            print("❌ New passwords do not match.")
            return

        if auth_manager.change_password(current_password, new_password):
            print("✅ Password changed successfully!")
        else:
            print("❌ Failed to change password.")

    elif args.delete_account:
        # Destructive: the user must re-enter their password to confirm.
        if not auth_manager.is_authenticated():
            print("❌ Not logged in. Please login first.")
            return

        password = getpass.getpass("Enter your password to confirm deletion: ").strip()
        if auth_manager.delete_account(password):
            print("✅ Account deleted successfully!")
        else:
            print("❌ Failed to delete account.")

    elif args.store_api_key:
        # args.store_api_key carries the service name (e.g. "openai").
        if not auth_manager.is_authenticated():
            print("❌ Not logged in. Please login first.")
            return

        service = args.store_api_key
        api_key = getpass.getpass(f"Enter {service} API key: ").strip()

        if auth_manager.store_api_key(service, api_key):
            print(f"✅ {service} API key stored successfully!")
        else:
            print(f"❌ Failed to store {service} API key.")

    # --- interactive menu: loops until the user picks "8. Exit" ----------
    elif args.auth:
        # Interactive authentication management
        while True:
            print("\n" + "="*60)
            print("🔐 AUTHENTICATION MANAGEMENT")
            print("="*60)
            print("1. Login")
            print("2. Register")
            print("3. Show user info")
            print("4. Change password")
            print("5. Store API key")
            print("6. Delete account")
            print("7. Logout")
            print("8. Exit")

            choice = input("\nSelect an option (1-8): ").strip()

            if choice == "1":
                username = input("Username: ").strip()
                password = getpass.getpass("Password: ").strip()
                auth_manager.login_user(username, password)
            elif choice == "2":
                username = input("Username (min 3 characters): ").strip()
                email = input("Email: ").strip()
                password = getpass.getpass("Password (min 8 characters): ").strip()
                confirm_password = getpass.getpass("Confirm password: ").strip()
                if password == confirm_password:
                    auth_manager.register_user(username, email, password)
                else:
                    print("❌ Passwords do not match.")
            elif choice == "3":
                auth_manager.show_user_info()
            elif choice == "4":
                if auth_manager.is_authenticated():
                    current_password = getpass.getpass("Current password: ").strip()
                    new_password = getpass.getpass("New password (min 8 characters): ").strip()
                    confirm_password = getpass.getpass("Confirm new password: ").strip()
                    if new_password == confirm_password:
                        auth_manager.change_password(current_password, new_password)
                    else:
                        print("❌ New passwords do not match.")
                else:
                    print("❌ Not logged in.")
            elif choice == "5":
                if auth_manager.is_authenticated():
                    service = input("Service name (e.g., openai, modal): ").strip()
                    api_key = getpass.getpass(f"Enter {service} API key: ").strip()
                    auth_manager.store_api_key(service, api_key)
                else:
                    print("❌ Not logged in.")
            elif choice == "6":
                if auth_manager.is_authenticated():
                    password = getpass.getpass("Enter your password to confirm deletion: ").strip()
                    auth_manager.delete_account(password)
                else:
                    print("❌ Not logged in.")
            elif choice == "7":
                auth_manager.logout_user()
            elif choice == "8":
                print("👋 Goodbye!")
                break
            else:
                print("❌ Invalid option. Please try again.")
|
|
1053
|
+
|
|
1054
|
+
# Replace the existing GPU argument parsing in the main section
|
|
1055
|
+
if __name__ == "__main__":
|
|
1056
|
+
# Parse command line arguments when script is run directly
|
|
1057
|
+
import argparse
|
|
1058
|
+
import sys
|
|
1059
|
+
|
|
1060
|
+
parser = argparse.ArgumentParser()
|
|
1061
|
+
parser.add_argument('--repo-name', type=str, help='Repository name override')
|
|
1062
|
+
parser.add_argument('--setup-commands', type=str, nargs='+', help='Setup commands to run (deprecated)')
|
|
1063
|
+
parser.add_argument('--setup-commands-json', type=str, help='Setup commands as JSON array')
|
|
1064
|
+
parser.add_argument('--commands-file', type=str, help='Path to file containing setup commands (one per line)')
|
|
1065
|
+
parser.add_argument('--setup-script', type=str, help='Path to bash script containing setup commands')
|
|
1066
|
+
parser.add_argument('--working-dir', type=str, help='Working directory for the setup script')
|
|
1067
|
+
parser.add_argument('--volume-name', type=str, help='Name of the Modal volume for persistent storage')
|
|
1068
|
+
parser.add_argument('--timeout', type=int, default=60, help='Container timeout in minutes (default: 60)')
|
|
1069
|
+
parser.add_argument('--ssh-password', type=str, help='SSH password (random if not provided)')
|
|
1070
|
+
parser.add_argument('--show-examples', action='store_true', help='Show usage examples')
|
|
1071
|
+
parser.add_argument('--list-gpus', action='store_true', help='List available GPU types with their specifications')
|
|
1072
|
+
parser.add_argument('--interactive', action='store_true', help='Run in interactive mode with prompts')
|
|
1073
|
+
parser.add_argument('--yes', action='store_true', help='Automatically confirm prompts (non-interactive)')
|
|
1074
|
+
|
|
1075
|
+
parser.add_argument('--gpu', default='A10G', help='GPU type to use')
|
|
1076
|
+
parser.add_argument('--gpu-count', type=int, default=None, help='Number of GPUs to use (default: 1)')
|
|
1077
|
+
parser.add_argument('--repo-url', help='Repository URL')
|
|
1078
|
+
|
|
1079
|
+
# Authentication-related arguments
|
|
1080
|
+
parser.add_argument('--auth', action='store_true', help='Manage authentication (login, register, logout)')
|
|
1081
|
+
parser.add_argument('--login', action='store_true', help='Login to GitArsenal')
|
|
1082
|
+
parser.add_argument('--register', action='store_true', help='Register new account')
|
|
1083
|
+
parser.add_argument('--logout', action='store_true', help='Logout from GitArsenal')
|
|
1084
|
+
parser.add_argument('--user-info', action='store_true', help='Show current user information')
|
|
1085
|
+
parser.add_argument('--change-password', action='store_true', help='Change password')
|
|
1086
|
+
parser.add_argument('--delete-account', action='store_true', help='Delete account')
|
|
1087
|
+
parser.add_argument('--store-api-key', type=str, help='Store API key for a service (e.g., openai, modal)')
|
|
1088
|
+
parser.add_argument('--skip-auth', action='store_true', help='Skip authentication check (for development)')
|
|
1089
|
+
|
|
1090
|
+
# User credential arguments (passed from JavaScript CLI)
|
|
1091
|
+
parser.add_argument('--user-id', type=str, help='User email address (passed from JavaScript CLI)')
|
|
1092
|
+
parser.add_argument('--user-name', type=str, help='Username (passed from JavaScript CLI)')
|
|
1093
|
+
parser.add_argument('--display-name', type=str, help='Display name (passed from JavaScript CLI)')
|
|
1094
|
+
|
|
1095
|
+
# Analysis data argument (passed from JavaScript CLI)
|
|
1096
|
+
parser.add_argument('--analysis-data', type=str, help='Repository analysis data as JSON string (from best_gpu endpoint)')
|
|
1097
|
+
|
|
1098
|
+
args = parser.parse_args()
|
|
1099
|
+
|
|
1100
|
+
# Initialize tokens (import here to avoid container import issues)
|
|
1101
|
+
from fetch_modal_tokens import get_tokens
|
|
1102
|
+
token_id, token_secret, openai_api_key, anthropic_api_key, openrouter_api_key, groq_api_key = get_tokens()
|
|
1103
|
+
|
|
1104
|
+
# Check if we got valid tokens
|
|
1105
|
+
if token_id is None or token_secret is None:
|
|
1106
|
+
raise ValueError("Could not get valid tokens")
|
|
1107
|
+
|
|
1108
|
+
# Explicitly set the environment variables again to be sure
|
|
1109
|
+
os.environ["MODAL_TOKEN_ID"] = token_id
|
|
1110
|
+
os.environ["MODAL_TOKEN_SECRET"] = token_secret
|
|
1111
|
+
if openai_api_key:
|
|
1112
|
+
os.environ["OPENAI_API_KEY"] = openai_api_key
|
|
1113
|
+
if anthropic_api_key:
|
|
1114
|
+
os.environ["ANTHROPIC_API_KEY"] = anthropic_api_key
|
|
1115
|
+
# Also set the old environment variable for backward compatibility
|
|
1116
|
+
os.environ["MODAL_TOKEN"] = token_id
|
|
1117
|
+
|
|
1118
|
+
# Set token variables for later use
|
|
1119
|
+
token = token_id # For backward compatibility
|
|
1120
|
+
|
|
1121
|
+
# Initialize authentication manager (import here to avoid container import issues)
|
|
1122
|
+
from auth_manager import AuthManager
|
|
1123
|
+
auth_manager = AuthManager()
|
|
1124
|
+
|
|
1125
|
+
# Handle authentication-related commands
|
|
1126
|
+
if args.auth or args.login or args.register or args.logout or args.user_info or args.change_password or args.delete_account or args.store_api_key:
|
|
1127
|
+
_handle_auth_commands(auth_manager, args)
|
|
1128
|
+
sys.exit(0)
|
|
1129
|
+
|
|
1130
|
+
# If --list-gpus is specified, just show GPU options and exit
|
|
1131
|
+
if args.list_gpus:
|
|
1132
|
+
print("\n📊 Available GPU Options:")
|
|
1133
|
+
print("┌──────────────┬─────────┐")
|
|
1134
|
+
print("│ GPU Type │ VRAM │")
|
|
1135
|
+
print("├──────────────┼─────────┤")
|
|
1136
|
+
print("│ 1. T4 │ 16GB │")
|
|
1137
|
+
print("│ 2. L4 │ 24GB │")
|
|
1138
|
+
print("│ 3. A10G │ 24GB │")
|
|
1139
|
+
print("│ 4. A100-40 │ 40GB │")
|
|
1140
|
+
print("│ 5. A100-80 │ 80GB │")
|
|
1141
|
+
print("│ 6. L40S │ 48GB │")
|
|
1142
|
+
print("│ 7. H100 │ 80GB │")
|
|
1143
|
+
print("│ 8. H200 │ 141GB │")
|
|
1144
|
+
print("│ 9. B200 │ 141GB │")
|
|
1145
|
+
print("└──────────────┴─────────┘")
|
|
1146
|
+
print("Use --gpu <type> to specify a GPU type")
|
|
1147
|
+
print("Use --gpu-count <number> to specify multiple GPUs (1-8)")
|
|
1148
|
+
print("\nExample: --gpu A100-80GB --gpu-count 4 (for 4x A100-80GB GPUs)")
|
|
1149
|
+
sys.exit(0)
|
|
1150
|
+
|
|
1151
|
+
# If no arguments or only --show-examples is provided, show usage examples
|
|
1152
|
+
if len(sys.argv) == 1 or args.show_examples:
|
|
1153
|
+
show_usage_examples()
|
|
1154
|
+
sys.exit(0)
|
|
1155
|
+
|
|
1156
|
+
# Authentication is handled by the JavaScript CLI when credentials are passed
|
|
1157
|
+
if args.user_id and args.user_name and args.display_name:
|
|
1158
|
+
print(f"✅ Authenticated as: {args.display_name} ({args.user_id})")
|
|
1159
|
+
elif not args.skip_auth:
|
|
1160
|
+
# Only perform authentication check if running Python script directly (not from CLI)
|
|
1161
|
+
if not _check_authentication(auth_manager):
|
|
1162
|
+
print("\n❌ Authentication required. Please login or register first.")
|
|
1163
|
+
print("Use --login to login or --register to create an account.")
|
|
1164
|
+
sys.exit(1)
|
|
1165
|
+
|
|
1166
|
+
# Check for dependencies
|
|
1167
|
+
print("⠏ Checking dependencies...")
|
|
1168
|
+
print("--- Dependency Check ---")
|
|
1169
|
+
|
|
1170
|
+
# Check Python version
|
|
1171
|
+
python_version = sys.version.split()[0]
|
|
1172
|
+
print(f"✓ Python {python_version} found")
|
|
1173
|
+
|
|
1174
|
+
# Check Modal CLI
|
|
1175
|
+
try:
|
|
1176
|
+
subprocess.run(["modal", "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
1177
|
+
print("✓ Modal CLI found")
|
|
1178
|
+
except (subprocess.SubprocessError, FileNotFoundError):
|
|
1179
|
+
print("❌ Modal CLI not found. Please install with: pip install modal")
|
|
1180
|
+
|
|
1181
|
+
# Check Gitingest CLI
|
|
1182
|
+
try:
|
|
1183
|
+
subprocess.run(["gitingest", "--help"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
1184
|
+
print("✓ Gitingest CLI found")
|
|
1185
|
+
except (subprocess.SubprocessError, FileNotFoundError):
|
|
1186
|
+
print("⚠️ Gitingest CLI not found (optional)")
|
|
1187
|
+
|
|
1188
|
+
# Check Git
|
|
1189
|
+
try:
|
|
1190
|
+
subprocess.run(["git", "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
1191
|
+
print("✓ Git found")
|
|
1192
|
+
except (subprocess.SubprocessError, FileNotFoundError):
|
|
1193
|
+
print("❌ Git not found. Please install Git.")
|
|
1194
|
+
|
|
1195
|
+
print("------------------------")
|
|
1196
|
+
print("\n✔ Dependencies checked")
|
|
1197
|
+
|
|
1198
|
+
# Use provided GPU argument or prompt for selection
|
|
1199
|
+
if args.gpu:
|
|
1200
|
+
gpu_type = args.gpu
|
|
1201
|
+
# Validate the provided GPU type
|
|
1202
|
+
valid_gpus = ['T4', 'L4', 'A10G', 'A100-40GB', 'A100-80GB', 'L40S', 'H100', 'H200', 'B200']
|
|
1203
|
+
if gpu_type not in valid_gpus:
|
|
1204
|
+
print(f"⚠️ Warning: '{gpu_type}' is not in the list of known GPU types.")
|
|
1205
|
+
print(f"Available GPU types: {', '.join(valid_gpus)}")
|
|
1206
|
+
print(f"Proceeding with '{gpu_type}' anyway...")
|
|
1207
|
+
else:
|
|
1208
|
+
print(f"✅ Using specified GPU: {gpu_type}")
|
|
1209
|
+
else:
|
|
1210
|
+
print("\n📋 No GPU type specified with --gpu flag.")
|
|
1211
|
+
print("🔄 Using default GPU type: A10G")
|
|
1212
|
+
gpu_type = "A10G"
|
|
1213
|
+
args.gpu = gpu_type
|
|
1214
|
+
|
|
1215
|
+
# Parse analysis data if provided
|
|
1216
|
+
analysis_data = None
|
|
1217
|
+
if args.analysis_data:
|
|
1218
|
+
print("🔍 Parsing repository analysis data...")
|
|
1219
|
+
analysis_data = parse_analysis_data(args.analysis_data)
|
|
1220
|
+
if analysis_data:
|
|
1221
|
+
print("✅ Analysis data parsed successfully")
|
|
1222
|
+
else:
|
|
1223
|
+
print("⚠️ Failed to parse analysis data, proceeding without it")
|
|
1224
|
+
else:
|
|
1225
|
+
print("ℹ️ No analysis data provided")
|
|
1226
|
+
|
|
1227
|
+
# Display configuration after GPU selection
|
|
1228
|
+
print("\n📋 Container Configuration:")
|
|
1229
|
+
print(f"Repository URL: {args.repo_url or 'Not specified'}")
|
|
1230
|
+
gpu_count = args.gpu_count if args.gpu_count is not None else 1
|
|
1231
|
+
if gpu_count > 1:
|
|
1232
|
+
print(f"GPU Type: {gpu_count}x {gpu_type}")
|
|
1233
|
+
else:
|
|
1234
|
+
print(f"GPU Type: {gpu_type}")
|
|
1235
|
+
print(f"Volume: {args.volume_name or 'None'}")
|
|
1236
|
+
if args.repo_url:
|
|
1237
|
+
print("Repository Setup: Agent (intelligent)")
|
|
1238
|
+
elif args.setup_commands:
|
|
1239
|
+
print(f"Setup Commands: {len(args.setup_commands)} custom commands")
|
|
1240
|
+
else:
|
|
1241
|
+
print("Setup Commands: None")
|
|
1242
|
+
|
|
1243
|
+
# Confirm settings (skip if --yes specified)
|
|
1244
|
+
if not getattr(args, 'yes', False):
|
|
1245
|
+
try:
|
|
1246
|
+
proceed = input("Proceed with these settings? (Y/n): ").strip().lower()
|
|
1247
|
+
if proceed in ('n', 'no'):
|
|
1248
|
+
print("🛑 Operation cancelled by user.")
|
|
1249
|
+
sys.exit(0)
|
|
1250
|
+
except KeyboardInterrupt:
|
|
1251
|
+
print("\n🛑 Operation cancelled by user.")
|
|
1252
|
+
sys.exit(0)
|
|
1253
|
+
else:
|
|
1254
|
+
print("")
|
|
1255
|
+
|
|
1256
|
+
# Interactive mode or missing required arguments
|
|
1257
|
+
if args.interactive or not args.repo_url or not args.volume_name:
|
|
1258
|
+
# Get repository URL if not provided
|
|
1259
|
+
repo_url = args.repo_url
|
|
1260
|
+
if not repo_url:
|
|
1261
|
+
try:
|
|
1262
|
+
repo_url = input("? Enter GitHub repository URL: ").strip()
|
|
1263
|
+
if not repo_url:
|
|
1264
|
+
print("❌ Repository URL is required.")
|
|
1265
|
+
sys.exit(1)
|
|
1266
|
+
except KeyboardInterrupt:
|
|
1267
|
+
print("\n🛑 Setup cancelled.")
|
|
1268
|
+
sys.exit(1)
|
|
1269
|
+
|
|
1270
|
+
# Ask about persistent volume
|
|
1271
|
+
volume_name = args.volume_name
|
|
1272
|
+
if not volume_name:
|
|
1273
|
+
try:
|
|
1274
|
+
use_volume = input("? Use persistent volume for faster installs? (Y/n): ").strip().lower()
|
|
1275
|
+
if use_volume in ('', 'y', 'yes'):
|
|
1276
|
+
volume_name = input("? Enter volume name: ").strip()
|
|
1277
|
+
if not volume_name:
|
|
1278
|
+
volume_name = "gitarsenal-volume"
|
|
1279
|
+
print(f"Using default volume name: {volume_name}")
|
|
1280
|
+
except KeyboardInterrupt:
|
|
1281
|
+
print("\n🛑 Setup cancelled.")
|
|
1282
|
+
sys.exit(1)
|
|
1283
|
+
|
|
1284
|
+
# Ask about GPU count if not specified
|
|
1285
|
+
gpu_count = getattr(args, 'gpu_count', None)
|
|
1286
|
+
if gpu_count is None:
|
|
1287
|
+
try:
|
|
1288
|
+
gpu_count_input = input("? How many GPUs do you need? (1-8, default: 1): ").strip()
|
|
1289
|
+
if gpu_count_input:
|
|
1290
|
+
try:
|
|
1291
|
+
gpu_count = int(gpu_count_input)
|
|
1292
|
+
if gpu_count < 1 or gpu_count > 8:
|
|
1293
|
+
print("⚠️ GPU count must be between 1 and 8. Using default: 1")
|
|
1294
|
+
gpu_count = 1
|
|
1295
|
+
except ValueError:
|
|
1296
|
+
print("⚠️ Invalid GPU count. Using default: 1")
|
|
1297
|
+
gpu_count = 1
|
|
1298
|
+
else:
|
|
1299
|
+
# User pressed enter without input, use default
|
|
1300
|
+
gpu_count = 1
|
|
1301
|
+
except KeyboardInterrupt:
|
|
1302
|
+
print("\n🛑 Setup cancelled.")
|
|
1303
|
+
sys.exit(1)
|
|
1304
|
+
|
|
1305
|
+
# Update args with interactive values
|
|
1306
|
+
args.repo_url = repo_url
|
|
1307
|
+
args.volume_name = volume_name
|
|
1308
|
+
args.gpu_count = gpu_count
|
|
1309
|
+
|
|
1310
|
+
try:
|
|
1311
|
+
# Setup commands are no longer used when repo_url is provided (Agent handles setup)
|
|
1312
|
+
setup_commands = args.setup_commands or []
|
|
1313
|
+
|
|
1314
|
+
# Repository setup approach
|
|
1315
|
+
if args.repo_url:
|
|
1316
|
+
print("🤖 Repository setup will be handled by Agent in container")
|
|
1317
|
+
setup_commands = [] # Agent will handle setup intelligently
|
|
1318
|
+
else:
|
|
1319
|
+
print("⚠️ No repository URL provided - setup commands may be needed manually")
|
|
1320
|
+
|
|
1321
|
+
# Parse setup commands from JSON if provided
|
|
1322
|
+
if args.setup_commands_json:
|
|
1323
|
+
try:
|
|
1324
|
+
json_commands = json.loads(args.setup_commands_json)
|
|
1325
|
+
if isinstance(json_commands, list):
|
|
1326
|
+
setup_commands = json_commands
|
|
1327
|
+
print(f"📋 Parsed {len(setup_commands)} commands from JSON:")
|
|
1328
|
+
for i, cmd in enumerate(setup_commands, 1):
|
|
1329
|
+
print(f" {i}. {cmd}")
|
|
1330
|
+
else:
|
|
1331
|
+
print(f"⚠️ Invalid JSON format for setup commands: not a list")
|
|
1332
|
+
except json.JSONDecodeError as e:
|
|
1333
|
+
print(f"⚠️ Error parsing JSON setup commands: {e}")
|
|
1334
|
+
print(f"Received JSON string: {args.setup_commands_json}")
|
|
1335
|
+
|
|
1336
|
+
# Load commands from file if specified
|
|
1337
|
+
if args.commands_file and os.path.exists(args.commands_file):
|
|
1338
|
+
try:
|
|
1339
|
+
with open(args.commands_file, 'r') as f:
|
|
1340
|
+
# Check if the file contains JSON or line-by-line commands
|
|
1341
|
+
content = f.read().strip()
|
|
1342
|
+
|
|
1343
|
+
if content.startswith('[') and content.endswith(']'):
|
|
1344
|
+
# JSON format
|
|
1345
|
+
try:
|
|
1346
|
+
json_commands = json.loads(content)
|
|
1347
|
+
if isinstance(json_commands, list):
|
|
1348
|
+
setup_commands.extend(json_commands)
|
|
1349
|
+
print(f"📋 Loaded {len(json_commands)} commands from JSON file {args.commands_file}")
|
|
1350
|
+
else:
|
|
1351
|
+
print(f"⚠️ Invalid JSON format in commands file: not a list")
|
|
1352
|
+
except json.JSONDecodeError as json_err:
|
|
1353
|
+
print(f"⚠️ Error parsing JSON commands file: {json_err}")
|
|
1354
|
+
# Fall back to line-by-line parsing
|
|
1355
|
+
file_commands = [line.strip() for line in content.split('\n') if line.strip()]
|
|
1356
|
+
setup_commands.extend(file_commands)
|
|
1357
|
+
print(f"📋 Loaded {len(file_commands)} commands from file (line-by-line fallback)")
|
|
1358
|
+
else:
|
|
1359
|
+
# Line-by-line format
|
|
1360
|
+
file_commands = [line.strip() for line in content.split('\n') if line.strip()]
|
|
1361
|
+
setup_commands.extend(file_commands)
|
|
1362
|
+
print(f"📋 Loaded {len(file_commands)} commands from file (line-by-line format)")
|
|
1363
|
+
except Exception as e:
|
|
1364
|
+
print(f"⚠️ Error loading commands from file: {e}")
|
|
1365
|
+
|
|
1366
|
+
# Load commands from setup script if specified
|
|
1367
|
+
if args.setup_script and os.path.exists(args.setup_script):
|
|
1368
|
+
try:
|
|
1369
|
+
with open(args.setup_script, 'r') as f:
|
|
1370
|
+
script_content = f.read().strip()
|
|
1371
|
+
# Convert script to individual commands
|
|
1372
|
+
script_commands = [line.strip() for line in script_content.split('\n')
|
|
1373
|
+
if line.strip() and not line.strip().startswith('#')]
|
|
1374
|
+
setup_commands.extend(script_commands)
|
|
1375
|
+
print(f"📋 Loaded {len(script_commands)} commands from script {args.setup_script}")
|
|
1376
|
+
except Exception as e:
|
|
1377
|
+
print(f"⚠️ Error loading commands from script: {e}")
|
|
1378
|
+
|
|
1379
|
+
# Create the container with the specified options
|
|
1380
|
+
if args.ssh_password:
|
|
1381
|
+
print(f"🔑 Using provided SSH password")
|
|
1382
|
+
ssh_password = args.ssh_password
|
|
1383
|
+
else:
|
|
1384
|
+
ssh_password = generate_random_password()
|
|
1385
|
+
print(f"🔑 Generated random SSH password: {ssh_password}")
|
|
1386
|
+
|
|
1387
|
+
# Extract repository name from URL if not provided
|
|
1388
|
+
repo_name = args.repo_name
|
|
1389
|
+
if not repo_name and args.repo_url:
|
|
1390
|
+
# Try to extract repo name from URL
|
|
1391
|
+
url_parts = args.repo_url.rstrip('/').split('/')
|
|
1392
|
+
if url_parts:
|
|
1393
|
+
repo_name = url_parts[-1]
|
|
1394
|
+
if repo_name.endswith('.git'):
|
|
1395
|
+
repo_name = repo_name[:-4]
|
|
1396
|
+
|
|
1397
|
+
# Create the container
|
|
1398
|
+
result = create_modal_ssh_container(
|
|
1399
|
+
gpu_type=args.gpu,
|
|
1400
|
+
repo_url=args.repo_url,
|
|
1401
|
+
repo_name=repo_name,
|
|
1402
|
+
setup_commands=setup_commands,
|
|
1403
|
+
volume_name=args.volume_name,
|
|
1404
|
+
timeout_minutes=args.timeout,
|
|
1405
|
+
ssh_password=ssh_password,
|
|
1406
|
+
interactive=args.interactive,
|
|
1407
|
+
gpu_count=args.gpu_count if args.gpu_count is not None else 1,
|
|
1408
|
+
analysis_data=analysis_data, # Pass parsed analysis_data instead of raw args.analysis_data
|
|
1409
|
+
)
|
|
1410
|
+
|
|
1411
|
+
if result:
|
|
1412
|
+
print(f"\n✅ Container operation completed: {result.get('status', 'success')}")
|
|
1413
|
+
if result.get('function_call_id'):
|
|
1414
|
+
print(f"🆔 Function Call ID: {result['function_call_id']}")
|
|
1415
|
+
print("💡 You can use this ID to check container status via Modal CLI")
|
|
1416
|
+
else:
|
|
1417
|
+
print("\n❌ Container creation failed")
|
|
1418
|
+
|
|
1419
|
+
except KeyboardInterrupt:
|
|
1420
|
+
print("\n🛑 Operation cancelled by user")
|
|
1421
|
+
cleanup_modal_token()
|
|
1422
|
+
sys.exit(1)
|
|
1423
|
+
except Exception as e:
|
|
1424
|
+
print(f"\n❌ Unexpected error: {e}")
|
|
1425
|
+
print("📋 Error details:")
|
|
1426
|
+
import traceback
|
|
1427
|
+
traceback.print_exc()
|
|
1428
|
+
cleanup_modal_token()
|
|
1429
|
+
sys.exit(1)
|