gitarsenal-cli 1.9.80 → 1.9.82

package/.venv_status.json CHANGED
@@ -1 +1 @@
-{"created":"2025-08-18T04:28:55.195Z","packages":["modal","gitingest","requests","anthropic"],"uv_version":"uv 0.8.4 (Homebrew 2025-07-30)"}
+{"created":"2025-08-18T07:17:19.626Z","packages":["modal","gitingest","requests","anthropic"],"uv_version":"uv 0.8.4 (Homebrew 2025-07-30)"}
package/bin/gitarsenal.js CHANGED
@@ -89,7 +89,7 @@ function activateVirtualEnvironment() {
   process.env.PYTHON_EXECUTABLE = pythonPath;
   process.env.PIP_EXECUTABLE = pipPath;
 
-  console.log(chalk.green('✅ Virtual environment activated successfully'));
+  // console.log(chalk.green('✅ Virtual environment activated successfully'));
 
   return true;
 }
@@ -629,7 +629,7 @@ async function fetchFullSetupAndRecs(repoUrl, storedCredentials = null) {
 
   for (const url of endpoints) {
     try {
-      spinner.text = `Analyzing repository: ${url}`;
+      spinner.text = `Analyzing repository...`;
       const res = await fetch(url, {
         method: 'POST',
         headers: { 'Content-Type': 'application/json', 'User-Agent': 'GitArsenal-CLI/1.0' },
package/lib/sandbox.js CHANGED
@@ -34,6 +34,7 @@ function getPythonScriptPath() {
  * @param {string} options.volumeName - Volume name
  * @param {Array<string>} options.setupCommands - Setup commands
  * @param {boolean} options.showExamples - Whether to show usage examples
+ * @param {Object} options.analysisData - Repository analysis data from best_gpu endpoint
  * @returns {Promise<void>}
  */
 async function runContainer(options) {
@@ -47,7 +48,8 @@ async function runContainer(options) {
     yes = false,
     userId,
     userName,
-    userEmail
+    userEmail,
+    analysisData
   } = options;
 
   // Get the path to the Python script
@@ -116,6 +118,12 @@ async function runContainer(options) {
     args.push('--display-name', userName);
     // console.log(chalk.gray(`🔍 Debug: Passing user credentials to Python script`));
   }
+
+  // Add analysis data if provided
+  if (analysisData) {
+    args.push('--analysis-data', JSON.stringify(analysisData));
+    console.log(chalk.gray(`🔍 Debug: Passing analysis data to Python script`));
+  }
 
   // Handle manual setup commands if provided
   if (setupCommands.length > 0) {
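
Note: the contract introduced here is just a JSON string handed across the process boundary. Below is a minimal sketch of the round trip with a hypothetical cudaRecommendation payload; lib/sandbox.js serializes options.analysisData with JSON.stringify and the Python entry point re-parses it from --analysis-data.

import argparse
import json

# Hypothetical payload; the real object comes from the best_gpu analysis step.
payload = {"cudaRecommendation": {"recommendedCudaVersion": "12.1", "dockerImage": ""}}

parser = argparse.ArgumentParser()
parser.add_argument('--analysis-data', type=str)

# Equivalent of args.push('--analysis-data', JSON.stringify(analysisData)) on the JS side.
args = parser.parse_args(['--analysis-data', json.dumps(payload)])
assert json.loads(args.analysis_data) == payload
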
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "gitarsenal-cli",
-  "version": "1.9.80",
+  "version": "1.9.82",
   "description": "CLI tool for creating Modal sandboxes with GitHub repositories",
   "main": "index.js",
   "bin": {
@@ -38,6 +38,154 @@ def get_stored_credentials():
     return {}
 
 
+def parse_analysis_data(analysis_data_json):
+    """Parse analysis data JSON string and return structured data"""
+    if not analysis_data_json:
+        return None
+
+    try:
+        import json
+        analysis_data = json.loads(analysis_data_json)
+        return analysis_data
+    except json.JSONDecodeError as e:
+        print(f"⚠️ Error parsing analysis data: {e}")
+        return None
+    except Exception as e:
+        print(f"⚠️ Unexpected error parsing analysis data: {e}")
+        return None
+
+
+def select_base_image_from_analysis(analysis_data):
+    """Select the appropriate Docker base image based on CUDA recommendations from analysis"""
+    # Default fallback image
+    default_image = "nvidia/cuda:12.4.1-devel-ubuntu22.04"
+    default_python = "3.11"
+
+    if not analysis_data:
+        print("🐳 No analysis data available, using default CUDA image")
+        return default_image, default_python
+
+    # Extract CUDA recommendation
+    cuda_rec = analysis_data.get('cudaRecommendation', {})
+    if not cuda_rec:
+        print("🐳 No CUDA recommendation found, using default CUDA image")
+        return default_image, default_python
+
+    recommended_cuda = cuda_rec.get('recommendedCudaVersion', '')
+    docker_image = cuda_rec.get('dockerImage', '')
+
+    print(f"🔍 CUDA Analysis Results:")
+    print(f" - Recommended CUDA Version: {recommended_cuda}")
+    print(f" - Recommended Docker Image: {docker_image}")
+    print(f" - Full CUDA Recommendation: {cuda_rec}")
+
+    # If a specific docker image is recommended, use it
+    if docker_image:
+        print(f"🔍 Validating recommended Docker image: {docker_image}")
+        # Validate that the recommended Docker image follows expected patterns
+        is_valid_image = False
+
+        # Check if it's a pytorch image or other specialized image
+        if 'pytorch' in docker_image.lower():
+            print(f"🐳 Using PyTorch-optimized image: {docker_image}")
+            return docker_image, "3.11"  # PyTorch images usually have Python pre-installed
+        elif 'tensorflow' in docker_image.lower():
+            print(f"🐳 Using TensorFlow-optimized image: {docker_image}")
+            return docker_image, "3.11"
+        elif 'nvidia/cuda' in docker_image.lower():
+            # Instead of regex validation, use a whitelist of known good images
+            # This prevents using images that match the pattern but don't exist
+            known_good_images = {
+                # CUDA 12.x images
+                'nvidia/cuda:12.4.1-devel-ubuntu22.04',
+                'nvidia/cuda:12.4.1-runtime-ubuntu22.04',
+                'nvidia/cuda:12.3.2-devel-ubuntu22.04',
+                'nvidia/cuda:12.3.2-runtime-ubuntu22.04',
+                'nvidia/cuda:12.2.2-devel-ubuntu22.04',
+                'nvidia/cuda:12.2.2-runtime-ubuntu22.04',
+                'nvidia/cuda:12.1.0-devel-ubuntu22.04',
+                'nvidia/cuda:12.1.0-runtime-ubuntu22.04',
+                'nvidia/cuda:12.0.1-devel-ubuntu22.04',
+                # CUDA 11.x images
+                'nvidia/cuda:11.8.0-devel-ubuntu22.04',
+                'nvidia/cuda:11.8.0-runtime-ubuntu22.04',
+                'nvidia/cuda:11.8.0-cudnn8-devel-ubuntu22.04',
+                'nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04',
+                'nvidia/cuda:11.7.1-devel-ubuntu22.04',
+                'nvidia/cuda:11.7.1-runtime-ubuntu22.04',
+                'nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04',
+                'nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04',
+                'nvidia/cuda:11.6.2-devel-ubuntu20.04',
+                'nvidia/cuda:11.6.2-runtime-ubuntu20.04',
+            }
+
+            if docker_image.lower() in known_good_images:
+                print(f"🐳 Using verified NVIDIA CUDA image: {docker_image}")
+                return docker_image, "3.11"
+            else:
+                print(f"⚠️ NVIDIA CUDA image not in whitelist: {docker_image}")
+                print(f"🔄 Falling back to CUDA version mapping...")
+                # Continue to version mapping below
+        else:
+            print(f"🐳 Using custom recommended image: {docker_image}")
+            return docker_image, "3.11"
+
+    # Map CUDA versions to appropriate base images
+    cuda_image_mapping = {
+        '12.4': 'nvidia/cuda:12.4.1-devel-ubuntu22.04',
+        '12.3': 'nvidia/cuda:12.3.2-devel-ubuntu22.04',
+        '12.2': 'nvidia/cuda:12.2.2-devel-ubuntu22.04',
+        '12.1': 'nvidia/cuda:12.1.0-devel-ubuntu22.04',
+        '12.0': 'nvidia/cuda:12.0.1-devel-ubuntu22.04',
+        '11.8': 'nvidia/cuda:11.8.0-devel-ubuntu22.04',
+        '11.7': 'nvidia/cuda:11.7.1-devel-ubuntu22.04',
+        '11.6': 'nvidia/cuda:11.6.2-devel-ubuntu20.04',
+        # Add some runtime variants for better compatibility
+        '11.8-runtime': 'nvidia/cuda:11.8.0-runtime-ubuntu22.04',
+        '11.7-runtime': 'nvidia/cuda:11.7.1-runtime-ubuntu22.04',
+        '12.1-runtime': 'nvidia/cuda:12.1.0-runtime-ubuntu22.04',
+    }
+
+    # Extract major.minor version from recommended CUDA version
+    if recommended_cuda:
+        # Handle versions like "12.4", "CUDA 12.4", "12.4.0", "11.8-runtime", etc.
+        import re
+        version_match = re.search(r'(\d+\.\d+)', recommended_cuda)
+        if version_match:
+            cuda_version = version_match.group(1)
+
+            # First try exact match
+            if cuda_version in cuda_image_mapping:
+                selected_image = cuda_image_mapping[cuda_version]
+                print(f"🐳 Selected CUDA {cuda_version} image: {selected_image}")
+                return selected_image, "3.11"
+
+            # Try with -runtime suffix if original recommended image was runtime
+            if 'runtime' in docker_image.lower() or 'runtime' in recommended_cuda.lower():
+                runtime_key = f"{cuda_version}-runtime"
+                if runtime_key in cuda_image_mapping:
+                    selected_image = cuda_image_mapping[runtime_key]
+                    print(f"🐳 Selected CUDA {cuda_version} runtime image: {selected_image}")
+                    return selected_image, "3.11"
+
+            # If no exact match, try to find the closest version
+            available_versions = [v for v in cuda_image_mapping.keys() if not v.endswith('-runtime')]
+            available_versions.sort(reverse=True)  # Sort descending to prefer newer versions
+
+            for available_version in available_versions:
+                if available_version.startswith(cuda_version.split('.')[0]):  # Same major version
+                    selected_image = cuda_image_mapping[available_version]
+                    print(f"🐳 Selected closest CUDA {available_version} image for requested {cuda_version}: {selected_image}")
+                    return selected_image, "3.11"
+
+            print(f"⚠️ CUDA version {cuda_version} not in mapping, using default")
+        else:
+            print(f"⚠️ Could not parse CUDA version from: {recommended_cuda}")
+
+    print(f"🐳 Using default CUDA image: {default_image}")
+    return default_image, default_python
+
+
 # Global SSH container function (must be at global scope for Modal)
 def ssh_container_function(ssh_password=None, repo_url=None, repo_name=None, setup_commands=None, openai_api_key=None, anthropic_api_key=None, stored_credentials=None):
     """Start SSH container with password authentication and intelligent repository setup using Agent."""
@@ -312,11 +460,13 @@ def ssh_container_function(ssh_password=None, repo_url=None, repo_name=None, set
 
 # Create Modal SSH container with GPU support and intelligent repository setup using Agent
 def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_commands=None,
-                               volume_name=None, timeout_minutes=60, ssh_password=None, interactive=False, gpu_count=1):
+                               volume_name=None, timeout_minutes=60, ssh_password=None, interactive=False, gpu_count=1, analysis_data=None):
     """Create a Modal SSH container with GPU support and intelligent repository setup.
 
     When repo_url is provided, uses Agent for intelligent repository setup.
     The setup_commands parameter is maintained for backwards compatibility but ignored when using Agent.
+    Args:
+        analysis_data: Repository analysis data from best_gpu endpoint for dynamic image selection
     """
 
     # Use interactive mode if specified
@@ -468,14 +618,29 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
     # Get the gitarsenal-cli root directory for kill_claude files
     gitarsenal_root = os.path.dirname(current_dir)
 
-    # Choose base image to avoid CUDA segfault issues
-    print("⚠️ Using CUDA base image - this may cause segfaults on some systems")
-    base_image = modal.Image.from_registry("nvidia/cuda:12.4.0-devel-ubuntu22.04", add_python="3.11")
-    # base_image = modal.Image.debian_slim()
+    # Select base image dynamically based on analysis data
+    base_image_name, python_version = select_base_image_from_analysis(analysis_data)
+    print(f"🐳 Building image with: {base_image_name} (Python {python_version})")
+
+    try:
+        base_image = modal.Image.from_registry(base_image_name, add_python=python_version)
+    except Exception as e:
+        print(f"⚠️ Failed to load recommended image {base_image_name}: {e}")
+        print("🐳 Falling back to default CUDA image")
+        base_image = modal.Image.from_registry("nvidia/cuda:12.4.1-devel-ubuntu22.04", add_python="3.11")
 
     # Build the SSH image with the chosen base
     ssh_image = (
         base_image
+        # Set timezone and debconf to non-interactive mode to prevent prompts
+        .env({"DEBIAN_FRONTEND": "noninteractive", "TZ": "UTC"})
+        .run_commands(
+            # Configure timezone and package management non-interactively
+            "ln -snf /usr/share/zoneinfo/UTC /etc/localtime",
+            "echo UTC > /etc/timezone",
+            # Configure dpkg to avoid interactive prompts
+            "echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections"
+        )
         .apt_install(
             "openssh-server", "sudo", "curl", "wget", "vim", "htop", "git",
             "python3", "python3-pip"
@@ -907,6 +1072,9 @@ if __name__ == "__main__":
     parser.add_argument('--user-name', type=str, help='Username (passed from JavaScript CLI)')
     parser.add_argument('--display-name', type=str, help='Display name (passed from JavaScript CLI)')
 
+    # Analysis data argument (passed from JavaScript CLI)
+    parser.add_argument('--analysis-data', type=str, help='Repository analysis data as JSON string (from best_gpu endpoint)')
+
     args = parser.parse_args()
 
     # Initialize tokens (import here to avoid container import issues)
@@ -1024,6 +1192,18 @@ if __name__ == "__main__":
         gpu_type = "A10G"
         args.gpu = gpu_type
 
+    # Parse analysis data if provided
+    analysis_data = None
+    if args.analysis_data:
+        print("🔍 Parsing repository analysis data...")
+        analysis_data = parse_analysis_data(args.analysis_data)
+        if analysis_data:
+            print("✅ Analysis data parsed successfully")
+        else:
+            print("⚠️ Failed to parse analysis data, proceeding without it")
+    else:
+        print("ℹ️ No analysis data provided")
+
     # Display configuration after GPU selection
     print("\n📋 Container Configuration:")
     print(f"Repository URL: {args.repo_url or 'Not specified'}")
@@ -1202,6 +1382,7 @@ if __name__ == "__main__":
         ssh_password=ssh_password,
         interactive=args.interactive,
         gpu_count=getattr(args, 'gpu_count', 1),
+        analysis_data=analysis_data,  # Pass parsed analysis_data instead of raw args.analysis_data
     )
 
     if result: