gitarsenal-cli 1.9.56 → 1.9.59
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.venv_status.json +1 -1
- package/bin/gitarsenal.js +15 -115
- package/config.json +5 -0
- package/package.json +1 -1
- package/python/test_modalSandboxScript.py +47 -290
package/.venv_status.json
CHANGED
@@ -1 +1 @@
-{"created":"2025-08-
+{"created":"2025-08-13T05:23:55.166Z","packages":["modal","gitingest","requests","anthropic"],"uv_version":"uv 0.8.4 (Homebrew 2025-07-30)"}
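The status file is a single JSON object recording when the bundled virtual environment was created, which packages were installed into it, and the uv version used. As a point of reference, here is a minimal Node sketch of reading and sanity-checking that file before activating the environment; the function name and logging are illustrative, not code from the package:

```js
const fs = require('fs');
const path = require('path');

// Read .venv_status.json from the package root and report what it records.
// Returns the parsed status object, or null if the file is missing or corrupt.
function readVenvStatus(packageRoot) {
  const statusFile = path.join(packageRoot, '.venv_status.json');
  if (!fs.existsSync(statusFile)) return null;
  try {
    const status = JSON.parse(fs.readFileSync(statusFile, 'utf8'));
    // Fields written at install time: created (ISO timestamp), packages (array), uv_version (string).
    console.log(`venv created ${status.created} with: ${status.packages.join(', ')}`);
    return status;
  } catch (err) {
    return null;
  }
}

readVenvStatus(path.join(__dirname, '..'));
```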
package/bin/gitarsenal.js
CHANGED
@@ -22,9 +22,6 @@ function activateVirtualEnvironment() {
   const venvPath = path.join(__dirname, '..', '.venv');
   const statusFile = path.join(__dirname, '..', '.venv_status.json');
 
-  // Debug: Log the path we're looking for
-  // console.log(chalk.gray(`🔍 Looking for virtual environment at: ${venvPath}`));
-
   // Check if virtual environment exists
   if (!fs.existsSync(venvPath)) {
     console.log(chalk.red('❌ Virtual environment not found. Please reinstall the package:'));
@@ -41,7 +38,6 @@ function activateVirtualEnvironment() {
   if (fs.existsSync(statusFile)) {
     try {
      const status = JSON.parse(fs.readFileSync(statusFile, 'utf8'));
-      // console.log(chalk.gray(`📦 Packages: ${status.packages.join(', ')}`));
    } catch (error) {
      console.log(chalk.gray('✅ Virtual environment found'));
    }
@@ -103,6 +99,7 @@ function activateVirtualEnvironment() {
   return true;
 }
 
+
 // Lightweight preview of GPU/Torch/CUDA recommendations prior to GPU selection
 async function previewRecommendations(repoUrl, optsOrShowSummary = true) {
   const showSummary = typeof optsOrShowSummary === 'boolean' ? optsOrShowSummary : (optsOrShowSummary?.showSummary ?? true);
@@ -214,46 +211,6 @@ async function previewRecommendations(repoUrl, optsOrShowSummary = true) {
   }
 }
 
-function httpPostJson(urlString, body) {
-  return new Promise((resolve) => {
-    try {
-      const urlObj = new URL(urlString);
-      const data = JSON.stringify(body);
-      const options = {
-        hostname: urlObj.hostname,
-        port: urlObj.port || (urlObj.protocol === 'https:' ? 443 : 80),
-        path: urlObj.pathname,
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'Content-Length': Buffer.byteLength(data),
-          'User-Agent': 'GitArsenal-CLI/1.0'
-        }
-      };
-      const client = urlObj.protocol === 'https:' ? https : http;
-      const req = client.request(options, (res) => {
-        let responseData = '';
-        res.on('data', (chunk) => {
-          responseData += chunk;
-        });
-        res.on('end', () => {
-          try {
-            const parsed = JSON.parse(responseData);
-            resolve(parsed);
-          } catch (err) {
-            resolve(null);
-          }
-        });
-      });
-      req.on('error', () => resolve(null));
-      req.write(data);
-      req.end();
-    } catch (e) {
-      resolve(null);
-    }
-  });
-}
-
 function printGpuTorchCudaSummary(result) {
   try {
     console.log(chalk.bold('\n📊 RESULT SUMMARY (GPU/Torch/CUDA)'));
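The `httpPostJson` helper removed above hand-rolled an `http`/`https` request and resolved to `null` on any failure. The fetch-based code that appears elsewhere in this diff covers the same ground with far less plumbing; below is a minimal sketch of an equivalent POST-JSON-with-timeout helper using `fetch` and `AbortController` (Node 18+, illustrative rather than the package's actual replacement):

```js
// POST a JSON body and return the parsed response, or null on any failure,
// mirroring the "resolve(null) on error" behaviour of the removed helper.
async function postJson(url, body, timeoutMs = 30000) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    const res = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'User-Agent': 'GitArsenal-CLI/1.0' },
      body: JSON.stringify(body),
      signal: controller.signal
    });
    if (!res.ok) return null;
    return await res.json().catch(() => null);
  } catch (err) {
    return null; // network error, timeout, or abort
  } finally {
    clearTimeout(timer);
  }
}
```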
@@ -307,66 +264,6 @@ function printGpuTorchCudaSummary(result) {
   } catch {}
 }
 
-// Full fetch to get both setup commands and recommendations in one request
-async function fetchFullSetupAndRecs(repoUrl) {
-  const envUrl = process.env.GITARSENAL_API_URL;
-  const endpoints = envUrl ? [envUrl] : ['https://www.gitarsenal.dev/api/best_gpu'];
-  const payload = {
-    repoUrl,
-    gitingestData: {
-      system_info: {
-        platform: process.platform,
-        python_version: process.version,
-        detected_language: 'Unknown',
-        detected_technologies: [],
-        file_count: 0,
-        repo_stars: 0,
-        repo_forks: 0,
-        primary_package_manager: 'Unknown',
-        complexity_level: 'Unknown'
-      },
-      repository_analysis: {
-        summary: `Repository: ${repoUrl}`,
-        tree: '',
-        content_preview: ''
-      },
-      success: true
-    }
-  };
-  const timeoutMs = Number(process.env.GITARSENAL_FULL_TIMEOUT_MS || 180000);
-
-  const fetchWithTimeout = async (url, body, timeout) => {
-    const controller = new AbortController();
-    const id = setTimeout(() => controller.abort(), timeout);
-    try {
-      const res = await fetch(url, {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json', 'User-Agent': 'GitArsenal-CLI/1.0' },
-        body: JSON.stringify(body),
-        redirect: 'follow',
-        signal: controller.signal
-      });
-      clearTimeout(id);
-      return res;
-    } catch (e) {
-      clearTimeout(id);
-      throw e;
-    }
-  };
-
-  for (const url of endpoints) {
-    try {
-      const res = await fetchWithTimeout(url, payload, timeoutMs);
-      if (!res.ok) continue;
-      const data = await res.json().catch(() => null);
-      if (data) return data;
-    } catch (_e) {
-      continue;
-    }
-  }
-  return null;
-}
-
 // Helper to derive a default volume name from the repository URL
 function getDefaultVolumeName(repoUrl) {
   try {
@@ -423,13 +320,20 @@ function getDefaultVolumeName(repoUrl) {
   }
 }
 
+// Full fetch to get both setup commands and recommendations in one request
+async function fetchFullSetupAndRecs(repoUrl) {
+  // For now, just use the preview function but don't show summary to avoid duplicates
+  // The Python implementation will handle setup commands
+  return await previewRecommendations(repoUrl, { showSummary: false, hideSpinner: true });
+}
+
 // Function to send user data to web application
 async function sendUserData(userId, userName, userEmail) {
   try {
-    console.log(chalk.blue(`🔄 Attempting to register user: ${userName} (${
+    console.log(chalk.blue(`🔄 Attempting to register user: ${userName} (${userId})`));
 
     const userData = {
-      email: userEmail, // Use
+      email: userEmail, // Use userId as email (assuming it's an email)
       name: userName,
       username: userId
     };
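The rewritten `fetchFullSetupAndRecs` is now a thin wrapper: it forwards to `previewRecommendations`, whose second parameter (see the earlier hunk) accepts either a boolean or an options object. A short usage sketch of both call forms, with a hypothetical repository URL:

```js
// Boolean form: fetch recommendations and print the GPU/Torch/CUDA summary.
const full = await previewRecommendations('https://github.com/example/repo', true);

// Options form, as the new fetchFullSetupAndRecs uses it: fetch silently so the
// Python side can print the configuration once instead of duplicating the summary.
const quiet = await previewRecommendations('https://github.com/example/repo', {
  showSummary: false,
  hideSpinner: true
});
```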
@@ -453,8 +357,7 @@ async function sendUserData(userId, userName, userEmail) {
     if (process.env.GITARSENAL_WEBHOOK_URL) {
       webhookUrl = process.env.GITARSENAL_WEBHOOK_URL;
     }
-
-    // console.log(chalk.gray(`📡 Sending to: ${webhookUrl}`));
+
     console.log(chalk.gray(`📦 Data: ${data}`));
 
     const urlObj = new URL(webhookUrl);
@@ -521,11 +424,10 @@ async function collectUserCredentials(options) {
   let userName = options.userName;
   let userEmail = options.userEmail;
 
-  // Check for
+  // Check for config file first
   const os = require('os');
   const userConfigDir = path.join(os.homedir(), '.gitarsenal');
   const userConfigPath = path.join(userConfigDir, 'user-config.json');
-
   if (fs.existsSync(userConfigPath)) {
     try {
       const config = JSON.parse(fs.readFileSync(userConfigPath, 'utf8'));
@@ -639,7 +541,7 @@ async function collectUserCredentials(options) {
           if (input !== answers.password) return 'Passwords do not match';
           return true;
         }
-
+      }
     ]);
 
     userId = credentials.userId;
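The one-character fix in this hunk restores the closing brace of the confirm-password question, without which the array passed to `inquirer.prompt` is syntactically incomplete. For orientation, a minimal sketch of the surrounding prompt shape; the question names are illustrative, not copied from the CLI:

```js
const credentials = await inquirer.prompt([
  { type: 'input', name: 'userId', message: 'User ID:' },
  { type: 'password', name: 'password', message: 'Password:', mask: '*' },
  {
    type: 'password',
    name: 'confirmPassword',
    message: 'Confirm password:',
    mask: '*',
    // The second argument exposes earlier answers, so the two entries can be compared.
    validate: (input, answers) => {
      if (input !== answers.password) return 'Passwords do not match';
      return true;
    }
  } // <- the closing brace this hunk restores
]);
```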
@@ -947,7 +849,7 @@ async function runContainerCommand(options) {
       ]);
 
       if (volumeAnswers.useVolume) {
-        volumeName =
+        volumeName = getDefaultVolumeName(repoUrl);
       }
     } else if (!volumeName && skipConfirmation) {
       // If --yes flag is used and no volume specified, use default
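This hunk completes the truncated assignment so the default volume name comes from `getDefaultVolumeName(repoUrl)`, the helper described earlier as deriving a name from the repository URL. Its body is not shown in this diff; the following is a purely hypothetical sketch of such a helper, not the package's implementation:

```js
// Hypothetical: turn "https://github.com/owner/my-repo.git" into "my-repo-volume".
function getDefaultVolumeName(repoUrl) {
  try {
    const last = repoUrl.split('/').filter(Boolean).pop() || 'workspace';
    const slug = last.replace(/\.git$/, '').toLowerCase().replace(/[^a-z0-9-]+/g, '-');
    return `${slug}-volume`;
  } catch {
    return 'gitarsenal-volume';
  }
}
```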
@@ -1001,7 +903,6 @@ async function runContainerCommand(options) {
     }
 
     // Confirm settings (configuration will be shown by Python script after GPU selection)
-    // console.log(chalk.gray(`🔍 Debug: skipConfirmation = ${skipConfirmation}, options.yes = ${options.yes}`));
     if (!skipConfirmation) {
       const confirmAnswers = await inquirer.prompt([
         {
@@ -1023,7 +924,6 @@ async function runContainerCommand(options) {
 
     // Run the container
     try {
-      // console.log(chalk.gray(`🔍 Debug: skipConfirmation = ${skipConfirmation}`));
       await runContainer({
         repoUrl,
         gpuType,
@@ -1287,4 +1187,4 @@ async function handleKeysDelete(options) {
     console.error(chalk.red(`Error: ${error.message}`));
     process.exit(1);
   }
-}
+}
package/config.json
ADDED
package/package.json
CHANGED
package/python/test_modalSandboxScript.py
CHANGED
@@ -235,23 +235,9 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
 
     # Check if Modal is authenticated
     try:
-        # Print all environment variables for debugging
-        # print("🔍 DEBUG: Checking environment variables")
         modal_token_id = os.environ.get("MODAL_TOKEN_ID")
         modal_token = os.environ.get("MODAL_TOKEN")
         openai_api_key = os.environ.get("OPENAI_API_KEY")
-        # print(f"🔍 token exists: {'Yes' if modal_token_id else 'No'}")
-        # print(f"🔍 token exists: {'Yes' if modal_token else 'No'}")
-        # print(f"🔍 openai_api_key exists: {'Yes' if openai_api_key else 'No'}")
-        if modal_token_id:
-            # print(f"🔍 token length: {len(modal_token_id)}")
-            pass
-        if modal_token:
-            # print(f"🔍 token length: {len(modal_token)}")
-            pass
-        if openai_api_key:
-            # print(f"🔍 openai_api_key length: {len(openai_api_key)}")
-            pass
         # Try to access Modal token to check authentication
         try:
             # Check if token is set in environment
@@ -342,62 +328,54 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
         print("⚠️ Continuing without persistent volume")
         volume = None
 
-    # Print debug info for authentication
-    # print("🔍 Modal authentication debug info:")
     modal_token = os.environ.get("MODAL_TOKEN_ID")
-    # print(f" - token in env: {'Yes' if modal_token else 'No'}")
-    # print(f" - Token length: {len(modal_token) if modal_token else 'N/A'}")
 
     # Create SSH-enabled image
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            "gpg", "ca-certificates", "software-properties-common"
-        )
-        .uv_pip_install("uv", "modal", "requests", "openai", "anthropic", "exa-py")  # Remove problematic CUDA packages
-        .run_commands(
-            # Create SSH directory
-            "mkdir -p /var/run/sshd",
-            "mkdir -p /root/.ssh",
-            "chmod 700 /root/.ssh",
-
-            # Configure SSH server
-            "sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config",
-            "sed -i 's/#PasswordAuthentication yes/PasswordAuthentication yes/' /etc/ssh/sshd_config",
-            "sed -i 's/#PubkeyAuthentication yes/PubkeyAuthentication yes/' /etc/ssh/sshd_config",
-
-            # SSH keep-alive settings
-            "echo 'ClientAliveInterval 60' >> /etc/ssh/sshd_config",
-            "echo 'ClientAliveCountMax 3' >> /etc/ssh/sshd_config",
-
-            # Generate SSH host keys
-            "ssh-keygen -A",
-
-            # Set up a nice bash prompt
-            "echo 'export PS1=\"\\[\\e[1;32m\\]modal:\\[\\e[1;34m\\]\\w\\[\\e[0m\\]$ \"' >> /root/.bashrc",
-        )
-        .add_local_file(os.path.join(current_dir, "shell.py"), "/python/shell.py")  # Mount shell.py
-        .add_local_file(os.path.join(current_dir, "command_manager.py"), "/python/command_manager.py")  # Mount command_manager.py
-        .add_local_file(os.path.join(current_dir, "fetch_modal_tokens.py"), "/python/fetch_modal_tokens.py")  # Mount fetch_modal_token.py
-        .add_local_file(os.path.join(current_dir, "llm_debugging.py"), "/python/llm_debugging.py")  # Mount llm_debugging.py
-        .add_local_file(os.path.join(current_dir, "credentials_manager.py"), "/python/credentials_manager.py")  # Mount credentials_manager.py
-
+    print("📦 Building SSH-enabled image...")
+
+    # Get the current directory path for mounting local Python sources
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    # print(f"📁 Current directory for mounting: {current_dir}")
+
+    # Use a more stable CUDA base image and avoid problematic packages
+    ssh_image = (
+        # modal.Image.from_registry("nvidia/cuda:12.4.0-devel-ubuntu22.04", add_python="3.11")
+        modal.Image.debian_slim()
+        .apt_install(
+            "openssh-server", "sudo", "curl", "wget", "vim", "htop", "git",
+            "python3", "python3-pip", "build-essential", "tmux", "screen", "nano",
+            "gpg", "ca-certificates", "software-properties-common"
         )
-
-
-
-
+        .uv_pip_install("uv", "modal", "gitingest", "requests", "openai", "anthropic", "exa-py")  # Remove problematic CUDA packages
+        .run_commands(
+            # Create SSH directory
+            "mkdir -p /var/run/sshd",
+            "mkdir -p /root/.ssh",
+            "chmod 700 /root/.ssh",
+
+            # Configure SSH server
+            "sed -i 's/#PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config",
+            "sed -i 's/#PasswordAuthentication yes/PasswordAuthentication yes/' /etc/ssh/sshd_config",
+            "sed -i 's/#PubkeyAuthentication yes/PubkeyAuthentication yes/' /etc/ssh/sshd_config",
+
+            # SSH keep-alive settings
+            "echo 'ClientAliveInterval 60' >> /etc/ssh/sshd_config",
+            "echo 'ClientAliveCountMax 3' >> /etc/ssh/sshd_config",
+
+            # Generate SSH host keys
+            "ssh-keygen -A",
+
+            # Set up a nice bash prompt
+            "echo 'export PS1=\"\\[\\e[1;32m\\]modal:\\[\\e[1;34m\\]\\w\\[\\e[0m\\]$ \"' >> /root/.bashrc",
+        )
+        .add_local_file(os.path.join(current_dir, "shell.py"), "/python/shell.py")  # Mount shell.py
+        .add_local_file(os.path.join(current_dir, "command_manager.py"), "/python/command_manager.py")  # Mount command_manager.py
+        .add_local_file(os.path.join(current_dir, "fetch_modal_tokens.py"), "/python/fetch_modal_tokens.py")  # Mount fetch_modal_token.py
+        .add_local_file(os.path.join(current_dir, "llm_debugging.py"), "/python/llm_debugging.py")  # Mount llm_debugging.py
+        .add_local_file(os.path.join(current_dir, "credentials_manager.py"), "/python/credentials_manager.py")  # Mount credentials_manager.py
+
+    )
+    print("✅ SSH image built successfully")
 
     # Configure volumes if available
     volumes_config = {}
@@ -1106,115 +1084,7 @@ def fetch_setup_commands_from_api(repo_url):
         shutil.rmtree(temp_dir, ignore_errors=True)
 
 def generate_fallback_commands(gitingest_data):
-
-    print("\n" + "="*80)
-    print("🔄 GENERATING FALLBACK SETUP COMMANDS")
-    print("="*80)
-    print("Using basic repository analysis to generate setup commands")
-
-    # Default commands that work for most repositories
-    default_commands = [
-        "apt-get update -y",
-        "apt-get install -y git curl wget",
-        "pip install --upgrade pip setuptools wheel"
-    ]
-
-    # If we don't have any analysis data, return default commands
-    if not gitingest_data:
-        print("⚠️ No repository analysis data available. Using default commands.")
-        return default_commands
-
-    # Extract language and technologies information
-    detected_language = gitingest_data.get("system_info", {}).get("detected_language", "Unknown")
-    detected_technologies = gitingest_data.get("system_info", {}).get("detected_technologies", [])
-    primary_package_manager = gitingest_data.get("system_info", {}).get("primary_package_manager", "Unknown")
-
-    # Add language-specific commands
-    language_commands = []
-
-    print(f"🔍 Detected primary language: {detected_language}")
-    print(f"🔍 Detected technologies: {', '.join(detected_technologies) if detected_technologies else 'None'}")
-    print(f"🔍 Detected package manager: {primary_package_manager}")
-
-    # Python-specific commands
-    if detected_language == "Python" or primary_package_manager == "pip":
-        print("📦 Adding Python-specific setup commands")
-
-        # Check for requirements.txt
-        requirements_check = [
-            "if [ -f requirements.txt ]; then",
-            " echo 'Installing from requirements.txt'",
-            " pip install -r requirements.txt",
-            "elif [ -f setup.py ]; then",
-            " echo 'Installing from setup.py'",
-            " pip install -e .",
-            "fi"
-        ]
-        language_commands.extend(requirements_check)
-
-        # Add common Python packages
-        language_commands.append("pip install pytest numpy pandas matplotlib")
-
-    # JavaScript/Node.js specific commands
-    elif detected_language in ["JavaScript", "TypeScript"] or primary_package_manager in ["npm", "yarn", "pnpm"]:
-        print("📦 Adding JavaScript/Node.js-specific setup commands")
-
-        # Install Node.js if not available
-        language_commands.append("apt-get install -y nodejs npm")
-
-        # Check for package.json
-        package_json_check = [
-            "if [ -f package.json ]; then",
-            " echo 'Installing from package.json'",
-            " npm install",
-            "fi"
-        ]
-        language_commands.extend(package_json_check)
-
-    # Java specific commands
-    elif detected_language == "Java" or primary_package_manager in ["maven", "gradle"]:
-        print("📦 Adding Java-specific setup commands")
-
-        language_commands.append("apt-get install -y openjdk-11-jdk maven gradle")
-
-        # Check for Maven or Gradle
-        build_check = [
-            "if [ -f pom.xml ]; then",
-            " echo 'Building with Maven'",
-            " mvn clean install -DskipTests",
-            "elif [ -f build.gradle ]; then",
-            " echo 'Building with Gradle'",
-            " gradle build --no-daemon",
-            "fi"
-        ]
-        language_commands.extend(build_check)
-
-    # Go specific commands
-    elif detected_language == "Go" or primary_package_manager == "go":
-        print("📦 Adding Go-specific setup commands")
-
-        language_commands.append("apt-get install -y golang-go")
-        language_commands.append("go mod tidy")
-
-    # Rust specific commands
-    elif detected_language == "Rust" or primary_package_manager == "cargo":
-        print("📦 Adding Rust-specific setup commands")
-
-        language_commands.append("curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y")
-        language_commands.append("source $HOME/.cargo/env")
-        language_commands.append("cargo build")
-
-    # Combine all commands
-    all_commands = default_commands + language_commands
-
-    # Fix the commands
-    fixed_commands = fix_setup_commands(all_commands)
-
-    print("\n📋 Generated fallback setup commands:")
-    for i, cmd in enumerate(fixed_commands, 1):
-        print(f" {i}. {cmd}")
-
-    return fixed_commands
+    return True
 
 def generate_basic_repo_analysis_from_url(repo_url):
     """Generate basic repository analysis data from a repository URL."""
@@ -1266,120 +1136,7 @@ def generate_basic_repo_analysis_from_url(repo_url):
         shutil.rmtree(temp_dir, ignore_errors=True)
 
 def generate_basic_repo_analysis(repo_dir):
-
-    import os
-    import subprocess
-
-    # Detect language and technologies based on file extensions
-    file_extensions = {}
-    file_count = 0
-
-    for root, _, files in os.walk(repo_dir):
-        for file in files:
-            file_count += 1
-            ext = os.path.splitext(file)[1].lower()
-            if ext:
-                file_extensions[ext] = file_extensions.get(ext, 0) + 1
-
-    # Determine primary language
-    language_map = {
-        '.py': 'Python',
-        '.js': 'JavaScript',
-        '.ts': 'TypeScript',
-        '.jsx': 'JavaScript',
-        '.tsx': 'TypeScript',
-        '.java': 'Java',
-        '.cpp': 'C++',
-        '.c': 'C',
-        '.go': 'Go',
-        '.rs': 'Rust',
-        '.rb': 'Ruby',
-        '.php': 'PHP',
-        '.swift': 'Swift',
-        '.kt': 'Kotlin',
-        '.cs': 'C#'
-    }
-
-    # Count files by language
-    language_counts = {}
-    for ext, count in file_extensions.items():
-        if ext in language_map:
-            lang = language_map[ext]
-            language_counts[lang] = language_counts.get(lang, 0) + count
-
-    # Determine primary language
-    primary_language = max(language_counts.items(), key=lambda x: x[1])[0] if language_counts else "Unknown"
-
-    # Detect package managers
-    package_managers = []
-    package_files = {
-        'requirements.txt': 'pip',
-        'setup.py': 'pip',
-        'pyproject.toml': 'pip',
-        'package.json': 'npm',
-        'yarn.lock': 'yarn',
-        'pnpm-lock.yaml': 'pnpm',
-        'Cargo.toml': 'cargo',
-        'go.mod': 'go',
-        'Gemfile': 'bundler',
-        'pom.xml': 'maven',
-        'build.gradle': 'gradle',
-        'composer.json': 'composer'
-    }
-
-    for file, manager in package_files.items():
-        if os.path.exists(os.path.join(repo_dir, file)):
-            package_managers.append(manager)
-
-    primary_package_manager = package_managers[0] if package_managers else "Unknown"
-
-    # Get README content
-    readme_content = ""
-    for readme_name in ['README.md', 'README', 'README.txt', 'readme.md']:
-        readme_path = os.path.join(repo_dir, readme_name)
-        if os.path.exists(readme_path):
-            with open(readme_path, 'r', encoding='utf-8', errors='ignore') as f:
-                readme_content = f.read()
-            break
-
-    # Try to get repository info
-    repo_info = {}
-    try:
-        # Get remote origin URL
-        cmd = ["git", "config", "--get", "remote.origin.url"]
-        result = subprocess.run(cmd, cwd=repo_dir, capture_output=True, text=True)
-        if result.returncode == 0:
-            repo_info["url"] = result.stdout.strip()
-
-        # Get commit count as a proxy for activity
-        cmd = ["git", "rev-list", "--count", "HEAD"]
-        result = subprocess.run(cmd, cwd=repo_dir, capture_output=True, text=True)
-        if result.returncode == 0:
-            repo_info["commit_count"] = int(result.stdout.strip())
-    except Exception:
-        pass
-
-    # Build the analysis data
-    return {
-        "system_info": {
-            "platform": "linux",  # Assuming Linux for container environment
-            "python_version": "3.10",  # Common Python version
-            "detected_language": primary_language,
-            "detected_technologies": list(language_counts.keys()),
-            "file_count": file_count,
-            "repo_stars": repo_info.get("stars", 0),
-            "repo_forks": repo_info.get("forks", 0),
-            "primary_package_manager": primary_package_manager,
-            "complexity_level": "medium"  # Default assumption
-        },
-        "repository_analysis": {
-            "summary": f"Repository analysis for {repo_dir}",
-            "readme_content": readme_content[:5000] if readme_content else "No README found",
-            "package_managers": package_managers,
-            "file_extensions": list(file_extensions.keys())
-        },
-        "success": True
-    }
+    return True
 
 def fix_setup_commands(commands):
     """Fix setup commands by removing placeholders and comments."""
@@ -1952,7 +1709,7 @@ Return only the JSON array, no other text.
         client = openai.OpenAI(api_key=api_key)
 
         response = client.chat.completions.create(
-            model="gpt-4.1",
+            model="gpt-4.1",
             messages=[
                 {"role": "system", "content": "You are a command preprocessing assistant that modifies setup commands to use available credentials and make them non-interactive."},
                 {"role": "user", "content": prompt}