gitarsenal-cli 1.4.11 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "gitarsenal-cli",
-  "version": "1.4.11",
+  "version": "1.5.1",
   "description": "CLI tool for creating Modal sandboxes with GitHub repositories",
   "main": "index.js",
   "bin": {
@@ -21,15 +21,16 @@
   "author": "",
   "license": "MIT",
   "dependencies": {
+    "boxen": "^5.1.2",
     "chalk": "^4.1.2",
-    "inquirer": "^8.2.4",
-    "ora": "^5.4.1",
     "commander": "^9.4.1",
-    "which": "^3.0.0",
-    "fs-extra": "^11.1.0",
     "execa": "^5.1.1",
-    "boxen": "^5.1.2",
-    "update-notifier": "^5.1.0"
+    "fs-extra": "^11.1.0",
+    "g": "^2.0.1",
+    "inquirer": "^8.2.4",
+    "ora": "^5.4.1",
+    "update-notifier": "^5.1.0",
+    "which": "^3.0.0"
   },
   "engines": {
     "node": ">=14.0.0"
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 """
-Fetch Modal Tokens
+Fetch Modal Tokens and OpenAI API Key
 
-This script fetches Modal tokens from the proxy server.
+This script fetches Modal tokens and OpenAI API key from the proxy server.
 """
 
 import os
@@ -14,10 +14,10 @@ from pathlib import Path
 
 def fetch_default_tokens_from_gitarsenal():
     """
-    Fetch default Modal tokens from gitarsenal.dev API.
+    Fetch default Modal tokens and OpenAI API key from gitarsenal.dev API.
 
     Returns:
-        tuple: (token_id, token_secret) if successful, (None, None) otherwise
+        tuple: (token_id, token_secret, openai_api_key) if successful, (None, None, None) otherwise
     """
     endpoint = "https://gitarsenal.dev/api/credentials"
 
@@ -38,40 +38,41 @@ def fetch_default_tokens_from_gitarsenal():
                 data = response.json()
                 token_id = data.get("modalTokenId")
                 token_secret = data.get("modalTokenSecret")
+                openai_api_key = data.get("openaiApiKey")
 
                 if token_id and token_secret:
                     print("✅ Successfully fetched default tokens from gitarsenal.dev")
-                    return token_id, token_secret
+                    return token_id, token_secret, openai_api_key
                 else:
                     print("❌ Modal tokens not found in gitarsenal.dev response")
-                    return None, None
+                    return None, None, None
             except json.JSONDecodeError:
                 print("❌ Invalid JSON response from gitarsenal.dev")
-                return None, None
+                return None, None, None
         else:
             print(f"❌ Failed to fetch from gitarsenal.dev: {response.status_code} - {response.text[:200]}")
-            return None, None
+            return None, None, None
 
     except requests.exceptions.Timeout:
         print("❌ Request timeout when fetching from gitarsenal.dev")
-        return None, None
+        return None, None, None
     except requests.exceptions.ConnectionError:
         print("❌ Connection failed to gitarsenal.dev")
-        return None, None
+        return None, None, None
     except requests.exceptions.RequestException as e:
         print(f"❌ Request failed to gitarsenal.dev: {e}")
-        return None, None
+        return None, None, None
 
 def fetch_tokens_from_proxy(proxy_url=None, api_key=None):
     """
-    Fetch Modal tokens from the proxy server.
+    Fetch Modal tokens and OpenAI API key from the proxy server.
 
     Args:
         proxy_url: URL of the proxy server
         api_key: API key for authentication
 
     Returns:
-        tuple: (token_id, token_secret) if successful, (None, None) otherwise
+        tuple: (token_id, token_secret, openai_api_key) if successful, (None, None, None) otherwise
     """
     # Use environment variables if not provided
     if not proxy_url:
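
Note: the gitarsenal.dev branch above now hands back a third value. As a standalone illustration of that contract, here is a minimal sketch; the endpoint URL and JSON keys come from the hunk above, while the function name and timeout value are illustrative assumptions:

    import requests

    def fetch_gitarsenal_credentials(timeout=10):
        """Sketch of the new 3-tuple contract; the timeout value is assumed."""
        try:
            response = requests.get("https://gitarsenal.dev/api/credentials", timeout=timeout)
            if response.status_code != 200:
                return None, None, None
            data = response.json()
            # JSON keys as shown in the diff: modalTokenId, modalTokenSecret, openaiApiKey
            return (data.get("modalTokenId"),
                    data.get("modalTokenSecret"),
                    data.get("openaiApiKey"))
        except (requests.exceptions.RequestException, ValueError):
            return None, None, None

    token_id, token_secret, openai_api_key = fetch_gitarsenal_credentials()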
@@ -88,12 +89,12 @@ def fetch_tokens_from_proxy(proxy_url=None, api_key=None):
     if not proxy_url:
         # print("❌ No proxy URL provided or found in environment")
         print("💡 Set MODAL_PROXY_URL environment variable or use --proxy-url argument")
-        return None, None
+        return None, None, None
 
     if not api_key:
         print("❌ No API key provided or found in environment")
         print("💡 Set MODAL_PROXY_API_KEY environment variable or use --proxy-api-key argument")
-        return None, None
+        return None, None, None
 
     # Ensure the URL ends with a slash
     if not proxy_url.endswith("/"):
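
For completeness, the proxy path is driven by the two environment variables named in these messages. A minimal invocation might look like this; the URL and key values are placeholders, and the module name fetch_modal_tokens is inferred from an import that appears later in this diff:

    import os

    # Placeholder values; the real proxy URL and API key are deployment-specific.
    os.environ.setdefault("MODAL_PROXY_URL", "https://proxy.example.com/")
    os.environ.setdefault("MODAL_PROXY_API_KEY", "placeholder-key")

    from fetch_modal_tokens import fetch_tokens_from_proxy  # module name inferred from a later hunk
    token_id, token_secret, openai_api_key = fetch_tokens_from_proxy()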
@@ -115,54 +116,60 @@ def fetch_tokens_from_proxy(proxy_url=None, api_key=None):
             data = response.json()
             token_id = data.get("token_id")
             token_secret = data.get("token_secret")
+            openai_api_key = data.get("openai_api_key")
 
             if token_id and token_secret:
                 print("✅ Successfully fetched tokens from proxy server")
-                return token_id, token_secret
+                return token_id, token_secret, openai_api_key
             else:
                 print("❌ Tokens not found in response")
-                return None, None
+                return None, None, None
         else:
             print(f"❌ Failed to fetch tokens: {response.status_code} - {response.text}")
-            return None, None
+            return None, None, None
     except Exception as e:
         print(f"❌ Error fetching tokens: {e}")
-        return None, None
+        return None, None, None
 
 def get_tokens():
     """
-    Get Modal tokens, trying to fetch from the proxy server first.
+    Get Modal tokens and OpenAI API key, trying to fetch from the proxy server first.
     Also sets the tokens in environment variables.
 
     Returns:
-        tuple: (token_id, token_secret)
+        tuple: (token_id, token_secret, openai_api_key)
     """
     # Try to fetch from the proxy server
-    token_id, token_secret = fetch_tokens_from_proxy()
+    token_id, token_secret, openai_api_key = fetch_tokens_from_proxy()
 
     # If we couldn't fetch from the server, try to get default tokens from gitarsenal.dev
    if not token_id or not token_secret:
         print("⚠️ Proxy server failed, trying to fetch default tokens from gitarsenal.dev")
-        token_id, token_secret = fetch_default_tokens_from_gitarsenal()
+        token_id, token_secret, openai_api_key = fetch_default_tokens_from_gitarsenal()
 
     # If we still don't have tokens, we can't proceed
     if not token_id or not token_secret:
         print("❌ Failed to fetch tokens from both proxy server and gitarsenal.dev")
         print("💡 Please check your network connection and API endpoints")
-        return None, None
+        return None, None, None
 
     # Set the tokens in environment variables
     os.environ["MODAL_TOKEN_ID"] = token_id
     os.environ["MODAL_TOKEN_SECRET"] = token_secret
     # print(f"✅ Set MODAL_TOKEN_ID and MODAL_TOKEN_SECRET environment variables")
 
-    return token_id, token_secret
+    # Set OpenAI API key if available
+    if openai_api_key:
+        os.environ["OPENAI_API_KEY"] = openai_api_key
+        print(f"✅ Set OPENAI_API_KEY environment variable")
+
+    return token_id, token_secret, openai_api_key
 
 if __name__ == "__main__":
     # Parse command-line arguments if run directly
     import argparse
 
-    parser = argparse.ArgumentParser(description='Fetch Modal tokens from the proxy server')
+    parser = argparse.ArgumentParser(description='Fetch Modal tokens and OpenAI API key from the proxy server')
     parser.add_argument('--proxy-url', help='URL of the proxy server')
     parser.add_argument('--proxy-api-key', help='API key for the proxy server')
     args = parser.parse_args()
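
Because get_tokens() now returns three values, every call site has to switch from 2-tuple to 3-tuple unpacking (the later hunks in this diff do exactly that). A hedged sketch of an updated caller:

    import os
    from fetch_modal_tokens import get_tokens  # module name as imported elsewhere in this diff

    token_id, token_secret, openai_api_key = get_tokens()
    if token_id is None or token_secret is None:
        raise SystemExit("No Modal credentials available")

    # get_tokens() already exports these; re-asserting them here is purely defensive.
    os.environ.setdefault("MODAL_TOKEN_ID", token_id)
    os.environ.setdefault("MODAL_TOKEN_SECRET", token_secret)
    if openai_api_key:
        os.environ.setdefault("OPENAI_API_KEY", openai_api_key)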
@@ -177,25 +184,30 @@ if __name__ == "__main__":
         print(f"✅ Set MODAL_PROXY_API_KEY from command line")
 
     # Get tokens
-    token_id, token_secret = get_tokens()
+    token_id, token_secret, openai_api_key = get_tokens()
     print(f"Token ID: {token_id}")
     print(f"Token Secret: {token_secret}")
+    print(f"OpenAI API Key: {openai_api_key[:5] + '...' if openai_api_key else None}")
 
     # Check if tokens are set in environment variables
     print(f"\n🔍 DEBUG: Checking environment variables")
     print(f"🔍 MODAL_TOKEN_ID exists: {'Yes' if os.environ.get('MODAL_TOKEN_ID') else 'No'}")
     print(f"🔍 MODAL_TOKEN_SECRET exists: {'Yes' if os.environ.get('MODAL_TOKEN_SECRET') else 'No'}")
+    print(f"🔍 OPENAI_API_KEY exists: {'Yes' if os.environ.get('OPENAI_API_KEY') else 'No'}")
     if os.environ.get('MODAL_TOKEN_ID'):
         print(f"🔍 MODAL_TOKEN_ID length: {len(os.environ.get('MODAL_TOKEN_ID'))}")
     if os.environ.get('MODAL_TOKEN_SECRET'):
         print(f"🔍 MODAL_TOKEN_SECRET length: {len(os.environ.get('MODAL_TOKEN_SECRET'))}")
+    if os.environ.get('OPENAI_API_KEY'):
+        print(f"🔍 OPENAI_API_KEY length: {len(os.environ.get('OPENAI_API_KEY'))}")
 
     # Write the tokens to a file for use by other scripts
     tokens_file = Path(__file__).parent / "modal_tokens.json"
     with open(tokens_file, 'w') as f:
         json.dump({
             "token_id": token_id,
-            "token_secret": token_secret
+            "token_secret": token_secret,
+            "openai_api_key": openai_api_key
         }, f)
     print(f"\n✅ Tokens written to {tokens_file}")
 
@@ -216,6 +228,27 @@ if __name__ == "__main__":
             f.write(f"token_secret = {token_secret}\n")
         print(f"✅ Created .modalconfig file at {modalconfig_file}")
 
+    # Create or update .env file with OpenAI API key
+    env_file = Path.home() / ".env"
+    env_content = ""
+    if env_file.exists():
+        with open(env_file, 'r') as f:
+            env_content = f.read()
+
+    # Update or add OPENAI_API_KEY
+    if openai_api_key:
+        if "OPENAI_API_KEY" in env_content:
+            # Replace existing key
+            import re
+            env_content = re.sub(r'OPENAI_API_KEY=.*\n', f'OPENAI_API_KEY={openai_api_key}\n', env_content)
+        else:
+            # Add new key
+            env_content += f'\nOPENAI_API_KEY={openai_api_key}\n'
+
+        with open(env_file, 'w') as f:
+            f.write(env_content)
+        print(f"✅ Updated OpenAI API key in {env_file}")
+
     # Try to use the Modal CLI to set the token
     try:
         print(f"\n🔄 Setting token via Modal CLI...")
@@ -35,7 +35,7 @@ try:
     # Fall back to the basic implementation
     print("🔄 Falling back to basic implementation")
 except Exception as e:
-    print(f"❌ Error running advanced Modal token fixer: {e}")
+    # print(f"❌ Error running advanced Modal token fixer: {e}")
     print("🔄 Falling back to basic implementation")
 
 # Try to get tokens from the proxy server
@@ -38,7 +38,7 @@ try:
     # Import the fetch_modal_tokens module
     # print("🔄 Fetching tokens from proxy server...")
     from fetch_modal_tokens import get_tokens
-    token_id, token_secret = get_tokens()
+    token_id, token_secret, openai_api_key = get_tokens()
 
     # Check if we got valid tokens
     if token_id is None or token_secret is None:
@@ -49,7 +49,7 @@ try:
     # Explicitly set the environment variables again to be sure
     os.environ["MODAL_TOKEN_ID"] = token_id
     os.environ["MODAL_TOKEN_SECRET"] = token_secret
-
+    os.environ["OPENAI_API_KEY"] = openai_api_key
     # Also set the old environment variable for backward compatibility
     os.environ["MODAL_TOKEN"] = token_id
 
@@ -1742,158 +1742,6 @@ cd "{current_dir}"
             print("❌ No token provided. Cannot set up Hugging Face authentication.")
             return False, "", "No Hugging Face token provided"
 
-        # Check if the error is related to missing pytest
-        if "ModuleNotFoundError: No module named 'pytest'" in stderr_buffer or "ImportError: No module named pytest" in stderr_buffer:
-            print("🔍 Detected missing pytest module, installing it automatically...")
-            pytest_install_success, _, _ = run_command("pip install pytest", show_output=True, debug_with_llm=False)
-            if pytest_install_success:
-                print("✅ Successfully installed pytest, retrying original command...")
-                return run_command(cmd, show_output, retry_count + 1, max_retries)
-
-        # Check for common errors that we can fix automatically
-        common_errors = [
-            # Source command not found in sh shell
-            {
-                "pattern": "source: not found",
-                "fix": lambda cmd: cmd.replace("source ", ". ")
-            },
-            # No virtual environment found for uv
-            {
-                "pattern": "No virtual environment found; run `uv venv`",
-                "fix": lambda cmd: "uv venv .venv && . .venv/bin/activate && " + cmd
-            }
-        ]
-
-        # Check if any of the common errors match and apply automatic fix
-        for error_info in common_errors:
-            if error_info["pattern"] in stderr_buffer:
-                print(f"🔍 Detected common error: {error_info['pattern']}")
-                fix_func = error_info["fix"]
-                fixed_cmd = fix_func(cmd_to_execute)
-                print(f"🔧 Applying automatic fix: {fixed_cmd}")
-
-                # Run the fixed command
-                fix_success, fix_stdout, fix_stderr = run_command(fixed_cmd, show_output=True, debug_with_llm=False)
-                if fix_success:
-                    print("✅ Automatic fix succeeded!")
-                    return True, fix_stdout, ""
-                else:
-                    print("❌ Automatic fix failed, continuing with LLM debugging")
-
-        # Check for Python version-specific errors
-        python_version_errors = [
-            # Python 3.13 distutils issue
-            ("ModuleNotFoundError: No module named 'distutils'", "3.13"),
-            # Add more version-specific error patterns here
-            ("ImportError: cannot import name 'soft_unicode' from 'markupsafe'", None),
-            ("AttributeError: module 'setuptools.dist' has no attribute 'check_specifier'", None)
-        ]
-
-        # Check if any of the error patterns match
-        for error_pattern, problematic_version in python_version_errors:
-            if error_pattern in stderr_buffer:
-                print(f"🔍 Detected Python version-specific error: {error_pattern}")
-
-                # Get current Python version if not already known
-                if not current_python_version:
-                    version_cmd = "python --version"
-                    version_success, version_stdout, _ = run_command(version_cmd, show_output=False, debug_with_llm=False)
-                    if version_success:
-                        current_python_version = version_stdout.strip()
-                        print(f"🐍 Current Python version: {current_python_version}")
-
-                # Check if we've already tried switching Python versions
-                if python_version_switched:
-                    print("⚠️ Already attempted to switch Python versions once, not trying again")
-                    break
-
-                print("🔄 Attempting to fix by switching Python version...")
-
-                # Install conda if not already installed
-                if not conda_installed:
-                    print("📦 Installing Miniconda to manage Python versions...")
-                    conda_install_cmds = [
-                        "apt-get update -y",
-                        "apt-get install -y wget bzip2",
-                        "wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh",
-                        "bash /tmp/miniconda.sh -b -p /opt/conda",
-                        "rm /tmp/miniconda.sh",
-                        "echo 'export PATH=/opt/conda/bin:$PATH' >> ~/.bashrc",
-                        "export PATH=/opt/conda/bin:$PATH",
-                        "conda init bash",
-                        "source ~/.bashrc",
-                        "conda activate base"
-                    ]
-
-                    for conda_cmd in conda_install_cmds:
-                        print(f"🔄 Running: {conda_cmd}")
-                        conda_success, _, _ = run_command(conda_cmd, show_output=True, debug_with_llm=False)
-                        if not conda_success:
-                            print("⚠️ Failed to install conda, continuing with system Python")
-                            break
-
-                    # Check if conda was successfully installed
-                    conda_check_cmd = "conda --version"
-                    conda_check_success, conda_check_stdout, _ = run_command(conda_check_cmd, show_output=True, debug_with_llm=False)
-                    conda_installed = conda_check_success
-
-                    if conda_installed:
-                        print(f"✅ Successfully installed conda: {conda_check_stdout.strip()}")
-                    else:
-                        print("⚠️ Failed to verify conda installation")
-                        break
-
-                # Determine target Python version
-                target_version = "3.10" # Default to a stable version
-                if problematic_version == "3.13":
-                    # If we're on 3.13 and having issues, go to 3.10
-                    target_version = "3.10"
-                elif "3.13" in str(current_python_version):
-                    # If we're on 3.13 for any other error, try 3.10
-                    target_version = "3.10"
-                elif "3.10" in str(current_python_version):
-                    # If we're on 3.10 and having issues, try 3.9
-                    target_version = "3.9"
-
-                print(f"🐍 Switching from {current_python_version} to Python {target_version}...")
-
-                # Create and activate a conda environment with the target Python version
-                conda_cmds = [
-                    f"conda create -y -n py{target_version} python={target_version}",
-                    f"echo 'conda activate py{target_version}' >> ~/.bashrc",
-                    f"conda init bash",
-                    f"source ~/.bashrc",
-                    f"conda activate py{target_version}"
-                ]
-
-                for conda_cmd in conda_cmds:
-                    print(f"🔄 Running: {conda_cmd}")
-                    conda_success, _, _ = run_command(conda_cmd, show_output=True, debug_with_llm=False)
-                    if not conda_success:
-                        print(f"⚠️ Failed to run conda command: {conda_cmd}")
-
-                # Verify Python version changed
-                verify_cmd = "python --version"
-                verify_success, verify_stdout, _ = run_command(verify_cmd, show_output=True, debug_with_llm=False)
-
-                if verify_success and target_version in verify_stdout:
-                    print(f"✅ Successfully switched to Python {verify_stdout.strip()}")
-                    python_version_switched = True
-                    current_python_version = verify_stdout.strip()
-
-                    # Reinstall pip and setuptools in the new environment
-                    print("📦 Installing pip and setuptools in new environment...")
-                    run_command("pip install --upgrade pip setuptools wheel", show_output=True, debug_with_llm=False)
-
-                    # Retry the original command with the new Python version
-                    print(f"🔄 Retrying original command with Python {target_version}...")
-                    # Reset the retry counter since we've made a significant change
-                    return run_command(cmd, show_output, 0, max_retries)
-                else:
-                    print("⚠️ Failed to switch Python version, continuing with current version")
-
-                break
-
         # Check if stderr is empty, try to use stdout as fallback
         debug_output = stderr_buffer
         if not debug_output or not debug_output.strip():
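
For readers skimming the large removal above: 1.5.1 drops the local auto-fix pass that scanned stderr for known failure patterns (missing pytest, `source` under sh, a missing uv venv, and Python-version issues handled by installing Miniconda) before falling back to LLM debugging. Condensed, the removed dispatch had roughly this shape (sketch with standalone names; the real code lived inside run_command):

    import subprocess

    # Condensed sketch of the pattern -> fix dispatch removed in 1.5.1.
    COMMON_ERRORS = [
        {"pattern": "source: not found",
         "fix": lambda cmd: cmd.replace("source ", ". ")},
        {"pattern": "No virtual environment found; run `uv venv`",
         "fix": lambda cmd: "uv venv .venv && . .venv/bin/activate && " + cmd},
    ]

    def try_auto_fix(cmd: str, stderr: str):
        """Apply the first matching fix, if any, and return (handled, completed_process)."""
        for entry in COMMON_ERRORS:
            if entry["pattern"] in stderr:
                fixed_cmd = entry["fix"](cmd)
                result = subprocess.run(fixed_cmd, shell=True, capture_output=True, text=True)
                return True, result
        return False, None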
@@ -2559,7 +2407,7 @@ ssh_app = modal.App("ssh-container-app")
         "python3", "python3-pip", "build-essential", "tmux", "screen", "nano",
         "gpg", "ca-certificates", "software-properties-common"
     )
-    .pip_install("uv", "modal", "requests", "openai") # Install required packages for LLM debugging
+    .pip_install("uv", "modal", "requests", "openai") # Fast Python package installer and Modal
     .run_commands(
         # Create SSH directory
         "mkdir -p /var/run/sshd",
@@ -2663,62 +2511,6 @@ def ssh_container_function(ssh_password, repo_url=None, repo_name=None, setup_co
             print(f"❌ Command failed: {e}")
             print(f"❌ Error: {error_output}")
 
-            # Check for common errors that we can fix automatically
-            common_errors = [
-                # Source command not found
-                {
-                    "pattern": "source: not found",
-                    "fix": lambda cmd: cmd.replace("source ", ". ")
-                },
-                # Conda not found
-                {
-                    "pattern": "conda: not found",
-                    "fix": lambda cmd: "apt-get update && apt-get install -y wget bzip2 && wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh && bash /tmp/miniconda.sh -b -p /opt/conda && export PATH=/opt/conda/bin:$PATH && echo 'export PATH=/opt/conda/bin:$PATH' >> ~/.bashrc && source ~/.bashrc && " + cmd
-                },
-                # File not found for chmod
-                {
-                    "pattern": "chmod: cannot access",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'File not found, checking current directory and contents'"
-                },
-                # No such file or directory
-                {
-                    "pattern": "No such file or directory",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'Checking current directory and contents'"
-                },
-                # Directory navigation issues
-                {
-                    "pattern": "cd: no such file or directory",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'Directory not found, checking current location'"
-                },
-                # File not found for execution
-                {
-                    "pattern": "not found",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'File not found, checking current location'"
-                }
-            ]
-
-            # Check if any of the common errors match and apply automatic fix
-            for error_info in common_errors:
-                if error_info["pattern"] in error_output:
-                    print(f"🔍 Detected common error: {error_info['pattern']}")
-                    fix_func = error_info["fix"]
-                    fixed_cmd = fix_func(cmd)
-                    print(f"🔧 Applying automatic fix: {fixed_cmd}")
-
-                    # Run the fixed command
-                    try:
-                        fix_result = subprocess.run(fixed_cmd, shell=True, check=True,
-                                                    capture_output=True, text=True)
-                        if fix_result.stdout:
-                            print(f"✅ Automatic fix output: {fix_result.stdout}")
-
-                        # Retry the original command
-                        print(f"🔄 Retrying original command: {cmd}")
-                        return run_command_with_llm_debug(cmd, show_output, retry_count + 1, max_retries)
-                    except subprocess.CalledProcessError as fix_e:
-                        print(f"❌ Automatic fix failed: {fix_e}")
-                        # Continue with LLM debugging
-
             # Call OpenAI for debugging
             print("🔍 Attempting to debug the failed command with OpenAI...")
             try:
@@ -3025,7 +2817,7 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
         "python3", "python3-pip", "build-essential", "tmux", "screen", "nano",
         "gpg", "ca-certificates", "software-properties-common"
     )
-    .pip_install("uv", "modal", "requests") # Fast Python package installer, Modal, and requests for API calls
+    .pip_install("uv", "modal", "requests", "openai") # Fast Python package installer and Modal
     .run_commands(
         # Create SSH directory
         "mkdir -p /var/run/sshd",
@@ -3058,7 +2850,7 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
     if volume:
         volumes_config[volume_mount_path] = volume
 
-    # Define the SSH container function with all necessary imports
+    # Define the SSH container function
     @app.function(
         image=ssh_image,
         timeout=timeout_minutes * 60, # Convert to seconds
@@ -3067,8 +2859,6 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
         memory=8192,
         serialized=True,
         volumes=volumes_config if volumes_config else None,
-        # Include all required modules in container
-        mounts=[modal.Mount.from_local_python_packages("requests", "openai")]
     )
     def ssh_container_function():
         """Start SSH container with password authentication and optional setup."""
@@ -3132,62 +2922,6 @@ def create_modal_ssh_container(gpu_type, repo_url=None, repo_name=None, setup_co
             print(f"❌ Command failed: {e}")
             print(f"❌ Error: {error_output}")
 
-            # Check for common errors that we can fix automatically
-            common_errors = [
-                # Source command not found
-                {
-                    "pattern": "source: not found",
-                    "fix": lambda cmd: cmd.replace("source ", ". ")
-                },
-                # Conda not found
-                {
-                    "pattern": "conda: not found",
-                    "fix": lambda cmd: "apt-get update && apt-get install -y wget bzip2 && wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh && bash /tmp/miniconda.sh -b -p /opt/conda && export PATH=/opt/conda/bin:$PATH && echo 'export PATH=/opt/conda/bin:$PATH' >> ~/.bashrc && source ~/.bashrc && " + cmd
-                },
-                # File not found for chmod
-                {
-                    "pattern": "chmod: cannot access",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'File not found, checking current directory and contents'"
-                },
-                # No such file or directory
-                {
-                    "pattern": "No such file or directory",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'Checking current directory and contents'"
-                },
-                # Directory navigation issues
-                {
-                    "pattern": "cd: no such file or directory",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'Directory not found, checking current location'"
-                },
-                # File not found for execution
-                {
-                    "pattern": "not found",
-                    "fix": lambda cmd: "pwd && ls -la && echo 'File not found, checking current location'"
-                }
-            ]
-
-            # Check if any of the common errors match and apply automatic fix
-            for error_info in common_errors:
-                if error_info["pattern"] in error_output:
-                    print(f"🔍 Detected common error: {error_info['pattern']}")
-                    fix_func = error_info["fix"]
-                    fixed_cmd = fix_func(cmd)
-                    print(f"🔧 Applying automatic fix: {fixed_cmd}")
-
-                    # Run the fixed command
-                    try:
-                        fix_result = subprocess.run(fixed_cmd, shell=True, check=True,
-                                                    capture_output=True, text=True)
-                        if fix_result.stdout:
-                            print(f"✅ Automatic fix output: {fix_result.stdout}")
-
-                        # Retry the original command
-                        print(f"🔄 Retrying original command: {cmd}")
-                        return run_command_with_basic_error_handling(cmd, show_output, retry_count + 1, max_retries)
-                    except subprocess.CalledProcessError as fix_e:
-                        print(f"❌ Automatic fix failed: {fix_e}")
-                        # Continue with LLM debugging
-
             # Call OpenAI for debugging
             print("🔍 Attempting to debug the failed command with OpenAI...")
             try:
@@ -3949,7 +3683,7 @@ def create_ssh_container_function(gpu_type="a10g", timeout_minutes=60, volume=No
         "python3", "python3-pip", "build-essential", "tmux", "screen", "nano",
         "gpg", "ca-certificates", "software-properties-common"
     )
-    .pip_install("uv", "modal", "requests", "openai") # Install required packages for LLM debugging
+    .pip_install("uv", "modal", "requests", "openai") # Fast Python package installer and Modal
     .run_commands(
         # Create SSH directory
         "mkdir -p /var/run/sshd",
@@ -3987,8 +3721,6 @@ def create_ssh_container_function(gpu_type="a10g", timeout_minutes=60, volume=No
         memory=8192,
         serialized=True,
         volumes=volumes if volumes else None,
-        # Include all required modules in container
-        mounts=[modal.Mount.from_local_python_packages("requests", "openai")]
     )
     def ssh_container(ssh_password, repo_url=None, repo_name=None, setup_commands=None):
         import subprocess