alphai 0.1.2-py3-none-any.whl → 0.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alphai/__init__.py +40 -2
- alphai/auth.py +31 -11
- alphai/cleanup.py +351 -0
- alphai/cli.py +45 -910
- alphai/client.py +115 -70
- alphai/commands/__init__.py +24 -0
- alphai/commands/config.py +67 -0
- alphai/commands/docker.py +615 -0
- alphai/commands/jupyter.py +350 -0
- alphai/commands/notebooks.py +1173 -0
- alphai/commands/orgs.py +27 -0
- alphai/commands/projects.py +35 -0
- alphai/config.py +15 -5
- alphai/docker.py +80 -45
- alphai/exceptions.py +122 -0
- alphai/jupyter_manager.py +577 -0
- alphai/notebook_renderer.py +473 -0
- alphai/utils.py +67 -0
- {alphai-0.1.2.dist-info → alphai-0.2.1.dist-info}/METADATA +8 -9
- alphai-0.2.1.dist-info/RECORD +23 -0
- alphai-0.1.2.dist-info/RECORD +0 -12
- {alphai-0.1.2.dist-info → alphai-0.2.1.dist-info}/WHEEL +0 -0
- {alphai-0.1.2.dist-info → alphai-0.2.1.dist-info}/entry_points.txt +0 -0
- {alphai-0.1.2.dist-info → alphai-0.2.1.dist-info}/top_level.txt +0 -0
alphai/jupyter_manager.py (new file)
@@ -0,0 +1,577 @@
+"""Jupyter management with automatic tunneling for alphai CLI."""
+
+import sys
+import shutil
+import subprocess
+import platform
+import time
+import socket
+import webbrowser
+from pathlib import Path
+from typing import Optional, List, Tuple, Any
+from rich.console import Console
+from rich.panel import Panel
+from rich.prompt import Confirm
+
+from .utils import get_logger
+from . import exceptions
+
+logger = get_logger(__name__)
+
+
+class JupyterManager:
+    """Manage Jupyter Lab/Notebook instances with automatic tunneling."""
+
+    def __init__(self, console: Console):
+        """Initialize the Jupyter manager."""
+        self.console = console
+        self._jupyter_process = None
+        self._cloudflared_process = None
+        self._tunnel_data = None
+
+    def is_jupyter_installed(self, command: str = "jupyter") -> bool:
+        """Check if Jupyter is installed."""
+        installed = shutil.which(command) is not None
+        if installed:
+            logger.debug(f"{command} is installed")
+        else:
+            logger.warning(f"{command} not found in PATH")
+        return installed
+
+    def check_jupyter_or_exit(self, command: str = "jupyter") -> None:
+        """Check if Jupyter is installed, exit with helpful message if not."""
+        if not self.is_jupyter_installed(command):
+            logger.error(f"{command} not found")
+            self.console.print(f"[red]Error: {command} not found[/red]")
+            self.console.print(f"[yellow]Install it with: pip install jupyterlab[/yellow]")
+            sys.exit(1)
+
+    def generate_jupyter_token(self) -> str:
+        """Generate a secure token for Jupyter."""
+        import secrets
+        token = secrets.token_hex(32)
+        logger.debug(f"Generated Jupyter token: {token[:12]}...")
+        return token
+
+    def build_jupyter_command(
+        self,
+        command: List[str],
+        port: int,
+        token: str,
+        extra_args: List[str],
+        allow_remote: bool = False
+    ) -> List[str]:
+        """Build the full Jupyter command with all arguments.
+
+        Args:
+            command: Base command (e.g., ['jupyter', 'lab'])
+            port: Port number
+            token: Authentication token
+            extra_args: Additional user arguments
+            allow_remote: If True, allow remote access (needed for tunnels)
+        """
+        full_command = command + [
+            f'--port={port}',
+            '--no-browser',  # We'll open browser to cloud URL instead
+            '--ServerApp.allow_origin=*',
+        ]
+
+        # Add token (different format for lab vs notebook)
+        if 'lab' in command:
+            full_command.append(f'--ServerApp.token={token}')
+            if allow_remote:
+                # Allow remote access for tunnel (secure via token + cloudflare)
+                full_command.append('--ServerApp.allow_remote_access=true')
+        else:
+            full_command.append(f'--NotebookApp.token={token}')
+            if allow_remote:
+                # Allow remote access for tunnel
+                full_command.append('--NotebookApp.allow_remote_access=true')
+
+        # Add user's extra arguments
+        full_command.extend(extra_args)
+
+        logger.debug(f"Built command: {' '.join(full_command[:3])}... (+ {len(extra_args)} extra args, allow_remote={allow_remote})")
+        return full_command
+
+    def start_jupyter(
+        self,
+        command: List[str],
+        port: int,
+        token: str,
+        extra_args: List[str] = None,
+        allow_remote: bool = False
+    ) -> subprocess.Popen:
+        """Start Jupyter in background process.
+
+        Args:
+            command: Base command (e.g., ['jupyter', 'lab'])
+            port: Port number
+            token: Authentication token
+            extra_args: Additional user arguments
+            allow_remote: If True, allow remote access (needed for tunnels)
+        """
+        extra_args = extra_args or []
+        full_command = self.build_jupyter_command(command, port, token, extra_args, allow_remote)
+
+        logger.info(f"Starting {' '.join(command)} on port {port} (allow_remote={allow_remote})")
+        self.console.print(f"[yellow]Starting {' '.join(command)}...[/yellow]")
+
+        try:
+            process = subprocess.Popen(
+                full_command,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True
+            )
+
+            self._jupyter_process = process
+            logger.info(f"Jupyter process started with PID {process.pid}")
+            return process
+
+        except Exception as e:
+            logger.error(f"Failed to start Jupyter: {e}", exc_info=True)
+            self.console.print(f"[red]Failed to start Jupyter: {e}[/red]")
+            raise exceptions.JupyterError(f"Failed to start Jupyter: {e}")
+
+    def is_port_available(self, port: int) -> bool:
+        """Check if a port is available for use."""
+        try:
+            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                s.settimeout(1)
+                s.bind(('localhost', port))
+                return True
+        except (socket.error, OSError):
+            return False
+
+    def find_available_port(self, start_port: int, max_attempts: int = 10) -> int:
+        """Find an available port starting from start_port.
+
+        Args:
+            start_port: Port to start searching from
+            max_attempts: Maximum number of ports to try
+
+        Returns:
+            An available port number
+
+        Raises:
+            JupyterError: If no available port found
+        """
+        for offset in range(max_attempts):
+            port = start_port + offset
+            if self.is_port_available(port):
+                if offset > 0:
+                    logger.info(f"Port {start_port} was in use, using port {port} instead")
+                    self.console.print(f"[yellow]Port {start_port} is in use, using port {port}[/yellow]")
+                return port
+
+        raise exceptions.JupyterError(
+            f"Could not find available port (tried {start_port} to {start_port + max_attempts - 1})"
+        )
+
+    def wait_for_jupyter_ready(self, port: int, timeout: int = 30) -> bool:
+        """Wait for Jupyter to be ready on the specified port."""
+        logger.debug(f"Waiting for Jupyter on port {port} (timeout: {timeout}s)")
+
+        start_time = time.time()
+        while time.time() - start_time < timeout:
+            try:
+                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                    s.settimeout(1)
+                    s.connect(('localhost', port))
+                    logger.info(f"Jupyter is ready on port {port}")
+                    return True
+            except (socket.error, socket.timeout):
+                time.sleep(0.5)
+
+        logger.error(f"Jupyter failed to start within {timeout}s")
+        return False
+
+    def _get_cloudflared_path(self) -> Path:
+        """Get the path where cloudflared should be installed."""
+        return Path.home() / ".alphai" / "bin" / "cloudflared"
+
+    def is_cloudflared_installed(self) -> bool:
+        """Check if cloudflared is installed (either in PATH or in ~/.alphai/bin)."""
+        # Check system PATH first
+        if shutil.which("cloudflared") is not None:
+            logger.debug("cloudflared found in system PATH")
+            return True
+
+        # Check local installation
+        local_path = self._get_cloudflared_path()
+        if local_path.exists() and local_path.is_file():
+            logger.debug(f"cloudflared found at {local_path}")
+            return True
+
+        logger.debug("cloudflared not found")
+        return False
+
+    def _get_cloudflared_binary(self) -> str:
+        """Get the cloudflared binary path (system or local)."""
+        # Prefer system installation
+        system_binary = shutil.which("cloudflared")
+        if system_binary:
+            return system_binary
+
+        # Fall back to local installation
+        local_path = self._get_cloudflared_path()
+        if local_path.exists():
+            return str(local_path)
+
+        return "cloudflared"  # Fallback
+
+    def install_cloudflared(self) -> bool:
+        """Install cloudflared to user directory (no sudo required)."""
+        logger.info("Installing connector to user directory")
+        self.console.print("[yellow]Installing connector...[/yellow]")
+
+        system = platform.system().lower()
+        bin_dir = Path.home() / ".alphai" / "bin"
+        bin_dir.mkdir(parents=True, exist_ok=True)
+
+        cloudflared_path = bin_dir / "cloudflared"
+
+        try:
+            # Determine architecture
+            machine = platform.machine().lower()
+            if machine in ["x86_64", "amd64"]:
+                arch = "amd64"
+            elif machine in ["aarch64", "arm64"]:
+                arch = "arm64"
+            elif machine.startswith("arm"):
+                arch = "arm"
+            else:
+                arch = "amd64"  # Default fallback
+
+            # Build download URL based on OS
+            if system == "linux":
+                url = f"https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-{arch}"
+                download_cmd = ["wget", "-q", "-O", str(cloudflared_path), url]
+
+            elif system == "darwin":  # macOS
+                url = f"https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-darwin-{arch}"
+                download_cmd = ["curl", "-sL", "-o", str(cloudflared_path), url]
+
+            elif system == "windows":
+                self.console.print("[yellow]Windows requires manual connector setup[/yellow]")
+                self.console.print("[dim]Visit: https://github.com/cloudflare/cloudflared/releases[/dim]")
+                self.console.print(f"[dim]Place the binary in: {bin_dir}[/dim]")
+                return False
+
+            else:
+                logger.error(f"Unsupported operating system: {system}")
+                self.console.print(f"[red]Unsupported operating system: {system}[/red]")
+                return False
+
+            # Download the binary
+            logger.debug(f"Downloading cloudflared from {url}")
+            result = subprocess.run(download_cmd, capture_output=True, text=True)
+            if result.returncode != 0:
+                logger.error(f"Download failed: {result.stderr}")
+                self.console.print(f"[red]Download failed: {result.stderr}[/red]")
+                return False
+
+            # Make it executable
+            cloudflared_path.chmod(0o755)
+
+            logger.info(f"Connector installed successfully to {cloudflared_path}")
+            self.console.print("[green]✓ Connector installed[/green]")
+            return True
+
+        except Exception as e:
+            logger.error(f"Failed to install connector: {e}", exc_info=True)
+            self.console.print(f"[red]Error installing connector: {e}[/red]")
+            return False
+
+    def ensure_cloudflared(self) -> bool:
+        """Ensure connector is installed, offer to install if not."""
+        if self.is_cloudflared_installed():
+            self.console.print("[green]✓ Connector ready[/green]")
+            return True
+
+        logger.info("Connector not found, prompting user to install")
+        self.console.print("[yellow]Connector not found[/yellow]")
+
+        if not Confirm.ask("Install connector now?", default=True):
+            logger.info("User declined connector installation")
+            return False
+
+        return self.install_cloudflared()
+
+    def setup_cloudflared_tunnel(self, token: str) -> bool:
+        """Start cloudflared connector as a subprocess (no sudo required)."""
+        logger.info("Starting cloudflared connector process")
+        self.console.print("[yellow]Establishing connection...[/yellow]")
+
+        try:
+            cloudflared_bin = self._get_cloudflared_binary()
+
+            # Run cloudflared tunnel with the token
+            # This doesn't require sudo and runs as a regular process
+            self._cloudflared_process = subprocess.Popen(
+                [cloudflared_bin, "tunnel", "run", "--token", token],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True
+            )
+
+            # Give it a moment to start
+            time.sleep(2)
+
+            # Check if it's still running
+            if self._cloudflared_process.poll() is not None:
+                # Process exited
+                _, stderr = self._cloudflared_process.communicate(timeout=1)
+                logger.error(f"Connection failed to start: {stderr}")
+                self.console.print(f"[red]Connection failed: {stderr}[/red]")
+                return False
+
+            logger.info(f"Cloudflared connector started (PID: {self._cloudflared_process.pid})")
+            self.console.print("[green]✓ Connection established[/green]")
+            return True
+
+        except Exception as e:
+            logger.error(f"Error starting cloudflared: {e}", exc_info=True)
+            self.console.print(f"[red]Error establishing connection: {e}[/red]")
+            return False
+
+    def cleanup_cloudflared_tunnel(self) -> bool:
+        """Stop cloudflared tunnel process."""
+        logger.info("Stopping cloudflared tunnel")
+
+        if not self._cloudflared_process:
+            logger.debug("No cloudflared process to stop")
+            return True
+
+        try:
+            # Terminate the process gracefully
+            logger.debug(f"Terminating cloudflared process (PID: {self._cloudflared_process.pid})")
+            self._cloudflared_process.terminate()
+
+            # Wait for graceful shutdown
+            try:
+                self._cloudflared_process.wait(timeout=5)
+                logger.info("Cloudflared process terminated gracefully")
+                return True
+            except subprocess.TimeoutExpired:
+                # Force kill if it doesn't stop
+                logger.warning("Cloudflared didn't stop gracefully, forcing...")
+                self._cloudflared_process.kill()
+                self._cloudflared_process.wait(timeout=2)
+                logger.info("Cloudflared process force-killed")
+                return True
+
+        except Exception as e:
+            logger.warning(f"Error stopping cloudflared: {e}")
+            # Try force kill as last resort
+            try:
+                if self._cloudflared_process and self._cloudflared_process.poll() is None:
+                    self._cloudflared_process.kill()
+            except Exception:
+                pass
+            return True  # Don't fail cleanup if cloudflared cleanup has issues
+
+    def display_jupyter_info(
+        self,
+        jupyter_port: int,
+        token: str,
+        tunnel_data: Optional[Any] = None,
+        org: Optional[str] = None,
+        project: Optional[str] = None,
+        api_url: str = "https://www.runalph.ai",
+        app_port: Optional[int] = None
+    ) -> None:
+        """Display access information for Jupyter and optionally app."""
+        summary = []
+
+        summary.append("[bold]🎓 Jupyter Lab Access Information[/bold]")
+        summary.append("")
+
+        # Local access
+        summary.append("[bold blue]Local URL:[/bold blue]")
+        summary.append(f" • Jupyter: http://localhost:{jupyter_port}?token={token}")
+        if app_port:
+            summary.append(f" • App: http://localhost:{app_port}")
+        summary.append("")
+
+        # Cloud access
+        if tunnel_data:
+            summary.append("[bold green]Public URL:[/bold green]")
+            summary.append(f" • Jupyter: {tunnel_data.jupyter_url}?token={token}")
+            if app_port and hasattr(tunnel_data, 'app_url'):
+                summary.append(f" • App: {tunnel_data.app_url}")
+            summary.append("")
+
+            # Project URL - use slug from project_data if available
+            frontend_url = api_url.replace("/api", "").rstrip("/")
+            project_slug = project
+            if tunnel_data.project_data:
+                # Prefer slug from API response
+                if hasattr(tunnel_data.project_data, 'slug') and tunnel_data.project_data.slug:
+                    project_slug = tunnel_data.project_data.slug
+                elif hasattr(tunnel_data.project_data, 'name') and tunnel_data.project_data.name:
+                    # Fallback to name if no slug
+                    project_slug = tunnel_data.project_data.name
+
+            project_url = f"{frontend_url}/{org}/{project_slug}"
+            summary.append(f"[bold cyan]Dashboard:[/bold cyan]")
+            summary.append(f" {project_url}")
+            summary.append("")
+
+            # Auto-open browser
+            try:
+                webbrowser.open(project_url)
+                logger.info(f"Opened browser to {project_url}")
+                summary.append("[dim]→ Browser opened to dashboard[/dim]")
+            except Exception as e:
+                logger.warning(f"Could not open browser: {e}")
+        else:
+            summary.append("[dim]Local only - no cloud connection[/dim]")
+            summary.append("[dim]Run 'alphai login' to enable cloud access[/dim]")
+
+        summary.append("")
+        summary.append(f"[bold yellow]Jupyter Token:[/bold yellow] {token}")
+
+        panel = Panel(
+            "\n".join(summary),
+            title="🚀 Jupyter Running",
+            border_style="green"
+        )
+        self.console.print(panel)
+
+    def monitor_jupyter(self, show_logs: bool = True) -> None:
+        """Monitor Jupyter process until interrupted.
+
+        Args:
+            show_logs: If True, stream Jupyter output to console (default: True)
+        """
+        if not self._jupyter_process:
+            logger.error("No Jupyter process to monitor")
+            return
+
+        self.console.print("\n[bold green]🎯 Jupyter is running! Press Ctrl+C to stop.[/bold green]")
+
+        if show_logs:
+            self.console.print("[dim]Streaming Jupyter logs below...[/dim]\n")
+
+        try:
+            if show_logs:
+                # Stream logs in real-time
+                logger.debug("Streaming Jupyter logs")
+                import threading
+
+                def stream_output(pipe, prefix=""):
+                    """Stream output from pipe."""
+                    try:
+                        for line in iter(pipe.readline, ''):
+                            if line:
+                                # Print without rich formatting to preserve Jupyter's colors
+                                print(f"{prefix}{line}", end='')
+                    except Exception:
+                        pass
+
+                # Stream both stdout and stderr
+                stdout_thread = threading.Thread(
+                    target=stream_output,
+                    args=(self._jupyter_process.stdout, ""),
+                    daemon=True
+                )
+                stderr_thread = threading.Thread(
+                    target=stream_output,
+                    args=(self._jupyter_process.stderr, ""),
+                    daemon=True
+                )
+
+                stdout_thread.start()
+                stderr_thread.start()
+
+                # Wait for process to exit
+                self._jupyter_process.wait()
+            else:
+                # Just wait for process silently
+                self._jupyter_process.wait()
+
+        except KeyboardInterrupt:
+            logger.info("Received interrupt signal, stopping Jupyter")
+            self.console.print("\n[yellow]Stopping Jupyter...[/yellow]")
+
+    def cleanup(
+        self,
+        client: Optional[Any] = None,
+        tunnel_id: Optional[str] = None,
+        project_id: Optional[str] = None,
+        force: bool = False
+    ) -> bool:
+        """Comprehensive cleanup of Jupyter process and tunnel resources.
+
+        Args:
+            client: AlphAIClient instance for API cleanup
+            tunnel_id: Tunnel ID to delete (optional, uses stored tunnel_data if not provided)
+            project_id: Project ID to delete (optional)
+            force: If True, force cleanup even on errors
+
+        Returns:
+            bool: True if all cleanup successful, False if any warnings
+        """
+        logger.info("Starting comprehensive cleanup")
+        success = True
+
+        # Step 1: Stop Jupyter process
+        if self._jupyter_process:
+            try:
+                logger.debug(f"Stopping Jupyter process (PID: {self._jupyter_process.pid})")
+                # Use SIGKILL directly since SIGTERM causes Jupyter to prompt for
+                # confirmation which hangs because stdin isn't connected
+                self._jupyter_process.kill()
+                self._jupyter_process.wait(timeout=5)
+                logger.info("Jupyter process stopped")
+                self.console.print("[green]✓ Jupyter stopped[/green]")
+
+            except Exception as e:
+                logger.error(f"Error stopping Jupyter: {e}", exc_info=True)
+                self.console.print(f"[yellow]⚠ Error stopping Jupyter: {e}[/yellow]")
+                success = False
+
+        # Step 2: Cleanup cloudflared connection service
+        if not self.cleanup_cloudflared_tunnel():
+            success = False
+            self.console.print("[yellow]⚠ Connection cleanup had issues[/yellow]")
+        else:
+            self.console.print("[green]✓ Connection closed[/green]")
+
+        # Step 3: Delete tunnel and project from API
+        if client:
+            # Use provided tunnel_id or fall back to stored tunnel_data
+            actual_tunnel_id = tunnel_id or (self._tunnel_data.id if self._tunnel_data else None)
+
+            if actual_tunnel_id or project_id:
+                try:
+                    logger.info(f"Deleting tunnel and project from API (tunnel={actual_tunnel_id}, project={project_id})")
+                    if not client.cleanup_tunnel_and_project(
+                        tunnel_id=actual_tunnel_id,
+                        project_id=project_id,
+                        force=force
+                    ):
+                        logger.warning("API cleanup had issues")
+                        success = False
+                except Exception as e:
+                    logger.error(f"Error during API cleanup: {e}", exc_info=True)
+                    self.console.print(f"[yellow]⚠ Error during API cleanup: {e}[/yellow]")
+                    success = False
+
+        # Summary
+        if success:
+            self.console.print("\n[bold green]✅ Cleanup completed successfully![/bold green]")
+            logger.info("Cleanup completed successfully")
+        else:
+            self.console.print("\n[bold yellow]⚠ Cleanup completed with warnings[/bold yellow]")
+            self.console.print("[dim]Check logs for details: ~/.alphai/logs/alphai.log[/dim]")
+            logger.warning("Cleanup completed with warnings")
+
+        return success
+
+    def set_tunnel_data(self, tunnel_data: Any) -> None:
+        """Store tunnel data for cleanup."""
+        self._tunnel_data = tunnel_data
+
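For orientation, here is a minimal sketch of how the new `JupyterManager` above can be driven for a purely local run. The wiring (port 8888, no tunnel, no API client) is an illustrative assumption, not taken from the package; only the method names and signatures come from the code in this diff.

# Hypothetical local-only driver for the JupyterManager shown in the diff above.
# No client/tunnel is passed, so the cloud-related steps are skipped.
from rich.console import Console
from alphai.jupyter_manager import JupyterManager

console = Console()
manager = JupyterManager(console)

manager.check_jupyter_or_exit("jupyter")        # exits with code 1 if Jupyter is missing
token = manager.generate_jupyter_token()        # 64-character hex secret
port = manager.find_available_port(8888)        # falls back to 8889, 8890, ... if busy

manager.start_jupyter(["jupyter", "lab"], port, token)
if manager.wait_for_jupyter_ready(port):
    manager.display_jupyter_info(port, token)   # local URLs only (tunnel_data is None)
    manager.monitor_jupyter(show_logs=True)     # streams logs until Ctrl+C
manager.cleanup()                               # kills Jupyter; no client, so no API cleanup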