alphai 0.0.7-py3-none-any.whl → 0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alphai/__init__.py +8 -4
- alphai/auth.py +362 -0
- alphai/cli.py +1015 -0
- alphai/client.py +400 -0
- alphai/config.py +88 -0
- alphai/docker.py +764 -0
- alphai/utils.py +192 -0
- alphai-0.1.0.dist-info/METADATA +394 -0
- alphai-0.1.0.dist-info/RECORD +12 -0
- {alphai-0.0.7.dist-info → alphai-0.1.0.dist-info}/WHEEL +2 -1
- alphai-0.1.0.dist-info/entry_points.txt +2 -0
- alphai-0.1.0.dist-info/top_level.txt +1 -0
- alphai/alphai.py +0 -786
- alphai/api/client.py +0 -0
- alphai/benchmarking/benchmarker.py +0 -37
- alphai/client/__init__.py +0 -0
- alphai/client/client.py +0 -382
- alphai/profilers/__init__.py +0 -0
- alphai/profilers/configs_base.py +0 -7
- alphai/profilers/jax.py +0 -37
- alphai/profilers/pytorch.py +0 -83
- alphai/profilers/pytorch_utils.py +0 -419
- alphai/util.py +0 -19
- alphai-0.0.7.dist-info/LICENSE +0 -201
- alphai-0.0.7.dist-info/METADATA +0 -125
- alphai-0.0.7.dist-info/RECORD +0 -16
alphai/cli.py
ADDED
@@ -0,0 +1,1015 @@
"""Main CLI module for alphai."""

import sys
import time
import signal
import atexit
import webbrowser
from typing import Optional
import click
from rich.console import Console
from rich.prompt import Prompt, Confirm
from rich.progress import Progress, SpinnerColumn, TextColumn
from rich.panel import Panel
from rich.text import Text
import subprocess
import urllib.request
import urllib.error
import questionary

from .client import AlphAIClient
from .config import Config
from .auth import AuthManager
from .docker import DockerManager


console = Console()

# Global cleanup state
_cleanup_state = {
    'container_id': None,
    'tunnel_id': None,
    'project_id': None,
    'client': None,
    'docker_manager': None,
    'cleanup_done': False
}


def _get_frontend_url(api_url: str) -> str:
    """Convert API URL to frontend URL for browser opening."""
    if api_url.startswith("http://localhost") or api_url.startswith("https://localhost"):
        # For local development, assume frontend is on same host without /api
        return api_url.replace("/api", "").rstrip("/")
    elif "runalph.ai" in api_url:
        # For production, convert from runalph.ai/api to .ai
        if "/api" in api_url:
            return api_url.replace("runalph.ai/api", "runalph.ai").rstrip("/")
        else:
            return api_url.replace("runalph.ai", "runalph.ai").rstrip("/")
    else:
        # For other domains, just remove /api suffix
        return api_url.replace("/api", "").rstrip("/")


def _cleanup_handler(signum=None, frame=None):
    """Handle cleanup when script is interrupted."""
    # Check if cleanup has already been done
    if _cleanup_state['cleanup_done']:
        return

    # Check if there's anything to clean up
    if not any(v for k, v in _cleanup_state.items() if k != 'cleanup_done'):
        if signum is not None:  # Called by signal handler
            sys.exit(0)
        return

    console.print("\n[yellow]🔄 Cleaning up resources...[/yellow]")

    try:
        # Clean up container and cloudflared service
        if _cleanup_state['container_id'] and _cleanup_state['docker_manager']:
            _cleanup_state['docker_manager'].cleanup_container_and_tunnel(
                container_id=_cleanup_state['container_id'],
                tunnel_id=_cleanup_state['tunnel_id'],
                project_id=_cleanup_state['project_id'],
                force=True
            )

        # Clean up tunnel and project
        if _cleanup_state['client'] and (_cleanup_state['tunnel_id'] or _cleanup_state['project_id']):
            _cleanup_state['client'].cleanup_tunnel_and_project(
                tunnel_id=_cleanup_state['tunnel_id'],
                project_id=_cleanup_state['project_id'],
                force=True
            )

        console.print("[green]✓ Cleanup completed[/green]")

    except Exception as e:
        console.print(f"[red]Error during cleanup: {e}[/red]")

    # Mark cleanup as done and reset cleanup state
    _cleanup_state.update({
        'container_id': None,
        'tunnel_id': None,
        'project_id': None,
        'client': None,
        'docker_manager': None,
        'cleanup_done': True
    })

    # Only exit if called by signal handler (not by atexit)
    if signum is not None:
        sys.exit(0)


# Register cleanup handler
signal.signal(signal.SIGINT, _cleanup_handler)
signal.signal(signal.SIGTERM, _cleanup_handler)
atexit.register(_cleanup_handler)


@click.group(invoke_without_command=True)
@click.option('--debug', is_flag=True, help='Enable debug mode')
@click.option('--version', is_flag=True, help='Show version information')
@click.pass_context
def main(ctx: click.Context, debug: bool, version: bool) -> None:
    """alphai - A CLI tool for the runalph.ai platform."""

    if version:
        from . import __version__
        console.print(f"alphai version {__version__}")
        return

    # Set up context
    ctx.ensure_object(dict)
    config = Config.load()

    if debug:
        config.debug = True
        config.save()

    ctx.obj['config'] = config
    ctx.obj['client'] = AlphAIClient(config)

    # If no command is provided, show status
    if ctx.invoked_subcommand is None:
        ctx.obj['client'].display_status()


@main.command()
@click.option('--token', help='Bearer token for authentication')
@click.option('--api-url', help='API base URL (optional)')
@click.option('--browser', is_flag=True, help='Use browser-based authentication')
@click.option('--force', is_flag=True, help='Force re-authentication even if already logged in')
@click.pass_context
def login(ctx: click.Context, token: Optional[str], api_url: Optional[str], browser: bool, force: bool) -> None:
    """Authenticate with the runalph.ai API.

    If you're already authenticated, this command will validate your existing
    credentials and exit. Use --force to re-authenticate."""
    config: Config = ctx.obj['config']

    if api_url:
        config.api_url = api_url

    auth_manager = AuthManager(config)

    # Check if already authenticated (unless force is used or token is provided)
    if not force and not token:
        if auth_manager.check_existing_authentication():
            console.print("[green]✓ You are already logged in![/green]")
            console.print("[dim]Use 'alphai login --force' to re-authenticate or 'alphai status' to view details[/dim]")
            return

    if token:
        # Use provided token
        success = auth_manager.login_with_token(token)
    elif browser:
        # Use browser login
        success = auth_manager.browser_login()
    else:
        # Interactive login (will offer browser as default option)
        success = auth_manager.interactive_login()

    if success:
        config.save()
        console.print("[green]✓ Successfully logged in![/green]")

        # Test the connection
        client = AlphAIClient(config)
        if client.test_connection():
            console.print("[green]✓ Connection to API verified[/green]")
        else:
            console.print("[yellow]⚠ Warning: Could not verify API connection[/yellow]")
    else:
        console.print("[red]✗ Login failed[/red]")
        sys.exit(1)


@main.command()
@click.pass_context
def logout(ctx: click.Context) -> None:
    """Log out and clear authentication credentials."""
    config: Config = ctx.obj['config']

    if not config.bearer_token:
        console.print("[yellow]Already logged out[/yellow]")
        return

    if Confirm.ask("Are you sure you want to log out?"):
        config.clear_bearer_token()
        config.current_org = None
        config.current_project = None
        config.save()
        console.print("[green]✓ Successfully logged out[/green]")


@main.command()
@click.pass_context
def status(ctx: click.Context) -> None:
    """Show current configuration and authentication status."""
    client: AlphAIClient = ctx.obj['client']
    client.display_status()


@main.group()
@click.pass_context
def orgs(ctx: click.Context) -> None:
    """Manage organizations."""
    pass


@orgs.command('list')
@click.pass_context
def orgs_list(ctx: click.Context) -> None:
    """List all organizations."""
    client: AlphAIClient = ctx.obj['client']

    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        console=console
    ) as progress:
        task = progress.add_task("Fetching organizations...", total=None)
        orgs = client.get_organizations()
        progress.update(task, completed=1)

    client.display_organizations(orgs)


@orgs.command('create')
@click.option('--name', required=True, help='Organization name')
@click.option('--description', help='Organization description')
@click.pass_context
def orgs_create(ctx: click.Context, name: str, description: Optional[str]) -> None:
    """Create a new organization."""
    client: AlphAIClient = ctx.obj['client']

    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        console=console
    ) as progress:
        task = progress.add_task(f"Creating organization '{name}'...", total=None)
        org = client.create_organization(name, description)
        progress.update(task, completed=1)

    if org:
        console.print(f"[green]Organization ID: {org.get('id', 'N/A')}[/green]")


@orgs.command('select')
@click.argument('org_id')
@click.pass_context
def orgs_select(ctx: click.Context, org_id: str) -> None:
    """Select an organization as the current context."""
    config: Config = ctx.obj['config']
    config.current_org = org_id
    config.save()
    console.print(f"[green]✓ Selected organization: {org_id}[/green]")


@main.group()
@click.pass_context
def projects(ctx: click.Context) -> None:
    """Manage projects."""
    pass


@projects.command('list')
@click.option('--org', help='Organization ID to filter by')
@click.pass_context
def projects_list(ctx: click.Context, org: Optional[str]) -> None:
    """List all projects."""
    client: AlphAIClient = ctx.obj['client']
    config: Config = ctx.obj['config']

    # Use provided org or current org
    org_id = org or config.current_org

    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        console=console
    ) as progress:
        task = progress.add_task("Fetching projects...", total=None)
        projects = client.get_projects(org_id)
        progress.update(task, completed=1)

    client.display_projects(projects)


@projects.command('select')
@click.argument('project_id')
@click.pass_context
def projects_select(ctx: click.Context, project_id: str) -> None:
    """Select a project as the current context."""
    config: Config = ctx.obj['config']
    config.current_project = project_id
    config.save()
    console.print(f"[green]✓ Selected project: {project_id}[/green]")


@main.command()
#@click.option('--image', default="runalph/ai:latest", required=True, help='Docker image to run')
@click.option('--image', default="quay.io/jupyter/datascience-notebook:latest", required=True, help='Docker image to run')
@click.option('--app-port', default=5000, help='Application port (default: 5000)')
@click.option('--jupyter-port', default=8888, help='Jupyter port (default: 8888)')
@click.option('--name', help='Container name')
@click.option('--env', multiple=True, help='Environment variables (format: KEY=VALUE)')
@click.option('--volume', multiple=True, help='Volume mounts (format: HOST_PATH:CONTAINER_PATH)')
@click.option('--detach', '-d', is_flag=True, help='Run container in background')
@click.option('--local', is_flag=True, help='Run locally only (no tunnel creation)')
@click.option('--org', help='Organization slug for tunnel (interactive selection if not provided)')
@click.option('--project', help='Project name for tunnel (interactive selection if not provided)')
@click.option('--command', help='Custom command to run in container (overrides default)')
@click.option('--ensure-jupyter', is_flag=True, help='Ensure Jupyter is running (auto-start if needed)')
@click.pass_context
def run(
    ctx: click.Context,
    image: str,
    app_port: int,
    jupyter_port: int,
    name: Optional[str],
    env: tuple,
    volume: tuple,
    detach: bool,
    local: bool,
    org: Optional[str],
    project: Optional[str],
    command: Optional[str],
    ensure_jupyter: bool
) -> None:
    """Launch and manage local Docker containers with tunnel setup (default) or local-only mode."""
    config: Config = ctx.obj['config']
    client: AlphAIClient = ctx.obj['client']
    docker_manager = DockerManager(console)

    # Tunnel is default behavior unless --local is specified
    tunnel = not local

    # Validate tunnel requirements
    if tunnel:
        if not config.bearer_token:
            console.print("[red]Error: Authentication required for tunnel creation. Please run 'alphai login' first.[/red]")
            console.print("[yellow]Tip: Use --local flag to run without tunnel creation[/yellow]")
            sys.exit(1)

        # Interactive selection for org if not provided
        if not org:
            console.print("[yellow]No organization specified. Please select one:[/yellow]")

            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                console=console
            ) as progress:
                task = progress.add_task("Fetching organizations...", total=None)
                orgs_data = client.get_organizations()
                progress.update(task, completed=1)

            if not orgs_data or len(orgs_data) == 0:
                console.print("[red]No organizations found. Please create one first.[/red]")
                sys.exit(1)

            # Create choices for questionary
            org_choices = []
            for org_data in orgs_data:
                display_name = f"{org_data.name} ({org_data.slug})"
                org_choices.append(questionary.Choice(title=display_name, value=org_data.slug))

            # Interactive selection with arrow keys
            selected_org_slug = questionary.select(
                "Select organization (use ↑↓ arrows and press Enter):",
                choices=org_choices,
                style=questionary.Style([
                    ('question', 'bold'),
                    ('pointer', 'fg:#673ab7 bold'),
                    ('highlighted', 'fg:#673ab7 bold'),
                    ('selected', 'fg:#cc5454'),
                    ('instruction', 'fg:#888888 italic')
                ])
            ).ask()

            if not selected_org_slug:
                console.print("[red]No organization selected. Exiting.[/red]")
                sys.exit(1)

            org = selected_org_slug
            # Find the org name for display
            selected_org_name = next((o.name for o in orgs_data if o.slug == org), org)
            console.print(f"[green]✓ Selected organization: {selected_org_name} ({org})[/green]")

        # Interactive input for project name if not provided
        if not project:
            console.print("[yellow]No project specified. Please enter a project name:[/yellow]")

            # Direct project name input for run command
            while True:
                project = Prompt.ask("Enter project name")
                if project and project.strip():
                    project = project.strip()
                    console.print(f"[green]✓ Will create project: {project}[/green]")
                    break
                else:
                    console.print("[red]Project name cannot be empty[/red]")

        # Auto-enable ensure-jupyter for tunnel mode
        ensure_jupyter = True

    # Generate Jupyter token upfront if we'll need it
    jupyter_token = None
    if ensure_jupyter or tunnel:
        jupyter_token = docker_manager.generate_jupyter_token()
        console.print(f"[cyan]Generated Jupyter token: {jupyter_token[:12]}...[/cyan]")

    # Parse environment variables
    env_vars = {}
    for e in env:
        if '=' in e:
            key, value = e.split('=', 1)
            env_vars[key] = value
        else:
            console.print(f"[yellow]Warning: Invalid environment variable format: {e}[/yellow]")

    # Parse volume mounts
    volumes = {}
    for v in volume:
        if ':' in v:
            host_path, container_path = v.split(':', 1)
            volumes[host_path] = container_path
        else:
            console.print(f"[yellow]Warning: Invalid volume format: {v}[/yellow]")

    # Generate Jupyter startup command if needed
    startup_command = None
    if command:
        startup_command = command
    elif ensure_jupyter or tunnel:
        # When we need to control Jupyter token, always override the image's CMD/ENTRYPOINT
        # This ensures we can start Jupyter with our own token regardless of image type
        startup_command = "tail -f /dev/null"  # Keep container alive
        console.print(f"[yellow]Using keep-alive command to control Jupyter startup[/yellow]")
    else:
        # For images without custom command, keep them alive so we can interact with them
        startup_command = "tail -f /dev/null"
        console.print(f"[yellow]Keeping container alive for interactive use[/yellow]")

    # Start the container
    container = docker_manager.run_container(
        image=image,
        name=name,
        ports={app_port: app_port, jupyter_port: jupyter_port},
        environment=env_vars,
        volumes=volumes,
        detach=True,  # Always detach when using tunnel
        command=startup_command
    )

    if not container:
        console.print("[red]Failed to start container[/red]")
        sys.exit(1)

    console.print(f"[green]✓ Container started[/green]")
    console.print(f"[blue]Container ID: {container.id[:12]}[/blue]")

    # Store cleanup state for signal handling
    _cleanup_state.update({
        'container_id': container.id,
        'client': client,
        'docker_manager': docker_manager,
        'cleanup_done': False
    })

    # Verify container is actually running
    time.sleep(2)  # Give container a moment to start

    if not docker_manager.is_container_running(container.id):
        status = docker_manager.get_container_status(container.id)
        console.print(f"[red]Container failed to start or exited immediately[/red]")
        console.print(f"[red]Status: {status}[/red]")

        # Show container logs for debugging
        logs = docker_manager.get_container_logs(container.id, tail=20)
        if logs:
            console.print(f"[yellow]Container logs:[/yellow]")
            console.print(f"[dim]{logs}[/dim]")

        sys.exit(1)

    console.print(f"[green]✓ Container is running[/green]")

    # Install and ensure Jupyter is running if requested
    if ensure_jupyter:
        # Check if Jupyter is already installed
        if not _is_jupyter_installed(docker_manager, container.id):
            console.print("[yellow]Installing Jupyter in container...[/yellow]")
            if not _install_jupyter_in_container(docker_manager, container.id):
                console.print("[red]Failed to install Jupyter[/red]")
                sys.exit(1)
        else:
            console.print("[green]✓ Jupyter is already installed[/green]")

        # Start Jupyter with our controlled token
        success, actual_token = docker_manager.ensure_jupyter_running(
            container.id,
            jupyter_port,
            jupyter_token,
            force_restart=True  # Always force restart since we overrode the entrypoint
        )
        if not success:
            console.print("[yellow]⚠ Jupyter may not be running - tunnel token extraction might fail[/yellow]")
        else:
            # Update our token if it was generated by the method
            if actual_token and not jupyter_token:
                jupyter_token = actual_token

    if tunnel:
        # Create tunnel
        console.print("[yellow]Creating tunnel...[/yellow]")

        # Create tunnel with Jupyter token
        tunnel_data = client.create_tunnel_with_project(
            org_slug=org,
            project_name=project,
            app_port=app_port,
            jupyter_port=jupyter_port,
            jupyter_token=jupyter_token
        )

        if not tunnel_data:
            console.print("[red]Failed to create tunnel[/red]")
            sys.exit(1)

        # Store tunnel and project IDs for cleanup
        _cleanup_state.update({
            'tunnel_id': tunnel_data.id,
            'project_id': tunnel_data.project_data.id if tunnel_data.project_data and hasattr(tunnel_data.project_data, 'id') else None
        })

        # Check if cloudflared is already installed, install if needed
        if not _is_cloudflared_installed(docker_manager, container.id):
            console.print("[yellow]Installing cloudflared in container...[/yellow]")
            if not docker_manager.install_cloudflared_in_container(container.id):
                console.print("[yellow]Warning: cloudflared installation failed, but container is running[/yellow]")
                return
        else:
            console.print("[green]✓ cloudflared is already installed[/green]")

        # Set up tunnel service using the cloudflared token
        cloudflared_token = tunnel_data.cloudflared_token if hasattr(tunnel_data, 'cloudflared_token') else tunnel_data.cloudflared_token
        if docker_manager.setup_tunnel_in_container(container.id, cloudflared_token):
            console.print("\n[bold green]🎉 Container with tunnel setup complete![/bold green]")

            # Create a nice summary panel
            summary_content = []
            summary_content.append(f"[bold]Container ID:[/bold] {container.id[:12]}")
            summary_content.append(f"[bold]Tunnel ID:[/bold] {tunnel_data.id}")
            summary_content.append("")
            summary_content.append("[bold blue]Local Access:[/bold blue]")
            summary_content.append(f" • App: http://localhost:{app_port}")
            if jupyter_token:
                summary_content.append(f" • Jupyter: http://localhost:{jupyter_port}?token={jupyter_token}")
            else:
                summary_content.append(f" • Jupyter: http://localhost:{jupyter_port}")
            summary_content.append("")
            summary_content.append("[bold green]Public Access:[/bold green]")
            summary_content.append(f" • App: {tunnel_data.app_url}")
            if jupyter_token:
                summary_content.append(f" • Jupyter: {tunnel_data.jupyter_url}?token={jupyter_token}")
            else:
                summary_content.append(f" • Jupyter: {tunnel_data.jupyter_url}")
            summary_content.append("")
            if jupyter_token:
                summary_content.append("[bold cyan]Jupyter Token:[/bold cyan]")
                summary_content.append(f" {jupyter_token}")
                summary_content.append("")
            summary_content.append("[bold yellow]Management:[/bold yellow]")
            summary_content.append(f" • Stop container: docker stop {container.id[:12]}")
            summary_content.append(f" • View logs: docker logs {container.id[:12]}")
            summary_content.append(f" • Delete tunnel: alphai tunnels delete {tunnel_data.id}")
            summary_content.append(f" • Full cleanup: alphai cleanup {container.id[:12]} --tunnel-id {tunnel_data.id}")
            summary_content.append("")
            summary_content.append("[bold cyan]Quick Cleanup:[/bold cyan]")
            summary_content.append(" • Press Ctrl+C to automatically cleanup all resources")

            panel = Panel(
                "\n".join(summary_content),
                title="🚀 Deployment Summary",
                title_align="left",
                border_style="green"
            )
            console.print(panel)

            # Wait for tunnel URLs to become available
            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                console=console
            ) as progress:
                task = progress.add_task("Waiting for tunnel URLs to become available...", total=None)
                #_wait_for_tunnel_ready(tunnel_data)
                time.sleep(5)
                progress.update(task, completed=1)

            # Open browser to the project page
            frontend_url = _get_frontend_url(config.api_url)
            project_url = f"{frontend_url}/{org}/{project}"
            console.print(f"\n[cyan]🌐 Opening browser to: {project_url}[/cyan]")
            try:
                webbrowser.open(project_url)
            except Exception as e:
                console.print(f"[yellow]Warning: Could not open browser automatically: {e}[/yellow]")
                console.print(f"[yellow]Please manually visit: {project_url}[/yellow]")
        else:
            console.print("[yellow]Warning: Tunnel service setup failed, but container is running[/yellow]")
    else:
        # Non-tunnel mode - just display local URLs
        console.print(f"[blue]Application: http://localhost:{app_port}[/blue]")
        if jupyter_token:
            console.print(f"[blue]Jupyter: http://localhost:{jupyter_port}?token={jupyter_token}[/blue]")
            console.print(f"[dim]Jupyter Token: {jupyter_token}[/dim]")
        else:
            console.print(f"[blue]Jupyter: http://localhost:{jupyter_port}[/blue]")
            console.print(f"[dim]Check container logs for Jupyter token: docker logs {container.id[:12]}[/dim]")

    console.print(f"\n[bold yellow]Cleanup:[/bold yellow]")
    console.print(f" • Stop container: docker stop {container.id[:12]}")
    console.print(f" • Quick cleanup: alphai cleanup {container.id[:12]}")
    console.print(f" • Press Ctrl+C to automatically stop and remove container")

    if not detach:
        console.print(f"[dim]Container is running in background. Use 'docker logs {container.id[:12]}' to view logs.[/dim]")

        # Keep the process running and wait for Ctrl+C for cleanup
        try:
            console.print(f"\n[bold green]🎯 Container is running! Press Ctrl+C to cleanup all resources.[/bold green]")
            # Keep the main process alive to handle signals
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            # Signal handler will take care of cleanup and exit
            pass


def _wait_for_tunnel_ready(tunnel_data, timeout_seconds: int = 30) -> bool:
    """Wait for Jupyter tunnel URL to become available."""
    import time
    import urllib.request
    import urllib.error

    jupyter_url = tunnel_data.jupyter_url
    console.print(f"[yellow]🔄 Checking if {jupyter_url} is ready...[/yellow]")

    start_time = time.time()
    while time.time() - start_time < timeout_seconds:
        try:
            # Simple HEAD request with short timeout
            req = urllib.request.Request(jupyter_url)
            with urllib.request.urlopen(req, timeout=5) as response:
                console.print(f"[green]✅ Jupyter tunnel is ready! ({response.status})[/green]")
                time.sleep(2)
                return True
        except (urllib.error.URLError, urllib.error.HTTPError, OSError):
            # Any response (even errors) means the route exists
            pass
        except Exception:
            # Still not ready, continue waiting
            pass

        time.sleep(2)

    console.print(f"[yellow]⚠ Tunnel check timed out after {timeout_seconds}s[/yellow]")
    return False


def _is_jupyter_installed(docker_manager, container_id: str) -> bool:
    """Check if Jupyter is actually installed in the container."""
    try:
        # Try to check if jupyter command exists
        result = subprocess.run(
            ["docker", "exec", container_id, "which", "jupyter"],
            capture_output=True,
            text=True,
            timeout=10
        )

        if result.returncode == 0:
            return True

        # Also try checking for jupyter-lab specifically
        result = subprocess.run(
            ["docker", "exec", container_id, "which", "jupyter-lab"],
            capture_output=True,
            text=True,
            timeout=10
        )

        return result.returncode == 0

    except Exception as e:
        console.print(f"[yellow]Warning: Could not check Jupyter installation: {e}[/yellow]")
        return False


def _is_cloudflared_installed(docker_manager, container_id: str) -> bool:
    """Check if cloudflared is actually installed in the container."""
    try:
        # Try to check if cloudflared command exists
        result = subprocess.run(
            ["docker", "exec", container_id, "which", "cloudflared"],
            capture_output=True,
            text=True,
            timeout=10
        )

        return result.returncode == 0

    except Exception as e:
        console.print(f"[yellow]Warning: Could not check cloudflared installation: {e}[/yellow]")
        return False


def _install_jupyter_in_container(docker_manager, container_id: str) -> bool:
    """Install Jupyter in a container that doesn't have it."""
    # Detect package manager
    package_manager = docker_manager._detect_package_manager(container_id)

    if not package_manager:
        console.print("[red]Could not detect package manager for Jupyter installation[/red]")
        return False

    try:
        if package_manager in ['apt', 'apt-get']:
            install_commands = [
                "apt-get update",
                "apt-get install -y python3-pip",
                "pip3 install jupyter jupyterlab"
            ]
        elif package_manager in ['yum', 'dnf']:
            install_commands = [
                f"{package_manager} update -y",
                f"{package_manager} install -y python3-pip",
                "pip3 install jupyter jupyterlab"
            ]
        elif package_manager == 'apk':
            install_commands = [
                "apk update",
                "apk add --no-cache python3 py3-pip",
                "pip3 install jupyter jupyterlab"
            ]
        else:
            # Try generic approach
            install_commands = [
                "pip3 install jupyter jupyterlab"
            ]

        for cmd in install_commands:
            result = subprocess.run(
                ["docker", "exec", "--user", "root", container_id, "bash", "-c", cmd],
                capture_output=True,
                text=True,
                timeout=120  # Longer timeout for installations
            )

            if result.returncode != 0:
                console.print(f"[red]Failed to run: {cmd}[/red]")
                console.print(f"[red]Error: {result.stderr}[/red]")
                return False

        console.print("[green]✓ Jupyter installed successfully[/green]")
        return True

    except Exception as e:
        console.print(f"[red]Error installing Jupyter: {e}[/red]")
        return False


@main.group()
@click.pass_context
def tunnels(ctx: click.Context) -> None:
    """Manage tunnels."""
    pass


@tunnels.command('create')
@click.option('--org', required=True, help='Organization slug')
@click.option('--project', required=True, help='Project name')
@click.option('--app-port', default=5000, help='Application port (default: 5000)')
@click.option('--jupyter-port', default=8888, help='Jupyter port (default: 8888)')
@click.option('--project-only', is_flag=True, help='Create project only, skip tunnel creation')
@click.pass_context
def tunnels_create(
    ctx: click.Context,
    org: str,
    project: str,
    app_port: int,
    jupyter_port: int,
    project_only: bool
) -> None:
    """Create a new tunnel and associated project."""
    client: AlphAIClient = ctx.obj['client']

    if project_only:
        # Create project only
        org_data = client.get_organization_by_slug(org)
        if not org_data:
            console.print(f"[red]Organization with slug '{org}' not found[/red]")
            sys.exit(1)

        project_data = client.create_project(
            name=project,
            organization_id=org_data.id,
            port=app_port
        )

        if project_data:
            console.print(f"[green]✓ Project '{project}' created successfully[/green]")
    else:
        # Create tunnel and project
        tunnel_data = client.create_tunnel_with_project(
            org_slug=org,
            project_name=project,
            app_port=app_port,
            jupyter_port=jupyter_port
        )

        if tunnel_data:
            console.print(f"\n[bold]Manual Setup Command:[/bold]")
            cloudflared_token = tunnel_data.cloudflared_token if hasattr(tunnel_data, 'cloudflared_token') else tunnel_data.token
            console.print(f"[green]cloudflared service install {cloudflared_token}[/green]")
            console.print(f"\n[dim]Copy the above command to set up cloudflared manually in your container[/dim]")
            console.print(f"[dim]Note: Add Jupyter token to project after starting your container[/dim]")


@tunnels.command('get')
@click.argument('tunnel_id')
@click.pass_context
def tunnels_get(ctx: click.Context, tunnel_id: str) -> None:
    """Get tunnel information."""
    client: AlphAIClient = ctx.obj['client']

    tunnel_data = client.get_tunnel(tunnel_id)
    if tunnel_data:
        console.print(f"[bold]Tunnel ID:[/bold] {tunnel_data.id}")
        console.print(f"[bold]Name:[/bold] {tunnel_data.name}")
        console.print(f"[bold]App URL:[/bold] {tunnel_data.app_url}")
        console.print(f"[bold]Jupyter URL:[/bold] {tunnel_data.jupyter_url}")
        console.print(f"[bold]Created:[/bold] {tunnel_data.created_at}")
    else:
        console.print("[red]Tunnel not found[/red]")


@tunnels.command('delete')
@click.argument('tunnel_id')
@click.option('--force', is_flag=True, help='Skip confirmation')
@click.pass_context
def tunnels_delete(ctx: click.Context, tunnel_id: str, force: bool) -> None:
    """Delete a tunnel."""
    client: AlphAIClient = ctx.obj['client']

    if not force:
        if not Confirm.ask(f"Are you sure you want to delete tunnel {tunnel_id}?"):
            console.print("[yellow]Cancelled[/yellow]")
            return

    client.delete_tunnel(tunnel_id)


@main.command()
@click.argument('container_id')
@click.option('--tunnel-id', help='Tunnel ID to delete')
@click.option('--project-id', help='Project ID to delete')
@click.option('--force', is_flag=True, help='Skip confirmation and force cleanup')
@click.option('--containers-only', is_flag=True, help='Only cleanup container and cloudflared service, skip tunnel/project deletion')
@click.pass_context
def cleanup(
    ctx: click.Context,
    container_id: str,
    tunnel_id: Optional[str],
    project_id: Optional[str],
    force: bool,
    containers_only: bool
) -> None:
    """Clean up containers, tunnels, and projects created by alphai run.

    This command performs comprehensive cleanup by:
    1. Uninstalling cloudflared service from the container
    2. Stopping and removing the Docker container
    3. Deleting the tunnel (unless --containers-only is used)
    4. Deleting the project (unless --containers-only is used)

    Note: Project deletion has SDK limitations and may require manual cleanup
    via the web interface for specific project IDs.

    Examples:
        alphai cleanup abc123456789                    # Container only
        alphai cleanup abc123456789 --tunnel-id xyz    # Container + tunnel
        alphai cleanup abc123456789 --force            # Skip confirmations
    """
    config: Config = ctx.obj['config']
    client: AlphAIClient = ctx.obj['client']
    docker_manager = DockerManager(console)

    # Confirmation unless force is used
    if not force:
        cleanup_items = [f"Container {container_id[:12]}"]
        if tunnel_id and not containers_only:
            cleanup_items.append(f"Tunnel {tunnel_id}")
        if project_id and not containers_only:
            cleanup_items.append(f"Project {project_id}")

        console.print(f"[yellow]Will cleanup: {', '.join(cleanup_items)}[/yellow]")
        if not Confirm.ask("Continue with cleanup?"):
            console.print("[yellow]Cancelled[/yellow]")
            return

    console.print("[bold]🔄 Starting cleanup process...[/bold]")

    # Step 1: Container and cloudflared cleanup
    success = docker_manager.cleanup_container_and_tunnel(
        container_id=container_id,
        tunnel_id=tunnel_id,
        project_id=project_id,
        force=force
    )

    # Step 2: API cleanup (unless containers-only)
    if not containers_only and (tunnel_id or project_id):
        if not config.bearer_token:
            console.print("[yellow]Warning: No authentication token - skipping tunnel/project cleanup[/yellow]")
            console.print("[dim]Run 'alphai login' to enable tunnel/project cleanup[/dim]")
        else:
            api_success = client.cleanup_tunnel_and_project(
                tunnel_id=tunnel_id,
                project_id=project_id,
                force=force
            )
            success = success and api_success

    # Summary
    if success:
        console.print("\n[bold green]✅ Cleanup completed successfully![/bold green]")
    else:
        console.print("\n[bold yellow]⚠ Cleanup completed with warnings[/bold yellow]")
        console.print("[dim]Check the output above for details[/dim]")


@main.group()
@click.pass_context
def config(ctx: click.Context) -> None:
    """Manage configuration settings."""
    pass


@config.command('show')
@click.pass_context
def config_show(ctx: click.Context) -> None:
    """Show current configuration."""
    client: AlphAIClient = ctx.obj['client']
    client.display_status()


@config.command('set')
@click.argument('key')
@click.argument('value')
@click.pass_context
def config_set(ctx: click.Context, key: str, value: str) -> None:
    """Set a configuration value."""
    config: Config = ctx.obj['config']

    valid_keys = {'api_url', 'debug', 'current_org', 'current_project'}

    if key not in valid_keys:
        console.print(f"[red]Invalid configuration key. Valid keys: {', '.join(valid_keys)}[/red]")
        sys.exit(1)

    # Convert string values to appropriate types
    if key == 'debug':
        value = value.lower() in ('true', '1', 'yes', 'on')

    setattr(config, key, value)
    config.save()
    console.print(f"[green]✓ Set {key} = {value}[/green]")


@config.command('reset')
@click.pass_context
def config_reset(ctx: click.Context) -> None:
    """Reset configuration to defaults."""
    if Confirm.ask("Are you sure you want to reset all configuration to defaults?"):
        config_file = Config.get_config_file()
        if config_file.exists():
            config_file.unlink()

        # Clear keyring
        config = Config()
        config.clear_bearer_token()

        console.print("[green]✓ Configuration reset to defaults[/green]")


if __name__ == '__main__':
    main()