aws-bootstrap-g4dn 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- aws_bootstrap/cli.py +348 -96
- aws_bootstrap/ec2.py +9 -8
- aws_bootstrap/output.py +106 -0
- aws_bootstrap/ssh.py +21 -20
- aws_bootstrap/tests/test_cli.py +280 -0
- aws_bootstrap/tests/test_output.py +192 -0
- {aws_bootstrap_g4dn-0.6.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/METADATA +27 -1
- {aws_bootstrap_g4dn-0.6.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/RECORD +12 -10
- {aws_bootstrap_g4dn-0.6.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/WHEEL +0 -0
- {aws_bootstrap_g4dn-0.6.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/entry_points.txt +0 -0
- {aws_bootstrap_g4dn-0.6.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {aws_bootstrap_g4dn-0.6.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/top_level.txt +0 -0
aws_bootstrap/ec2.py
CHANGED

@@ -7,6 +7,7 @@ import botocore.exceptions
import click

from .config import LaunchConfig
+from .output import echo, is_text, secho


EBS_DEVICE_NAME = "/dev/sdf"

@@ -81,7 +82,7 @@ def ensure_security_group(ec2_client, name: str, tag_value: str, ssh_port: int =
    if existing["SecurityGroups"]:
        sg_id = existing["SecurityGroups"][0]["GroupId"]
        msg = " Security group " + click.style(f"'{name}'", fg="bright_white")
+        echo(msg + f" already exists ({sg_id}), reusing.")
        return sg_id

    # Create new SG

@@ -113,7 +114,7 @@ def ensure_security_group(ec2_client, name: str, tag_value: str, ssh_port: int =
            }
        ],
    )
+    secho(f" Created security group '{name}' ({sg_id}) with SSH ingress.", fg="green")
    return sg_id


@@ -163,8 +164,8 @@ def launch_instance(ec2_client, config: LaunchConfig, ami_id: str, sg_id: str) -
        if code in ("MaxSpotInstanceCountExceeded", "VcpuLimitExceeded"):
            _raise_quota_error(code, config)
        elif code in ("InsufficientInstanceCapacity", "SpotMaxPriceTooLow") and config.spot:
-            if click.confirm(" Retry as on-demand instance?"):
+            secho(f"\n Spot request failed: {e.response['Error']['Message']}", fg="yellow")
+            if not is_text() or click.confirm(" Retry as on-demand instance?"):
                launch_params.pop("InstanceMarketOptions", None)
                try:
                    response = ec2_client.run_instances(**launch_params)

@@ -329,15 +330,15 @@ def terminate_tagged_instances(ec2_client, instance_ids: list[str]) -> list[dict

def wait_instance_ready(ec2_client, instance_id: str) -> dict:
    """Wait for the instance to be running and pass status checks."""
+    echo(" Waiting for instance " + click.style(instance_id, fg="bright_white") + " to enter 'running' state...")
    waiter = ec2_client.get_waiter("instance_running")
    waiter.wait(InstanceIds=[instance_id], WaiterConfig={"Delay": 10, "MaxAttempts": 60})
+    secho(" Instance running.", fg="green")

+    echo(" Waiting for instance status checks to pass...")
    waiter = ec2_client.get_waiter("instance_status_ok")
    waiter.wait(InstanceIds=[instance_id], WaiterConfig={"Delay": 15, "MaxAttempts": 60})
+    secho(" Status checks passed.", fg="green")

    # Refresh instance info to get public IP
    desc = ec2_client.describe_instances(InstanceIds=[instance_id])

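Illustrative note (not part of the diff): the spot-fallback hunk above gates the interactive confirm on the output mode, so that in structured (non-text) output, where there is no prompt to answer, the on-demand retry proceeds automatically. Below is a minimal standalone sketch of that gating pattern; should_retry_on_demand and the string-based _is_text stand-in are invented for the example and are not part of the package.

import click


def _is_text(output_format: str) -> bool:
    # Stand-in for aws_bootstrap.output.is_text(), which reads the format
    # from the click context instead of taking an argument.
    return output_format == "text"


def should_retry_on_demand(output_format: str) -> bool:
    # Non-text modes skip the prompt and fall back automatically;
    # text mode asks the user first, as launch_instance() does above.
    return not _is_text(output_format) or click.confirm("  Retry as on-demand instance?")
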
aws_bootstrap/output.py
ADDED

@@ -0,0 +1,106 @@
"""Output formatting for structured CLI output (JSON, YAML, table, text)."""

from __future__ import annotations

import json
from datetime import datetime
from enum import StrEnum
from pathlib import Path
from typing import Any

import click


class OutputFormat(StrEnum):
    TEXT = "text"
    JSON = "json"
    YAML = "yaml"
    TABLE = "table"


def get_format(ctx: click.Context | None = None) -> OutputFormat:
    """Return the current output format from the click context."""
    if ctx is None:
        ctx = click.get_current_context(silent=True)
    if ctx is None or ctx.obj is None:
        return OutputFormat.TEXT
    return ctx.obj.get("output_format", OutputFormat.TEXT)


def is_text(ctx: click.Context | None = None) -> bool:
    """Return True if the current output format is text (default)."""
    return get_format(ctx) == OutputFormat.TEXT


def _default_serializer(obj: Any) -> Any:
    """JSON serializer for objects not serializable by default."""
    if isinstance(obj, datetime):
        return obj.isoformat()
    if isinstance(obj, Path):
        return str(obj)
    raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")


def emit(data: dict | list, *, headers: dict[str, str] | None = None, ctx: click.Context | None = None) -> None:
    """Emit structured data in the configured output format.

    For JSON/YAML: serializes the data directly.
    For TABLE: renders using tabulate. If *data* is a list of dicts, uses
    *headers* mapping ``{dict_key: column_label}`` for column selection/ordering.
    If *data* is a single dict, renders as key-value pairs.
    """
    fmt = get_format(ctx)

    if fmt == OutputFormat.JSON:
        click.echo(json.dumps(data, indent=2, default=_default_serializer))
        return

    if fmt == OutputFormat.YAML:
        import yaml  # noqa: PLC0415

        # Convert datetime/Path objects before YAML dump
        prepared = json.loads(json.dumps(data, default=_default_serializer))
        click.echo(yaml.dump(prepared, default_flow_style=False, sort_keys=False).rstrip())
        return

    if fmt == OutputFormat.TABLE:
        from tabulate import tabulate  # noqa: PLC0415

        table_data = data
        # Unwrap dict-wrapped lists (e.g. {"instances": [...]}) for table rendering
        if isinstance(data, dict) and headers:
            for v in data.values():
                if isinstance(v, list):
                    table_data = v
                    break

        if isinstance(table_data, list) and table_data and isinstance(table_data[0], dict):
            if headers:
                keys = list(headers.keys())
                col_labels = list(headers.values())
                rows = [[row.get(k, "") for k in keys] for row in table_data]
            else:
                col_labels = list(table_data[0].keys())
                keys = col_labels
                rows = [[row.get(k, "") for k in keys] for row in table_data]
            click.echo(tabulate(rows, headers=col_labels, tablefmt="simple"))
        elif isinstance(table_data, dict):
            rows = [[k, v] for k, v in table_data.items()]
            click.echo(tabulate(rows, headers=["Key", "Value"], tablefmt="simple"))
        elif isinstance(table_data, list):
            # Empty list
            click.echo("(no data)")
        return

    # TEXT format: emit() is a no-op in text mode (text output is handled inline)


def echo(msg: str = "", **kwargs: Any) -> None:
    """Wrap ``click.echo``; silent in non-text output modes."""
    if is_text():
        click.echo(msg, **kwargs)


def secho(msg: str = "", **kwargs: Any) -> None:
    """Wrap ``click.secho``; silent in non-text output modes."""
    if is_text():
        click.secho(msg, **kwargs)

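Illustrative usage (not part of the diff): a minimal sketch of how a command might wire these helpers together, based on the API above and on the group-level -o/--output option exercised by the tests. The "volumes" command and its data are invented for the example; the real cli.py presumably stores the parsed format on the group's context object, since get_format() reads ctx.obj["output_format"], and this standalone command does the equivalent inline.

import click

from aws_bootstrap.output import OutputFormat, echo, emit


@click.command()
@click.option("-o", "--output", "output_format", type=click.Choice([f.value for f in OutputFormat]), default="text")
@click.pass_context
def volumes(ctx: click.Context, output_format: str) -> None:
    # Make the chosen format visible to echo()/emit() via the click context.
    ctx.obj = {"output_format": OutputFormat(output_format)}
    echo("  Looking up volumes...")  # progress line, printed only in text mode
    data = {"volumes": [{"volume_id": "vol-0abc", "size_gib": 100, "state": "in-use"}]}
    # headers maps dict keys to column labels for table mode; JSON/YAML emit the dict as-is.
    emit(data, headers={"volume_id": "Volume ID", "size_gib": "Size (GiB)", "state": "State"})
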
aws_bootstrap/ssh.py
CHANGED

@@ -13,6 +13,7 @@ from pathlib import Path
import click

from .gpu import _GPU_ARCHITECTURES, GpuInfo
+from .output import echo, secho


# ---------------------------------------------------------------------------

@@ -54,7 +55,7 @@ def import_key_pair(ec2_client, key_name: str, key_path: Path) -> str:
    # Check if key pair already exists
    try:
        existing = ec2_client.describe_key_pairs(KeyNames=[key_name])
+        echo(" Key pair " + click.style(f"'{key_name}'", fg="bright_white") + " already exists, reusing.")
        return existing["KeyPairs"][0]["KeyName"]
    except ec2_client.exceptions.ClientError as e:
        if "InvalidKeyPair.NotFound" not in str(e):

@@ -70,7 +71,7 @@ def import_key_pair(ec2_client, key_name: str, key_path: Path) -> str:
            }
        ],
    )
+    secho(f" Imported key pair '{key_name}' from {key_path}", fg="green")
    return key_name


@@ -88,7 +89,7 @@ def wait_for_ssh(host: str, user: str, key_path: Path, retries: int = 30, delay:
            sock = socket.create_connection((host, port), timeout=5)
            sock.close()
        except (TimeoutError, ConnectionRefusedError, OSError):
+            echo(" SSH not ready " + click.style(f"(attempt {attempt}/{retries})", dim=True) + ", waiting...")
            time.sleep(delay)
            continue

@@ -106,10 +107,10 @@ def wait_for_ssh(host: str, user: str, key_path: Path, retries: int = 30, delay:
        ]
        result = subprocess.run(cmd, capture_output=True, text=True)
        if result.returncode == 0:
+            secho(" SSH connection established.", fg="green")
            return True

+        echo(" SSH not ready " + click.style(f"(attempt {attempt}/{retries})", dim=True) + ", waiting...")
        time.sleep(delay)

    return False

@@ -125,89 +126,89 @@ def run_remote_setup(
    requirements_path = script_path.parent / "requirements.txt"

    # SCP the requirements file
+    echo(" Uploading requirements.txt...")
    req_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(requirements_path), f"{user}@{host}:/tmp/requirements.txt"],
        capture_output=True,
        text=True,
    )
    if req_result.returncode != 0:
+        secho(f" SCP failed: {req_result.stderr}", fg="red", err=True)
        return False

    # SCP the GPU benchmark script
    benchmark_path = script_path.parent / "gpu_benchmark.py"
+    echo(" Uploading gpu_benchmark.py...")
    bench_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(benchmark_path), f"{user}@{host}:/tmp/gpu_benchmark.py"],
        capture_output=True,
        text=True,
    )
    if bench_result.returncode != 0:
+        secho(f" SCP failed: {bench_result.stderr}", fg="red", err=True)
        return False

    # SCP the GPU smoke test notebook
    notebook_path = script_path.parent / "gpu_smoke_test.ipynb"
+    echo(" Uploading gpu_smoke_test.ipynb...")
    nb_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(notebook_path), f"{user}@{host}:/tmp/gpu_smoke_test.ipynb"],
        capture_output=True,
        text=True,
    )
    if nb_result.returncode != 0:
+        secho(f" SCP failed: {nb_result.stderr}", fg="red", err=True)
        return False

    # SCP the CUDA example source
    saxpy_path = script_path.parent / "saxpy.cu"
+    echo(" Uploading saxpy.cu...")
    saxpy_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(saxpy_path), f"{user}@{host}:/tmp/saxpy.cu"],
        capture_output=True,
        text=True,
    )
    if saxpy_result.returncode != 0:
+        secho(f" SCP failed: {saxpy_result.stderr}", fg="red", err=True)
        return False

    # SCP the VSCode launch.json
    launch_json_path = script_path.parent / "launch.json"
+    echo(" Uploading launch.json...")
    launch_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(launch_json_path), f"{user}@{host}:/tmp/launch.json"],
        capture_output=True,
        text=True,
    )
    if launch_result.returncode != 0:
+        secho(f" SCP failed: {launch_result.stderr}", fg="red", err=True)
        return False

    # SCP the VSCode tasks.json
    tasks_json_path = script_path.parent / "tasks.json"
+    echo(" Uploading tasks.json...")
    tasks_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(tasks_json_path), f"{user}@{host}:/tmp/tasks.json"],
        capture_output=True,
        text=True,
    )
    if tasks_result.returncode != 0:
+        secho(f" SCP failed: {tasks_result.stderr}", fg="red", err=True)
        return False

    # SCP the script
+    echo(" Uploading remote_setup.sh...")
    scp_result = subprocess.run(
        ["scp", *ssh_opts, *scp_port_opts, str(script_path), f"{user}@{host}:/tmp/remote_setup.sh"],
        capture_output=True,
        text=True,
    )
    if scp_result.returncode != 0:
+        secho(f" SCP failed: {scp_result.stderr}", fg="red", err=True)
        return False

    # Execute the script, passing PYTHON_VERSION as an inline env var if specified
+    echo(" Running remote_setup.sh on instance...")
    remote_cmd = "chmod +x /tmp/remote_setup.sh && "
    if python_version:
        remote_cmd += f"PYTHON_VERSION={python_version} "

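A quick self-contained check (not part of the package's test suite, and the demo command is hypothetical) of why these progress lines are safe to keep: echo() reads the output format from the click context and prints nothing outside text mode, so upload progress never leaks into structured output.

import click
from click.testing import CliRunner

from aws_bootstrap.output import OutputFormat, echo


@click.command()
@click.option("-o", "--output", "fmt", default="text")
@click.pass_context
def demo(ctx: click.Context, fmt: str) -> None:
    ctx.obj = {"output_format": OutputFormat(fmt)}
    echo("  Uploading requirements.txt...")  # silent unless fmt == "text"
    click.echo('{"ok": true}')               # structured payload is always printed


runner = CliRunner()
assert "Uploading" in runner.invoke(demo, ["-o", "text"]).output
assert "Uploading" not in runner.invoke(demo, ["-o", "json"]).output
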
aws_bootstrap/tests/test_cli.py
CHANGED

@@ -1,11 +1,13 @@
"""Tests for CLI entry point and help output."""

from __future__ import annotations
+import json
from datetime import UTC, datetime
from pathlib import Path
from unittest.mock import MagicMock, patch

import botocore.exceptions
+import yaml
from click.testing import CliRunner

from aws_bootstrap.cli import main

@@ -1254,3 +1256,281 @@
    assert "Removed aws-gpu1" in result.output
    assert "Cleaned up 1" in result.output
    mock_cleanup.assert_called_once()
+
+
+# ---------------------------------------------------------------------------
+# --output structured format tests
+# ---------------------------------------------------------------------------
+
+
+def test_help_shows_output_option():
+    runner = CliRunner()
+    result = runner.invoke(main, ["--help"])
+    assert result.exit_code == 0
+    assert "--output" in result.output
+    assert "-o" in result.output
+
+
+@patch("aws_bootstrap.cli.find_ebs_volumes_for_instance", return_value=[])
+@patch("aws_bootstrap.cli.get_ssh_host_details", return_value=None)
+@patch("aws_bootstrap.cli.list_ssh_hosts", return_value={})
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.get_spot_price")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+def test_status_output_json(mock_find, mock_spot_price, mock_session, mock_ssh_hosts, mock_details, mock_ebs):
+    mock_find.return_value = [
+        {
+            "InstanceId": "i-abc123",
+            "Name": "aws-bootstrap-g4dn.xlarge",
+            "State": "running",
+            "InstanceType": "g4dn.xlarge",
+            "PublicIp": "1.2.3.4",
+            "LaunchTime": datetime(2025, 1, 1, tzinfo=UTC),
+            "Lifecycle": "spot",
+            "AvailabilityZone": "us-west-2a",
+        }
+    ]
+    mock_spot_price.return_value = 0.1578
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "status"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert "instances" in data
+    assert len(data["instances"]) == 1
+    inst = data["instances"][0]
+    assert inst["instance_id"] == "i-abc123"
+    assert inst["state"] == "running"
+    assert inst["instance_type"] == "g4dn.xlarge"
+    assert inst["public_ip"] == "1.2.3.4"
+    assert inst["lifecycle"] == "spot"
+    assert inst["spot_price_per_hour"] == 0.1578
+    assert "uptime_seconds" in inst
+    assert "estimated_cost" in inst
+    # No ANSI or progress text in structured output
+    assert "\x1b[" not in result.output
+    assert "Found" not in result.output
+
+
+@patch("aws_bootstrap.cli.find_ebs_volumes_for_instance", return_value=[])
+@patch("aws_bootstrap.cli.get_ssh_host_details", return_value=None)
+@patch("aws_bootstrap.cli.list_ssh_hosts", return_value={})
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.get_spot_price")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+def test_status_output_yaml(mock_find, mock_spot_price, mock_session, mock_ssh_hosts, mock_details, mock_ebs):
+    mock_find.return_value = [
+        {
+            "InstanceId": "i-abc123",
+            "Name": "aws-bootstrap-g4dn.xlarge",
+            "State": "running",
+            "InstanceType": "g4dn.xlarge",
+            "PublicIp": "1.2.3.4",
+            "LaunchTime": datetime(2025, 1, 1, tzinfo=UTC),
+            "Lifecycle": "spot",
+            "AvailabilityZone": "us-west-2a",
+        }
+    ]
+    mock_spot_price.return_value = 0.15
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "yaml", "status"])
+    assert result.exit_code == 0
+    data = yaml.safe_load(result.output)
+    assert "instances" in data
+    assert data["instances"][0]["instance_id"] == "i-abc123"
+
+
+@patch("aws_bootstrap.cli.find_ebs_volumes_for_instance", return_value=[])
+@patch("aws_bootstrap.cli.get_ssh_host_details", return_value=None)
+@patch("aws_bootstrap.cli.list_ssh_hosts", return_value={})
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.get_spot_price")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+def test_status_output_table(mock_find, mock_spot_price, mock_session, mock_ssh_hosts, mock_details, mock_ebs):
+    mock_find.return_value = [
+        {
+            "InstanceId": "i-abc123",
+            "Name": "aws-bootstrap-g4dn.xlarge",
+            "State": "running",
+            "InstanceType": "g4dn.xlarge",
+            "PublicIp": "1.2.3.4",
+            "LaunchTime": datetime(2025, 1, 1, tzinfo=UTC),
+            "Lifecycle": "spot",
+            "AvailabilityZone": "us-west-2a",
+        }
+    ]
+    mock_spot_price.return_value = 0.15
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "table", "status"])
+    assert result.exit_code == 0
+    assert "Instance ID" in result.output
+    assert "i-abc123" in result.output
+
+
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+def test_status_no_instances_json(mock_find, mock_session):
+    mock_find.return_value = []
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "status"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data == {"instances": []}
+
+
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.get_latest_ami")
+@patch("aws_bootstrap.cli.import_key_pair", return_value="aws-bootstrap-key")
+@patch("aws_bootstrap.cli.ensure_security_group", return_value="sg-123")
+def test_launch_output_json_dry_run(mock_sg, mock_import, mock_ami, mock_session, tmp_path):
+    mock_ami.return_value = {"ImageId": "ami-123", "Name": "TestAMI"}
+
+    key_path = tmp_path / "id_ed25519.pub"
+    key_path.write_text("ssh-ed25519 AAAA test@host")
+
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "launch", "--key-path", str(key_path), "--dry-run"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data["dry_run"] is True
+    assert data["instance_type"] == "g4dn.xlarge"
+    assert data["ami_id"] == "ami-123"
+    assert data["pricing"] == "spot"
+    assert data["region"] == "us-west-2"
+
+
+@patch("aws_bootstrap.cli.remove_ssh_host", return_value="aws-gpu1")
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+@patch("aws_bootstrap.cli.terminate_tagged_instances")
+def test_terminate_output_json(mock_terminate, mock_find, mock_session, mock_remove_ssh):
+    mock_find.return_value = [
+        {
+            "InstanceId": "i-abc123",
+            "Name": "test",
+            "State": "running",
+            "InstanceType": "g4dn.xlarge",
+            "PublicIp": "1.2.3.4",
+            "LaunchTime": datetime(2025, 1, 1, tzinfo=UTC),
+        }
+    ]
+    mock_terminate.return_value = [
+        {
+            "InstanceId": "i-abc123",
+            "PreviousState": {"Name": "running"},
+            "CurrentState": {"Name": "shutting-down"},
+        }
+    ]
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "terminate", "--yes"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert "terminated" in data
+    assert len(data["terminated"]) == 1
+    assert data["terminated"][0]["instance_id"] == "i-abc123"
+    assert data["terminated"][0]["previous_state"] == "running"
+    assert data["terminated"][0]["current_state"] == "shutting-down"
+    assert data["terminated"][0]["ssh_alias_removed"] == "aws-gpu1"
+
+
+@patch("aws_bootstrap.cli.cleanup_stale_ssh_hosts")
+@patch("aws_bootstrap.cli.find_stale_ssh_hosts", return_value=[("i-dead1234", "aws-gpu1")])
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.find_tagged_instances", return_value=[])
+def test_cleanup_output_json(mock_find, mock_session, mock_stale, mock_cleanup):
+    mock_cleanup.return_value = [CleanupResult(instance_id="i-dead1234", alias="aws-gpu1", removed=True)]
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "cleanup", "--yes"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert "cleaned" in data
+    assert len(data["cleaned"]) == 1
+    assert data["cleaned"][0]["instance_id"] == "i-dead1234"
+    assert data["cleaned"][0]["alias"] == "aws-gpu1"
+    assert data["cleaned"][0]["removed"] is True
+
+
+@patch("aws_bootstrap.cli.find_stale_ssh_hosts", return_value=[("i-dead1234", "aws-gpu1")])
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.find_tagged_instances", return_value=[])
+def test_cleanup_dry_run_json(mock_find, mock_session, mock_stale):
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "cleanup", "--dry-run"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data["dry_run"] is True
+    assert "stale" in data
+    assert data["stale"][0]["alias"] == "aws-gpu1"
+
+
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.list_instance_types")
+def test_list_instance_types_json(mock_list, mock_session):
+    mock_list.return_value = [
+        {
+            "InstanceType": "g4dn.xlarge",
+            "VCpuCount": 4,
+            "MemoryMiB": 16384,
+            "GpuSummary": "1x T4 (16384 MiB)",
+        },
+    ]
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "list", "instance-types"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert isinstance(data, list)
+    assert data[0]["instance_type"] == "g4dn.xlarge"
+    assert data[0]["vcpus"] == 4
+    assert data[0]["memory_mib"] == 16384
+    assert data[0]["gpu"] == "1x T4 (16384 MiB)"
+
+
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.list_amis")
+def test_list_amis_json(mock_list, mock_session):
+    mock_list.return_value = [
+        {
+            "ImageId": "ami-abc123",
+            "Name": "Deep Learning AMI v42",
+            "CreationDate": "2025-06-01T00:00:00Z",
+            "Architecture": "x86_64",
+        },
+    ]
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "list", "amis"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert isinstance(data, list)
+    assert data[0]["image_id"] == "ami-abc123"
+    assert data[0]["name"] == "Deep Learning AMI v42"
+    assert data[0]["creation_date"] == "2025-06-01"
+
+
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+def test_terminate_json_requires_yes(mock_find, mock_session):
+    """Structured output without --yes should error."""
+    mock_find.return_value = [
+        {
+            "InstanceId": "i-abc123",
+            "Name": "test",
+            "State": "running",
+            "InstanceType": "g4dn.xlarge",
+            "PublicIp": "1.2.3.4",
+            "LaunchTime": datetime(2025, 1, 1, tzinfo=UTC),
+        }
+    ]
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "terminate"])
+    assert result.exit_code != 0
+    assert "--yes is required" in result.output
+
+
+@patch("aws_bootstrap.cli.boto3.Session")
+@patch("aws_bootstrap.cli.find_tagged_instances")
+def test_terminate_no_instances_json(mock_find, mock_session):
+    mock_find.return_value = []
+    runner = CliRunner()
+    result = runner.invoke(main, ["-o", "json", "terminate", "--yes"])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data == {"terminated": []}