aws-bootstrap-g4dn 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- aws_bootstrap/cli.py +502 -86
- aws_bootstrap/config.py +2 -0
- aws_bootstrap/ec2.py +137 -8
- aws_bootstrap/output.py +106 -0
- aws_bootstrap/resources/remote_setup.sh +2 -2
- aws_bootstrap/ssh.py +142 -20
- aws_bootstrap/tests/test_cli.py +652 -4
- aws_bootstrap/tests/test_config.py +18 -0
- aws_bootstrap/tests/test_ebs.py +245 -0
- aws_bootstrap/tests/test_output.py +192 -0
- aws_bootstrap/tests/test_ssh_config.py +76 -0
- aws_bootstrap/tests/test_ssh_ebs.py +76 -0
- {aws_bootstrap_g4dn-0.5.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/METADATA +79 -7
- aws_bootstrap_g4dn-0.7.0.dist-info/RECORD +31 -0
- aws_bootstrap_g4dn-0.5.0.dist-info/RECORD +0 -27
- {aws_bootstrap_g4dn-0.5.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/WHEEL +0 -0
- {aws_bootstrap_g4dn-0.5.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/entry_points.txt +0 -0
- {aws_bootstrap_g4dn-0.5.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/licenses/LICENSE +0 -0
- {aws_bootstrap_g4dn-0.5.0.dist-info → aws_bootstrap_g4dn-0.7.0.dist-info}/top_level.txt +0 -0

aws_bootstrap/tests/test_config.py
@@ -20,6 +20,12 @@ def test_defaults():
     assert config.dry_run is False


+def test_ebs_fields_default_none():
+    config = LaunchConfig()
+    assert config.ebs_storage is None
+    assert config.ebs_volume_id is None
+
+
 def test_overrides():
     config = LaunchConfig(
         instance_type="g5.xlarge",
@@ -33,3 +39,15 @@ def test_overrides():
     assert config.spot is False
     assert config.volume_size == 200
     assert config.key_path == Path("/tmp/test.pub")
+
+
+def test_ebs_storage_override():
+    config = LaunchConfig(ebs_storage=96)
+    assert config.ebs_storage == 96
+    assert config.ebs_volume_id is None
+
+
+def test_ebs_volume_id_override():
+    config = LaunchConfig(ebs_volume_id="vol-abc123")
+    assert config.ebs_volume_id == "vol-abc123"
+    assert config.ebs_storage is None
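
The field names above come straight from the tests; aws_bootstrap/config.py itself changes by only two lines in this release and is not expanded in this diff, so the following is a minimal sketch of what the tests imply (whether LaunchConfig is a dataclass, and the exact types and defaults, are assumptions):

# Sketch only -- not the shipped config.py; field names from the tests, the rest assumed.
from dataclasses import dataclass


@dataclass
class LaunchConfig:
    # ...existing fields (instance_type, spot, volume_size, key_path, dry_run, ...)
    ebs_storage: int | None = None     # size in GiB of a new EBS data volume to create
    ebs_volume_id: str | None = None   # or: ID of an existing volume to reuse instead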

aws_bootstrap/tests/test_ebs.py (new file)
@@ -0,0 +1,245 @@
+"""Tests for EBS data volume operations in ec2.py."""
+
+from __future__ import annotations
+from unittest.mock import MagicMock
+
+import botocore.exceptions
+import pytest
+
+from aws_bootstrap.ec2 import (
+    EBS_DEVICE_NAME,
+    CLIError,
+    attach_ebs_volume,
+    create_ebs_volume,
+    delete_ebs_volume,
+    detach_ebs_volume,
+    find_ebs_volumes_for_instance,
+    validate_ebs_volume,
+)
+
+
+# ---------------------------------------------------------------------------
+# create_ebs_volume
+# ---------------------------------------------------------------------------
+
+
+def test_create_ebs_volume():
+    ec2 = MagicMock()
+    ec2.create_volume.return_value = {"VolumeId": "vol-abc123"}
+    waiter = MagicMock()
+    ec2.get_waiter.return_value = waiter
+
+    vol_id = create_ebs_volume(ec2, 96, "us-west-2a", "aws-bootstrap-g4dn", "i-test123")
+
+    assert vol_id == "vol-abc123"
+    ec2.create_volume.assert_called_once()
+    create_kwargs = ec2.create_volume.call_args[1]
+    assert create_kwargs["AvailabilityZone"] == "us-west-2a"
+    assert create_kwargs["Size"] == 96
+    assert create_kwargs["VolumeType"] == "gp3"
+
+    # Check tags
+    tags = create_kwargs["TagSpecifications"][0]["Tags"]
+    tag_dict = {t["Key"]: t["Value"] for t in tags}
+    assert tag_dict["created-by"] == "aws-bootstrap-g4dn"
+    assert tag_dict["Name"] == "aws-bootstrap-data-i-test123"
+    assert tag_dict["aws-bootstrap-instance"] == "i-test123"
+
+    ec2.get_waiter.assert_called_once_with("volume_available")
+    waiter.wait.assert_called_once()
+
+
+# ---------------------------------------------------------------------------
+# validate_ebs_volume
+# ---------------------------------------------------------------------------
+
+
+def test_validate_ebs_volume_valid():
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {
+        "Volumes": [
+            {
+                "VolumeId": "vol-abc123",
+                "State": "available",
+                "AvailabilityZone": "us-west-2a",
+                "Size": 100,
+            }
+        ]
+    }
+    vol = validate_ebs_volume(ec2, "vol-abc123", "us-west-2a")
+    assert vol["VolumeId"] == "vol-abc123"
+
+
+def test_validate_ebs_volume_wrong_az():
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {
+        "Volumes": [
+            {
+                "VolumeId": "vol-abc123",
+                "State": "available",
+                "AvailabilityZone": "us-east-1a",
+                "Size": 100,
+            }
+        ]
+    }
+    with pytest.raises(CLIError, match="us-east-1a"):
+        validate_ebs_volume(ec2, "vol-abc123", "us-west-2a")
+
+
+def test_validate_ebs_volume_in_use():
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {
+        "Volumes": [
+            {
+                "VolumeId": "vol-abc123",
+                "State": "in-use",
+                "AvailabilityZone": "us-west-2a",
+                "Size": 100,
+            }
+        ]
+    }
+    with pytest.raises(CLIError, match="in-use"):
+        validate_ebs_volume(ec2, "vol-abc123", "us-west-2a")
+
+
+def test_validate_ebs_volume_not_found():
+    ec2 = MagicMock()
+    ec2.describe_volumes.side_effect = botocore.exceptions.ClientError(
+        {"Error": {"Code": "InvalidVolume.NotFound", "Message": "not found"}},
+        "DescribeVolumes",
+    )
+    with pytest.raises(CLIError, match="not found"):
+        validate_ebs_volume(ec2, "vol-notfound", "us-west-2a")
+
+
+def test_validate_ebs_volume_empty_response():
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {"Volumes": []}
+    with pytest.raises(CLIError, match="not found"):
+        validate_ebs_volume(ec2, "vol-empty", "us-west-2a")
+
+
+# ---------------------------------------------------------------------------
+# attach_ebs_volume
+# ---------------------------------------------------------------------------
+
+
+def test_attach_ebs_volume():
+    ec2 = MagicMock()
+    waiter = MagicMock()
+    ec2.get_waiter.return_value = waiter
+
+    attach_ebs_volume(ec2, "vol-abc123", "i-test123")
+
+    ec2.attach_volume.assert_called_once_with(
+        VolumeId="vol-abc123",
+        InstanceId="i-test123",
+        Device=EBS_DEVICE_NAME,
+    )
+    ec2.get_waiter.assert_called_once_with("volume_in_use")
+    waiter.wait.assert_called_once()
+
+
+def test_attach_ebs_volume_custom_device():
+    ec2 = MagicMock()
+    waiter = MagicMock()
+    ec2.get_waiter.return_value = waiter
+
+    attach_ebs_volume(ec2, "vol-abc123", "i-test123", device_name="/dev/sdg")
+
+    ec2.attach_volume.assert_called_once_with(
+        VolumeId="vol-abc123",
+        InstanceId="i-test123",
+        Device="/dev/sdg",
+    )
+
+
+# ---------------------------------------------------------------------------
+# detach_ebs_volume
+# ---------------------------------------------------------------------------
+
+
+def test_detach_ebs_volume():
+    ec2 = MagicMock()
+    waiter = MagicMock()
+    ec2.get_waiter.return_value = waiter
+
+    detach_ebs_volume(ec2, "vol-abc123")
+
+    ec2.detach_volume.assert_called_once_with(VolumeId="vol-abc123")
+    ec2.get_waiter.assert_called_once_with("volume_available")
+    waiter.wait.assert_called_once()
+
+
+# ---------------------------------------------------------------------------
+# delete_ebs_volume
+# ---------------------------------------------------------------------------
+
+
+def test_delete_ebs_volume():
+    ec2 = MagicMock()
+    delete_ebs_volume(ec2, "vol-abc123")
+    ec2.delete_volume.assert_called_once_with(VolumeId="vol-abc123")
+
+
+# ---------------------------------------------------------------------------
+# find_ebs_volumes_for_instance
+# ---------------------------------------------------------------------------
+
+
+def test_find_ebs_volumes_for_instance():
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {
+        "Volumes": [
+            {
+                "VolumeId": "vol-data1",
+                "Size": 96,
+                "State": "in-use",
+                "Attachments": [{"Device": "/dev/sdf", "InstanceId": "i-test123"}],
+            }
+        ]
+    }
+    volumes = find_ebs_volumes_for_instance(ec2, "i-test123", "aws-bootstrap-g4dn")
+    assert len(volumes) == 1
+    assert volumes[0]["VolumeId"] == "vol-data1"
+    assert volumes[0]["Size"] == 96
+    assert volumes[0]["Device"] == "/dev/sdf"
+    assert volumes[0]["State"] == "in-use"
+
+
+def test_find_ebs_volumes_empty():
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {"Volumes": []}
+    volumes = find_ebs_volumes_for_instance(ec2, "i-test123", "aws-bootstrap-g4dn")
+    assert volumes == []
+
+
+def test_find_ebs_volumes_includes_available():
+    """Detached (available) volumes are still discovered by tags."""
+    ec2 = MagicMock()
+    ec2.describe_volumes.return_value = {
+        "Volumes": [
+            {
+                "VolumeId": "vol-avail",
+                "Size": 50,
+                "State": "available",
+                "Attachments": [],
+            }
+        ]
+    }
+    volumes = find_ebs_volumes_for_instance(ec2, "i-old", "aws-bootstrap-g4dn")
+    assert len(volumes) == 1
+    assert volumes[0]["VolumeId"] == "vol-avail"
+    assert volumes[0]["State"] == "available"
+    assert volumes[0]["Device"] == ""
+
+
+def test_find_ebs_volumes_client_error_returns_empty():
+    """ClientError (e.g. permissions) returns empty list instead of raising."""
+    ec2 = MagicMock()
+    ec2.describe_volumes.side_effect = botocore.exceptions.ClientError(
+        {"Error": {"Code": "UnauthorizedOperation", "Message": "no access"}},
+        "DescribeVolumes",
+    )
+    volumes = find_ebs_volumes_for_instance(ec2, "i-test", "aws-bootstrap-g4dn")
+    assert volumes == []
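
test_create_ebs_volume above pins the expected boto3 interaction for the new helper: create a gp3 volume in the instance's availability zone, tag it, and wait until it is available. The real implementation in aws_bootstrap/ec2.py (+137 lines) is not expanded in this diff; the sketch below is inferred from the mocked calls only, and the argument names are assumptions.

# Sketch only -- consistent with test_create_ebs_volume, not the shipped ec2.py.
def create_ebs_volume(ec2, size_gb, availability_zone, tag_value, instance_id):
    """Create a tagged gp3 data volume and block until it is available."""
    resp = ec2.create_volume(
        AvailabilityZone=availability_zone,
        Size=size_gb,
        VolumeType="gp3",
        TagSpecifications=[
            {
                "ResourceType": "volume",
                "Tags": [
                    {"Key": "created-by", "Value": tag_value},
                    {"Key": "Name", "Value": f"aws-bootstrap-data-{instance_id}"},
                    {"Key": "aws-bootstrap-instance", "Value": instance_id},
                ],
            }
        ],
    )
    volume_id = resp["VolumeId"]
    ec2.get_waiter("volume_available").wait(VolumeIds=[volume_id])
    return volume_id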

aws_bootstrap/tests/test_output.py (new file)
@@ -0,0 +1,192 @@
+"""Tests for the output formatting module."""
+
+from __future__ import annotations
+import json
+from datetime import UTC, datetime
+from pathlib import Path
+
+import click
+import yaml
+from click.testing import CliRunner
+
+from aws_bootstrap.output import OutputFormat, echo, emit, is_text
+
+
+def test_output_format_enum_values():
+    assert OutputFormat.TEXT.value == "text"
+    assert OutputFormat.JSON.value == "json"
+    assert OutputFormat.YAML.value == "yaml"
+    assert OutputFormat.TABLE.value == "table"
+
+
+def test_serialize_datetime():
+    """datetime objects should serialize to ISO format strings."""
+    dt = datetime(2025, 6, 15, 12, 30, 0, tzinfo=UTC)
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.JSON
+        emit({"timestamp": dt}, ctx=ctx)
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data["timestamp"] == "2025-06-15T12:30:00+00:00"
+
+
+def test_serialize_path():
+    """Path objects should serialize to strings."""
+    p = Path("/home/user/.ssh/id_ed25519")
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.JSON
+        emit({"path": p}, ctx=ctx)
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data["path"] == "/home/user/.ssh/id_ed25519"
+
+
+def test_emit_json():
+    """emit() should produce valid JSON in JSON mode."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.JSON
+        emit({"key": "value", "count": 42}, ctx=ctx)
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    data = json.loads(result.output)
+    assert data == {"key": "value", "count": 42}
+
+
+def test_emit_yaml():
+    """emit() should produce valid YAML in YAML mode."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.YAML
+        emit({"key": "value", "count": 42}, ctx=ctx)
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    data = yaml.safe_load(result.output)
+    assert data == {"key": "value", "count": 42}
+
+
+def test_emit_table_list():
+    """emit() should render a list of dicts as a table with headers."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.TABLE
+        emit(
+            [{"name": "Alice", "age": 30}, {"name": "Bob", "age": 25}],
+            headers={"name": "Name", "age": "Age"},
+            ctx=ctx,
+        )
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    assert "Name" in result.output
+    assert "Age" in result.output
+    assert "Alice" in result.output
+    assert "Bob" in result.output
+
+
+def test_emit_table_dict():
+    """emit() should render a single dict as key-value pairs."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.TABLE
+        emit({"instance_id": "i-abc123", "state": "running"}, ctx=ctx)
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    assert "instance_id" in result.output
+    assert "i-abc123" in result.output
+    assert "running" in result.output
+
+
+def test_echo_suppressed_in_json_mode():
+    """echo() should produce no output when format is JSON."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.JSON
+        echo("This should not appear")
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    assert result.output == ""
+
+
+def test_echo_emits_in_text_mode():
+    """echo() should work normally in text mode."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.TEXT
+        echo("Hello world")
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+    assert "Hello world" in result.output
+
+
+def test_is_text_default():
+    """is_text() should return True when no context is set (default behavior)."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.TEXT
+        assert is_text(ctx) is True
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
+
+
+def test_is_text_false_for_json():
+    """is_text() should return False when format is JSON."""
+
+    @click.command()
+    @click.pass_context
+    def cli(ctx):
+        ctx.ensure_object(dict)
+        ctx.obj["output_format"] = OutputFormat.JSON
+        assert is_text(ctx) is False
+
+    runner = CliRunner()
+    result = runner.invoke(cli, [])
+    assert result.exit_code == 0
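
The tests above define emit()'s contract: it reads ctx.obj["output_format"], serializes datetime and Path values to plain strings, and renders JSON, YAML, or a table, while echo() is suppressed in non-text modes. aws_bootstrap/output.py (+106 lines) is not expanded in this diff; the sketch below covers only the JSON/YAML branches and treats the table/text rendering and echo()'s context lookup as unshown details.

# Sketch only -- one plausible emit()/is_text() dispatch, not the shipped output.py.
import json
from datetime import datetime
from enum import Enum
from pathlib import Path

import click
import yaml


class OutputFormat(Enum):
    TEXT = "text"
    JSON = "json"
    YAML = "yaml"
    TABLE = "table"


def _to_plain(value):
    # datetime -> ISO-8601 string, Path -> str, containers handled recursively
    if isinstance(value, datetime):
        return value.isoformat()
    if isinstance(value, Path):
        return str(value)
    if isinstance(value, dict):
        return {k: _to_plain(v) for k, v in value.items()}
    if isinstance(value, list):
        return [_to_plain(v) for v in value]
    return value


def is_text(ctx) -> bool:
    return ctx.obj.get("output_format", OutputFormat.TEXT) is OutputFormat.TEXT


def emit(data, headers=None, ctx=None):
    fmt = ctx.obj.get("output_format", OutputFormat.TEXT)
    plain = _to_plain(data)
    if fmt is OutputFormat.JSON:
        click.echo(json.dumps(plain, indent=2))
    elif fmt is OutputFormat.YAML:
        click.echo(yaml.safe_dump(plain, sort_keys=False))
    else:
        # TEXT/TABLE rendering (key/value pairs, or a header row per dict in a
        # list, using the optional `headers` mapping) is omitted from this sketch.
        ...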

aws_bootstrap/tests/test_ssh_config.py
@@ -10,7 +10,9 @@ from aws_bootstrap.ssh import (
     _next_alias,
     _read_ssh_config,
     add_ssh_host,
+    cleanup_stale_ssh_hosts,
     find_ssh_alias,
+    find_stale_ssh_hosts,
     get_ssh_host_details,
     list_ssh_hosts,
     remove_ssh_host,
@@ -407,3 +409,77 @@ def test_resolve_unknown_alias_returns_none(tmp_path):
 def test_resolve_nonexistent_config_returns_none(tmp_path):
     cfg = tmp_path / "no_such_file"
     assert resolve_instance_id("aws-gpu1", config_path=cfg) is None
+
+
+# ---------------------------------------------------------------------------
+# Cleanup: find_stale_ssh_hosts / cleanup_stale_ssh_hosts
+# ---------------------------------------------------------------------------
+
+
+def test_find_stale_ssh_hosts_finds_orphans(tmp_path):
+    cfg = _config_path(tmp_path)
+    add_ssh_host("i-111aaaa1", "1.1.1.1", "ubuntu", KEY_PATH, config_path=cfg)
+    add_ssh_host("i-222bbbb2", "2.2.2.2", "ubuntu", KEY_PATH, config_path=cfg)
+    stale = find_stale_ssh_hosts({"i-111aaaa1"}, config_path=cfg)
+    assert stale == [("i-222bbbb2", "aws-gpu2")]
+
+
+def test_find_stale_ssh_hosts_none_stale(tmp_path):
+    cfg = _config_path(tmp_path)
+    add_ssh_host("i-111aaaa1", "1.1.1.1", "ubuntu", KEY_PATH, config_path=cfg)
+    add_ssh_host("i-222bbbb2", "2.2.2.2", "ubuntu", KEY_PATH, config_path=cfg)
+    stale = find_stale_ssh_hosts({"i-111aaaa1", "i-222bbbb2"}, config_path=cfg)
+    assert stale == []
+
+
+def test_find_stale_ssh_hosts_all_stale(tmp_path):
+    cfg = _config_path(tmp_path)
+    add_ssh_host("i-111aaaa1", "1.1.1.1", "ubuntu", KEY_PATH, config_path=cfg)
+    add_ssh_host("i-222bbbb2", "2.2.2.2", "ubuntu", KEY_PATH, config_path=cfg)
+    stale = find_stale_ssh_hosts(set(), config_path=cfg)
+    assert len(stale) == 2
+    assert ("i-111aaaa1", "aws-gpu1") in stale
+    assert ("i-222bbbb2", "aws-gpu2") in stale
+
+
+def test_find_stale_ssh_hosts_empty_config(tmp_path):
+    cfg = _config_path(tmp_path)
+    cfg.parent.mkdir(parents=True, exist_ok=True)
+    cfg.write_text("")
+    stale = find_stale_ssh_hosts(set(), config_path=cfg)
+    assert stale == []
+
+
+def test_cleanup_stale_ssh_hosts_removes(tmp_path):
+    cfg = _config_path(tmp_path)
+    add_ssh_host("i-111aaaa1", "1.1.1.1", "ubuntu", KEY_PATH, config_path=cfg)
+    add_ssh_host("i-222bbbb2", "2.2.2.2", "ubuntu", KEY_PATH, config_path=cfg)
+    results = cleanup_stale_ssh_hosts({"i-111aaaa1"}, config_path=cfg)
+    assert len(results) == 1
+    assert results[0].instance_id == "i-222bbbb2"
+    assert results[0].alias == "aws-gpu2"
+    assert results[0].removed is True
+    # Verify it was actually removed from the config
+    content = cfg.read_text()
+    assert "i-222bbbb2" not in content
+    assert "i-111aaaa1" in content
+
+
+def test_cleanup_stale_ssh_hosts_dry_run(tmp_path):
+    cfg = _config_path(tmp_path)
+    add_ssh_host("i-111aaaa1", "1.1.1.1", "ubuntu", KEY_PATH, config_path=cfg)
+    add_ssh_host("i-222bbbb2", "2.2.2.2", "ubuntu", KEY_PATH, config_path=cfg)
+    results = cleanup_stale_ssh_hosts({"i-111aaaa1"}, config_path=cfg, dry_run=True)
+    assert len(results) == 1
+    assert results[0].removed is False
+    # Verify config is unchanged
+    content = cfg.read_text()
+    assert "i-222bbbb2" in content
+    assert "i-111aaaa1" in content
+
+
+def test_cleanup_stale_ssh_hosts_no_stale(tmp_path):
+    cfg = _config_path(tmp_path)
+    add_ssh_host("i-111aaaa1", "1.1.1.1", "ubuntu", KEY_PATH, config_path=cfg)
+    results = cleanup_stale_ssh_hosts({"i-111aaaa1"}, config_path=cfg)
+    assert results == []
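
From the assertions above: find_stale_ssh_hosts() takes the set of currently active instance IDs and returns (instance_id, alias) pairs for managed Host entries whose instance is gone, and cleanup_stale_ssh_hosts() removes them (or only reports them when dry_run=True), returning result objects with .instance_id, .alias and .removed. The real code is in aws_bootstrap/ssh.py (+142 lines) and is not expanded here; in the sketch below the CleanupResult name, the assumption that list_ssh_hosts() yields (instance_id, alias) pairs, and the remove_ssh_host() signature are all unconfirmed.

# Sketch only -- shape implied by the tests, not the shipped ssh.py.
from dataclasses import dataclass

from aws_bootstrap.ssh import list_ssh_hosts, remove_ssh_host  # signatures assumed


@dataclass
class CleanupResult:  # name assumed; tests only require .instance_id/.alias/.removed
    instance_id: str
    alias: str
    removed: bool


def find_stale_ssh_hosts(active_instance_ids, config_path=None):
    """Return (instance_id, alias) pairs whose instance is no longer active."""
    return [
        (instance_id, alias)
        for instance_id, alias in list_ssh_hosts(config_path=config_path)
        if instance_id not in active_instance_ids
    ]


def cleanup_stale_ssh_hosts(active_instance_ids, config_path=None, dry_run=False):
    """Remove stale Host blocks; with dry_run=True only report what would be removed."""
    results = []
    for instance_id, alias in find_stale_ssh_hosts(active_instance_ids, config_path=config_path):
        if not dry_run:
            remove_ssh_host(instance_id, config_path=config_path)
        results.append(CleanupResult(instance_id, alias, removed=not dry_run))
    return results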

aws_bootstrap/tests/test_ssh_ebs.py (new file)
@@ -0,0 +1,76 @@
+"""Tests for mount_ebs_volume SSH function."""
+
+from __future__ import annotations
+from pathlib import Path
+from unittest.mock import MagicMock, patch
+
+from aws_bootstrap.ssh import mount_ebs_volume
+
+
+KEY_PATH = Path("/home/user/.ssh/id_ed25519.pub")
+
+
+@patch("aws_bootstrap.ssh.subprocess.run")
+def test_mount_ebs_volume_success_format(mock_run):
+    """New volume: SSH command includes mkfs."""
+    mock_run.return_value = MagicMock(returncode=0)
+
+    result = mount_ebs_volume("1.2.3.4", "ubuntu", KEY_PATH, "vol-abc123", format_volume=True)
+
+    assert result is True
+    mock_run.assert_called_once()
+    cmd = mock_run.call_args[0][0]
+    script = cmd[-1]
+    assert "mkfs.ext4" in script
+    assert "/data" in script
+    assert "volabc123" in script  # stripped vol- hyphen
+
+
+@patch("aws_bootstrap.ssh.subprocess.run")
+def test_mount_ebs_volume_success_no_format(mock_run):
+    """Existing volume: SSH command skips mkfs."""
+    mock_run.return_value = MagicMock(returncode=0)
+
+    result = mount_ebs_volume("1.2.3.4", "ubuntu", KEY_PATH, "vol-abc123", format_volume=False)
+
+    assert result is True
+    mock_run.assert_called_once()
+    cmd = mock_run.call_args[0][0]
+    script = cmd[-1]
+    assert "mkfs" not in script
+    assert "/data" in script
+
+
+@patch("aws_bootstrap.ssh.subprocess.run")
+def test_mount_ebs_volume_failure(mock_run):
+    """Non-zero exit code returns False."""
+    mock_run.return_value = MagicMock(returncode=1)
+
+    result = mount_ebs_volume("1.2.3.4", "ubuntu", KEY_PATH, "vol-abc123")
+
+    assert result is False
+
+
+@patch("aws_bootstrap.ssh.subprocess.run")
+def test_mount_ebs_volume_custom_port(mock_run):
+    """Non-default port is passed as -p flag."""
+    mock_run.return_value = MagicMock(returncode=0)
+
+    mount_ebs_volume("1.2.3.4", "ubuntu", KEY_PATH, "vol-abc123", port=2222)
+
+    cmd = mock_run.call_args[0][0]
+    assert "-p" in cmd
+    port_idx = cmd.index("-p")
+    assert cmd[port_idx + 1] == "2222"
+
+
+@patch("aws_bootstrap.ssh.subprocess.run")
+def test_mount_ebs_volume_custom_mount_point(mock_run):
+    """Custom mount point appears in the SSH script."""
+    mock_run.return_value = MagicMock(returncode=0)
+
+    mount_ebs_volume("1.2.3.4", "ubuntu", KEY_PATH, "vol-abc123", mount_point="/mnt/data")
+
+    cmd = mock_run.call_args[0][0]
+    script = cmd[-1]
+    assert "/mnt/data" in script
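
These tests fix mount_ebs_volume's observable behavior: it shells out over SSH (passing -p for a non-default port), runs a remote script that formats with mkfs.ext4 only when format_volume is true, mounts at /data by default or at a caller-supplied mount_point, derives a label-like token from the volume ID with the hyphen stripped, and returns True only on a zero exit code. The shipped helper in aws_bootstrap/ssh.py builds its own remote script, which this diff does not show; the device name, labeling, and mount commands below are illustrative assumptions.

# Sketch only -- consistent with the tests above, not the shipped mount_ebs_volume.
import subprocess


def mount_ebs_volume(host, user, key_path, volume_id, *,
                     format_volume=False, mount_point="/data", port=22):
    label = volume_id.replace("-", "")  # "vol-abc123" -> "volabc123"
    steps = []
    if format_volume:
        steps.append(f"sudo mkfs.ext4 -L {label} /dev/nvme1n1")  # device name assumed
    steps.append(f"sudo mkdir -p {mount_point}")
    steps.append(f"sudo mount -L {label} {mount_point}")
    script = " && ".join(steps)

    cmd = ["ssh", "-i", str(key_path), "-p", str(port), f"{user}@{host}", script]
    return subprocess.run(cmd, check=False).returncode == 0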