s3ui 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- s3ui/__init__.py +1 -0
- s3ui/app.py +56 -0
- s3ui/constants.py +39 -0
- s3ui/core/__init__.py +0 -0
- s3ui/core/cost.py +218 -0
- s3ui/core/credentials.py +165 -0
- s3ui/core/download_worker.py +260 -0
- s3ui/core/errors.py +104 -0
- s3ui/core/listing_cache.py +178 -0
- s3ui/core/s3_client.py +358 -0
- s3ui/core/stats.py +128 -0
- s3ui/core/transfers.py +281 -0
- s3ui/core/upload_worker.py +311 -0
- s3ui/db/__init__.py +0 -0
- s3ui/db/database.py +143 -0
- s3ui/db/migrations/001_initial.sql +114 -0
- s3ui/logging_setup.py +18 -0
- s3ui/main_window.py +969 -0
- s3ui/models/__init__.py +0 -0
- s3ui/models/s3_objects.py +295 -0
- s3ui/models/transfer_model.py +282 -0
- s3ui/resources/__init__.py +0 -0
- s3ui/resources/s3ui.png +0 -0
- s3ui/ui/__init__.py +0 -0
- s3ui/ui/breadcrumb_bar.py +150 -0
- s3ui/ui/confirm_delete.py +60 -0
- s3ui/ui/cost_dialog.py +163 -0
- s3ui/ui/get_info.py +50 -0
- s3ui/ui/local_pane.py +226 -0
- s3ui/ui/name_conflict.py +68 -0
- s3ui/ui/s3_pane.py +547 -0
- s3ui/ui/settings_dialog.py +328 -0
- s3ui/ui/setup_wizard.py +462 -0
- s3ui/ui/stats_dialog.py +162 -0
- s3ui/ui/transfer_panel.py +153 -0
- s3ui-1.0.0.dist-info/METADATA +118 -0
- s3ui-1.0.0.dist-info/RECORD +40 -0
- s3ui-1.0.0.dist-info/WHEEL +4 -0
- s3ui-1.0.0.dist-info/entry_points.txt +2 -0
- s3ui-1.0.0.dist-info/licenses/LICENSE +21 -0
s3ui/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Single source of truth for the package version (matches the 1.0.0 wheel metadata).
__version__ = "1.0.0"
|
s3ui/app.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import sys
|
|
3
|
+
|
|
4
|
+
from s3ui.constants import APP_DIR, APP_NAME
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def main() -> None:
    """Application entry point: prepare state dirs, logging, Qt, the database,
    then run the event loop until the UI exits."""
    # The state directory must exist before logging or the lock file touch it.
    APP_DIR.mkdir(parents=True, exist_ok=True)

    # Configure logging before importing anything that might log at import time.
    from s3ui.logging_setup import setup_logging

    setup_logging()
    logger = logging.getLogger("s3ui.app")
    logger.info("Starting %s", APP_NAME)

    # Qt imports are deferred so logging is already configured when they run.
    from PyQt6.QtCore import QLockFile
    from PyQt6.QtGui import QIcon
    from PyQt6.QtWidgets import QApplication

    qt_app = QApplication(sys.argv)
    qt_app.setApplicationName(APP_NAME)

    # Window/dock/tray icon shipped inside the package resources.
    from importlib.resources import files

    qt_app.setWindowIcon(QIcon(str(files("s3ui.resources").joinpath("s3ui.png"))))

    # Enforce a single running instance via a lock file in the app directory.
    instance_lock = QLockFile(str(APP_DIR / "s3ui.lock"))
    if not instance_lock.tryLock(100):
        logger.warning("Another instance is already running, exiting")
        sys.exit(0)

    # Open the application database before constructing the UI.
    from s3ui.db.database import Database

    db = Database()

    from s3ui.main_window import MainWindow

    window = MainWindow(db=db)
    window.show()
    logger.info("Window shown, entering event loop")

    # Blocks until the application quits, then release resources in order.
    status = qt_app.exec()
    db.close()
    instance_lock.unlock()
    logger.info("Exiting with code %d", status)
    sys.exit(status)


if __name__ == "__main__":
    main()
|
s3ui/constants.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""Application-wide constants for S3UI."""

from pathlib import Path

# Identity and on-disk layout.
APP_NAME = "S3UI"
APP_DIR = Path.home() / ".s3ui"  # root directory for all per-user state
DB_PATH = APP_DIR / "s3ui.db"
LOG_DIR = APP_DIR / "logs"
LOG_FILE = LOG_DIR / "s3ui.log"
KEYRING_SERVICE = "s3ui"

_MIB = 1024 * 1024  # one binary megabyte

# Transfer defaults.
DEFAULT_PART_SIZE = 8 * _MIB
LARGE_PART_SIZE = 64 * _MIB
HUGE_PART_SIZE = 512 * _MIB
MULTIPART_THRESHOLD = 8 * _MIB
MAX_CONCURRENT_TRANSFERS = 4
MAX_RETRY_ATTEMPTS = 3

# Listing-cache defaults.
LISTING_CACHE_MAX_ENTRIES = 30
LISTING_CACHE_STALE_SECONDS = 30.0

# UI defaults.
MIN_WINDOW_WIDTH = 900
MIN_WINDOW_HEIGHT = 600
NAV_HISTORY_MAX = 50
TRANSFER_COALESCE_MS = 100

# Scratch directory for quick-open downloads.
TEMP_DIR = APP_DIR / "temp"

# Notification size threshold.
NOTIFY_SIZE_THRESHOLD = 100 * _MIB

# Quick-open size threshold (files larger than this go through the transfer queue).
QUICK_OPEN_THRESHOLD = 10 * _MIB

# Log rotation.
MAX_LOG_SIZE = 5 * _MIB  # per file
LOG_BACKUP_COUNT = 3
|
s3ui/core/__init__.py
ADDED
|
File without changes
|
s3ui/core/cost.py
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
"""Cost tracking and estimation for S3 operations."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from datetime import date, timedelta
|
|
8
|
+
from typing import TYPE_CHECKING
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from s3ui.db.database import Database
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger("s3ui.cost")
|
|
14
|
+
|
|
15
|
+
# Maps a logical request type to its counter column in the daily_usage table.
# Every column follows the "<type>_requests" naming pattern.
REQUEST_TYPE_COLUMN = {
    kind: f"{kind}_requests"
    for kind in ("put", "get", "list", "delete", "copy", "head")
}
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class DailyCost:
    """Estimated cost components for one calendar day."""

    date: str       # ISO date the estimate applies to
    storage: float  # storage cost share for the day
    requests: float # API request cost share for the day
    transfer: float # data transfer cost share for the day

    @property
    def total(self) -> float:
        """Combined estimated cost for the day."""
        return sum((self.storage, self.requests, self.transfer))
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class CostTracker:
    """Tracks S3 API usage and calculates cost estimates.

    Persists per-day request/byte counters in the ``daily_usage`` table and
    combines them with rates from ``cost_rates`` and size snapshots from
    ``bucket_snapshots``. All figures are estimates derived from local
    bookkeeping, not actual billing data.
    """

    def __init__(self, db: Database, bucket_id: int) -> None:
        # db: application Database wrapper (provides execute/fetchone/fetchall).
        # bucket_id: id of the bucket whose usage this tracker records.
        self._db = db
        self._bucket_id = bucket_id

    def record_request(self, request_type: str, count: int = 1) -> None:
        """Record API request(s) for cost tracking.

        ``request_type`` must be a key of REQUEST_TYPE_COLUMN
        ("put", "get", "list", "delete", "copy", "head");
        unknown types are logged and silently ignored.
        """
        column = REQUEST_TYPE_COLUMN.get(request_type)
        if not column:
            logger.warning("Unknown request type: %s", request_type)
            return

        today = date.today().isoformat()
        # The column name is interpolated into the SQL, but it can only come
        # from the fixed REQUEST_TYPE_COLUMN mapping above, so no injection
        # risk. Upsert: first request of the day inserts the row, later
        # requests increment the existing counter.
        self._db.execute(
            f"INSERT INTO daily_usage (bucket_id, usage_date, {column}) "
            f"VALUES (?, ?, ?) "
            f"ON CONFLICT(bucket_id, usage_date) DO UPDATE SET {column} = {column} + ?",
            (self._bucket_id, today, count, count),
        )

    def record_upload_bytes(self, size: int) -> None:
        """Record bytes uploaded.

        Adds ``size`` to today's bytes_uploaded counter (upsert).
        """
        today = date.today().isoformat()
        self._db.execute(
            "INSERT INTO daily_usage (bucket_id, usage_date, bytes_uploaded) "
            "VALUES (?, ?, ?) "
            "ON CONFLICT(bucket_id, usage_date) DO UPDATE SET bytes_uploaded = bytes_uploaded + ?",
            (self._bucket_id, today, size, size),
        )

    def record_download_bytes(self, size: int) -> None:
        """Record bytes downloaded.

        Adds ``size`` to today's bytes_downloaded counter (upsert).
        """
        today = date.today().isoformat()
        self._db.execute(
            "INSERT INTO daily_usage (bucket_id, usage_date, bytes_downloaded) "
            "VALUES (?, ?, ?) "
            "ON CONFLICT(bucket_id, usage_date) DO UPDATE "
            "SET bytes_downloaded = bytes_downloaded + ?",
            (self._bucket_id, today, size, size),
        )

    def get_rate(self, name: str) -> float:
        """Get a cost rate by name.

        Returns 0.0 when the rate is not present in cost_rates, so missing
        rates simply contribute nothing to an estimate.
        """
        row = self._db.fetchone("SELECT rate FROM cost_rates WHERE name = ?", (name,))
        return row["rate"] if row else 0.0

    def set_rate(self, name: str, rate: float) -> None:
        """Update a cost rate.

        NOTE(review): this is UPDATE-only — setting a rate name that has no
        existing row is a no-op; rows are presumably seeded by the schema
        migration. Verify against 001_initial.sql.
        """
        self._db.execute(
            "UPDATE cost_rates SET rate = ?, updated_at = datetime('now') WHERE name = ?",
            (rate, name),
        )

    def get_daily_cost(self, target_date: str) -> DailyCost:
        """Calculate estimated cost for a single day (ISO date string)."""
        storage_cost = self._calc_storage_cost(target_date)
        request_cost = self._calc_request_cost(target_date)
        transfer_cost = self._calc_transfer_cost(target_date)
        return DailyCost(
            date=target_date,
            storage=storage_cost,
            requests=request_cost,
            transfer=transfer_cost,
        )

    def get_daily_costs(self, start_date: str, end_date: str) -> list[DailyCost]:
        """Get daily costs for a date range (for charting).

        Both endpoints are inclusive ISO date strings; returns one DailyCost
        per day. An end date before the start date yields an empty list.
        """
        costs = []
        current = date.fromisoformat(start_date)
        end = date.fromisoformat(end_date)
        while current <= end:
            costs.append(self.get_daily_cost(current.isoformat()))
            current += timedelta(days=1)
        return costs

    def get_monthly_estimate(self) -> float:
        """Estimate the current month's total cost.

        Storage is projected from the latest snapshot over the whole month;
        requests and transfer are summed month-to-date only (no projection).
        """
        today = date.today()
        first_of_month = today.replace(day=1)

        # Storage: use the most recent snapshot, prorate for the full month
        snapshot = self._db.fetchone(
            "SELECT total_bytes, standard_bytes, ia_bytes, glacier_bytes, "
            "deep_archive_bytes, intelligent_tiering_bytes "
            "FROM bucket_snapshots WHERE bucket_id = ? "
            "ORDER BY snapshot_date DESC LIMIT 1",
            (self._bucket_id,),
        )
        if snapshot:
            import calendar

            # Daily storage cost times the actual number of days this month.
            days_in_month = calendar.monthrange(today.year, today.month)[1]
            storage_cost = self._storage_cost_from_snapshot(snapshot) * days_in_month
        else:
            storage_cost = 0.0

        # Requests and transfer: sum month-to-date
        request_cost = 0.0
        transfer_cost = 0.0
        rows = self._db.fetchall(
            "SELECT * FROM daily_usage WHERE bucket_id = ? AND usage_date >= ?",
            (self._bucket_id, first_of_month.isoformat()),
        )
        for row in rows:
            request_cost += self._request_cost_from_row(row)
            transfer_cost += self._transfer_cost_from_row(row)

        return storage_cost + request_cost + transfer_cost

    # --- Internal calculation methods ---

    def _calc_storage_cost(self, target_date: str) -> float:
        """Storage cost for one day based on the most recent snapshot."""
        # Use the latest snapshot taken on or before the target date.
        snapshot = self._db.fetchone(
            "SELECT * FROM bucket_snapshots WHERE bucket_id = ? AND snapshot_date <= ? "
            "ORDER BY snapshot_date DESC LIMIT 1",
            (self._bucket_id, target_date),
        )
        if not snapshot:
            return 0.0
        return self._storage_cost_from_snapshot(snapshot)

    def _storage_cost_from_snapshot(self, snapshot) -> float:
        """Calculate daily storage cost from a snapshot row.

        ``snapshot`` is a mapping-style row keyed by column name (presumably
        sqlite3.Row — TODO confirm against Database).
        """
        gb = 1024**3
        # Pairs of (snapshot byte column, rate name) per storage class.
        tiers = [
            ("standard_bytes", "storage_standard_gb_month"),
            ("ia_bytes", "storage_ia_gb_month"),
            ("glacier_bytes", "storage_glacier_gb_month"),
            ("deep_archive_bytes", "storage_deep_archive_gb_month"),
            ("intelligent_tiering_bytes", "storage_intelligent_tiering_gb_month"),
        ]
        cost = 0.0
        for col, rate_name in tiers:
            # Rates are per GB-month; divide by 30 as a flat-month approximation
            # to get a per-day figure. NULL byte columns count as 0.
            cost += (snapshot[col] or 0) / gb * self.get_rate(rate_name) / 30
        return cost

    def _calc_request_cost(self, target_date: str) -> float:
        """Request cost for one day; 0.0 when no usage row exists."""
        row = self._db.fetchone(
            "SELECT * FROM daily_usage WHERE bucket_id = ? AND usage_date = ?",
            (self._bucket_id, target_date),
        )
        if not row:
            return 0.0
        return self._request_cost_from_row(row)

    def _request_cost_from_row(self, row) -> float:
        """Sum request costs across all tracked request types for one usage row."""
        cost = 0.0
        cost += (row["put_requests"] or 0) * self.get_rate("put_request")
        cost += (row["get_requests"] or 0) * self.get_rate("get_request")
        cost += (row["list_requests"] or 0) * self.get_rate("list_request")
        cost += (row["delete_requests"] or 0) * self.get_rate("delete_request")
        cost += (row["copy_requests"] or 0) * self.get_rate("copy_request")
        cost += (row["head_requests"] or 0) * self.get_rate("head_request")
        return cost

    def _calc_transfer_cost(self, target_date: str) -> float:
        """Transfer cost for one day; 0.0 when no usage row exists."""
        row = self._db.fetchone(
            "SELECT * FROM daily_usage WHERE bucket_id = ? AND usage_date = ?",
            (self._bucket_id, target_date),
        )
        if not row:
            return 0.0
        return self._transfer_cost_from_row(row)

    def _transfer_cost_from_row(self, row) -> float:
        # Transfer IN is free
        # Transfer OUT is tiered
        gb = 1024**3
        bytes_out = row["bytes_downloaded"] or 0
        gb_out = bytes_out / gb

        # Two-tier model: first 100 GB at one rate, everything beyond at the
        # "next 10k" rate (usage past 10 TB is not broken out further here).
        cost = 0.0
        if gb_out <= 100:
            cost = gb_out * self.get_rate("transfer_out_gb_first_100")
        else:
            cost = 100 * self.get_rate("transfer_out_gb_first_100")
            cost += (gb_out - 100) * self.get_rate("transfer_out_gb_next_10k")

        return cost
|
s3ui/core/credentials.py
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
"""Credential storage via OS keyring, AWS config discovery, and profile management."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
|
|
7
|
+
import keyring
|
|
8
|
+
|
|
9
|
+
from s3ui.constants import KEYRING_SERVICE
|
|
10
|
+
from s3ui.core.errors import translate_error
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger("s3ui.credentials")
|
|
13
|
+
|
|
14
|
+
PROFILES_INDEX_KEY = "profiles"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass
class Profile:
    """A stored connection profile for an S3-compatible endpoint."""

    name: str
    access_key_id: str = ""
    secret_access_key: str = ""
    region: str = ""
    endpoint_url: str = ""  # empty string means the default endpoint
    # When True, credentials are resolved via boto3 Session(profile_name=name)
    # (the AWS CLI config chain) instead of the explicit keys above.
    is_aws_profile: bool = False
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class TestResult:
    """Outcome of a credential connection test."""

    success: bool
    # Bucket names returned on success; each instance gets its own list.
    buckets: list[str] = field(default_factory=list)
    error_message: str = ""  # short, user-facing failure message
    error_detail: str = ""   # technical detail for logs/diagnostics
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def discover_aws_profiles() -> list[str]:
    """Discover profile names from ~/.aws/config and ~/.aws/credentials.

    Returns a sorted list of available AWS CLI profile names
    (e.g., ["default", "work"]); empty on any failure.
    """
    try:
        import botocore.session

        found = list(botocore.session.Session().available_profiles)
        logger.debug("Discovered %d AWS profiles: %s", len(found), found)
        return sorted(found)
    except Exception:
        # botocore missing or config unreadable — report no profiles.
        logger.debug("Could not discover AWS profiles", exc_info=True)
        return []
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def get_aws_profile_region(profile_name: str) -> str:
    """Read the region configured for an AWS CLI profile, or empty string."""
    try:
        import botocore.session

        configured = botocore.session.Session(profile=profile_name)
        return configured.get_config_variable("region") or ""
    except Exception:
        # Missing botocore or an unknown profile both mean "no region known".
        return ""
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class CredentialStore:
    """Manages AWS credential profiles — both AWS CLI profiles and custom keyring profiles.

    Custom profiles are stored as JSON blobs in the OS keyring under
    "profile:<name>", with a JSON list of names under the index key.
    """

    def list_profiles(self) -> list[str]:
        """Return names of all saved custom profiles (from keyring)."""
        stored = keyring.get_password(KEYRING_SERVICE, PROFILES_INDEX_KEY)
        if not stored:
            return []
        try:
            return json.loads(stored)
        except (json.JSONDecodeError, TypeError):
            # A corrupt index is treated the same as an empty one.
            return []

    def get_profile(self, name: str) -> Profile | None:
        """Load a custom profile by name from the keyring."""
        stored = keyring.get_password(KEYRING_SERVICE, f"profile:{name}")
        if not stored:
            return None
        try:
            fields = json.loads(stored)
            return Profile(
                name=name,
                access_key_id=fields.get("access_key_id", ""),
                secret_access_key=fields.get("secret_access_key", ""),
                region=fields.get("region", ""),
                endpoint_url=fields.get("endpoint_url", ""),
                is_aws_profile=fields.get("is_aws_profile", False),
            )
        except (json.JSONDecodeError, KeyError, TypeError):
            logger.error("Corrupt profile data for '%s'", name)
            return None

    def save_profile(self, profile: Profile) -> None:
        """Save a profile to the keyring and update the index."""
        payload = json.dumps(
            {
                "access_key_id": profile.access_key_id,
                "secret_access_key": profile.secret_access_key,
                "region": profile.region,
                "endpoint_url": profile.endpoint_url,
                "is_aws_profile": profile.is_aws_profile,
            }
        )
        keyring.set_password(KEYRING_SERVICE, f"profile:{profile.name}", payload)

        # Keep the index of profile names in sync with the stored blobs.
        known = self.list_profiles()
        if profile.name not in known:
            known.append(profile.name)
            keyring.set_password(KEYRING_SERVICE, PROFILES_INDEX_KEY, json.dumps(known))
        logger.info("Saved profile '%s' (aws_profile=%s)", profile.name, profile.is_aws_profile)

    def delete_profile(self, name: str) -> None:
        """Remove a profile from the keyring and index."""
        keyring.delete_password(KEYRING_SERVICE, f"profile:{name}")

        known = self.list_profiles()
        if name in known:
            known.remove(name)
            keyring.set_password(KEYRING_SERVICE, PROFILES_INDEX_KEY, json.dumps(known))
        logger.info("Deleted profile '%s'", name)

    def test_connection(self, profile: Profile) -> TestResult:
        """Test AWS credentials by calling list_buckets.

        Supports both AWS CLI profiles (is_aws_profile=True) and explicit keys.
        Never raises: any failure is translated into a TestResult.
        """
        try:
            import boto3

            endpoint = profile.endpoint_url or None
            if profile.is_aws_profile:
                # Delegate credential resolution to the AWS CLI config chain.
                session = boto3.Session(profile_name=profile.name)
                client = session.client(
                    "s3",
                    region_name=profile.region or None,
                    endpoint_url=endpoint,
                )
            else:
                client = boto3.client(
                    "s3",
                    aws_access_key_id=profile.access_key_id,
                    aws_secret_access_key=profile.secret_access_key,
                    region_name=profile.region,
                    endpoint_url=endpoint,
                )
            names = [bucket["Name"] for bucket in client.list_buckets().get("Buckets", [])]
            logger.info(
                "Connection test succeeded for profile '%s': %d buckets",
                profile.name,
                len(names),
            )
            return TestResult(success=True, buckets=names)
        except Exception as exc:
            user_msg, detail = translate_error(exc)
            logger.warning("Connection test failed for profile '%s': %s", profile.name, detail)
            return TestResult(
                success=False,
                buckets=[],
                error_message=user_msg,
                error_detail=detail,
            )
|