mcp-souschef 3.5.3__py3-none-any.whl → 4.0.0__py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in their respective public registries.
- {mcp_souschef-3.5.3.dist-info → mcp_souschef-4.0.0.dist-info}/METADATA +136 -8
- {mcp_souschef-3.5.3.dist-info → mcp_souschef-4.0.0.dist-info}/RECORD +17 -10
- souschef/core/ai_schemas.py +6 -1
- souschef/github/__init__.py +17 -0
- souschef/github/agent_control.py +459 -0
- souschef/server.py +193 -0
- souschef/storage/__init__.py +39 -0
- souschef/storage/blob.py +331 -0
- souschef/storage/config.py +163 -0
- souschef/storage/database.py +1182 -0
- souschef/ui/app.py +17 -4
- souschef/ui/pages/chef_server_settings.py +411 -2
- souschef/ui/pages/cookbook_analysis.py +352 -6
- souschef/ui/pages/history.py +964 -0
- {mcp_souschef-3.5.3.dist-info → mcp_souschef-4.0.0.dist-info}/WHEEL +0 -0
- {mcp_souschef-3.5.3.dist-info → mcp_souschef-4.0.0.dist-info}/entry_points.txt +0 -0
- {mcp_souschef-3.5.3.dist-info → mcp_souschef-4.0.0.dist-info}/licenses/LICENSE +0 -0
souschef/storage/blob.py ADDED
@@ -0,0 +1,331 @@
+"""Blob storage integration for SousChef generated assets."""
+
+import importlib
+import io
+import os
+import shutil
+import tempfile
+import zipfile
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Any
+
+from souschef.core.path_utils import _ensure_within_base_path, _normalize_path
+from souschef.storage.config import load_blob_settings
+
+
+class BlobStorage(ABC):
+    """Abstract base class for blob storage backends."""
+
+    @abstractmethod
+    def upload(self, local_path: Path, storage_key: str) -> str:
+        """
+        Upload a file or directory to blob storage.
+
+        Args:
+            local_path: Path to local file or directory.
+            storage_key: Key/path in blob storage.
+
+        Returns:
+            Storage key for the uploaded content.
+
+        """
+        pass
+
+    @abstractmethod
+    def download(self, storage_key: str, local_path: Path) -> Path:
+        """
+        Download a file from blob storage.
+
+        Args:
+            storage_key: Key/path in blob storage.
+            local_path: Path to save downloaded content.
+
+        Returns:
+            Path to downloaded file.
+
+        """
+        pass
+
+    @abstractmethod
+    def delete(self, storage_key: str) -> bool:
+        """
+        Delete a file from blob storage.
+
+        Args:
+            storage_key: Key/path in blob storage.
+
+        Returns:
+            True if deleted successfully.
+
+        """
+        pass
+
+    @abstractmethod
+    def list_keys(self, prefix: str = "") -> list[str]:
+        """
+        List storage keys with optional prefix.
+
+        Args:
+            prefix: Optional prefix to filter keys.
+
+        Returns:
+            List of storage keys.
+
+        """
+        pass
+
+
+class LocalBlobStorage(BlobStorage):
+    """Local filesystem implementation of blob storage."""
+
+    def __init__(self, base_path: str | Path | None = None):
+        """
+        Initialise local blob storage.
+
+        Args:
+            base_path: Base directory for storage. If None, uses default location.
+
+        """
+        if base_path is None:
+            base_path = self._get_default_storage_path()
+        else:
+            base_path = _normalize_path(str(base_path))
+
+        self.base_path = base_path
+        self.base_path.mkdir(parents=True, exist_ok=True, mode=0o700)
+
+    def _get_default_storage_path(self) -> Path:
+        """Get the default storage path."""
+        data_dir = Path(tempfile.gettempdir()) / ".souschef" / "storage"
+        data_dir.mkdir(parents=True, exist_ok=True, mode=0o700)
+        return _ensure_within_base_path(data_dir, Path(tempfile.gettempdir()))
+
+    def _get_full_path(self, storage_key: str) -> Path:
+        """Get full filesystem path for a storage key."""
+        # Sanitise the storage key to prevent path traversal
+        safe_key = storage_key.replace("..", "_").replace(os.sep, "_")
+        full_path = self.base_path / safe_key
+        return _ensure_within_base_path(full_path, self.base_path)
+
+    def upload(self, local_path: Path, storage_key: str) -> str:
+        """Upload a file or directory to local storage."""
+        # Validate local_path is safe
+        local_path = _normalize_path(str(local_path))
+
+        dest_path = self._get_full_path(storage_key)
+
+        if local_path.is_dir():
+            # Create a ZIP archive of the directory
+            zip_path = dest_path.with_suffix(".zip")
+            with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
+                for file_path in local_path.rglob("*"):
+                    if file_path.is_file():
+                        arcname = file_path.relative_to(local_path)
+                        zipf.write(file_path, arcname)
+            return storage_key + ".zip"
+        else:
+            # Copy single file
+            dest_path.parent.mkdir(parents=True, exist_ok=True)
+            shutil.copy2(local_path, dest_path)
+            return storage_key
+
+    def download(self, storage_key: str, local_path: Path) -> Path:
+        """Download a file from local storage."""
+        source_path = self._get_full_path(storage_key)
+
+        if not source_path.exists():
+            raise FileNotFoundError(f"Storage key not found: {storage_key}")
+
+        # Validate local_path is safe
+        local_path = _normalize_path(str(local_path))
+
+        if source_path.suffix == ".zip":
+            # Extract ZIP archive
+            local_path.mkdir(parents=True, exist_ok=True)
+            with zipfile.ZipFile(source_path, "r") as zipf:
+                zipf.extractall(local_path)
+        else:
+            # Copy single file
+            local_path.parent.mkdir(parents=True, exist_ok=True)
+            shutil.copy2(source_path, local_path)
+
+        return local_path
+
+    def delete(self, storage_key: str) -> bool:
+        """Delete a file from local storage."""
+        try:
+            path = self._get_full_path(storage_key)
+            if path.exists():
+                if path.is_dir():
+                    shutil.rmtree(path)
+                else:
+                    path.unlink()
+                return True
+        except OSError:
+            # If deletion fails (permissions, file in use, etc.),
+            # silently fail and return False below.
+            pass
+        return False
+
+    def list_keys(self, prefix: str = "") -> list[str]:
+        """List storage keys with optional prefix."""
+        keys = []
+        for path in self.base_path.rglob("*"):
+            if path.is_file():
+                relative = path.relative_to(self.base_path)
+                key = str(relative)
+                if not prefix or key.startswith(prefix):
+                    keys.append(key)
+        return sorted(keys)
+
+
+class S3BlobStorage(BlobStorage):
+    """S3-compatible blob storage implementation."""
+
+    def __init__(
+        self,
+        bucket_name: str,
+        access_key: str | None = None,
+        secret_key: str | None = None,
+        endpoint_url: str | None = None,
+        region: str = "us-east-1",
+    ):
+        """
+        Initialise S3 blob storage.
+
+        Args:
+            bucket_name: S3 bucket name.
+            access_key: AWS access key (or from environment).
+            secret_key: AWS secret key (or from environment).
+            endpoint_url: Custom endpoint URL (for MinIO/LocalStack).
+            region: AWS region.
+
+        """
+        self.bucket_name = bucket_name
+        self.endpoint_url = endpoint_url
+        self.region = region
+
+        # Try to import boto3
+        try:
+            boto3 = importlib.import_module("boto3")
+        except ImportError as exc:
+            raise ImportError(
+                "boto3 is required for S3 storage. Install with: pip install boto3"
+            ) from exc
+
+        # Configure client
+        config_kwargs: dict[str, Any] = {}
+        if access_key and secret_key:
+            config_kwargs["aws_access_key_id"] = access_key
+            config_kwargs["aws_secret_access_key"] = secret_key
+        if endpoint_url:
+            config_kwargs["endpoint_url"] = endpoint_url
+
+        self.s3 = boto3.client("s3", region_name=region, **config_kwargs)
+
+        # Ensure bucket exists
+        try:
+            self.s3.head_bucket(Bucket=bucket_name)
+        except Exception:
+            self.s3.create_bucket(Bucket=bucket_name)
+
+    def upload(self, local_path: Path, storage_key: str) -> str:
+        """Upload a file or directory to S3."""
+        # Validate local_path is safe
+        local_path = _normalize_path(str(local_path))
+
+        if local_path.is_dir():
+            # Create a ZIP archive
+            zip_buffer = io.BytesIO()
+            with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf:
+                for file_path in local_path.rglob("*"):
+                    if file_path.is_file():
+                        arcname = file_path.relative_to(local_path)
+                        zipf.write(file_path, arcname)
+
+            zip_buffer.seek(0)
+            storage_key = storage_key + ".zip"
+            self.s3.upload_fileobj(zip_buffer, self.bucket_name, storage_key)
+        else:
+            # Upload single file
+            with local_path.open("rb") as f:
+                self.s3.upload_fileobj(f, self.bucket_name, storage_key)
+
+        return storage_key
+
+    def download(self, storage_key: str, local_path: Path) -> Path:
+        """Download a file from S3."""
+        # Validate local_path is safe
+        local_path = _normalize_path(str(local_path))
+
+        if storage_key.endswith(".zip"):
+            # Download and extract ZIP
+            zip_buffer = io.BytesIO()
+            self.s3.download_fileobj(self.bucket_name, storage_key, zip_buffer)
+            zip_buffer.seek(0)
+
+            local_path.mkdir(parents=True, exist_ok=True)
+            with zipfile.ZipFile(zip_buffer, "r") as zipf:
+                zipf.extractall(local_path)
+        else:
+            # Download single file
+            local_path.parent.mkdir(parents=True, exist_ok=True)
+            with local_path.open("wb") as f:
+                self.s3.download_fileobj(self.bucket_name, storage_key, f)
+
+        return local_path
+
+    def delete(self, storage_key: str) -> bool:
+        """Delete a file from S3."""
+        try:
+            self.s3.delete_object(Bucket=self.bucket_name, Key=storage_key)
+            return True
+        except Exception:
+            return False
+
+    def list_keys(self, prefix: str = "") -> list[str]:
+        """List storage keys with optional prefix."""
+        try:
+            response = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=prefix)
+            return [obj["Key"] for obj in response.get("Contents", [])]
+        except Exception:
+            return []
+
+
+# Singleton instance
+_blob_storage: BlobStorage | None = None
+
+
+def get_blob_storage(backend: str | None = None, **config: Any) -> BlobStorage:
+    """
+    Get or create the blob storage instance.
+
+    Args:
+        backend: Storage backend ('local', 's3', 'minio').
+        **config: Configuration options for the backend.
+
+    Returns:
+        BlobStorage instance.
+
+    """
+    global _blob_storage
+
+    settings = load_blob_settings()
+    resolved_backend = backend or settings.backend
+
+    if _blob_storage is None:
+        if resolved_backend == "local":
+            _blob_storage = LocalBlobStorage(config.get("base_path"))
+        elif resolved_backend in ["s3", "minio"]:
+            _blob_storage = S3BlobStorage(
+                bucket_name=config.get("bucket_name", settings.s3_bucket),
+                access_key=config.get("access_key", settings.s3_access_key),
+                secret_key=config.get("secret_key", settings.s3_secret_key),
+                endpoint_url=config.get("endpoint_url", settings.s3_endpoint),
+                region=config.get("region", settings.s3_region),
+            )
+        else:
+            raise ValueError(f"Unknown blob storage backend: {resolved_backend}")
+
+    return _blob_storage
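
To make the new storage API concrete, here is a minimal round-trip sketch against the local backend, based only on the code above. The scratch paths, key names, and file contents are illustrative assumptions, not values shipped in the package:

```python
import tempfile
from pathlib import Path

from souschef.storage.blob import LocalBlobStorage

# Illustrative scratch area; nothing below comes from the package itself.
work = Path(tempfile.mkdtemp())
(work / "src").mkdir()
(work / "src" / "recipe.rb").write_text("package 'git'\n")

storage = LocalBlobStorage(base_path=work / "store")

# Directory uploads are zipped and the returned key gains a ".zip" suffix,
# so callers must keep the key that upload() returns, not the one passed in.
archive_key = storage.upload(work / "src", "demo-cookbook")
assert archive_key == "demo-cookbook.zip"

# Downloads route on the ".zip" suffix: archives are extracted into the
# target directory, while plain files are copied to the target path.
restored = storage.download(archive_key, work / "restored")
print((restored / "recipe.rb").read_text())

print(storage.list_keys(prefix="demo-"))  # ['demo-cookbook.zip']
```

One design note worth flagging: `get_blob_storage()` caches the first instance it builds in the module-level `_blob_storage` singleton, so a later call with a different `backend` argument still returns whichever backend was created first.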
souschef/storage/config.py ADDED
@@ -0,0 +1,163 @@
+"""Storage configuration helpers for SousChef."""
+
+from __future__ import annotations
+
+import os
+from collections.abc import Mapping
+from dataclasses import dataclass
+from pathlib import Path
+from urllib.parse import quote_plus
+
+
+@dataclass(frozen=True)
+class DatabaseSettings:
+    """Database configuration settings."""
+
+    backend: str
+    sqlite_path: Path | None
+    postgres_dsn: str | None
+    postgres_host: str
+    postgres_port: int
+    postgres_name: str
+    postgres_user: str
+    postgres_password: str
+    postgres_sslmode: str
+
+
+@dataclass(frozen=True)
+class BlobSettings:
+    """Blob storage configuration settings."""
+
+    backend: str
+    s3_bucket: str
+    s3_region: str
+    s3_endpoint: str | None
+    s3_access_key: str | None
+    s3_secret_key: str | None
+
+
+def _normalise_backend(value: str, aliases: Mapping[str, str], default: str) -> str:
+    """Normalise backend names with aliases and defaults."""
+    candidate = value.strip().lower()
+    if not candidate:
+        return default
+    if candidate in aliases:
+        return aliases[candidate]
+    return candidate
+
+
+def _empty_to_none(value: str | None) -> str | None:
+    """Convert empty strings to None."""
+    if value is None:
+        return None
+    stripped = value.strip()
+    return stripped if stripped else None
+
+
+def load_database_settings(
+    env: Mapping[str, str] | None = None,
+) -> DatabaseSettings:
+    """
+    Load database settings from environment variables.
+
+    Args:
+        env: Optional environment mapping for testing.
+
+    Returns:
+        DatabaseSettings instance.
+
+    """
+    source = env if env is not None else os.environ
+
+    backend = _normalise_backend(
+        source.get("SOUSCHEF_DB_BACKEND", "sqlite"),
+        {"postgresql": "postgres", "postgre": "postgres"},
+        "sqlite",
+    )
+    if backend not in {"sqlite", "postgres"}:
+        backend = "sqlite"
+
+    sqlite_path_raw = _empty_to_none(source.get("SOUSCHEF_DB_PATH"))
+    sqlite_path = Path(sqlite_path_raw) if sqlite_path_raw else None
+
+    postgres_dsn = _empty_to_none(source.get("SOUSCHEF_DB_DSN"))
+
+    postgres_host = source.get("SOUSCHEF_DB_HOST", "postgres")
+    postgres_port = int(source.get("SOUSCHEF_DB_PORT", "5432"))
+    postgres_name = source.get("SOUSCHEF_DB_NAME", "souschef")
+    postgres_user = source.get("SOUSCHEF_DB_USER", "souschef")
+    postgres_password = source.get("SOUSCHEF_DB_PASSWORD", "souschef")
+    postgres_sslmode = source.get("SOUSCHEF_DB_SSLMODE", "disable")
+
+    return DatabaseSettings(
+        backend=backend,
+        sqlite_path=sqlite_path,
+        postgres_dsn=postgres_dsn,
+        postgres_host=postgres_host,
+        postgres_port=postgres_port,
+        postgres_name=postgres_name,
+        postgres_user=postgres_user,
+        postgres_password=postgres_password,
+        postgres_sslmode=postgres_sslmode,
+    )
+
+
+def build_postgres_dsn(settings: DatabaseSettings) -> str:
+    """
+    Build a PostgreSQL DSN from settings.
+
+    Args:
+        settings: Database settings to build the DSN from.
+
+    Returns:
+        PostgreSQL DSN string.
+
+    """
+    if settings.postgres_dsn:
+        return settings.postgres_dsn
+
+    user = quote_plus(settings.postgres_user)
+    password = quote_plus(settings.postgres_password)
+    host = settings.postgres_host
+    port = settings.postgres_port
+    name = settings.postgres_name
+    sslmode = quote_plus(settings.postgres_sslmode)
+
+    return f"postgresql://{user}:{password}@{host}:{port}/{name}?sslmode={sslmode}"
+
+
+def load_blob_settings(env: Mapping[str, str] | None = None) -> BlobSettings:
+    """
+    Load blob storage settings from environment variables.
+
+    Args:
+        env: Optional environment mapping for testing.
+
+    Returns:
+        BlobSettings instance.
+
+    """
+    source = env if env is not None else os.environ
+
+    backend = _normalise_backend(
+        source.get("SOUSCHEF_STORAGE_BACKEND", "local"),
+        {"minio": "s3"},
+        "local",
+    )
+    if backend not in {"local", "s3"}:
+        backend = "local"
+
+    s3_bucket = source.get("SOUSCHEF_S3_BUCKET", "souschef")
+    s3_region = source.get("SOUSCHEF_S3_REGION", "us-east-1")
+    s3_endpoint = _empty_to_none(source.get("SOUSCHEF_S3_ENDPOINT"))
+    s3_access_key = _empty_to_none(source.get("SOUSCHEF_S3_ACCESS_KEY"))
+    s3_secret_key = _empty_to_none(source.get("SOUSCHEF_S3_SECRET_KEY"))
+
+    return BlobSettings(
+        backend=backend,
+        s3_bucket=s3_bucket,
+        s3_region=s3_region,
+        s3_endpoint=s3_endpoint,
+        s3_access_key=s3_access_key,
+        s3_secret_key=s3_secret_key,
+    )
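
Both loaders accept an explicit `env` mapping, which makes the normalisation rules easy to exercise without touching the process environment. A small sketch with invented values, purely for illustration:

```python
from souschef.storage.config import (
    build_postgres_dsn,
    load_blob_settings,
    load_database_settings,
)

# Invented environment passed through the loaders' `env` parameter.
env = {
    "SOUSCHEF_DB_BACKEND": "postgresql",  # alias, normalised to "postgres"
    "SOUSCHEF_DB_HOST": "db.internal",
    "SOUSCHEF_DB_PASSWORD": "p@ss word",  # percent-encoded into the DSN
    "SOUSCHEF_STORAGE_BACKEND": "minio",  # alias, normalised to "s3"
    "SOUSCHEF_S3_ENDPOINT": "http://localhost:9000",
    "SOUSCHEF_S3_ACCESS_KEY": "",  # empty strings collapse to None
}

db = load_database_settings(env)
print(db.backend)  # postgres
print(build_postgres_dsn(db))
# postgresql://souschef:p%40ss+word@db.internal:5432/souschef?sslmode=disable

blob = load_blob_settings(env)
print(blob.backend, blob.s3_endpoint, blob.s3_access_key)
# s3 http://localhost:9000 None
```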