goliath-utils 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- goliath_utils-0.1.1/PKG-INFO +35 -0
- goliath_utils-0.1.1/README.md +25 -0
- goliath_utils-0.1.1/pyproject.toml +27 -0
- goliath_utils-0.1.1/setup.cfg +4 -0
- goliath_utils-0.1.1/src/goliath_utils/__init__.py +1 -0
- goliath_utils-0.1.1/src/goliath_utils/s3/__init__.py +4 -0
- goliath_utils-0.1.1/src/goliath_utils/s3/_auth.py +133 -0
- goliath_utils-0.1.1/src/goliath_utils/s3/_client.py +187 -0
- goliath_utils-0.1.1/src/goliath_utils/s3/_errors.py +16 -0
- goliath_utils-0.1.1/src/goliath_utils/secrets/__init__.py +5 -0
- goliath_utils-0.1.1/src/goliath_utils.egg-info/PKG-INFO +35 -0
- goliath_utils-0.1.1/src/goliath_utils.egg-info/SOURCES.txt +13 -0
- goliath_utils-0.1.1/src/goliath_utils.egg-info/dependency_links.txt +1 -0
- goliath_utils-0.1.1/src/goliath_utils.egg-info/requires.txt +3 -0
- goliath_utils-0.1.1/src/goliath_utils.egg-info/top_level.txt +1 -0
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: goliath-utils
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Lightweight internal utilities for the Goliath platform
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: httpx>=0.27
|
|
8
|
+
Requires-Dist: ipykernel>=7.2.0
|
|
9
|
+
Requires-Dist: python-dotenv>=1.2.2
|
|
10
|
+
|
|
11
|
+
# goliath-utils
|
|
12
|
+
|
|
13
|
+
Lightweight internal utilities for the Goliath platform.
|
|
14
|
+
|
|
15
|
+
## Install
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
uv add goliath-utils
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## S3 Client
|
|
22
|
+
|
|
23
|
+
A minimal S3 client using httpx — no boto3 required.
|
|
24
|
+
|
|
25
|
+
```python
|
|
26
|
+
from goliath_utils.s3 import S3Client
|
|
27
|
+
|
|
28
|
+
# Reads AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION from env
|
|
29
|
+
with S3Client() as s3:
|
|
30
|
+
s3.create_bucket("my-bucket")
|
|
31
|
+
s3.upload_file("my-bucket", "data/file.csv", "./file.csv")
|
|
32
|
+
s3.download_file("my-bucket", "data/file.csv", "./downloaded.csv")
|
|
33
|
+
keys = s3.list_objects("my-bucket", prefix="data/")
|
|
34
|
+
s3.delete_bucket("my-bucket")
|
|
35
|
+
```
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# goliath-utils
|
|
2
|
+
|
|
3
|
+
Lightweight internal utilities for the Goliath platform.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
uv add goliath-utils
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## S3 Client
|
|
12
|
+
|
|
13
|
+
A minimal S3 client using httpx — no boto3 required.
|
|
14
|
+
|
|
15
|
+
```python
|
|
16
|
+
from goliath_utils.s3 import S3Client
|
|
17
|
+
|
|
18
|
+
# Reads AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION from env
|
|
19
|
+
with S3Client() as s3:
|
|
20
|
+
s3.create_bucket("my-bucket")
|
|
21
|
+
s3.upload_file("my-bucket", "data/file.csv", "./file.csv")
|
|
22
|
+
s3.download_file("my-bucket", "data/file.csv", "./downloaded.csv")
|
|
23
|
+
keys = s3.list_objects("my-bucket", prefix="data/")
|
|
24
|
+
s3.delete_bucket("my-bucket")
|
|
25
|
+
```
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=69", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "goliath-utils"
|
|
7
|
+
version = "0.1.1"
|
|
8
|
+
description = "Lightweight internal utilities for the Goliath platform"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.11"
|
|
11
|
+
dependencies = [
|
|
12
|
+
"httpx>=0.27",
|
|
13
|
+
"ipykernel>=7.2.0",
|
|
14
|
+
"python-dotenv>=1.2.2",
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
[tool.setuptools]
|
|
18
|
+
package-dir = {"" = "src"}
|
|
19
|
+
|
|
20
|
+
[tool.setuptools.packages.find]
|
|
21
|
+
where = ["src"]
|
|
22
|
+
|
|
23
|
+
[dependency-groups]
|
|
24
|
+
dev = [
|
|
25
|
+
"build>=1.4.2",
|
|
26
|
+
"twine>=6.2.0",
|
|
27
|
+
]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.1"
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
"""AWS Signature Version 4 signing — stdlib only."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import hmac
|
|
7
|
+
from datetime import datetime, timezone
|
|
8
|
+
from urllib.parse import parse_qsl, quote, urlparse
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _sha256_hex(data: bytes) -> str:
|
|
12
|
+
return hashlib.sha256(data).hexdigest()
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _uri_encode(value: str, encode_slash: bool = True) -> str:
|
|
16
|
+
safe = "" if encode_slash else "/"
|
|
17
|
+
return quote(value, safe=safe)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _canonical_query_string(query: str) -> str:
    """Encode and lexicographically sort query parameters (SigV4 rules)."""
    if not query:
        return ""
    # keep_blank_values preserves parameters like "list-type=2&marker=".
    pairs = parse_qsl(query, keep_blank_values=True)
    encoded_pairs = [(_uri_encode(name), _uri_encode(value)) for name, value in pairs]
    encoded_pairs.sort()
    return "&".join(f"{name}={value}" for name, value in encoded_pairs)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _derive_signing_key(
|
|
30
|
+
secret_key: str, date_stamp: str, region: str, service: str
|
|
31
|
+
) -> bytes:
|
|
32
|
+
k_date = hmac.new(
|
|
33
|
+
f"AWS4{secret_key}".encode("utf-8"),
|
|
34
|
+
date_stamp.encode("utf-8"),
|
|
35
|
+
hashlib.sha256,
|
|
36
|
+
).digest()
|
|
37
|
+
k_region = hmac.new(k_date, region.encode("utf-8"), hashlib.sha256).digest()
|
|
38
|
+
k_service = hmac.new(k_region, service.encode("utf-8"), hashlib.sha256).digest()
|
|
39
|
+
k_signing = hmac.new(k_service, b"aws4_request", hashlib.sha256).digest()
|
|
40
|
+
return k_signing
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _make_canonical_request(
    method: str,
    path: str,
    query_string: str,
    headers: dict[str, str],
    signed_header_names: str,
    payload_hash: str,
) -> str:
    """Assemble the SigV4 canonical request string.

    *signed_header_names* is the ";"-joined header-name list; every name
    in it must be a key of *headers*.
    """
    header_lines = "".join(
        f"{name}:{headers[name]}\n" for name in signed_header_names.split(";")
    )
    # header_lines keeps its trailing "\n"; joined below, that produces the
    # blank line the spec requires between headers and the signed-name list.
    parts = (
        method,
        _uri_encode(path, encode_slash=False),
        query_string,
        header_lines,
        signed_header_names,
        payload_hash,
    )
    return "\n".join(parts)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _make_string_to_sign(
    datetime_stamp: str,
    credential_scope: str,
    canonical_request: str,
) -> str:
    """Build the fixed-format SigV4 string-to-sign.

    Four newline-separated fields: algorithm name, request timestamp,
    credential scope, and the hex SHA-256 of the canonical request.
    """
    hashed_request = _sha256_hex(canonical_request.encode("utf-8"))
    return (
        "AWS4-HMAC-SHA256"
        f"\n{datetime_stamp}"
        f"\n{credential_scope}"
        f"\n{hashed_request}"
    )
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def sign_request(
    method: str,
    url: str,
    headers: dict[str, str],
    payload: bytes,
    *,
    access_key: str,
    secret_key: str,
    region: str,
    service: str = "s3",
) -> dict[str, str]:
    """Sign an HTTP request with AWS SigV4. Returns updated headers dict.

    The returned dict contains the caller's headers (names lowercased)
    plus ``x-amz-date``, ``x-amz-content-sha256``, and ``authorization``.
    The input *headers* dict is not mutated.

    Raises:
        ValueError: if *url* has no hostname.
    """
    now = datetime.now(timezone.utc)
    datetime_stamp = now.strftime("%Y%m%dT%H%M%SZ")
    date_stamp = now.strftime("%Y%m%d")

    parsed = urlparse(url)
    host = parsed.hostname
    if host is None:
        # Fail fast: signing "host:None" would produce a signature AWS
        # always rejects, with a far less obvious error.
        raise ValueError(f"URL has no hostname: {url!r}")
    # NOTE(review): parsed.hostname drops any explicit port; fine for
    # standard AWS endpoints, but confirm before pointing this at a
    # non-default-port endpoint (e.g. localstack).
    path = parsed.path or "/"
    query_string = _canonical_query_string(parsed.query)

    payload_hash = _sha256_hex(payload)

    # SigV4 canonicalization requires lowercase header names and trimmed
    # values; normalize so mixed-case caller headers (e.g. "Content-Type")
    # still sign correctly. This also copies, so the caller's dict is safe.
    headers = {name.lower().strip(): value.strip() for name, value in headers.items()}
    headers["host"] = host
    headers["x-amz-date"] = datetime_stamp
    headers["x-amz-content-sha256"] = payload_hash

    # Sign every header we send; names must be sorted in the credential.
    signed_header_names = ";".join(sorted(headers))

    canonical_request = _make_canonical_request(
        method, path, query_string, headers, signed_header_names, payload_hash
    )

    credential_scope = f"{date_stamp}/{region}/{service}/aws4_request"
    string_to_sign = _make_string_to_sign(
        datetime_stamp, credential_scope, canonical_request
    )

    signing_key = _derive_signing_key(secret_key, date_stamp, region, service)
    signature = hmac.new(
        signing_key, string_to_sign.encode("utf-8"), hashlib.sha256
    ).hexdigest()

    headers["authorization"] = (
        f"AWS4-HMAC-SHA256 "
        f"Credential={access_key}/{credential_scope}, "
        f"SignedHeaders={signed_header_names}, "
        f"Signature={signature}"
    )

    # Remove host header — httpx sets it automatically; sending our own
    # as well would duplicate it.
    del headers["host"]

    return headers
|
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
"""Lightweight S3 client using httpx and AWS SigV4."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations

import os
import xml.etree.ElementTree as ET
from pathlib import Path
from urllib.parse import quote

import httpx
from dotenv import load_dotenv

from ._auth import sign_request
from ._errors import S3AuthError, S3RequestError
|
|
14
|
+
|
|
15
|
+
# XML namespace of S3 API responses (2006-03-01 schema); used to qualify
# element lookups when parsing ListObjectsV2 results.
S3_NS = "http://s3.amazonaws.com/doc/2006-03-01/"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class S3Client:
    """Minimal S3 client — no boto3 required.

    Reads credentials from constructor args or environment variables:
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION.

    Usable as a context manager; ``close()`` releases the underlying
    HTTP connection pool.

    Raises:
        S3AuthError: if no credentials can be resolved.
        S3RequestError: from any API call that returns a non-2xx response.
    """

    def __init__(
        self,
        *,
        access_key: str | None = None,
        secret_key: str | None = None,
        region: str | None = None,
        timeout: float = 30.0,
    ):
        # Load a local .env file if present (no-op otherwise) so env-based
        # credentials work in development checkouts.
        load_dotenv()

        self.access_key = access_key or os.environ.get("AWS_ACCESS_KEY_ID", "")
        self.secret_key = secret_key or os.environ.get("AWS_SECRET_ACCESS_KEY", "")
        self.region = region or os.environ.get("AWS_REGION", "us-east-1")

        if not self.access_key or not self.secret_key:
            raise S3AuthError(
                "AWS credentials required. Set AWS_ACCESS_KEY_ID and "
                "AWS_SECRET_ACCESS_KEY environment variables, or pass them directly."
            )

        self._http = httpx.Client(timeout=timeout)

    def close(self) -> None:
        """Close the underlying HTTP connection pool."""
        self._http.close()

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        self.close()

    # -- internal helpers --------------------------------------------------

    def _endpoint(self, bucket: str, key: str = "") -> str:
        """Virtual-hosted-style URL for *bucket* and optional object *key*."""
        # NOTE(review): key is not percent-encoded here; keys containing
        # characters that need encoding rely on httpx's URL handling —
        # confirm behavior for keys with spaces/special characters.
        path = f"/{key}" if key else "/"
        return f"https://{bucket}.s3.{self.region}.amazonaws.com{path}"

    def _signed_request(
        self,
        method: str,
        url: str,
        payload: bytes = b"",
        extra_headers: dict[str, str] | None = None,
    ) -> httpx.Response:
        """Sign *method*/*url* with SigV4 and send it; raise on non-2xx."""
        headers = dict(extra_headers or {})
        signed = sign_request(
            method,
            url,
            headers,
            payload,
            access_key=self.access_key,
            secret_key=self.secret_key,
            region=self.region,
        )
        resp = self._http.request(method, url, headers=signed, content=payload)
        if resp.status_code >= 300:
            self._raise_error(resp)
        return resp

    @staticmethod
    def _raise_error(resp: httpx.Response) -> None:
        """Translate an S3 XML error response into S3RequestError.

        Falls back to code "Unknown" / the raw body when the response is
        not parseable XML.
        """
        code = "Unknown"
        message = resp.text
        try:
            root = ET.fromstring(resp.text)
            # S3 error documents are un-namespaced: <Error><Code>...<Message>...
            code_el = root.find("Code")
            msg_el = root.find("Message")
            if code_el is not None and code_el.text:
                code = code_el.text
            if msg_el is not None and msg_el.text:
                message = msg_el.text
        except ET.ParseError:
            pass
        raise S3RequestError(resp.status_code, code, message)

    # -- public API --------------------------------------------------------

    def create_bucket(self, bucket: str) -> None:
        """Create *bucket* in this client's region."""
        url = self._endpoint(bucket)
        if self.region == "us-east-1":
            # us-east-1 must NOT send a LocationConstraint body.
            self._signed_request("PUT", url)
        else:
            body = (
                f'<CreateBucketConfiguration xmlns="{S3_NS}">'
                f"<LocationConstraint>{self.region}</LocationConstraint>"
                f"</CreateBucketConfiguration>"
            ).encode()
            self._signed_request("PUT", url, payload=body)

    def delete_bucket(self, bucket: str) -> None:
        """Delete *bucket* (must already be empty, per S3 semantics)."""
        url = self._endpoint(bucket)
        self._signed_request("DELETE", url)

    def delete_object(self, bucket: str, key: str) -> None:
        """Delete object *key* from *bucket*."""
        url = self._endpoint(bucket, key)
        self._signed_request("DELETE", url)

    def copy_object(
        self,
        src_bucket: str,
        src_key: str,
        dest_bucket: str,
        dest_key: str,
    ) -> None:
        """Server-side copy of one object to another bucket/key."""
        url = self._endpoint(dest_bucket, dest_key)
        # AWS requires x-amz-copy-source to be URL-encoded; "/" separators
        # stay literal.
        copy_source = quote(f"/{src_bucket}/{src_key}", safe="/")
        self._signed_request(
            "PUT", url, extra_headers={"x-amz-copy-source": copy_source}
        )

    def move_object(
        self,
        src_bucket: str,
        src_key: str,
        dest_bucket: str,
        dest_key: str,
    ) -> None:
        """Copy the object to the destination, then delete the source.

        Not atomic: a failure between the two steps leaves both copies.
        """
        self.copy_object(src_bucket, src_key, dest_bucket, dest_key)
        self.delete_object(src_bucket, src_key)

    def upload_file(
        self, bucket: str, key: str, file_path: Path | str
    ) -> None:
        """Upload the local file at *file_path* to *bucket*/*key*.

        Reads the whole file into memory (single PUT; no multipart).
        """
        file_path = Path(file_path)
        data = file_path.read_bytes()
        url = self._endpoint(bucket, key)
        self._signed_request("PUT", url, payload=data)

    def download_file(
        self, bucket: str, key: str, dest_path: Path | str
    ) -> None:
        """Download *bucket*/*key* to *dest_path*, creating parent dirs."""
        dest_path = Path(dest_path)
        url = self._endpoint(bucket, key)
        resp = self._signed_request("GET", url)
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        dest_path.write_bytes(resp.content)

    def list_objects(self, bucket: str, prefix: str = "") -> list[str]:
        """Return all object keys in *bucket*, optionally under *prefix*.

        Follows ListObjectsV2 pagination until IsTruncated is false.
        """
        url = self._endpoint(bucket)
        keys: list[str] = []
        continuation_token = ""

        while True:
            # Percent-encode query values: prefixes routinely contain "/",
            # and continuation tokens may contain "=", "+", etc. Sending
            # them raw corrupts query parsing and breaks the signature.
            qs = f"list-type=2&prefix={quote(prefix, safe='')}"
            if continuation_token:
                qs += f"&continuation-token={quote(continuation_token, safe='')}"
            resp = self._signed_request("GET", f"{url}?{qs}")

            root = ET.fromstring(resp.text)
            for contents in root.findall(f"{{{S3_NS}}}Contents"):
                key_el = contents.find(f"{{{S3_NS}}}Key")
                if key_el is not None and key_el.text:
                    keys.append(key_el.text)

            is_truncated = root.find(f"{{{S3_NS}}}IsTruncated")
            if is_truncated is None or is_truncated.text != "true":
                break
            token_el = root.find(f"{{{S3_NS}}}NextContinuationToken")
            if token_el is None or not token_el.text:
                break
            continuation_token = token_el.text

        return keys
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
class S3Error(Exception):
    """Base class for all S3-related errors raised by this package."""


class S3AuthError(S3Error):
    """Raised when AWS credentials are missing or invalid."""


class S3RequestError(S3Error):
    """Raised when the S3 API returns a non-2xx response."""

    def __init__(self, status_code: int, code: str, message: str):
        # Keep the structured fields so callers can branch on them
        # instead of parsing the formatted message.
        self.status_code = status_code
        self.code = code
        self.message = message
        summary = f"S3 error {status_code} ({code}): {message}"
        super().__init__(summary)
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: goliath-utils
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Lightweight internal utilities for the Goliath platform
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: httpx>=0.27
|
|
8
|
+
Requires-Dist: ipykernel>=7.2.0
|
|
9
|
+
Requires-Dist: python-dotenv>=1.2.2
|
|
10
|
+
|
|
11
|
+
# goliath-utils
|
|
12
|
+
|
|
13
|
+
Lightweight internal utilities for the Goliath platform.
|
|
14
|
+
|
|
15
|
+
## Install
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
uv add goliath-utils
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## S3 Client
|
|
22
|
+
|
|
23
|
+
A minimal S3 client using httpx — no boto3 required.
|
|
24
|
+
|
|
25
|
+
```python
|
|
26
|
+
from goliath_utils.s3 import S3Client
|
|
27
|
+
|
|
28
|
+
# Reads AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION from env
|
|
29
|
+
with S3Client() as s3:
|
|
30
|
+
s3.create_bucket("my-bucket")
|
|
31
|
+
s3.upload_file("my-bucket", "data/file.csv", "./file.csv")
|
|
32
|
+
s3.download_file("my-bucket", "data/file.csv", "./downloaded.csv")
|
|
33
|
+
keys = s3.list_objects("my-bucket", prefix="data/")
|
|
34
|
+
s3.delete_bucket("my-bucket")
|
|
35
|
+
```
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
src/goliath_utils/__init__.py
|
|
4
|
+
src/goliath_utils.egg-info/PKG-INFO
|
|
5
|
+
src/goliath_utils.egg-info/SOURCES.txt
|
|
6
|
+
src/goliath_utils.egg-info/dependency_links.txt
|
|
7
|
+
src/goliath_utils.egg-info/requires.txt
|
|
8
|
+
src/goliath_utils.egg-info/top_level.txt
|
|
9
|
+
src/goliath_utils/s3/__init__.py
|
|
10
|
+
src/goliath_utils/s3/_auth.py
|
|
11
|
+
src/goliath_utils/s3/_client.py
|
|
12
|
+
src/goliath_utils/s3/_errors.py
|
|
13
|
+
src/goliath_utils/secrets/__init__.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
goliath_utils
|