sobe 0.1__tar.gz → 0.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sobe might be problematic. Click here for more details.
- sobe-0.2.1/PKG-INFO +85 -0
- sobe-0.2.1/README.md +55 -0
- sobe-0.2.1/pyproject.toml +77 -0
- sobe-0.2.1/src/sobe/aws.py +74 -0
- sobe-0.2.1/src/sobe/config.py +77 -0
- sobe-0.2.1/src/sobe/main.py +84 -0
- sobe-0.1/PKG-INFO +0 -93
- sobe-0.1/README.md +0 -84
- sobe-0.1/pyproject.toml +0 -25
- sobe-0.1/src/sobe/main.py +0 -153
- {sobe-0.1 → sobe-0.2.1}/src/sobe/__init__.py +0 -0
sobe-0.2.1/PKG-INFO
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: sobe
|
|
3
|
+
Version: 0.2.1
|
|
4
|
+
Summary: AWS-based drop box uploader
|
|
5
|
+
Author: Liz Balbuena
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
8
|
+
Classifier: Environment :: Console
|
|
9
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
+
Classifier: Operating System :: OS Independent
|
|
11
|
+
Classifier: Programming Language :: Python
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
15
|
+
Classifier: Topic :: Communications :: File Sharing
|
|
16
|
+
Classifier: Topic :: Utilities
|
|
17
|
+
Requires-Dist: boto3>=1.40.49
|
|
18
|
+
Requires-Dist: platformdirs>=4.5.0
|
|
19
|
+
Requires-Dist: furo>=2024.8.6 ; extra == 'docs'
|
|
20
|
+
Requires-Dist: sphinx>=7.0.0 ; extra == 'docs'
|
|
21
|
+
Requires-Dist: sphinx-autodoc-typehints>=2.0.0 ; extra == 'docs'
|
|
22
|
+
Requires-Python: >=3.11
|
|
23
|
+
Project-URL: Changelog, https://github.com/Liz4v/sobe/releases
|
|
24
|
+
Project-URL: Documentation, https://github.com/Liz4v/sobe/blob/main/README.md
|
|
25
|
+
Project-URL: Homepage, https://github.com/Liz4v/sobe
|
|
26
|
+
Project-URL: Issues, https://github.com/Liz4v/sobe/issues
|
|
27
|
+
Project-URL: Repository, https://github.com/Liz4v/sobe.git
|
|
28
|
+
Provides-Extra: docs
|
|
29
|
+
Description-Content-Type: text/markdown
|
|
30
|
+
|
|
31
|
+
# sobe
|
|
32
|
+
|
|
33
|
+
[](https://sobe.readthedocs.io/en/latest/)
|
|
34
|
+
|
|
35
|
+
A simple command-line tool to upload files to an AWS S3 bucket that is publicly available through a CloudFront distribution. This is the traditional "drop box" use case that existed long before the advent of modern file sharing services.
|
|
36
|
+
|
|
37
|
+
Full documentation: https://sobe.readthedocs.io/en/latest/
|
|
38
|
+
|
|
39
|
+
It will upload any files you give it to your bucket, in a current year subdirectory, because that's the only easy way to organize chaos.
|
|
40
|
+
|
|
41
|
+
"Sobe" is Portuguese for "take it up" (in the imperative), as in "upload".
|
|
42
|
+
|
|
43
|
+
## Installation
|
|
44
|
+
|
|
45
|
+
Use [uv](https://docs.astral.sh/uv/) to manage it.
|
|
46
|
+
|
|
47
|
+
```bash
|
|
48
|
+
uv tool install sobe
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
If you have Python ≥ 3.11, you can also install it via pip:
|
|
52
|
+
|
|
53
|
+
```bash
|
|
54
|
+
pip install sobe
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
## Configuration
|
|
58
|
+
|
|
59
|
+
On first run, `sobe` will create its config file as appropriate to the platform and tell you its location. You'll need to edit this file with your AWS bucket and CloudFront details.
|
|
60
|
+
|
|
61
|
+
Here's a minimal setup.
|
|
62
|
+
|
|
63
|
+
```toml
|
|
64
|
+
url = "https://example.com/"
|
|
65
|
+
[aws]
|
|
66
|
+
bucket = "your-bucket-name"
|
|
67
|
+
cloudfront = "your-cloudfront-distribution-id"
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
[More information in the docs.](https://sobe.readthedocs.io/en/latest/configuration.html)
|
|
71
|
+
|
|
72
|
+
## Usage
|
|
73
|
+
|
|
74
|
+
The basic example is uploading files to the current year directory:
|
|
75
|
+
```bash
|
|
76
|
+
$ sobe file1.jpg file2.pdf
|
|
77
|
+
https://example.com/2025/file1.jpg ...ok.
|
|
78
|
+
https://example.com/2025/file2.pdf ...ok.
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
You can call it with `--help` for all available options. You can delete files, clear the CloudFront cache (cached objects stay for 1 day by default), tweak the upload year. [The documentation contains better examples.](https://sobe.readthedocs.io/en/latest/usage.html#command-line-interface)
|
|
82
|
+
|
|
83
|
+
## License
|
|
84
|
+
|
|
85
|
+
See the [LICENSE](LICENSE) file for details.
|
sobe-0.2.1/README.md
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# sobe
|
|
2
|
+
|
|
3
|
+
[](https://sobe.readthedocs.io/en/latest/)
|
|
4
|
+
|
|
5
|
+
A simple command-line tool to upload files to an AWS S3 bucket that is publicly available through a CloudFront distribution. This is the traditional "drop box" use case that existed long before the advent of modern file sharing services.
|
|
6
|
+
|
|
7
|
+
Full documentation: https://sobe.readthedocs.io/en/latest/
|
|
8
|
+
|
|
9
|
+
It will upload any files you give it to your bucket, in a current year subdirectory, because that's the only easy way to organize chaos.
|
|
10
|
+
|
|
11
|
+
"Sobe" is Portuguese for "take it up" (in the imperative), as in "upload".
|
|
12
|
+
|
|
13
|
+
## Installation
|
|
14
|
+
|
|
15
|
+
Use [uv](https://docs.astral.sh/uv/) to manage it.
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
uv tool install sobe
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
If you have Python ≥ 3.11, you can also install it via pip:
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
pip install sobe
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
## Configuration
|
|
28
|
+
|
|
29
|
+
On first run, `sobe` will create its config file as appropriate to the platform and tell you its location. You'll need to edit this file with your AWS bucket and CloudFront details.
|
|
30
|
+
|
|
31
|
+
Here's a minimal setup.
|
|
32
|
+
|
|
33
|
+
```toml
|
|
34
|
+
url = "https://example.com/"
|
|
35
|
+
[aws]
|
|
36
|
+
bucket = "your-bucket-name"
|
|
37
|
+
cloudfront = "your-cloudfront-distribution-id"
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
[More information in the docs.](https://sobe.readthedocs.io/en/latest/configuration.html)
|
|
41
|
+
|
|
42
|
+
## Usage
|
|
43
|
+
|
|
44
|
+
The basic example is uploading files to the current year directory:
|
|
45
|
+
```bash
|
|
46
|
+
$ sobe file1.jpg file2.pdf
|
|
47
|
+
https://example.com/2025/file1.jpg ...ok.
|
|
48
|
+
https://example.com/2025/file2.pdf ...ok.
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
You can call it with `--help` for all available options. You can delete files, clear the CloudFront cache (cached objects stay for 1 day by default), tweak the upload year. [The documentation contains better examples.](https://sobe.readthedocs.io/en/latest/usage.html#command-line-interface)
|
|
52
|
+
|
|
53
|
+
## License
|
|
54
|
+
|
|
55
|
+
See the [LICENSE](LICENSE) file for details.
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "sobe"
|
|
3
|
+
version = "0.2.1"
|
|
4
|
+
description = "AWS-based drop box uploader"
|
|
5
|
+
classifiers = [
|
|
6
|
+
"Development Status :: 5 - Production/Stable",
|
|
7
|
+
"Environment :: Console",
|
|
8
|
+
"License :: OSI Approved :: MIT License",
|
|
9
|
+
"Operating System :: OS Independent",
|
|
10
|
+
"Programming Language :: Python",
|
|
11
|
+
"Programming Language :: Python :: 3.11",
|
|
12
|
+
"Programming Language :: Python :: 3.12",
|
|
13
|
+
"Programming Language :: Python :: 3.13",
|
|
14
|
+
"Topic :: Communications :: File Sharing",
|
|
15
|
+
"Topic :: Utilities",
|
|
16
|
+
]
|
|
17
|
+
authors = [
|
|
18
|
+
{ name = "Liz Balbuena" },
|
|
19
|
+
]
|
|
20
|
+
license = "MIT"
|
|
21
|
+
readme = "README.md"
|
|
22
|
+
requires-python = ">=3.11"
|
|
23
|
+
dependencies = [
|
|
24
|
+
"boto3>=1.40.49",
|
|
25
|
+
"platformdirs>=4.5.0",
|
|
26
|
+
]
|
|
27
|
+
|
|
28
|
+
[project.optional-dependencies]
|
|
29
|
+
# Extras used for building documentation on Read the Docs.
|
|
30
|
+
docs = [
|
|
31
|
+
"furo>=2024.8.6",
|
|
32
|
+
"sphinx>=7.0.0",
|
|
33
|
+
"sphinx-autodoc-typehints>=2.0.0",
|
|
34
|
+
]
|
|
35
|
+
|
|
36
|
+
[project.scripts]
|
|
37
|
+
sobe = "sobe.main:main"
|
|
38
|
+
|
|
39
|
+
[project.urls]
|
|
40
|
+
Homepage = "https://github.com/Liz4v/sobe"
|
|
41
|
+
Documentation = "https://github.com/Liz4v/sobe/blob/main/README.md"
|
|
42
|
+
Repository = "https://github.com/Liz4v/sobe.git"
|
|
43
|
+
Issues = "https://github.com/Liz4v/sobe/issues"
|
|
44
|
+
Changelog = "https://github.com/Liz4v/sobe/releases"
|
|
45
|
+
|
|
46
|
+
[tool.ruff]
|
|
47
|
+
line-length = 120
|
|
48
|
+
|
|
49
|
+
[tool.pytest.ini_options]
|
|
50
|
+
testpaths = ["tests"]
|
|
51
|
+
python_files = ["test_*.py"]
|
|
52
|
+
python_classes = ["Test*"]
|
|
53
|
+
python_functions = ["test_*"]
|
|
54
|
+
addopts = [
|
|
55
|
+
"--cov=src/sobe",
|
|
56
|
+
"--cov-report=term-missing",
|
|
57
|
+
"--cov-report=html",
|
|
58
|
+
"--cov-fail-under=95",
|
|
59
|
+
"--strict-markers",
|
|
60
|
+
"-v",
|
|
61
|
+
]
|
|
62
|
+
markers = [
|
|
63
|
+
"unit: Unit tests",
|
|
64
|
+
"integration: Integration tests",
|
|
65
|
+
"slow: Slow running tests",
|
|
66
|
+
]
|
|
67
|
+
|
|
68
|
+
[build-system]
|
|
69
|
+
requires = ["uv_build"]
|
|
70
|
+
build-backend = "uv_build"
|
|
71
|
+
|
|
72
|
+
[dependency-groups]
|
|
73
|
+
dev = [
|
|
74
|
+
"pytest>=8.4.2",
|
|
75
|
+
"pytest-cov>=7.0.0",
|
|
76
|
+
"ruff>=0.14.0",
|
|
77
|
+
]
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
"""Everything related to AWS. In the future, we may support other cloud providers."""
|
|
2
|
+
|
|
3
|
+
import datetime
|
|
4
|
+
import json
|
|
5
|
+
import mimetypes
|
|
6
|
+
import pathlib
|
|
7
|
+
import time
|
|
8
|
+
|
|
9
|
+
import boto3
|
|
10
|
+
import botocore.exceptions
|
|
11
|
+
|
|
12
|
+
from sobe.config import AWSConfig
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class AWS:
    """Thin wrapper around the boto3 clients used by the tool (S3 + CloudFront).

    All remote-service interaction lives here; callers (see main.py) only deal
    with years, filenames, and local paths.
    """

    def __init__(self, config: AWSConfig) -> None:
        """Create the boto3 session and service handles from *config*.

        `config.session` and `config.service` are passed through verbatim as
        keyword arguments to boto3, so anything boto3 accepts can be set in
        the user's config file.
        """
        self.config = config
        self._session = boto3.Session(**self.config.session)
        self._s3_resource = self._session.resource("s3", **self.config.service)
        self._bucket = self._s3_resource.Bucket(self.config.bucket)  # type: ignore[attr-defined]
        self._cloudfront = self._session.client("cloudfront", **self.config.service)

    def upload(self, year: str, local_path: pathlib.Path) -> None:
        """Upload *local_path* to the bucket under the key ``{year}/{name}``.

        The Content-Type is guessed from the filename so browsers render the
        object instead of downloading it; unknown types fall back to
        ``application/octet-stream``.
        """
        type_guess, _ = mimetypes.guess_type(local_path)
        extra_args = {"ContentType": type_guess or "application/octet-stream"}
        self._bucket.upload_file(str(local_path), f"{year}/{local_path.name}", ExtraArgs=extra_args)

    def delete(self, year: str, remote_filename: str) -> bool:
        """Delete ``{year}/{remote_filename}``, if it exists. Returns whether it did.

        ``obj.load()`` issues a HEAD request first so we can distinguish
        "deleted" from "was never there"; S3 DELETE alone would succeed either
        way.
        """
        obj = self._bucket.Object(f"{year}/{remote_filename}")
        try:
            obj.load()
            obj.delete()
            return True
        except botocore.exceptions.ClientError as e:
            # HEAD on a missing key surfaces as a ClientError with code "404".
            if e.response.get("Error", {}).get("Code") == "404":
                return False
            raise

    def invalidate_cache(self):
        """Create and wait for a full-path CloudFront invalidation. Iterates until completion.

        This is a generator: it yields the current status string roughly every
        3 seconds while the invalidation is in progress, so the caller can
        render progress (main.py prints a dot per yield). The generator
        returns once CloudFront reports "Completed". Note the polling only
        happens while the caller keeps consuming the generator.
        """
        # CallerReference must be unique per invalidation; a timezone-aware
        # ISO timestamp is unique enough for an interactive tool.
        ref = datetime.datetime.now().astimezone().isoformat()
        batch = {"Paths": {"Quantity": 1, "Items": ["/*"]}, "CallerReference": ref}
        distribution = self.config.cloudfront
        response = self._cloudfront.create_invalidation(DistributionId=distribution, InvalidationBatch=batch)
        invalidation = response["Invalidation"]["Id"]
        status = "Created"
        while status != "Completed":
            yield status
            time.sleep(3)
            response = self._cloudfront.get_invalidation(DistributionId=distribution, Id=invalidation)
            status = response["Invalidation"]["Status"]

    def generate_needed_permissions(self) -> str:
        """Return the minimal IAM policy (as a JSON string) required by the tool.

        The CloudFront distribution ARN needs the account id; if STS cannot be
        reached (e.g. credentials not configured yet) a placeholder is used so
        the policy is still useful as a template.
        """
        try:
            sts = self._session.client("sts", **self.config.service)
            account_id = sts.get_caller_identity()["Account"]
        except botocore.exceptions.ClientError:
            account_id = "YOUR_ACCOUNT_ID"

        actions = """
            s3:PutObject s3:GetObject s3:ListBucket s3:DeleteObject
            cloudfront:CreateInvalidation cloudfront:GetInvalidation
        """.split()
        resources = [
            f"arn:aws:s3:::{self.config.bucket}",
            f"arn:aws:s3:::{self.config.bucket}/*",
            f"arn:aws:cloudfront::{account_id}:distribution/{self.config.cloudfront}",
        ]
        statement = {"Effect": "Allow", "Action": actions, "Resource": resources}
        policy = {"Version": "2012-10-17", "Statement": [statement]}
        return json.dumps(policy, indent=2)
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""Everything related to user configuration file."""
|
|
2
|
+
|
|
3
|
+
import tomllib
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, NamedTuple, Self
|
|
6
|
+
|
|
7
|
+
from platformdirs import PlatformDirs
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class AWSConfig(NamedTuple):
|
|
11
|
+
bucket: str
|
|
12
|
+
cloudfront: str
|
|
13
|
+
session: dict[str, Any]
|
|
14
|
+
service: dict[str, Any]
|
|
15
|
+
|
|
16
|
+
@classmethod
|
|
17
|
+
def from_dict(cls, raw: dict[str, Any]) -> Self:
|
|
18
|
+
return cls(
|
|
19
|
+
bucket=raw.get("bucket", "example-bucket"),
|
|
20
|
+
cloudfront=raw.get("cloudfront", "E1111111111111"),
|
|
21
|
+
session=raw.get("session", {}),
|
|
22
|
+
service=raw.get("service", {}),
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class Config(NamedTuple):
    """Top-level tool configuration (the whole config.toml document)."""

    url: str  # public base URL of the CloudFront distribution
    aws: AWSConfig  # settings from the [aws] table

    @classmethod
    def from_dict(cls, raw: dict[str, Any]) -> Self:
        """Build from the parsed TOML document, substituting placeholder defaults."""
        public_url = raw.get("url", "https://example.com/")
        aws_table = raw.get("aws", {})
        return cls(url=public_url, aws=AWSConfig.from_dict(aws_table))
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class MustEditConfig(Exception):
    """Config file must be edited before this tool can be used."""

    def __init__(self, path: Path):
        # Pass the path to Exception so str(exc) and a bare traceback are
        # actionable; previously the message was empty and only `.path` held
        # the information.
        super().__init__(str(path))
        self.path = path  # location of the config file the user must edit
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
DEFAULT_TEMPLATE = """
|
|
46
|
+
# sobe configuration
|
|
47
|
+
|
|
48
|
+
url = "https://example.com/"
|
|
49
|
+
|
|
50
|
+
[aws]
|
|
51
|
+
bucket = "example-bucket"
|
|
52
|
+
cloudfront = "E1111111111111"
|
|
53
|
+
|
|
54
|
+
[aws.session]
|
|
55
|
+
# If you already have AWS CLI set up, don't fill keys here.
|
|
56
|
+
# region_name = "..."
|
|
57
|
+
# profile_name = "..."
|
|
58
|
+
# aws_access_key_id = "..."
|
|
59
|
+
# aws_secret_access_key = "..."
|
|
60
|
+
|
|
61
|
+
[aws.service]
|
|
62
|
+
# verify = true
|
|
63
|
+
"""
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def load_config() -> Config:
    """Load the user's config file, creating a default template on first run.

    Returns:
        The parsed :class:`Config` once the user has customized the file.

    Raises:
        MustEditConfig: if the file was just created, or exists but still has
            the placeholder bucket name.
    """
    path = PlatformDirs("sobe").user_config_path / "config.toml"
    if path.exists():
        with path.open("rb") as f:
            payload = tomllib.load(f)
        # "example-bucket" (or a missing key) means the file is unedited.
        if payload.get("aws", {}).get("bucket", "example-bucket") != "example-bucket":
            return Config.from_dict(payload)
        # File exists but is unedited: do NOT rewrite it — the user may have
        # already customized other keys (url, cloudfront) and rewriting would
        # silently destroy those edits. Just point them at the file again.
        raise MustEditConfig(path)

    # First run: create the default file and exit for the user to customize.
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(DEFAULT_TEMPLATE.lstrip())
    raise MustEditConfig(path)
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""Command-line interface entry point: input validation and output to user."""
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import datetime
|
|
5
|
+
import functools
|
|
6
|
+
import pathlib
|
|
7
|
+
import warnings
|
|
8
|
+
|
|
9
|
+
import urllib3.exceptions
|
|
10
|
+
|
|
11
|
+
from sobe.aws import AWS
|
|
12
|
+
from sobe.config import MustEditConfig, load_config
|
|
13
|
+
|
|
14
|
+
# Convenience printers: `write` emits without a trailing newline (used for
# inline "..." progress output), and the builtin `print` is deliberately
# shadowed with an always-flushing version so progress shows immediately
# even when stdout is a pipe.
write = functools.partial(print, flush=True, end="")
print = functools.partial(print, flush=True)  # type: ignore
# Users may disable TLS verification via [aws.service] verify = false;
# silence the resulting urllib3 warning spam.
warnings.filterwarnings("ignore", category=urllib3.exceptions.InsecureRequestWarning)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def main() -> None:
    """CLI entry point: load config, then upload/delete files and optionally
    invalidate the CloudFront cache, printing one progress line per file."""
    args = parse_args()

    try:
        config = load_config()
    except MustEditConfig as err:
        print("Created config file at the path below. You must edit it before use.")
        print(err.path)
        raise SystemExit(1) from err

    aws = AWS(config.aws)

    # --policy is standalone: print the IAM policy and stop.
    if args.policy:
        print(aws.generate_needed_permissions())
        return

    for source in args.paths:
        write(f"{config.url}{args.year}/{source.name} ...")
        if not args.delete:
            aws.upload(args.year, source)
            print("ok.")
        else:
            outcome = "deleted." if aws.delete(args.year, source.name) else "didn't exist."
            print(outcome)

    if args.invalidate:
        write("Clearing cache...")
        # invalidate_cache is a generator; each yield is one polling cycle.
        for _ in aws.invalidate_cache():
            write(".")
        print("complete.")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def parse_args(argv=None) -> argparse.Namespace:
    """Parse and validate command-line arguments.

    Args:
        argv: argument list for testing; defaults to ``sys.argv[1:]``.

    Returns:
        The parsed namespace. ``args.year`` is always a ``str`` (fix: the
        previous code left the default as an ``int`` while an explicit
        ``--year`` produced a ``str``), and ``args.paths`` holds the files as
        :class:`pathlib.Path` objects (unset when ``--policy`` is given).

    Raises:
        SystemExit: on usage errors, when a source file is missing, or when
            called with nothing to do (help is printed).
    """
    parser = argparse.ArgumentParser(description="Upload files to your AWS drop box.")
    parser.add_argument("-y", "--year", type=str, help="change year directory")
    parser.add_argument("-i", "--invalidate", action="store_true", help="invalidate CloudFront cache")
    parser.add_argument("-d", "--delete", action="store_true", help="delete instead of upload")
    parser.add_argument("-p", "--policy", action="store_true", help="generate IAM policy requirements and exit")
    parser.add_argument("files", nargs="*", help="Source files.")
    args = parser.parse_args(argv)

    # --policy is mutually exclusive with everything else.
    if args.policy:
        if args.year or args.delete or args.invalidate or args.files:
            parser.error("--policy cannot be used with other arguments")
        return args

    if args.year is None:
        # Keep the type consistent with an explicit --year (type=str) and
        # with AWS.upload(year: str).
        args.year = str(datetime.date.today().year)
    elif not args.files:
        parser.error("--year requires files to be specified")

    if args.delete and not args.files:
        parser.error("--delete requires files to be specified")

    # Nothing to do at all: show help instead of silently exiting.
    if not args.files and not args.invalidate:
        parser.print_help()
        raise SystemExit(0)

    args.paths = [pathlib.Path(p) for p in args.files]
    if not args.delete:
        # Fail fast before touching the network if any upload source is missing.
        missing = [p for p in args.paths if not p.exists()]
        if missing:
            print("The following files do not exist:")
            for p in missing:
                print(f"  {p}")
            raise SystemExit(1)

    return args
|
sobe-0.1/PKG-INFO
DELETED
|
@@ -1,93 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.3
|
|
2
|
-
Name: sobe
|
|
3
|
-
Version: 0.1
|
|
4
|
-
Summary: AWS-based drop box uploader
|
|
5
|
-
Requires-Dist: boto3>=1.40.49
|
|
6
|
-
Requires-Dist: platformdirs>=4.5.0
|
|
7
|
-
Requires-Python: ==3.13.*
|
|
8
|
-
Description-Content-Type: text/markdown
|
|
9
|
-
|
|
10
|
-
# sobe
|
|
11
|
-
|
|
12
|
-
A simple command-line tool for uploading files to an AWS S3 bucket that is publicly available through a CloudFront distribution. This is the traditional "drop box" use case that existed long before the advent of modern file sharing services.
|
|
13
|
-
|
|
14
|
-
It will upload any files you give it to your bucket, in a current year subdirectory, because that's the only easy way to organize chaos.
|
|
15
|
-
|
|
16
|
-
## Installation
|
|
17
|
-
|
|
18
|
-
Use [uv](https://docs.astral.sh/uv/) to manage it.
|
|
19
|
-
|
|
20
|
-
```bash
|
|
21
|
-
uv tool install https://github.com/Liz4v/sobe.git
|
|
22
|
-
```
|
|
23
|
-
|
|
24
|
-
## Configuration
|
|
25
|
-
|
|
26
|
-
On first run, `sobe` will create its config file as appropriate to the platform. You'll need to edit this file with your AWS bucket and CloudFront details:
|
|
27
|
-
|
|
28
|
-
```toml
|
|
29
|
-
# sobe configuration
|
|
30
|
-
bucket = "your-bucket-name"
|
|
31
|
-
url = "https://your-public-url/"
|
|
32
|
-
cloudfront = "your-cloudfront-distribution-id"
|
|
33
|
-
|
|
34
|
-
[aws_session]
|
|
35
|
-
# If you already have AWS CLI set up, don't fill keys here.
|
|
36
|
-
# region_name = "..."
|
|
37
|
-
# profile_name = "..."
|
|
38
|
-
# aws_access_key_id = "..."
|
|
39
|
-
# aws_secret_access_key = "..."
|
|
40
|
-
|
|
41
|
-
[aws_client]
|
|
42
|
-
verify = true
|
|
43
|
-
```
|
|
44
|
-
|
|
45
|
-
## Usage
|
|
46
|
-
|
|
47
|
-
```bash
|
|
48
|
-
sobe [options] files...
|
|
49
|
-
```
|
|
50
|
-
|
|
51
|
-
### Options
|
|
52
|
-
|
|
53
|
-
- `-y`, `--year`: Change the target year directory (default: current year)
|
|
54
|
-
- `-i`, `--invalidate`: Invalidate CloudFront cache after upload
|
|
55
|
-
- `-d`, `--delete`: Delete files instead of uploading
|
|
56
|
-
- `-p`, `--policy`: Display required AWS IAM policy and exit
|
|
57
|
-
|
|
58
|
-
### Examples
|
|
59
|
-
|
|
60
|
-
Upload files to current year directory:
|
|
61
|
-
```bash
|
|
62
|
-
sobe file1.jpg file2.pdf
|
|
63
|
-
```
|
|
64
|
-
|
|
65
|
-
Upload files to a specific year:
|
|
66
|
-
```bash
|
|
67
|
-
sobe -y 2024 file1.jpg file2.pdf
|
|
68
|
-
```
|
|
69
|
-
|
|
70
|
-
Upload and invalidate CloudFront cache:
|
|
71
|
-
```bash
|
|
72
|
-
sobe -i file1.jpg
|
|
73
|
-
```
|
|
74
|
-
|
|
75
|
-
Delete files:
|
|
76
|
-
```bash
|
|
77
|
-
sobe -d file1.jpg
|
|
78
|
-
```
|
|
79
|
-
|
|
80
|
-
Get required AWS IAM policy:
|
|
81
|
-
```bash
|
|
82
|
-
sobe --policy
|
|
83
|
-
```
|
|
84
|
-
|
|
85
|
-
## AWS Permissions
|
|
86
|
-
|
|
87
|
-
Use `sobe --policy` to generate the exact IAM policy required for your configuration. The tool needs permissions for:
|
|
88
|
-
- S3: PutObject, GetObject, ListBucket, DeleteObject
|
|
89
|
-
- CloudFront: CreateInvalidation, GetInvalidation
|
|
90
|
-
|
|
91
|
-
## License
|
|
92
|
-
|
|
93
|
-
See the [LICENSE](LICENSE) file for details.
|
sobe-0.1/README.md
DELETED
|
@@ -1,84 +0,0 @@
|
|
|
1
|
-
# sobe
|
|
2
|
-
|
|
3
|
-
A simple command-line tool for uploading files to an AWS S3 bucket that is publicly available through a CloudFront distribution. This is the traditional "drop box" use case that existed long before the advent of modern file sharing services.
|
|
4
|
-
|
|
5
|
-
It will upload any files you give it to your bucket, in a current year subdirectory, because that's the only easy way to organize chaos.
|
|
6
|
-
|
|
7
|
-
## Installation
|
|
8
|
-
|
|
9
|
-
Use [uv](https://docs.astral.sh/uv/) to manage it.
|
|
10
|
-
|
|
11
|
-
```bash
|
|
12
|
-
uv tool install https://github.com/Liz4v/sobe.git
|
|
13
|
-
```
|
|
14
|
-
|
|
15
|
-
## Configuration
|
|
16
|
-
|
|
17
|
-
On first run, `sobe` will create its config file as appropriate to the platform. You'll need to edit this file with your AWS bucket and CloudFront details:
|
|
18
|
-
|
|
19
|
-
```toml
|
|
20
|
-
# sobe configuration
|
|
21
|
-
bucket = "your-bucket-name"
|
|
22
|
-
url = "https://your-public-url/"
|
|
23
|
-
cloudfront = "your-cloudfront-distribution-id"
|
|
24
|
-
|
|
25
|
-
[aws_session]
|
|
26
|
-
# If you already have AWS CLI set up, don't fill keys here.
|
|
27
|
-
# region_name = "..."
|
|
28
|
-
# profile_name = "..."
|
|
29
|
-
# aws_access_key_id = "..."
|
|
30
|
-
# aws_secret_access_key = "..."
|
|
31
|
-
|
|
32
|
-
[aws_client]
|
|
33
|
-
verify = true
|
|
34
|
-
```
|
|
35
|
-
|
|
36
|
-
## Usage
|
|
37
|
-
|
|
38
|
-
```bash
|
|
39
|
-
sobe [options] files...
|
|
40
|
-
```
|
|
41
|
-
|
|
42
|
-
### Options
|
|
43
|
-
|
|
44
|
-
- `-y`, `--year`: Change the target year directory (default: current year)
|
|
45
|
-
- `-i`, `--invalidate`: Invalidate CloudFront cache after upload
|
|
46
|
-
- `-d`, `--delete`: Delete files instead of uploading
|
|
47
|
-
- `-p`, `--policy`: Display required AWS IAM policy and exit
|
|
48
|
-
|
|
49
|
-
### Examples
|
|
50
|
-
|
|
51
|
-
Upload files to current year directory:
|
|
52
|
-
```bash
|
|
53
|
-
sobe file1.jpg file2.pdf
|
|
54
|
-
```
|
|
55
|
-
|
|
56
|
-
Upload files to a specific year:
|
|
57
|
-
```bash
|
|
58
|
-
sobe -y 2024 file1.jpg file2.pdf
|
|
59
|
-
```
|
|
60
|
-
|
|
61
|
-
Upload and invalidate CloudFront cache:
|
|
62
|
-
```bash
|
|
63
|
-
sobe -i file1.jpg
|
|
64
|
-
```
|
|
65
|
-
|
|
66
|
-
Delete files:
|
|
67
|
-
```bash
|
|
68
|
-
sobe -d file1.jpg
|
|
69
|
-
```
|
|
70
|
-
|
|
71
|
-
Get required AWS IAM policy:
|
|
72
|
-
```bash
|
|
73
|
-
sobe --policy
|
|
74
|
-
```
|
|
75
|
-
|
|
76
|
-
## AWS Permissions
|
|
77
|
-
|
|
78
|
-
Use `sobe --policy` to generate the exact IAM policy required for your configuration. The tool needs permissions for:
|
|
79
|
-
- S3: PutObject, GetObject, ListBucket, DeleteObject
|
|
80
|
-
- CloudFront: CreateInvalidation, GetInvalidation
|
|
81
|
-
|
|
82
|
-
## License
|
|
83
|
-
|
|
84
|
-
See the [LICENSE](LICENSE) file for details.
|
sobe-0.1/pyproject.toml
DELETED
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
[project]
|
|
2
|
-
name = "sobe"
|
|
3
|
-
version = "0.1"
|
|
4
|
-
description = "AWS-based drop box uploader"
|
|
5
|
-
readme = "README.md"
|
|
6
|
-
requires-python = "==3.13.*"
|
|
7
|
-
dependencies = [
|
|
8
|
-
"boto3>=1.40.49",
|
|
9
|
-
"platformdirs>=4.5.0",
|
|
10
|
-
]
|
|
11
|
-
|
|
12
|
-
[project.scripts]
|
|
13
|
-
sobe = "sobe.main:main"
|
|
14
|
-
|
|
15
|
-
[tool.ruff]
|
|
16
|
-
line-length = 120
|
|
17
|
-
|
|
18
|
-
[build-system]
|
|
19
|
-
requires = ["uv_build"]
|
|
20
|
-
build-backend = "uv_build"
|
|
21
|
-
|
|
22
|
-
[dependency-groups]
|
|
23
|
-
dev = [
|
|
24
|
-
"ruff>=0.14.0",
|
|
25
|
-
]
|
sobe-0.1/src/sobe/main.py
DELETED
|
@@ -1,153 +0,0 @@
|
|
|
1
|
-
import argparse
|
|
2
|
-
import datetime
|
|
3
|
-
import functools
|
|
4
|
-
import json
|
|
5
|
-
import mimetypes
|
|
6
|
-
import pathlib
|
|
7
|
-
import sys
|
|
8
|
-
import time
|
|
9
|
-
import tomllib
|
|
10
|
-
import warnings
|
|
11
|
-
|
|
12
|
-
import boto3
|
|
13
|
-
import botocore.exceptions
|
|
14
|
-
import platformdirs
|
|
15
|
-
import urllib3.exceptions
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
def load_config():
|
|
19
|
-
path = platformdirs.PlatformDirs("sobe", "balbuena.ca").user_config_path / "config.toml"
|
|
20
|
-
if path.exists():
|
|
21
|
-
with path.open("rb") as f:
|
|
22
|
-
payload = tomllib.load(f)
|
|
23
|
-
if payload["bucket"] != "example-bucket":
|
|
24
|
-
return payload
|
|
25
|
-
|
|
26
|
-
defaults = """
|
|
27
|
-
# sobe configuration
|
|
28
|
-
bucket = "example-bucket"
|
|
29
|
-
url = "https://example.com/"
|
|
30
|
-
cloudfront = "E1111111111111"
|
|
31
|
-
|
|
32
|
-
[aws_session]
|
|
33
|
-
# If you already have AWS CLI set up, don't fill keys here.
|
|
34
|
-
# region_name = "..."
|
|
35
|
-
# profile_name = "..."
|
|
36
|
-
# aws_access_key_id = "..."
|
|
37
|
-
# aws_secret_access_key = "..."
|
|
38
|
-
|
|
39
|
-
[aws_client]
|
|
40
|
-
verify = true
|
|
41
|
-
"""
|
|
42
|
-
defaults = "\n".join(line.strip() for line in defaults.lstrip().splitlines())
|
|
43
|
-
path.parent.mkdir(parents=True, exist_ok=True)
|
|
44
|
-
path.write_text(defaults)
|
|
45
|
-
print("Created config file at the path below. You must edit it before use.")
|
|
46
|
-
print(path)
|
|
47
|
-
sys.exit(1)
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
CONFIG = load_config()
|
|
51
|
-
write = functools.partial(print, flush=True, end="")
|
|
52
|
-
print = functools.partial(print, flush=True) # type: ignore
|
|
53
|
-
warnings.filterwarnings("ignore", category=urllib3.exceptions.InsecureRequestWarning)
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
def main() -> None:
|
|
57
|
-
args = parse_args()
|
|
58
|
-
session = boto3.Session(**CONFIG["aws_session"])
|
|
59
|
-
bucket = session.resource("s3", **CONFIG["aws_client"]).Bucket(CONFIG["bucket"])
|
|
60
|
-
for path, key in zip(args.paths, args.keys):
|
|
61
|
-
if args.delete:
|
|
62
|
-
delete(bucket, key)
|
|
63
|
-
else:
|
|
64
|
-
upload(bucket, path, key)
|
|
65
|
-
if args.invalidate:
|
|
66
|
-
invalidate(session)
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
def upload(bucket, path: pathlib.Path, remote_path: str) -> None:
|
|
70
|
-
write(f"{CONFIG['url']}{remote_path} ...")
|
|
71
|
-
type_guess, _ = mimetypes.guess_type(path)
|
|
72
|
-
extra_args = {"ContentType": type_guess or "application/octet-stream"}
|
|
73
|
-
bucket.upload_file(str(path), remote_path, ExtraArgs=extra_args)
|
|
74
|
-
print("ok.")
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
def delete(bucket, remote_path: str) -> None:
|
|
78
|
-
write(f"{CONFIG['url']}{remote_path} ...")
|
|
79
|
-
obj = bucket.Object(remote_path)
|
|
80
|
-
try:
|
|
81
|
-
obj.load()
|
|
82
|
-
obj.delete()
|
|
83
|
-
print("deleted.")
|
|
84
|
-
except botocore.exceptions.ClientError as e:
|
|
85
|
-
if e.response["Error"]["Code"] != "404":
|
|
86
|
-
raise
|
|
87
|
-
print("didn't exist.")
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
def invalidate(session: boto3.Session) -> None:
|
|
91
|
-
write("Clearing cache ...")
|
|
92
|
-
ref = datetime.datetime.now().astimezone().isoformat()
|
|
93
|
-
cloudfront = session.client("cloudfront", **CONFIG["aws_client"])
|
|
94
|
-
batch = {"Paths": {"Quantity": 1, "Items": ["/*"]}, "CallerReference": ref}
|
|
95
|
-
invalidation = cloudfront.create_invalidation(DistributionId=CONFIG["cloudfront"], InvalidationBatch=batch)
|
|
96
|
-
write("ok.")
|
|
97
|
-
invalidation_id = invalidation["Invalidation"]["Id"]
|
|
98
|
-
status = ""
|
|
99
|
-
while status != "Completed":
|
|
100
|
-
time.sleep(3)
|
|
101
|
-
write(".")
|
|
102
|
-
response = cloudfront.get_invalidation(DistributionId=CONFIG["cloudfront"], Id=invalidation_id)
|
|
103
|
-
status = response["Invalidation"]["Status"]
|
|
104
|
-
print("complete.")
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
def parse_args() -> argparse.Namespace:
|
|
108
|
-
parser = argparse.ArgumentParser(description="Upload files to your AWS drop box.")
|
|
109
|
-
parser.add_argument("-y", "--year", type=int, default=datetime.date.today().year, help="change year directory")
|
|
110
|
-
parser.add_argument("-i", "--invalidate", action="store_true", help="invalidate CloudFront cache")
|
|
111
|
-
parser.add_argument("-d", "--delete", action="store_true", help="delete instead of upload")
|
|
112
|
-
parser.add_argument("--policy", action="store_true", help="display IAM policy requirements and exit")
|
|
113
|
-
parser.add_argument("files", nargs="*", help="Source files.")
|
|
114
|
-
args = parser.parse_args()
|
|
115
|
-
|
|
116
|
-
if args.policy:
|
|
117
|
-
dump_policy()
|
|
118
|
-
sys.exit(0)
|
|
119
|
-
|
|
120
|
-
if not args.files and not args.invalidate:
|
|
121
|
-
parser.print_help()
|
|
122
|
-
sys.exit(0)
|
|
123
|
-
|
|
124
|
-
args.paths = [pathlib.Path(p) for p in args.files]
|
|
125
|
-
args.keys = [f"{args.year}/{p.name}" for p in args.paths]
|
|
126
|
-
if not args.delete:
|
|
127
|
-
missing = [p for p in args.paths if not p.exists()]
|
|
128
|
-
if missing:
|
|
129
|
-
print("The following files do not exist:")
|
|
130
|
-
for p in missing:
|
|
131
|
-
print(f" {p}")
|
|
132
|
-
sys.exit(1)
|
|
133
|
-
|
|
134
|
-
return args
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
def dump_policy() -> None:
|
|
138
|
-
session = boto3.Session(**CONFIG["aws_session"])
|
|
139
|
-
sts = session.client("sts", **CONFIG["aws_client"])
|
|
140
|
-
caller = sts.get_caller_identity()["Arn"]
|
|
141
|
-
account_id = caller.split(":")[4]
|
|
142
|
-
actions = """
|
|
143
|
-
s3:PutObject s3:GetObject s3:ListBucket s3:DeleteObject
|
|
144
|
-
cloudfront:CreateInvalidation cloudfront:GetInvalidation
|
|
145
|
-
""".split()
|
|
146
|
-
resources = [
|
|
147
|
-
f"arn:aws:s3:::{CONFIG['bucket']}",
|
|
148
|
-
f"arn:aws:s3:::{CONFIG['bucket']}/*",
|
|
149
|
-
f"arn:aws:cloudfront::{account_id}:distribution/{CONFIG['cloudfront']}",
|
|
150
|
-
]
|
|
151
|
-
statement = {"Effect": "Allow", "Action": actions, "Resource": resources}
|
|
152
|
-
policy = {"Version": "2012-10-17", "Statement": [statement]}
|
|
153
|
-
print(json.dumps(policy, indent=2))
|
|
File without changes
|