PyS3Uploader 0.2.3-py3-none-any.whl → 0.2.4-py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Potentially problematic release: this version of PyS3Uploader might be problematic.
- {pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/METADATA +3 -1
- pys3uploader-0.2.4.dist-info/RECORD +11 -0
- s3/__init__.py +1 -1
- s3/logger.py +59 -4
- s3/uploader.py +13 -7
- pys3uploader-0.2.3.dist-info/RECORD +0 -11
- {pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/LICENSE +0 -0
- {pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/WHEEL +0 -0
- {pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/top_level.txt +0 -0
{pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: PyS3Uploader
-Version: 0.2.3
+Version: 0.2.4
 Summary: Python module to upload objects to an S3 bucket.
 Author-email: Vignesh Rao <svignesh1793@gmail.com>
 License: MIT License
@@ -121,6 +121,8 @@ if __name__ == '__main__':
 - **file_exclusion** - Sequence of files to exclude during upload. Defaults to ``None``
 - **folder_exclusion** - Sequence of directories to exclude during upload. Defaults to ``None``
 - **logger** - Bring your own custom pre-configured logger. Defaults to on-screen logging.
+- **log_handler** - Choose between `stdout` vs `file` logging. Defaults to `s3.LogHandler.stdout`
+- **log_level** - Choose the logging level. Defaults to `s3.LogLevel.debug`
 - **env_file** - Path to a `.env` file for loading environment variables. Defaults to scanning the current directory.
 <br><br>
 - **region_name** - AWS region name. Defaults to the env var `AWS_DEFAULT_REGION`
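The two new README entries correspond to keyword arguments on the ``Uploader`` class (see the s3/uploader.py diff below). A minimal sketch of how they might be used; ``bucket_name`` and ``upload_dir`` are illustrative placeholders, not arguments confirmed by this diff:

```python
from s3.logger import LogHandler, LogLevel
from s3.uploader import Uploader

# Hypothetical usage of the new 0.2.4 logging options.
uploader = Uploader(
    bucket_name="example-bucket",      # placeholder, not shown in this diff
    upload_dir="path/to/local/files",  # placeholder, not shown in this diff
    log_handler=LogHandler.file,       # write logs under ./logs/ instead of stdout
    log_level=LogLevel.info,
)
```

Because the constructor wraps both values in ``LogHandler(...)`` and ``LogLevel(...)``, plain strings such as ``log_handler="file"`` or ``log_level="info"`` resolve to the same members, and a user-supplied ``logger`` still takes precedence over both options.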
pys3uploader-0.2.4.dist-info/RECORD ADDED

@@ -0,0 +1,11 @@
+s3/__init__.py,sha256=YsBU1Xy4sLbm_8jU5kKP8QP3ayKBmnJDaF3NCNaDOsk,66
+s3/exceptions.py,sha256=hH3jlMOe8yjBatQK9EdndWZz4QESU74KSY_iDhQ37SY,2585
+s3/logger.py,sha256=igwMubdTQ_GrMkwie5DAIvmxIcgj6a9UA_EGFrwFYiQ,2571
+s3/tree.py,sha256=DiQ2ekMMaj2m_P3-iKkEqSuJCJZ_UZxcAwHtAoPVa5c,1824
+s3/uploader.py,sha256=S480dteogtnA3xWVkHb2aRr5gtL1WnRIukw3_aAX1z0,14228
+s3/utils.py,sha256=NbF28CYviK_St5qd1EOumMVyus9BvQON7clUFeR_SEQ,4473
+pys3uploader-0.2.4.dist-info/LICENSE,sha256=8k-hEraOzyum0GvmmK65YxNRTFXK7eIFHJ0OshJXeTk,1068
+pys3uploader-0.2.4.dist-info/METADATA,sha256=Dd-gyQ387bU7NcVC7Y0aqAeyXUzxlW1nsMWbMvfRDLg,7969
+pys3uploader-0.2.4.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
+pys3uploader-0.2.4.dist-info/top_level.txt,sha256=iQp4y1P58Q633gj8M08kHE4mqqT0hixuDWcniDk_RJ4,3
+pys3uploader-0.2.4.dist-info/RECORD,,
s3/__init__.py CHANGED
s3/logger.py CHANGED

@@ -5,9 +5,46 @@
 """
 
 import logging
+import os
+from datetime import datetime
+from enum import IntEnum, StrEnum
 
 
-def default_handler() -> logging.StreamHandler:
+class LogHandler(StrEnum):
+    """Logging handlers to choose from when default logger is used.
+
+    >>> LogHandler
+
+    """
+
+    file = "file"
+    stdout = "stdout"
+
+
+class LogLevel(IntEnum):
+    """Logging levels to choose from when default logger is used.
+
+    >>> LogLevel
+
+    """
+
+    debug = logging.DEBUG
+    info = logging.INFO
+    warning = logging.WARNING
+    error = logging.ERROR
+
+    @classmethod
+    def _missing_(cls, value):
+        """Allow constructing from string names."""
+        if isinstance(value, str):
+            value = value.lower()
+            for member in cls:
+                if member.name == value:
+                    return member
+        return None
+
+
+def stream_handler() -> logging.StreamHandler:
     """Creates a ``StreamHandler`` and assigns a default format to it.
 
     Returns:
@@ -19,6 +56,20 @@ def default_handler() -> logging.StreamHandler:
     return handler
 
 
+def file_handler() -> logging.FileHandler:
+    """Creates a ``StreamHandler`` and assigns a default format to it.
+
+    Returns:
+        logging.StreamHandler:
+        Returns an instance of the ``StreamHandler`` object.
+    """
+    os.makedirs("logs", exist_ok=True)
+    filename = os.path.join("logs", datetime.now().strftime("PyS3Uploader_%d-%m-%Y_%H:%M.log"))
+    handler = logging.FileHandler(filename, mode="a")
+    handler.setFormatter(fmt=default_format())
+    return handler
+
+
 def default_format() -> logging.Formatter:
     """Creates a logging ``Formatter`` with a custom message and datetime format.
 
@@ -32,7 +83,7 @@ def default_format() -> logging.Formatter:
     )
 
 
-def default_logger() -> logging.Logger:
+def setup_logger(handler: LogHandler, level: LogLevel):
     """Creates a default logger with debug mode enabled.
 
     Returns:
@@ -40,6 +91,10 @@ def default_logger() -> logging.Logger:
         Returns an instance of the ``Logger`` object.
     """
     logger = logging.getLogger(__name__)
-
-
+    if handler == LogHandler.file:
+        logger.addHandler(hdlr=file_handler())
+    elif handler == LogHandler.stdout:
+        logger.addHandler(hdlr=stream_handler())
+
+    logger.setLevel(level)
     return logger
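A quick illustration of the new helpers (a sketch, assuming the package is installed; ``enum.StrEnum`` requires Python 3.11+): the ``_missing_`` hook lets ``LogLevel`` be built from a case-insensitive name as well as from the numeric ``logging`` constant, and ``setup_logger`` attaches the chosen handler to the module logger.

```python
import logging

from s3.logger import LogHandler, LogLevel, setup_logger

# Numeric values and case-insensitive names resolve to the same members.
assert LogLevel(logging.INFO) is LogLevel.info
assert LogLevel("INFO") is LogLevel.info
assert LogHandler("stdout") is LogHandler.stdout

# LogHandler.file writes to ./logs/PyS3Uploader_<dd-mm-YYYY_HH:MM>.log
logger = setup_logger(handler=LogHandler.file, level=LogLevel.warning)
logger.warning("only WARNING and above reach the log file")
```

One caveat visible in the diff: the timestamped filename contains a ``:``, which is not a valid filename character on Windows, so ``LogHandler.file`` is effectively POSIX-only as written.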
s3/uploader.py CHANGED

@@ -11,7 +11,7 @@ from botocore.exceptions import ClientError
 from tqdm import tqdm
 
 from s3.exceptions import BucketNotFound
-from s3.logger import
+from s3.logger import LogHandler, LogLevel, setup_logger
 from s3.utils import (
     RETRY_CONFIG,
     UploadResults,
@@ -45,6 +45,8 @@ class Uploader:
         aws_secret_access_key: str = None,
         retry_config: Config = RETRY_CONFIG,
         logger: logging.Logger = None,
+        log_handler: LogHandler = LogHandler.stdout,
+        log_level: LogLevel = LogLevel.debug,
         env_file: str = None,
     ):
         """Initiates all the necessary args and creates a boto3 session with retry logic.
@@ -63,6 +65,8 @@ class Uploader:
             aws_access_key_id: AWS access key ID.
             aws_secret_access_key: AWS secret access key.
             logger: Bring your own logger.
+            log_handler: Default log handler, can be ``file`` or ``stdout``.
+            log_level: Default log level, can be ``debug``, ``info``, ``warning`` or ``error``.
             env_file: Dotenv file (.env) filepath to load environment variables.
 
         See Also:
@@ -85,7 +89,7 @@ class Uploader:
             If a filepath is provided, PyS3Uploader loads it directly or searches the root directory for the file.
             If no filepath is provided, PyS3Uploader searches the current directory for a .env file.
         """
-        self.logger = logger or
+        self.logger = logger or setup_logger(handler=LogHandler(log_handler), level=LogLevel(log_level))
         self.env_file = env_file or getenv("ENV_FILE", default=".env")
 
         # Check for env_file in current working directory
@@ -184,19 +188,21 @@
         """
         if self.overwrite:
             return True
+        try:
+            file_size = os.path.getsize(filepath)
+        except (OSError, PermissionError) as error:
+            self.logger.error(error)
+            file_size = 0
         # Indicates that the object path already exists in S3
         if object_size := self.object_size_map.get(objectpath):
-            try:
-                file_size = os.path.getsize(filepath)
-            except (OSError, PermissionError) as error:
-                self.logger.error(error)
-                return True
             if object_size == file_size:
                 self.logger.info("S3 object %s exists, and size [%d] matches, skipping..", objectpath, object_size)
                 return False
             self.logger.info(
                 "S3 object %s exists, but size mismatch. Local: [%d], S3: [%d]", objectpath, file_size, object_size
             )
+        else:
+            self.logger.debug("S3 object '%s' of size [%d bytes] doesn't exist, uploading..", objectpath, file_size)
         return True
 
     def _uploader(self, filepath: str, objectpath: str) -> None:
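The net effect of the reworked size check is easier to see flattened into a standalone sketch (the method's real name is not visible in this hunk, so ``should_upload`` below is only a placeholder, and ``object_size_map`` stands in for the cached S3 listing). In 0.2.3 an unreadable local file was uploaded unconditionally; in 0.2.4 its size falls back to 0 and goes through the normal comparison, and a missing S3 object is now logged at DEBUG level:

```python
import os
from typing import Dict

def should_upload(filepath: str, objectpath: str, object_size_map: Dict[str, int], overwrite: bool) -> bool:
    """Sketch of the 0.2.4 decision: upload unless S3 already holds the object at the same size."""
    if overwrite:
        return True
    try:
        file_size = os.path.getsize(filepath)
    except (OSError, PermissionError):
        file_size = 0
    if object_size := object_size_map.get(objectpath):
        # Object already exists in S3: skip only when the sizes match.
        return object_size != file_size
    return True  # object not in S3 yet, upload it
```

Note that the walrus check tests truthiness rather than presence, so a zero-byte object already in S3 still falls through to the upload branch.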
pys3uploader-0.2.3.dist-info/RECORD DELETED

@@ -1,11 +0,0 @@
-s3/__init__.py,sha256=aZ2woJ8TD2tgqXi0ElG-wWwJWoQLIdqTdm50FLaxL8w,66
-s3/exceptions.py,sha256=hH3jlMOe8yjBatQK9EdndWZz4QESU74KSY_iDhQ37SY,2585
-s3/logger.py,sha256=oH540oq8jY723jA4lDWlgfFPLbNgGXTkDwFpB7TLO_o,1196
-s3/tree.py,sha256=DiQ2ekMMaj2m_P3-iKkEqSuJCJZ_UZxcAwHtAoPVa5c,1824
-s3/uploader.py,sha256=KxrWbIInXxXQszP_uJLf_dBI5rUNjNnhco3gr9Vdrto,13767
-s3/utils.py,sha256=NbF28CYviK_St5qd1EOumMVyus9BvQON7clUFeR_SEQ,4473
-pys3uploader-0.2.3.dist-info/LICENSE,sha256=8k-hEraOzyum0GvmmK65YxNRTFXK7eIFHJ0OshJXeTk,1068
-pys3uploader-0.2.3.dist-info/METADATA,sha256=ae2lA8b7dsGWZSMfB8w4joDiAlaE6Wk1f3p1Fxywkc4,7795
-pys3uploader-0.2.3.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
-pys3uploader-0.2.3.dist-info/top_level.txt,sha256=iQp4y1P58Q633gj8M08kHE4mqqT0hixuDWcniDk_RJ4,3
-pys3uploader-0.2.3.dist-info/RECORD,,
{pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/LICENSE: file without changes
{pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/WHEEL: file without changes
{pys3uploader-0.2.3.dist-info → pys3uploader-0.2.4.dist-info}/top_level.txt: file without changes