bluer-objects 6.5.1-py3-none-any.whl → 6.6.1-py3-none-any.whl
This diff reflects the content of the publicly released package versions as they appear in their public registry. This release of bluer-objects has been flagged as potentially problematic.
- bluer_objects/__init__.py +1 -1
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/METADATA +2 -2
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/RECORD +6 -24
- bluer_objects/mysql/cache/__init__.py +0 -8
- bluer_objects/mysql/cache/__main__.py +0 -91
- bluer_objects/mysql/cache/functions.py +0 -181
- bluer_objects/mysql/relations/__init__.py +0 -9
- bluer_objects/mysql/relations/__main__.py +0 -138
- bluer_objects/mysql/relations/functions.py +0 -180
- bluer_objects/mysql/table.py +0 -144
- bluer_objects/mysql/tags/__init__.py +0 -1
- bluer_objects/mysql/tags/__main__.py +0 -130
- bluer_objects/mysql/tags/functions.py +0 -203
- bluer_objects/storage/__init__.py +0 -3
- bluer_objects/storage/__main__.py +0 -114
- bluer_objects/storage/classes.py +0 -237
- bluer_objects/tests/test_mysql_cache.py +0 -14
- bluer_objects/tests/test_mysql_relations.py +0 -16
- bluer_objects/tests/test_mysql_table.py +0 -9
- bluer_objects/tests/test_mysql_tags.py +0 -13
- bluer_objects/tests/test_storage.py +0 -7
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/WHEEL +0 -0
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/licenses/LICENSE +0 -0
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/top_level.txt +0 -0
bluer_objects/storage/classes.py
DELETED
@@ -1,237 +0,0 @@
-from typing import Union, Tuple
-import boto3
-import botocore
-import os
-import os.path
-import time
-
-from blueness import module
-from blue_options import string
-from blue_options.logger import crash_report
-
-from bluer_objects import file, path, NAME
-from bluer_objects.env import (
-    ABCLI_OBJECT_ROOT,
-    ABCLI_AWS_S3_BUCKET_NAME,
-    ABCLI_AWS_REGION,
-    ABCLI_PATH_STATIC,
-    ABCLI_AWS_S3_PREFIX,
-    abcli_object_name,
-)
-from bluer_objects.logger import logger
-
-NAME = module.name(__file__, NAME)
-
-
-class Storage:
-    def __init__(self, bucket_name=ABCLI_AWS_S3_BUCKET_NAME):
-        self.region = ABCLI_AWS_REGION
-
-        try:
-            self.s3 = boto3.client("s3", region_name=self.region)
-        except:
-            assert False, f"{NAME}.Storage: failed."
-
-        self.bucket_name = bucket_name
-
-        assert self.create_bucket()
-
-    def create_bucket(
-        self,
-        bucket_name: str = "",
-    ) -> bool:
-        if not bucket_name:
-            bucket_name = self.bucket_name
-
-        try:
-            if boto3.resource("s3").Bucket(bucket_name).creation_date is not None:
-                logger.debug(f"-storage: create_bucket: {bucket_name}: already exists.")
-                return True
-
-            self.s3.create_bucket(
-                Bucket=bucket_name,
-                CreateBucketConfiguration={"LocationConstraint": self.region},
-            )
-        except:
-            crash_report(f"-storage: create_bucket: {bucket_name}: failed.")
-            return False
-
-        return True
-
-    def download_file(
-        self,
-        object_name: str,
-        filename: str = "",
-        bucket_name: Union[None, str] = None,
-        ignore_error: bool = False,
-        log: bool = True,
-        overwrite: bool = False,
-    ) -> bool:
-        if filename == "static":
-            filename = os.path.join(
-                ABCLI_PATH_STATIC,
-                object_name.replace("/", "-"),
-            )
-
-        if filename == "object":
-            filename = os.path.join(
-                ABCLI_OBJECT_ROOT,
-                "/".join(object_name.split("/")[1:]),
-            )
-
-        if not overwrite and file.exists(filename):
-            if log:
-                logger.info(f"✅ {filename}")
-            return True
-
-        if not path.create(file.path(filename)):
-            return False
-
-        if bucket_name is None:
-            bucket_name = self.bucket_name
-
-        success = True
-        try:
-            self.s3.download_file(bucket_name, object_name, filename)
-        except:
-            success = False
-
-        message = "{}.Storage.downloaded_file: {}/{} -> {}".format(
-            NAME,
-            bucket_name,
-            object_name,
-            filename,
-        )
-
-        if not success:
-            crash_report(f"{message}: failed.")
-        elif log:
-            logger.info(message)
-
-        return success
-
-    def list_of_objects(
-        self,
-        prefix: str,
-        bucket_name: Union[None, str] = None,
-        count: int = -1,
-        suffix: str = "",
-        recursive: bool = True,
-        include_folders: bool = False,
-    ):
-        prefix = f"{ABCLI_AWS_S3_PREFIX}/{prefix}"
-
-        if bucket_name is None:
-            bucket_name = self.bucket_name
-
-        output = []
-        try:
-            output = [
-                string.after(object_summary.key, prefix)
-                for object_summary in boto3.resource("s3")
-                .Bucket(bucket_name)
-                .objects.filter(Prefix=prefix)
-                # .limit(count)
-            ]
-        except:
-            crash_report("-storage: list_of_objects: failed.")
-
-        output = [thing[1:] if thing.startswith("/") else thing for thing in output]
-
-        if include_folders:
-            output = sorted(list({thing.split("/")[0] for thing in output}))
-        elif not recursive:
-            output = [thing for thing in output if "/" not in thing]
-
-        if suffix:
-            output = [thing for thing in output if thing.endswith(suffix)]
-
-        if count != -1:
-            output = output[:count]
-
-        return output
-
-    def exists(
-        self,
-        object_name: str,
-        bucket_name: Union[None, str] = None,
-    ) -> bool:
-        if bucket_name is None:
-            bucket_name = self.bucket_name
-
-        try:
-            boto3.resource("s3").Object(
-                bucket_name, "/".join([ABCLI_AWS_S3_PREFIX, object_name])
-            ).load()
-        except botocore.exceptions.ClientError as e:
-            if e.response["Error"]["Code"] != "404":
-                crash_report("-storage: exists: failed.")
-            return False
-
-        return True
-
-    def upload_file(
-        self,
-        filename: str,
-        object_name: Union[None, str] = None,
-        bucket_name: Union[None, str] = None,
-        overwrite: bool = False,
-    ) -> Tuple[bool, str, str]:
-        if bucket_name is None:
-            bucket_name = self.bucket_name
-
-        if not filename:
-            logger.warning(f"{NAME}: Storage: upload_file(): upload_file: no file.")
-            return False, bucket_name, ""
-
-        if object_name is None:
-            object_name = "{}/{}{}".format(
-                ABCLI_AWS_S3_PREFIX,
-                abcli_object_name,
-                (
-                    string.after(filename, abcli_object_name)
-                    if abcli_object_name in filename
-                    else filename
-                ),
-            )
-
-        if not overwrite and self.exists(object_name):
-            logger.info(f"✅ {object_name}.")
-            return True, bucket_name, object_name
-
-        success = True
-        time_ = time.time()
-        try:
-            self.s3.upload_file(filename, bucket_name, object_name)
-        except:
-            success = False
-        duration = time.time() - time_
-
-        message = "{}.Storage.download_file: {}:{} -> {}/{}: {}.".format(
-            NAME,
-            filename,
-            string.pretty_bytes(file.size(filename)),
-            bucket_name,
-            object_name,
-            string.pretty_duration(
-                duration,
-                include_ms=True,
-                short=True,
-            ),
-        )
-
-        if not success:
-            crash_report(f"{message}: failed.")
-        else:
-            logger.info(message)
-
-        return success, bucket_name, object_name
-
-    def url(self, object_name: str, filename: str) -> str:
-        return "https://{}.s3.{}.amazonaws.com/{}/{}/{}".format(
-            self.bucket_name,
-            self.region,
-            ABCLI_AWS_S3_PREFIX,
-            object_name,
-            filename,
-        )
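For context, the following is a minimal, hypothetical sketch of how the removed Storage class was driven, reconstructed only from the deleted code above; the bucket and region come from ABCLI_AWS_S3_BUCKET_NAME and ABCLI_AWS_REGION, and the object and file names below are made up for illustration.

# sketch only -- this module no longer exists in 6.6.1
from bluer_objects.storage.classes import Storage

storage = Storage()  # connects to S3 and ensures the default bucket exists

# upload a local file; returns (success, bucket_name, object_name)
success, bucket_name, object_name = storage.upload_file(
    filename="/tmp/demo-object/metadata.yaml",  # hypothetical path
)

# download an object key into the local object root
storage.download_file(
    object_name="demo-object/metadata.yaml",  # hypothetical key
    filename="object",  # "object" resolves the target path under ABCLI_OBJECT_ROOT
)

# list object keys under a prefix (ABCLI_AWS_S3_PREFIX is prepended internally)
print(storage.list_of_objects(prefix="demo-object", count=10))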
bluer_objects/tests/test_mysql_cache.py
DELETED

@@ -1,14 +0,0 @@
-from blue_options import string
-
-from bluer_objects.mysql.cache.functions import read, write
-
-
-def test_mysql_cache_write_read():
-    keyword = string.random()
-    value = string.random()
-
-    assert write(keyword, value)
-
-    value_as_read = read(keyword)
-
-    assert value_as_read == value
bluer_objects/tests/test_mysql_relations.py
DELETED

@@ -1,16 +0,0 @@
-from bluer_objects.objects import unique_object
-
-from bluer_objects.mysql.relations.functions import set_, get, list_of
-
-
-def test_mysql_cache_write_read():
-    object_1 = unique_object()
-    object_2 = unique_object()
-
-    relation = list_of[0]
-
-    assert set_(object_1, object_2, relation)
-
-    relation_as_Read = get(object_1, object_2)
-
-    assert relation_as_Read == relation
bluer_objects/tests/test_mysql_tags.py
DELETED

@@ -1,13 +0,0 @@
-from bluer_objects.objects import unique_object
-
-from bluer_objects.mysql.tags.functions import get, set_
-
-
-def test_mysql_tags_get_set():
-    object_name = unique_object()
-
-    assert set_(object_name, "this,that")
-
-    tags_as_read = get(object_name)
-    assert "this" in tags_as_read
-    assert "that" in tags_as_read
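The three deleted test files above exercise the MySQL helpers that were also removed in 6.6.1. The following is a hypothetical consolidated sketch using only the imports and call shapes shown in those tests; the object names are placeholders.

# sketch only -- these modules no longer exist in 6.6.1
from bluer_objects.mysql.cache.functions import read, write
from bluer_objects.mysql.relations.functions import set_ as set_relation, get as get_relation, list_of
from bluer_objects.mysql.tags.functions import set_ as set_tags, get as get_tags

# key/value cache
assert write("some-keyword", "some-value")
assert read("some-keyword") == "some-value"

# relation between two objects (list_of is indexable per the deleted test)
assert set_relation("object-1", "object-2", list_of[0])
assert get_relation("object-1", "object-2") == list_of[0]

# comma-separated tags on an object
assert set_tags("object-1", "this,that")
assert "this" in get_tags("object-1")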
{bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/WHEEL
File without changes

{bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/licenses/LICENSE
File without changes

{bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/top_level.txt
File without changes