bluer-objects 6.104.1-py3-none-any.whl → 6.377.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bluer_objects/.abcli/abcli.sh +6 -0
- bluer_objects/.abcli/alias.sh +11 -0
- bluer_objects/.abcli/assets/cd.sh +20 -0
- bluer_objects/.abcli/assets/mv.sh +34 -0
- bluer_objects/.abcli/assets/publish.sh +37 -0
- bluer_objects/.abcli/assets.sh +15 -0
- bluer_objects/.abcli/create_test_asset.sh +10 -0
- bluer_objects/.abcli/download.sh +3 -1
- bluer_objects/.abcli/file.sh +15 -4
- bluer_objects/.abcli/gif.sh +18 -0
- bluer_objects/.abcli/host.sh +23 -7
- bluer_objects/.abcli/ls.sh +19 -8
- bluer_objects/.abcli/metadata/download.sh +9 -0
- bluer_objects/.abcli/metadata/edit.sh +15 -0
- bluer_objects/.abcli/metadata/upload.sh +9 -0
- bluer_objects/.abcli/mlflow/browse.sh +2 -0
- bluer_objects/.abcli/mlflow/deploy.sh +21 -5
- bluer_objects/.abcli/mlflow/lock/lock.sh +11 -0
- bluer_objects/.abcli/mlflow/lock/unlock.sh +12 -0
- bluer_objects/.abcli/mlflow/lock.sh +15 -0
- bluer_objects/.abcli/mlflow.sh +0 -2
- bluer_objects/.abcli/pdf/convert.sh +92 -0
- bluer_objects/.abcli/pdf.sh +15 -0
- bluer_objects/.abcli/storage/clear.sh +2 -0
- bluer_objects/.abcli/tests/clone.sh +2 -3
- bluer_objects/.abcli/tests/create_test_asset.sh +16 -0
- bluer_objects/.abcli/tests/file.sh +64 -0
- bluer_objects/.abcli/tests/gif.sh +3 -3
- bluer_objects/.abcli/tests/help.sh +27 -4
- bluer_objects/.abcli/tests/ls.sh +11 -4
- bluer_objects/.abcli/tests/metadata.sh +35 -0
- bluer_objects/.abcli/tests/mlflow_lock.sh +30 -0
- bluer_objects/.abcli/tests/open.sh +11 -0
- bluer_objects/.abcli/tests/open_gif_open.sh +14 -0
- bluer_objects/.abcli/tests/pdf.sh +31 -0
- bluer_objects/.abcli/tests/storage_clear.sh +11 -0
- bluer_objects/.abcli/tests/storage_public_upload.sh +25 -0
- bluer_objects/.abcli/tests/storage_status.sh +12 -0
- bluer_objects/.abcli/tests/{storage.sh → storage_upload_download.sh} +26 -8
- bluer_objects/.abcli/tests/web_is_accessible.sh +17 -0
- bluer_objects/.abcli/tests/web_where_am_ai.sh +5 -0
- bluer_objects/.abcli/upload.sh +26 -2
- bluer_objects/.abcli/url.sh +15 -0
- bluer_objects/.abcli/web/is_accessible.sh +13 -0
- bluer_objects/.abcli/web/where_am_i.sh +5 -0
- bluer_objects/README/__init__.py +24 -9
- bluer_objects/README/alias.py +56 -0
- bluer_objects/README/consts.py +39 -0
- bluer_objects/README/functions.py +127 -205
- bluer_objects/README/items.py +78 -6
- bluer_objects/README/utils.py +275 -0
- bluer_objects/__init__.py +1 -1
- bluer_objects/assets/__init__.py +0 -0
- bluer_objects/assets/__main__.py +57 -0
- bluer_objects/assets/functions.py +62 -0
- bluer_objects/config.env +9 -1
- bluer_objects/env.py +23 -0
- bluer_objects/file/__main__.py +52 -7
- bluer_objects/file/functions.py +13 -3
- bluer_objects/file/load.py +2 -9
- bluer_objects/file/save.py +17 -24
- bluer_objects/graphics/__main__.py +7 -0
- bluer_objects/graphics/gif.py +11 -7
- bluer_objects/graphics/screen.py +9 -8
- bluer_objects/help/assets.py +96 -0
- bluer_objects/help/create_test_asset.py +22 -0
- bluer_objects/help/download.py +17 -3
- bluer_objects/help/file.py +59 -0
- bluer_objects/help/functions.py +11 -1
- bluer_objects/help/gif.py +25 -0
- bluer_objects/help/host.py +6 -4
- bluer_objects/help/ls.py +26 -3
- bluer_objects/help/metadata.py +51 -0
- bluer_objects/help/mlflow/__init__.py +23 -2
- bluer_objects/help/mlflow/lock.py +52 -0
- bluer_objects/help/pdf.py +67 -0
- bluer_objects/help/upload.py +10 -3
- bluer_objects/help/web.py +38 -0
- bluer_objects/host/functions.py +4 -1
- bluer_objects/logger/confusion_matrix.py +76 -0
- bluer_objects/logger/image.py +110 -0
- bluer_objects/logger/stitch.py +107 -0
- bluer_objects/markdown.py +8 -6
- bluer_objects/metadata/__init__.py +1 -0
- bluer_objects/metadata/flatten.py +27 -0
- bluer_objects/mlflow/lock/__init__.py +1 -0
- bluer_objects/mlflow/lock/__main__.py +58 -0
- bluer_objects/mlflow/lock/functions.py +121 -0
- bluer_objects/mlflow/logging.py +47 -41
- bluer_objects/pdf/__init__.py +1 -0
- bluer_objects/pdf/__main__.py +78 -0
- bluer_objects/pdf/convert/__init__.py +0 -0
- bluer_objects/pdf/convert/batch.py +54 -0
- bluer_objects/pdf/convert/combination.py +32 -0
- bluer_objects/pdf/convert/convert.py +111 -0
- bluer_objects/pdf/convert/image.py +53 -0
- bluer_objects/pdf/convert/md.py +97 -0
- bluer_objects/pdf/convert/missing.py +96 -0
- bluer_objects/pdf/convert/pdf.py +37 -0
- bluer_objects/sample.env +6 -0
- bluer_objects/storage/WebDAV.py +11 -7
- bluer_objects/storage/WebDAVrequest.py +360 -0
- bluer_objects/storage/WebDAVzip.py +26 -29
- bluer_objects/storage/__init__.py +28 -1
- bluer_objects/storage/__main__.py +40 -6
- bluer_objects/storage/base.py +84 -5
- bluer_objects/storage/policies.py +7 -0
- bluer_objects/storage/s3.py +367 -0
- bluer_objects/testing/__main__.py +6 -0
- bluer_objects/tests/test_README_consts.py +71 -0
- bluer_objects/tests/test_README_items.py +128 -0
- bluer_objects/tests/test_alias.py +33 -0
- bluer_objects/tests/test_env.py +25 -2
- bluer_objects/tests/test_file_download.py +25 -0
- bluer_objects/tests/test_file_load_save.py +1 -2
- bluer_objects/tests/test_file_load_save_text.py +46 -0
- bluer_objects/tests/test_graphics_gif.py +2 -0
- bluer_objects/tests/test_log_image_grid.py +29 -0
- bluer_objects/tests/test_logger_confusion_matrix.py +18 -0
- bluer_objects/tests/test_logger_matrix.py +2 -2
- bluer_objects/tests/test_logger_stitch_images.py +47 -0
- bluer_objects/tests/test_metadata.py +12 -6
- bluer_objects/tests/test_metadata_flatten.py +109 -0
- bluer_objects/tests/test_mlflow.py +2 -2
- bluer_objects/tests/test_mlflow_lock.py +26 -0
- bluer_objects/tests/test_objects.py +2 -0
- bluer_objects/tests/test_shell.py +34 -0
- bluer_objects/tests/test_storage.py +8 -21
- bluer_objects/tests/test_storage_base.py +39 -0
- bluer_objects/tests/test_storage_s3.py +67 -0
- bluer_objects/tests/test_storage_webdav_request.py +75 -0
- bluer_objects/tests/test_storage_webdav_zip.py +42 -0
- bluer_objects/tests/test_web_is_accessible.py +11 -0
- bluer_objects/web/__init__.py +1 -0
- bluer_objects/web/__main__.py +31 -0
- bluer_objects/web/functions.py +9 -0
- {bluer_objects-6.104.1.dist-info → bluer_objects-6.377.1.dist-info}/METADATA +6 -3
- bluer_objects-6.377.1.dist-info/RECORD +217 -0
- {bluer_objects-6.104.1.dist-info → bluer_objects-6.377.1.dist-info}/WHEEL +1 -1
- bluer_objects/.abcli/storage/download_file.sh +0 -9
- bluer_objects/.abcli/storage/exists.sh +0 -8
- bluer_objects/.abcli/storage/list.sh +0 -8
- bluer_objects/.abcli/storage/rm.sh +0 -11
- bluer_objects-6.104.1.dist-info/RECORD +0 -143
- {bluer_objects-6.104.1.dist-info → bluer_objects-6.377.1.dist-info}/licenses/LICENSE +0 -0
- {bluer_objects-6.104.1.dist-info → bluer_objects-6.377.1.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,22 @@
 from typing import Tuple, List

+from bluer_objects.storage.s3 import S3Interface
 from bluer_objects.storage.base import StorageInterface
 from bluer_objects.storage.WebDAV import WebDAVInterface
+from bluer_objects.storage.WebDAVrequest import WebDAVRequestInterface
 from bluer_objects.storage.WebDAVzip import WebDAVzipInterface
+from bluer_objects.storage.policies import DownloadPolicy
 from bluer_objects import env
 from bluer_objects.logger import logger

 interface = StorageInterface()

-if env.BLUER_OBJECTS_STORAGE_INTERFACE ==
+if env.BLUER_OBJECTS_STORAGE_INTERFACE == S3Interface.name:
+    interface = S3Interface()
+elif env.BLUER_OBJECTS_STORAGE_INTERFACE == WebDAVInterface.name:
     interface = WebDAVInterface()
+elif env.BLUER_OBJECTS_STORAGE_INTERFACE == WebDAVRequestInterface.name:
+    interface = WebDAVRequestInterface()
 elif env.BLUER_OBJECTS_STORAGE_INTERFACE == WebDAVzipInterface.name:
     interface = WebDAVzipInterface()
 else:
@@ -19,9 +26,13 @@ else:

 def clear(
     do_dryrun: bool = True,
+    log: bool = True,
+    public: bool = False,
 ) -> bool:
     return interface.clear(
         do_dryrun=do_dryrun,
+        log=log,
+        public=public,
     )


@@ -29,11 +40,13 @@ def download(
     object_name: str,
     filename: str = "",
     log: bool = True,
+    policy: DownloadPolicy = DownloadPolicy.NONE,
 ) -> bool:
     return interface.download(
         object_name=object_name,
         filename=filename,
         log=log,
+        policy=policy,
     )


@@ -47,13 +60,27 @@ def ls(
     )


+def ls_objects(
+    prefix: str,
+    where: str = "local",
+) -> Tuple[bool, List[str]]:
+    return interface.ls_objects(
+        prefix=prefix,
+        where=where,
+    )
+
+
 def upload(
     object_name: str,
     filename: str = "",
+    public: bool = False,
+    zip: bool = False,
     log: bool = True,
 ) -> bool:
     return interface.upload(
         object_name=object_name,
         filename=filename,
+        public=public,
+        zip=zip,
         log=log,
     )
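
Taken together, the new module-level surface adds public/zip upload options, a download policy, and ls_objects. A minimal usage sketch, not taken from the package docs; "my-object" and "data.csv" are hypothetical placeholders:

# minimal usage sketch of the new bluer_objects.storage surface
from bluer_objects import storage
from bluer_objects.storage.policies import DownloadPolicy

# upload a single file to the public bucket, or the whole object as a tarball
storage.upload(object_name="my-object", filename="data.csv", public=True)
storage.upload(object_name="my-object", zip=True)

# skip files that already exist locally
storage.download(
    object_name="my-object",
    filename="data.csv",
    policy=DownloadPolicy.DOESNT_EXIST,
)

# list locally known objects that share a prefix
success, object_names = storage.ls_objects(prefix="my-", where="local")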

bluer_objects/storage/__main__.py
CHANGED

@@ -2,9 +2,11 @@ import argparse

 from blueness import module
 from blueness.argparse.generic import sys_exit
+from bluer_options.logger.config import log_list

 from bluer_objects import NAME
 from bluer_objects import storage
+from bluer_objects.storage.policies import DownloadPolicy
 from bluer_objects.logger import logger

 NAME = module.name(__file__, NAME)
@@ -13,7 +15,7 @@ parser = argparse.ArgumentParser(NAME)
 parser.add_argument(
     "task",
     type=str,
-    help="clear | download | ls | upload",
+    help="clear | download | ls | ls_objects | upload",
 )
 parser.add_argument(
     "--object_name",
@@ -24,6 +26,18 @@ parser.add_argument(
     type=str,
     default="",
 )
+parser.add_argument(
+    "--public",
+    type=int,
+    default=0,
+    help="0 | 1",
+)
+parser.add_argument(
+    "--zip",
+    type=int,
+    default=0,
+    help="0 | 1",
+)
 parser.add_argument(
     "--where",
     type=str,
@@ -47,6 +61,17 @@ parser.add_argument(
     default=1,
     help="0 | 1",
 )
+parser.add_argument(
+    "--policy",
+    type=str,
+    default="none",
+    help=" | ".join(sorted([policy.name.lower() for policy in DownloadPolicy])),
+)
+parser.add_argument(
+    "--prefix",
+    type=str,
+    default="",
+)
 args = parser.parse_args()

 delim = " " if args.delim == "space" else args.delim
@@ -55,11 +80,13 @@ success = False
 if args.task == "clear":
     success = storage.clear(
         do_dryrun=args.do_dryrun == 1,
+        public=args.public == 1,
     )
 elif args.task == "download":
     success = storage.download(
         object_name=args.object_name,
         filename=args.filename,
+        policy=DownloadPolicy[args.policy.upper()],
     )
 elif args.task == "ls":
     success, list_of_files = storage.ls(
@@ -68,18 +95,25 @@ elif args.task == "ls":
     )

     if args.log:
-        logger.info(
-            "{:,} file(s).".format(len(list_of_files)),
-        )
-        for index, filename in enumerate(list_of_files):
-            logger.info(f"#{index+1: 4d} - {filename}")
+        log_list(logger, "", list_of_files, "file(s)", 999)
     else:
         print(delim.join(list_of_files))
+elif args.task == "ls_objects":
+    success, list_of_objects = storage.ls_objects(
+        prefix=args.prefix,
+        where=args.where,
+    )

+    if args.log:
+        log_list(logger, "", list_of_objects, "objects(s)", 999)
+    else:
+        print(delim.join(list_of_objects))
 elif args.task == "upload":
     success = storage.upload(
         object_name=args.object_name,
         filename=args.filename,
+        zip=args.zip == 1,
+        public=args.public == 1,
     )
 else:
     success = None
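
The --policy flag is mapped onto the enum by member name, which is also why its help string is built from DownloadPolicy members. A small sketch of that mapping, using the DOESNT_EXIST member that appears later in this diff; the string value is a hypothetical CLI input:

from bluer_objects.storage.policies import DownloadPolicy

policy_arg = "doesnt_exist"  # hypothetical value passed as --policy
policy = DownloadPolicy[policy_arg.upper()]  # lookup by enum member name
assert policy == DownloadPolicy.DOESNT_EXIST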
bluer_objects/storage/base.py
CHANGED

@@ -1,27 +1,50 @@
+import os
+import glob
 from typing import Tuple, List

+from bluer_objects import objects
+from bluer_objects import path
+from bluer_objects.storage.policies import DownloadPolicy
 from bluer_objects.logger import logger
+from bluer_objects.env import ABCLI_OBJECT_ROOT


 class StorageInterface:
     def clear(
         self,
         do_dryrun: bool = True,
+        log: bool = True,
+        public: bool = False,
     ) -> bool:
-
+        logger.info(
+            "{}.clear({})".format(
+                self.__class__.__name__,
+                ",".join(
+                    (["dryrun"] if do_dryrun else []) + (["public"] if public else [])
+                ),
+            )
+        )
+
+        return True

     def download(
         self,
         object_name: str,
         filename: str = "",
         log: bool = True,
+        policy: DownloadPolicy = DownloadPolicy.NONE,
     ) -> bool:
         if log:
             logger.info(
-                "{}.download {}{}".format(
+                "{}.download {}{}{}".format(
                     self.__class__.__name__,
                     object_name,
                     f"/{filename}" if filename else "",
+                    (
+                        ""
+                        if policy == DownloadPolicy.NONE
+                        else " - policy:{}".format(policy.name.lower())
+                    ),
                 )
             )

@@ -32,20 +55,76 @@ class StorageInterface:
         object_name: str,
         where: str = "local",
     ) -> Tuple[bool, List[str]]:
-
+        if where == "local":
+            object_path = objects.object_path(
+                object_name=object_name,
+            )
+
+            return True, sorted(
+                [
+                    os.path.relpath(filename, start=object_path)
+                    for filename in glob.glob(
+                        os.path.join(
+                            object_path,
+                            "**",
+                            "*",
+                        ),
+                        recursive=True,
+                    )
+                    if os.path.isfile(filename)
+                ]
+            )
+
+        if where == "cloud":
+            logger.error("not implemented")
+            return False, []
+
+        logger.error(f"Unknown 'where': {where}")
+        return False, []
+
+    def ls_objects(
+        self,
+        prefix: str,
+        where: str = "local",
+    ) -> Tuple[bool, List[str]]:
+        if where == "local":
+            return True, sorted(
+                [
+                    os.path.relpath(dirname, start=ABCLI_OBJECT_ROOT)
+                    for dirname in glob.glob(
+                        os.path.join(
+                            ABCLI_OBJECT_ROOT,
+                            "*",
+                        ),
+                        recursive=False,
+                    )
+                    if not os.path.isfile(dirname)
+                    and path.name(dirname).startswith(prefix)
+                ]
+            )
+
+        if where == "cloud":
+            logger.error("not implemented")
+            return False, []
+
+        logger.error(f"Unknown 'where': {where}")
+        return False, []

     def upload(
         self,
         object_name: str,
         filename: str = "",
+        public: bool = False,
+        zip: bool = False,
         log: bool = True,
     ) -> bool:
         if log:
             logger.info(
-                "{}.upload {}{}".format(
+                "{}.upload {}{}{}".format(
                     self.__class__.__name__,
                     object_name,
-                    f"/{filename}" if filename else "",
+                    ".tar.gz" if zip else f"/{filename}" if filename else "",
+                    " [public]" if public else "",
                 )
             )

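
With this change, StorageInterface handles local ls/ls_objects and logging itself, so a concrete backend mainly overrides the transfer methods and defers to super() for the rest, keyed by its name class attribute. A sketch of that pattern, mirroring what S3Interface does below; NullInterface is a hypothetical example, not part of the package:

from bluer_objects.storage.base import StorageInterface
from bluer_objects.storage.policies import DownloadPolicy


class NullInterface(StorageInterface):
    # the name attribute is what BLUER_OBJECTS_STORAGE_INTERFACE is matched against
    name = "null"

    def download(
        self,
        object_name: str,
        filename: str = "",
        log: bool = True,
        policy: DownloadPolicy = DownloadPolicy.NONE,
    ) -> bool:
        # perform the actual transfer here, then let the base class log it
        return super().download(
            object_name=object_name,
            filename=filename,
            log=log,
            policy=policy,
        )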

bluer_objects/storage/s3.py
ADDED

@@ -0,0 +1,367 @@
+import boto3
+import os
+from botocore.exceptions import ClientError
+import glob
+from typing import Tuple, List
+from xml.etree import ElementTree as ET
+from tqdm import tqdm
+from functools import reduce
+
+from bluer_objects.storage.base import StorageInterface
+from bluer_objects.env import ABCLI_OBJECT_ROOT
+from bluer_objects import env, file, path
+from bluer_objects import objects
+from bluer_objects.storage.policies import DownloadPolicy
+from bluer_objects.logger import logger
+
+
+# https://docs.arvancloud.ir/fa/developer-tools/sdk/object-storage/
+class S3Interface(StorageInterface):
+    name = "s3"
+
+    def clear(
+        self,
+        do_dryrun: bool = True,
+        log: bool = True,
+        public: bool = False,
+    ) -> bool:
+        if not super().clear(
+            do_dryrun=do_dryrun,
+            log=log,
+            public=public,
+        ):
+            return False
+
+        bucket_name = env.S3_PUBLIC_STORAGE_BUCKET if public else env.S3_STORAGE_BUCKET
+
+        success, list_of_objects = self.list_of_objects(
+            prefix="test",
+            bucket_name=bucket_name,
+        )
+        if not success:
+            return success
+        logger.info(f"{len(list_of_objects)} object(s) to delete.")
+
+        for object_name in tqdm(list_of_objects):
+            if not self.delete(
+                object_name=object_name,
+                do_dryrun=do_dryrun,
+                bucket_name=bucket_name,
+            ):
+                return False
+
+        return True
+
+    def delete(
+        self,
+        object_name: str,
+        do_dryrun: bool = True,
+        log: bool = True,
+        bucket_name: str = env.S3_STORAGE_BUCKET,
+    ) -> bool:
+        if log:
+            logger.info(
+                "{}.delete({}){}".format(
+                    self.__class__.__name__,
+                    object_name,
+                    " dryrun" if do_dryrun else "",
+                )
+            )
+        if do_dryrun:
+            return True
+
+        try:
+            s3 = boto3.resource(
+                "s3",
+                endpoint_url=env.S3_STORAGE_ENDPOINT_URL,
+                aws_access_key_id=env.S3_STORAGE_AWS_ACCESS_KEY_ID,
+                aws_secret_access_key=env.S3_STORAGE_AWS_SECRET_ACCESS_KEY,
+            )
+            bucket = s3.Bucket(bucket_name)
+
+            if object_name.endswith(".tar.gz"):
+                delete_requests = [{"Key": object_name}]
+            else:
+                objects_to_delete = bucket.objects.filter(Prefix=f"{object_name}/")
+                delete_requests = [{"Key": obj.key} for obj in objects_to_delete]
+
+            if not delete_requests:
+                logger.warning(f"no files found under {object_name}.")
+                return True
+
+            bucket.delete_objects(Delete={"Objects": delete_requests})
+        except Exception as e:
+            logger.error(e)
+            return False
+
+        return True
+
+    def download(
+        self,
+        object_name: str,
+        filename: str = "",
+        log: bool = True,
+        policy: DownloadPolicy = DownloadPolicy.NONE,
+    ) -> bool:
+        if filename:
+            local_path = objects.path_of(
+                object_name=object_name,
+                filename=filename,
+                create=True,
+            )
+
+            if policy == DownloadPolicy.DOESNT_EXIST and file.exists(local_path):
+                if log:
+                    logger.info(f"✅ {filename}")
+                return True
+
+            if not path.create(file.path(local_path)):
+                return False
+
+            try:
+                s3_resource = boto3.resource(
+                    "s3",
+                    endpoint_url=env.S3_STORAGE_ENDPOINT_URL,
+                    aws_access_key_id=env.S3_STORAGE_AWS_ACCESS_KEY_ID,
+                    aws_secret_access_key=env.S3_STORAGE_AWS_SECRET_ACCESS_KEY,
+                )
+            except Exception as e:
+                logger.error(e)
+                return False
+
+            try:
+                bucket = s3_resource.Bucket(env.S3_STORAGE_BUCKET)
+
+                bucket.download_file(
+                    f"{object_name}/{filename}",
+                    local_path,
+                )
+            except ClientError as e:
+                if int(e.response["Error"]["Code"]) == 404:  # Not found
+                    return True
+                logger.error(e)
+                return False
+            except Exception as e:
+                logger.error(e)
+                return False
+
+            return super().download(
+                object_name=object_name,
+                filename=filename,
+                log=log,
+                policy=policy,
+            )
+
+        success, list_of_files = self.ls(
+            object_name=object_name,
+            where="cloud",
+        )
+        if not success:
+            return False
+
+        for filename_ in tqdm(list_of_files):
+            if not self.download(
+                object_name=object_name,
+                filename=filename_,
+                log=log,
+                policy=policy,
+            ):
+                return False
+
+        return True
+
+    def list_of_objects(
+        self,
+        prefix: str = "",
+        bucket_name: str = env.S3_STORAGE_BUCKET,
+    ) -> Tuple[bool, List[str]]:
+        try:
+            s3 = boto3.client(
+                "s3",
+                endpoint_url=env.S3_STORAGE_ENDPOINT_URL,
+                aws_access_key_id=env.S3_STORAGE_AWS_ACCESS_KEY_ID,
+                aws_secret_access_key=env.S3_STORAGE_AWS_SECRET_ACCESS_KEY,
+            )
+
+            paginator = s3.get_paginator("list_objects_v2")
+            pages = paginator.paginate(
+                Bucket=bucket_name,
+                Prefix=prefix,
+            )
+        except Exception as e:
+            logger.error(e)
+            return False, []
+
+        list_of_objects = sorted(
+            list(
+                set(
+                    reduce(
+                        lambda x, y: x + y,
+                        [
+                            [
+                                obj["Key"].split("/", 1)[0]
+                                for obj in page.get("Contents", [])
+                            ]
+                            for page in pages
+                        ],
+                        [],
+                    )
+                )
+            )
+        )
+
+        return True, list_of_objects
+
+    def ls(
+        self,
+        object_name: str,
+        where: str = "local",
+    ) -> Tuple[bool, List[str]]:
+        if where == "cloud":
+            try:
+                s3 = boto3.client(
+                    "s3",
+                    endpoint_url=env.S3_STORAGE_ENDPOINT_URL,
+                    aws_access_key_id=env.S3_STORAGE_AWS_ACCESS_KEY_ID,
+                    aws_secret_access_key=env.S3_STORAGE_AWS_SECRET_ACCESS_KEY,
+                )
+
+                prefix = f"{object_name}/"
+
+                paginator = s3.get_paginator("list_objects_v2")
+                pages = paginator.paginate(
+                    Bucket=env.S3_STORAGE_BUCKET,
+                    Prefix=prefix,
+                )
+            except Exception as e:
+                logger.error(e)
+                return False, []
+
+            try:
+                list_of_files = sorted(
+                    reduce(
+                        lambda x, y: x + y,
+                        [
+                            [
+                                obj["Key"].split(prefix, 1)[1]
+                                for obj in page.get("Contents", [])
+                            ]
+                            for page in pages
+                        ],
+                        [],
+                    )
+                )
+            except Exception as e:
+                logger.error(e)
+                return False, []
+
+            return True, list_of_files
+
+        return super().ls(
+            object_name=object_name,
+            where=where,
+        )
+
+    def ls_objects(
+        self,
+        prefix: str,
+        where: str = "local",
+    ) -> Tuple[bool, List[str]]:
+        if where == "cloud":
+            return self.list_of_objects(prefix)
+
+        return super().ls_objects(
+            prefix=prefix,
+            where=where,
+        )
+
+    def upload(
+        self,
+        object_name: str,
+        filename: str = "",
+        public: bool = False,
+        zip: bool = False,
+        log: bool = True,
+    ) -> bool:
+        if filename or zip:
+            local_path = (
+                os.path.join(
+                    ABCLI_OBJECT_ROOT,
+                    f"{object_name}.tar.gz",
+                )
+                if zip
+                else objects.path_of(
+                    object_name=object_name,
+                    filename=filename,
+                )
+            )
+
+            bucket_name = (
+                env.S3_PUBLIC_STORAGE_BUCKET if public else env.S3_STORAGE_BUCKET
+            )
+
+            try:
+                s3_resource = boto3.resource(
+                    "s3",
+                    endpoint_url=env.S3_STORAGE_ENDPOINT_URL,
+                    aws_access_key_id=env.S3_STORAGE_AWS_ACCESS_KEY_ID,
+                    aws_secret_access_key=env.S3_STORAGE_AWS_SECRET_ACCESS_KEY,
+                )
+
+                bucket = s3_resource.Bucket(bucket_name)
+
+                with open(local_path, "rb") as fp:
+                    bucket.put_object(
+                        ACL="public-read" if public else "private",
+                        Body=fp,
+                        Key=(
+                            f"{object_name}.tar.gz"
+                            if zip
+                            else f"{object_name}/{filename}"
+                        ),
+                    )
+            except ClientError as e:
+                logger.error(e)
+                return False
+
+            if public:
+                logger.info(
+                    "🔗 https://{}.{}/{}".format(
+                        bucket_name,
+                        env.S3_STORAGE_ENDPOINT_URL.split("https://", 1)[1],
+                        f"{object_name}.tar.gz" if zip else f"{object_name}/{filename}",
+                    )
+                )
+
+            return super().upload(
+                object_name=object_name,
+                filename=filename,
+                public=public,
+                zip=zip,
+                log=log,
+            )
+
+        object_path = "{}/".format(objects.object_path(object_name=object_name))
+        for filename_ in tqdm(
+            sorted(
+                glob.glob(
+                    objects.path_of(
+                        object_name=object_name,
+                        filename="**",
+                    ),
+                    recursive=True,
+                )
+            )
+        ):
+            if not file.exists(filename_):
+                continue
+
+            if not self.upload(
+                object_name=object_name,
+                filename=filename_.split(object_path, 1)[1],
+                public=public,
+                log=log,
+            ):
+                return False
+
+        return True
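
Backend selection happens when bluer_objects.storage is first imported, driven by BLUER_OBJECTS_STORAGE_INTERFACE, and S3Interface reads its endpoint, credentials, and buckets from the environment variables referenced above. A sketch of that configuration, assuming env.py picks these up from the process environment; all values are placeholders and would normally live in config.env or sample.env rather than in code:

import os

# select the S3 backend (S3Interface.name == "s3")
os.environ["BLUER_OBJECTS_STORAGE_INTERFACE"] = "s3"

# endpoint, credentials, and buckets used by S3Interface -- placeholder values
os.environ["S3_STORAGE_ENDPOINT_URL"] = "https://s3.example.com"
os.environ["S3_STORAGE_AWS_ACCESS_KEY_ID"] = "AKIA..."
os.environ["S3_STORAGE_AWS_SECRET_ACCESS_KEY"] = "..."
os.environ["S3_STORAGE_BUCKET"] = "my-bucket"
os.environ["S3_PUBLIC_STORAGE_BUCKET"] = "my-public-bucket"

# the interface is chosen at import time of bluer_objects.storage
from bluer_objects import storage  # noqa: E402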

bluer_objects/testing/__main__.py
CHANGED

@@ -19,12 +19,18 @@ parser.add_argument
     "--object_name",
     type=str,
 )
+parser.add_argument(
+    "--depth",
+    type=int,
+    default=10,
+)
 args = parser.parse_args()

 success = False
 if args.task == "create_test_asset":
     success = create_test_asset(
         object_name=args.object_name,
+        depth=args.depth,
     )
 else:
     success = None