bluer-objects 6.5.1-py3-none-any.whl → 6.6.1-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Potentially problematic release.
This version of bluer-objects might be problematic.
- bluer_objects/__init__.py +1 -1
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/METADATA +2 -2
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/RECORD +6 -24
- bluer_objects/mysql/cache/__init__.py +0 -8
- bluer_objects/mysql/cache/__main__.py +0 -91
- bluer_objects/mysql/cache/functions.py +0 -181
- bluer_objects/mysql/relations/__init__.py +0 -9
- bluer_objects/mysql/relations/__main__.py +0 -138
- bluer_objects/mysql/relations/functions.py +0 -180
- bluer_objects/mysql/table.py +0 -144
- bluer_objects/mysql/tags/__init__.py +0 -1
- bluer_objects/mysql/tags/__main__.py +0 -130
- bluer_objects/mysql/tags/functions.py +0 -203
- bluer_objects/storage/__init__.py +0 -3
- bluer_objects/storage/__main__.py +0 -114
- bluer_objects/storage/classes.py +0 -237
- bluer_objects/tests/test_mysql_cache.py +0 -14
- bluer_objects/tests/test_mysql_relations.py +0 -16
- bluer_objects/tests/test_mysql_table.py +0 -9
- bluer_objects/tests/test_mysql_tags.py +0 -13
- bluer_objects/tests/test_storage.py +0 -7
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/WHEEL +0 -0
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/licenses/LICENSE +0 -0
- {bluer_objects-6.5.1.dist-info → bluer_objects-6.6.1.dist-info}/top_level.txt +0 -0
bluer_objects/mysql/table.py
DELETED
@@ -1,144 +0,0 @@
-from typing import List, Union, Tuple, Any
-import pymysql
-
-from blueness import module
-
-from bluer_objects import NAME
-from bluer_objects.env import (
-    ABCLI_AWS_RDS_DB,
-    ABCLI_AWS_RDS_PORT,
-    ABCLI_AWS_RDS_USER,
-    ABCLI_AWS_RDS_HOST,
-    ABCLI_AWS_RDS_PASSWORD,
-)
-from blue_options.logger import crash_report
-
-NAME = module.name(__file__, NAME)
-
-
-class Table:
-    def __init__(self, name):
-        self.name = name
-
-        self.db = ABCLI_AWS_RDS_DB
-        self.port = int(ABCLI_AWS_RDS_PORT)
-        self.user = ABCLI_AWS_RDS_USER
-
-        self.host = ABCLI_AWS_RDS_HOST
-        self.password = ABCLI_AWS_RDS_PASSWORD
-
-        self.connection = None
-
-    def connect(
-        self,
-        create_command: str = "",
-    ) -> bool:
-        if self.connection is not None:
-            self.disconnect()
-
-        try:
-            self.connection = pymysql.connect(
-                host=self.host,
-                user=self.user,
-                port=self.port,
-                password=self.password,
-                database=self.db,
-            )
-        except:
-            crash_report(f"-{NAME}: connect: failed on host: {self.host}.")
-            return False
-
-        return True if not create_command else self.create(create_command)
-
-    @staticmethod
-    def Create(
-        table_name: str,
-        create_command: List[str],
-    ) -> bool:
-        table = Table(name=table_name)
-
-        return table.disconnect() if table.connect(create_command) else False
-
-    def create(
-        self,
-        create_command: List[str],
-    ) -> bool:
-        return self.execute(
-            "CREATE TABLE IF NOT EXISTS {} ({})".format(
-                self.name,
-                ",".join(
-                    [
-                        "id INT(24) NOT NULL AUTO_INCREMENT",
-                        "timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP",
-                    ]
-                    + create_command
-                    + ["PRIMARY KEY (`id`)", "INDEX `index_timestamp` (`timestamp`)"]
-                ),
-            ),
-            commit=True,
-        )
-
-    def disconnect(self) -> bool:
-        if self.connection is None:
-            return True
-
-        success = True
-        try:
-            self.connection.close()
-        except:
-            crash_report(f"-{NAME}: disconnect: failed.")
-            success = False
-
-        self.connection = None
-        return success
-
-    def drop(self) -> bool:
-        return self.execute(f"DROP table {self.name};")
-
-    def execute(
-        self,
-        sql: str,
-        commit: bool = False,
-        returns_output: bool = True,
-    ) -> Union[bool, Tuple[bool, Any]]:
-        output = []
-        success = False
-        try:
-            with self.connection.cursor() as cursor:
-                if isinstance(sql, tuple):
-                    cursor.execute(sql[0], sql[1])
-                else:
-                    cursor.execute(sql)
-
-                if returns_output:
-                    output = cursor.fetchall()
-
-            if commit:
-                # connection is not autocommit by default. So you must commit to save
-                # your changes.
-                self.connection.commit()
-
-            success = True
-        except:
-            crash_report(f"-{NAME}: execute({sql}): failed.")
-
-        return (success, output) if returns_output else success
-
-    def insert(
-        self,
-        columns: List[str],
-        values: List[Any],
-    ) -> bool:
-        return self.execute(
-            (
-                f"INSERT INTO {self.name}"
-                + " ("
-                + ", ".join(columns)
-                + ") VALUES ("
-                + ", ".join(len(columns) * ["%s"])
-                + ")",
-                values,
-            ),
-            commit=True,
-            returns_output=True,
-        )
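For context, a minimal usage sketch (not part of this diff) of how the removed Table class above was presumably driven in 6.5.1; the connection settings come from the ABCLI_AWS_RDS_* environment values, and the table name here is only an illustration:

from bluer_objects.mysql.table import Table

table = Table(name="tags")  # illustrative table name
if table.connect():
    # execute() returns (success, rows) because returns_output defaults to True.
    success, rows = table.execute(f"SELECT * FROM {table.name} LIMIT 10;")
    table.disconnect()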
bluer_objects/mysql/tags/__init__.py
DELETED
@@ -1 +0,0 @@
-from bluer_objects.mysql.tags.functions import clone, create, get, search, set_
bluer_objects/mysql/tags/__main__.py
DELETED
@@ -1,130 +0,0 @@
-import argparse
-
-from blueness import module
-from blueness.argparse.generic import sys_exit
-
-from bluer_objects import NAME
-from bluer_objects.mysql.tags.functions import clone, create, get, search, set_
-from bluer_objects.logger import logger
-
-NAME = module.name(__file__, NAME)
-
-
-parser = argparse.ArgumentParser(NAME)
-parser.add_argument(
-    "task",
-    type=str,
-    default="get",
-    help="clone|create|get|search|set",
-)
-parser.add_argument(
-    "--after",
-    type=str,
-    default="",
-    help="123-4-e",
-)
-parser.add_argument(
-    "--before",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--count",
-    type=int,
-    default=-1,
-)
-parser.add_argument(
-    "--offset",
-    type=int,
-    default=0,
-)
-parser.add_argument(
-    "--delim",
-    type=str,
-    default=", ",
-)
-parser.add_argument(
-    "--host",
-    default=-1,
-    type=int,
-    help="0|1|-1",
-)
-parser.add_argument(
-    "--item_name",
-    default="object",
-    type=str,
-)
-parser.add_argument(
-    "--log",
-    default=1,
-    type=int,
-    help="0|1",
-)
-parser.add_argument(
-    "--object",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--object_2",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--shuffle",
-    default=0,
-    type=int,
-    help="0|1",
-)
-parser.add_argument(
-    "--tag",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--tags",
-    type=str,
-    default="",
-    help="tag_1,~tag_2",
-)
-parser.add_argument(
-    "--type",
-    type=str,
-    default="",
-)
-args = parser.parse_args()
-
-delim = " " if args.delim == "space" else args.delim
-
-success = False
-output = None
-if args.task == "clone":
-    success = clone(args.object, args.object_2)
-elif args.task == "create":
-    success = create()
-elif args.task == "get":
-    output = get(args.object)
-    success = True
-elif args.task == "search":
-    output = search(
-        args.tags,
-        after=args.after,
-        before=args.before,
-        count=args.count,
-        host=args.host,
-        shuffle=args.shuffle,
-        offset=args.offset,
-    )
-    success = True
-elif args.task == "set":
-    success = set_(args.object, args.tags)
-else:
-    success = None
-
-if success is True and output is not None:
-    if args.log:
-        logger.info(f"{len(output):,} {args.item_name}(s): {delim.join(output)}")
-    else:
-        print(delim.join(output))
-
-sys_exit(logger, NAME, args.task, success, log=args.log)
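A hedged invocation sketch (not part of this diff) of the removed CLI above, run as a module; the tag names are illustrative, and "~" excludes a tag as documented by the --tags help string:

import subprocess
import sys

# Search for objects carrying "validated" but not "rejected".
subprocess.run(
    [
        sys.executable, "-m", "bluer_objects.mysql.tags",
        "search",
        "--tags", "validated,~rejected",
        "--count", "10",
    ],
    check=True,
)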
bluer_objects/mysql/tags/functions.py
DELETED
@@ -1,203 +0,0 @@
-from typing import List
-from functools import reduce
-import random
-import re
-
-from blue_options.options import Options
-
-from bluer_objects.mysql.table import Table
-from bluer_objects.logger import logger
-
-
-def clone(
-    object_1: str,
-    object_2: str,
-) -> bool:
-    return set_(object_2, get(object_1))
-
-
-def create() -> bool:
-    return Table.Create(
-        "tags",
-        [
-            "keyword VARCHAR(256) NOT NULL",
-            "tag VARCHAR(4096) NOT NULL",
-            "value BIT NOT NULL",
-        ],
-    )
-
-
-def get(keyword: str) -> List[str]:
-    table = Table(name="tags")
-
-    if not table.connect():
-        return []
-
-    success, output = table.execute(
-        "SELECT t.tag,t.value "
-        f"FROM {table.name} t "
-        "INNER JOIN ( "
-        "SELECT tag, MAX(timestamp) AS max_timestamp "
-        f"FROM {table.name} "
-        f'WHERE keyword="{keyword}" GROUP BY tag '
-        ") tm "
-        "ON t.tag=tm.tag AND t.timestamp=tm.max_timestamp "
-        f'WHERE keyword="{keyword}";',
-    )
-
-    if success:
-        success = table.disconnect()
-
-    if not success:
-        return []
-
-    return sorted([thing[0] for thing in output if thing[1] == b"\x01"])
-
-
-def search(
-    tags: List[str],
-    after: str = "",
-    before: str = "",
-    count: int = -1,
-    host: int = -1,  # limit to/exclude/ignore (1/0/-1) hosts.
-    return_timestamp: bool = False,
-    shuffle: bool = False,
-    offset: int = 0,
-) -> List[str]:
-    if isinstance(tags, str):
-        tags = tags.split(",")
-
-    included_tags = []
-    excluded_tags = []
-    for tag in tags:
-        if tag:
-            if tag[0] in "~-!":
-                excluded_tags += [tag[1:]]
-            else:
-                included_tags += [tag]
-
-    table = Table(name="tags")
-
-    table.connect()
-
-    list_of_keywords = None
-    timestamp = {}
-    for tag in included_tags:
-        success, output = table.execute(
-            "SELECT t.keyword,t.value,t.timestamp "
-            "FROM abcli.tags t "
-            "INNER JOIN ( "
-            "SELECT keyword, MAX(timestamp) AS max_timestamp "
-            "FROM abcli.tags "
-            f'WHERE tag="{tag}" GROUP BY keyword '
-            ") tm "
-            "ON t.keyword=tm.keyword AND t.timestamp=tm.max_timestamp "
-            f'WHERE tag="{tag}"; '
-        )
-        if not success:
-            list_of_keywords = []
-            break
-
-        list_of_keywords_ = [thing[0] for thing in output if thing[1] == b"\x01"]
-
-        if return_timestamp:
-            for thing in output:
-                if thing[1] == b"\x01":
-                    timestamp[thing[0]] = thing[2]
-
-        list_of_keywords = (
-            list_of_keywords_
-            if list_of_keywords is None
-            else [
-                keyword for keyword in list_of_keywords if keyword in list_of_keywords_
-            ]
-        )
-
-    table.disconnect()
-
-    list_of_keywords = [] if list_of_keywords is None else sorted(list_of_keywords)
-
-    if after:
-        list_of_keywords = [keyword for keyword in list_of_keywords if keyword >= after]
-
-    if before:
-        list_of_keywords = [
-            keyword for keyword in list_of_keywords if keyword <= before
-        ]
-
-    excluded_keywords = reduce(
-        lambda x, y: x + y,
-        [
-            search(
-                tag,
-                after=after,
-                before=before,
-                count=-1,
-                host=host,
-            )
-            for tag in excluded_tags
-        ],
-        [],
-    )
-
-    list_of_keywords = [
-        keyword for keyword in list_of_keywords if keyword not in excluded_keywords
-    ]
-
-    if shuffle:
-        random.shuffle(list_of_keywords)
-    else:
-        list_of_keywords = list_of_keywords[::-1]
-
-    p = re.compile("([0-9]{13}|(0|1)[0-9,a-z]{15}|i-[0-9,a-z]{17})")
-    if host == 1:
-        list_of_keywords = [keyword for keyword in list_of_keywords if p.match(keyword)]
-    if host == 0:
-        list_of_keywords = [
-            keyword for keyword in list_of_keywords if not p.match(keyword)
-        ]
-
-    list_of_keywords = list_of_keywords[offset:]
-
-    list_of_keywords = (
-        list_of_keywords[:count]
-        if count > 0
-        else [] if count != -1 else list_of_keywords
-    )
-
-    return (list_of_keywords, timestamp) if return_timestamp else list_of_keywords
-
-
-def set_(
-    keyword: str,
-    tags: List[str],
-) -> bool:
-    table = Table(name="tags")
-
-    if isinstance(tags, list):
-        tags = ",".join(tags)
-    if isinstance(tags, str):
-        tags = Options(tags)
-
-    if not table.connect():
-        return False
-
-    tags = {tag.strip(): value for tag, value in tags.items()}
-
-    success = True
-    for tag in tags:
-        if not table.insert(
-            "keyword,tag,value".split(","),
-            [keyword, tag, 1 if tags[tag] else 0],
-        ):
-            success = False
-        else:
-            if tags[tag]:
-                logger.info(f"{keyword} += #{tag}.")
-            else:
-                logger.info(f"{keyword} -= #{tag}.")
-
-    if not table.disconnect():
-        return False
-
-    return success
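Similarly, a minimal sketch (assumed, not from the package) of the removed tag functions above; the object name is hypothetical:

from bluer_objects.mysql.tags.functions import get, search, set_

object_name = "my-object"  # illustrative keyword

set_(object_name, ["validated", "reviewed"])  # record two tags on the object
print(get(object_name))                       # -> sorted list of active tags
print(search("validated", count=10))          # keywords carrying the tag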
bluer_objects/storage/__main__.py
DELETED
@@ -1,114 +0,0 @@
-import argparse
-
-from blueness import module
-from blueness.argparse.generic import sys_exit
-
-from bluer_objects import NAME
-from bluer_objects.env import ABCLI_AWS_S3_BUCKET_NAME
-from bluer_objects.storage.classes import Storage
-from bluer_objects.logger import logger
-
-NAME = module.name(__file__, NAME)
-
-
-parser = argparse.ArgumentParser(NAME)
-parser.add_argument(
-    "task",
-    type=str,
-    default="",
-    help="create_bucket|download_file|exists|list_of_objects|upload_file",
-)
-parser.add_argument(
-    "--filename",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--bucket_name",
-    type=str,
-    default=ABCLI_AWS_S3_BUCKET_NAME,
-)
-parser.add_argument(
-    "--count",
-    type=int,
-    default=9999,
-)
-parser.add_argument(
-    "--delim",
-    type=str,
-    default=", ",
-)
-parser.add_argument(
-    "--item_name",
-    default="object",
-    type=str,
-)
-parser.add_argument(
-    "--log",
-    default=1,
-    type=int,
-    help="0|1",
-)
-parser.add_argument(
-    "--object_name",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--prefix",
-    type=str,
-    default="",
-)
-parser.add_argument(
-    "--recursive",
-    type=int,
-    default=1,
-    help="0|1",
-)
-parser.add_argument(
-    "--suffix",
-    type=str,
-    default="",
-)
-
-args = parser.parse_args()
-
-delim = " " if args.delim == "space" else args.delim
-
-success = True
-output = None
-if args.task == "create_bucket":
-    success = Storage(args.bucket_name).s3 is not None
-elif args.task == "download_file":
-    success = Storage(args.bucket_name).download_file(
-        bucket_name=args.bucket_name,
-        object_name=args.object_name,
-        filename=args.filename,
-    )
-elif args.task == "exists":
-    print(Storage(args.bucket_name).exists(args.object_name))
-    success = True
-elif args.task == "list_of_objects":
-    output = Storage(args.bucket_name).list_of_objects(
-        count=args.count,
-        suffix=args.suffix,
-        prefix=args.prefix,
-        recursive=args.recursive,
-    )
-    success = True
-elif args.task == "upload_file":
-    success = Storage(args.bucket_name).upload_file(
-        filename=args.filename,
-        object_name=args.object_name,
-        bucket_name=args.bucket_name,
-    )
-else:
-    print(f"{NAME}: {args.task}: command not found.")
-
-if success and output is not None:
-    if args.log:
-        print(f"{len(output):,} {args.item_name}(s): {delim.join(output)}")
-    else:
-        print(delim.join(output))
-
-sys_exit(logger, NAME, args.task, success, log=args.log)
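Finally, a hedged sketch (not part of this diff) of the removed Storage class, with its interface inferred from the CLI above; the bucket comes from bluer_objects.env and the file paths are illustrative:

from bluer_objects.env import ABCLI_AWS_S3_BUCKET_NAME
from bluer_objects.storage.classes import Storage

storage = Storage(ABCLI_AWS_S3_BUCKET_NAME)
if storage.upload_file(
    filename="/tmp/report.json",        # local file (illustrative)
    object_name="reports/report.json",  # key in the bucket (illustrative)
    bucket_name=ABCLI_AWS_S3_BUCKET_NAME,
):
    print(storage.exists("reports/report.json"))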