bluer-objects 6.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bluer-objects might be problematic.
- bluer_objects/.abcli/abcli.sh +9 -0
- bluer_objects/.abcli/actions.sh +11 -0
- bluer_objects/.abcli/aka.sh +3 -0
- bluer_objects/.abcli/alias.sh +36 -0
- bluer_objects/.abcli/blue_objects.sh +11 -0
- bluer_objects/.abcli/cache.sh +5 -0
- bluer_objects/.abcli/clone.sh +94 -0
- bluer_objects/.abcli/download.sh +53 -0
- bluer_objects/.abcli/file.sh +8 -0
- bluer_objects/.abcli/gif.sh +27 -0
- bluer_objects/.abcli/host.sh +29 -0
- bluer_objects/.abcli/ls.sh +24 -0
- bluer_objects/.abcli/metadata/get.sh +24 -0
- bluer_objects/.abcli/metadata/post.sh +22 -0
- bluer_objects/.abcli/metadata.sh +16 -0
- bluer_objects/.abcli/mlflow/browse.sh +36 -0
- bluer_objects/.abcli/mlflow/cache.sh +31 -0
- bluer_objects/.abcli/mlflow/list_registered_models.sh +9 -0
- bluer_objects/.abcli/mlflow/log_artifacts.sh +10 -0
- bluer_objects/.abcli/mlflow/log_run.sh +10 -0
- bluer_objects/.abcli/mlflow/run.sh +11 -0
- bluer_objects/.abcli/mlflow/tags/clone.sh +15 -0
- bluer_objects/.abcli/mlflow/tags/get.sh +10 -0
- bluer_objects/.abcli/mlflow/tags/search.sh +12 -0
- bluer_objects/.abcli/mlflow/tags/set.sh +13 -0
- bluer_objects/.abcli/mlflow/tags.sh +16 -0
- bluer_objects/.abcli/mlflow/test.sh +11 -0
- bluer_objects/.abcli/mlflow/transition.sh +20 -0
- bluer_objects/.abcli/mlflow.sh +29 -0
- bluer_objects/.abcli/mysql/cache.sh +65 -0
- bluer_objects/.abcli/mysql/relations.sh +83 -0
- bluer_objects/.abcli/mysql/tags.sh +85 -0
- bluer_objects/.abcli/mysql.sh +16 -0
- bluer_objects/.abcli/object.sh +54 -0
- bluer_objects/.abcli/publish.sh +58 -0
- bluer_objects/.abcli/select.sh +34 -0
- bluer_objects/.abcli/storage/clear.sh +45 -0
- bluer_objects/.abcli/storage/download_file.sh +9 -0
- bluer_objects/.abcli/storage/exists.sh +8 -0
- bluer_objects/.abcli/storage/list.sh +8 -0
- bluer_objects/.abcli/storage/rm.sh +11 -0
- bluer_objects/.abcli/storage/status.sh +11 -0
- bluer_objects/.abcli/storage.sh +15 -0
- bluer_objects/.abcli/tags.sh +5 -0
- bluer_objects/.abcli/tests/README.sh +8 -0
- bluer_objects/.abcli/tests/clone.sh +32 -0
- bluer_objects/.abcli/tests/help.sh +85 -0
- bluer_objects/.abcli/tests/host.sh +7 -0
- bluer_objects/.abcli/tests/ls.sh +13 -0
- bluer_objects/.abcli/tests/metadata.sh +53 -0
- bluer_objects/.abcli/tests/mlflow_cache.sh +14 -0
- bluer_objects/.abcli/tests/mlflow_logging.sh +12 -0
- bluer_objects/.abcli/tests/mlflow_tags.sh +29 -0
- bluer_objects/.abcli/tests/mlflow_test.sh +7 -0
- bluer_objects/.abcli/tests/mysql_cache.sh +15 -0
- bluer_objects/.abcli/tests/mysql_relations.sh +20 -0
- bluer_objects/.abcli/tests/mysql_tags.sh +16 -0
- bluer_objects/.abcli/tests/test_gif.sh +13 -0
- bluer_objects/.abcli/tests/version.sh +10 -0
- bluer_objects/.abcli/upload.sh +73 -0
- bluer_objects/README/__init__.py +29 -0
- bluer_objects/README/functions.py +285 -0
- bluer_objects/README/items.py +30 -0
- bluer_objects/__init__.py +19 -0
- bluer_objects/__main__.py +16 -0
- bluer_objects/config.env +22 -0
- bluer_objects/env.py +72 -0
- bluer_objects/file/__init__.py +41 -0
- bluer_objects/file/__main__.py +51 -0
- bluer_objects/file/classes.py +38 -0
- bluer_objects/file/functions.py +290 -0
- bluer_objects/file/load.py +219 -0
- bluer_objects/file/save.py +280 -0
- bluer_objects/graphics/__init__.py +4 -0
- bluer_objects/graphics/__main__.py +84 -0
- bluer_objects/graphics/frame.py +15 -0
- bluer_objects/graphics/gif.py +86 -0
- bluer_objects/graphics/screen.py +63 -0
- bluer_objects/graphics/signature.py +97 -0
- bluer_objects/graphics/text.py +165 -0
- bluer_objects/help/__init__.py +0 -0
- bluer_objects/help/__main__.py +10 -0
- bluer_objects/help/functions.py +5 -0
- bluer_objects/host/__init__.py +1 -0
- bluer_objects/host/__main__.py +84 -0
- bluer_objects/host/functions.py +66 -0
- bluer_objects/logger/__init__.py +4 -0
- bluer_objects/logger/matrix.py +209 -0
- bluer_objects/markdown.py +43 -0
- bluer_objects/metadata/__init__.py +8 -0
- bluer_objects/metadata/__main__.py +110 -0
- bluer_objects/metadata/enums.py +29 -0
- bluer_objects/metadata/get.py +89 -0
- bluer_objects/metadata/post.py +101 -0
- bluer_objects/mlflow/__init__.py +28 -0
- bluer_objects/mlflow/__main__.py +271 -0
- bluer_objects/mlflow/cache.py +13 -0
- bluer_objects/mlflow/logging.py +81 -0
- bluer_objects/mlflow/models.py +57 -0
- bluer_objects/mlflow/objects.py +76 -0
- bluer_objects/mlflow/runs.py +100 -0
- bluer_objects/mlflow/tags.py +90 -0
- bluer_objects/mlflow/testing.py +39 -0
- bluer_objects/mysql/cache/__init__.py +8 -0
- bluer_objects/mysql/cache/__main__.py +91 -0
- bluer_objects/mysql/cache/functions.py +181 -0
- bluer_objects/mysql/relations/__init__.py +9 -0
- bluer_objects/mysql/relations/__main__.py +138 -0
- bluer_objects/mysql/relations/functions.py +180 -0
- bluer_objects/mysql/table.py +144 -0
- bluer_objects/mysql/tags/__init__.py +1 -0
- bluer_objects/mysql/tags/__main__.py +130 -0
- bluer_objects/mysql/tags/functions.py +203 -0
- bluer_objects/objects.py +167 -0
- bluer_objects/path.py +194 -0
- bluer_objects/sample.env +16 -0
- bluer_objects/storage/__init__.py +3 -0
- bluer_objects/storage/__main__.py +114 -0
- bluer_objects/storage/classes.py +237 -0
- bluer_objects/tests/__init__.py +0 -0
- bluer_objects/tests/test_README.py +5 -0
- bluer_objects/tests/test_env.py +27 -0
- bluer_objects/tests/test_file_load_save.py +105 -0
- bluer_objects/tests/test_fullname.py +5 -0
- bluer_objects/tests/test_graphics.py +28 -0
- bluer_objects/tests/test_graphics_frame.py +11 -0
- bluer_objects/tests/test_graphics_gif.py +29 -0
- bluer_objects/tests/test_graphics_screen.py +8 -0
- bluer_objects/tests/test_graphics_signature.py +80 -0
- bluer_objects/tests/test_graphics_text.py +14 -0
- bluer_objects/tests/test_logger.py +5 -0
- bluer_objects/tests/test_logger_matrix.py +73 -0
- bluer_objects/tests/test_markdown.py +10 -0
- bluer_objects/tests/test_metadata.py +204 -0
- bluer_objects/tests/test_mlflow.py +60 -0
- bluer_objects/tests/test_mysql_cache.py +14 -0
- bluer_objects/tests/test_mysql_relations.py +16 -0
- bluer_objects/tests/test_mysql_table.py +9 -0
- bluer_objects/tests/test_mysql_tags.py +13 -0
- bluer_objects/tests/test_objects.py +180 -0
- bluer_objects/tests/test_path.py +7 -0
- bluer_objects/tests/test_storage.py +7 -0
- bluer_objects/tests/test_version.py +5 -0
- bluer_objects/urls.py +3 -0
- bluer_objects-6.3.1.dist-info/METADATA +57 -0
- bluer_objects-6.3.1.dist-info/RECORD +149 -0
- bluer_objects-6.3.1.dist-info/WHEEL +5 -0
- bluer_objects-6.3.1.dist-info/licenses/LICENSE +121 -0
- bluer_objects-6.3.1.dist-info/top_level.txt +1 -0
bluer_objects/env.py
ADDED
@@ -0,0 +1,72 @@
+from typing import Union
+import os
+
+from blue_options.env import load_config, load_env, get_env
+
+load_env(__name__)
+load_config(__name__)
+
+HOME = get_env("HOME")
+
+ABCLI_AWS_REGION = get_env("ABCLI_AWS_REGION")
+
+ABCLI_AWS_S3_BUCKET_NAME = get_env(
+    "ABCLI_AWS_S3_BUCKET_NAME",
+    "kamangir",
+)
+
+ABCLI_AWS_S3_PREFIX = get_env(
+    "ABCLI_AWS_S3_PREFIX",
+    "bolt",
+)
+
+ABCLI_AWS_S3_PUBLIC_BUCKET_NAME = get_env("ABCLI_AWS_S3_PUBLIC_BUCKET_NAME")
+
+
+abcli_object_path = get_env("abcli_object_path")
+
+ABCLI_PATH_STORAGE = get_env(
+    "ABCLI_PATH_STORAGE",
+    os.path.join(HOME, "storage"),
+)
+
+abcli_object_name = get_env("abcli_object_name")
+
+ABCLI_S3_OBJECT_PREFIX = get_env(
+    "ABCLI_S3_OBJECT_PREFIX",
+    f"s3://{ABCLI_AWS_S3_BUCKET_NAME}/{ABCLI_AWS_S3_PREFIX}",
+)
+
+
+ABCLI_OBJECT_ROOT = get_env(
+    "ABCLI_OBJECT_ROOT",
+    os.path.join(ABCLI_PATH_STORAGE, "abcli"),
+)
+
+abcli_path_git = get_env(
+    "abcli_path_git",
+    os.path.join(HOME, "git"),
+)
+
+ABCLI_PATH_STATIC = get_env("ABCLI_PATH_STATIC")
+
+ABCLI_PUBLIC_PREFIX = get_env("ABCLI_PUBLIC_PREFIX")
+
+VANWATCH_TEST_OBJECT = get_env("VANWATCH_TEST_OBJECT")
+
+# https://www.randomtextgenerator.com/
+DUMMY_TEXT = "This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text. This is some dummy text."
+
+ABCLI_AWS_RDS_DB = get_env("ABCLI_AWS_RDS_DB")
+ABCLI_AWS_RDS_PORT = get_env("ABCLI_AWS_RDS_PORT")
+ABCLI_AWS_RDS_USER = get_env("ABCLI_AWS_RDS_USER")
+
+ABCLI_AWS_RDS_HOST = get_env("ABCLI_AWS_RDS_HOST")
+ABCLI_AWS_RDS_PASSWORD = get_env("ABCLI_AWS_RDS_PASSWORD")
+
+DATABRICKS_WORKSPACE = get_env("DATABRICKS_WORKSPACE")
+
+DATABRICKS_HOST = get_env("DATABRICKS_HOST")
+DATABRICKS_TOKEN = get_env("DATABRICKS_TOKEN")
+
+ABCLI_MLFLOW_EXPERIMENT_PREFIX = get_env("ABCLI_MLFLOW_EXPERIMENT_PREFIX")
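The constants above are plain module-level values resolved at import time through blue_options.env.get_env, with literal fallbacks such as "kamangir" and "bolt". A minimal usage sketch, not part of the package, assuming bluer_objects and its .env files are installed; the printed values depend on the local environment:

from bluer_objects import env

print(env.ABCLI_AWS_S3_BUCKET_NAME)  # falls back to "kamangir" if unset
print(env.ABCLI_S3_OBJECT_PREFIX)    # defaults to f"s3://{bucket}/{prefix}"
print(env.ABCLI_OBJECT_ROOT)         # defaults to <ABCLI_PATH_STORAGE>/abcli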
bluer_objects/file/__init__.py
ADDED
@@ -0,0 +1,41 @@
+from bluer_objects.file.classes import JsonEncoder, as_json
+from bluer_objects.file.functions import (
+    absolute,
+    add_extension,
+    add_prefix,
+    add_suffix,
+    auxiliary,
+    copy,
+    delete,
+    download,
+    exists,
+    extension,
+    list_of,
+    move,
+    name_and_extension,
+    name,
+    path,
+    relative,
+    size,
+)
+from bluer_objects.file.load import (
+    load_dataframe,
+    load_image,
+    load_json,
+    load_matrix,
+    load_text,
+    load_xml,
+    load_yaml,
+    load,
+)
+from bluer_objects.file.save import (
+    prepare_for_saving,
+    save_csv,
+    save_fig,
+    save_image,
+    save_json,
+    save_matrix,
+    save_text,
+    save_yaml,
+    save,
+)
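These names are re-exported at the package level, so callers typically go through bluer_objects.file rather than the individual sub-modules. A minimal sketch, not part of the package, assuming bluer_objects and its dependencies are installed; the paths are placeholders:

from bluer_objects import file

print(file.name_and_extension("/tmp/report.csv"))  # report.csv
print(file.add_suffix("/tmp/report.csv", "v2"))    # /tmp/report-v2.csv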
bluer_objects/file/__main__.py
ADDED
@@ -0,0 +1,51 @@
+import argparse
+from tqdm import tqdm
+
+from blueness import module
+from blueness.argparse.generic import sys_exit
+from blue_options import string
+
+from bluer_objects import file, NAME
+from bluer_objects.logger import logger
+
+NAME = module.name(__file__, NAME)
+
+parser = argparse.ArgumentParser(NAME)
+parser.add_argument(
+    "task",
+    type=str,
+    help="replace|size",
+)
+parser.add_argument(
+    "--filename",
+    type=str,
+)
+parser.add_argument(
+    "--this",
+    type=str,
+    help="<this-1+this-2+this-3>",
+)
+parser.add_argument(
+    "--that",
+    type=str,
+    help="<that-1+that-2+that-3>",
+)
+args = parser.parse_args()
+
+success = False
+if args.task == "replace":
+    logger.info(f"{NAME}.{args.task}: {args.this} -> {args.that} in {args.filename}")
+
+    success, content = file.load_text(args.filename)
+    if success:
+        for this, that in tqdm(zip(args.this.split("+"), args.that.split("+"))):
+            content = [line.replace(this, that) for line in content]
+
+        success = file.save_text(args.filename, content)
+elif args.task == "size":
+    print(string.pretty_bytes(file.size(args.filename)))
+    success = True
+else:
+    success = None
+
+sys_exit(logger, NAME, args.task, success)
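This __main__ wires the file helpers to a small CLI, e.g. python -m bluer_objects.file size --filename notes.txt, or python -m bluer_objects.file replace --filename notes.txt --this old-1+old-2 --that new-1+new-2. The "replace" task is roughly equivalent to the sketch below (a non-package example; filename and substitutions are placeholders):

from bluer_objects import file

success, content = file.load_text("notes.txt")
if success:
    content = [line.replace("old-1", "new-1") for line in content]
    success = file.save_text("notes.txt", content)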
bluer_objects/file/classes.py
ADDED
@@ -0,0 +1,38 @@
+from typing import Any
+import datetime
+import json
+
+
+# https://gist.github.com/jsbueno/9b2ea63fb16b84658281ec29b375283e
+class JsonEncoder(json.JSONEncoder):
+    def default(self, obj):
+        try:
+            return super().default(obj)
+        except TypeError:
+            pass
+
+        if obj.__class__.__name__ == "ndarray":
+            return obj.tolist()
+
+        if isinstance(obj, datetime.datetime):
+            return "{}".format(obj)
+
+        return (
+            obj.__dict__
+            if not hasattr(type(obj), "__json_encode__")
+            else obj.__json_encode__
+        )
+
+
+def as_json(
+    thing: Any,
+) -> str:
+    # https://docs.python.org/2/library/json.html
+    return json.dumps(
+        thing,
+        sort_keys=True,
+        cls=JsonEncoder,
+        indent=0,
+        ensure_ascii=False,
+        separators=(",", ":"),
+    ).replace("\n", "")
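as_json produces a compact, key-sorted, newline-free string; JsonEncoder falls back to tolist() for numpy arrays, str-formatting for datetimes, and __dict__ (or __json_encode__) for other objects. A small sketch of the expected behavior, not part of the package:

import datetime

from bluer_objects.file import as_json

print(as_json({"b": 2, "a": 1}))                         # {"a":1,"b":2}
print(as_json({"when": datetime.datetime(2024, 1, 1)}))  # datetime rendered as a string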
bluer_objects/file/functions.py
ADDED
@@ -0,0 +1,290 @@
+import urllib3
+from functools import reduce
+import fnmatch
+from typing import Any, List, Union
+import os
+import shutil
+
+from blueness import module
+from blue_options import string
+from blue_options.logger import crash_report
+
+from bluer_objects import NAME
+from bluer_objects.env import abcli_object_path
+from bluer_objects.logger import logger
+
+NAME = module.name(__file__, NAME)
+
+
+def absolute(
+    filename: str,
+    reference_path: Any = None,
+) -> str:
+    from bluer_objects.path import absolute as path_absolute
+
+    return os.path.join(
+        path_absolute(
+            path(filename),
+            os.getcwd() if reference_path is None else reference_path,
+        ),
+        name_and_extension(filename),
+    )
+
+
+def add_extension(
+    filename: str,
+    extension_: Any,
+    force: bool = True,
+):
+    if not isinstance(extension_, str):
+        extension_ = extension(extension_)
+
+    filename, extension_as_is = os.path.splitext(filename)
+    if extension_as_is != "":
+        extension_as_is = extension_as_is[1:]
+
+    if not force and extension_as_is == "":
+        extension_ = extension_as_is
+
+    return f"{filename}.{extension_}"
+
+
+def add_prefix(
+    filename: str,
+    prefix: str,
+) -> str:
+    pathname, filename = os.path.split(filename)
+    return os.path.join(pathname, f"{prefix}-{filename}")
+
+
+def add_suffix(
+    filename: str,
+    suffix: str,
+) -> str:
+    filename, extension = os.path.splitext(filename)
+    return f"{filename}-{suffix}{extension}"
+
+
+def auxiliary(
+    nickname: str,
+    extension: str,
+    add_timestamp: bool = True,
+) -> str:
+    filename = os.path.join(
+        abcli_object_path,
+        "auxiliary",
+        "-".join(
+            [nickname]
+            + (
+                [
+                    string.pretty_date(
+                        as_filename=True,
+                        squeeze=True,
+                        unique=True,
+                    )
+                ]
+                if add_timestamp
+                else []
+            )
+        )
+        + f".{extension}",
+    )
+
+    os.makedirs(path(filename), exist_ok=True)
+
+    return filename
+
+
+def copy(
+    source: str,
+    destination: str,
+    log: bool = True,
+    overwrite: bool = True,
+) -> bool:
+    if not overwrite and exists(destination):
+        if log:
+            logger.info(f"✅ {destination}")
+        return True
+
+    try:
+        os.makedirs(path(destination), exist_ok=True)
+
+        # https://stackoverflow.com/a/8858026
+        # better choice: copy2
+        shutil.copyfile(source, destination)
+    except:
+        crash_report(f"{NAME}: copy({source},{destination}): failed.")
+        return False
+
+    if log:
+        logger.info(f"{NAME}: {source} -> {destination}")
+
+    return True
+
+
+def delete(
+    filename: str,
+) -> bool:
+    if not os.path.isfile(filename):
+        return True
+
+    try:
+        os.remove(filename)
+
+        return True
+    except:
+        crash_report(f"{NAME}: delete({filename}): failed.")
+        return False
+
+
+def download(
+    url: str,
+    filename: str,
+    log: bool = True,
+    overwrite: bool = True,
+) -> bool:
+    if not overwrite and exists(filename):
+        if log:
+            logger.info(f"✅ {filename}")
+
+        return True
+
+    try:
+        # https://stackoverflow.com/a/27406501
+        with urllib3.PoolManager().request(
+            "GET", url, preload_content=False
+        ) as response, open(filename, "wb") as fp:
+            shutil.copyfileobj(response, fp)
+
+        response.release_conn()  # not 100% sure this is required though
+
+    except:
+        crash_report(f"{NAME}: download({url},{filename}): failed.")
+        return False
+
+    if log:
+        logger.info(f"{NAME}: {url} -> {filename}")
+
+    return True
+
+
+def exists(
+    filename: str,
+) -> bool:
+    return os.path.isfile(filename)
+
+
+def extension(
+    filename: Any,
+) -> str:
+    if isinstance(filename, str):
+        _, extension = os.path.splitext(filename)
+        if extension != "":
+            if extension[0] == ".":
+                extension = extension[1:]
+        return extension
+
+    if isinstance(filename, type):
+        return "py" + filename.__name__.lower()
+
+    return "py" + filename.__class__.__name__.lower()
+
+
+def list_of(
+    template: str,
+    recursive: bool = False,
+) -> List[str]:
+    from bluer_objects import path as path_module
+
+    if isinstance(template, list):
+        return reduce(
+            lambda x, y: x + y,
+            [list_of(template_, recursive) for template_ in template],
+            [],
+        )
+
+    if recursive:
+        return reduce(
+            lambda x, y: x + y,
+            [
+                list_of(
+                    os.path.join(pathname, name_and_extension(template)),
+                    recursive,
+                )
+                for pathname in path_module.list_of(path(template))
+            ],
+            list_of(template),
+        )
+
+    # https://stackoverflow.com/a/40566802
+    template_path = path(template)
+    if template_path == "":
+        template_path = path_module.current()
+
+    try:
+        return [
+            os.path.join(template_path, filename)
+            for filename in fnmatch.filter(
+                os.listdir(template_path),
+                name_and_extension(template),
+            )
+        ]
+    except:
+        return []
+
+
+def move(
+    source: str,
+    destination: str,
+) -> bool:
+    try:
+        os.makedirs(path(destination), exist_ok=True)
+
+        # https://stackoverflow.com/a/8858026
+        shutil.move(source, destination)
+    except:
+        crash_report(f"{NAME}: move({source},{destination}): failed.")
+        return False
+
+    return True
+
+
+def name(
+    filename: str,
+) -> str:
+    _, filename = os.path.split(filename)
+
+    return filename if "." not in filename else ".".join(filename.split(".")[:-1])
+
+
+def name_and_extension(
+    filename: str,
+) -> str:
+    return os.path.basename(filename)
+
+
+def path(
+    filename: str,
+) -> str:
+    return os.path.split(filename)[0]
+
+
+def relative(
+    filename: str,
+    reference_path: Union[None, str] = None,
+):
+    from bluer_objects.path import relative as path_relative
+
+    return path_relative(
+        path(filename),
+        os.getcwd() if reference_path is None else reference_path,
+    ) + name_and_extension(filename)
+
+
+def size(
+    filename: str,
+) -> int:
+    try:
+        return os.path.getsize(filename)
+    except:
+        return 0
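The helpers above report failures by returning False (and emitting a crash report) rather than raising, so callers branch on the result. A usage sketch, not part of the package, with placeholder paths:

from bluer_objects import file

if file.copy("in/config.yaml", "out/config.yaml"):
    print("copied:", file.size("out/config.yaml"), "bytes")

# glob-style listing rooted at the template's path
for filename in file.list_of("out/*.yaml"):
    print(file.name(filename), file.extension(filename))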
bluer_objects/file/load.py
ADDED
@@ -0,0 +1,219 @@
+from typing import Tuple, Any, List, Any
+from copy import deepcopy
+import json
+import numpy as np
+
+from blueness import module
+from blue_options import string
+from blue_options.logger import crash_report
+
+from bluer_objects import NAME
+from bluer_objects.logger import logger
+
+NAME = module.name(__file__, NAME)
+
+
+def load(
+    filename,
+    ignore_error=False,
+    default={},
+) -> Tuple[bool, Any]:
+    # https://wiki.python.org/moin/UsingPickle
+    data = deepcopy(default)
+
+    try:
+        import dill
+
+        with open(filename, "rb") as fp:
+            data = dill.load(fp)
+
+        return True, data
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load({filename}): failed.")
+
+        return False, data
+
+
+def load_dataframe(
+    filename,
+    ignore_error=False,
+    log=False,
+) -> Tuple[bool, Any]:
+    success = False
+    df = None
+
+    try:
+        import pandas
+
+        df = pandas.read_csv(filename)
+
+        success = True
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load_dataframe({filename}): failed.")
+
+    if success and log:
+        logger.info(
+            "loaded {} row(s) of {} from {}".format(
+                len(df),
+                ", ".join(df.columns),
+                filename,
+            )
+        )
+
+    return success, df
+
+
+def load_image(
+    filename,
+    ignore_error=False,
+    log=False,
+) -> Tuple[bool, np.ndarray]:
+    import cv2
+
+    success = True
+    image = np.empty((0,))
+
+    try:
+        image = cv2.imread(filename)
+
+        if len(image.shape) == 3:
+            if image.shape[2] == 4:
+                image = image[:, :, :3]
+
+            image = np.flip(image, axis=2)
+
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load_image({filename}): failed.")
+        success = False
+
+    if success and log:
+        logger.info(
+            "loaded {} from {}".format(
+                string.pretty_shape_of_matrix(image),
+                filename,
+            )
+        )
+
+    return success, image
+
+
+def load_json(
+    filename,
+    ignore_error=False,
+    default={},
+) -> Tuple[bool, Any]:
+    success = False
+    data = default
+
+    try:
+        with open(filename, "r") as fh:
+            data = json.load(fh)
+
+        success = True
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load_json({filename}): failed.")
+
+    return success, data
+
+
+def load_matrix(
+    filename: str,
+    ignore_error=False,
+    log: bool = False,
+) -> Tuple[bool, np.ndarray]:
+    success = True
+    matrix: np.ndarray = np.empty((0,))
+
+    try:
+        matrix = np.load(filename)
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load_matrix({filename}) failed.")
+        success = False
+
+    if success and log:
+        logger.info(
+            "loaded {} from {}".format(
+                string.pretty_shape_of_matrix(matrix),
+                filename,
+            )
+        )
+
+    return success, matrix
+
+
+def load_text(
+    filename,
+    ignore_error=False,
+    count=-1,
+    log=False,
+) -> Tuple[bool, List[str]]:
+    success = True
+    text = []
+
+    try:
+        if count == -1:
+            with open(filename, "r") as fp:
+                text = fp.read()
+            text = text.split("\n")
+        else:
+            # https://stackoverflow.com/a/1767589/10917551
+            with open(filename) as fp:
+                text = [next(fp) for _ in range(count)]
+    except:
+        success = False
+        if not ignore_error:
+            crash_report(f"{NAME}: load_text({filename}): failed.")
+
+    if success and log:
+        logger.info("loaded {} line(s) from {}.".format(len(text), filename))
+
+    return success, text
+
+
+def load_xml(
+    filename,
+    ignore_error=False,
+    default={},
+) -> Tuple[bool, Any]:
+    success = False
+    data = default
+
+    try:
+        import xml.etree.ElementTree as ET
+
+        tree = ET.parse(filename)
+        data = tree.getroot()
+
+        success = True
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load_xml({filename}): failed.")
+
+    return success, data
+
+
+def load_yaml(
+    filename,
+    ignore_error=False,
+    default={},
+) -> Tuple[bool, Any]:
+    success = False
+    data = default
+
+    try:
+        import yaml
+
+        with open(filename, "r") as fh:
+            data = yaml.safe_load(fh)
+
+        success = True
+    except:
+        if not ignore_error:
+            crash_report(f"{NAME}: load_yaml({filename}): failed.")
+
+    return success, data
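Likewise, every load_* helper returns a (success, data) tuple; on failure it logs a crash report unless ignore_error is set. A usage sketch, not part of the package, with placeholder filenames:

from bluer_objects import file

success, config = file.load_yaml("config.yaml", ignore_error=True)
if success:
    print(sorted(config.keys()))

success, lines = file.load_text("notes.txt", count=3)  # first 3 lines only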