machineconfig-2.0-py3-none-any.whl → machineconfig-2.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of machineconfig might be problematic.
- machineconfig/cluster/cloud_manager.py +0 -3
- machineconfig/cluster/data_transfer.py +0 -1
- machineconfig/cluster/file_manager.py +0 -1
- machineconfig/cluster/job_params.py +0 -3
- machineconfig/cluster/loader_runner.py +0 -3
- machineconfig/cluster/remote_machine.py +0 -1
- machineconfig/cluster/script_notify_upon_completion.py +0 -1
- machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +5 -6
- machineconfig/cluster/sessions_managers/archive/session_managers.py +0 -1
- machineconfig/cluster/sessions_managers/enhanced_command_runner.py +17 -57
- machineconfig/cluster/sessions_managers/wt_local.py +36 -110
- machineconfig/cluster/sessions_managers/wt_local_manager.py +42 -112
- machineconfig/cluster/sessions_managers/wt_remote.py +23 -30
- machineconfig/cluster/sessions_managers/wt_remote_manager.py +20 -62
- machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +10 -15
- machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +27 -127
- machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +10 -43
- machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +22 -101
- machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +11 -39
- machineconfig/cluster/sessions_managers/zellij_local.py +49 -102
- machineconfig/cluster/sessions_managers/zellij_local_manager.py +34 -78
- machineconfig/cluster/sessions_managers/zellij_remote.py +17 -24
- machineconfig/cluster/sessions_managers/zellij_remote_manager.py +7 -13
- machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +4 -2
- machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +6 -6
- machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +18 -88
- machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +2 -6
- machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +12 -40
- machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +3 -2
- machineconfig/cluster/templates/cli_click.py +0 -1
- machineconfig/cluster/templates/cli_gooey.py +0 -2
- machineconfig/cluster/templates/cli_trogon.py +0 -1
- machineconfig/cluster/templates/run_cloud.py +0 -1
- machineconfig/cluster/templates/run_cluster.py +0 -1
- machineconfig/cluster/templates/run_remote.py +0 -1
- machineconfig/cluster/templates/utils.py +27 -46
- machineconfig/jobs/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/linux/msc/cli_agents.sh +16 -0
- machineconfig/jobs/python/check_installations.py +2 -1
- machineconfig/jobs/python/create_bootable_media.py +0 -2
- machineconfig/jobs/python/python_ve_symlink.py +9 -11
- machineconfig/jobs/python/tasks.py +0 -1
- machineconfig/jobs/python/vscode/api.py +5 -5
- machineconfig/jobs/python/vscode/link_ve.py +13 -14
- machineconfig/jobs/python/vscode/select_interpreter.py +21 -22
- machineconfig/jobs/python/vscode/sync_code.py +9 -13
- machineconfig/jobs/python_custom_installers/archive/ngrok.py +13 -13
- machineconfig/jobs/python_custom_installers/dev/aider.py +7 -15
- machineconfig/jobs/python_custom_installers/dev/alacritty.py +9 -18
- machineconfig/jobs/python_custom_installers/dev/brave.py +10 -19
- machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +8 -15
- machineconfig/jobs/python_custom_installers/dev/code.py +12 -32
- machineconfig/jobs/python_custom_installers/dev/cursor.py +3 -14
- machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +8 -7
- machineconfig/jobs/python_custom_installers/dev/espanso.py +15 -19
- machineconfig/jobs/python_custom_installers/dev/goes.py +5 -12
- machineconfig/jobs/python_custom_installers/dev/lvim.py +9 -17
- machineconfig/jobs/python_custom_installers/dev/nerdfont.py +12 -19
- machineconfig/jobs/python_custom_installers/dev/redis.py +12 -20
- machineconfig/jobs/python_custom_installers/dev/wezterm.py +12 -19
- machineconfig/jobs/python_custom_installers/dev/winget.py +5 -23
- machineconfig/jobs/python_custom_installers/docker.py +12 -21
- machineconfig/jobs/python_custom_installers/gh.py +11 -19
- machineconfig/jobs/python_custom_installers/hx.py +32 -16
- machineconfig/jobs/python_custom_installers/warp-cli.py +12 -20
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_generic_installers/config.json +1 -1
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/windows/archive/archive_pygraphviz.ps1 +1 -1
- machineconfig/jobs/windows/msc/cli_agents.bat +0 -0
- machineconfig/jobs/windows/msc/cli_agents.ps1 +0 -0
- machineconfig/jobs/windows/start_terminal.ps1 +1 -1
- machineconfig/profile/create.py +38 -26
- machineconfig/profile/create_hardlinks.py +29 -20
- machineconfig/profile/shell.py +56 -32
- machineconfig/scripts/__init__.py +0 -2
- machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/cloud/init.sh +2 -2
- machineconfig/scripts/linux/checkout_versions +1 -1
- machineconfig/scripts/linux/choose_wezterm_theme +1 -1
- machineconfig/scripts/linux/cloud_copy +1 -1
- machineconfig/scripts/linux/cloud_manager +1 -1
- machineconfig/scripts/linux/cloud_mount +1 -1
- machineconfig/scripts/linux/cloud_repo_sync +1 -1
- machineconfig/scripts/linux/cloud_sync +1 -1
- machineconfig/scripts/linux/croshell +1 -1
- machineconfig/scripts/linux/devops +7 -7
- machineconfig/scripts/linux/fire +1 -1
- machineconfig/scripts/linux/fire_agents +3 -2
- machineconfig/scripts/linux/ftpx +1 -1
- machineconfig/scripts/linux/gh_models +1 -1
- machineconfig/scripts/linux/kill_process +1 -1
- machineconfig/scripts/linux/mcinit +1 -1
- machineconfig/scripts/linux/repos +1 -1
- machineconfig/scripts/linux/scheduler +1 -1
- machineconfig/scripts/linux/start_slidev +1 -1
- machineconfig/scripts/linux/start_terminals +1 -1
- machineconfig/scripts/linux/url2md +1 -1
- machineconfig/scripts/linux/warp-cli.sh +122 -0
- machineconfig/scripts/linux/wifi_conn +1 -1
- machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_agents.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__init__.py +0 -0
- machineconfig/scripts/python/ai/generate_files.py +83 -0
- machineconfig/scripts/python/ai/instructions/python/dev.instructions.md +2 -2
- machineconfig/scripts/python/ai/mcinit.py +14 -7
- machineconfig/scripts/python/ai/scripts/lint_and_type_check.sh +10 -5
- machineconfig/scripts/python/archive/tmate_conn.py +5 -5
- machineconfig/scripts/python/archive/tmate_start.py +7 -7
- machineconfig/scripts/python/choose_wezterm_theme.py +35 -32
- machineconfig/scripts/python/cloud_copy.py +23 -14
- machineconfig/scripts/python/cloud_mount.py +36 -24
- machineconfig/scripts/python/cloud_repo_sync.py +40 -27
- machineconfig/scripts/python/cloud_sync.py +4 -4
- machineconfig/scripts/python/croshell.py +40 -29
- machineconfig/scripts/python/devops.py +45 -27
- machineconfig/scripts/python/devops_add_identity.py +15 -25
- machineconfig/scripts/python/devops_add_ssh_key.py +8 -8
- machineconfig/scripts/python/devops_backup_retrieve.py +18 -16
- machineconfig/scripts/python/devops_devapps_install.py +25 -20
- machineconfig/scripts/python/devops_update_repos.py +232 -59
- machineconfig/scripts/python/dotfile.py +17 -15
- machineconfig/scripts/python/fire_agents.py +48 -22
- machineconfig/scripts/python/fire_jobs.py +93 -58
- machineconfig/scripts/python/ftpx.py +26 -15
- machineconfig/scripts/python/get_zellij_cmd.py +8 -7
- machineconfig/scripts/python/helpers/cloud_helpers.py +33 -28
- machineconfig/scripts/python/helpers/helpers2.py +27 -16
- machineconfig/scripts/python/helpers/helpers4.py +45 -32
- machineconfig/scripts/python/helpers/helpers5.py +1 -1
- machineconfig/scripts/python/helpers/repo_sync_helpers.py +32 -10
- machineconfig/scripts/python/mount_nfs.py +9 -16
- machineconfig/scripts/python/mount_nw_drive.py +10 -5
- machineconfig/scripts/python/mount_ssh.py +9 -7
- machineconfig/scripts/python/repos.py +216 -58
- machineconfig/scripts/python/snapshot.py +0 -1
- machineconfig/scripts/python/start_slidev.py +11 -6
- machineconfig/scripts/python/start_terminals.py +22 -16
- machineconfig/scripts/python/viewer_template.py +0 -1
- machineconfig/scripts/python/wifi_conn.py +49 -75
- machineconfig/scripts/python/wsl_windows_transfer.py +9 -7
- machineconfig/scripts/windows/checkout_version.ps1 +1 -3
- machineconfig/scripts/windows/choose_wezterm_theme.ps1 +1 -3
- machineconfig/scripts/windows/cloud_copy.ps1 +2 -6
- machineconfig/scripts/windows/cloud_manager.ps1 +1 -1
- machineconfig/scripts/windows/cloud_repo_sync.ps1 +1 -2
- machineconfig/scripts/windows/cloud_sync.ps1 +2 -2
- machineconfig/scripts/windows/croshell.ps1 +2 -2
- machineconfig/scripts/windows/devops.ps1 +1 -4
- machineconfig/scripts/windows/dotfile.ps1 +1 -3
- machineconfig/scripts/windows/fire.ps1 +1 -1
- machineconfig/scripts/windows/ftpx.ps1 +2 -2
- machineconfig/scripts/windows/gpt.ps1 +1 -1
- machineconfig/scripts/windows/kill_process.ps1 +1 -2
- machineconfig/scripts/windows/mcinit.ps1 +1 -1
- machineconfig/scripts/windows/mount_nfs.ps1 +1 -1
- machineconfig/scripts/windows/mount_ssh.ps1 +1 -1
- machineconfig/scripts/windows/pomodoro.ps1 +1 -1
- machineconfig/scripts/windows/py2exe.ps1 +1 -3
- machineconfig/scripts/windows/repos.ps1 +1 -1
- machineconfig/scripts/windows/scheduler.ps1 +1 -1
- machineconfig/scripts/windows/snapshot.ps1 +2 -2
- machineconfig/scripts/windows/start_slidev.ps1 +1 -1
- machineconfig/scripts/windows/start_terminals.ps1 +1 -1
- machineconfig/scripts/windows/wifi_conn.ps1 +1 -1
- machineconfig/scripts/windows/wsl_windows_transfer.ps1 +1 -3
- machineconfig/settings/lf/linux/lfrc +2 -1
- machineconfig/settings/linters/.ruff_cache/.gitignore +2 -0
- machineconfig/settings/linters/.ruff_cache/CACHEDIR.TAG +1 -0
- machineconfig/settings/lvim/windows/archive/config_additional.lua +1 -1
- machineconfig/settings/svim/linux/init.toml +1 -1
- machineconfig/settings/svim/windows/init.toml +1 -1
- machineconfig/setup_linux/web_shortcuts/croshell.sh +3 -52
- machineconfig/setup_linux/web_shortcuts/interactive.sh +6 -6
- machineconfig/setup_linux/web_shortcuts/ssh.sh +0 -4
- machineconfig/setup_windows/web_shortcuts/all.ps1 +2 -2
- machineconfig/setup_windows/web_shortcuts/ascii_art.ps1 +1 -1
- machineconfig/setup_windows/web_shortcuts/croshell.ps1 +1 -1
- machineconfig/setup_windows/web_shortcuts/interactive.ps1 +5 -5
- machineconfig/setup_windows/wt_and_pwsh/install_fonts.ps1 +51 -15
- machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +58 -13
- machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +45 -37
- machineconfig/utils/ai/generate_file_checklist.py +8 -10
- machineconfig/utils/ai/url2md.py +4 -2
- machineconfig/utils/cloud/onedrive/setup_oauth.py +1 -0
- machineconfig/utils/cloud/onedrive/transaction.py +63 -98
- machineconfig/utils/code.py +62 -41
- machineconfig/utils/installer.py +29 -35
- machineconfig/utils/installer_utils/installer_abc.py +11 -11
- machineconfig/utils/installer_utils/installer_class.py +155 -74
- machineconfig/utils/links.py +112 -31
- machineconfig/utils/notifications.py +211 -0
- machineconfig/utils/options.py +41 -42
- machineconfig/utils/path.py +13 -6
- machineconfig/utils/path_reduced.py +614 -311
- machineconfig/utils/procs.py +48 -42
- machineconfig/utils/scheduling.py +0 -1
- machineconfig/utils/source_of_truth.py +27 -0
- machineconfig/utils/ssh.py +146 -85
- machineconfig/utils/terminal.py +84 -37
- machineconfig/utils/upgrade_packages.py +91 -0
- machineconfig/utils/utils2.py +39 -50
- machineconfig/utils/utils5.py +195 -116
- machineconfig/utils/ve.py +13 -5
- {machineconfig-2.0.dist-info → machineconfig-2.2.dist-info}/METADATA +14 -13
- {machineconfig-2.0.dist-info → machineconfig-2.2.dist-info}/RECORD +212 -237
- machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/python_ve_symlink.cpython-311.pyc +0 -0
- machineconfig/jobs/python/archive/python_tools.txt +0 -12
- machineconfig/jobs/python/vscode/__pycache__/select_interpreter.cpython-311.pyc +0 -0
- machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python_generic_installers/update.py +0 -3
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
- machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/linux/activate_ve +0 -87
- machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_copy.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_mount.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_sync.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_backup_retrieve.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/get_zellij_cmd.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-313.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/repo_sync_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/windows/activate_ve.ps1 +0 -54
- machineconfig/setup_linux/web_shortcuts/all.sh +0 -48
- machineconfig/setup_linux/web_shortcuts/update_system.sh +0 -48
- machineconfig/utils/utils.py +0 -95
- /machineconfig/setup_linux/web_shortcuts/{tmp.sh → android.sh} +0 -0
- {machineconfig-2.0.dist-info → machineconfig-2.2.dist-info}/WHEEL +0 -0
- {machineconfig-2.0.dist-info → machineconfig-2.2.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 
-
 from machineconfig.utils.utils2 import randstr
+
 from datetime import datetime
 import time
 from pathlib import Path
@@ -8,12 +8,11 @@ import sys
 import subprocess
 from platform import system
 from typing import Any, Optional, Union, Callable, TypeAlias, Literal
-import os
 
 
-OPLike: TypeAlias = Union[str,
-PLike: TypeAlias = Union[str,
-FILE_MODE: TypeAlias = Literal[
+OPLike: TypeAlias = Union[str, "PathExtended", Path, None]
+PLike: TypeAlias = Union[str, "PathExtended", Path]
+FILE_MODE: TypeAlias = Literal["r", "w", "x", "a"]
 SHUTIL_FORMATS: TypeAlias = Literal["zip", "tar", "gztar", "bztar", "xztar"]
 
 
@@ -27,20 +26,24 @@ def pwd2key(password: str, salt: Optional[bytes] = None, iterations: int = 10) -
     from cryptography.hazmat.primitives import hashes
     from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
     return base64.urlsafe_b64encode(PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=iterations, backend=None).derive(password.encode()))
+
+
 def encrypt(msg: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None, salted: bool = True, iteration: Optional[int] = None, gen_key: bool = False) -> bytes:
     import base64
     from cryptography.fernet import Fernet
+
     salt, iteration = None, None
     if pwd is not None: # generate it from password
         assert (key is None) and (type(pwd) is str), "❌ You can either pass key or pwd, or none of them, but not both."
         import secrets
+
         iteration = iteration or secrets.randbelow(exclusive_upper_bound=1_000_000)
         salt = secrets.token_bytes(nbytes=16) if salted else None
         key_resolved = pwd2key(password=pwd, salt=salt, iterations=iteration)
     elif key is None:
         if gen_key:
             key_resolved = Fernet.generate_key()
-            Path.home().joinpath(
+            Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").write_bytes(key_resolved)
         else:
             try:
                 key_resolved = Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").read_bytes()
@@ -48,149 +51,85 @@ def encrypt(msg: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None,
             except FileNotFoundError as err:
                 print("\n" * 3, "~" * 50, """Consider Loading up your dotfiles or pass `gen_key=True` to make and save one.""", "~" * 50, "\n" * 3)
                 raise FileNotFoundError(err) from err
-    elif isinstance(key, (str,
-
-
+    elif isinstance(key, (str, PathExtended, Path)):
+        key_resolved = Path(key).read_bytes() # a path to a key file was passed, read it:
+    elif type(key) is bytes:
+        key_resolved = key # key passed explicitly
+    else:
+        raise TypeError("❌ Key must be either a path, bytes object or None.")
     code = Fernet(key=key_resolved).encrypt(msg)
-    if pwd is not None and salt is not None and iteration is not None:
+    if pwd is not None and salt is not None and iteration is not None:
+        return base64.urlsafe_b64encode(b"%b%b%b" % (salt, iteration.to_bytes(4, "big"), base64.urlsafe_b64decode(code)))
     return code
+
+
 def decrypt(token: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None, salted: bool = True) -> bytes:
     import base64
+
     if pwd is not None:
         assert key is None, "❌ You can either pass key or pwd, or none of them, but not both."
         if salted:
             decoded = base64.urlsafe_b64decode(token)
             salt, iterations, token = decoded[:16], decoded[16:20], base64.urlsafe_b64encode(decoded[20:])
-            key_resolved = pwd2key(password=pwd, salt=salt, iterations=int.from_bytes(bytes=iterations, byteorder=
-        else:
+            key_resolved = pwd2key(password=pwd, salt=salt, iterations=int.from_bytes(bytes=iterations, byteorder="big"))
+        else:
+            key_resolved = pwd2key(password=pwd) # trailing `;` prevents IPython from caching the result.
     elif type(key) is bytes:
         assert pwd is None, "❌ You can either pass key or pwd, or none of them, but not both."
         key_resolved = key # passsed explicitly
-    elif key is None:
-
-
+    elif key is None:
+        key_resolved = Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").read_bytes() # read from file
+    elif isinstance(key, (str, Path)):
+        key_resolved = Path(key).read_bytes() # passed a path to a file containing kwy
+    else:
+        raise TypeError(f"❌ Key must be either str, P, Path, bytes or None. Recieved: {type(key)}")
     from cryptography.fernet import Fernet
+
     return Fernet(key=key_resolved).decrypt(token)
 
-
+
+def validate_name(astring: str, replace: str = "_") -> str:
     import re
-    return re.sub(r'[^-a-zA-Z0-9_.()]+', replace, str(astring))
-def timestamp(fmt: Optional[str] = None, name: Optional[str] = None) -> str:
-    return ((name + '_') if name is not None else '') + datetime.now().strftime(fmt or '%Y-%m-%d-%I-%M-%S-%p-%f') # isoformat is not compatible with file naming convention, fmt here is.
-
-
-def modify_text(txt_raw: str, txt_search: str, txt_alt: Union[str, Callable[[str], str]], replace_line: bool = True, notfound_append: bool = False, prepend: bool = False, strict: bool = False):
-    lines, bingo = txt_raw.split("\n"), False
-    if not replace_line: # no need for line splitting
-        assert isinstance(txt_alt, str), f"txt_alt must be a string if notfound_append is True. It is not: {txt_alt}"
-        if txt_search in txt_raw: return txt_raw.replace(txt_search, txt_alt)
-        return txt_raw + "\n" + txt_alt if notfound_append else txt_raw
-    for idx, line in enumerate(lines):
-        if txt_search in line:
-            if isinstance(txt_alt, str): lines[idx] = txt_alt
-            elif callable(txt_alt): lines[idx] = txt_alt(line)
-            bingo = True
-    if strict and not bingo: raise ValueError(f"txt_search `{txt_search}` not found in txt_raw `{txt_raw}`")
-    if bingo is False and notfound_append is True:
-        assert isinstance(txt_alt, str), f"txt_alt must be a string if notfound_append is True. It is not: {txt_alt}"
-        if prepend: lines.insert(0, txt_alt)
-        else: lines.append(txt_alt) # txt not found, add it anyway.
-    return "\n".join(lines)
-
-
-class Compression:
-    @staticmethod
-    def compress_folder(root_dir: str, op_path: str, base_dir: str, fmt: SHUTIL_FORMATS = 'zip', verbose: bool = False, **kwargs: Any) -> str: # shutil works with folders nicely (recursion is done interally) # directory to be archived: root_dir\base_dir, unless base_dir is passed as absolute path. # when archive opened; base_dir will be found."""
-        base_name = op_path[:-4] if op_path.endswith(".zip") else op_path # .zip is added automatically by library, hence we'd like to avoid repeating it if user sent it.
-        import shutil
-        return shutil.make_archive(base_name=base_name, format=fmt, root_dir=root_dir, base_dir=base_dir, verbose=verbose, **kwargs) # returned path possible have added extension.
-    @staticmethod
-    def zip_file(ip_path: str, op_path: str, arcname: Optional[str]= None, password: Optional[bytes] = None, mode: FILE_MODE = "w", **kwargs: Any):
-        """arcname determines the directory of the file being archived inside the archive. Defaults to same as original directory except for drive.
-        When changed, it should still include the file path in its end. If arcname = filename without any path, then, it will be in the root of the archive."""
-        import zipfile
-        with zipfile.ZipFile(op_path, mode=mode) as jungle_zip:
-            if password is not None: jungle_zip.setpassword(pwd=password)
-            jungle_zip.write(filename=str(ip_path), arcname=str(arcname) if arcname is not None else None, compress_type=zipfile.ZIP_DEFLATED, **kwargs)
-        return Path(op_path)
-    @staticmethod
-    def unzip(ip_path: str, op_path: str, fname: Optional[str]= None, password: Optional[bytes] = None, memory: bool = False, **kwargs: Any) -> Path | dict[str, bytes] | bytes:
-        import zipfile
-        with zipfile.ZipFile(str(ip_path), 'r') as zipObj:
-            if memory:
-                return {name: zipObj.read(name) for name in zipObj.namelist()} if fname is None else zipObj.read(fname)
-            if fname is None:
-                zipObj.extractall(op_path, pwd=password, **kwargs)
-                return Path(op_path)
-            else:
-                zipObj.extract(member=str(fname), path=str(op_path), pwd=password)
-                return Path(op_path) / fname
-    @staticmethod
-    def gz(file: str, op_path: str): # see this on what to use: https://stackoverflow.com/questions/10540935/what-is-the-difference-between-tar-and-zip
-        import shutil
-        import gzip
-        with open(file, 'rb') as f_in:
-            with gzip.open(op_path, 'wb') as f_out: shutil.copyfileobj(f_in, f_out)
-        return Path(op_path)
-    @staticmethod
-    def ungz(path: str, op_path: str):
-        import gzip
-        import shutil
-        with gzip.open(path, 'r') as f_in, open(op_path, 'wb') as f_out: shutil.copyfileobj(f_in, f_out)
-        return Path(op_path)
-    @staticmethod
-    def unbz(path: str, op_path: str):
-        import bz2
-        import shutil
-        with bz2.BZ2File(path, 'r') as fr, open(str(op_path), 'wb') as fw: shutil.copyfileobj(fr, fw)
-        return Path(op_path)
-    @staticmethod
-    def xz(path: str, op_path: str):
-        import lzma
-        with lzma.open(op_path, "w") as f: f.write(Path(path).read_bytes())
-    @staticmethod
-    def unxz(ip_path: str, op_path: str):
-        import lzma
-        with lzma.open(ip_path) as file: Path(op_path).write_bytes(file.read())
-    @staticmethod
-    def tar(path: str, op_path: str):
-        import tarfile
-        with tarfile.open(op_path, "w:gz") as tar_: tar_.add(str(path), arcname=os.path.basename(path))
-        return Path(op_path)
-    @staticmethod
-    def untar(path: str, op_path: str, fname: Optional[str]= None, mode: Literal['r', 'w'] = 'r', **kwargs: Any):
-        import tarfile
-        with tarfile.open(str(path), mode) as file:
-            if fname is None: file.extractall(path=op_path, **kwargs) # extract all files in the archive
-            else: file.extract(fname, **kwargs)
-        return Path(op_path)
 
+    return re.sub(r"[^-a-zA-Z0-9_.()]+", replace, str(astring))
 
 
-
+def timestamp(fmt: Optional[str] = None, name: Optional[str] = None) -> str:
+    return ((name + "_") if name is not None else "") + datetime.now().strftime(fmt or "%Y-%m-%d-%I-%M-%S-%p-%f") # isoformat is not compatible with file naming convention, fmt here is.
+
+
+class PathExtended(type(Path()), Path): # type: ignore # pylint: disable=E0241
     # ============= Path management ==================
-    """
+    """The default behaviour of methods acting on underlying disk object is to perform the action and return a new path referring to the mutated object in disk drive.
     However, there is a flag `orig` that makes the function return orignal path object `self` as opposed to the new one pointing to new object.
     Additionally, the fate of the original object can be decided by a flag `inplace` which means `replace` it defaults to False and in essence, it deletes the original underlying object.
     This can be seen in `zip` and `encrypt` but not in `copy`, `move`, `retitle` because the fate of original file is dictated already.
     Furthermore, those methods are accompanied with print statement explaining what happened to the object."""
-
+
+    def delete(self, sure: bool = False, verbose: bool = True) -> "PathExtended": # slf = self.expanduser().resolve() don't resolve symlinks.
        if not sure:
-            if verbose:
+            if verbose:
+                print(f"❌ Did NOT DELETE because user is not sure. file: {repr(self)}.")
            return self
        if not self.exists():
            self.unlink(missing_ok=True)
-            if verbose:
+            if verbose:
+                print(f"❌ Could NOT DELETE nonexisting file {repr(self)}. ")
            return self # broken symlinks exhibit funny existence behaviour, catch them here.
-        if self.is_file() or self.is_symlink():
+        if self.is_file() or self.is_symlink():
+            self.unlink(missing_ok=True)
        else:
            import shutil
+
            shutil.rmtree(self, ignore_errors=False)
-        if verbose:
+        if verbose:
+            print(f"🗑️ ❌ DELETED {repr(self)}.")
        return self
-
+
+    def move(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, rel2it: bool = False, overwrite: bool = False, verbose: bool = True, parents: bool = True, content: bool = False) -> "PathExtended":
        path = self._resolve_path(folder=folder, name=name, path=path, default_name=self.absolute().name, rel2it=rel2it)
-        if parents:
+        if parents:
+            path.parent.mkdir(parents=True, exist_ok=True)
        slf = self.expanduser().resolve()
        if content:
            assert self.is_dir(), NotADirectoryError(f"💥 When `content` flag is set to True, path must be a directory. It is not: `{repr(self)}`")
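The `encrypt`/`decrypt` pair shown in this hunk derives a Fernet key from a password with PBKDF2-SHA256 and, in salted mode, prepends the 16-byte salt and the 4-byte big-endian iteration count to the token. A minimal standalone sketch of that round trip, written directly against the `cryptography` package (the helper names below are illustrative, not part of machineconfig):

```python
# Sketch of the password-based round trip shown in the diff above.
# Assumes the `cryptography` package is installed; function names are illustrative.
import base64
import secrets
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC


def derive_key(password: str, salt: bytes, iterations: int) -> bytes:
    # PBKDF2-SHA256 -> 32-byte key, urlsafe-base64 encoded as Fernet expects.
    kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=iterations)
    return base64.urlsafe_b64encode(kdf.derive(password.encode()))


def encrypt_with_pwd(msg: bytes, pwd: str) -> bytes:
    salt = secrets.token_bytes(16)
    iterations = secrets.randbelow(1_000_000) + 1
    token = Fernet(derive_key(pwd, salt, iterations)).encrypt(msg)
    # Pack salt + iteration count in front of the raw token, mirroring encrypt() above.
    return base64.urlsafe_b64encode(salt + iterations.to_bytes(4, "big") + base64.urlsafe_b64decode(token))


def decrypt_with_pwd(blob: bytes, pwd: str) -> bytes:
    decoded = base64.urlsafe_b64decode(blob)
    salt, iterations = decoded[:16], int.from_bytes(decoded[16:20], "big")
    token = base64.urlsafe_b64encode(decoded[20:])
    return Fernet(derive_key(pwd, salt, iterations)).decrypt(token)


assert decrypt_with_pwd(encrypt_with_pwd(b"hello", "secret"), "secret") == b"hello"
```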
@@ -203,202 +142,303 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
        else:
            try:
                slf.rename(path) # self._return(res=path, inplace=True, operation='rename', orig=False, verbose=verbose, strict=True, msg='')
-            except OSError as oe:
+            except OSError as oe: # OSError: [Errno 18] Invalid cross-device link:
                # https://stackoverflow.com/questions/42392600/oserror-errno-18-invalid-cross-device-link
                import shutil
+
                shutil.move(str(slf), str(path))
                _ = oe
-        if verbose:
+        if verbose:
+            print(f"🚚 MOVED {repr(self)} ==> {repr(path)}`")
        return path
-
+
+    def copy(
+        self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, content: bool = False, verbose: bool = True, append: Optional[str] = None, overwrite: bool = False, orig: bool = False
+    ) -> "PathExtended": # tested %100 # TODO: replace `content` flag with ability to interpret "*" in resolve method.
        dest = self._resolve_path(folder=folder, name=name, path=path, default_name=self.name, rel2it=False)
        dest = dest.expanduser().resolve()
        dest.parent.mkdir(parents=True, exist_ok=True)
        slf = self.expanduser().resolve()
        if dest == slf:
            dest = self.append(append if append is not None else f"_copy_{randstr()}")
-        if not content and overwrite and dest.exists():
-
+        if not content and overwrite and dest.exists():
+            dest.delete(sure=True)
+        if not content and not overwrite and dest.exists():
+            raise FileExistsError(f"💥 Destination already exists: {repr(dest)}")
        if slf.is_file():
            import shutil
+
            shutil.copy(str(slf), str(dest))
-            if verbose:
+            if verbose:
+                print(f"🖨️ COPIED {repr(slf)} ==> {repr(dest)}")
        elif slf.is_dir():
            dest = dest.parent if content else dest
            # from distutils.dir_util import copy_tree
            from shutil import copytree
+
            copytree(str(slf), str(dest))
-            if verbose:
-
+            if verbose:
+                print(f"🖨️ COPIED {'Content of ' if content else ''} {repr(slf)} ==> {repr(dest)}")
+        else:
+            print(f"💥 Could NOT COPY. Not a file nor a path: {repr(slf)}.")
        return dest if not orig else self
+
    # ======================================= File Editing / Reading ===================================
-    def download(self, folder: OPLike = None, name: Optional[str]= None, allow_redirects: bool = True, timeout: Optional[int] = None, params: Any = None) ->
+    def download(self, folder: OPLike = None, name: Optional[str] = None, allow_redirects: bool = True, timeout: Optional[int] = None, params: Any = None) -> "PathExtended":
        import requests
+
        response = requests.get(self.as_url_str(), allow_redirects=allow_redirects, timeout=timeout, params=params) # Alternative: from urllib import request; request.urlopen(url).read().decode('utf-8').
        assert response.status_code == 200, f"Download failed with status code {response.status_code}\n{response.text}"
-        if name is not None:
+        if name is not None:
+            f_name = name
        else:
            try:
-                f_name = response.headers[
+                f_name = response.headers["Content-Disposition"].split("filename=")[1].replace('"', "")
            except (KeyError, IndexError):
-                f_name = validate_name(str(
-        dest_path = (
+                f_name = validate_name(str(PathExtended(response.history[-1].url).name if len(response.history) > 0 else PathExtended(response.url).name))
+        dest_path = (PathExtended.home().joinpath("Downloads") if folder is None else PathExtended(folder)).joinpath(f_name)
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        dest_path.write_bytes(response.content)
        return dest_path
-
-
-        if inplace:
-            assert self.exists(), f"`inplace` flag is only relevant if the path exists. It doesn't {self}"
-            if operation == "rename":
-                if overwrite and res.exists(): res.delete(sure=True, verbose=verbose)
-                if not overwrite and res.exists():
-                    if strict: raise FileExistsError(f"❌ RENAMING failed. File `{res}` already exists.")
-                    else:
-                        if verbose: print(f"⚠️ SKIPPED RENAMING {repr(self)} ➡️ {repr(res)} because FileExistsError and scrict=False policy.")
-                        return self if orig else res
-                self.rename(res)
-                msg = msg or f"RENAMED {repr(self)} ➡️ {repr(res)}"
-            elif operation == "delete":
-                self.delete(sure=True, verbose=False)
-                __delayed_msg__ = f"DELETED 🗑️❌ {repr(self)}."
-        if verbose and msg != "":
-            try: print(msg) # emojie print error.
-            except UnicodeEncodeError: print("P._return warning: UnicodeEncodeError, could not print message.")
-        if verbose and __delayed_msg__ != "":
-            try: print(__delayed_msg__)
-            except UnicodeEncodeError: print("P._return warning: UnicodeEncodeError, could not print message.")
-        return self if orig else res
-    def append(self, name: str = '', index: bool = False, suffix: Optional[str] = None, verbose: bool = True, **kwargs: Any) -> 'P':
+
+    def append(self, name: str = "", index: bool = False, suffix: Optional[str] = None, verbose: bool = True, **kwargs: Any) -> "PathExtended":
        """Returns a new path object with the name appended to the stem of the path. If `index` is True, the name will be the index of the path in the parent directory."""
        if index:
-            appended_name = f
+            appended_name = f"""{name}_{len(self.parent.search(f"*{self.name.split('.')[0]}*"))}"""
            return self.append(name=appended_name, index=False, verbose=verbose, suffix=suffix, **kwargs)
-        full_name =
-        full_suffix = suffix or
-        subpath = self.name.split(
-
+        full_name = name or ("_" + str(timestamp()))
+        full_suffix = suffix or "".join(("bruh" + self).suffixes)
+        subpath = self.name.split(".")[0] + full_name + full_suffix
+        dest = self.parent.joinpath(subpath)
+        res = PathExtended(dest)
+        inplace = bool(kwargs.get("inplace", False))
+        overwrite = bool(kwargs.get("overwrite", False))
+        orig = bool(kwargs.get("orig", False))
+        strict = bool(kwargs.get("strict", True))
+        if inplace:
+            assert self.exists(), f"`inplace` flag is only relevant if the path exists. It doesn't {self}"
+        if overwrite and res.exists():
+            res.delete(sure=True, verbose=verbose)
+        if not overwrite and res.exists():
+            if strict:
+                raise FileExistsError(f"❌ RENAMING failed. File `{res}` already exists.")
+            else:
+                if verbose:
+                    try:
+                        print(f"⚠️ SKIPPED RENAMING {repr(self)} ➡️ {repr(res)} because FileExistsError and scrict=False policy.")
+                    except UnicodeEncodeError:
+                        print("P._return warning: UnicodeEncodeError, could not print message.")
+                return self if orig else res
+        self.rename(res)
+        if verbose:
+            try:
+                print(f"RENAMED {repr(self)} ➡️ {repr(res)}")
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return self if orig else res
+
    def with_name(self, name: str, verbose: bool = True, inplace: bool = False, overwrite: bool = False, **kwargs: Any):
-
-
+        res = PathExtended(self.parent / name)
+        orig = bool(kwargs.get("orig", False))
+        strict = bool(kwargs.get("strict", True))
+        if inplace:
+            assert self.exists(), f"`inplace` flag is only relevant if the path exists. It doesn't {self}"
+        if overwrite and res.exists():
+            res.delete(sure=True, verbose=verbose)
+        if not overwrite and res.exists():
+            if strict:
+                raise FileExistsError(f"❌ RENAMING failed. File `{res}` already exists.")
+            else:
+                if verbose:
+                    try:
+                        print(f"⚠️ SKIPPED RENAMING {repr(self)} ➡️ {repr(res)} because FileExistsError and scrict=False policy.")
+                    except UnicodeEncodeError:
+                        print("P._return warning: UnicodeEncodeError, could not print message.")
+                return self if orig else res
+        self.rename(res)
+        if verbose:
+            try:
+                print(f"RENAMED {repr(self)} ➡️ {repr(res)}")
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return self if orig else res
+
+    def __deepcopy__(self, *args: Any, **kwargs: Any) -> "PathExtended":
        _ = args, kwargs
-        return
-
-    def
+        return PathExtended(str(self))
+
+    def __getstate__(self) -> str:
+        return str(self)
+
+    def __add__(self, other: PLike) -> "PathExtended":
        return self.parent.joinpath(self.name + str(other)) # used append and prepend if the addition wanted to be before suffix.
-
+
+    def __radd__(self, other: PLike) -> "PathExtended":
        return self.parent.joinpath(str(other) + self.name) # other + P and `other` doesn't know how to make this addition.
-
-
+
+    def __sub__(self, other: PLike) -> "PathExtended":
+        res = PathExtended(str(self).replace(str(other), ""))
        return (res[1:] if str(res[0]) in {"\\", "/"} else res) if len(res.parts) else res # paths starting with "/" are problematic. e.g ~ / "/path" doesn't work.
 
-    def rel2home(self
-
-
-
-
+    def rel2home(self) -> "PathExtended":
+        return PathExtended(self.expanduser().absolute().relative_to(Path.home())) # very similat to collapseuser but without "~" being added so its consistent with rel2cwd.
+
+    def collapseuser(self, strict: bool = True, placeholder: str = "~") -> "PathExtended": # opposite of `expanduser` resolve is crucial to fix Windows cases insensitivty problem.
+        if strict:
+            assert str(self.expanduser().absolute().resolve()).startswith(str(PathExtended.home())), ValueError(f"`{PathExtended.home()}` is not in the subpath of `{self}`")
+        if str(self).startswith(placeholder) or PathExtended.home().as_posix() not in self.resolve().as_posix():
+            return self
+        return PathExtended(placeholder) / (self.expanduser().absolute().resolve(strict=strict) - PathExtended.home()) # resolve also solves the problem of Windows case insensitivty.
+
    def __getitem__(self, slici: Union[int, list[int], slice]):
-        if isinstance(slici, list):
-
-
+        if isinstance(slici, list):
+            return PathExtended(*[self[item] for item in slici])
+        elif isinstance(slici, int):
+            return PathExtended(self.parts[slici])
+        return PathExtended(*self.parts[slici]) # must be a slice
+
    def split(self, at: Optional[str] = None, index: Optional[int] = None, sep: Literal[-1, 0, 1] = 1, strict: bool = True):
        if index is None and at is not None: # at is provided # ==================================== Splitting
            if not strict: # behaves like split method of string
                one, two = (items := str(self).split(sep=str(at)))[0], items[1]
-                one, two =
+                one, two = PathExtended(one[:-1]) if one.endswith("/") else PathExtended(one), PathExtended(two[1:]) if two.startswith("/") else PathExtended(two)
            else: # "strict": # raises an error if exact match is not found.
                index = self.parts.index(str(at))
-                one, two = self[0:index], self[index + 1:] # both one and two do not include the split item.
+                one, two = self[0:index], self[index + 1 :] # both one and two do not include the split item.
        elif index is not None and at is None: # index is provided
-            one, two = self[:index],
+            one, two = self[:index], PathExtended(*self.parts[index + 1 :])
            at = self.parts[index] # this is needed below.
-        else:
-
-
+        else:
+            raise ValueError("Either `index` or `at` can be provided. Both are not allowed simulatanesouly.")
+        if sep == 0:
+            return one, two # neither of the portions get the sperator appended to it. # ================================ appending `at` to one of the portions
+        elif sep == 1:
+            return one, PathExtended(at) / two # append it to right portion
        elif sep == -1:
            return one / at, two # append it to left portion.
-        else:
+        else:
+            raise ValueError(f"`sep` should take a value from the set [-1, 0, 1] but got {sep}")
+
    def __repr__(self): # this is useful only for the console
        if self.is_symlink():
-            try:
-
+            try:
+                target = self.resolve() # broken symolinks are funny, and almost always fail `resolve` method.
+            except Exception:
+                target = "BROKEN LINK " + str(self) # avoid infinite recursions for broken links.
            return "🔗 Symlink '" + str(self) + "' ==> " + (str(target) if target == self else str(target))
-        elif self.is_absolute():
-
-
+        elif self.is_absolute():
+            return self._type() + " '" + str(self.clickable()) + "'" + (" | " + datetime.fromtimestamp(self.stat().st_ctime).isoformat()[:-7].replace("T", " ") if self.exists() else "") + (f" | {self.size()} Mb" if self.is_file() else "")
+        elif "http" in str(self):
+            return "🕸️ URL " + str(self.as_url_str())
+        else:
+            return "📍 Relative " + "'" + str(self) + "'" # not much can be said about a relative path.
+
    # def to_str(self) -> str: return str(self)
-    def size(self, units: Literal[
+    def size(self, units: Literal["b", "kb", "mb", "gb"] = "mb") -> float: # ===================================== File Specs ==========================================================================================
        total_size = self.stat().st_size if self.is_file() else sum([item.stat().st_size for item in self.rglob("*") if item.is_file()])
        tmp: int
        match units:
-            case "b":
-
-            case "
-
+            case "b":
+                tmp = 1024**0
+            case "kb":
+                tmp = 1024**1
+            case "mb":
+                tmp = 1024**2
+            case "gb":
+                tmp = 1024**3
        return round(number=total_size / tmp, ndigits=1)
-    def time(self, which: Literal["m", "c", "a"] = "m", **kwargs: Any):
-        """* `m`: last mofidication of content, i.e. the time it was created.
-        * `c`: last status change (its inode is changed, permissions, path, but not content)
-        * `a`: last access (read)
-        """
-        match which:
-            case "m": tmp = self.stat().st_mtime
-            case "a": tmp = self.stat().st_atime
-            case "c": tmp = self.stat().st_ctime
-        return datetime.fromtimestamp(tmp, **kwargs)
 
    # ================================ String Nature management ====================================
-    def clickable(self
-
+    def clickable(self) -> "PathExtended":
+        return PathExtended(self.expanduser().resolve().as_uri())
+
+    def as_url_str(self) -> "str":
+        return self.as_posix().replace("https:/", "https://").replace("http:/", "http://")
+
    def as_zip_path(self):
        import zipfile
+
        res = self.expanduser().resolve()
        return zipfile.Path(res) # .str.split(".zip") tmp=res[1]+(".zip" if len(res) > 2 else ""); root=res[0]+".zip", at=P(tmp).as_posix()) # TODO
+
    # ========================== override =======================================
-    def __setitem__(self, key: Union[
-        fullparts, new = list(self.parts), list(
+    def __setitem__(self, key: Union["str", int, slice], value: PLike):
+        fullparts, new = list(self.parts), list(PathExtended(value).parts)
        if type(key) is str:
            idx = fullparts.index(key)
            fullparts.remove(key)
-            fullparts = fullparts[:idx] + new + fullparts[idx + 1:]
-        elif type(key) is int:
-
-
+            fullparts = fullparts[:idx] + new + fullparts[idx + 1 :]
+        elif type(key) is int:
+            fullparts = fullparts[:key] + new + fullparts[key + 1 :]
+        elif type(key) is slice:
+            fullparts = fullparts[: (0 if key.start is None else key.start)] + new + fullparts[(len(fullparts) if key.stop is None else key.stop) :]
+        self._str = str(PathExtended(*fullparts)) # pylint: disable=W0201 # similar attributes: # self._parts # self._pparts # self._cparts # self._cached_cparts
 
    def _type(self):
        if self.absolute():
-            if self.is_file():
-
+            if self.is_file():
+                return "📄"
+            elif self.is_dir():
+                return "📁"
            return "👻NotExist"
        return "📍Relative"
+
    def symlink_to(self, target: PLike, verbose: bool = True, overwrite: bool = False, orig: bool = False, strict: bool = True): # type: ignore[override] # pylint: disable=W0237
        self.parent.mkdir(parents=True, exist_ok=True)
-        target_obj =
-        if strict:
-
+        target_obj = PathExtended(target).expanduser().resolve()
+        if strict:
+            assert target_obj.exists(), f"Target path `{target}` (aka `{target_obj}`) doesn't exist. This will create a broken link."
+        if overwrite and (self.is_symlink() or self.exists()):
+            self.delete(sure=True, verbose=verbose)
        from machineconfig.utils.terminal import Terminal
+
        if system() == "Windows" and not Terminal.is_user_admin(): # you cannot create symlink without priviliages.
            import win32com.shell.shell
-            _proce_info = win32com.shell.shell.ShellExecuteEx(lpVerb=
+            _proce_info = win32com.shell.shell.ShellExecuteEx(lpVerb="runas", lpFile=sys.executable, lpParameters=f" -c \"from pathlib import Path; Path(r'{self.expanduser()}').symlink_to(r'{str(target_obj)}')\"")
            # TODO update PATH for this to take effect immediately.
            time.sleep(1) # wait=True equivalent
-        else:
-
+        else:
+            super(PathExtended, self.expanduser()).symlink_to(str(target_obj))
+        if verbose:
+            try:
+                print(f"LINKED {repr(self)} ➡️ {repr(target_obj)}")
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return self if orig else target_obj
+
    def resolve(self, strict: bool = False):
-        try:
-
+        try:
+            return super(PathExtended, self).resolve(strict=strict)
+        except OSError:
+            return self
+
    # ======================================== Folder management =======================================
-    def search(
-
+    def search(
+        self,
+        pattern: str = "*",
+        r: bool = False,
+        files: bool = True,
+        folders: bool = True,
+        compressed: bool = False,
+        dotfiles: bool = False,
+        filters_total: Optional[list[Callable[[Any], bool]]] = None,
+        not_in: Optional[list[str]] = None,
+        exts: Optional[list[str]] = None,
+        win_order: bool = False,
+    ) -> list["PathExtended"]:
        if isinstance(not_in, list):
            filters_notin = [lambda x: all([str(a_not_in) not in str(x) for a_not_in in not_in])] # type: ignore
-        else:
+        else:
+            filters_notin = []
        if isinstance(exts, list):
            filters_extension = [lambda x: any([ext in x.name for ext in exts])] # type: ignore
-        else:
+        else:
+            filters_extension = []
        filters_total = (filters_total or []) + filters_notin + filters_extension
-        if not files:
-
+        if not files:
+            filters_total.append(lambda x: x.is_dir())
+        if not folders:
+            filters_total.append(lambda x: x.is_file())
        slf = self.expanduser().resolve()
        if ".zip" in str(slf) and compressed: # the root (self) is itself a zip archive (as opposed to some search results are zip archives)
            import zipfile
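The `search` signature restored in the hunk above exposes a `win_order` flag; the next hunk shows it applied by splitting file stems on digit runs so that, for example, "file10" sorts after "file2". A minimal standalone sketch of that ordering (the sample names are hypothetical):

```python
# Explorer-style numeric ordering, mirroring the win_order sort key used by search().
import re


def win_sort_key(stem: str) -> list:
    # Split on digit runs; compare numbers numerically and text lexically.
    return [int(part) if part.isdigit() else part for part in re.split(r"([0-9]+)", stem)]


names = ["file10", "file2", "file1"]
print(sorted(names, key=win_sort_key))  # ['file1', 'file2', 'file10']
```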
@@ -412,72 +452,140 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
|
|
|
412
452
|
res1 = [item for item in raw if fnmatch.fnmatch(item.at, pattern)]
|
|
413
453
|
# return res1.filter(lambda x: (folders or x.is_file()) and (files or x.is_dir()))
|
|
414
454
|
return [item for item in res1 if (folders or item.is_file()) and (files or item.is_dir())] # type: ignore
|
|
415
|
-
elif dotfiles:
|
|
455
|
+
elif dotfiles:
|
|
456
|
+
raw = slf.glob(pattern) if not r else self.rglob(pattern)
|
|
416
457
|
else:
|
|
417
458
|
from glob import glob
|
|
459
|
+
|
|
418
460
|
if r:
|
|
419
461
|
raw = glob(str(slf / "**" / pattern), recursive=r)
|
|
420
462
|
else:
|
|
421
463
|
raw = glob(str(slf.joinpath(pattern))) # glob ignroes dot and hidden files
|
|
422
464
|
if ".zip" not in str(slf) and compressed:
|
|
423
|
-
filters_notin = [
|
|
465
|
+
filters_notin = [
|
|
466
|
+
PathExtended(comp_file).search(pattern=pattern, r=r, files=files, folders=folders, compressed=True, dotfiles=dotfiles, filters_total=filters_total, not_in=not_in, win_order=win_order) for comp_file in self.search("*.zip", r=r)
|
|
467
|
+
]
|
|
424
468
|
from functools import reduce
|
|
469
|
+
|
|
425
470
|
# haha = List(filters_notin).reduce(func=lambda x, y: x + y)
|
|
426
471
|
haha = reduce(lambda x, y: x + y, filters_notin) if len(filters_notin) else []
|
|
427
472
|
raw = raw + haha # type: ignore
|
|
428
473
|
processed = []
|
|
429
474
|
for item in raw:
|
|
430
|
-
item_ =
|
|
475
|
+
item_ = PathExtended(item)
|
|
431
476
|
if all([afilter(item_) for afilter in filters_total]):
|
|
432
477
|
processed.append(item_)
|
|
433
|
-
if not win_order:
|
|
478
|
+
if not win_order:
|
|
479
|
+
return list(processed)
|
|
434
480
|
import re
|
|
435
|
-
|
|
481
|
+
|
|
482
|
+
processed.sort(key=lambda x: [int(k) if k.isdigit() else k for k in re.split("([0-9]+)", string=x.stem)])
|
|
436
483
|
return list(processed)
|
|
484
|
+
|
|
437
485
|
@staticmethod
|
|
438
|
-
def tmpdir(prefix: str = "") ->
|
|
439
|
-
return
|
|
486
|
+
def tmpdir(prefix: str = "") -> "PathExtended":
|
|
487
|
+
return PathExtended.tmp(folder=rf"tmp_dirs/{prefix + ('_' if prefix != '' else '') + randstr()}")
|
|
488
|
+
|
|
440
489
|
@staticmethod
|
|
441
|
-
def tmpfile(name: Optional[str]= None, suffix: str = "", folder: OPLike = None, tstamp: bool = False, noun: bool = False) ->
|
|
490
|
+
def tmpfile(name: Optional[str] = None, suffix: str = "", folder: OPLike = None, tstamp: bool = False, noun: bool = False) -> "PathExtended":
|
|
442
491
|
name_concrete = name or randstr(noun=noun)
|
|
443
|
-
return
|
|
492
|
+
return PathExtended.tmp(file=name_concrete + "_" + randstr() + (("_" + str(timestamp())) if tstamp else "") + suffix, folder=folder or "tmp_files")
|
|
493
|
+
|
|
444
494
|
@staticmethod
|
|
445
|
-
def tmp(folder: OPLike = None, file: Optional[str] = None, root: str = "~/tmp_results") ->
|
|
446
|
-
base =
|
|
495
|
+
def tmp(folder: OPLike = None, file: Optional[str] = None, root: str = "~/tmp_results") -> "PathExtended":
|
|
496
|
+
base = PathExtended(root).expanduser().joinpath(folder or "").joinpath(file or "")
|
|
447
497
|
target_path = base.parent if file else base
|
|
448
498
|
target_path.mkdir(parents=True, exist_ok=True)
|
|
449
499
|
return base
|
|
500
|
+
|
|
     # ====================================== Compression & Encryption ===========================================
-    def zip(
-
+    def zip(
+        self,
+        path: OPLike = None,
+        folder: OPLike = None,
+        name: Optional[str] = None,
+        arcname: Optional[str] = None,
+        inplace: bool = False,
+        verbose: bool = True,
+        content: bool = False,
+        orig: bool = False,
+        pwd: Optional[str] = None,
+        mode: FILE_MODE = "w",
+        **kwargs: Any,
+    ) -> "PathExtended":
         path_resolved, slf = self._resolve_path(folder, name, path, self.name).expanduser().resolve(), self.expanduser().resolve()
         # if use_7z: # benefits over regular zip and encrypt: can handle very large files with low memory footprint
         # path_resolved = path_resolved + '.7z' if not path_resolved.suffix == '.7z' else path_resolved
         # with install_n_import("py7zr").SevenZipFile(file=path_resolved, mode=mode, password=pwd) as archive: archive.writeall(path=str(slf), arcname=None)
-        arcname_obj =
-        if arcname_obj.name != slf.name:
+        arcname_obj = PathExtended(arcname or slf.name)
+        if arcname_obj.name != slf.name:
+            arcname_obj /= slf.name # arcname has to start from somewhere and end with filename
         if slf.is_file():
-
+            import zipfile
+
+            op_zip = str(path_resolved + ".zip" if path_resolved.suffix != ".zip" else path_resolved)
+            with zipfile.ZipFile(op_zip, mode=mode) as jungle_zip:
+                jungle_zip.write(filename=str(slf), arcname=str(arcname_obj), compress_type=zipfile.ZIP_DEFLATED, **kwargs)
+            path_resolved = PathExtended(op_zip)
         else:
-
-
-
-
-
-
+            import shutil
+
+            if content:
+                root_dir, base_dir = slf, "."
+            else:
+                root_dir, base_dir = slf.split(at=str(arcname_obj[0]), sep=1)[0], str(arcname_obj)
+            base_name = str(path_resolved)[:-4] if str(path_resolved).endswith(".zip") else str(path_resolved)
+            op_zip = shutil.make_archive(base_name=base_name, format="zip", root_dir=str(root_dir), base_dir=str(base_dir), verbose=False, **kwargs)
+            path_resolved = PathExtended(op_zip)
+        msg = f"ZIPPED {repr(slf)} ==> {repr(path)}"
+        res_out = PathExtended(path_resolved)
+        ret = self if orig else res_out
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
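A minimal sketch of how the rewritten zip might be exercised, under the same hypothetical import as above; the archive lands next to the source with a .zip suffix unless path, folder, or name say otherwise.

# Sketch only; assumes PathExtended is importable (module path below is hypothetical).
from machineconfig.utils.path_extended import PathExtended  # hypothetical

src = PathExtended.tmpfile(suffix=".txt")
src.write_text("hello")                      # plain pathlib API, inherited from Path
archive = src.zip(verbose=False)             # -> <src>.zip, arcname defaults to the file name
print(archive, archive.exists())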
+    def unzip(
+        self,
+        folder: OPLike = None,
+        path: OPLike = None,
+        name: Optional[str] = None,
+        verbose: bool = True,
+        content: bool = False,
+        inplace: bool = False,
+        overwrite: bool = False,
+        orig: bool = False,
+        pwd: Optional[str] = None,
+        tmp: bool = False,
+        pattern: Optional[str] = None,
+        merge: bool = False,
+    ) -> "PathExtended":
         assert merge is False, "I have not implemented this yet"
         assert path is None, "I have not implemented this yet"
-        if tmp:
+        if tmp:
+            return self.unzip(folder=PathExtended.tmp().joinpath("tmp_unzips").joinpath(randstr()), content=True).joinpath(self.stem)
         slf = zipfile__ = self.expanduser().resolve()
         if any(ztype in str(slf.parent) for ztype in (".zip", ".7z")): # path include a zip archive in the middle.
             tmp__ = [item for item in (".zip", ".7z", "") if item in str(slf)]
             ztype = tmp__[0]
-            if ztype == "":
+            if ztype == "":
+                return slf
             # zipfile__, name__ = slf.split(at=str(List(slf.parts).filter(lambda x: ztype in x)[0]), sep=-1)
             zipfile__, name__ = slf.split(at=str(next(item for item in slf.parts if ztype in item)), sep=-1)
             name = str(name__)
-        folder = (zipfile__.parent / zipfile__.stem) if folder is None else
-        assert isinstance(folder,
+        folder = (zipfile__.parent / zipfile__.stem) if folder is None else PathExtended(folder).expanduser().absolute().resolve().joinpath(zipfile__.stem)
+        assert isinstance(folder, PathExtended), "folder should be a P object at this point"
         folder = folder if not content else folder.parent
         if slf.suffix == ".7z":
             raise NotImplementedError("I have not implemented this yet")
@@ -492,80 +600,231 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
             # else: archive.extractall(path=folder)
         else:
             if overwrite:
-                if not content:
+                if not content:
+                    PathExtended(folder).joinpath(name or "").delete(sure=True, verbose=True) # deletes a specific file / folder that has the same name as the zip file without extension.
                 else:
                     import zipfile
-
+
+                    mylist = [x for x in zipfile.ZipFile(str(self)).namelist() if "/" not in x or (len(x.split("/")) == 2 and x.endswith("/"))]
                     # List().apply(lambda item: P(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True))
-                    for item in mylist:
-
-
-
-
+                    for item in mylist:
+                        PathExtended(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True)
+            import zipfile
+
+            target_name = None if name is None else PathExtended(name).as_posix()
+            with zipfile.ZipFile(str(zipfile__), "r") as zipObj:
+                if target_name is None:
+                    zipObj.extractall(str(folder))
+                    result = Path(str(folder))
+                else:
+                    zipObj.extract(member=str(target_name), path=str(folder))
+                    result = Path(str(folder)) / target_name
+            res_path = PathExtended(result)
+        msg = f"UNZIPPED {repr(zipfile__)} ==> {repr(result)}"
+        ret = self if orig else PathExtended(res_path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
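And the matching extraction path through unzip, again as a hedged sketch on top of the hypothetical import; content=True drops the archive's stem folder and extracts straight into the target directory.

# Sketch only; continues the zip example above (hypothetical import path).
from machineconfig.utils.path_extended import PathExtended  # hypothetical

src = PathExtended.tmpfile(suffix=".txt")
src.write_text("payload")
archive = src.zip(verbose=False)
out_dir = archive.unzip(folder=PathExtended.tmpdir(), content=True, verbose=False)
print(out_dir)                               # directory that now holds the extracted file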
+    def untar(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".tar", "")).expanduser().resolve()
-
-
-
+        import tarfile
+
+        with tarfile.open(str(self.expanduser().resolve()), "r") as tf:
+            tf.extractall(path=str(op_path))
+        msg = f"UNTARRED {repr(self)} ==> {repr(op_path)}"
+        ret = self if orig else PathExtended(op_path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
+    def ungz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".gz", "")).expanduser().resolve()
-
-
-
+        import gzip
+        PathExtended(str(op_path)).write_bytes(gzip.decompress(PathExtended(str(self.expanduser().resolve())).read_bytes()))
+        msg = f"UNGZED {repr(self)} ==> {repr(op_path)}"
+        ret = self if orig else PathExtended(op_path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
+    def unxz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".xz", "")).expanduser().resolve()
-
-
-
+        import lzma
+
+        PathExtended(str(op_path)).write_bytes(lzma.decompress(PathExtended(str(self.expanduser().resolve())).read_bytes()))
+        msg = f"UNXZED {repr(self)} ==> {repr(op_path)}"
+        ret = self if orig else PathExtended(op_path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
+    def unbz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder=folder, name=name, path=path, default_name=self.name.replace(".bz", "").replace(".tbz", ".tar")).expanduser().resolve()
-
-
-
+        import bz2
+
+        PathExtended(str(op_path)).write_bytes(bz2.decompress(PathExtended(str(self.expanduser().resolve())).read_bytes()))
+        msg = f"UNBZED {repr(self)} ==> {repr(op_path)}"
+        ret = self if orig else PathExtended(op_path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
+    def decompress(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         if ".tar.gz" in str(self) or ".tgz" in str(self):
             # res = self.ungz_untar(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
             return self.ungz(name=f"tmp_{randstr()}.tar", inplace=inplace).untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose) # this works for .tgz suffix as well as .tar.gz
-        elif ".gz" in str(self):
+        elif ".gz" in str(self):
+            res = self.ungz(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
         elif ".tar.bz" in str(self) or "tbz" in str(self):
             res = self.unbz(name=f"tmp_{randstr()}.tar", inplace=inplace)
             return res.untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose)
         elif ".tar.xz" in str(self):
             # res = self.unxz_untar(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
             res = self.unxz(inplace=inplace).untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose)
-        elif ".zip" in str(self):
-
+        elif ".zip" in str(self):
+            res = self.unzip(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
+        else:
+            res = self
         return res
-
-
+
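decompress simply dispatches on the suffix to the ungz/unbz/unxz/untar/unzip primitives above. A sketch, assuming an archive already on disk and the same hypothetical import:

# Sketch only; dataset.tar.gz is a stand-in name, not a file shipped by the package.
from machineconfig.utils.path_extended import PathExtended  # hypothetical

archive = PathExtended("~/Downloads/dataset.tar.gz").expanduser()
if archive.exists():
    extracted = archive.decompress(folder=PathExtended.tmpdir(), verbose=False)  # ungz then untar
    print(extracted)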
+    def encrypt(
+        self, key: Optional[bytes] = None, pwd: Optional[str] = None, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False, orig: bool = False
+    ) -> "PathExtended":
         # see: https://stackoverflow.com/questions/42568262/how-to-encrypt-text-with-a-password-in-python & https://stackoverflow.com/questions/2490334/simple-way-to-encode-a-string-according-to-a-password"""
         slf = self.expanduser().resolve()
         path = self._resolve_path(folder, name, path, slf.name + suffix)
         assert slf.is_file(), f"Cannot encrypt a directory. You might want to try `zip_n_encrypt`. {self}"
         path.write_bytes(encrypt(msg=slf.read_bytes(), key=key, pwd=pwd))
-
-
+        msg = f"🔒🔑 ENCRYPTED: {repr(slf)} ==> {repr(path)}."
+        ret = self if orig else PathExtended(path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
+    def decrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, path: OPLike = None, folder: OPLike = None, name: Optional[str] = None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False) -> "PathExtended":
         slf = self.expanduser().resolve()
         path = self._resolve_path(folder=folder, name=name, path=path, default_name=slf.name.replace(suffix, "") if suffix in slf.name else "decrypted_" + slf.name)
         path.write_bytes(decrypt(token=slf.read_bytes(), key=key, pwd=pwd))
-
-
+        msg = f"🔓🔑 DECRYPTED: {repr(slf)} ==> {repr(path)}."
+        ret = PathExtended(path)
+        delayed_msg = ""
+        if inplace:
+            self.delete(sure=True, verbose=False)
+            delayed_msg = f"DELETED 🗑️❌ {repr(self)}."
+        if verbose:
+            try:
+                print(msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        if verbose and delayed_msg != "":
+            try:
+                print(delayed_msg)
+            except UnicodeEncodeError:
+                print("P._return warning: UnicodeEncodeError, could not print message.")
+        return ret
+
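A round-trip sketch for encrypt/decrypt, assuming password-derived keys round-trip the way the pwd parameter suggests (the key-derivation details live outside this diff) and the same hypothetical import:

# Sketch only; key handling is assumed, not shown in this diff.
from machineconfig.utils.path_extended import PathExtended  # hypothetical

secret = PathExtended.tmpfile(suffix=".txt")
secret.write_text("do not share")
enc = secret.encrypt(pwd="correct-horse", verbose=False)   # writes <name>.txt.enc next to the source
dec = enc.decrypt(pwd="correct-horse", verbose=False)      # strips the .enc suffix on the way back
assert dec.read_text() == "do not share"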
+    def zip_n_encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False, content: bool = False) -> "PathExtended":
         return self.zip(inplace=inplace, verbose=verbose, content=content).encrypt(key=key, pwd=pwd, verbose=verbose, inplace=True) if not orig else self
-
-    def
+
+    def decrypt_n_unzip(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False) -> "PathExtended":
+        return self.decrypt(key=key, pwd=pwd, verbose=verbose, inplace=inplace).unzip(folder=None, inplace=True, content=False) if not orig else self
+
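The two conveniences above just chain the primitives; a sketch of the intended round trip, under the same assumptions as the previous examples:

# Sketch only; hypothetical import, password semantics assumed as above.
from machineconfig.utils.path_extended import PathExtended  # hypothetical

bundle = PathExtended.tmpdir(prefix="bundle")
bundle.joinpath("notes.txt").write_text("hello")
sealed = bundle.zip_n_encrypt(pwd="correct-horse", verbose=False)     # folder -> .zip -> .zip.enc
restored = sealed.decrypt_n_unzip(pwd="correct-horse", verbose=False)
print(sealed, restored, sep="\n")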
+    def _resolve_path(self, folder: OPLike, name: Optional[str], path: OPLike, default_name: str, rel2it: bool = False) -> "PathExtended":
         """:param rel2it: `folder` or `path` are relative to `self` as opposed to cwd. This is used when resolving '../dir'"""
         if path is not None:
-            path =
+            path = PathExtended(self.joinpath(path).resolve() if rel2it else path).expanduser().resolve()
             assert folder is None and name is None, "If `path` is passed, `folder` and `name` cannot be passed."
-            assert isinstance(path,
+            assert isinstance(path, PathExtended), "path should be a P object at this point"
             assert not path.is_dir(), f"`path` passed is a directory! it must not be that. If this is meant, pass it with `folder` kwarg. `{path}`"
             return path
         name, folder = (default_name if name is None else str(name)), (self.parent if folder is None else folder) # good for edge cases of path with single part. # means same directory, just different name
-        return
+        return PathExtended(self.joinpath(folder).resolve() if rel2it else folder).expanduser().resolve() / name
 
-    def get_remote_path(self, root: Optional[str], os_specific: bool = False, rel2home: bool = True, strict: bool = True) ->
+    def get_remote_path(self, root: Optional[str], os_specific: bool = False, rel2home: bool = True, strict: bool = True) -> "PathExtended":
         import platform
-
-
+
+        tmp1: str = platform.system().lower() if os_specific else "generic_os"
+        if not rel2home:
+            path = self
         else:
-            try:
+            try:
+                path = self.rel2home()
             except ValueError as ve:
-                if strict:
+                if strict:
+                    raise ve
                 path = self
         # if obfuscate:
         # msc.obfuscater import obfuscate as obfuscate_func
@@ -573,14 +832,30 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
         # path = path.with_name(name=name)
         if isinstance(root, str): # the following is to avoid the confusing behaviour of A.joinpath(B) if B is absolute.
             part1 = path.parts[0]
-            if part1 == "/":
-
-
+            if part1 == "/":
+                sanitized_path = path[1:].as_posix()
+            else:
+                sanitized_path = path.as_posix()
+            return PathExtended(root + "/" + tmp1 + "/" + sanitized_path)
         return tmp1 / path
-
-
-
-
+
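get_remote_path maps a local path onto the cloud-side layout (<root>/<os or generic_os>/<path relative to home>); a sketch with the hypothetical import:

# Sketch only; the printed layout follows the code above, the import path is a guess.
from machineconfig.utils.path_extended import PathExtended  # hypothetical

local = PathExtended("~/dotfiles/config.toml").expanduser()
remote = local.get_remote_path(root="myhome", os_specific=False, rel2home=True)
print(remote)   # e.g. myhome/generic_os/dotfiles/config.toml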
+    def to_cloud(
+        self,
+        cloud: str,
+        remotepath: OPLike = None,
+        zip: bool = False,
+        encrypt: bool = False, # pylint: disable=W0621, W0622
+        key: Optional[bytes] = None,
+        pwd: Optional[str] = None,
+        rel2home: bool = False,
+        strict: bool = True,
+        # obfuscate: bool = False,
+        share: bool = False,
+        verbose: bool = True,
+        os_specific: bool = False,
+        transfers: int = 10,
+        root: Optional[str] = "myhome",
+    ) -> "PathExtended":
         to_del = []
         localpath = self.expanduser().absolute() if not self.exists() else self
         if zip:
@@ -591,17 +866,22 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
             to_del.append(localpath)
         if remotepath is None:
             rp = localpath.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict) # if rel2home else (P(root) / localpath if root is not None else localpath)
-        else:
-
+        else:
+            rp = PathExtended(remotepath)
+        rclone_cmd = f"""rclone copyto '{localpath.as_posix()}' '{cloud}:{rp.as_posix()}' {"--progress" if verbose else ""} --transfers={transfers}"""
         from machineconfig.utils.terminal import Terminal
-
+
+        if verbose:
+            print(f"{'⬆️' * 5} UPLOADING with `{rclone_cmd}`")
         shell_to_use = "powershell" if sys.platform == "win32" else "bash"
         res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use).capture()
         _ = [item.delete(sure=True) for item in to_del]
         assert res.is_successful(strict_err=False, strict_returcode=True), res.print(capture=False, desc="Cloud Storage Operation")
-        if verbose:
+        if verbose:
+            print(f"{'⬆️' * 5} UPLOAD COMPLETED.")
         if share:
-            if verbose:
+            if verbose:
+                print("🔗 SHARING FILE")
             shell_to_use = "powershell" if sys.platform == "win32" else "bash"
             res = Terminal().run(f"""rclone link '{cloud}:{rp.as_posix()}'""", shell=shell_to_use).capture()
             tmp = res.op2path(strict_err=False, strict_returncode=False)
@@ -610,32 +890,53 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
                 raise RuntimeError(f"💥 Could not get link for {self}.")
             else:
                 res.print_if_unsuccessful(desc="Cloud Storage Operation", strict_err=True, strict_returncode=True)
-            link_p:
+            link_p: "PathExtended" = PathExtended(str(tmp))
             return link_p
         return self
-
-
-
+
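to_cloud shells out to rclone; the sketch below assumes an rclone remote named "gdrive" is already configured on the machine, in addition to the hypothetical import.

# Sketch only; "gdrive" is a placeholder rclone remote, not something this diff configures.
from machineconfig.utils.path_extended import PathExtended  # hypothetical

backup = PathExtended("~/dotfiles/config.toml").expanduser()
link = backup.to_cloud(cloud="gdrive", rel2home=True, zip=True, encrypt=True, pwd="correct-horse", share=True)
print(link)   # with share=True a public rclone link is returned instead of self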
+    def from_cloud(
+        self,
+        cloud: str,
+        remotepath: OPLike = None,
+        decrypt: bool = False,
+        unzip: bool = False, # type: ignore # pylint: disable=W0621
+        key: Optional[bytes] = None,
+        pwd: Optional[str] = None,
+        rel2home: bool = False,
+        os_specific: bool = False,
+        strict: bool = True,
+        transfers: int = 10,
+        root: Optional[str] = "myhome",
+        verbose: bool = True,
+        overwrite: bool = True,
+        merge: bool = False,
+    ):
         if remotepath is None:
             remotepath = self.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict)
             remotepath += ".zip" if unzip else ""
             remotepath += ".enc" if decrypt else ""
-        else:
+        else:
+            remotepath = PathExtended(remotepath)
         localpath = self.expanduser().absolute()
         localpath += ".zip" if unzip else ""
         localpath += ".enc" if decrypt else ""
-        rclone_cmd = f"""rclone copyto '{cloud}:{remotepath.as_posix()}' '{localpath.as_posix()}' {
+        rclone_cmd = f"""rclone copyto '{cloud}:{remotepath.as_posix()}' '{localpath.as_posix()}' {"--progress" if verbose else ""} --transfers={transfers}"""
         from machineconfig.utils.terminal import Terminal
-
+
+        if verbose:
+            print(f"{'⬇️' * 5} DOWNLOADING with `{rclone_cmd}`")
         shell_to_use = "powershell" if sys.platform == "win32" else "bash"
         res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use)
         success = res.is_successful(strict_err=False, strict_returcode=True)
         if not success:
             res.print(capture=False, desc="Cloud Storage Operation")
             return None
-        if decrypt:
-
+        if decrypt:
+            localpath = localpath.decrypt(key=key, pwd=pwd, inplace=True)
+        if unzip:
+            localpath = localpath.unzip(inplace=True, verbose=True, overwrite=overwrite, content=True, merge=merge)
         return localpath
+
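from_cloud is the mirror image; same assumptions as the upload sketch (hypothetical import, an existing "gdrive" rclone remote, and a file previously pushed with matching rel2home/zip/encrypt flags).

# Sketch only; returns None when the rclone copy fails, the local path otherwise.
from machineconfig.utils.path_extended import PathExtended  # hypothetical

target = PathExtended("~/dotfiles/config.toml").expanduser()
restored = target.from_cloud(cloud="gdrive", rel2home=True, unzip=True, decrypt=True, pwd="correct-horse")
print(restored)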
     def sync_to_cloud(self, cloud: str, sync_up: bool = False, sync_down: bool = False, os_specific: bool = False, rel2home: bool = True, transfers: int = 10, delete: bool = False, root: Optional[str] = "myhome", verbose: bool = True):
         tmp_path_obj = self.expanduser().absolute()
         tmp_path_obj.parent.mkdir(parents=True, exist_ok=True)
@@ -648,9 +949,11 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
         print(f"SYNCING 🔄️ {source} {'>' * 15} {target}`")
         rclone_cmd = f"""rclone sync '{source}' '{target}' """
         rclone_cmd += f" --progress --transfers={transfers} --verbose"
-        rclone_cmd +=
+        rclone_cmd += " --delete-during" if delete else ""
         from machineconfig.utils.terminal import Terminal
-
+
+        if verbose:
+            print(rclone_cmd)
         shell_to_use = "powershell" if sys.platform == "win32" else "bash"
         res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use)
         success = res.is_successful(strict_err=False, strict_returcode=True)