machineconfig 2.0-py3-none-any.whl → 2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of machineconfig might be problematic.
- machineconfig/cluster/cloud_manager.py +0 -3
- machineconfig/cluster/data_transfer.py +0 -1
- machineconfig/cluster/file_manager.py +0 -1
- machineconfig/cluster/job_params.py +0 -3
- machineconfig/cluster/loader_runner.py +0 -3
- machineconfig/cluster/remote_machine.py +0 -1
- machineconfig/cluster/script_notify_upon_completion.py +0 -1
- machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +3 -5
- machineconfig/cluster/sessions_managers/archive/session_managers.py +0 -1
- machineconfig/cluster/sessions_managers/enhanced_command_runner.py +17 -57
- machineconfig/cluster/sessions_managers/wt_local.py +36 -110
- machineconfig/cluster/sessions_managers/wt_local_manager.py +42 -112
- machineconfig/cluster/sessions_managers/wt_remote.py +23 -30
- machineconfig/cluster/sessions_managers/wt_remote_manager.py +20 -62
- machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +10 -15
- machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +27 -127
- machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +10 -43
- machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +22 -101
- machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +11 -39
- machineconfig/cluster/sessions_managers/zellij_local.py +49 -102
- machineconfig/cluster/sessions_managers/zellij_local_manager.py +34 -78
- machineconfig/cluster/sessions_managers/zellij_remote.py +17 -24
- machineconfig/cluster/sessions_managers/zellij_remote_manager.py +7 -13
- machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +4 -2
- machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +6 -6
- machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +18 -88
- machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +2 -6
- machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +12 -40
- machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +3 -2
- machineconfig/cluster/templates/cli_click.py +0 -1
- machineconfig/cluster/templates/cli_gooey.py +0 -2
- machineconfig/cluster/templates/cli_trogon.py +0 -1
- machineconfig/cluster/templates/run_cloud.py +0 -1
- machineconfig/cluster/templates/run_cluster.py +0 -1
- machineconfig/cluster/templates/run_remote.py +0 -1
- machineconfig/cluster/templates/utils.py +26 -10
- machineconfig/jobs/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/linux/msc/cli_agents.sh +16 -0
- machineconfig/jobs/python/check_installations.py +1 -0
- machineconfig/jobs/python/create_bootable_media.py +0 -2
- machineconfig/jobs/python/python_ve_symlink.py +9 -11
- machineconfig/jobs/python/tasks.py +0 -1
- machineconfig/jobs/python/vscode/api.py +5 -5
- machineconfig/jobs/python/vscode/link_ve.py +13 -14
- machineconfig/jobs/python/vscode/select_interpreter.py +21 -22
- machineconfig/jobs/python/vscode/sync_code.py +9 -13
- machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_custom_installers/archive/ngrok.py +13 -13
- machineconfig/jobs/python_custom_installers/dev/aider.py +7 -15
- machineconfig/jobs/python_custom_installers/dev/alacritty.py +9 -18
- machineconfig/jobs/python_custom_installers/dev/brave.py +10 -19
- machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +8 -15
- machineconfig/jobs/python_custom_installers/dev/code.py +14 -21
- machineconfig/jobs/python_custom_installers/dev/cursor.py +3 -14
- machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +8 -7
- machineconfig/jobs/python_custom_installers/dev/espanso.py +15 -19
- machineconfig/jobs/python_custom_installers/dev/goes.py +5 -12
- machineconfig/jobs/python_custom_installers/dev/lvim.py +9 -17
- machineconfig/jobs/python_custom_installers/dev/nerdfont.py +12 -19
- machineconfig/jobs/python_custom_installers/dev/redis.py +12 -20
- machineconfig/jobs/python_custom_installers/dev/wezterm.py +12 -19
- machineconfig/jobs/python_custom_installers/dev/winget.py +5 -23
- machineconfig/jobs/python_custom_installers/docker.py +12 -21
- machineconfig/jobs/python_custom_installers/gh.py +11 -19
- machineconfig/jobs/python_custom_installers/hx.py +32 -16
- machineconfig/jobs/python_custom_installers/warp-cli.py +12 -20
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/windows/archive/archive_pygraphviz.ps1 +1 -1
- machineconfig/jobs/windows/msc/cli_agents.bat +0 -0
- machineconfig/jobs/windows/msc/cli_agents.ps1 +0 -0
- machineconfig/jobs/windows/start_terminal.ps1 +1 -1
- machineconfig/profile/create.py +29 -22
- machineconfig/profile/create_hardlinks.py +26 -19
- machineconfig/profile/shell.py +51 -28
- machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/cloud/init.sh +2 -2
- machineconfig/scripts/linux/checkout_versions +1 -1
- machineconfig/scripts/linux/choose_wezterm_theme +1 -1
- machineconfig/scripts/linux/cloud_copy +1 -1
- machineconfig/scripts/linux/cloud_manager +1 -1
- machineconfig/scripts/linux/cloud_mount +1 -1
- machineconfig/scripts/linux/cloud_repo_sync +1 -1
- machineconfig/scripts/linux/cloud_sync +1 -1
- machineconfig/scripts/linux/croshell +1 -1
- machineconfig/scripts/linux/devops +4 -6
- machineconfig/scripts/linux/fire +1 -1
- machineconfig/scripts/linux/fire_agents +3 -2
- machineconfig/scripts/linux/ftpx +1 -1
- machineconfig/scripts/linux/gh_models +1 -1
- machineconfig/scripts/linux/kill_process +1 -1
- machineconfig/scripts/linux/mcinit +1 -1
- machineconfig/scripts/linux/repos +1 -1
- machineconfig/scripts/linux/scheduler +1 -1
- machineconfig/scripts/linux/start_slidev +1 -1
- machineconfig/scripts/linux/start_terminals +1 -1
- machineconfig/scripts/linux/url2md +1 -1
- machineconfig/scripts/linux/warp-cli.sh +122 -0
- machineconfig/scripts/linux/wifi_conn +1 -1
- machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/croshell.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__init__.py +0 -0
- machineconfig/scripts/python/ai/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/generate_files.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/generate_files.py +84 -0
- machineconfig/scripts/python/ai/instructions/python/dev.instructions.md +2 -2
- machineconfig/scripts/python/ai/mcinit.py +7 -3
- machineconfig/scripts/python/ai/scripts/lint_and_type_check.sh +10 -5
- machineconfig/scripts/python/cloud_copy.py +1 -1
- machineconfig/scripts/python/cloud_mount.py +1 -1
- machineconfig/scripts/python/cloud_repo_sync.py +4 -4
- machineconfig/scripts/python/croshell.py +5 -3
- machineconfig/scripts/python/devops_add_identity.py +1 -1
- machineconfig/scripts/python/devops_add_ssh_key.py +1 -1
- machineconfig/scripts/python/devops_backup_retrieve.py +1 -1
- machineconfig/scripts/python/devops_update_repos.py +140 -52
- machineconfig/scripts/python/dotfile.py +1 -1
- machineconfig/scripts/python/fire_agents.py +28 -9
- machineconfig/scripts/python/fire_jobs.py +3 -4
- machineconfig/scripts/python/ftpx.py +2 -1
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-313.pyc +0 -0
- machineconfig/scripts/python/helpers/helpers2.py +2 -2
- machineconfig/scripts/python/helpers/helpers4.py +1 -2
- machineconfig/scripts/python/helpers/repo_sync_helpers.py +1 -1
- machineconfig/scripts/python/mount_nfs.py +1 -1
- machineconfig/scripts/python/mount_ssh.py +1 -1
- machineconfig/scripts/python/repos.py +1 -1
- machineconfig/scripts/python/start_slidev.py +1 -1
- machineconfig/scripts/python/wsl_windows_transfer.py +1 -1
- machineconfig/scripts/windows/checkout_version.ps1 +1 -3
- machineconfig/scripts/windows/choose_wezterm_theme.ps1 +1 -3
- machineconfig/scripts/windows/cloud_copy.ps1 +2 -6
- machineconfig/scripts/windows/cloud_manager.ps1 +1 -1
- machineconfig/scripts/windows/cloud_repo_sync.ps1 +1 -2
- machineconfig/scripts/windows/cloud_sync.ps1 +2 -2
- machineconfig/scripts/windows/croshell.ps1 +2 -2
- machineconfig/scripts/windows/devops.ps1 +1 -4
- machineconfig/scripts/windows/dotfile.ps1 +1 -3
- machineconfig/scripts/windows/fire.ps1 +1 -1
- machineconfig/scripts/windows/ftpx.ps1 +2 -2
- machineconfig/scripts/windows/gpt.ps1 +1 -1
- machineconfig/scripts/windows/kill_process.ps1 +1 -2
- machineconfig/scripts/windows/mcinit.ps1 +1 -1
- machineconfig/scripts/windows/mount_nfs.ps1 +1 -1
- machineconfig/scripts/windows/mount_ssh.ps1 +1 -1
- machineconfig/scripts/windows/pomodoro.ps1 +1 -1
- machineconfig/scripts/windows/py2exe.ps1 +1 -3
- machineconfig/scripts/windows/repos.ps1 +1 -1
- machineconfig/scripts/windows/scheduler.ps1 +1 -1
- machineconfig/scripts/windows/snapshot.ps1 +2 -2
- machineconfig/scripts/windows/start_slidev.ps1 +1 -1
- machineconfig/scripts/windows/start_terminals.ps1 +1 -1
- machineconfig/scripts/windows/wifi_conn.ps1 +1 -1
- machineconfig/scripts/windows/wsl_windows_transfer.ps1 +1 -3
- machineconfig/settings/lf/linux/lfrc +1 -1
- machineconfig/settings/linters/.ruff_cache/.gitignore +2 -0
- machineconfig/settings/linters/.ruff_cache/CACHEDIR.TAG +1 -0
- machineconfig/settings/lvim/windows/archive/config_additional.lua +1 -1
- machineconfig/settings/svim/linux/init.toml +1 -1
- machineconfig/settings/svim/windows/init.toml +1 -1
- machineconfig/setup_linux/web_shortcuts/croshell.sh +0 -54
- machineconfig/setup_linux/web_shortcuts/interactive.sh +6 -6
- machineconfig/setup_windows/web_shortcuts/all.ps1 +2 -2
- machineconfig/setup_windows/web_shortcuts/ascii_art.ps1 +1 -1
- machineconfig/setup_windows/web_shortcuts/croshell.ps1 +1 -1
- machineconfig/setup_windows/web_shortcuts/interactive.ps1 +5 -5
- machineconfig/setup_windows/wt_and_pwsh/install_fonts.ps1 +51 -15
- machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +66 -12
- machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +44 -36
- machineconfig/utils/ai/generate_file_checklist.py +8 -10
- machineconfig/utils/ai/url2md.py +4 -2
- machineconfig/utils/cloud/onedrive/setup_oauth.py +1 -0
- machineconfig/utils/cloud/onedrive/transaction.py +63 -98
- machineconfig/utils/code.py +60 -39
- machineconfig/utils/installer.py +27 -33
- machineconfig/utils/installer_utils/installer_abc.py +8 -7
- machineconfig/utils/installer_utils/installer_class.py +149 -70
- machineconfig/utils/links.py +22 -11
- machineconfig/utils/notifications.py +197 -0
- machineconfig/utils/options.py +29 -23
- machineconfig/utils/path.py +13 -6
- machineconfig/utils/path_reduced.py +485 -216
- machineconfig/utils/procs.py +47 -41
- machineconfig/utils/scheduling.py +0 -1
- machineconfig/utils/ssh.py +157 -76
- machineconfig/utils/terminal.py +82 -37
- machineconfig/utils/utils.py +12 -10
- machineconfig/utils/utils2.py +38 -48
- machineconfig/utils/utils5.py +183 -116
- machineconfig/utils/ve.py +9 -4
- {machineconfig-2.0.dist-info → machineconfig-2.1.dist-info}/METADATA +3 -2
- {machineconfig-2.0.dist-info → machineconfig-2.1.dist-info}/RECORD +200 -217
- machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/python_ve_symlink.cpython-311.pyc +0 -0
- machineconfig/jobs/python/archive/python_tools.txt +0 -12
- machineconfig/jobs/python/vscode/__pycache__/select_interpreter.cpython-311.pyc +0 -0
- machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python_generic_installers/update.py +0 -3
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
- machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/linux/activate_ve +0 -87
- machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_copy.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_mount.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_sync.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_backup_retrieve.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/get_zellij_cmd.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/repo_sync_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/windows/activate_ve.ps1 +0 -54
- {machineconfig-2.0.dist-info → machineconfig-2.1.dist-info}/WHEEL +0 -0
- {machineconfig-2.0.dist-info → machineconfig-2.1.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,3 @@
-
-
 from machineconfig.utils.utils2 import randstr
 from datetime import datetime
 import time
@@ -9,38 +7,46 @@ import subprocess
 from platform import system
 from typing import Any, Optional, Union, Callable, TypeAlias, Literal
 import os
+# import warnings


-OPLike: TypeAlias = Union[str,
-PLike: TypeAlias = Union[str,
-FILE_MODE: TypeAlias = Literal[
+OPLike: TypeAlias = Union[str, "PathExtended", Path, None]
+PLike: TypeAlias = Union[str, "PathExtended", Path]
+FILE_MODE: TypeAlias = Literal["r", "w", "x", "a"]
 SHUTIL_FORMATS: TypeAlias = Literal["zip", "tar", "gztar", "bztar", "xztar"]


 def pwd2key(password: str, salt: Optional[bytes] = None, iterations: int = 10) -> bytes: # Derive a secret key from a given password and salt"""
 import base64
+
 if salt is None:
 import hashlib
+
 m = hashlib.sha256()
 m.update(password.encode(encoding="utf-8"))
 return base64.urlsafe_b64encode(s=m.digest()) # make url-safe bytes required by Ferent.
 from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+
 return base64.urlsafe_b64encode(PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=iterations, backend=None).derive(password.encode()))
+
+
 def encrypt(msg: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None, salted: bool = True, iteration: Optional[int] = None, gen_key: bool = False) -> bytes:
 import base64
 from cryptography.fernet import Fernet
+
 salt, iteration = None, None
 if pwd is not None: # generate it from password
 assert (key is None) and (type(pwd) is str), "❌ You can either pass key or pwd, or none of them, but not both."
 import secrets
+
 iteration = iteration or secrets.randbelow(exclusive_upper_bound=1_000_000)
 salt = secrets.token_bytes(nbytes=16) if salted else None
 key_resolved = pwd2key(password=pwd, salt=salt, iterations=iteration)
 elif key is None:
 if gen_key:
 key_resolved = Fernet.generate_key()
-Path.home().joinpath(
+Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").write_bytes(key_resolved)
 else:
 try:
 key_resolved = Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").read_bytes()
@@ -48,75 +54,105 @@ def encrypt(msg: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None,
 except FileNotFoundError as err:
 print("\n" * 3, "~" * 50, """Consider Loading up your dotfiles or pass `gen_key=True` to make and save one.""", "~" * 50, "\n" * 3)
 raise FileNotFoundError(err) from err
-elif isinstance(key, (str,
-
-
+elif isinstance(key, (str, PathExtended, Path)):
+key_resolved = Path(key).read_bytes() # a path to a key file was passed, read it:
+elif type(key) is bytes:
+key_resolved = key # key passed explicitly
+else:
+raise TypeError("❌ Key must be either a path, bytes object or None.")
 code = Fernet(key=key_resolved).encrypt(msg)
-if pwd is not None and salt is not None and iteration is not None:
+if pwd is not None and salt is not None and iteration is not None:
+return base64.urlsafe_b64encode(b"%b%b%b" % (salt, iteration.to_bytes(4, "big"), base64.urlsafe_b64decode(code)))
 return code
+
+
 def decrypt(token: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None, salted: bool = True) -> bytes:
 import base64
+
 if pwd is not None:
 assert key is None, "❌ You can either pass key or pwd, or none of them, but not both."
 if salted:
 decoded = base64.urlsafe_b64decode(token)
 salt, iterations, token = decoded[:16], decoded[16:20], base64.urlsafe_b64encode(decoded[20:])
-key_resolved = pwd2key(password=pwd, salt=salt, iterations=int.from_bytes(bytes=iterations, byteorder=
-else:
+key_resolved = pwd2key(password=pwd, salt=salt, iterations=int.from_bytes(bytes=iterations, byteorder="big"))
+else:
+key_resolved = pwd2key(password=pwd) # trailing `;` prevents IPython from caching the result.
 elif type(key) is bytes:
 assert pwd is None, "❌ You can either pass key or pwd, or none of them, but not both."
 key_resolved = key # passsed explicitly
-elif key is None:
-
-
+elif key is None:
+key_resolved = Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").read_bytes() # read from file
+elif isinstance(key, (str, Path)):
+key_resolved = Path(key).read_bytes() # passed a path to a file containing kwy
+else:
+raise TypeError(f"❌ Key must be either str, P, Path, bytes or None. Recieved: {type(key)}")
 from cryptography.fernet import Fernet
+
 return Fernet(key=key_resolved).decrypt(token)

-
+
+def validate_name(astring: str, replace: str = "_") -> str:
 import re
-
+
+return re.sub(r"[^-a-zA-Z0-9_.()]+", replace, str(astring))
+
+
 def timestamp(fmt: Optional[str] = None, name: Optional[str] = None) -> str:
-return ((name +
+return ((name + "_") if name is not None else "") + datetime.now().strftime(fmt or "%Y-%m-%d-%I-%M-%S-%p-%f") # isoformat is not compatible with file naming convention, fmt here is.


 def modify_text(txt_raw: str, txt_search: str, txt_alt: Union[str, Callable[[str], str]], replace_line: bool = True, notfound_append: bool = False, prepend: bool = False, strict: bool = False):
 lines, bingo = txt_raw.split("\n"), False
 if not replace_line: # no need for line splitting
 assert isinstance(txt_alt, str), f"txt_alt must be a string if notfound_append is True. It is not: {txt_alt}"
-if txt_search in txt_raw:
+if txt_search in txt_raw:
+return txt_raw.replace(txt_search, txt_alt)
 return txt_raw + "\n" + txt_alt if notfound_append else txt_raw
 for idx, line in enumerate(lines):
 if txt_search in line:
-if isinstance(txt_alt, str):
-
+if isinstance(txt_alt, str):
+lines[idx] = txt_alt
+elif callable(txt_alt):
+lines[idx] = txt_alt(line)
 bingo = True
-if strict and not bingo:
+if strict and not bingo:
+raise ValueError(f"txt_search `{txt_search}` not found in txt_raw `{txt_raw}`")
 if bingo is False and notfound_append is True:
 assert isinstance(txt_alt, str), f"txt_alt must be a string if notfound_append is True. It is not: {txt_alt}"
-if prepend:
-
+if prepend:
+lines.insert(0, txt_alt)
+else:
+lines.append(txt_alt) # txt not found, add it anyway.
 return "\n".join(lines)


 class Compression:
 @staticmethod
-def compress_folder(
+def compress_folder(
+root_dir: str, op_path: str, base_dir: str, fmt: SHUTIL_FORMATS = "zip", verbose: bool = False, **kwargs: Any
+) -> str: # shutil works with folders nicely (recursion is done interally) # directory to be archived: root_dir\base_dir, unless base_dir is passed as absolute path. # when archive opened; base_dir will be found."""
 base_name = op_path[:-4] if op_path.endswith(".zip") else op_path # .zip is added automatically by library, hence we'd like to avoid repeating it if user sent it.
 import shutil
+
 return shutil.make_archive(base_name=base_name, format=fmt, root_dir=root_dir, base_dir=base_dir, verbose=verbose, **kwargs) # returned path possible have added extension.
+
 @staticmethod
-def zip_file(ip_path: str, op_path: str, arcname: Optional[str]= None, password: Optional[bytes] = None, mode: FILE_MODE = "w", **kwargs: Any):
+def zip_file(ip_path: str, op_path: str, arcname: Optional[str] = None, password: Optional[bytes] = None, mode: FILE_MODE = "w", **kwargs: Any):
 """arcname determines the directory of the file being archived inside the archive. Defaults to same as original directory except for drive.
 When changed, it should still include the file path in its end. If arcname = filename without any path, then, it will be in the root of the archive."""
 import zipfile
+
 with zipfile.ZipFile(op_path, mode=mode) as jungle_zip:
-if password is not None:
+if password is not None:
+jungle_zip.setpassword(pwd=password)
 jungle_zip.write(filename=str(ip_path), arcname=str(arcname) if arcname is not None else None, compress_type=zipfile.ZIP_DEFLATED, **kwargs)
 return Path(op_path)
+
 @staticmethod
-def unzip(ip_path: str, op_path: str, fname: Optional[str]= None, password: Optional[bytes] = None, memory: bool = False, **kwargs: Any) -> Path | dict[str, bytes] | bytes:
+def unzip(ip_path: str, op_path: str, fname: Optional[str] = None, password: Optional[bytes] = None, memory: bool = False, **kwargs: Any) -> Path | dict[str, bytes] | bytes:
 import zipfile
-
+
+with zipfile.ZipFile(str(ip_path), "r") as zipObj:
 if memory:
 return {name: zipObj.read(name) for name in zipObj.namelist()} if fname is None else zipObj.read(fname)
 if fname is None:
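The hunk above completes the rewrite of the password-based branch of encrypt/decrypt: when a pwd is supplied, the random 16-byte salt and the 4-byte iteration count are packed in front of the Fernet token on encryption and peeled off again on decryption. A minimal round-trip sketch of that behaviour, assuming the helpers are importable from machineconfig.utils.path_reduced (the module this diff appears to belong to, judging by the +485 -216 entry in the file list above):

# Hypothetical usage sketch; the import path is an assumption, only the signatures shown in this diff are relied on.
from machineconfig.utils.path_reduced import encrypt, decrypt

token = encrypt(msg=b"secret payload", pwd="hunter2", salted=True)  # salt and iteration count are packed into the token
assert decrypt(token, pwd="hunter2", salted=True) == b"secret payload"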
@@ -125,72 +161,101 @@ class Compression:
 else:
 zipObj.extract(member=str(fname), path=str(op_path), pwd=password)
 return Path(op_path) / fname
+
 @staticmethod
 def gz(file: str, op_path: str): # see this on what to use: https://stackoverflow.com/questions/10540935/what-is-the-difference-between-tar-and-zip
 import shutil
 import gzip
-
-
+
+with open(file, "rb") as f_in:
+with gzip.open(op_path, "wb") as f_out:
+shutil.copyfileobj(f_in, f_out)
 return Path(op_path)
+
 @staticmethod
 def ungz(path: str, op_path: str):
 import gzip
 import shutil
-
+
+with gzip.open(path, "r") as f_in, open(op_path, "wb") as f_out:
+shutil.copyfileobj(f_in, f_out)
 return Path(op_path)
+
 @staticmethod
 def unbz(path: str, op_path: str):
 import bz2
 import shutil
-
+
+with bz2.BZ2File(path, "r") as fr, open(str(op_path), "wb") as fw:
+shutil.copyfileobj(fr, fw)
 return Path(op_path)
+
 @staticmethod
 def xz(path: str, op_path: str):
 import lzma
-
+
+with lzma.open(op_path, "w") as f:
+f.write(Path(path).read_bytes())
+
 @staticmethod
 def unxz(ip_path: str, op_path: str):
 import lzma
-
+
+with lzma.open(ip_path) as file:
+Path(op_path).write_bytes(file.read())
+
 @staticmethod
 def tar(path: str, op_path: str):
 import tarfile
-
+
+with tarfile.open(op_path, "w:gz") as tar_:
+tar_.add(str(path), arcname=os.path.basename(path))
 return Path(op_path)
+
 @staticmethod
-def untar(path: str, op_path: str, fname: Optional[str]= None, mode: Literal[
+def untar(path: str, op_path: str, fname: Optional[str] = None, mode: Literal["r", "w"] = "r", **kwargs: Any):
 import tarfile
+
 with tarfile.open(str(path), mode) as file:
-if fname is None:
-
+if fname is None:
+file.extractall(path=op_path, **kwargs) # extract all files in the archive
+else:
+file.extract(fname, **kwargs)
 return Path(op_path)


-
-class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
+class PathExtended(type(Path()), Path): # type: ignore # pylint: disable=E0241
 # ============= Path management ==================
-"""
+"""The default behaviour of methods acting on underlying disk object is to perform the action and return a new path referring to the mutated object in disk drive.
 However, there is a flag `orig` that makes the function return orignal path object `self` as opposed to the new one pointing to new object.
 Additionally, the fate of the original object can be decided by a flag `inplace` which means `replace` it defaults to False and in essence, it deletes the original underlying object.
 This can be seen in `zip` and `encrypt` but not in `copy`, `move`, `retitle` because the fate of original file is dictated already.
 Furthermore, those methods are accompanied with print statement explaining what happened to the object."""
-
+
+def delete(self, sure: bool = False, verbose: bool = True) -> "PathExtended": # slf = self.expanduser().resolve() don't resolve symlinks.
 if not sure:
-if verbose:
+if verbose:
+print(f"❌ Did NOT DELETE because user is not sure. file: {repr(self)}.")
 return self
 if not self.exists():
 self.unlink(missing_ok=True)
-if verbose:
+if verbose:
+print(f"❌ Could NOT DELETE nonexisting file {repr(self)}. ")
 return self # broken symlinks exhibit funny existence behaviour, catch them here.
-if self.is_file() or self.is_symlink():
+if self.is_file() or self.is_symlink():
+self.unlink(missing_ok=True)
 else:
 import shutil
+
 shutil.rmtree(self, ignore_errors=False)
-if verbose:
+if verbose:
+print(f"🗑️ ❌ DELETED {repr(self)}.")
 return self
-
+
+def move(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, rel2it: bool = False, overwrite: bool = False, verbose: bool = True, parents: bool = True, content: bool = False) -> "PathExtended":
 path = self._resolve_path(folder=folder, name=name, path=path, default_name=self.absolute().name, rel2it=rel2it)
-if parents:
+if parents:
+path.parent.mkdir(parents=True, exist_ok=True)
 slf = self.expanduser().resolve()
 if content:
 assert self.is_dir(), NotADirectoryError(f"💥 When `content` flag is set to True, path must be a directory. It is not: `{repr(self)}`")
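The docstring added in the hunk above spells out the contract of the renamed PathExtended class (previously P): disk-mutating methods return a new path pointing at the result, `orig=True` returns the original `self` instead, and `inplace=True` deletes the original underlying object, as in `zip` and `encrypt`. A hedged sketch of those flags, with made-up file names and the import path assumed from this diff:

# Illustrative only; the paths are hypothetical and the module location is an assumption.
from machineconfig.utils.path_reduced import PathExtended

archive = PathExtended("~/tmp_results/report.txt").expanduser().zip(inplace=True)  # original file deleted, returns the new .zip path
original = PathExtended("~/tmp_results/notes.txt").expanduser().zip(orig=True)     # archive created, but the original path object is returned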
@@ -203,59 +268,80 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
 else:
 try:
 slf.rename(path) # self._return(res=path, inplace=True, operation='rename', orig=False, verbose=verbose, strict=True, msg='')
-except OSError as oe:
+except OSError as oe: # OSError: [Errno 18] Invalid cross-device link:
 # https://stackoverflow.com/questions/42392600/oserror-errno-18-invalid-cross-device-link
 import shutil
+
 shutil.move(str(slf), str(path))
 _ = oe
-if verbose:
+if verbose:
+print(f"🚚 MOVED {repr(self)} ==> {repr(path)}`")
 return path
-
+
+def copy(
+self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, content: bool = False, verbose: bool = True, append: Optional[str] = None, overwrite: bool = False, orig: bool = False
+) -> "PathExtended": # tested %100 # TODO: replace `content` flag with ability to interpret "*" in resolve method.
 dest = self._resolve_path(folder=folder, name=name, path=path, default_name=self.name, rel2it=False)
 dest = dest.expanduser().resolve()
 dest.parent.mkdir(parents=True, exist_ok=True)
 slf = self.expanduser().resolve()
 if dest == slf:
 dest = self.append(append if append is not None else f"_copy_{randstr()}")
-if not content and overwrite and dest.exists():
-
+if not content and overwrite and dest.exists():
+dest.delete(sure=True)
+if not content and not overwrite and dest.exists():
+raise FileExistsError(f"💥 Destination already exists: {repr(dest)}")
 if slf.is_file():
 import shutil
+
 shutil.copy(str(slf), str(dest))
-if verbose:
+if verbose:
+print(f"🖨️ COPIED {repr(slf)} ==> {repr(dest)}")
 elif slf.is_dir():
 dest = dest.parent if content else dest
 # from distutils.dir_util import copy_tree
 from shutil import copytree
+
 copytree(str(slf), str(dest))
-if verbose:
-
+if verbose:
+print(f"🖨️ COPIED {'Content of ' if content else ''} {repr(slf)} ==> {repr(dest)}")
+else:
+print(f"💥 Could NOT COPY. Not a file nor a path: {repr(slf)}.")
 return dest if not orig else self
+
 # ======================================= File Editing / Reading ===================================
-def download(self, folder: OPLike = None, name: Optional[str]= None, allow_redirects: bool = True, timeout: Optional[int] = None, params: Any = None) ->
+def download(self, folder: OPLike = None, name: Optional[str] = None, allow_redirects: bool = True, timeout: Optional[int] = None, params: Any = None) -> "PathExtended":
 import requests
+
 response = requests.get(self.as_url_str(), allow_redirects=allow_redirects, timeout=timeout, params=params) # Alternative: from urllib import request; request.urlopen(url).read().decode('utf-8').
 assert response.status_code == 200, f"Download failed with status code {response.status_code}\n{response.text}"
-if name is not None:
+if name is not None:
+f_name = name
 else:
 try:
-f_name = response.headers[
+f_name = response.headers["Content-Disposition"].split("filename=")[1].replace('"', "")
 except (KeyError, IndexError):
-f_name = validate_name(str(
-dest_path = (
+f_name = validate_name(str(PathExtended(response.history[-1].url).name if len(response.history) > 0 else PathExtended(response.url).name))
+dest_path = (PathExtended.home().joinpath("Downloads") if folder is None else PathExtended(folder)).joinpath(f_name)
 dest_path.parent.mkdir(parents=True, exist_ok=True)
 dest_path.write_bytes(response.content)
 return dest_path
-
-
+
+def _return(
+self, res: Union["PathExtended", "Path"], operation: Literal["rename", "delete", "Whack"], inplace: bool = False, overwrite: bool = False, orig: bool = False, verbose: bool = False, strict: bool = True, msg: str = "", __delayed_msg__: str = ""
+) -> "PathExtended":
+res = PathExtended(res)
 if inplace:
 assert self.exists(), f"`inplace` flag is only relevant if the path exists. It doesn't {self}"
 if operation == "rename":
-if overwrite and res.exists():
+if overwrite and res.exists():
+res.delete(sure=True, verbose=verbose)
 if not overwrite and res.exists():
-if strict:
+if strict:
+raise FileExistsError(f"❌ RENAMING failed. File `{res}` already exists.")
 else:
-if verbose:
+if verbose:
+print(f"⚠️ SKIPPED RENAMING {repr(self)} ➡️ {repr(res)} because FileExistsError and scrict=False policy.")
 return self if orig else res
 self.rename(res)
 msg = msg or f"RENAMED {repr(self)} ➡️ {repr(res)}"
@@ -263,146 +349,225 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
 self.delete(sure=True, verbose=False)
 __delayed_msg__ = f"DELETED 🗑️❌ {repr(self)}."
 if verbose and msg != "":
-try:
-
+try:
+print(msg) # emojie print error.
+except UnicodeEncodeError:
+print("P._return warning: UnicodeEncodeError, could not print message.")
 if verbose and __delayed_msg__ != "":
-try:
-
+try:
+print(__delayed_msg__)
+except UnicodeEncodeError:
+print("P._return warning: UnicodeEncodeError, could not print message.")
 return self if orig else res
-
+
+def append(self, name: str = "", index: bool = False, suffix: Optional[str] = None, verbose: bool = True, **kwargs: Any) -> "PathExtended":
 """Returns a new path object with the name appended to the stem of the path. If `index` is True, the name will be the index of the path in the parent directory."""
 if index:
-appended_name = f
+appended_name = f"""{name}_{len(self.parent.search(f"*{self.name.split('.')[0]}*"))}"""
 return self.append(name=appended_name, index=False, verbose=verbose, suffix=suffix, **kwargs)
-full_name =
-full_suffix = suffix or
-subpath = self.name.split(
+full_name = name or ("_" + str(timestamp()))
+full_suffix = suffix or "".join(("bruh" + self).suffixes)
+subpath = self.name.split(".")[0] + full_name + full_suffix
 return self._return(self.parent.joinpath(subpath), operation="rename", verbose=verbose, **kwargs)
+
 def with_name(self, name: str, verbose: bool = True, inplace: bool = False, overwrite: bool = False, **kwargs: Any):
 return self._return(self.parent / name, verbose=verbose, operation="rename", inplace=inplace, overwrite=overwrite, **kwargs)
-
+
+def __deepcopy__(self, *args: Any, **kwargs: Any) -> "PathExtended":
 _ = args, kwargs
-return
-
-def
+return PathExtended(str(self))
+
+def __getstate__(self) -> str:
+return str(self)
+
+def __add__(self, other: PLike) -> "PathExtended":
 return self.parent.joinpath(self.name + str(other)) # used append and prepend if the addition wanted to be before suffix.
-
+
+def __radd__(self, other: PLike) -> "PathExtended":
 return self.parent.joinpath(str(other) + self.name) # other + P and `other` doesn't know how to make this addition.
-
-
+
+def __sub__(self, other: PLike) -> "PathExtended":
+res = PathExtended(str(self).replace(str(other), ""))
 return (res[1:] if str(res[0]) in {"\\", "/"} else res) if len(res.parts) else res # paths starting with "/" are problematic. e.g ~ / "/path" doesn't work.

-def rel2home(self
-
-
-
-
+def rel2home(self) -> "PathExtended":
+return self._return(PathExtended(self.expanduser().absolute().relative_to(Path.home())), operation="Whack") # very similat to collapseuser but without "~" being added so its consistent with rel2cwd.
+
+def collapseuser(self, strict: bool = True, placeholder: str = "~") -> "PathExtended": # opposite of `expanduser` resolve is crucial to fix Windows cases insensitivty problem.
+if strict:
+assert str(self.expanduser().absolute().resolve()).startswith(str(PathExtended.home())), ValueError(f"`{PathExtended.home()}` is not in the subpath of `{self}`")
+if str(self).startswith(placeholder) or PathExtended.home().as_posix() not in self.resolve().as_posix():
+return self
+return self._return(res=PathExtended(placeholder) / (self.expanduser().absolute().resolve(strict=strict) - PathExtended.home()), operation="Whack") # resolve also solves the problem of Windows case insensitivty.
+
 def __getitem__(self, slici: Union[int, list[int], slice]):
-if isinstance(slici, list):
-
-
+if isinstance(slici, list):
+return PathExtended(*[self[item] for item in slici])
+elif isinstance(slici, int):
+return PathExtended(self.parts[slici])
+return PathExtended(*self.parts[slici]) # must be a slice
+
 def split(self, at: Optional[str] = None, index: Optional[int] = None, sep: Literal[-1, 0, 1] = 1, strict: bool = True):
 if index is None and at is not None: # at is provided # ==================================== Splitting
 if not strict: # behaves like split method of string
 one, two = (items := str(self).split(sep=str(at)))[0], items[1]
-one, two =
+one, two = PathExtended(one[:-1]) if one.endswith("/") else PathExtended(one), PathExtended(two[1:]) if two.startswith("/") else PathExtended(two)
 else: # "strict": # raises an error if exact match is not found.
 index = self.parts.index(str(at))
-one, two = self[0:index], self[index + 1:] # both one and two do not include the split item.
+one, two = self[0:index], self[index + 1 :] # both one and two do not include the split item.
 elif index is not None and at is None: # index is provided
-one, two = self[:index],
+one, two = self[:index], PathExtended(*self.parts[index + 1 :])
 at = self.parts[index] # this is needed below.
-else:
-
-
+else:
+raise ValueError("Either `index` or `at` can be provided. Both are not allowed simulatanesouly.")
+if sep == 0:
+return one, two # neither of the portions get the sperator appended to it. # ================================ appending `at` to one of the portions
+elif sep == 1:
+return one, PathExtended(at) / two # append it to right portion
 elif sep == -1:
 return one / at, two # append it to left portion.
-else:
+else:
+raise ValueError(f"`sep` should take a value from the set [-1, 0, 1] but got {sep}")
+
 def __repr__(self): # this is useful only for the console
 if self.is_symlink():
-try:
-
+try:
+target = self.resolve() # broken symolinks are funny, and almost always fail `resolve` method.
+except Exception:
+target = "BROKEN LINK " + str(self) # avoid infinite recursions for broken links.
 return "🔗 Symlink '" + str(self) + "' ==> " + (str(target) if target == self else str(target))
-elif self.is_absolute():
-
-
+elif self.is_absolute():
+return self._type() + " '" + str(self.clickable()) + "'" + (" | " + datetime.fromtimestamp(self.stat().st_ctime).isoformat()[:-7].replace("T", " ") if self.exists() else "") + (f" | {self.size()} Mb" if self.is_file() else "")
+elif "http" in str(self):
+return "🕸️ URL " + str(self.as_url_str())
+else:
+return "📍 Relative " + "'" + str(self) + "'" # not much can be said about a relative path.
+
 # def to_str(self) -> str: return str(self)
-def size(self, units: Literal[
+def size(self, units: Literal["b", "kb", "mb", "gb"] = "mb") -> float: # ===================================== File Specs ==========================================================================================
 total_size = self.stat().st_size if self.is_file() else sum([item.stat().st_size for item in self.rglob("*") if item.is_file()])
 tmp: int
 match units:
-case "b":
-
-case "
-
+case "b":
+tmp = 1024**0
+case "kb":
+tmp = 1024**1
+case "mb":
+tmp = 1024**2
+case "gb":
+tmp = 1024**3
 return round(number=total_size / tmp, ndigits=1)
-
-
-
-
-
-
-
-
-
-
+
+# def time(self, which: Literal["m", "c", "a"] = "m", **kwargs: Any):
+# """* `m`: last mofidication of content, i.e. the time it was created.
+# * `c`: last status change (its inode is changed, permissions, path, but not content)
+# * `a`: last access (read)
+# """
+# warnings.warn(
+# "The 'time' method is deprecated. Use 'datetime.fromtimestamp(self.stat().st_mtime)' for 'm', "
+# "'datetime.fromtimestamp(self.stat().st_ctime)' for 'c', or "
+# "'datetime.fromtimestamp(self.stat().st_atime)' for 'a' instead.",
+# DeprecationWarning,
+# stacklevel=2
+# )
+# match which:
+# case "m": tmp = self.stat().st_mtime
+# case "a": tmp = self.stat().st_atime
+# case "c": tmp = self.stat().st_ctime
+# return datetime.fromtimestamp(tmp, **kwargs)

 # ================================ String Nature management ====================================
-def clickable(self
-
+def clickable(self) -> "PathExtended":
+return self._return(res=PathExtended(self.expanduser().resolve().as_uri()), operation="Whack")
+
+def as_url_str(self) -> "str":
+return self.as_posix().replace("https:/", "https://").replace("http:/", "http://")
+
 def as_zip_path(self):
 import zipfile
+
 res = self.expanduser().resolve()
 return zipfile.Path(res) # .str.split(".zip") tmp=res[1]+(".zip" if len(res) > 2 else ""); root=res[0]+".zip", at=P(tmp).as_posix()) # TODO
+
 # ========================== override =======================================
-def __setitem__(self, key: Union[
-fullparts, new = list(self.parts), list(
+def __setitem__(self, key: Union["str", int, slice], value: PLike):
+fullparts, new = list(self.parts), list(PathExtended(value).parts)
 if type(key) is str:
 idx = fullparts.index(key)
 fullparts.remove(key)
-fullparts = fullparts[:idx] + new + fullparts[idx + 1:]
-elif type(key) is int:
-
-
+fullparts = fullparts[:idx] + new + fullparts[idx + 1 :]
+elif type(key) is int:
+fullparts = fullparts[:key] + new + fullparts[key + 1 :]
+elif type(key) is slice:
+fullparts = fullparts[: (0 if key.start is None else key.start)] + new + fullparts[(len(fullparts) if key.stop is None else key.stop) :]
+self._str = str(PathExtended(*fullparts)) # pylint: disable=W0201 # similar attributes: # self._parts # self._pparts # self._cparts # self._cached_cparts

 def _type(self):
 if self.absolute():
-if self.is_file():
-
+if self.is_file():
+return "📄"
+elif self.is_dir():
+return "📁"
 return "👻NotExist"
 return "📍Relative"
+
 def symlink_to(self, target: PLike, verbose: bool = True, overwrite: bool = False, orig: bool = False, strict: bool = True): # type: ignore[override] # pylint: disable=W0237
 self.parent.mkdir(parents=True, exist_ok=True)
-target_obj =
-if strict:
-
+target_obj = PathExtended(target).expanduser().resolve()
+if strict:
+assert target_obj.exists(), f"Target path `{target}` (aka `{target_obj}`) doesn't exist. This will create a broken link."
+if overwrite and (self.is_symlink() or self.exists()):
+self.delete(sure=True, verbose=verbose)
 from machineconfig.utils.terminal import Terminal
+
 if system() == "Windows" and not Terminal.is_user_admin(): # you cannot create symlink without priviliages.
 import win32com.shell.shell
-
+
+_proce_info = win32com.shell.shell.ShellExecuteEx(lpVerb="runas", lpFile=sys.executable, lpParameters=f" -c \"from pathlib import Path; Path(r'{self.expanduser()}').symlink_to(r'{str(target_obj)}')\"")
 # TODO update PATH for this to take effect immediately.
 time.sleep(1) # wait=True equivalent
-else:
-
+else:
+super(PathExtended, self.expanduser()).symlink_to(str(target_obj))
+return self._return(target_obj, operation="Whack", inplace=False, orig=orig, verbose=verbose, msg=f"LINKED {repr(self)} ➡️ {repr(target_obj)}")
+
 def resolve(self, strict: bool = False):
-try:
-
+try:
+return super(PathExtended, self).resolve(strict=strict)
+except OSError:
+return self
+
 # ======================================== Folder management =======================================
-def search(
-
+def search(
+self,
+pattern: str = "*",
+r: bool = False,
+files: bool = True,
+folders: bool = True,
+compressed: bool = False,
+dotfiles: bool = False,
+filters_total: Optional[list[Callable[[Any], bool]]] = None,
+not_in: Optional[list[str]] = None,
+exts: Optional[list[str]] = None,
+win_order: bool = False,
+) -> list["PathExtended"]:
 if isinstance(not_in, list):
 filters_notin = [lambda x: all([str(a_not_in) not in str(x) for a_not_in in not_in])] # type: ignore
-else:
+else:
+filters_notin = []
 if isinstance(exts, list):
 filters_extension = [lambda x: any([ext in x.name for ext in exts])] # type: ignore
-else:
+else:
+filters_extension = []
 filters_total = (filters_total or []) + filters_notin + filters_extension
-if not files:
-
+if not files:
+filters_total.append(lambda x: x.is_dir())
+if not folders:
+filters_total.append(lambda x: x.is_file())
 slf = self.expanduser().resolve()
 if ".zip" in str(slf) and compressed: # the root (self) is itself a zip archive (as opposed to some search results are zip archives)
 import zipfile
 import fnmatch
+
 root = slf.as_zip_path()
 if not r:
 raw = list(root.iterdir())
@@ -412,72 +577,112 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
|
|
|
412
577
|
res1 = [item for item in raw if fnmatch.fnmatch(item.at, pattern)]
|
|
413
578
|
# return res1.filter(lambda x: (folders or x.is_file()) and (files or x.is_dir()))
|
|
414
579
|
return [item for item in res1 if (folders or item.is_file()) and (files or item.is_dir())] # type: ignore
|
|
415
|
-
elif dotfiles:
|
|
580
|
+
elif dotfiles:
|
|
581
|
+
raw = slf.glob(pattern) if not r else self.rglob(pattern)
|
|
416
582
|
else:
|
|
417
583
|
from glob import glob
|
|
584
|
+
|
|
418
585
|
if r:
|
|
419
586
|
raw = glob(str(slf / "**" / pattern), recursive=r)
|
|
420
587
|
else:
|
|
421
588
|
raw = glob(str(slf.joinpath(pattern))) # glob ignroes dot and hidden files
|
|
422
589
|
if ".zip" not in str(slf) and compressed:
|
|
423
|
-
filters_notin = [
|
|
590
|
+
filters_notin = [PathExtended(comp_file).search(pattern=pattern, r=r, files=files, folders=folders, compressed=True, dotfiles=dotfiles, filters_total=filters_total, not_in=not_in, win_order=win_order) for comp_file in self.search("*.zip", r=r)]
|
|
424
591
|
from functools import reduce
|
|
592
|
+
|
|
425
593
|
# haha = List(filters_notin).reduce(func=lambda x, y: x + y)
|
|
426
594
|
haha = reduce(lambda x, y: x + y, filters_notin) if len(filters_notin) else []
|
|
427
595
|
raw = raw + haha # type: ignore
|
|
428
596
|
processed = []
|
|
429
597
|
for item in raw:
|
|
430
|
-
item_ =
|
|
598
|
+
item_ = PathExtended(item)
|
|
431
599
|
if all([afilter(item_) for afilter in filters_total]):
|
|
432
600
|
processed.append(item_)
|
|
433
|
-
if not win_order:
|
|
601
|
+
if not win_order:
|
|
602
|
+
return list(processed)
|
|
434
603
|
import re
|
|
435
|
-
|
|
604
|
+
|
|
605
|
+
processed.sort(key=lambda x: [int(k) if k.isdigit() else k for k in re.split("([0-9]+)", string=x.stem)])
|
|
436
606
|
return list(processed)
|
|
607
|
+
|
|
437
608
|
@staticmethod
|
|
438
|
-
-    def tmpdir(prefix: str = "") ->
-        return
+    def tmpdir(prefix: str = "") -> "PathExtended":
+        return PathExtended.tmp(folder=rf"tmp_dirs/{prefix + ('_' if prefix != '' else '') + randstr()}")
+
     @staticmethod
-    def tmpfile(name: Optional[str]= None, suffix: str = "", folder: OPLike = None, tstamp: bool = False, noun: bool = False) ->
+    def tmpfile(name: Optional[str] = None, suffix: str = "", folder: OPLike = None, tstamp: bool = False, noun: bool = False) -> "PathExtended":
         name_concrete = name or randstr(noun=noun)
-        return
+        return PathExtended.tmp(file=name_concrete + "_" + randstr() + (("_" + str(timestamp())) if tstamp else "") + suffix, folder=folder or "tmp_files")
+
     @staticmethod
-    def tmp(folder: OPLike = None, file: Optional[str] = None, root: str = "~/tmp_results") ->
-        base =
+    def tmp(folder: OPLike = None, file: Optional[str] = None, root: str = "~/tmp_results") -> "PathExtended":
+        base = PathExtended(root).expanduser().joinpath(folder or "").joinpath(file or "")
         target_path = base.parent if file else base
         target_path.mkdir(parents=True, exist_ok=True)
         return base
+
     # ====================================== Compression & Encryption ===========================================
-    def zip(
-
+    def zip(
+        self,
+        path: OPLike = None,
+        folder: OPLike = None,
+        name: Optional[str] = None,
+        arcname: Optional[str] = None,
+        inplace: bool = False,
+        verbose: bool = True,
+        content: bool = False,
+        orig: bool = False,
+        pwd: Optional[str] = None,
+        mode: FILE_MODE = "w",
+        **kwargs: Any,
+    ) -> "PathExtended":
         path_resolved, slf = self._resolve_path(folder, name, path, self.name).expanduser().resolve(), self.expanduser().resolve()
         # if use_7z: # benefits over regular zip and encrypt: can handle very large files with low memory footprint
         # path_resolved = path_resolved + '.7z' if not path_resolved.suffix == '.7z' else path_resolved
         # with install_n_import("py7zr").SevenZipFile(file=path_resolved, mode=mode, password=pwd) as archive: archive.writeall(path=str(slf), arcname=None)
-        arcname_obj =
-        if arcname_obj.name != slf.name:
+        arcname_obj = PathExtended(arcname or slf.name)
+        if arcname_obj.name != slf.name:
+            arcname_obj /= slf.name # arcname has to start from somewhere and end with filename
         if slf.is_file():
             path_resolved = Compression.zip_file(ip_path=str(slf), op_path=str(path_resolved + ".zip" if path_resolved.suffix != ".zip" else path_resolved), arcname=str(arcname_obj), mode=mode, **kwargs)
         else:
-            if content:
-
-
+            if content:
+                root_dir, base_dir = slf, "."
+            else:
+                root_dir, base_dir = slf.split(at=str(arcname_obj[0]), sep=1)[0], str(arcname_obj)
+            path_resolved = PathExtended(Compression.compress_folder(root_dir=str(root_dir), op_path=str(path_resolved), base_dir=base_dir, fmt="zip", **kwargs)) # TODO: see if this supports mode
         return self._return(path_resolved, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"ZIPPED {repr(slf)} ==> {repr(path)}")
-
-
+
+    def unzip(
+        self,
+        folder: OPLike = None,
+        path: OPLike = None,
+        name: Optional[str] = None,
+        verbose: bool = True,
+        content: bool = False,
+        inplace: bool = False,
+        overwrite: bool = False,
+        orig: bool = False,
+        pwd: Optional[str] = None,
+        tmp: bool = False,
+        pattern: Optional[str] = None,
+        merge: bool = False,
+    ) -> "PathExtended":
         assert merge is False, "I have not implemented this yet"
         assert path is None, "I have not implemented this yet"
-        if tmp:
+        if tmp:
+            return self.unzip(folder=PathExtended.tmp().joinpath("tmp_unzips").joinpath(randstr()), content=True).joinpath(self.stem)
         slf = zipfile__ = self.expanduser().resolve()
         if any(ztype in str(slf.parent) for ztype in (".zip", ".7z")): # path include a zip archive in the middle.
             tmp__ = [item for item in (".zip", ".7z", "") if item in str(slf)]
             ztype = tmp__[0]
-            if ztype == "":
+            if ztype == "":
+                return slf
             # zipfile__, name__ = slf.split(at=str(List(slf.parts).filter(lambda x: ztype in x)[0]), sep=-1)
             zipfile__, name__ = slf.split(at=str(next(item for item in slf.parts if ztype in item)), sep=-1)
             name = str(name__)
-        folder = (zipfile__.parent / zipfile__.stem) if folder is None else
-        assert isinstance(folder,
+        folder = (zipfile__.parent / zipfile__.stem) if folder is None else PathExtended(folder).expanduser().absolute().resolve().joinpath(zipfile__.stem)
+        assert isinstance(folder, PathExtended), "folder should be a P object at this point"
         folder = folder if not content else folder.parent
         if slf.suffix == ".7z":
             raise NotImplementedError("I have not implemented this yet")
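The hunk above widens the temp-path helpers and the zip/unzip signatures to return "PathExtended". As a rough usage sketch of that surface (the import path below is an assumption on the editor's part and does not appear in this diff):

# Illustrative only: assumes PathExtended is importable as shown; not verified against 2.1.
from machineconfig.utils.path_extended import PathExtended  # assumed import path

scratch = PathExtended.tmpdir(prefix="demo")   # a fresh folder under ~/tmp_results/tmp_dirs/
note = scratch.joinpath("note.txt")
note.write_text("hello")                       # the plain pathlib.Path API still applies
archive = note.zip(verbose=False)              # writes note.txt.zip next to the source file
unpacked = archive.unzip(tmp=True)             # extracts into a throwaway tmp folder
print(unpacked)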
@@ -492,80 +697,100 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
         # else: archive.extractall(path=folder)
         else:
             if overwrite:
-                if not content:
+                if not content:
+                    PathExtended(folder).joinpath(name or "").delete(sure=True, verbose=True) # deletes a specific file / folder that has the same name as the zip file without extension.
                 else:
                     import zipfile
-
+
+                    mylist = [x for x in zipfile.ZipFile(str(self)).namelist() if "/" not in x or (len(x.split("/")) == 2 and x.endswith("/"))]
                     # List().apply(lambda item: P(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True))
-                    for item in mylist:
-
+                    for item in mylist:
+                        PathExtended(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True)
+            result = Compression.unzip(str(zipfile__), str(folder), None if name is None else PathExtended(name).as_posix())
         assert isinstance(result, Path)
-        return self._return(
-
+        return self._return(PathExtended(result), inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNZIPPED {repr(zipfile__)} ==> {repr(result)}")
+
+    def untar(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".tar", "")).expanduser().resolve()
         Compression.untar(str(self.expanduser().resolve()), op_path=str(op_path))
         return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNTARRED {repr(self)} ==> {repr(op_path)}")
-
+
+    def ungz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".gz", "")).expanduser().resolve()
         Compression.ungz(str(self.expanduser().resolve()), op_path=str(op_path))
         return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNGZED {repr(self)} ==> {repr(op_path)}")
-
+
+    def unxz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".xz", "")).expanduser().resolve()
         Compression.unxz(str(self.expanduser().resolve()), op_path=str(op_path))
         return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNXZED {repr(self)} ==> {repr(op_path)}")
-
+
+    def unbz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder=folder, name=name, path=path, default_name=self.name.replace(".bz", "").replace(".tbz", ".tar")).expanduser().resolve()
         Compression.unbz(str(self.expanduser().resolve()), op_path=str(op_path))
         return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNBZED {repr(self)} ==> {repr(op_path)}")
-
+
+    def decompress(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         if ".tar.gz" in str(self) or ".tgz" in str(self):
             # res = self.ungz_untar(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
             return self.ungz(name=f"tmp_{randstr()}.tar", inplace=inplace).untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose) # this works for .tgz suffix as well as .tar.gz
-        elif ".gz" in str(self):
+        elif ".gz" in str(self):
+            res = self.ungz(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
         elif ".tar.bz" in str(self) or "tbz" in str(self):
             res = self.unbz(name=f"tmp_{randstr()}.tar", inplace=inplace)
             return res.untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose)
         elif ".tar.xz" in str(self):
             # res = self.unxz_untar(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
             res = self.unxz(inplace=inplace).untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose)
-        elif ".zip" in str(self):
-
+        elif ".zip" in str(self):
+            res = self.unzip(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
+        else:
+            res = self
         return res
-
-
+
+    def encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False, orig: bool = False) -> "PathExtended":
         # see: https://stackoverflow.com/questions/42568262/how-to-encrypt-text-with-a-password-in-python & https://stackoverflow.com/questions/2490334/simple-way-to-encode-a-string-according-to-a-password"""
         slf = self.expanduser().resolve()
         path = self._resolve_path(folder, name, path, slf.name + suffix)
         assert slf.is_file(), f"Cannot encrypt a directory. You might want to try `zip_n_encrypt`. {self}"
         path.write_bytes(encrypt(msg=slf.read_bytes(), key=key, pwd=pwd))
         return self._return(path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"🔒🔑 ENCRYPTED: {repr(slf)} ==> {repr(path)}.")
-
+
+    def decrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, path: OPLike = None, folder: OPLike = None, name: Optional[str] = None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False) -> "PathExtended":
         slf = self.expanduser().resolve()
         path = self._resolve_path(folder=folder, name=name, path=path, default_name=slf.name.replace(suffix, "") if suffix in slf.name else "decrypted_" + slf.name)
         path.write_bytes(decrypt(token=slf.read_bytes(), key=key, pwd=pwd))
         return self._return(path, operation="delete", verbose=verbose, msg=f"🔓🔑 DECRYPTED: {repr(slf)} ==> {repr(path)}.", inplace=inplace)
-
+
+    def zip_n_encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False, content: bool = False) -> "PathExtended":
         return self.zip(inplace=inplace, verbose=verbose, content=content).encrypt(key=key, pwd=pwd, verbose=verbose, inplace=True) if not orig else self
-
-    def
+
+    def decrypt_n_unzip(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False) -> "PathExtended":
+        return self.decrypt(key=key, pwd=pwd, verbose=verbose, inplace=inplace).unzip(folder=None, inplace=True, content=False) if not orig else self
+
+    def _resolve_path(self, folder: OPLike, name: Optional[str], path: OPLike, default_name: str, rel2it: bool = False) -> "PathExtended":
         """:param rel2it: `folder` or `path` are relative to `self` as opposed to cwd. This is used when resolving '../dir'"""
         if path is not None:
-            path =
+            path = PathExtended(self.joinpath(path).resolve() if rel2it else path).expanduser().resolve()
             assert folder is None and name is None, "If `path` is passed, `folder` and `name` cannot be passed."
-            assert isinstance(path,
+            assert isinstance(path, PathExtended), "path should be a P object at this point"
             assert not path.is_dir(), f"`path` passed is a directory! it must not be that. If this is meant, pass it with `folder` kwarg. `{path}`"
             return path
         name, folder = (default_name if name is None else str(name)), (self.parent if folder is None else folder) # good for edge cases of path with single part. # means same directory, just different name
-        return
+        return PathExtended(self.joinpath(folder).resolve() if rel2it else folder).expanduser().resolve() / name

-    def get_remote_path(self, root: Optional[str], os_specific: bool = False, rel2home: bool = True, strict: bool = True) ->
+    def get_remote_path(self, root: Optional[str], os_specific: bool = False, rel2home: bool = True, strict: bool = True) -> "PathExtended":
         import platform
-
-
+
+        tmp1: str = platform.system().lower() if os_specific else "generic_os"
+        if not rel2home:
+            path = self
         else:
-            try:
+            try:
+                path = self.rel2home()
             except ValueError as ve:
-                if strict:
+                if strict:
+                    raise ve
                 path = self
         # if obfuscate:
         # msc.obfuscater import obfuscate as obfuscate_func
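The archive and crypto helpers above all route through Compression and _resolve_path. A round-trip sketch under the same assumed import path (argument values are placeholders, not taken from this diff):

# Illustrative only: assumes pwd-based key derivation, as the encrypt()/decrypt() signatures suggest.
from machineconfig.utils.path_extended import PathExtended  # assumed import path

payload = PathExtended.tmpfile(suffix=".txt")
payload.write_text("secret")
bundle = payload.zip_n_encrypt(pwd="hunter2")     # zip, then encrypt the zip in place
restored = bundle.decrypt_n_unzip(pwd="hunter2")  # decrypt, then unzip in place
print(restored)

# decompress() dispatches on suffix (.tar.gz/.tgz, .gz, .tbz, .tar.xz, .zip) and returns self otherwise.
extracted = PathExtended("~/Downloads/dataset.tar.gz").decompress()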
@@ -573,14 +798,30 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
         # path = path.with_name(name=name)
         if isinstance(root, str): # the following is to avoid the confusing behaviour of A.joinpath(B) if B is absolute.
             part1 = path.parts[0]
-            if part1 == "/":
-
-
+            if part1 == "/":
+                sanitized_path = path[1:].as_posix()
+            else:
+                sanitized_path = path.as_posix()
+            return PathExtended(root + "/" + tmp1 + "/" + sanitized_path)
         return tmp1 / path
-
-
-
-
+
+    def to_cloud(
+        self,
+        cloud: str,
+        remotepath: OPLike = None,
+        zip: bool = False,
+        encrypt: bool = False, # pylint: disable=W0621, W0622
+        key: Optional[bytes] = None,
+        pwd: Optional[str] = None,
+        rel2home: bool = False,
+        strict: bool = True,
+        # obfuscate: bool = False,
+        share: bool = False,
+        verbose: bool = True,
+        os_specific: bool = False,
+        transfers: int = 10,
+        root: Optional[str] = "myhome",
+    ) -> "PathExtended":
         to_del = []
         localpath = self.expanduser().absolute() if not self.exists() else self
         if zip:
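After this hunk, get_remote_path builds the remote key as root/<os or "generic_os">/<path relative to home> and returns a PathExtended. Roughly, based on a reading of the code above (the printed values are illustrative, not captured output):

# Illustrative only.
from machineconfig.utils.path_extended import PathExtended  # assumed import path

local = PathExtended("~/data/results.parquet").expanduser()
print(local.get_remote_path(root="myhome"))                    # roughly: myhome/generic_os/data/results.parquet
print(local.get_remote_path(root="myhome", os_specific=True))  # e.g. myhome/linux/data/results.parquet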
@@ -591,17 +832,22 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
             to_del.append(localpath)
         if remotepath is None:
             rp = localpath.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict) # if rel2home else (P(root) / localpath if root is not None else localpath)
-        else:
-
+        else:
+            rp = PathExtended(remotepath)
+        rclone_cmd = f"""rclone copyto '{localpath.as_posix()}' '{cloud}:{rp.as_posix()}' {"--progress" if verbose else ""} --transfers={transfers}"""
         from machineconfig.utils.terminal import Terminal
-
+
+        if verbose:
+            print(f"{'⬆️' * 5} UPLOADING with `{rclone_cmd}`")
         shell_to_use = "powershell" if sys.platform == "win32" else "bash"
         res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use).capture()
         _ = [item.delete(sure=True) for item in to_del]
         assert res.is_successful(strict_err=False, strict_returcode=True), res.print(capture=False, desc="Cloud Storage Operation")
-        if verbose:
+        if verbose:
+            print(f"{'⬆️' * 5} UPLOAD COMPLETED.")
         if share:
-            if verbose:
+            if verbose:
+                print("🔗 SHARING FILE")
             shell_to_use = "powershell" if sys.platform == "win32" else "bash"
             res = Terminal().run(f"""rclone link '{cloud}:{rp.as_posix()}'""", shell=shell_to_use).capture()
             tmp = res.op2path(strict_err=False, strict_returncode=False)
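to_cloud ultimately shells out to rclone copyto and, with share=True, rclone link. A sketch of a call site ('gdrive' stands in for any configured rclone remote; it is not taken from this diff):

# Illustrative only: requires rclone on PATH and a remote named 'gdrive' in rclone.conf.
from machineconfig.utils.path_extended import PathExtended  # assumed import path

doc = PathExtended("~/projects/notes.md")
link = doc.to_cloud(cloud="gdrive", zip=True, encrypt=True, pwd="hunter2", rel2home=True, share=True)
print(link)  # share=True returns the rclone link; otherwise the method returns self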
@@ -610,32 +856,53 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
                 raise RuntimeError(f"💥 Could not get link for {self}.")
             else:
                 res.print_if_unsuccessful(desc="Cloud Storage Operation", strict_err=True, strict_returncode=True)
-                link_p:
+                link_p: "PathExtended" = PathExtended(str(tmp))
                 return link_p
         return self
-
-
-
+
+    def from_cloud(
+        self,
+        cloud: str,
+        remotepath: OPLike = None,
+        decrypt: bool = False,
+        unzip: bool = False, # type: ignore # pylint: disable=W0621
+        key: Optional[bytes] = None,
+        pwd: Optional[str] = None,
+        rel2home: bool = False,
+        os_specific: bool = False,
+        strict: bool = True,
+        transfers: int = 10,
+        root: Optional[str] = "myhome",
+        verbose: bool = True,
+        overwrite: bool = True,
+        merge: bool = False,
+    ):
         if remotepath is None:
             remotepath = self.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict)
             remotepath += ".zip" if unzip else ""
             remotepath += ".enc" if decrypt else ""
-        else:
+        else:
+            remotepath = PathExtended(remotepath)
         localpath = self.expanduser().absolute()
         localpath += ".zip" if unzip else ""
         localpath += ".enc" if decrypt else ""
-        rclone_cmd = f"""rclone copyto '{cloud}:{remotepath.as_posix()}' '{localpath.as_posix()}' {
+        rclone_cmd = f"""rclone copyto '{cloud}:{remotepath.as_posix()}' '{localpath.as_posix()}' {"--progress" if verbose else ""} --transfers={transfers}"""
         from machineconfig.utils.terminal import Terminal
-
+
+        if verbose:
+            print(f"{'⬇️' * 5} DOWNLOADING with `{rclone_cmd}`")
         shell_to_use = "powershell" if sys.platform == "win32" else "bash"
         res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use)
         success = res.is_successful(strict_err=False, strict_returcode=True)
         if not success:
             res.print(capture=False, desc="Cloud Storage Operation")
             return None
-        if decrypt:
-
+        if decrypt:
+            localpath = localpath.decrypt(key=key, pwd=pwd, inplace=True)
+        if unzip:
+            localpath = localpath.unzip(inplace=True, verbose=True, overwrite=overwrite, content=True, merge=merge)
         return localpath
+
     def sync_to_cloud(self, cloud: str, sync_up: bool = False, sync_down: bool = False, os_specific: bool = False, rel2home: bool = True, transfers: int = 10, delete: bool = False, root: Optional[str] = "myhome", verbose: bool = True):
         tmp_path_obj = self.expanduser().absolute()
         tmp_path_obj.parent.mkdir(parents=True, exist_ok=True)
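from_cloud mirrors to_cloud with rclone copyto in the other direction, then optionally decrypts and unzips, returning None when the copy fails. A sketch under the same placeholder remote:

# Illustrative only: reverses the upload sketch above.
from machineconfig.utils.path_extended import PathExtended  # assumed import path

doc = PathExtended("~/projects/notes.md")
restored = doc.from_cloud(cloud="gdrive", unzip=True, decrypt=True, pwd="hunter2", rel2home=True)
if restored is None:
    print("download failed; rclone output was printed above")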
@@ -648,9 +915,11 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
         print(f"SYNCING 🔄️ {source} {'>' * 15} {target}`")
         rclone_cmd = f"""rclone sync '{source}' '{target}' """
         rclone_cmd += f" --progress --transfers={transfers} --verbose"
-        rclone_cmd +=
+        rclone_cmd += " --delete-during" if delete else ""
         from machineconfig.utils.terminal import Terminal
-
+
+        if verbose:
+            print(rclone_cmd)
         shell_to_use = "powershell" if sys.platform == "win32" else "bash"
         res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use)
         success = res.is_successful(strict_err=False, strict_returcode=True)
|