machineconfig 1.97__py3-none-any.whl → 2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of machineconfig might be problematic.

Files changed (268)
  1. machineconfig/cluster/cloud_manager.py +22 -29
  2. machineconfig/cluster/data_transfer.py +2 -3
  3. machineconfig/cluster/distribute.py +0 -2
  4. machineconfig/cluster/file_manager.py +4 -5
  5. machineconfig/cluster/job_params.py +1 -4
  6. machineconfig/cluster/loader_runner.py +8 -11
  7. machineconfig/cluster/remote_machine.py +4 -5
  8. machineconfig/cluster/script_execution.py +2 -2
  9. machineconfig/cluster/script_notify_upon_completion.py +0 -1
  10. machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +4 -6
  11. machineconfig/cluster/sessions_managers/archive/session_managers.py +0 -1
  12. machineconfig/cluster/sessions_managers/enhanced_command_runner.py +35 -75
  13. machineconfig/cluster/sessions_managers/wt_local.py +113 -185
  14. machineconfig/cluster/sessions_managers/wt_local_manager.py +127 -197
  15. machineconfig/cluster/sessions_managers/wt_remote.py +60 -67
  16. machineconfig/cluster/sessions_managers/wt_remote_manager.py +110 -149
  17. machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +61 -64
  18. machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +72 -172
  19. machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +27 -60
  20. machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +58 -137
  21. machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +46 -74
  22. machineconfig/cluster/sessions_managers/zellij_local.py +91 -147
  23. machineconfig/cluster/sessions_managers/zellij_local_manager.py +165 -190
  24. machineconfig/cluster/sessions_managers/zellij_remote.py +51 -58
  25. machineconfig/cluster/sessions_managers/zellij_remote_manager.py +40 -46
  26. machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +19 -17
  27. machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +30 -31
  28. machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +64 -134
  29. machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +7 -11
  30. machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +27 -55
  31. machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +14 -13
  32. machineconfig/cluster/templates/cli_click.py +0 -1
  33. machineconfig/cluster/templates/cli_gooey.py +0 -2
  34. machineconfig/cluster/templates/cli_trogon.py +0 -1
  35. machineconfig/cluster/templates/run_cloud.py +0 -1
  36. machineconfig/cluster/templates/run_cluster.py +0 -1
  37. machineconfig/cluster/templates/run_remote.py +0 -1
  38. machineconfig/cluster/templates/utils.py +27 -11
  39. machineconfig/jobs/__pycache__/__init__.cpython-313.pyc +0 -0
  40. machineconfig/jobs/linux/msc/cli_agents.sh +16 -0
  41. machineconfig/jobs/python/check_installations.py +9 -9
  42. machineconfig/jobs/python/create_bootable_media.py +0 -2
  43. machineconfig/jobs/python/python_cargo_build_share.py +2 -2
  44. machineconfig/jobs/python/python_ve_symlink.py +9 -11
  45. machineconfig/jobs/python/tasks.py +0 -1
  46. machineconfig/jobs/python/vscode/api.py +5 -5
  47. machineconfig/jobs/python/vscode/link_ve.py +20 -21
  48. machineconfig/jobs/python/vscode/select_interpreter.py +28 -29
  49. machineconfig/jobs/python/vscode/sync_code.py +14 -18
  50. machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-313.pyc +0 -0
  51. machineconfig/jobs/python_custom_installers/archive/ngrok.py +15 -15
  52. machineconfig/jobs/python_custom_installers/dev/aider.py +10 -18
  53. machineconfig/jobs/python_custom_installers/dev/alacritty.py +12 -21
  54. machineconfig/jobs/python_custom_installers/dev/brave.py +13 -22
  55. machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +13 -20
  56. machineconfig/jobs/python_custom_installers/dev/code.py +17 -24
  57. machineconfig/jobs/python_custom_installers/dev/cursor.py +10 -21
  58. machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +12 -11
  59. machineconfig/jobs/python_custom_installers/dev/espanso.py +19 -23
  60. machineconfig/jobs/python_custom_installers/dev/goes.py +9 -16
  61. machineconfig/jobs/python_custom_installers/dev/lvim.py +13 -21
  62. machineconfig/jobs/python_custom_installers/dev/nerdfont.py +15 -22
  63. machineconfig/jobs/python_custom_installers/dev/redis.py +15 -23
  64. machineconfig/jobs/python_custom_installers/dev/wezterm.py +15 -22
  65. machineconfig/jobs/python_custom_installers/dev/winget.py +32 -50
  66. machineconfig/jobs/python_custom_installers/docker.py +15 -24
  67. machineconfig/jobs/python_custom_installers/gh.py +18 -26
  68. machineconfig/jobs/python_custom_installers/hx.py +33 -17
  69. machineconfig/jobs/python_custom_installers/warp-cli.py +15 -23
  70. machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-313.pyc +0 -0
  71. machineconfig/jobs/python_generic_installers/config.json +412 -389
  72. machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-313.pyc +0 -0
  73. machineconfig/jobs/python_windows_installers/dev/config.json +1 -1
  74. machineconfig/jobs/windows/archive/archive_pygraphviz.ps1 +1 -1
  75. machineconfig/jobs/windows/msc/cli_agents.bat +0 -0
  76. machineconfig/jobs/windows/msc/cli_agents.ps1 +0 -0
  77. machineconfig/jobs/windows/start_terminal.ps1 +1 -1
  78. machineconfig/logger.py +50 -0
  79. machineconfig/profile/create.py +50 -36
  80. machineconfig/profile/create_hardlinks.py +33 -26
  81. machineconfig/profile/shell.py +87 -60
  82. machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
  83. machineconfig/scripts/cloud/init.sh +2 -2
  84. machineconfig/scripts/linux/checkout_versions +1 -1
  85. machineconfig/scripts/linux/choose_wezterm_theme +1 -1
  86. machineconfig/scripts/linux/cloud_copy +1 -1
  87. machineconfig/scripts/linux/cloud_manager +1 -1
  88. machineconfig/scripts/linux/cloud_mount +1 -1
  89. machineconfig/scripts/linux/cloud_repo_sync +1 -1
  90. machineconfig/scripts/linux/cloud_sync +1 -1
  91. machineconfig/scripts/linux/croshell +1 -1
  92. machineconfig/scripts/linux/devops +3 -5
  93. machineconfig/scripts/linux/fire +2 -1
  94. machineconfig/scripts/linux/fire_agents +3 -3
  95. machineconfig/scripts/linux/ftpx +1 -1
  96. machineconfig/scripts/linux/gh_models +1 -1
  97. machineconfig/scripts/linux/kill_process +1 -1
  98. machineconfig/scripts/linux/mcinit +2 -2
  99. machineconfig/scripts/linux/repos +1 -1
  100. machineconfig/scripts/linux/scheduler +1 -1
  101. machineconfig/scripts/linux/start_slidev +1 -1
  102. machineconfig/scripts/linux/start_terminals +1 -1
  103. machineconfig/scripts/linux/url2md +1 -1
  104. machineconfig/scripts/linux/warp-cli.sh +122 -0
  105. machineconfig/scripts/linux/wifi_conn +1 -1
  106. machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
  107. machineconfig/scripts/python/__pycache__/croshell.cpython-313.pyc +0 -0
  108. machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
  109. machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-313.pyc +0 -0
  110. machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
  111. machineconfig/scripts/python/__pycache__/fire_jobs.cpython-313.pyc +0 -0
  112. machineconfig/scripts/python/ai/__init__.py +0 -0
  113. machineconfig/scripts/python/ai/__pycache__/__init__.cpython-313.pyc +0 -0
  114. machineconfig/scripts/python/ai/__pycache__/generate_files.cpython-313.pyc +0 -0
  115. machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-313.pyc +0 -0
  116. machineconfig/scripts/python/ai/chatmodes/Thinking-Beast-Mode.chatmode.md +337 -0
  117. machineconfig/scripts/python/ai/chatmodes/Ultimate-Transparent-Thinking-Beast-Mode.chatmode.md +644 -0
  118. machineconfig/scripts/python/ai/chatmodes/deepResearch.chatmode.md +81 -0
  119. machineconfig/scripts/python/ai/configs/.gemini/settings.json +81 -0
  120. machineconfig/scripts/python/ai/generate_files.py +84 -0
  121. machineconfig/scripts/python/ai/instructions/python/dev.instructions.md +45 -0
  122. machineconfig/scripts/python/ai/mcinit.py +107 -0
  123. machineconfig/scripts/python/ai/prompts/allLintersAndTypeCheckers.prompt.md +5 -0
  124. machineconfig/scripts/python/ai/prompts/research-report-skeleton.prompt.md +38 -0
  125. machineconfig/scripts/python/ai/scripts/lint_and_type_check.sh +52 -0
  126. machineconfig/scripts/python/archive/tmate_conn.py +5 -5
  127. machineconfig/scripts/python/archive/tmate_start.py +3 -3
  128. machineconfig/scripts/python/choose_wezterm_theme.py +2 -2
  129. machineconfig/scripts/python/cloud_copy.py +20 -19
  130. machineconfig/scripts/python/cloud_mount.py +10 -8
  131. machineconfig/scripts/python/cloud_repo_sync.py +15 -15
  132. machineconfig/scripts/python/cloud_sync.py +1 -1
  133. machineconfig/scripts/python/croshell.py +18 -16
  134. machineconfig/scripts/python/devops.py +6 -6
  135. machineconfig/scripts/python/devops_add_identity.py +9 -7
  136. machineconfig/scripts/python/devops_add_ssh_key.py +19 -19
  137. machineconfig/scripts/python/devops_backup_retrieve.py +14 -14
  138. machineconfig/scripts/python/devops_devapps_install.py +3 -3
  139. machineconfig/scripts/python/devops_update_repos.py +141 -53
  140. machineconfig/scripts/python/dotfile.py +3 -3
  141. machineconfig/scripts/python/fire_agents.py +202 -41
  142. machineconfig/scripts/python/fire_jobs.py +20 -21
  143. machineconfig/scripts/python/ftpx.py +4 -3
  144. machineconfig/scripts/python/gh_models.py +94 -94
  145. machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-313.pyc +0 -0
  146. machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-313.pyc +0 -0
  147. machineconfig/scripts/python/helpers/cloud_helpers.py +3 -3
  148. machineconfig/scripts/python/helpers/helpers2.py +3 -3
  149. machineconfig/scripts/python/helpers/helpers4.py +8 -7
  150. machineconfig/scripts/python/helpers/helpers5.py +7 -7
  151. machineconfig/scripts/python/helpers/repo_sync_helpers.py +2 -2
  152. machineconfig/scripts/python/mount_nfs.py +4 -3
  153. machineconfig/scripts/python/mount_nw_drive.py +4 -4
  154. machineconfig/scripts/python/mount_ssh.py +4 -3
  155. machineconfig/scripts/python/repos.py +9 -9
  156. machineconfig/scripts/python/scheduler.py +1 -1
  157. machineconfig/scripts/python/start_slidev.py +9 -8
  158. machineconfig/scripts/python/start_terminals.py +1 -1
  159. machineconfig/scripts/python/viewer.py +40 -40
  160. machineconfig/scripts/python/wifi_conn.py +65 -66
  161. machineconfig/scripts/python/wsl_windows_transfer.py +2 -2
  162. machineconfig/scripts/windows/checkout_version.ps1 +1 -3
  163. machineconfig/scripts/windows/choose_wezterm_theme.ps1 +1 -3
  164. machineconfig/scripts/windows/cloud_copy.ps1 +2 -6
  165. machineconfig/scripts/windows/cloud_manager.ps1 +1 -1
  166. machineconfig/scripts/windows/cloud_repo_sync.ps1 +1 -2
  167. machineconfig/scripts/windows/cloud_sync.ps1 +2 -2
  168. machineconfig/scripts/windows/croshell.ps1 +2 -2
  169. machineconfig/scripts/windows/devops.ps1 +1 -4
  170. machineconfig/scripts/windows/dotfile.ps1 +1 -3
  171. machineconfig/scripts/windows/fire.ps1 +1 -1
  172. machineconfig/scripts/windows/ftpx.ps1 +2 -2
  173. machineconfig/scripts/windows/gpt.ps1 +1 -1
  174. machineconfig/scripts/windows/kill_process.ps1 +1 -2
  175. machineconfig/scripts/windows/mcinit.ps1 +2 -2
  176. machineconfig/scripts/windows/mount_nfs.ps1 +1 -1
  177. machineconfig/scripts/windows/mount_ssh.ps1 +1 -1
  178. machineconfig/scripts/windows/pomodoro.ps1 +1 -1
  179. machineconfig/scripts/windows/py2exe.ps1 +1 -3
  180. machineconfig/scripts/windows/repos.ps1 +1 -1
  181. machineconfig/scripts/windows/scheduler.ps1 +1 -1
  182. machineconfig/scripts/windows/snapshot.ps1 +2 -2
  183. machineconfig/scripts/windows/start_slidev.ps1 +1 -1
  184. machineconfig/scripts/windows/start_terminals.ps1 +1 -1
  185. machineconfig/scripts/windows/wifi_conn.ps1 +1 -1
  186. machineconfig/scripts/windows/wsl_windows_transfer.ps1 +1 -3
  187. machineconfig/settings/lf/linux/lfrc +1 -1
  188. machineconfig/settings/linters/.ruff.toml +2 -2
  189. machineconfig/settings/linters/.ruff_cache/.gitignore +2 -0
  190. machineconfig/settings/linters/.ruff_cache/CACHEDIR.TAG +1 -0
  191. machineconfig/settings/lvim/windows/archive/config_additional.lua +1 -1
  192. machineconfig/settings/shells/ipy/profiles/default/startup/playext.py +71 -71
  193. machineconfig/settings/shells/wt/settings.json +8 -8
  194. machineconfig/settings/svim/linux/init.toml +1 -1
  195. machineconfig/settings/svim/windows/init.toml +1 -1
  196. machineconfig/setup_linux/web_shortcuts/croshell.sh +0 -54
  197. machineconfig/setup_linux/web_shortcuts/interactive.sh +6 -6
  198. machineconfig/setup_linux/web_shortcuts/tmp.sh +2 -0
  199. machineconfig/setup_windows/web_shortcuts/all.ps1 +2 -2
  200. machineconfig/setup_windows/web_shortcuts/ascii_art.ps1 +1 -1
  201. machineconfig/setup_windows/web_shortcuts/croshell.ps1 +1 -1
  202. machineconfig/setup_windows/web_shortcuts/interactive.ps1 +5 -5
  203. machineconfig/setup_windows/wt_and_pwsh/install_fonts.ps1 +51 -15
  204. machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +75 -18
  205. machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +52 -42
  206. machineconfig/utils/ai/browser_user_wrapper.py +5 -5
  207. machineconfig/utils/ai/generate_file_checklist.py +19 -22
  208. machineconfig/utils/ai/url2md.py +5 -3
  209. machineconfig/utils/cloud/onedrive/setup_oauth.py +5 -4
  210. machineconfig/utils/cloud/onedrive/transaction.py +192 -227
  211. machineconfig/utils/code.py +71 -43
  212. machineconfig/utils/installer.py +77 -85
  213. machineconfig/utils/installer_utils/installer_abc.py +29 -17
  214. machineconfig/utils/installer_utils/installer_class.py +188 -83
  215. machineconfig/utils/io_save.py +3 -15
  216. machineconfig/utils/links.py +22 -11
  217. machineconfig/utils/notifications.py +197 -0
  218. machineconfig/utils/options.py +38 -25
  219. machineconfig/utils/path.py +18 -6
  220. machineconfig/utils/path_reduced.py +637 -316
  221. machineconfig/utils/procs.py +69 -63
  222. machineconfig/utils/scheduling.py +11 -13
  223. machineconfig/utils/ssh.py +351 -0
  224. machineconfig/utils/terminal.py +225 -0
  225. machineconfig/utils/utils.py +13 -12
  226. machineconfig/utils/utils2.py +43 -10
  227. machineconfig/utils/utils5.py +242 -46
  228. machineconfig/utils/ve.py +11 -6
  229. {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/METADATA +15 -9
  230. {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/RECORD +232 -235
  231. machineconfig/cluster/self_ssh.py +0 -57
  232. machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
  233. machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
  234. machineconfig/jobs/python/archive/python_tools.txt +0 -12
  235. machineconfig/jobs/python/vscode/__pycache__/select_interpreter.cpython-311.pyc +0 -0
  236. machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  237. machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  238. machineconfig/jobs/python_generic_installers/update.py +0 -3
  239. machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  240. machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
  241. machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
  242. machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
  243. machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
  244. machineconfig/scripts/linux/activate_ve +0 -87
  245. machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
  246. machineconfig/scripts/python/__pycache__/cloud_copy.cpython-311.pyc +0 -0
  247. machineconfig/scripts/python/__pycache__/cloud_mount.cpython-311.pyc +0 -0
  248. machineconfig/scripts/python/__pycache__/cloud_sync.cpython-311.pyc +0 -0
  249. machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
  250. machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
  251. machineconfig/scripts/python/__pycache__/devops_backup_retrieve.cpython-311.pyc +0 -0
  252. machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-311.pyc +0 -0
  253. machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
  254. machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
  255. machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
  256. machineconfig/scripts/python/__pycache__/get_zellij_cmd.cpython-311.pyc +0 -0
  257. machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
  258. machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
  259. machineconfig/scripts/python/ai/init.py +0 -56
  260. machineconfig/scripts/python/ai/rules/python/dev.md +0 -31
  261. machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
  262. machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
  263. machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
  264. machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
  265. machineconfig/scripts/python/helpers/__pycache__/repo_sync_helpers.cpython-311.pyc +0 -0
  266. machineconfig/scripts/windows/activate_ve.ps1 +0 -54
  267. {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/WHEEL +0 -0
  268. {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/top_level.txt +0 -0
@@ -1,71 +1,265 @@
-
-
-
-
- from crocodile.core import List, timestamp, randstr, install_n_import, validate_name
- from crocodile.file_management_helpers.file1 import encrypt, decrypt
- from crocodile.file_management_helpers.file2 import Compression
- from crocodile.file_management_helpers.file3 import Read
-
+ from machineconfig.utils.utils2 import randstr
  from datetime import datetime
+ import time
  from pathlib import Path
  import sys
  import subprocess
+ from platform import system
  from typing import Any, Optional, Union, Callable, TypeAlias, Literal
+ import os
+ # import warnings


- OPLike: TypeAlias = Union[str, 'P', Path, None]
- PLike: TypeAlias = Union[str, 'P', Path]
- FILE_MODE: TypeAlias = Literal['r', 'w', 'x', 'a']
+ OPLike: TypeAlias = Union[str, "PathExtended", Path, None]
+ PLike: TypeAlias = Union[str, "PathExtended", Path]
+ FILE_MODE: TypeAlias = Literal["r", "w", "x", "a"]
  SHUTIL_FORMATS: TypeAlias = Literal["zip", "tar", "gztar", "bztar", "xztar"]


+ def pwd2key(password: str, salt: Optional[bytes] = None, iterations: int = 10) -> bytes: # Derive a secret key from a given password and salt"""
+     import base64
+
+     if salt is None:
+         import hashlib
+
+         m = hashlib.sha256()
+         m.update(password.encode(encoding="utf-8"))
+         return base64.urlsafe_b64encode(s=m.digest()) # make url-safe bytes required by Ferent.
+     from cryptography.hazmat.primitives import hashes
+     from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+
+     return base64.urlsafe_b64encode(PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=iterations, backend=None).derive(password.encode()))
+
+
+ def encrypt(msg: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None, salted: bool = True, iteration: Optional[int] = None, gen_key: bool = False) -> bytes:
+     import base64
+     from cryptography.fernet import Fernet
+
+     salt, iteration = None, None
+     if pwd is not None: # generate it from password
+         assert (key is None) and (type(pwd) is str), "❌ You can either pass key or pwd, or none of them, but not both."
+         import secrets
+
+         iteration = iteration or secrets.randbelow(exclusive_upper_bound=1_000_000)
+         salt = secrets.token_bytes(nbytes=16) if salted else None
+         key_resolved = pwd2key(password=pwd, salt=salt, iterations=iteration)
+     elif key is None:
+         if gen_key:
+             key_resolved = Fernet.generate_key()
+             Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").write_bytes(key_resolved)
+         else:
+             try:
+                 key_resolved = Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").read_bytes()
+                 print(f"⚠️ Using key from: {Path.home().joinpath('dotfiles/creds/data/encrypted_files_key.bytes')}")
+             except FileNotFoundError as err:
+                 print("\n" * 3, "~" * 50, """Consider Loading up your dotfiles or pass `gen_key=True` to make and save one.""", "~" * 50, "\n" * 3)
+                 raise FileNotFoundError(err) from err
+     elif isinstance(key, (str, PathExtended, Path)):
+         key_resolved = Path(key).read_bytes() # a path to a key file was passed, read it:
+     elif type(key) is bytes:
+         key_resolved = key # key passed explicitly
+     else:
+         raise TypeError("❌ Key must be either a path, bytes object or None.")
+     code = Fernet(key=key_resolved).encrypt(msg)
+     if pwd is not None and salt is not None and iteration is not None:
+         return base64.urlsafe_b64encode(b"%b%b%b" % (salt, iteration.to_bytes(4, "big"), base64.urlsafe_b64decode(code)))
+     return code
+
+
+ def decrypt(token: bytes, key: Optional[bytes] = None, pwd: Optional[str] = None, salted: bool = True) -> bytes:
+     import base64
+
+     if pwd is not None:
+         assert key is None, "❌ You can either pass key or pwd, or none of them, but not both."
+         if salted:
+             decoded = base64.urlsafe_b64decode(token)
+             salt, iterations, token = decoded[:16], decoded[16:20], base64.urlsafe_b64encode(decoded[20:])
+             key_resolved = pwd2key(password=pwd, salt=salt, iterations=int.from_bytes(bytes=iterations, byteorder="big"))
+         else:
+             key_resolved = pwd2key(password=pwd) # trailing `;` prevents IPython from caching the result.
+     elif type(key) is bytes:
+         assert pwd is None, "❌ You can either pass key or pwd, or none of them, but not both."
+         key_resolved = key # passsed explicitly
+     elif key is None:
+         key_resolved = Path.home().joinpath("dotfiles/creds/data/encrypted_files_key.bytes").read_bytes() # read from file
+     elif isinstance(key, (str, Path)):
+         key_resolved = Path(key).read_bytes() # passed a path to a file containing kwy
+     else:
+         raise TypeError(f"❌ Key must be either str, P, Path, bytes or None. Recieved: {type(key)}")
+     from cryptography.fernet import Fernet
+
+     return Fernet(key=key_resolved).decrypt(token)
+
+
+ def validate_name(astring: str, replace: str = "_") -> str:
+     import re
+
+     return re.sub(r"[^-a-zA-Z0-9_.()]+", replace, str(astring))
+
+
+ def timestamp(fmt: Optional[str] = None, name: Optional[str] = None) -> str:
+     return ((name + "_") if name is not None else "") + datetime.now().strftime(fmt or "%Y-%m-%d-%I-%M-%S-%p-%f") # isoformat is not compatible with file naming convention, fmt here is.
+
+
  def modify_text(txt_raw: str, txt_search: str, txt_alt: Union[str, Callable[[str], str]], replace_line: bool = True, notfound_append: bool = False, prepend: bool = False, strict: bool = False):
      lines, bingo = txt_raw.split("\n"), False
      if not replace_line: # no need for line splitting
          assert isinstance(txt_alt, str), f"txt_alt must be a string if notfound_append is True. It is not: {txt_alt}"
-         if txt_search in txt_raw: return txt_raw.replace(txt_search, txt_alt)
+         if txt_search in txt_raw:
+             return txt_raw.replace(txt_search, txt_alt)
          return txt_raw + "\n" + txt_alt if notfound_append else txt_raw
      for idx, line in enumerate(lines):
          if txt_search in line:
-             if isinstance(txt_alt, str): lines[idx] = txt_alt
-             elif callable(txt_alt): lines[idx] = txt_alt(line)
+             if isinstance(txt_alt, str):
+                 lines[idx] = txt_alt
+             elif callable(txt_alt):
+                 lines[idx] = txt_alt(line)
              bingo = True
-     if strict and not bingo: raise ValueError(f"txt_search `{txt_search}` not found in txt_raw `{txt_raw}`")
+     if strict and not bingo:
+         raise ValueError(f"txt_search `{txt_search}` not found in txt_raw `{txt_raw}`")
      if bingo is False and notfound_append is True:
          assert isinstance(txt_alt, str), f"txt_alt must be a string if notfound_append is True. It is not: {txt_alt}"
-         if prepend: lines.insert(0, txt_alt)
-         else: lines.append(txt_alt) # txt not found, add it anyway.
+         if prepend:
+             lines.insert(0, txt_alt)
+         else:
+             lines.append(txt_alt) # txt not found, add it anyway.
      return "\n".join(lines)

- class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
+
+ class Compression:
+     @staticmethod
+     def compress_folder(
+         root_dir: str, op_path: str, base_dir: str, fmt: SHUTIL_FORMATS = "zip", verbose: bool = False, **kwargs: Any
+     ) -> str: # shutil works with folders nicely (recursion is done interally) # directory to be archived: root_dir\base_dir, unless base_dir is passed as absolute path. # when archive opened; base_dir will be found."""
+         base_name = op_path[:-4] if op_path.endswith(".zip") else op_path # .zip is added automatically by library, hence we'd like to avoid repeating it if user sent it.
+         import shutil
+
+         return shutil.make_archive(base_name=base_name, format=fmt, root_dir=root_dir, base_dir=base_dir, verbose=verbose, **kwargs) # returned path possible have added extension.
+
+     @staticmethod
+     def zip_file(ip_path: str, op_path: str, arcname: Optional[str] = None, password: Optional[bytes] = None, mode: FILE_MODE = "w", **kwargs: Any):
+         """arcname determines the directory of the file being archived inside the archive. Defaults to same as original directory except for drive.
+         When changed, it should still include the file path in its end. If arcname = filename without any path, then, it will be in the root of the archive."""
+         import zipfile
+
+         with zipfile.ZipFile(op_path, mode=mode) as jungle_zip:
+             if password is not None:
+                 jungle_zip.setpassword(pwd=password)
+             jungle_zip.write(filename=str(ip_path), arcname=str(arcname) if arcname is not None else None, compress_type=zipfile.ZIP_DEFLATED, **kwargs)
+         return Path(op_path)
+
+     @staticmethod
+     def unzip(ip_path: str, op_path: str, fname: Optional[str] = None, password: Optional[bytes] = None, memory: bool = False, **kwargs: Any) -> Path | dict[str, bytes] | bytes:
+         import zipfile
+
+         with zipfile.ZipFile(str(ip_path), "r") as zipObj:
+             if memory:
+                 return {name: zipObj.read(name) for name in zipObj.namelist()} if fname is None else zipObj.read(fname)
+             if fname is None:
+                 zipObj.extractall(op_path, pwd=password, **kwargs)
+                 return Path(op_path)
+             else:
+                 zipObj.extract(member=str(fname), path=str(op_path), pwd=password)
+                 return Path(op_path) / fname
+
+     @staticmethod
+     def gz(file: str, op_path: str): # see this on what to use: https://stackoverflow.com/questions/10540935/what-is-the-difference-between-tar-and-zip
+         import shutil
+         import gzip
+
+         with open(file, "rb") as f_in:
+             with gzip.open(op_path, "wb") as f_out:
+                 shutil.copyfileobj(f_in, f_out)
+         return Path(op_path)
+
+     @staticmethod
+     def ungz(path: str, op_path: str):
+         import gzip
+         import shutil
+
+         with gzip.open(path, "r") as f_in, open(op_path, "wb") as f_out:
+             shutil.copyfileobj(f_in, f_out)
+         return Path(op_path)
+
+     @staticmethod
+     def unbz(path: str, op_path: str):
+         import bz2
+         import shutil
+
+         with bz2.BZ2File(path, "r") as fr, open(str(op_path), "wb") as fw:
+             shutil.copyfileobj(fr, fw)
+         return Path(op_path)
+
+     @staticmethod
+     def xz(path: str, op_path: str):
+         import lzma
+
+         with lzma.open(op_path, "w") as f:
+             f.write(Path(path).read_bytes())
+
+     @staticmethod
+     def unxz(ip_path: str, op_path: str):
+         import lzma
+
+         with lzma.open(ip_path) as file:
+             Path(op_path).write_bytes(file.read())
+
+     @staticmethod
+     def tar(path: str, op_path: str):
+         import tarfile
+
+         with tarfile.open(op_path, "w:gz") as tar_:
+             tar_.add(str(path), arcname=os.path.basename(path))
+         return Path(op_path)
+
+     @staticmethod
+     def untar(path: str, op_path: str, fname: Optional[str] = None, mode: Literal["r", "w"] = "r", **kwargs: Any):
+         import tarfile
+
+         with tarfile.open(str(path), mode) as file:
+             if fname is None:
+                 file.extractall(path=op_path, **kwargs) # extract all files in the archive
+             else:
+                 file.extract(fname, **kwargs)
+         return Path(op_path)
+
+
+ class PathExtended(type(Path()), Path): # type: ignore # pylint: disable=E0241
      # ============= Path management ==================
-     """ The default behaviour of methods acting on underlying disk object is to perform the action and return a new path referring to the mutated object in disk drive.
+     """The default behaviour of methods acting on underlying disk object is to perform the action and return a new path referring to the mutated object in disk drive.
      However, there is a flag `orig` that makes the function return orignal path object `self` as opposed to the new one pointing to new object.
      Additionally, the fate of the original object can be decided by a flag `inplace` which means `replace` it defaults to False and in essence, it deletes the original underlying object.
      This can be seen in `zip` and `encrypt` but not in `copy`, `move`, `retitle` because the fate of original file is dictated already.
      Furthermore, those methods are accompanied with print statement explaining what happened to the object."""
-     def delete(self, sure: bool = False, verbose: bool = True) -> 'P': # slf = self.expanduser().resolve() don't resolve symlinks.
+
+     def delete(self, sure: bool = False, verbose: bool = True) -> "PathExtended": # slf = self.expanduser().resolve() don't resolve symlinks.
          if not sure:
-             if verbose: print(f"❌ Did NOT DELETE because user is not sure. file: {repr(self)}.")
+             if verbose:
+                 print(f"❌ Did NOT DELETE because user is not sure. file: {repr(self)}.")
              return self
          if not self.exists():
              self.unlink(missing_ok=True)
-             if verbose: print(f"❌ Could NOT DELETE nonexisting file {repr(self)}. ")
+             if verbose:
+                 print(f"❌ Could NOT DELETE nonexisting file {repr(self)}. ")
              return self # broken symlinks exhibit funny existence behaviour, catch them here.
-         if self.is_file() or self.is_symlink(): self.unlink(missing_ok=True)
+         if self.is_file() or self.is_symlink():
+             self.unlink(missing_ok=True)
          else:
              import shutil
+
              shutil.rmtree(self, ignore_errors=False)
-         if verbose: print(f"🗑️ ❌ DELETED {repr(self)}.")
+         if verbose:
+             print(f"🗑️ ❌ DELETED {repr(self)}.")
          return self
-     def move(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, rel2it: bool = False, overwrite: bool = False, verbose: bool = True, parents: bool = True, content: bool = False) -> 'P':
+
+     def move(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, rel2it: bool = False, overwrite: bool = False, verbose: bool = True, parents: bool = True, content: bool = False) -> "PathExtended":
          path = self._resolve_path(folder=folder, name=name, path=path, default_name=self.absolute().name, rel2it=rel2it)
-         if parents: path.parent.mkdir(parents=True, exist_ok=True)
+         if parents:
+             path.parent.mkdir(parents=True, exist_ok=True)
          slf = self.expanduser().resolve()
          if content:
              assert self.is_dir(), NotADirectoryError(f"💥 When `content` flag is set to True, path must be a directory. It is not: `{repr(self)}`")
-             self.search("*").apply(lambda x: x.move(folder=path.parent, content=False, overwrite=overwrite))
+             [x.move(folder=path.parent, content=False, overwrite=overwrite) for x in self.search("*")]
              return path # contents live within this directory.
          if overwrite:
              tmp_path = slf.rename(path.parent.absolute() / randstr())
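
The hunk above drops the crocodile imports and inlines local copies of the crypto helpers (pwd2key, encrypt, decrypt), validate_name, timestamp and a Compression class, and renames the P class to PathExtended. A minimal round-trip sketch of the password-based encryption path; it assumes these helpers belong to machineconfig/utils/path_reduced.py (entry 220 in the file list), so the import path below is an assumption, while the calls follow the signatures shown in the hunk:

from machineconfig.utils.path_reduced import encrypt, decrypt  # assumed module path

token = encrypt(b"secret bytes", pwd="hunter2")  # derives a salted Fernet key via pwd2key and prepends salt + iteration count
plain = decrypt(token, pwd="hunter2")            # recovers salt and iterations from the token, then decrypts
assert plain == b"secret bytes"

When neither key nor pwd is given, encrypt falls back to ~/dotfiles/creds/data/encrypted_files_key.bytes, so the key-file form only works once that file exists (or when gen_key=True is passed).
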
@@ -74,99 +268,80 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
          else:
              try:
                  slf.rename(path) # self._return(res=path, inplace=True, operation='rename', orig=False, verbose=verbose, strict=True, msg='')
-             except OSError as oe: # OSError: [Errno 18] Invalid cross-device link:
+             except OSError as oe:  # OSError: [Errno 18] Invalid cross-device link:
                  # https://stackoverflow.com/questions/42392600/oserror-errno-18-invalid-cross-device-link
                  import shutil
+
                  shutil.move(str(slf), str(path))
                  _ = oe
-         if verbose: print(f"🚚 MOVED {repr(self)} ==> {repr(path)}`")
+         if verbose:
+             print(f"🚚 MOVED {repr(self)} ==> {repr(path)}`")
          return path
-     def copy(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, content: bool = False, verbose: bool = True, append: Optional[str] = None, overwrite: bool = False, orig: bool = False) -> 'P': # tested %100 # TODO: replace `content` flag with ability to interpret "*" in resolve method.
+
+     def copy(
+         self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, content: bool = False, verbose: bool = True, append: Optional[str] = None, overwrite: bool = False, orig: bool = False
+     ) -> "PathExtended": # tested %100 # TODO: replace `content` flag with ability to interpret "*" in resolve method.
          dest = self._resolve_path(folder=folder, name=name, path=path, default_name=self.name, rel2it=False)
          dest = dest.expanduser().resolve()
          dest.parent.mkdir(parents=True, exist_ok=True)
          slf = self.expanduser().resolve()
          if dest == slf:
              dest = self.append(append if append is not None else f"_copy_{randstr()}")
-         if not content and overwrite and dest.exists(): dest.delete(sure=True)
-         if not content and not overwrite and dest.exists(): raise FileExistsError(f"💥 Destination already exists: {repr(dest)}")
+         if not content and overwrite and dest.exists():
+             dest.delete(sure=True)
+         if not content and not overwrite and dest.exists():
+             raise FileExistsError(f"💥 Destination already exists: {repr(dest)}")
          if slf.is_file():
              import shutil
+
              shutil.copy(str(slf), str(dest))
-             if verbose: print(f"🖨️ COPIED {repr(slf)} ==> {repr(dest)}")
+             if verbose:
+                 print(f"🖨️ COPIED {repr(slf)} ==> {repr(dest)}")
          elif slf.is_dir():
              dest = dest.parent if content else dest
              # from distutils.dir_util import copy_tree
              from shutil import copytree
+
              copytree(str(slf), str(dest))
-             if verbose: print(f"🖨️ COPIED {'Content of ' if content else ''} {repr(slf)} ==> {repr(dest)}")
-         else: print(f"💥 Could NOT COPY. Not a file nor a path: {repr(slf)}.")
+             if verbose:
+                 print(f"🖨️ COPIED {'Content of ' if content else ''} {repr(slf)} ==> {repr(dest)}")
+         else:
+             print(f"💥 Could NOT COPY. Not a file nor a path: {repr(slf)}.")
          return dest if not orig else self
+
      # ======================================= File Editing / Reading ===================================
-     def readit(self, reader: Optional[Callable[[PLike], Any]] = None, strict: bool = True, default: Optional[Any] = None, verbose: bool = False, **kwargs: Any) -> 'Any':
-         slf = self.expanduser().resolve()
-         if not slf.exists():
-             if strict: raise FileNotFoundError(f"`{slf}` is no where to be found!")
-             else:
-                 if verbose: print(f"💥 P.readit warning: FileNotFoundError, skipping reading of file `{self}")
-                 return default
-         if verbose: print(f"Reading {slf} ({slf.size()} MB) ...")
-         if '.tar.gz' in str(slf) or '.tgz' in str(slf) or '.gz' in str(slf) or '.tar.bz' in str(slf) or 'tbz' in str(slf) or 'tar.xz' in str(slf) or '.zip' in str(slf):
-             filename = slf.decompress(folder=slf.tmp(folder="tmp_unzipped"), verbose=True)
-             if filename.is_dir():
-                 tmp_content = filename.search("*")
-                 if len(tmp_content) == 1:
-                     print(f"⚠️ Found only one file in the unzipped folder: {tmp_content[0]}")
-                     filename = tmp_content.list[0]
-                 else:
-                     if strict: raise ValueError(f"❌ Expected only one file in the unzipped folder, but found {len(tmp_content)} files.")
-                     else: print(f"⚠️ Found {len(tmp_content)} files in the unzipped folder. Using the first one: {tmp_content[0]}")
-                     filename = tmp_content.list[0]
-             else: filename = slf
-         try:
-             return Read.read(filename, **kwargs) if reader is None else reader(str(filename), **kwargs)
-         except IOError as ioe: raise IOError from ioe
-     # DEPRECATED: append_text has been removed. Use the inline equivalent instead:
-     # p.write_text(p.read_text() + appendix)
-     # Returning the path (p) is preserved by write_text in this class.
-     # Example:
-     # p = p.write_text(p.read_text() + appendix)
-     # def append_text(self, appendix: str) -> 'P':
-     #     self.write_text(self.read_text() + appendix)
-     #     return self
-     # DEPRECATED: Instance method modify_text is deprecated and left commented-out to prevent new usage.
-     # Please inline using the module-level modify_text helper:
-     #     current = p.read_text() if p.exists() else ""
-     #     updated = modify_text(current, search, alt, replace_line=..., notfound_append=..., prepend=...)
-     #     p.write_text(updated)
-     # def modify_text(self, txt_search: str, txt_alt: str, replace_line: bool = False, notfound_append: bool = False, prepend: bool = False, encoding: str = 'utf-8'):
-     #     if not self.exists():
-     #         self.parent.mkdir(parents=True, exist_ok=True)
-     #         self.write_text(txt_search)
-     #     return self.write_text(modify_text(txt_raw=self.read_text(encoding=encoding), txt_search=txt_search, txt_alt=txt_alt, replace_line=replace_line, notfound_append=notfound_append, prepend=prepend), encoding=encoding)
-     def download(self, folder: OPLike = None, name: Optional[str]= None, allow_redirects: bool = True, timeout: Optional[int] = None, params: Any = None) -> 'P':
+     def download(self, folder: OPLike = None, name: Optional[str] = None, allow_redirects: bool = True, timeout: Optional[int] = None, params: Any = None) -> "PathExtended":
          import requests
+
          response = requests.get(self.as_url_str(), allow_redirects=allow_redirects, timeout=timeout, params=params) # Alternative: from urllib import request; request.urlopen(url).read().decode('utf-8').
          assert response.status_code == 200, f"Download failed with status code {response.status_code}\n{response.text}"
-         if name is not None: f_name = name
+         if name is not None:
+             f_name = name
          else:
              try:
-                 f_name = response.headers['Content-Disposition'].split('filename=')[1].replace('"', '')
+                 f_name = response.headers["Content-Disposition"].split("filename=")[1].replace('"', "")
              except (KeyError, IndexError):
-                 f_name = validate_name(str(P(response.history[-1].url).name if len(response.history) > 0 else P(response.url).name))
-         dest_path = (P.home().joinpath("Downloads") if folder is None else P(folder)).joinpath(f_name)
+                 f_name = validate_name(str(PathExtended(response.history[-1].url).name if len(response.history) > 0 else PathExtended(response.url).name))
+         dest_path = (PathExtended.home().joinpath("Downloads") if folder is None else PathExtended(folder)).joinpath(f_name)
          dest_path.parent.mkdir(parents=True, exist_ok=True)
-         return dest_path.write_bytes(response.content)
-     def _return(self, res: Union['P', 'Path'], operation: Literal['rename', 'delete', 'Whack'], inplace: bool = False, overwrite: bool = False, orig: bool = False, verbose: bool = False, strict: bool = True, msg: str = "", __delayed_msg__: str = "") -> 'P':
-         res = P(res)
+         dest_path.write_bytes(response.content)
+         return dest_path
+
+     def _return(
+         self, res: Union["PathExtended", "Path"], operation: Literal["rename", "delete", "Whack"], inplace: bool = False, overwrite: bool = False, orig: bool = False, verbose: bool = False, strict: bool = True, msg: str = "", __delayed_msg__: str = ""
+     ) -> "PathExtended":
+         res = PathExtended(res)
          if inplace:
              assert self.exists(), f"`inplace` flag is only relevant if the path exists. It doesn't {self}"
          if operation == "rename":
-             if overwrite and res.exists(): res.delete(sure=True, verbose=verbose)
+             if overwrite and res.exists():
+                 res.delete(sure=True, verbose=verbose)
              if not overwrite and res.exists():
-                 if strict: raise FileExistsError(f"❌ RENAMING failed. File `{res}` already exists.")
+                 if strict:
+                     raise FileExistsError(f"❌ RENAMING failed. File `{res}` already exists.")
                  else:
-                     if verbose: print(f"⚠️ SKIPPED RENAMING {repr(self)} ➡️ {repr(res)} because FileExistsError and scrict=False policy.")
+                     if verbose:
+                         print(f"⚠️ SKIPPED RENAMING {repr(self)} ➡️ {repr(res)} because FileExistsError and scrict=False policy.")
                      return self if orig else res
              self.rename(res)
              msg = msg or f"RENAMED {repr(self)} ➡️ {repr(res)}"
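
In this hunk the one-line bodies of move, copy and download are expanded into multi-line branches, readit and the deprecated append_text/modify_text comments are removed, and download/_return now return PathExtended explicitly. A small usage sketch based only on the signatures shown above (the import path, file names and URL are hypothetical):

from machineconfig.utils.path_reduced import PathExtended  # assumed module path

src = PathExtended.home().joinpath("notes.txt")
backup = src.copy(folder=PathExtended.home().joinpath("backups"), overwrite=True)  # returns the destination path
moved = backup.move(folder=PathExtended.home().joinpath("archive"))                # renames, falling back to shutil.move across devices
data = PathExtended("https://example.com/data.csv").download()                     # hypothetical URL; saves under ~/Downloads and returns the new path
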
@@ -174,362 +349,479 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
174
349
  self.delete(sure=True, verbose=False)
175
350
  __delayed_msg__ = f"DELETED 🗑️❌ {repr(self)}."
176
351
  if verbose and msg != "":
177
- try: print(msg) # emojie print error.
178
- except UnicodeEncodeError: print("P._return warning: UnicodeEncodeError, could not print message.")
352
+ try:
353
+ print(msg) # emojie print error.
354
+ except UnicodeEncodeError:
355
+ print("P._return warning: UnicodeEncodeError, could not print message.")
179
356
  if verbose and __delayed_msg__ != "":
180
- try: print(__delayed_msg__)
181
- except UnicodeEncodeError: print("P._return warning: UnicodeEncodeError, could not print message.")
357
+ try:
358
+ print(__delayed_msg__)
359
+ except UnicodeEncodeError:
360
+ print("P._return warning: UnicodeEncodeError, could not print message.")
182
361
  return self if orig else res
183
- # # ================================ Path Object management ===========================================
184
- # """ Distinction between Path object and the underlying file on disk that the path may refer to. Two distinct flags are used:
185
- # `inplace`: the operation on the path object will affect the underlying file on disk if this flag is raised, otherwise the method will only alter the string.
186
- # `inliue`: the method acts on the path object itself instead of creating a new one if this flag is raised.
187
- # `orig`: whether the method returns the original path object or a new one."""
188
- def append(self, name: str = '', index: bool = False, suffix: Optional[str] = None, verbose: bool = True, **kwargs: Any) -> 'P':
362
+
363
+ def append(self, name: str = "", index: bool = False, suffix: Optional[str] = None, verbose: bool = True, **kwargs: Any) -> "PathExtended":
189
364
  """Returns a new path object with the name appended to the stem of the path. If `index` is True, the name will be the index of the path in the parent directory."""
190
365
  if index:
191
- appended_name = f'''{name}_{len(self.parent.search(f"*{self.name.split('.')[0]}*"))}'''
366
+ appended_name = f"""{name}_{len(self.parent.search(f"*{self.name.split('.')[0]}*"))}"""
192
367
  return self.append(name=appended_name, index=False, verbose=verbose, suffix=suffix, **kwargs)
193
- full_name = (name or ("_" + str(timestamp())))
194
- full_suffix = suffix or ''.join(('bruh' + self).suffixes)
195
- subpath = self.name.split('.')[0] + full_name + full_suffix
368
+ full_name = name or ("_" + str(timestamp()))
369
+ full_suffix = suffix or "".join(("bruh" + self).suffixes)
370
+ subpath = self.name.split(".")[0] + full_name + full_suffix
196
371
  return self._return(self.parent.joinpath(subpath), operation="rename", verbose=verbose, **kwargs)
372
+
197
373
  def with_name(self, name: str, verbose: bool = True, inplace: bool = False, overwrite: bool = False, **kwargs: Any):
198
374
  return self._return(self.parent / name, verbose=verbose, operation="rename", inplace=inplace, overwrite=overwrite, **kwargs)
199
- # ============================= attributes of object ======================================
200
- # @property
201
- # def items(self) -> List[str]: return List(self.parts)
202
- # def __len__(self) -> int: return len(self.parts)
203
- # def __contains__(self, item: PLike): return P(item).as_posix() in self.as_posix()
204
- # def __iter__(self): return self.parts.__iter__()
205
- def __deepcopy__(self, *args: Any, **kwargs: Any) -> 'P':
375
+
376
+ def __deepcopy__(self, *args: Any, **kwargs: Any) -> "PathExtended":
206
377
  _ = args, kwargs
207
- return P(str(self))
208
- def __getstate__(self) -> str: return str(self)
209
- def __add__(self, other: PLike) -> 'P':
378
+ return PathExtended(str(self))
379
+
380
+ def __getstate__(self) -> str:
381
+ return str(self)
382
+
383
+ def __add__(self, other: PLike) -> "PathExtended":
210
384
  return self.parent.joinpath(self.name + str(other)) # used append and prepend if the addition wanted to be before suffix.
211
- def __radd__(self, other: PLike) -> 'P':
385
+
386
+ def __radd__(self, other: PLike) -> "PathExtended":
212
387
  return self.parent.joinpath(str(other) + self.name) # other + P and `other` doesn't know how to make this addition.
213
- def __sub__(self, other: PLike) -> 'P':
214
- res = P(str(self).replace(str(other), ""))
388
+
389
+ def __sub__(self, other: PLike) -> "PathExtended":
390
+ res = PathExtended(str(self).replace(str(other), ""))
215
391
  return (res[1:] if str(res[0]) in {"\\", "/"} else res) if len(res.parts) else res # paths starting with "/" are problematic. e.g ~ / "/path" doesn't work.
216
-
217
- def rel2home(self, ) -> 'P': return self._return(P(self.expanduser().absolute().relative_to(Path.home())), operation='Whack') # very similat to collapseuser but without "~" being added so its consistent with rel2cwd.
218
- def collapseuser(self, strict: bool = True, placeholder: str = "~") -> 'P': # opposite of `expanduser` resolve is crucial to fix Windows cases insensitivty problem.
219
- if strict: assert P.home() in self.expanduser().absolute().resolve(), ValueError(f"`{P.home()}` is not in the subpath of `{self}`")
220
- if (str(self).startswith(placeholder) or P.home().as_posix() not in self.resolve().as_posix()): return self
221
- return self._return(res=P(placeholder) / (self.expanduser().absolute().resolve(strict=strict) - P.home()), operation='Whack') # resolve also solves the problem of Windows case insensitivty.
392
+
393
+ def rel2home(self) -> "PathExtended":
394
+ return self._return(PathExtended(self.expanduser().absolute().relative_to(Path.home())), operation="Whack") # very similat to collapseuser but without "~" being added so its consistent with rel2cwd.
395
+
396
+ def collapseuser(self, strict: bool = True, placeholder: str = "~") -> "PathExtended": # opposite of `expanduser` resolve is crucial to fix Windows cases insensitivty problem.
397
+ if strict:
398
+ assert str(self.expanduser().absolute().resolve()).startswith(str(PathExtended.home())), ValueError(f"`{PathExtended.home()}` is not in the subpath of `{self}`")
399
+ if str(self).startswith(placeholder) or PathExtended.home().as_posix() not in self.resolve().as_posix():
400
+ return self
401
+ return self._return(res=PathExtended(placeholder) / (self.expanduser().absolute().resolve(strict=strict) - PathExtended.home()), operation="Whack") # resolve also solves the problem of Windows case insensitivty.
402
+
222
403
  def __getitem__(self, slici: Union[int, list[int], slice]):
223
- if isinstance(slici, list): return P(*[self[item] for item in slici])
224
- elif isinstance(slici, int): return P(self.parts[slici])
225
- return P(*self.parts[slici]) # must be a slice
404
+ if isinstance(slici, list):
405
+ return PathExtended(*[self[item] for item in slici])
406
+ elif isinstance(slici, int):
407
+ return PathExtended(self.parts[slici])
408
+ return PathExtended(*self.parts[slici]) # must be a slice
409
+
226
410
  def split(self, at: Optional[str] = None, index: Optional[int] = None, sep: Literal[-1, 0, 1] = 1, strict: bool = True):
227
411
  if index is None and at is not None: # at is provided # ==================================== Splitting
228
412
  if not strict: # behaves like split method of string
229
413
  one, two = (items := str(self).split(sep=str(at)))[0], items[1]
230
- one, two = P(one[:-1]) if one.endswith("/") else P(one), P(two[1:]) if two.startswith("/") else P(two)
414
+ one, two = PathExtended(one[:-1]) if one.endswith("/") else PathExtended(one), PathExtended(two[1:]) if two.startswith("/") else PathExtended(two)
231
415
  else: # "strict": # raises an error if exact match is not found.
232
416
  index = self.parts.index(str(at))
233
- one, two = self[0:index], self[index + 1:] # both one and two do not include the split item.
417
+ one, two = self[0:index], self[index + 1 :] # both one and two do not include the split item.
234
418
  elif index is not None and at is None: # index is provided
235
- one, two = self[:index], P(*self.parts[index + 1:])
419
+ one, two = self[:index], PathExtended(*self.parts[index + 1 :])
236
420
  at = self.parts[index] # this is needed below.
237
- else: raise ValueError("Either `index` or `at` can be provided. Both are not allowed simulatanesouly.")
238
- if sep == 0: return one, two # neither of the portions get the sperator appended to it. # ================================ appending `at` to one of the portions
239
- elif sep == 1: return one, P(at) / two # append it to right portion
421
+ else:
422
+ raise ValueError("Either `index` or `at` can be provided. Both are not allowed simulatanesouly.")
423
+ if sep == 0:
424
+ return one, two # neither of the portions get the sperator appended to it. # ================================ appending `at` to one of the portions
425
+ elif sep == 1:
426
+ return one, PathExtended(at) / two # append it to right portion
240
427
  elif sep == -1:
241
428
  return one / at, two # append it to left portion.
242
- else: raise ValueError(f"`sep` should take a value from the set [-1, 0, 1] but got {sep}")
429
+ else:
430
+ raise ValueError(f"`sep` should take a value from the set [-1, 0, 1] but got {sep}")
431
+
243
432
  def __repr__(self): # this is useful only for the console
244
433
  if self.is_symlink():
245
- try: target = self.resolve() # broken symolinks are funny, and almost always fail `resolve` method.
246
- except Exception: target = "BROKEN LINK " + str(self) # avoid infinite recursions for broken links.
434
+ try:
435
+ target = self.resolve() # broken symolinks are funny, and almost always fail `resolve` method.
436
+ except Exception:
437
+ target = "BROKEN LINK " + str(self) # avoid infinite recursions for broken links.
247
438
  return "🔗 Symlink '" + str(self) + "' ==> " + (str(target) if target == self else str(target))
248
- elif self.is_absolute(): return self._type() + " '" + str(self.clickable()) + "'" + (" | " + self.time(which="c").isoformat()[:-7].replace("T", " ") if self.exists() else "") + (f" | {self.size()} Mb" if self.is_file() else "")
249
- elif "http" in str(self): return "🕸️ URL " + str(self.as_url_str())
250
- else: return "📍 Relative " + "'" + str(self) + "'" # not much can be said about a relative path.
439
+ elif self.is_absolute():
440
+ return self._type() + " '" + str(self.clickable()) + "'" + (" | " + datetime.fromtimestamp(self.stat().st_ctime).isoformat()[:-7].replace("T", " ") if self.exists() else "") + (f" | {self.size()} Mb" if self.is_file() else "")
441
+ elif "http" in str(self):
442
+ return "🕸️ URL " + str(self.as_url_str())
443
+ else:
444
+ return "📍 Relative " + "'" + str(self) + "'" # not much can be said about a relative path.
445
+
251
446
  # def to_str(self) -> str: return str(self)
252
- def size(self, units: Literal['b', 'kb', 'mb', 'gb'] = 'mb') -> float: # ===================================== File Specs ==========================================================================================
447
+ def size(self, units: Literal["b", "kb", "mb", "gb"] = "mb") -> float: # ===================================== File Specs ==========================================================================================
253
448
  total_size = self.stat().st_size if self.is_file() else sum([item.stat().st_size for item in self.rglob("*") if item.is_file()])
254
449
  tmp: int
255
450
  match units:
256
- case "b": tmp = 1024 ** 0
257
- case "kb": tmp = 1024 ** 1
258
- case "mb": tmp = 1024 ** 2
259
- case "gb": tmp = 1024 ** 3
451
+ case "b":
452
+ tmp = 1024**0
453
+ case "kb":
454
+ tmp = 1024**1
455
+ case "mb":
456
+ tmp = 1024**2
457
+ case "gb":
458
+ tmp = 1024**3
260
459
  return round(number=total_size / tmp, ndigits=1)
261
- def time(self, which: Literal["m", "c", "a"] = "m", **kwargs: Any):
262
- """* `m`: last mofidication of content, i.e. the time it was created.
263
- * `c`: last status change (its inode is changed, permissions, path, but not content)
264
- * `a`: last access (read)
265
- """
266
- match which:
267
- case "m": tmp = self.stat().st_mtime
268
- case "a": tmp = self.stat().st_atime
269
- case "c": tmp = self.stat().st_ctime
270
- return datetime.fromtimestamp(tmp, **kwargs)
271
-
460
+
461
+ # def time(self, which: Literal["m", "c", "a"] = "m", **kwargs: Any):
462
+ # """* `m`: last mofidication of content, i.e. the time it was created.
463
+ # * `c`: last status change (its inode is changed, permissions, path, but not content)
464
+ # * `a`: last access (read)
465
+ # """
466
+ # warnings.warn(
467
+ # "The 'time' method is deprecated. Use 'datetime.fromtimestamp(self.stat().st_mtime)' for 'm', "
468
+ # "'datetime.fromtimestamp(self.stat().st_ctime)' for 'c', or "
469
+ # "'datetime.fromtimestamp(self.stat().st_atime)' for 'a' instead.",
470
+ # DeprecationWarning,
471
+ # stacklevel=2
472
+ # )
473
+ # match which:
474
+ # case "m": tmp = self.stat().st_mtime
475
+ # case "a": tmp = self.stat().st_atime
476
+ # case "c": tmp = self.stat().st_ctime
477
+ # return datetime.fromtimestamp(tmp, **kwargs)
478
+
272
479
  # ================================ String Nature management ====================================
273
- def clickable(self, ) -> 'P': return self._return(res=P(self.expanduser().resolve().as_uri()), operation='Whack')
274
- def as_url_str(self) -> 'str': return self.as_posix().replace("https:/", "https://").replace("http:/", "http://")
480
+ def clickable(self) -> "PathExtended":
481
+ return self._return(res=PathExtended(self.expanduser().resolve().as_uri()), operation="Whack")
482
+
483
+ def as_url_str(self) -> "str":
484
+ return self.as_posix().replace("https:/", "https://").replace("http:/", "http://")
485
+
275
486
  def as_zip_path(self):
276
487
  import zipfile
488
+
277
489
  res = self.expanduser().resolve()
278
490
  return zipfile.Path(res) # .str.split(".zip") tmp=res[1]+(".zip" if len(res) > 2 else ""); root=res[0]+".zip", at=P(tmp).as_posix()) # TODO
491
+
279
492
  # ========================== override =======================================
- def __setitem__(self, key: Union['str', int, slice], value: PLike):
- fullparts, new = list(self.parts), list(P(value).parts)
+ def __setitem__(self, key: Union["str", int, slice], value: PLike):
+ fullparts, new = list(self.parts), list(PathExtended(value).parts)
  if type(key) is str:
  idx = fullparts.index(key)
  fullparts.remove(key)
- fullparts = fullparts[:idx] + new + fullparts[idx + 1:]
- elif type(key) is int: fullparts = fullparts[:key] + new + fullparts[key + 1:]
- elif type(key) is slice: fullparts = fullparts[:(0 if key.start is None else key.start)] + new + fullparts[(len(fullparts) if key.stop is None else key.stop):]
- self._str = str(P(*fullparts)) # pylint: disable=W0201 # similar attributes: # self._parts # self._pparts # self._cparts # self._cached_cparts
+ fullparts = fullparts[:idx] + new + fullparts[idx + 1 :]
+ elif type(key) is int:
+ fullparts = fullparts[:key] + new + fullparts[key + 1 :]
+ elif type(key) is slice:
+ fullparts = fullparts[: (0 if key.start is None else key.start)] + new + fullparts[(len(fullparts) if key.stop is None else key.stop) :]
+ self._str = str(PathExtended(*fullparts)) # pylint: disable=W0201 # similar attributes: # self._parts # self._pparts # self._cparts # self._cached_cparts

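__setitem__ rebuilds the path from its parts with one segment swapped out. A stdlib-only sketch of the same part-splicing idea, not the PathExtended implementation, with hypothetical paths:

    from pathlib import Path

    def replace_part(path: Path, key: str, value: str) -> Path:
        parts = list(path.parts)
        idx = parts.index(key)                        # locate the named segment
        parts[idx:idx + 1] = list(Path(value).parts)  # splice in the replacement parts
        return Path(*parts)

    print(replace_part(Path("home/alice/data/raw"), "raw", "processed"))  # home/alice/data/processed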
  def _type(self):
  if self.absolute():
- if self.is_file(): return "📄"
- elif self.is_dir(): return "📁"
+ if self.is_file():
+ return "📄"
+ elif self.is_dir():
+ return "📁"
  return "👻NotExist"
  return "📍Relative"
- def write_text(self, data: str, encoding: str = 'utf-8', newline: Optional[str] = None) -> 'P':
- self.parent.mkdir(parents=True, exist_ok=True)
- super(P, self).write_text(data, encoding=encoding, newline=newline)
- return self
- def read_text(self, encoding: Optional[str] = 'utf-8') -> str: return super(P, self).read_text(encoding=encoding)
- def write_bytes(self, data: bytes, overwrite: bool = False) -> 'P':
- slf = self.expanduser().absolute()
- if overwrite and slf.exists(): slf.delete(sure=True)
- res = super(P, slf).write_bytes(data)
- if res == 0: raise RuntimeError("Could not save file on disk.")
- return self
- # def touch(self, mode: int = 0o666, parents: bool = True, exist_ok: bool = True) -> 'P': # pylint: disable=W0237
- # """Deprecated: rely on pathlib.Path.touch at call sites.
- # Behavior was:
- # - if parents: ensure parent directories exist
- # - then call Path.touch(mode=mode, exist_ok=exist_ok)
- # - return self
- # Replace usages with:
- # p.parent.mkdir(parents=True, exist_ok=True); p.touch(mode=..., exist_ok=...)
- # """
- # if parents: self.parent.mkdir(parents=parents, exist_ok=True)
- # super(P, self).touch(mode=mode, exist_ok=exist_ok)
- # return self

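The removed write_text/write_bytes overrides ensured the parent directory existed before writing; with them gone, call sites presumably fall back to the two-step pathlib idiom (a sketch, not taken from this package; the target path is hypothetical):

    from pathlib import Path

    p = Path("~/tmp_results/notes/example.txt").expanduser()  # hypothetical target
    p.parent.mkdir(parents=True, exist_ok=True)  # what the removed override did implicitly
    p.write_text("hello", encoding="utf-8")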
- def symlink_to(self, target: PLike, verbose: bool = True, overwrite: bool = False, orig: bool = False, strict: bool = True): # pylint: disable=W0237
+ def symlink_to(self, target: PLike, verbose: bool = True, overwrite: bool = False, orig: bool = False, strict: bool = True): # type: ignore[override] # pylint: disable=W0237
  self.parent.mkdir(parents=True, exist_ok=True)
- target_obj = P(target).expanduser().resolve()
- if strict: assert target_obj.exists(), f"Target path `{target}` (aka `{target_obj}`) doesn't exist. This will create a broken link."
- if overwrite and (self.is_symlink() or self.exists()): self.delete(sure=True, verbose=verbose)
- from platform import system
- from crocodile.meta import Terminal
+ target_obj = PathExtended(target).expanduser().resolve()
+ if strict:
+ assert target_obj.exists(), f"Target path `{target}` (aka `{target_obj}`) doesn't exist. This will create a broken link."
+ if overwrite and (self.is_symlink() or self.exists()):
+ self.delete(sure=True, verbose=verbose)
+ from machineconfig.utils.terminal import Terminal
+
  if system() == "Windows" and not Terminal.is_user_admin(): # you cannot create symlink without priviliages.
- Terminal.run_as_admin(file=sys.executable, params=f" -c \"from pathlib import Path; Path(r'{self.expanduser()}').symlink_to(r'{str(target_obj)}')\"", wait=True)
- else: super(P, self.expanduser()).symlink_to(str(target_obj))
- return self._return(target_obj, operation='Whack', inplace=False, orig=orig, verbose=verbose, msg=f"LINKED {repr(self)} ➡️ {repr(target_obj)}")
+ import win32com.shell.shell
+
+ _proce_info = win32com.shell.shell.ShellExecuteEx(lpVerb="runas", lpFile=sys.executable, lpParameters=f" -c \"from pathlib import Path; Path(r'{self.expanduser()}').symlink_to(r'{str(target_obj)}')\"")
+ # TODO update PATH for this to take effect immediately.
+ time.sleep(1) # wait=True equivalent
+ else:
+ super(PathExtended, self.expanduser()).symlink_to(str(target_obj))
+ return self._return(target_obj, operation="Whack", inplace=False, orig=orig, verbose=verbose, msg=f"LINKED {repr(self)} ➡️ {repr(target_obj)}")
+
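The Windows branch above shells out to an elevated Python whose one-liner reduces to plain pathlib; a sketch of the non-elevated path (link and target locations are hypothetical):

    from pathlib import Path

    link = Path("~/bin/mytool").expanduser()                       # hypothetical link location
    target = Path("~/apps/mytool/mytool").expanduser().resolve()   # hypothetical target
    link.parent.mkdir(parents=True, exist_ok=True)
    if link.is_symlink() or link.exists():
        link.unlink()            # analogous to overwrite=True
    link.symlink_to(target)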
  def resolve(self, strict: bool = False):
- try: return super(P, self).resolve(strict=strict)
- except OSError: return self
+ try:
+ return super(PathExtended, self).resolve(strict=strict)
+ except OSError:
+ return self
+
  # ======================================== Folder management =======================================
- def search(self, pattern: str = '*', r: bool = False, files: bool = True, folders: bool = True, compressed: bool = False, dotfiles: bool = False, filters_total: Optional[list[Callable[[Any], bool]]] = None, not_in: Optional[list[str]] = None,
- exts: Optional[list[str]] = None, win_order: bool = False) -> List['P']:
+ def search(
+ self,
+ pattern: str = "*",
+ r: bool = False,
+ files: bool = True,
+ folders: bool = True,
+ compressed: bool = False,
+ dotfiles: bool = False,
+ filters_total: Optional[list[Callable[[Any], bool]]] = None,
+ not_in: Optional[list[str]] = None,
+ exts: Optional[list[str]] = None,
+ win_order: bool = False,
+ ) -> list["PathExtended"]:
  if isinstance(not_in, list):
  filters_notin = [lambda x: all([str(a_not_in) not in str(x) for a_not_in in not_in])] # type: ignore
- else: filters_notin = []
+ else:
+ filters_notin = []
  if isinstance(exts, list):
  filters_extension = [lambda x: any([ext in x.name for ext in exts])] # type: ignore
- else: filters_extension = []
+ else:
+ filters_extension = []
  filters_total = (filters_total or []) + filters_notin + filters_extension
- if not files: filters_total.append(lambda x: x.is_dir())
- if not folders: filters_total.append(lambda x: x.is_file())
+ if not files:
+ filters_total.append(lambda x: x.is_dir())
+ if not folders:
+ filters_total.append(lambda x: x.is_file())
  slf = self.expanduser().resolve()
  if ".zip" in str(slf) and compressed: # the root (self) is itself a zip archive (as opposed to some search results are zip archives)
  import zipfile
  import fnmatch
+
  root = slf.as_zip_path()
  if not r:
- raw = List(root.iterdir())
+ raw = list(root.iterdir())
  else:
- raw = List(zipfile.ZipFile(str(slf)).namelist()).apply(root.joinpath)
- res1 = raw.filter(lambda zip_path: fnmatch.fnmatch(zip_path.at, pattern)) # type: ignore
- return res1.filter(lambda x: (folders or x.is_file()) and (files or x.is_dir())) # type: ignore
- elif dotfiles: raw = slf.glob(pattern) if not r else self.rglob(pattern)
+ raw = [root.joinpath(item) for item in zipfile.ZipFile(str(slf)).namelist()]
+ # res1 = raw.filter(lambda zip_path: fnmatch.fnmatch(zip_path.at, pattern)) # type: ignore
+ res1 = [item for item in raw if fnmatch.fnmatch(item.at, pattern)]
+ # return res1.filter(lambda x: (folders or x.is_file()) and (files or x.is_dir()))
+ return [item for item in res1 if (folders or item.is_file()) and (files or item.is_dir())] # type: ignore
+ elif dotfiles:
+ raw = slf.glob(pattern) if not r else self.rglob(pattern)
  else:
  from glob import glob
+
  if r:
  raw = glob(str(slf / "**" / pattern), recursive=r)
  else:
  raw = glob(str(slf.joinpath(pattern))) # glob ignroes dot and hidden files
  if ".zip" not in str(slf) and compressed:
- filters_notin = [P(comp_file).search(pattern=pattern, r=r, files=files, folders=folders, compressed=True, dotfiles=dotfiles, filters_total=filters_total, not_in=not_in, win_order=win_order) for comp_file in self.search("*.zip", r=r)]
- haha = List(filters_notin).reduce(func=lambda x, y: x + y)
+ filters_notin = [PathExtended(comp_file).search(pattern=pattern, r=r, files=files, folders=folders, compressed=True, dotfiles=dotfiles, filters_total=filters_total, not_in=not_in, win_order=win_order) for comp_file in self.search("*.zip", r=r)]
+ from functools import reduce
+
+ # haha = List(filters_notin).reduce(func=lambda x, y: x + y)
+ haha = reduce(lambda x, y: x + y, filters_notin) if len(filters_notin) else []
  raw = raw + haha # type: ignore
  processed = []
  for item in raw:
- item_ = P(item)
+ item_ = PathExtended(item)
  if all([afilter(item_) for afilter in filters_total]):
  processed.append(item_)
- if not win_order: return List(processed)
+ if not win_order:
+ return list(processed)
  import re
- processed.sort(key=lambda x: [int(k) if k.isdigit() else k for k in re.split('([0-9]+)', string=x.stem)])
- return List(processed)
-
- # def create(self, parents: bool = True, exist_ok: bool = True, parents_only: bool = False) -> 'P':
- # """Deprecated. Use Path.mkdir directly at the call site:
- # - When creating a directory: self.mkdir(parents=True, exist_ok=True)
- # - When ensuring parent exists: self.parent.mkdir(parents=True, exist_ok=True)
- # This method used to:
- # target_path = self.parent if parents_only else self
- # target_path.mkdir(parents=parents, exist_ok=exist_ok)
- # return self
- # """
- # target_path = self.parent if parents_only else self
- # target_path.mkdir(parents=parents, exist_ok=exist_ok)
- # return self
+
+ processed.sort(key=lambda x: [int(k) if k.isdigit() else k for k in re.split("([0-9]+)", string=x.stem)])
+ return list(processed)

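The win_order branch sorts results with a natural, Windows-Explorer-like key built from re.split; the key in isolation (stdlib only, with made-up file stems):

    import re

    def natural_key(stem: str):
        # "file10" sorts after "file2" because digit runs compare numerically
        return [int(k) if k.isdigit() else k for k in re.split("([0-9]+)", stem)]

    names = ["file10", "file2", "file1"]
    print(sorted(names, key=natural_key))  # ['file1', 'file2', 'file10']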
  @staticmethod
- def tmpdir(prefix: str = "") -> 'P':
- return P.tmp(folder=rf"tmp_dirs/{prefix + ('_' if prefix != '' else '') + randstr()}")
+ def tmpdir(prefix: str = "") -> "PathExtended":
+ return PathExtended.tmp(folder=rf"tmp_dirs/{prefix + ('_' if prefix != '' else '') + randstr()}")
+
  @staticmethod
- def tmpfile(name: Optional[str]= None, suffix: str = "", folder: OPLike = None, tstamp: bool = False, noun: bool = False) -> 'P':
+ def tmpfile(name: Optional[str] = None, suffix: str = "", folder: OPLike = None, tstamp: bool = False, noun: bool = False) -> "PathExtended":
  name_concrete = name or randstr(noun=noun)
- return P.tmp(file=name_concrete + "_" + randstr() + (("_" + str(timestamp())) if tstamp else "") + suffix, folder=folder or "tmp_files")
+ return PathExtended.tmp(file=name_concrete + "_" + randstr() + (("_" + str(timestamp())) if tstamp else "") + suffix, folder=folder or "tmp_files")
+
  @staticmethod
- def tmp(folder: OPLike = None, file: Optional[str] = None, root: str = "~/tmp_results") -> 'P':
- base = P(root).expanduser().joinpath(folder or "").joinpath(file or "")
+ def tmp(folder: OPLike = None, file: Optional[str] = None, root: str = "~/tmp_results") -> "PathExtended":
+ base = PathExtended(root).expanduser().joinpath(folder or "").joinpath(file or "")
  target_path = base.parent if file else base
  target_path.mkdir(parents=True, exist_ok=True)
  return base
+
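PathExtended.tmp simply builds a path under ~/tmp_results and creates the directory part; the equivalent stdlib steps, with hypothetical folder and file names:

    from pathlib import Path

    base = Path("~/tmp_results").expanduser() / "tmp_dirs" / "session_abc123"  # hypothetical names
    base.mkdir(parents=True, exist_ok=True)  # tmp() mkdirs the folder (or the file's parent)
    print(base)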
  # ====================================== Compression & Encryption ===========================================
- def zip(self, path: OPLike = None, folder: OPLike = None, name: Optional[str]= None, arcname: Optional[str] = None, inplace: bool = False, verbose: bool = True,
- content: bool = False, orig: bool = False, use_7z: bool = False, pwd: Optional[str] = None, mode: FILE_MODE = 'w', **kwargs: Any) -> 'P':
+ def zip(
+ self,
+ path: OPLike = None,
+ folder: OPLike = None,
+ name: Optional[str] = None,
+ arcname: Optional[str] = None,
+ inplace: bool = False,
+ verbose: bool = True,
+ content: bool = False,
+ orig: bool = False,
+ pwd: Optional[str] = None,
+ mode: FILE_MODE = "w",
+ **kwargs: Any,
+ ) -> "PathExtended":
  path_resolved, slf = self._resolve_path(folder, name, path, self.name).expanduser().resolve(), self.expanduser().resolve()
- if use_7z: # benefits over regular zip and encrypt: can handle very large files with low memory footprint
- path_resolved = path_resolved + '.7z' if not path_resolved.suffix == '.7z' else path_resolved
- with install_n_import("py7zr").SevenZipFile(file=path_resolved, mode=mode, password=pwd) as archive: archive.writeall(path=str(slf), arcname=None)
+ # if use_7z: # benefits over regular zip and encrypt: can handle very large files with low memory footprint
+ # path_resolved = path_resolved + '.7z' if not path_resolved.suffix == '.7z' else path_resolved
+ # with install_n_import("py7zr").SevenZipFile(file=path_resolved, mode=mode, password=pwd) as archive: archive.writeall(path=str(slf), arcname=None)
+ arcname_obj = PathExtended(arcname or slf.name)
+ if arcname_obj.name != slf.name:
+ arcname_obj /= slf.name # arcname has to start from somewhere and end with filename
+ if slf.is_file():
+ path_resolved = Compression.zip_file(ip_path=str(slf), op_path=str(path_resolved + ".zip" if path_resolved.suffix != ".zip" else path_resolved), arcname=str(arcname_obj), mode=mode, **kwargs)
  else:
- arcname_obj = P(arcname or slf.name)
- if arcname_obj.name != slf.name: arcname_obj /= slf.name # arcname has to start from somewhere and end with filename
- if slf.is_file():
- path_resolved = Compression.zip_file(ip_path=str(slf), op_path=str(path_resolved + ".zip" if path_resolved.suffix != ".zip" else path_resolved), arcname=str(arcname_obj), mode=mode, **kwargs)
+ if content:
+ root_dir, base_dir = slf, "."
  else:
- if content: root_dir, base_dir = slf, "."
- else: root_dir, base_dir = slf.split(at=str(arcname_obj[0]), sep=1)[0], str(arcname_obj)
- path_resolved = P(Compression.compress_folder(root_dir=str(root_dir), op_path=str(path_resolved), base_dir=base_dir, fmt='zip', **kwargs)) # TODO: see if this supports mode
+ root_dir, base_dir = slf.split(at=str(arcname_obj[0]), sep=1)[0], str(arcname_obj)
+ path_resolved = PathExtended(Compression.compress_folder(root_dir=str(root_dir), op_path=str(path_resolved), base_dir=base_dir, fmt="zip", **kwargs)) # TODO: see if this supports mode
  return self._return(path_resolved, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"ZIPPED {repr(slf)} ==> {repr(path)}")
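In the folder branch, the content flag decides whether the archive root is the folder's own contents ('.') or a parent-relative base_dir that keeps the folder name inside the archive. A stdlib sketch of that distinction using shutil.make_archive (illustration only, not the package's Compression helper; the source folder is hypothetical):

    import shutil
    from pathlib import Path

    src = Path("~/projects/demo").expanduser()  # hypothetical folder to archive

    # content=True: archive the folder's contents, so extraction spills files directly
    shutil.make_archive(str(src.parent / "demo_content"), "zip", root_dir=str(src), base_dir=".")

    # content=False: keep the top-level folder name inside the archive
    shutil.make_archive(str(src.parent / "demo_wrapped"), "zip", root_dir=str(src.parent), base_dir=src.name)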
- def unzip(self, folder: OPLike = None, path: OPLike = None, name: Optional[str]= None, verbose: bool = True, content: bool = False, inplace: bool = False, overwrite: bool = False, orig: bool = False,
- pwd: Optional[str] = None, tmp: bool = False, pattern: Optional[str] = None, merge: bool = False) -> 'P':
+
+ def unzip(
+ self,
+ folder: OPLike = None,
+ path: OPLike = None,
+ name: Optional[str] = None,
+ verbose: bool = True,
+ content: bool = False,
+ inplace: bool = False,
+ overwrite: bool = False,
+ orig: bool = False,
+ pwd: Optional[str] = None,
+ tmp: bool = False,
+ pattern: Optional[str] = None,
+ merge: bool = False,
+ ) -> "PathExtended":
  assert merge is False, "I have not implemented this yet"
  assert path is None, "I have not implemented this yet"
- if tmp: return self.unzip(folder=P.tmp().joinpath("tmp_unzips").joinpath(randstr()), content=True).joinpath(self.stem)
+ if tmp:
+ return self.unzip(folder=PathExtended.tmp().joinpath("tmp_unzips").joinpath(randstr()), content=True).joinpath(self.stem)
  slf = zipfile__ = self.expanduser().resolve()
  if any(ztype in str(slf.parent) for ztype in (".zip", ".7z")): # path include a zip archive in the middle.
  tmp__ = [item for item in (".zip", ".7z", "") if item in str(slf)]
  ztype = tmp__[0]
- if ztype == "": return slf
- zipfile__, name__ = slf.split(at=str(List(slf.parts).filter(lambda x: ztype in x)[0]), sep=-1)
+ if ztype == "":
+ return slf
+ # zipfile__, name__ = slf.split(at=str(List(slf.parts).filter(lambda x: ztype in x)[0]), sep=-1)
+ zipfile__, name__ = slf.split(at=str(next(item for item in slf.parts if ztype in item)), sep=-1)
  name = str(name__)
- folder = (zipfile__.parent / zipfile__.stem) if folder is None else P(folder).expanduser().absolute().resolve().joinpath(zipfile__.stem)
+ folder = (zipfile__.parent / zipfile__.stem) if folder is None else PathExtended(folder).expanduser().absolute().resolve().joinpath(zipfile__.stem)
+ assert isinstance(folder, PathExtended), "folder should be a P object at this point"
  folder = folder if not content else folder.parent
  if slf.suffix == ".7z":
- if overwrite: P(folder).delete(sure=True)
- result = folder
- import py7zr
- with py7zr.SevenZipFile(file=slf, mode='r', password=pwd) as archive:
- if pattern is not None:
- import re
- pat = re.compile(pattern)
- archive.extract(path=folder, targets=[f for f in archive.getnames() if pat.match(f)])
- else: archive.extractall(path=folder)
+ raise NotImplementedError("I have not implemented this yet")
+ # if overwrite: P(folder).delete(sure=True)
+ # result = folder
+ # import py7zr
+ # with py7zr.SevenZipFile(file=slf, mode='r', password=pwd) as archive:
+ # if pattern is not None:
+ # import re
+ # pat = re.compile(pattern)
+ # archive.extract(path=folder, targets=[f for f in archive.getnames() if pat.match(f)])
+ # else: archive.extractall(path=folder)
  else:
  if overwrite:
- if not content: P(folder).joinpath(name or "").delete(sure=True, verbose=True) # deletes a specific file / folder that has the same name as the zip file without extension.
+ if not content:
+ PathExtended(folder).joinpath(name or "").delete(sure=True, verbose=True) # deletes a specific file / folder that has the same name as the zip file without extension.
  else:
  import zipfile
- List([x for x in zipfile.ZipFile(str(self)).namelist() if "/" not in x or (len(x.split('/')) == 2 and x.endswith("/"))]).apply(lambda item: P(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True))
- result = Compression.unzip(str(zipfile__), str(folder), None if name is None else P(name).as_posix())
+
+ mylist = [x for x in zipfile.ZipFile(str(self)).namelist() if "/" not in x or (len(x.split("/")) == 2 and x.endswith("/"))]
+ # List().apply(lambda item: P(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True))
+ for item in mylist:
+ PathExtended(folder).joinpath(name or "", item.replace("/", "")).delete(sure=True, verbose=True)
+ result = Compression.unzip(str(zipfile__), str(folder), None if name is None else PathExtended(name).as_posix())
  assert isinstance(result, Path)
- return self._return(P(result), inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNZIPPED {repr(zipfile__)} ==> {repr(result)}")
- def untar(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> 'P':
+ return self._return(PathExtended(result), inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNZIPPED {repr(zipfile__)} ==> {repr(result)}")
+
+ def untar(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
  op_path = self._resolve_path(folder, name, path, self.name.replace(".tar", "")).expanduser().resolve()
  Compression.untar(str(self.expanduser().resolve()), op_path=str(op_path))
  return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNTARRED {repr(self)} ==> {repr(op_path)}")
- def ungz(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> 'P':
+
+ def ungz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
  op_path = self._resolve_path(folder, name, path, self.name.replace(".gz", "")).expanduser().resolve()
  Compression.ungz(str(self.expanduser().resolve()), op_path=str(op_path))
  return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNGZED {repr(self)} ==> {repr(op_path)}")
- def unxz(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> 'P':
+
+ def unxz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
  op_path = self._resolve_path(folder, name, path, self.name.replace(".xz", "")).expanduser().resolve()
  Compression.unxz(str(self.expanduser().resolve()), op_path=str(op_path))
  return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNXZED {repr(self)} ==> {repr(op_path)}")
- def unbz(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> 'P':
+
+ def unbz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
  op_path = self._resolve_path(folder=folder, name=name, path=path, default_name=self.name.replace(".bz", "").replace(".tbz", ".tar")).expanduser().resolve()
  Compression.unbz(str(self.expanduser().resolve()), op_path=str(op_path))
  return self._return(op_path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"UNBZED {repr(self)} ==> {repr(op_path)}")
- def decompress(self, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> 'P':
+
+ def decompress(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
  if ".tar.gz" in str(self) or ".tgz" in str(self):
  # res = self.ungz_untar(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
  return self.ungz(name=f"tmp_{randstr()}.tar", inplace=inplace).untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose) # this works for .tgz suffix as well as .tar.gz
- elif ".gz" in str(self): res = self.ungz(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
+ elif ".gz" in str(self):
+ res = self.ungz(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
  elif ".tar.bz" in str(self) or "tbz" in str(self):
  res = self.unbz(name=f"tmp_{randstr()}.tar", inplace=inplace)
  return res.untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose)
  elif ".tar.xz" in str(self):
  # res = self.unxz_untar(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
  res = self.unxz(inplace=inplace).untar(folder=folder, name=name, path=path, inplace=True, orig=orig, verbose=verbose)
- elif ".zip" in str(self): res = self.unzip(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
- else: res = self
+ elif ".zip" in str(self):
+ res = self.unzip(folder=folder, path=path, name=name, inplace=inplace, verbose=verbose, orig=orig)
+ else:
+ res = self
  return res
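decompress dispatches purely on substrings of the filename, checking compound suffixes before plain ones; a compact stdlib sketch of the same dispatch for reference (suffix handling only, no extraction):

    def archive_kind(name: str) -> str:
        # mirrors the ordering above: compound suffixes are checked before plain ones
        if ".tar.gz" in name or ".tgz" in name:
            return "gzip-compressed tar"
        if ".gz" in name:
            return "gzip"
        if ".tar.bz" in name or "tbz" in name:
            return "bzip2-compressed tar"
        if ".tar.xz" in name:
            return "xz-compressed tar"
        if ".zip" in name:
            return "zip"
        return "not an archive"

    print(archive_kind("backup.tar.gz"))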
- def encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, folder: OPLike = None, name: Optional[str]= None, path: OPLike = None,
- verbose: bool = True, suffix: str = ".enc", inplace: bool = False, orig: bool = False) -> 'P':
+
+ def encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False, orig: bool = False) -> "PathExtended":
  # see: https://stackoverflow.com/questions/42568262/how-to-encrypt-text-with-a-password-in-python & https://stackoverflow.com/questions/2490334/simple-way-to-encode-a-string-according-to-a-password"""
  slf = self.expanduser().resolve()
  path = self._resolve_path(folder, name, path, slf.name + suffix)
  assert slf.is_file(), f"Cannot encrypt a directory. You might want to try `zip_n_encrypt`. {self}"
  path.write_bytes(encrypt(msg=slf.read_bytes(), key=key, pwd=pwd))
  return self._return(path, inplace=inplace, operation="delete", orig=orig, verbose=verbose, msg=f"🔒🔑 ENCRYPTED: {repr(slf)} ==> {repr(path)}.")
- def decrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, path: OPLike = None, folder: OPLike = None, name: Optional[str]= None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False) -> 'P':
+
+ def decrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, path: OPLike = None, folder: OPLike = None, name: Optional[str] = None, verbose: bool = True, suffix: str = ".enc", inplace: bool = False) -> "PathExtended":
  slf = self.expanduser().resolve()
  path = self._resolve_path(folder=folder, name=name, path=path, default_name=slf.name.replace(suffix, "") if suffix in slf.name else "decrypted_" + slf.name)
- path.write_bytes(data=decrypt(token=slf.read_bytes(), key=key, pwd=pwd))
+ path.write_bytes(decrypt(token=slf.read_bytes(), key=key, pwd=pwd))
  return self._return(path, operation="delete", verbose=verbose, msg=f"🔓🔑 DECRYPTED: {repr(slf)} ==> {repr(path)}.", inplace=inplace)
- def zip_n_encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False, content: bool = False) -> 'P':
+
+ def zip_n_encrypt(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False, content: bool = False) -> "PathExtended":
  return self.zip(inplace=inplace, verbose=verbose, content=content).encrypt(key=key, pwd=pwd, verbose=verbose, inplace=True) if not orig else self
- def decrypt_n_unzip(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False) -> 'P': return self.decrypt(key=key, pwd=pwd, verbose=verbose, inplace=inplace).unzip(folder=None, inplace=True, content=False) if not orig else self
- def _resolve_path(self, folder: OPLike, name: Optional[str], path: OPLike, default_name: str, rel2it: bool = False) -> 'P':
+
+ def decrypt_n_unzip(self, key: Optional[bytes] = None, pwd: Optional[str] = None, inplace: bool = False, verbose: bool = True, orig: bool = False) -> "PathExtended":
+ return self.decrypt(key=key, pwd=pwd, verbose=verbose, inplace=inplace).unzip(folder=None, inplace=True, content=False) if not orig else self
+
+ def _resolve_path(self, folder: OPLike, name: Optional[str], path: OPLike, default_name: str, rel2it: bool = False) -> "PathExtended":
  """:param rel2it: `folder` or `path` are relative to `self` as opposed to cwd. This is used when resolving '../dir'"""
  if path is not None:
- path = P(self.joinpath(path).resolve() if rel2it else path).expanduser().resolve()
+ path = PathExtended(self.joinpath(path).resolve() if rel2it else path).expanduser().resolve()
  assert folder is None and name is None, "If `path` is passed, `folder` and `name` cannot be passed."
+ assert isinstance(path, PathExtended), "path should be a P object at this point"
  assert not path.is_dir(), f"`path` passed is a directory! it must not be that. If this is meant, pass it with `folder` kwarg. `{path}`"
  return path
  name, folder = (default_name if name is None else str(name)), (self.parent if folder is None else folder) # good for edge cases of path with single part. # means same directory, just different name
- return P(self.joinpath(folder).resolve() if rel2it else folder).expanduser().resolve() / name
+ return PathExtended(self.joinpath(folder).resolve() if rel2it else folder).expanduser().resolve() / name

- def get_remote_path(self, root: Optional[str], os_specific: bool = False, rel2home: bool = True, strict: bool = True, obfuscate: bool = False) -> 'P':
+ def get_remote_path(self, root: Optional[str], os_specific: bool = False, rel2home: bool = True, strict: bool = True) -> "PathExtended":
  import platform
- tmp1: str = (platform.system().lower() if os_specific else 'generic_os')
- if not rel2home: path = self
+
+ tmp1: str = platform.system().lower() if os_specific else "generic_os"
+ if not rel2home:
+ path = self
  else:
- try: path = self.rel2home()
+ try:
+ path = self.rel2home()
  except ValueError as ve:
- if strict: raise ve
+ if strict:
+ raise ve
  path = self
- if obfuscate:
- from crocodile.msc.obfuscater import obfuscate as obfuscate_func
- name = obfuscate_func(seed=P.home().joinpath('dotfiles/creds/data/obfuscation_seed').read_text().rstrip(), data=path.name)
- path = path.with_name(name=name)
+ # if obfuscate:
+ # msc.obfuscater import obfuscate as obfuscate_func
+ # name = obfuscate_func(seed=P.home().joinpath('dotfiles/creds/data/obfuscation_seed').read_text(encoding="utf-8").rstrip(), data=path.name)
+ # path = path.with_name(name=name)
  if isinstance(root, str): # the following is to avoid the confusing behaviour of A.joinpath(B) if B is absolute.
  part1 = path.parts[0]
- if part1 == "/": sanitized_path = path[1:].as_posix()
- else: sanitized_path = path.as_posix()
- return P(root + "/" + tmp1 + "/" + sanitized_path)
+ if part1 == "/":
+ sanitized_path = path[1:].as_posix()
+ else:
+ sanitized_path = path.as_posix()
+ return PathExtended(root + "/" + tmp1 + "/" + sanitized_path)
  return tmp1 / path
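With the obfuscation hook commented out, the remote path is just root plus an OS tag plus the home-relative posix path; a stdlib sketch of that mapping (the example file is hypothetical, 'myhome' is the default root shown above, and the tag becomes 'generic_os' when os_specific=False):

    import platform
    from pathlib import Path

    local = Path("~/dotfiles/config.toml").expanduser()  # hypothetical file
    rel = local.relative_to(Path.home()).as_posix()      # roughly what rel2home() amounts to
    os_tag = platform.system().lower()                   # e.g. 'linux' or 'windows'
    remote = f"myhome/{os_tag}/{rel}"                    # root defaults to 'myhome'
    print(remote)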
- def to_cloud(self, cloud: str, remotepath: OPLike = None, zip: bool = False,encrypt: bool = False, # pylint: disable=W0621, W0622
- key: Optional[bytes] = None, pwd: Optional[str] = None, rel2home: bool = False, strict: bool = True,
- obfuscate: bool = False,
- share: bool = False, verbose: bool = True, os_specific: bool = False, transfers: int = 10, root: Optional[str] = "myhome") -> 'P':
+
+ def to_cloud(
+ self,
+ cloud: str,
+ remotepath: OPLike = None,
+ zip: bool = False,
+ encrypt: bool = False, # pylint: disable=W0621, W0622
+ key: Optional[bytes] = None,
+ pwd: Optional[str] = None,
+ rel2home: bool = False,
+ strict: bool = True,
+ # obfuscate: bool = False,
+ share: bool = False,
+ verbose: bool = True,
+ os_specific: bool = False,
+ transfers: int = 10,
+ root: Optional[str] = "myhome",
+ ) -> "PathExtended":
  to_del = []
  localpath = self.expanduser().absolute() if not self.exists() else self
  if zip:
@@ -539,18 +831,23 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
  localpath = localpath.encrypt(key=key, pwd=pwd, inplace=False)
  to_del.append(localpath)
  if remotepath is None:
- rp = localpath.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict, obfuscate=obfuscate) # if rel2home else (P(root) / localpath if root is not None else localpath)
- else: rp = P(remotepath)
- rclone_cmd = f"""rclone copyto '{localpath.as_posix()}' '{cloud}:{rp.as_posix()}' {'--progress' if verbose else ''} --transfers={transfers}"""
- from crocodile.meta import Terminal
- if verbose: print(f"{'⬆️'*5} UPLOADING with `{rclone_cmd}`")
+ rp = localpath.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict) # if rel2home else (P(root) / localpath if root is not None else localpath)
+ else:
+ rp = PathExtended(remotepath)
+ rclone_cmd = f"""rclone copyto '{localpath.as_posix()}' '{cloud}:{rp.as_posix()}' {"--progress" if verbose else ""} --transfers={transfers}"""
+ from machineconfig.utils.terminal import Terminal
+
+ if verbose:
+ print(f"{'⬆️' * 5} UPLOADING with `{rclone_cmd}`")
  shell_to_use = "powershell" if sys.platform == "win32" else "bash"
  res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use).capture()
  _ = [item.delete(sure=True) for item in to_del]
  assert res.is_successful(strict_err=False, strict_returcode=True), res.print(capture=False, desc="Cloud Storage Operation")
- if verbose: print(f"{'⬆️'*5} UPLOAD COMPLETED.")
+ if verbose:
+ print(f"{'⬆️' * 5} UPLOAD COMPLETED.")
  if share:
- if verbose: print("🔗 SHARING FILE")
+ if verbose:
+ print("🔗 SHARING FILE")
  shell_to_use = "powershell" if sys.platform == "win32" else "bash"
  res = Terminal().run(f"""rclone link '{cloud}:{rp.as_posix()}'""", shell=shell_to_use).capture()
  tmp = res.op2path(strict_err=False, strict_returncode=False)
@@ -559,31 +856,53 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
  raise RuntimeError(f"💥 Could not get link for {self}.")
  else:
  res.print_if_unsuccessful(desc="Cloud Storage Operation", strict_err=True, strict_returncode=True)
- return tmp
+ link_p: "PathExtended" = PathExtended(str(tmp))
+ return link_p
  return self
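Stripped of the Terminal wrapper, the upload is a single rclone invocation; a sketch of the equivalent subprocess call (remote name and paths are hypothetical, and rclone must already be installed and configured):

    import subprocess

    local = "/home/alice/dotfiles/config.toml"            # hypothetical local file
    remote = "gdrive:myhome/linux/dotfiles/config.toml"   # hypothetical rclone remote + path
    cmd = ["rclone", "copyto", local, remote, "--progress", "--transfers=10"]
    subprocess.run(cmd, check=True)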
- def from_cloud(self, cloud: str, remotepath: OPLike = None, decrypt: bool = False, unzip: bool = False, # type: ignore # pylint: disable=W0621
- key: Optional[bytes] = None, pwd: Optional[str] = None, rel2home: bool = False, os_specific: bool = False, strict: bool = True,
- transfers: int = 10, root: Optional[str] = "myhome", verbose: bool = True, overwrite: bool = True, merge: bool = False,):
+
+ def from_cloud(
+ self,
+ cloud: str,
+ remotepath: OPLike = None,
+ decrypt: bool = False,
+ unzip: bool = False, # type: ignore # pylint: disable=W0621
+ key: Optional[bytes] = None,
+ pwd: Optional[str] = None,
+ rel2home: bool = False,
+ os_specific: bool = False,
+ strict: bool = True,
+ transfers: int = 10,
+ root: Optional[str] = "myhome",
+ verbose: bool = True,
+ overwrite: bool = True,
+ merge: bool = False,
+ ):
  if remotepath is None:
  remotepath = self.get_remote_path(root=root, os_specific=os_specific, rel2home=rel2home, strict=strict)
  remotepath += ".zip" if unzip else ""
  remotepath += ".enc" if decrypt else ""
- else: remotepath = P(remotepath)
+ else:
+ remotepath = PathExtended(remotepath)
  localpath = self.expanduser().absolute()
  localpath += ".zip" if unzip else ""
  localpath += ".enc" if decrypt else ""
- rclone_cmd = f"""rclone copyto '{cloud}:{remotepath.as_posix()}' '{localpath.as_posix()}' {'--progress' if verbose else ''} --transfers={transfers}"""
- from crocodile.meta import Terminal
- if verbose: print(f"{'⬇️' * 5} DOWNLOADING with `{rclone_cmd}`")
+ rclone_cmd = f"""rclone copyto '{cloud}:{remotepath.as_posix()}' '{localpath.as_posix()}' {"--progress" if verbose else ""} --transfers={transfers}"""
+ from machineconfig.utils.terminal import Terminal
+
+ if verbose:
+ print(f"{'⬇️' * 5} DOWNLOADING with `{rclone_cmd}`")
  shell_to_use = "powershell" if sys.platform == "win32" else "bash"
  res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use)
  success = res.is_successful(strict_err=False, strict_returcode=True)
  if not success:
  res.print(capture=False, desc="Cloud Storage Operation")
  return None
- if decrypt: localpath = localpath.decrypt(key=key, pwd=pwd, inplace=True)
- if unzip: localpath = localpath.unzip(inplace=True, verbose=True, overwrite=overwrite, content=True, merge=merge)
+ if decrypt:
+ localpath = localpath.decrypt(key=key, pwd=pwd, inplace=True)
+ if unzip:
+ localpath = localpath.unzip(inplace=True, verbose=True, overwrite=overwrite, content=True, merge=merge)
  return localpath
+
  def sync_to_cloud(self, cloud: str, sync_up: bool = False, sync_down: bool = False, os_specific: bool = False, rel2home: bool = True, transfers: int = 10, delete: bool = False, root: Optional[str] = "myhome", verbose: bool = True):
  tmp_path_obj = self.expanduser().absolute()
  tmp_path_obj.parent.mkdir(parents=True, exist_ok=True)
@@ -596,9 +915,11 @@ class P(type(Path()), Path): # type: ignore # pylint: disable=E0241
  print(f"SYNCING 🔄️ {source} {'>' * 15} {target}`")
  rclone_cmd = f"""rclone sync '{source}' '{target}' """
  rclone_cmd += f" --progress --transfers={transfers} --verbose"
- rclone_cmd += (" --delete-during" if delete else "")
- from crocodile.meta import Terminal
- if verbose : print(rclone_cmd)
+ rclone_cmd += " --delete-during" if delete else ""
+ from machineconfig.utils.terminal import Terminal
+
+ if verbose:
+ print(rclone_cmd)
  shell_to_use = "powershell" if sys.platform == "win32" else "bash"
  res = Terminal(stdout=None if verbose else subprocess.PIPE).run(rclone_cmd, shell=shell_to_use)
  success = res.is_successful(strict_err=False, strict_returcode=True)
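The sync path builds an `rclone sync` command the same way; a minimal sketch of the command it ends up running (paths and remote name are hypothetical, and --delete-during is only appended when delete=True):

    import subprocess

    source = "/home/alice/notes"               # hypothetical local folder
    target = "gdrive:myhome/generic_os/notes"  # hypothetical remote folder
    cmd = ["rclone", "sync", source, target, "--progress", "--transfers=10", "--verbose", "--delete-during"]
    subprocess.run(cmd, check=True)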