machineconfig 7.57-py3-none-any.whl → 7.79-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of machineconfig might be problematic.
- machineconfig/cluster/sessions_managers/utils/maker.py +21 -9
- machineconfig/jobs/installer/custom/boxes.py +2 -2
- machineconfig/jobs/installer/custom/hx.py +3 -3
- machineconfig/jobs/installer/custom_dev/cloudflare_warp_cli.py +23 -0
- machineconfig/jobs/installer/custom_dev/dubdb_adbc.py +1 -1
- machineconfig/jobs/installer/custom_dev/nerfont_windows_helper.py +1 -1
- machineconfig/jobs/installer/custom_dev/sysabc.py +36 -28
- machineconfig/jobs/installer/custom_dev/wezterm.py +0 -4
- machineconfig/jobs/installer/installer_data.json +127 -25
- machineconfig/jobs/installer/package_groups.py +20 -13
- machineconfig/profile/create_links_export.py +2 -2
- machineconfig/scripts/__init__.py +0 -4
- machineconfig/scripts/linux/wrap_mcfg +1 -1
- machineconfig/scripts/python/agents.py +22 -17
- machineconfig/scripts/python/ai/solutions/copilot/instructions/python/dev.instructions.md +3 -0
- machineconfig/scripts/python/croshell.py +22 -17
- machineconfig/scripts/python/devops.py +3 -4
- machineconfig/scripts/python/devops_navigator.py +0 -4
- machineconfig/scripts/python/env_manager/path_manager_tui.py +1 -1
- machineconfig/scripts/python/fire_jobs.py +19 -18
- machineconfig/scripts/python/ftpx.py +36 -12
- machineconfig/scripts/python/helpers/ast_search.py +74 -0
- machineconfig/scripts/python/helpers/qr_code.py +166 -0
- machineconfig/scripts/python/helpers/repo_rag.py +325 -0
- machineconfig/scripts/python/helpers/symantic_search.py +25 -0
- machineconfig/scripts/python/helpers_cloud/cloud_copy.py +28 -21
- machineconfig/scripts/python/helpers_cloud/cloud_helpers.py +1 -1
- machineconfig/scripts/python/helpers_cloud/cloud_mount.py +19 -17
- machineconfig/scripts/python/helpers_cloud/cloud_sync.py +8 -7
- machineconfig/scripts/python/helpers_croshell/crosh.py +2 -2
- machineconfig/scripts/python/helpers_croshell/start_slidev.py +6 -7
- machineconfig/scripts/python/helpers_devops/cli_config_dotfile.py +4 -5
- machineconfig/scripts/python/helpers_devops/cli_nw.py +88 -7
- machineconfig/scripts/python/helpers_devops/cli_self.py +7 -6
- machineconfig/scripts/python/helpers_devops/cli_share_file.py +9 -9
- machineconfig/scripts/python/helpers_devops/cli_share_server.py +13 -12
- machineconfig/scripts/python/helpers_devops/cli_terminal.py +7 -6
- machineconfig/scripts/python/helpers_devops/cli_utils.py +2 -73
- machineconfig/scripts/python/helpers_devops/devops_backup_retrieve.py +4 -4
- machineconfig/scripts/python/helpers_devops/devops_status.py +7 -19
- machineconfig/scripts/python/helpers_fire_command/file_wrangler.py +2 -3
- machineconfig/scripts/python/helpers_fire_command/fire_jobs_route_helper.py +23 -13
- machineconfig/scripts/python/helpers_navigator/command_tree.py +50 -18
- machineconfig/scripts/python/helpers_repos/cloud_repo_sync.py +7 -4
- machineconfig/scripts/python/helpers_repos/count_lines_frontend.py +1 -1
- machineconfig/scripts/python/helpers_repos/entrypoint.py +2 -1
- machineconfig/scripts/python/helpers_repos/record.py +2 -1
- machineconfig/scripts/python/helpers_sessions/sessions_multiprocess.py +5 -5
- machineconfig/scripts/python/helpers_utils/download.py +152 -0
- machineconfig/scripts/python/helpers_utils/path.py +81 -31
- machineconfig/scripts/python/interactive.py +2 -2
- machineconfig/scripts/python/{machineconfig.py → mcfg_entry.py} +4 -0
- machineconfig/scripts/python/msearch.py +21 -2
- machineconfig/scripts/python/nw/address.py +132 -0
- machineconfig/scripts/python/nw/devops_add_ssh_key.py +8 -5
- machineconfig/scripts/python/nw/ssh_debug_linux.py +7 -7
- machineconfig/scripts/python/nw/ssh_debug_windows.py +4 -4
- machineconfig/scripts/python/nw/wsl_windows_transfer.py +3 -2
- machineconfig/scripts/python/sessions.py +35 -20
- machineconfig/scripts/python/terminal.py +2 -2
- machineconfig/scripts/python/utils.py +12 -10
- machineconfig/scripts/windows/mounts/mount_ssh.ps1 +1 -1
- machineconfig/settings/lf/windows/lfcd.ps1 +1 -1
- machineconfig/settings/shells/nushell/config.nu +2 -2
- machineconfig/settings/shells/nushell/env.nu +45 -6
- machineconfig/settings/shells/nushell/init.nu +282 -95
- machineconfig/settings/shells/pwsh/init.ps1 +1 -0
- machineconfig/settings/shells/zsh/init.sh +0 -7
- machineconfig/setup_linux/web_shortcuts/interactive.sh +10 -10
- machineconfig/setup_windows/uv.ps1 +8 -1
- machineconfig/setup_windows/web_shortcuts/interactive.ps1 +10 -10
- machineconfig/setup_windows/web_shortcuts/quick_init.ps1 +3 -2
- machineconfig/utils/accessories.py +7 -4
- machineconfig/utils/code.py +6 -4
- machineconfig/utils/files/headers.py +2 -2
- machineconfig/utils/installer_utils/install_from_url.py +180 -0
- machineconfig/utils/installer_utils/installer_class.py +53 -47
- machineconfig/utils/installer_utils/{installer.py → installer_cli.py} +71 -65
- machineconfig/utils/{installer.py → installer_utils/installer_runner.py} +1 -25
- machineconfig/utils/links.py +2 -2
- machineconfig/utils/meta.py +30 -16
- machineconfig/utils/options.py +4 -4
- machineconfig/utils/path_extended.py +3 -3
- machineconfig/utils/path_helper.py +33 -31
- machineconfig/utils/schemas/layouts/layout_types.py +1 -1
- machineconfig/utils/ssh.py +143 -409
- machineconfig/utils/ssh_utils/abc.py +8 -0
- machineconfig/utils/ssh_utils/copy_from_here.py +110 -0
- machineconfig/utils/ssh_utils/copy_to_here.py +302 -0
- machineconfig/utils/ssh_utils/utils.py +141 -0
- machineconfig/utils/ssh_utils/wsl.py +168 -0
- machineconfig/utils/upgrade_packages.py +2 -1
- machineconfig/utils/ve.py +11 -4
- {machineconfig-7.57.dist-info → machineconfig-7.79.dist-info}/METADATA +1 -1
- {machineconfig-7.57.dist-info → machineconfig-7.79.dist-info}/RECORD +102 -92
- {machineconfig-7.57.dist-info → machineconfig-7.79.dist-info}/entry_points.txt +2 -2
- machineconfig/jobs/installer/linux_scripts/pgsql.sh +0 -41
- machineconfig/scripts/python/explore.py +0 -49
- /machineconfig/jobs/installer/linux_scripts/{warp-cli.sh → cloudflare_warp_cli.sh} +0 -0
- /machineconfig/{settings/shells/pwsh/profile.ps1 → scripts/python/helpers_fire_command/f.py} +0 -0
- /machineconfig/scripts/{Restore-ThunderbirdProfile.ps1 → windows/mounts/Restore-ThunderbirdProfile.ps1} +0 -0
- /machineconfig/utils/installer_utils/{installer_abc.py → installer_locator_utils.py} +0 -0
- {machineconfig-7.57.dist-info → machineconfig-7.79.dist-info}/WHEEL +0 -0
- {machineconfig-7.57.dist-info → machineconfig-7.79.dist-info}/top_level.txt +0 -0
machineconfig/utils/ssh.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import Callable, Optional, Any,
+from typing import Callable, Optional, Any, cast, Union
 import os
 from pathlib import Path
 import platform
@@ -7,17 +7,25 @@ import rich.console
 from machineconfig.utils.terminal import Response
 from machineconfig.utils.accessories import pprint, randstr
 from machineconfig.utils.meta import lambda_to_python_script
-
-
-DEFAULT_PICKLE_SUBDIR = "tmp_results/tmp_scripts/ssh"
+from machineconfig.utils.ssh_utils.abc import UV_RUN_CMD, DEFAULT_PICKLE_SUBDIR
+

 class SSH:
     @staticmethod
     def from_config_file(host: str) -> "SSH":
         """Create SSH instance from SSH config file entry."""
         return SSH(host=host, username=None, hostname=None, ssh_key_path=None, password=None, port=22, enable_compression=False)
+
     def __init__(
-        self,
+        self,
+        host: Optional[str],
+        username: Optional[str],
+        hostname: Optional[str],
+        ssh_key_path: Optional[str],
+        password: Optional[str],
+        port: int,
+        enable_compression: bool,
+    ):
         self.password = password
         self.enable_compression = enable_compression

@@ -52,7 +60,9 @@ class SSH:
             else:
                 ssh_key_path = wildcard_identity_file
         except (FileNotFoundError, KeyError):
-            assert "@" in host or ":" in host,
+            assert "@" in host or ":" in host, (
+                f"Host must be in the form of `username@hostname:port` or `username@hostname` or `hostname:port`, but it is: {host}"
+            )
             if "@" in host:
                 self.username, self.hostname = host.split("@")
             else:
@@ -72,7 +82,10 @@ class SSH:
         self.ssh = paramiko.SSHClient()
         self.ssh.load_system_host_keys()
         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        pprint(
+        pprint(
+            dict(host=self.host, hostname=self.hostname, username=self.username, password="***", port=self.port, key_filename=self.ssh_key_path),
+            title="SSHing To",
+        )
         sock = paramiko.ProxyCommand(self.proxycommand) if self.proxycommand is not None else None
         try:
             if password is None:
@@ -81,11 +94,31 @@ class SSH:
             else:
                 allow_agent = False
                 look_for_keys = False
-            self.ssh.connect(
+            self.ssh.connect(
+                hostname=self.hostname,
+                username=self.username,
+                password=self.password,
+                port=self.port,
+                key_filename=self.ssh_key_path,
+                compress=self.enable_compression,
+                sock=sock,
+                allow_agent=allow_agent,
+                look_for_keys=look_for_keys,
+            ) # type: ignore
         except Exception as _err:
             rich.console.Console().print_exception()
             self.password = getpass.getpass(f"Enter password for {self.username}@{self.hostname}: ")
-            self.ssh.connect(
+            self.ssh.connect(
+                hostname=self.hostname,
+                username=self.username,
+                password=self.password,
+                port=self.port,
+                key_filename=self.ssh_key_path,
+                compress=self.enable_compression,
+                sock=sock,
+                allow_agent=False,
+                look_for_keys=False,
+            ) # type: ignore
         try:
             self.sftp: Optional[paramiko.SFTPClient] = self.ssh.open_sftp()
         except Exception as err:
@@ -100,7 +133,9 @@ class SSH:
         self.task: Optional[Any] = None

     def __enter__(self) -> "RichProgressWrapper":
-        self.progress = Progress(
+        self.progress = Progress(
+            SpinnerColumn(), TextColumn("[bold blue]{task.description}"), BarColumn(), FileSizeColumn(), TransferSpeedColumn()
+        )
         self.progress.start()
         self.task = self.progress.add_task("Transferring...", total=0)
         return self
@@ -115,71 +150,98 @@ class SSH:
         self.tqdm_wrap = RichProgressWrapper
         from machineconfig.scripts.python.helpers_utils.path import get_machine_specs
         self.local_specs: MachineSpecs = get_machine_specs()
-        resp = self.run_shell(
+        resp = self.run_shell(
+            command="""~/.local/bin/utils get-machine-specs """,
+            verbose_output=False,
+            description="Getting remote machine specs",
+            strict_stderr=False,
+            strict_return_code=False,
+        )
         json_str = resp.op
         import ast
         self.remote_specs: MachineSpecs = cast(MachineSpecs, ast.literal_eval(json_str))
         self.terminal_responses: list[Response] = []
-
+
         from rich import inspect
-
-        local_info = dict(distro=self.local_specs.get("distro"), system=self.local_specs.get("system"), home_dir=self.local_specs.get("home_dir"))
-        remote_info = dict(distro=self.remote_specs.get("distro"), system=self.remote_specs.get("system"), home_dir=self.remote_specs.get("home_dir"))
-
+
+        # local_info = dict(distro=self.local_specs.get("distro"), system=self.local_specs.get("system"), home_dir=self.local_specs.get("home_dir"))
+        # remote_info = dict(distro=self.remote_specs.get("distro"), system=self.remote_specs.get("system"), home_dir=self.remote_specs.get("home_dir"))
+
         console = rich.console.Console()
-
+
         from io import StringIO
+
         local_buffer = StringIO()
         remote_buffer = StringIO()
-
         local_console = rich.console.Console(file=local_buffer, width=40)
         remote_console = rich.console.Console(file=remote_buffer, width=40)
-
-
-
-
+        inspect(
+            type("LocalInfo", (object,), dict(self.local_specs))(), value=False, title="SSHing From", docs=False, dunder=False, sort=False, console=local_console
+        )
+        inspect(
+            type("RemoteInfo", (object,), dict(self.remote_specs))(), value=False, title="SSHing To", docs=False, dunder=False, sort=False, console=remote_console
+        )
         local_lines = local_buffer.getvalue().split("\n")
         remote_lines = remote_buffer.getvalue().split("\n")
-
         max_lines = max(len(local_lines), len(remote_lines))
         for i in range(max_lines):
             left = local_lines[i] if i < len(local_lines) else ""
             right = remote_lines[i] if i < len(remote_lines) else ""
-            console.print(f"{left:<
+            console.print(f"{left:<50} {right}")

     def __enter__(self) -> "SSH":
         return self
+
     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
         self.close()
+
     def close(self) -> None:
         if self.sftp is not None:
             self.sftp.close()
             self.sftp = None
         self.ssh.close()
+
     def restart_computer(self) -> Response:
-        return self.run_shell(
+        return self.run_shell(
+            command="Restart-Computer -Force" if self.remote_specs["system"] == "Windows" else "sudo reboot",
+            verbose_output=True,
+            description="",
+            strict_stderr=False,
+            strict_return_code=False,
+        )
+
     def send_ssh_key(self) -> Response:
         self.copy_from_here(source_path="~/.ssh/id_rsa.pub", target_rel2home=None, compress_with_zip=False, recursive=False, overwrite_existing=False)
         if self.remote_specs["system"] != "Windows":
             raise RuntimeError("send_ssh_key is only supported for Windows remote machines")
         code_url = "https://raw.githubusercontent.com/thisismygitrepo/machineconfig/refs/heads/main/src/machineconfig/setup_windows/openssh-server_add-sshkey.ps1"
         import urllib.request
+
         with urllib.request.urlopen(code_url) as response:
             code = response.read().decode("utf-8")
         return self.run_shell(command=code, verbose_output=True, description="", strict_stderr=False, strict_return_code=False)

     def get_remote_repr(self, add_machine: bool = False) -> str:
-        return f"{self.username}@{self.hostname}:{self.port}" + (
+        return f"{self.username}@{self.hostname}:{self.port}" + (
+            f" [{self.remote_specs['system']}][{self.remote_specs['distro']}]" if add_machine else ""
+        )
     def get_local_repr(self, add_machine: bool = False) -> str:
         import getpass
         return f"{getpass.getuser()}@{platform.node()}" + (f" [{platform.system()}][{self.local_specs['distro']}]" if add_machine else "")
+
     def get_ssh_conn_str(self, command: str) -> str:
-        return
+        return (
+            "ssh "
+            + (f" -i {self.ssh_key_path}" if self.ssh_key_path else "")
+            + self.get_remote_repr(add_machine=False).replace(":", " -p ")
+            + (f" -t {command} " if command != "" else " ")
+        )
+
     def __repr__(self) -> str:
         return f"local {self.get_local_repr(add_machine=True)} >>> SSH TO >>> remote {self.get_remote_repr(add_machine=True)}"

     def run_locally(self, command: str) -> Response:
-        print(f"""💻 [LOCAL EXECUTION] Running command on node: {self.local_specs[
+        print(f"""💻 [LOCAL EXECUTION] Running command on node: {self.local_specs["system"]} Command: {command}""")
         res = Response(cmd=command)
         res.output.returncode = os.system(command)
         return res
@@ -190,11 +252,13 @@ class SSH:
         if verbose_output:
             res.print()
         else:
-            res.capture().print_if_unsuccessful(
+            res.capture().print_if_unsuccessful(
+                desc=description, strict_err=strict_stderr, strict_returncode=strict_return_code, assert_success=False
+            )
         # self.terminal_responses.append(res)
         return res

-    def _run_py_prep(self, python_code: str, uv_with: Optional[list[str]], uv_project_dir: Optional[str]
+    def _run_py_prep(self, python_code: str, uv_with: Optional[list[str]], uv_project_dir: Optional[str]) -> str:
         py_path = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/runpy_{randstr()}.py")
         py_path.parent.mkdir(parents=True, exist_ok=True)
         py_path.write_text(python_code, encoding="utf-8")
@@ -210,13 +274,28 @@ class SSH:
         uv_cmd = f"""{UV_RUN_CMD} {with_clause} python {py_path.relative_to(Path.home())}"""
         return uv_cmd

-    def run_py(
-
+    def run_py(
+        self,
+        python_code: str,
+        uv_with: Optional[list[str]],
+        uv_project_dir: Optional[str],
+        description: str,
+        verbose_output: bool,
+        strict_stderr: bool,
+        strict_return_code: bool,
+    ) -> Response:
         uv_cmd = self._run_py_prep(python_code=python_code, uv_with=uv_with, uv_project_dir=uv_project_dir)
-        return self.run_shell(
+        return self.run_shell(
+            command=uv_cmd,
+            verbose_output=verbose_output,
+            description=description or f"run_py on {self.get_remote_repr(add_machine=False)}",
+            strict_stderr=strict_stderr,
+            strict_return_code=strict_return_code,
+        )

     def run_lambda_function(self, func: Callable[..., Any], import_module: bool, uv_with: Optional[list[str]], uv_project_dir: Optional[str]):
-        command = lambda_to_python_script(
+        command = lambda_to_python_script(func,
+            in_global=True, import_module=import_module)
         # turns ou that the code below for some reason runs but zellij doesn't start, looks like things are assigned to different user.
         # return self.run_py(python_code=command, uv_with=uv_with, uv_project_dir=uv_project_dir,
         #                    description=f"run_py_func {func.__name__} on {self.get_remote_repr(add_machine=False)}",
@@ -224,395 +303,50 @@ class SSH:
         uv_cmd = self._run_py_prep(python_code=command, uv_with=uv_with, uv_project_dir=uv_project_dir)
         if self.remote_specs["system"] == "Linux":
             uv_cmd_modified = f'bash -l -c "{uv_cmd}"'
-        else:
+        else:
+            uv_cmd_modified = uv_cmd
         # This works even withou the modified uv cmd:
         # from machineconfig.utils.code import run_shell_script
         # assert self.host is not None, "SSH host must be specified to run remote commands"
         # process = run_shell_script(f"ssh {self.host} -n '. ~/.profile; . ~/.bashrc; {uv_cmd}'")
         # return process
-        return self.run_shell(
+        return self.run_shell(
+            command=uv_cmd_modified,
+            verbose_output=True,
+            description=f"run_py_func {func.__name__} on {self.get_remote_repr(add_machine=False)}",
+            strict_stderr=True,
+            strict_return_code=True,
+        )

-    def
+    def simple_sftp_get(self, remote_path: str, local_path: Path) -> None:
         """Simple SFTP get without any recursion or path expansion - for internal use only."""
         if self.sftp is None:
             raise RuntimeError(f"SFTP connection not available for {self.hostname}")
         local_path.parent.mkdir(parents=True, exist_ok=True)
         self.sftp.get(remotepath=remote_path, localpath=str(local_path))

-    def
-
-
-
-
-
-
-
-
-
-
-            else:
-                directory_path.unlink()
-            directory_path.parent.mkdir(parents=True, exist_ok=True)
-            directory_path.mkdir(parents=True, exist_ok=True)
-        command = lambda_to_python_script(lmb=lambda: create_target_dir(target_rel2home=path_rel2home, overwrite=overwrite_existing), in_global=True, import_module=False)
-        tmp_py_file = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/create_target_dir_{randstr()}.py")
-        tmp_py_file.parent.mkdir(parents=True, exist_ok=True)
-        tmp_py_file.write_text(command, encoding="utf-8")
-        # self.copy_from_here(source_path=str(tmp_py_file), target_rel2home=".tmp_file.py", compress_with_zip=False, recursive=False, overwrite_existing=True)
-        assert self.sftp is not None
-        tmp_remote_path = ".tmp_pyfile.py"
-        self.sftp.put(localpath=str(tmp_py_file), remotepath=str(Path(self.remote_specs["home_dir"]).joinpath(tmp_remote_path)))
-        self.run_shell(command=f"""{UV_RUN_CMD} python {tmp_remote_path}""", verbose_output=False, description=f"Creating target dir {path_rel2home}", strict_stderr=True, strict_return_code=True)
+    def create_parent_dir_and_check_if_exists(self, path_rel2home: str, overwrite_existing: bool) -> None:
+        from machineconfig.utils.ssh_utils.utils import create_dir_and_check_if_exists
+        return create_dir_and_check_if_exists(self, path_rel2home=path_rel2home, overwrite_existing=overwrite_existing)
+
+    def check_remote_is_dir(self, source_path: Union[str, Path]) -> bool:
+        from machineconfig.utils.ssh_utils.utils import check_remote_is_dir
+        return check_remote_is_dir(self, source_path=source_path)
+
+    def expand_remote_path(self, source_path: Union[str, Path]) -> str:
+        from machineconfig.utils.ssh_utils.utils import expand_remote_path
+        return expand_remote_path(self, source_path=source_path)

     def copy_from_here(self, source_path: str, target_rel2home: Optional[str], compress_with_zip: bool, recursive: bool, overwrite_existing: bool) -> None:
-
-
-
-
-
-
-
-
-
-            raise RuntimeError(f"SSH Error: source `{source_obj}` is a directory! Set `recursive=True` for recursive sending or `compress_with_zip=True` to zip it first.")
-            file_paths_to_upload: list[Path] = [file_path for file_path in source_obj.rglob("*") if file_path.is_file()]
-            self.create_dir(path_rel2home=target_rel2home, overwrite_existing=overwrite_existing)
-            for idx, file_path in enumerate(file_paths_to_upload):
-                print(f" {idx + 1:03d}. {file_path}")
-            for file_path in file_paths_to_upload:
-                remote_file_target = Path(target_rel2home).joinpath(file_path.relative_to(source_obj))
-                self.copy_from_here(source_path=str(file_path), target_rel2home=str(remote_file_target), compress_with_zip=False, recursive=False, overwrite_existing=overwrite_existing)
-            return None
-        if compress_with_zip:
-            print("🗜️ ZIPPING ...")
-            import shutil
-            zip_path = Path(str(source_obj) + "_archive")
-            if source_obj.is_dir():
-                shutil.make_archive(str(zip_path), "zip", source_obj)
-            else:
-                shutil.make_archive(str(zip_path), "zip", source_obj.parent, source_obj.name)
-            source_obj = Path(str(zip_path) + ".zip")
-            if not target_rel2home.endswith(".zip"): target_rel2home = target_rel2home + ".zip"
-        self.create_dir(path_rel2home=str(Path(target_rel2home).parent), overwrite_existing=overwrite_existing)
-        print(f"""📤 [SFTP UPLOAD] Sending file: {repr(source_obj)} ==> Remote Path: {target_rel2home}""")
-        try:
-            with self.tqdm_wrap(ascii=True, unit="b", unit_scale=True) as pbar:
-                if self.sftp is None: # type: ignore[unreachable]
-                    raise RuntimeError(f"SFTP connection lost for {self.hostname}")
-                self.sftp.put(localpath=str(source_obj), remotepath=str(Path(self.remote_specs["home_dir"]).joinpath(target_rel2home)), callback=pbar.view_bar)
-        except Exception:
-            if compress_with_zip and source_obj.exists() and str(source_obj).endswith("_archive.zip"):
-                source_obj.unlink()
-            raise
-
-        if compress_with_zip:
-            def unzip_archive(zip_file_path: str, overwrite_flag: bool) -> None:
-                from pathlib import Path
-                import shutil
-                import zipfile
-                archive_path = Path(zip_file_path).expanduser()
-                extraction_directory = archive_path.parent / archive_path.stem
-                if overwrite_flag and extraction_directory.exists():
-                    shutil.rmtree(extraction_directory)
-                with zipfile.ZipFile(archive_path, "r") as archive_handle:
-                    archive_handle.extractall(extraction_directory)
-                archive_path.unlink()
-            command = lambda_to_python_script(lmb=lambda: unzip_archive(zip_file_path=str(Path(self.remote_specs["home_dir"]).joinpath(target_rel2home)), overwrite_flag=overwrite_existing), in_global=True, import_module=False)
-            tmp_py_file = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/create_target_dir_{randstr()}.py")
-            tmp_py_file.parent.mkdir(parents=True, exist_ok=True)
-            tmp_py_file.write_text(command, encoding="utf-8")
-            remote_tmp_py = tmp_py_file.relative_to(Path.home()).as_posix()
-            self.copy_from_here(source_path=str(tmp_py_file), target_rel2home=None, compress_with_zip=False, recursive=False, overwrite_existing=True)
-            self.run_shell(command=f"""{UV_RUN_CMD} python {remote_tmp_py}""", verbose_output=False, description=f"UNZIPPING {target_rel2home}", strict_stderr=True, strict_return_code=True)
-            source_obj.unlink()
-            tmp_py_file.unlink(missing_ok=True)
-        return None
-
-    def _check_remote_is_dir(self, source_path: Union[str, Path]) -> bool:
-        """Helper to check if a remote path is a directory."""
-        def check_is_dir(path_to_check: str, json_output_path: str) -> bool:
-            from pathlib import Path
-            import json
-            is_directory = Path(path_to_check).expanduser().absolute().is_dir()
-            json_result_path = Path(json_output_path)
-            json_result_path.parent.mkdir(parents=True, exist_ok=True)
-            json_result_path.write_text(json.dumps(is_directory, indent=2), encoding="utf-8")
-            print(json_result_path.as_posix())
-            return is_directory
-        remote_json_output = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/return_{randstr()}.json").as_posix()
-        command = lambda_to_python_script(lmb=lambda: check_is_dir(path_to_check=str(source_path), json_output_path=remote_json_output), in_global=True, import_module=False)
-        response = self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description=f"Check if source `{source_path}` is a dir", verbose_output=False, strict_stderr=False, strict_return_code=False)
-        remote_json_path = response.op.strip()
-        if not remote_json_path:
-            raise RuntimeError(f"Failed to check if {source_path} is directory - no response from remote")
-
-        local_json = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/local_{randstr()}.json")
-        self._simple_sftp_get(remote_path=remote_json_path, local_path=local_json)
-        import json
-        try:
-            result = json.loads(local_json.read_text(encoding="utf-8"))
-        except (json.JSONDecodeError, FileNotFoundError) as err:
-            raise RuntimeError(f"Failed to check if {source_path} is directory - invalid JSON response: {err}") from err
-        finally:
-            if local_json.exists():
-                local_json.unlink()
-        assert isinstance(result, bool), f"Failed to check if {source_path} is directory"
-        return result
-
-    def _expand_remote_path(self, source_path: Union[str, Path]) -> str:
-        """Helper to expand a path on the remote machine."""
-        def expand_source(path_to_expand: str, json_output_path: str) -> str:
-            from pathlib import Path
-            import json
-            expanded_path_posix = Path(path_to_expand).expanduser().absolute().as_posix()
-            json_result_path = Path(json_output_path)
-            json_result_path.parent.mkdir(parents=True, exist_ok=True)
-            json_result_path.write_text(json.dumps(expanded_path_posix, indent=2), encoding="utf-8")
-            print(json_result_path.as_posix())
-            return expanded_path_posix
-
-
-
-        remote_json_output = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/return_{randstr()}.json").as_posix()
-        command = lambda_to_python_script(lmb=lambda: expand_source(path_to_expand=str(source_path), json_output_path=remote_json_output), in_global=True, import_module=False)
-        response = self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description="Resolving source path by expanding user", verbose_output=False, strict_stderr=False, strict_return_code=False)
-        remote_json_path = response.op.strip()
-        if not remote_json_path:
-            raise RuntimeError(f"Could not resolve source path {source_path} - no response from remote")
-
-        local_json = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/local_{randstr()}.json")
-        self._simple_sftp_get(remote_path=remote_json_path, local_path=local_json)
-        import json
-        try:
-            result = json.loads(local_json.read_text(encoding="utf-8"))
-        except (json.JSONDecodeError, FileNotFoundError) as err:
-            raise RuntimeError(f"Could not resolve source path {source_path} - invalid JSON response: {err}") from err
-        finally:
-            if local_json.exists():
-                local_json.unlink()
-        assert isinstance(result, str), f"Could not resolve source path {source_path}"
-        return result
-
-    def copy_to_here(self, source: Union[str, Path], target: Optional[Union[str, Path]], compress_with_zip: bool = False, recursive: bool = False, internal_call: bool = False) -> None:
-        if self.sftp is None:
-            raise RuntimeError(f"SFTP connection not available for {self.hostname}. Cannot transfer files.")
-
-        if not internal_call:
-            print(f"{'⬇️' * 5} SFTP DOWNLOADING FROM `{source}` TO `{target}`")
-
-        source_obj = Path(source)
-        expanded_source = self._expand_remote_path(source_path=source_obj)
-
-        if not compress_with_zip:
-            is_dir = self._check_remote_is_dir(source_path=expanded_source)
-
-            if is_dir:
-                if not recursive:
-                    raise RuntimeError(f"SSH Error: source `{source_obj}` is a directory! Set recursive=True for recursive transfer or compress_with_zip=True to zip it.")
-
-                def search_files(directory_path: str, json_output_path: str) -> list[str]:
-                    from pathlib import Path
-                    import json
-                    file_paths_list = [file_path.as_posix() for file_path in Path(directory_path).expanduser().absolute().rglob("*") if file_path.is_file()]
-                    json_result_path = Path(json_output_path)
-                    json_result_path.parent.mkdir(parents=True, exist_ok=True)
-                    json_result_path.write_text(json.dumps(file_paths_list, indent=2), encoding="utf-8")
-                    print(json_result_path.as_posix())
-                    return file_paths_list
-
-
-
-                remote_json_output = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/return_{randstr()}.json").as_posix()
-                command = lambda_to_python_script(lmb=lambda: search_files(directory_path=expanded_source, json_output_path=remote_json_output), in_global=True, import_module=False)
-                response = self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description="Searching for files in source", verbose_output=False, strict_stderr=False, strict_return_code=False)
-                remote_json_path = response.op.strip()
-                if not remote_json_path:
-                    raise RuntimeError(f"Could not resolve source path {source} - no response from remote")
-
-                local_json = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/local_{randstr()}.json")
-                self._simple_sftp_get(remote_path=remote_json_path, local_path=local_json)
-                import json
-                try:
-                    source_list_str = json.loads(local_json.read_text(encoding="utf-8"))
-                except (json.JSONDecodeError, FileNotFoundError) as err:
-                    raise RuntimeError(f"Could not resolve source path {source} - invalid JSON response: {err}") from err
-                finally:
-                    if local_json.exists():
-                        local_json.unlink()
-                assert isinstance(source_list_str, list), f"Could not resolve source path {source}"
-                file_paths_to_download = [Path(file_path_str) for file_path_str in source_list_str]
-
-                if target is None:
-                    def collapse_to_home_dir(absolute_path: str, json_output_path: str) -> str:
-                        from pathlib import Path
-                        import json
-                        source_absolute_path = Path(absolute_path).expanduser().absolute()
-                        try:
-                            relative_to_home = source_absolute_path.relative_to(Path.home())
-                            collapsed_path_posix = (Path("~") / relative_to_home).as_posix()
-                            json_result_path = Path(json_output_path)
-                            json_result_path.parent.mkdir(parents=True, exist_ok=True)
-                            json_result_path.write_text(json.dumps(collapsed_path_posix, indent=2), encoding="utf-8")
-                            print(json_result_path.as_posix())
-                            return collapsed_path_posix
-                        except ValueError:
-                            raise RuntimeError(f"Source path must be relative to home directory: {source_absolute_path}")
-
-
-
-                    remote_json_output = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/return_{randstr()}.json").as_posix()
-                    command = lambda_to_python_script(lmb=lambda: collapse_to_home_dir(absolute_path=expanded_source, json_output_path=remote_json_output), in_global=True, import_module=False)
-                    response = self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description="Finding default target via relative source path", verbose_output=False, strict_stderr=False, strict_return_code=False)
-                    remote_json_path_dir = response.op.strip()
-                    if not remote_json_path_dir:
-                        raise RuntimeError("Could not resolve target path - no response from remote")
-
-                    local_json_dir = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/local_{randstr()}.json")
-                    self._simple_sftp_get(remote_path=remote_json_path_dir, local_path=local_json_dir)
-                    import json
-                    try:
-                        target_dir_str = json.loads(local_json_dir.read_text(encoding="utf-8"))
-                    except (json.JSONDecodeError, FileNotFoundError) as err:
-                        raise RuntimeError(f"Could not resolve target path - invalid JSON response: {err}") from err
-                    finally:
-                        if local_json_dir.exists():
-                            local_json_dir.unlink()
-                    assert isinstance(target_dir_str, str), "Could not resolve target path"
-                    target = Path(target_dir_str)
-
-                target_dir = Path(target).expanduser().absolute()
-
-                for idx, file_path in enumerate(file_paths_to_download):
-                    print(f" {idx + 1:03d}. {file_path}")
-
-                for file_path in file_paths_to_download:
-                    local_file_target = target_dir.joinpath(Path(file_path).relative_to(expanded_source))
-                    self.copy_to_here(source=file_path, target=local_file_target, compress_with_zip=False, recursive=False, internal_call=True)
-
-                return None
-
-        if compress_with_zip:
-            print("🗜️ ZIPPING ...")
-            def zip_source(path_to_zip: str, json_output_path: str) -> str:
-                from pathlib import Path
-                import shutil
-                import json
-                source_to_compress = Path(path_to_zip).expanduser().absolute()
-                archive_base_path = source_to_compress.parent / (source_to_compress.name + "_archive")
-                if source_to_compress.is_dir():
-                    shutil.make_archive(str(archive_base_path), "zip", source_to_compress)
-                else:
-                    shutil.make_archive(str(archive_base_path), "zip", source_to_compress.parent, source_to_compress.name)
-                zip_file_path = str(archive_base_path) + ".zip"
-                json_result_path = Path(json_output_path)
-                json_result_path.parent.mkdir(parents=True, exist_ok=True)
-                json_result_path.write_text(json.dumps(zip_file_path, indent=2), encoding="utf-8")
-                print(json_result_path.as_posix())
-                return zip_file_path
-
-
-
-            remote_json_output = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/return_{randstr()}.json").as_posix()
-            command = lambda_to_python_script(lmb=lambda: zip_source(path_to_zip=expanded_source, json_output_path=remote_json_output), in_global=True, import_module=False)
-            response = self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description=f"Zipping source file {source}", verbose_output=False, strict_stderr=False, strict_return_code=False)
-            remote_json_path = response.op.strip()
-            if not remote_json_path:
-                raise RuntimeError(f"Could not zip {source} - no response from remote")
-
-            local_json = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/local_{randstr()}.json")
-            self._simple_sftp_get(remote_path=remote_json_path, local_path=local_json)
-            import json
-            try:
-                zipped_path = json.loads(local_json.read_text(encoding="utf-8"))
-            except (json.JSONDecodeError, FileNotFoundError) as err:
-                raise RuntimeError(f"Could not zip {source} - invalid JSON response: {err}") from err
-            finally:
-                if local_json.exists():
-                    local_json.unlink()
-            assert isinstance(zipped_path, str), f"Could not zip {source}"
-            source_obj = Path(zipped_path)
-            expanded_source = zipped_path
-
-        if target is None:
-            def collapse_to_home(absolute_path: str, json_output_path: str) -> str:
-                from pathlib import Path
-                import json
-                source_absolute_path = Path(absolute_path).expanduser().absolute()
-                try:
-                    relative_to_home = source_absolute_path.relative_to(Path.home())
-                    collapsed_path_posix = (Path("~") / relative_to_home).as_posix()
-                    json_result_path = Path(json_output_path)
-                    json_result_path.parent.mkdir(parents=True, exist_ok=True)
-                    json_result_path.write_text(json.dumps(collapsed_path_posix, indent=2), encoding="utf-8")
-                    print(json_result_path.as_posix())
-                    return collapsed_path_posix
-                except ValueError:
-                    raise RuntimeError(f"Source path must be relative to home directory: {source_absolute_path}")
-
-
-
-            remote_json_output = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/return_{randstr()}.json").as_posix()
-            command = lambda_to_python_script(lmb=lambda: collapse_to_home(absolute_path=expanded_source, json_output_path=remote_json_output), in_global=True, import_module=False)
-            response = self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description="Finding default target via relative source path", verbose_output=False, strict_stderr=False, strict_return_code=False)
-            remote_json_path = response.op.strip()
-            if not remote_json_path:
-                raise RuntimeError("Could not resolve target path - no response from remote")
-
-            local_json = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/local_{randstr()}.json")
-            self._simple_sftp_get(remote_path=remote_json_path, local_path=local_json)
-            import json
-            try:
-                target_str = json.loads(local_json.read_text(encoding="utf-8"))
-            except (json.JSONDecodeError, FileNotFoundError) as err:
-                raise RuntimeError(f"Could not resolve target path - invalid JSON response: {err}") from err
-            finally:
-                if local_json.exists():
-                    local_json.unlink()
-            assert isinstance(target_str, str), "Could not resolve target path"
-            target = Path(target_str)
-            assert str(target).startswith("~"), f"If target is not specified, source must be relative to home.\n{target=}"
-
-        target_obj = Path(target).expanduser().absolute()
-        target_obj.parent.mkdir(parents=True, exist_ok=True)
-
-        if compress_with_zip and target_obj.suffix != ".zip":
-            target_obj = target_obj.with_suffix(target_obj.suffix + ".zip")
-
-        print(f"""📥 [DOWNLOAD] Receiving: {expanded_source} ==> Local Path: {target_obj}""")
-        try:
-            with self.tqdm_wrap(ascii=True, unit="b", unit_scale=True) as pbar:
-                if self.sftp is None: # type: ignore[unreachable]
-                    raise RuntimeError(f"SFTP connection lost for {self.hostname}")
-                self.sftp.get(remotepath=expanded_source, localpath=str(target_obj), callback=pbar.view_bar) # type: ignore
-        except Exception:
-            if target_obj.exists():
-                target_obj.unlink()
-            raise
-
-        if compress_with_zip:
-            import zipfile
-            extract_to = target_obj.parent / target_obj.stem
-            with zipfile.ZipFile(target_obj, "r") as zip_ref:
-                zip_ref.extractall(extract_to)
-            target_obj.unlink()
-            target_obj = extract_to
-
-        def delete_temp_zip(path_to_delete: str) -> None:
-            from pathlib import Path
-            import shutil
-            file_or_dir_path = Path(path_to_delete)
-            if file_or_dir_path.exists():
-                if file_or_dir_path.is_dir():
-                    shutil.rmtree(file_or_dir_path)
-                else:
-                    file_or_dir_path.unlink()
-
-
-        command = lambda_to_python_script(lmb=lambda: delete_temp_zip(path_to_delete=expanded_source), in_global=True, import_module=False)
-        self.run_py(python_code=command, uv_with=[MACHINECONFIG_VERSION], uv_project_dir=None, description="Cleaning temp zip files @ remote.", verbose_output=False, strict_stderr=True, strict_return_code=True)
-
-        print("\n")
-        return None
+        from machineconfig.utils.ssh_utils.copy_from_here import copy_from_here
+        return copy_from_here(self, source_path=source_path, target_rel2home=target_rel2home, compress_with_zip=compress_with_zip, recursive=recursive, overwrite_existing=overwrite_existing)
+
+    def copy_to_here(self, source: Union[str, Path], target: Optional[Union[str, Path]], compress_with_zip: bool, recursive: bool, internal_call: bool = False) -> None:
+        from machineconfig.utils.ssh_utils.copy_to_here import copy_to_here
+        return copy_to_here(self, source=source, target=target, compress_with_zip=compress_with_zip, recursive=recursive, internal_call=internal_call)
+
+
+

 if __name__ == "__main__":
     ssh = SSH(host="p51s", username=None, hostname=None, ssh_key_path=None, password=None, port=22, enable_compression=False)