machineconfig-1.96-py3-none-any.whl → machineconfig-2.0-py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release.
This version of machineconfig might be problematic.
- machineconfig/cluster/cloud_manager.py +22 -26
- machineconfig/cluster/data_transfer.py +2 -2
- machineconfig/cluster/distribute.py +0 -2
- machineconfig/cluster/file_manager.py +4 -4
- machineconfig/cluster/job_params.py +1 -1
- machineconfig/cluster/loader_runner.py +8 -8
- machineconfig/cluster/remote_machine.py +4 -4
- machineconfig/cluster/script_execution.py +2 -2
- machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +1 -1
- machineconfig/cluster/sessions_managers/enhanced_command_runner.py +23 -23
- machineconfig/cluster/sessions_managers/wt_local.py +78 -76
- machineconfig/cluster/sessions_managers/wt_local_manager.py +91 -91
- machineconfig/cluster/sessions_managers/wt_remote.py +39 -39
- machineconfig/cluster/sessions_managers/wt_remote_manager.py +94 -91
- machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +56 -54
- machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +49 -49
- machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +18 -18
- machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +42 -42
- machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +36 -36
- machineconfig/cluster/sessions_managers/zellij_local.py +43 -46
- machineconfig/cluster/sessions_managers/zellij_local_manager.py +139 -120
- machineconfig/cluster/sessions_managers/zellij_remote.py +35 -35
- machineconfig/cluster/sessions_managers/zellij_remote_manager.py +33 -33
- machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +15 -15
- machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +25 -26
- machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +49 -49
- machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +5 -5
- machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +15 -15
- machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +11 -11
- machineconfig/cluster/templates/utils.py +3 -3
- machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/python_ve_symlink.cpython-311.pyc +0 -0
- machineconfig/jobs/python/check_installations.py +8 -9
- machineconfig/jobs/python/python_cargo_build_share.py +2 -2
- machineconfig/jobs/python/vscode/link_ve.py +7 -7
- machineconfig/jobs/python/vscode/select_interpreter.py +7 -7
- machineconfig/jobs/python/vscode/sync_code.py +5 -5
- machineconfig/jobs/python_custom_installers/archive/ngrok.py +2 -2
- machineconfig/jobs/python_custom_installers/dev/aider.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/alacritty.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/brave.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +5 -5
- machineconfig/jobs/python_custom_installers/dev/code.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/cursor.py +9 -9
- machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +4 -4
- machineconfig/jobs/python_custom_installers/dev/espanso.py +4 -4
- machineconfig/jobs/python_custom_installers/dev/goes.py +4 -4
- machineconfig/jobs/python_custom_installers/dev/lvim.py +4 -4
- machineconfig/jobs/python_custom_installers/dev/nerdfont.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/redis.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/wezterm.py +3 -3
- machineconfig/jobs/python_custom_installers/dev/winget.py +27 -27
- machineconfig/jobs/python_custom_installers/docker.py +3 -3
- machineconfig/jobs/python_custom_installers/gh.py +7 -7
- machineconfig/jobs/python_custom_installers/hx.py +1 -1
- machineconfig/jobs/python_custom_installers/warp-cli.py +3 -3
- machineconfig/jobs/python_generic_installers/config.json +412 -389
- machineconfig/jobs/python_windows_installers/dev/config.json +1 -1
- machineconfig/logger.py +50 -0
- machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
- machineconfig/profile/create.py +23 -16
- machineconfig/profile/create_hardlinks.py +8 -8
- machineconfig/profile/shell.py +41 -37
- machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/linux/devops +2 -2
- machineconfig/scripts/linux/fire +1 -0
- machineconfig/scripts/linux/fire_agents +0 -1
- machineconfig/scripts/linux/mcinit +27 -0
- machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/chatmodes/Thinking-Beast-Mode.chatmode.md +337 -0
- machineconfig/scripts/python/ai/chatmodes/Ultimate-Transparent-Thinking-Beast-Mode.chatmode.md +644 -0
- machineconfig/scripts/python/ai/chatmodes/deepResearch.chatmode.md +81 -0
- machineconfig/scripts/python/ai/configs/.gemini/settings.json +81 -0
- machineconfig/scripts/python/ai/instructions/python/dev.instructions.md +45 -0
- machineconfig/scripts/python/ai/mcinit.py +103 -0
- machineconfig/scripts/python/ai/prompts/allLintersAndTypeCheckers.prompt.md +5 -0
- machineconfig/scripts/python/ai/prompts/research-report-skeleton.prompt.md +38 -0
- machineconfig/scripts/python/ai/scripts/lint_and_type_check.sh +47 -0
- machineconfig/scripts/python/archive/tmate_conn.py +5 -5
- machineconfig/scripts/python/archive/tmate_start.py +3 -3
- machineconfig/scripts/python/choose_wezterm_theme.py +2 -2
- machineconfig/scripts/python/cloud_copy.py +19 -18
- machineconfig/scripts/python/cloud_mount.py +9 -7
- machineconfig/scripts/python/cloud_repo_sync.py +11 -11
- machineconfig/scripts/python/cloud_sync.py +1 -1
- machineconfig/scripts/python/croshell.py +14 -14
- machineconfig/scripts/python/devops.py +6 -6
- machineconfig/scripts/python/devops_add_identity.py +8 -6
- machineconfig/scripts/python/devops_add_ssh_key.py +18 -18
- machineconfig/scripts/python/devops_backup_retrieve.py +13 -13
- machineconfig/scripts/python/devops_devapps_install.py +3 -3
- machineconfig/scripts/python/devops_update_repos.py +1 -1
- machineconfig/scripts/python/dotfile.py +2 -2
- machineconfig/scripts/python/fire_agents.py +183 -41
- machineconfig/scripts/python/fire_jobs.py +17 -11
- machineconfig/scripts/python/ftpx.py +2 -2
- machineconfig/scripts/python/gh_models.py +94 -94
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/cloud_helpers.py +3 -3
- machineconfig/scripts/python/helpers/helpers2.py +1 -1
- machineconfig/scripts/python/helpers/helpers4.py +8 -6
- machineconfig/scripts/python/helpers/helpers5.py +7 -7
- machineconfig/scripts/python/helpers/repo_sync_helpers.py +1 -1
- machineconfig/scripts/python/mount_nfs.py +3 -2
- machineconfig/scripts/python/mount_nw_drive.py +4 -4
- machineconfig/scripts/python/mount_ssh.py +3 -2
- machineconfig/scripts/python/repos.py +8 -8
- machineconfig/scripts/python/scheduler.py +1 -1
- machineconfig/scripts/python/start_slidev.py +8 -7
- machineconfig/scripts/python/start_terminals.py +1 -1
- machineconfig/scripts/python/viewer.py +40 -40
- machineconfig/scripts/python/wifi_conn.py +65 -66
- machineconfig/scripts/python/wsl_windows_transfer.py +1 -1
- machineconfig/scripts/windows/mcinit.ps1 +4 -0
- machineconfig/settings/linters/.ruff.toml +2 -2
- machineconfig/settings/shells/ipy/profiles/default/startup/playext.py +71 -71
- machineconfig/settings/shells/wt/settings.json +8 -8
- machineconfig/setup_linux/web_shortcuts/tmp.sh +2 -0
- machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +10 -7
- machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +9 -7
- machineconfig/utils/ai/browser_user_wrapper.py +5 -5
- machineconfig/utils/ai/generate_file_checklist.py +11 -12
- machineconfig/utils/ai/url2md.py +1 -1
- machineconfig/utils/cloud/onedrive/setup_oauth.py +4 -4
- machineconfig/utils/cloud/onedrive/transaction.py +129 -129
- machineconfig/utils/code.py +13 -6
- machineconfig/utils/installer.py +51 -53
- machineconfig/utils/installer_utils/installer_abc.py +21 -10
- machineconfig/utils/installer_utils/installer_class.py +42 -16
- machineconfig/utils/io_save.py +3 -15
- machineconfig/utils/options.py +10 -3
- machineconfig/utils/path.py +5 -0
- machineconfig/utils/path_reduced.py +201 -149
- machineconfig/utils/procs.py +23 -23
- machineconfig/utils/scheduling.py +11 -12
- machineconfig/utils/ssh.py +270 -0
- machineconfig/utils/terminal.py +180 -0
- machineconfig/utils/utils.py +1 -2
- machineconfig/utils/utils2.py +43 -0
- machineconfig/utils/utils5.py +163 -34
- machineconfig/utils/ve.py +2 -2
- {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/METADATA +13 -8
- {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/RECORD +163 -144
- machineconfig/cluster/self_ssh.py +0 -57
- {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/WHEEL +0 -0
- {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/top_level.txt +0 -0
machineconfig/utils/procs.py
CHANGED
```diff
@@ -59,7 +59,7 @@ class ProcessManager:
                 # Convert create_time to local timezone
                 create_time_utc = datetime.fromtimestamp(proc.create_time(), tz=timezone('UTC'))
                 create_time_local = create_time_utc.astimezone(timezone('Australia/Adelaide'))
-
+
                 process_info.append({
                     'pid': proc.pid,
                     'name': proc.name(),
@@ -71,7 +71,7 @@ class ProcessManager:
                     'command': " ".join(proc.cmdline())
                 })
             except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess): pass
-
+
         # Sort by memory usage (descending)
         process_info.sort(key=lambda x: x['memory_usage_mb'], reverse=True)
         self.data = process_info
@@ -81,51 +81,51 @@ class ProcessManager:
         """Format process data as table string for display."""
         if not self.data:
             return ""
-
+
         # Create header
         _headers = ['PID', 'Name', 'Username', 'CPU%', 'Memory(MB)', 'Status', 'Create Time', 'Command']
         header_line = f"{'PID':<8} {'Name':<20} {'Username':<12} {'CPU%':<8} {'Memory(MB)':<12} {'Status':<12} {'Create Time':<20} {'Command':<50}"
         separator = "-" * len(header_line)
-
+
         lines = [header_line, separator]
-
+
         for process in self.data:
             # Format create_time as string
             create_time_str = process['create_time'].strftime('%Y-%m-%d %H:%M:%S')
             # Truncate command if too long
             command = process['command'][:47] + "..." if len(process['command']) > 50 else process['command']
-
+
             line = f"{process['pid']:<8} {process['name'][:19]:<20} {process['username'][:11]:<12} {process['cpu_percent']:<8.1f} {process['memory_usage_mb']:<12.2f} {process['status'][:11]:<12} {create_time_str:<20} {command:<50}"
             lines.append(line)
-
+
         return "\n".join(lines)
 
     def choose_and_kill(self):
         # header for interactive process selection
         title = "🎯 INTERACTIVE PROCESS SELECTION AND TERMINATION"
         console.print(Panel(title, title="[bold blue]Process Info[/bold blue]", border_style="blue"))
-
+
         # Format data as table for display
         formatted_data = self._format_process_table()
         options = formatted_data.split("\n")[1:] # Skip header
         res = display_options(options=formatted_data.split("\n"), msg="📋 Select processes to manage:", fzf=True, multi=True)
         indices = [options.index(val) for val in res]
         selected_processes = [self.data[i] for i in indices]
-
+
         print("\n📊 All Processes:")
         print(formatted_data)
         print("\n🎯 Selected Processes:")
         for process in selected_processes:
             print(f"PID: {process['pid']}, Name: {process['name']}, Memory: {process['memory_usage_mb']:.2f}MB")
-
+
         for idx, process in enumerate(selected_processes):
             pprint(process, f"📌 Process {idx}")
-
+
         kill_all = input("\n⚠️ Confirm killing ALL selected processes? y/[n] ").lower() == "y"
         if kill_all:
             self.kill(pids=[p['pid'] for p in selected_processes])
             return
-
+
         kill_by_index = input("\n🔫 Kill by index? (enter numbers separated by spaces, e.g. '1 4') or [n] to cancel: ")
         if kill_by_index != "" and kill_by_index != "n":
             indices = [int(val) for val in kill_by_index.split(" ")]
@@ -139,12 +139,12 @@ class ProcessManager:
         # header for filtering processes by name
         title = "🔍 FILTERING AND TERMINATING PROCESSES BY NAME"
         console.print(Panel(title, title="[bold blue]Process Info[/bold blue]", border_style="blue"))
-
+
         # Filter processes by name
         filtered_processes = [p for p in self.data if p['name'] == name]
         # Sort by create_time (ascending)
         filtered_processes.sort(key=lambda x: x['create_time'])
-
+
         print(f"🎯 Found {len(filtered_processes)} processes matching name: '{name}'")
         self.kill(pids=[p['pid'] for p in filtered_processes])
         console.print(Panel("", title="[bold blue]Process Info[/bold blue]", border_style="blue"))
@@ -159,9 +159,9 @@ class ProcessManager:
         if names is None: names = []
         if pids is None: pids = []
         if commands is None: commands = []
-
+
         killed_count = 0
-
+
         for name in names:
             matching_processes = [p for p in self.data if p['name'] == name]
             if len(matching_processes) > 0:
@@ -169,9 +169,9 @@ class ProcessManager:
                     psutil.Process(process['pid']).kill()
                     print(f'💀 Killed process {name} with PID {process["pid"]}. It lived {get_age(process["create_time"])}. RIP 🪦💐')
                     killed_count += 1
-            else:
+            else:
                 print(f'❓ No process named "{name}" found')
-
+
         for pid in pids:
             try:
                 proc = psutil.Process(pid)
@@ -180,9 +180,9 @@ class ProcessManager:
                 proc.kill()
                 print(f'💀 Killed process with PID {pid} and name "{proc_name}". It lived {proc_lifetime}. RIP 🪦💐')
                 killed_count += 1
-            except psutil.NoSuchProcess:
+            except psutil.NoSuchProcess:
                 print(f'❓ No process with PID {pid} found')
-
+
         for command in commands:
             matching_processes = [p for p in self.data if command in p['command']]
             if len(matching_processes) > 0:
@@ -190,9 +190,9 @@ class ProcessManager:
                     psutil.Process(process['pid']).kill()
                     print(f'💀 Killed process with "{command}" in its command & PID {process["pid"]}. It lived {get_age(process["create_time"])}. RIP 🪦💐')
                     killed_count += 1
-            else:
+            else:
                 print(f'❓ No process has "{command}" in its command.')
-
+
         console.print(Panel(f"✅ Termination complete: {killed_count} processes terminated", title="[bold blue]Process Info[/bold blue]", border_style="blue"))
 
 
@@ -206,7 +206,7 @@ def get_age(create_time: Any) -> str:
     else:
         # Already a datetime object
         create_time_local = create_time
-
+
     now_local = datetime.now(tz=timezone('Australia/Adelaide'))
     age = now_local - create_time_local
     return str(age)
```
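The procs.py hunks above are whitespace-only (trailing spaces stripped from otherwise blank lines); the surrounding logic is unchanged. For orientation, here is a minimal standalone sketch of the pattern that code relies on, assuming `timezone` in procs.py resolves to `pytz.timezone` (the import itself is not shown in the hunks):

```python
# Illustration only: list processes with timezone-aware ages, mirroring the
# create_time handling visible in the procs.py context lines above.
from datetime import datetime

import psutil
from pytz import timezone  # assumption: procs.py's `timezone` is pytz.timezone

for proc in psutil.process_iter():
    try:
        create_time_utc = datetime.fromtimestamp(proc.create_time(), tz=timezone('UTC'))
        create_time_local = create_time_utc.astimezone(timezone('Australia/Adelaide'))
        age = datetime.now(tz=timezone('Australia/Adelaide')) - create_time_local
        print(f"{proc.pid:<8} {proc.name():<25} alive for {age}")
    except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
        pass
```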
machineconfig/utils/scheduling.py
CHANGED

```diff
@@ -16,13 +16,13 @@
 # """Convert string to timedelta. Simple implementation for common cases."""
 # # Handle common formats like "1h", "30m", "2d", etc.
 # import re
-
+
 # # Parse patterns like "1h", "30m", "2d", "1w"
 # match = re.match(r'^(\d+)([hdwm])$', time_str.lower())
 # if match:
 # value, unit = match.groups()
 # value = int(value)
-
+
 # if unit == 'h':
 # return timedelta(hours=value)
 # elif unit == 'd':
@@ -31,7 +31,7 @@
 # return timedelta(weeks=value)
 # elif unit == 'm':
 # return timedelta(minutes=value)
-
+
 # # Fallback: try to parse as seconds
 # try:
 # return timedelta(seconds=int(time_str))
@@ -43,18 +43,18 @@
 # """Convert list of dictionaries to markdown table format."""
 # if not data:
 # return ""
-
+
 # # Get all unique keys from all dictionaries
 # all_keys = set()
 # for row in data:
 # all_keys.update(row.keys())
-
+
 # keys = sorted(all_keys)
-
+
 # # Create header
 # header = "|" + "|".join(f" {key} " for key in keys) + "|"
 # separator = "|" + "|".join(" --- " for _ in keys) + "|"
-
+
 # # Create rows
 # rows = []
 # for row in data:
@@ -68,7 +68,7 @@
 # value = str(value)
 # row_values.append(f" {value} ")
 # rows.append("|" + "|".join(row_values) + "|")
-
+
 # return "\n".join([header, separator] + rows)
 
 
@@ -194,7 +194,6 @@
 # def main(root: Optional[str] = None, ignore_conditions: bool=True):
 # if root is None: root_resolved = SCHEDULER_DEFAULT_ROOT
 # else: root_resolved = PathExtended(root).expanduser().absolute()
-# # Replace crocodile List usage with pathlib iteration
 # from pathlib import Path
 # # Find all `task.py` files under root and use their parent directories
 # tasks_dirs = list({PathExtended(p.parent) for p in Path(str(root_resolved)).rglob("task.py")})
@@ -210,7 +209,7 @@
 # tasks.append(read_task_from_dir(a_dir))
 
 # from machineconfig.utils.utils import choose_multiple_options
-
+
 # # Create data for tasks display
 # task_data = [Report.from_path(path=a_task.report_path).__dict__ for a_task in tasks]
 # task_display = format_table_markdown(task_data)
@@ -290,10 +289,10 @@
 # shell_script_root.mkdir(parents=True, exist_ok=True)
 # try:
 # if platform.system() == 'Windows':
-# shell_script = shell_script_root.joinpath("run.ps1").write_text(shell_script)
+# shell_script = shell_script_root.joinpath("run.ps1").write_text(shell_script, encoding="utf-8")
 # subprocess.run(['powershell', '-ExecutionPolicy', 'Unrestricted', shell_script], check=True)
 # elif platform.system() in ['Linux', 'Darwin']:
-# shell_script = shell_script_root.joinpath("run.sh").write_text(shell_script)
+# shell_script = shell_script_root.joinpath("run.sh").write_text(shell_script, encoding="utf-8")
 # subprocess.run(['bash', shell_script], check=True)
 # else: res = f"Error: Unsupported platform {platform.system()}."
 # res = SUCCESS
```
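The only non-whitespace change in the scheduling.py hunks is passing `encoding="utf-8"` to `write_text` where the (currently commented-out) runner writes its generated script. A small sketch of that pattern, using a placeholder script body and path rather than the package's own:

```python
# Hypothetical paths and script body; only the explicit-encoding write_text
# call and the per-platform dispatch mirror the diff above.
import platform
import subprocess
from pathlib import Path

shell_script = "echo hello"
shell_script_root = Path.home() / "tmp_scripts"
shell_script_root.mkdir(parents=True, exist_ok=True)
if platform.system() == "Windows":
    script = shell_script_root / "run.ps1"
    script.write_text(shell_script, encoding="utf-8")
    subprocess.run(["powershell", "-ExecutionPolicy", "Unrestricted", str(script)], check=True)
elif platform.system() in ["Linux", "Darwin"]:
    script = shell_script_root / "run.sh"
    script.write_text(shell_script, encoding="utf-8")
    subprocess.run(["bash", str(script)], check=True)
```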
machineconfig/utils/ssh.py
ADDED

```python

from typing import Optional, Any, Union, List
import os
from dataclasses import dataclass
import rich.console
from machineconfig.utils.terminal import Terminal, Response, MACHINE
from machineconfig.utils.path_reduced import P, PLike, OPLike
from machineconfig.utils.utils2 import pprint

@dataclass
class Scout:
    source_full: P
    source_rel2home: P
    exists: bool
    is_dir: bool
    files: Optional[List[P]]
def scout(source: PLike, z: bool = False, r: bool = False) -> Scout:
    source_full = P(source).expanduser().absolute()
    source_rel2home = source_full.collapseuser()
    exists = source_full.exists()
    is_dir = source_full.is_dir() if exists else False
    if z and exists:
        try: source_full = source_full.zip()
        except Exception as ex:
            raise Exception(f"Could not zip {source_full} due to {ex}") from ex  # type: ignore # pylint: disable=W0719
        source_rel2home = source_full.zip()
    if r and exists and is_dir:
        files = [item.collapseuser() for item in source_full.search(folders=False, r=True)]
    else: files = None
    return Scout(source_full=source_full, source_rel2home=source_rel2home, exists=exists, is_dir=is_dir, files=files)


class SSH:  # inferior alternative: https://github.com/fabric/fabric
    def __init__(self, host: Optional[str] = None, username: Optional[str] = None, hostname: Optional[str] = None, sshkey: Optional[str] = None, pwd: Optional[str] = None, port: int = 22, ve: Optional[str] = "ve", compress: bool = False):  # https://stackoverflow.com/questions/51027192/execute-command-script-using-different-shell-in-ssh-paramiko
        self.pwd = pwd
        self.ve = ve
        self.compress = compress  # Defaults: (1) use localhost if nothing provided.

        self.host: Optional[str] = None
        self.hostname: str
        self.username: str
        self.port: int = port
        self.proxycommand: Optional[str] = None
        import platform
        import paramiko  # type: ignore
        import getpass
        if isinstance(host, str):
            try:
                import paramiko.config as pconfig
                config = pconfig.SSHConfig.from_path(str(P.home().joinpath(".ssh/config")))
                config_dict = config.lookup(host)
                self.hostname = config_dict["hostname"]
                self.username = config_dict["user"]
                self.host = host
                self.port = int(config_dict.get("port", port))
                tmp = config_dict.get("identityfile", sshkey)
                if isinstance(tmp, list): sshkey = tmp[0]
                else: sshkey = tmp
                self.proxycommand = config_dict.get("proxycommand", None)
                if sshkey is not None:
                    tmp = config.lookup("*").get("identityfile", sshkey)
                    if isinstance(tmp, list): sshkey = tmp[0]
                    else: sshkey = tmp
            except (FileNotFoundError, KeyError):
                assert "@" in host or ":" in host, f"Host must be in the form of `username@hostname:port` or `username@hostname` or `hostname:port`, but it is: {host}"
                if "@" in host: self.username, self.hostname = host.split("@")
                else:
                    self.username = username or getpass.getuser()
                    self.hostname = host
                if ":" in self.hostname:
                    self.hostname, port_ = self.hostname.split(":")
                    self.port = int(port_)
        elif username is not None and hostname is not None:
            self.username, self.hostname = username, hostname
            self.proxycommand = None
        else:
            print(f"Provided values: host={host}, username={username}, hostname={hostname}")
            raise ValueError("Either host or username and hostname must be provided.")

        self.sshkey = str(P(sshkey).expanduser().absolute()) if sshkey is not None else None  # no need to pass sshkey if it was configured properly already
        self.ssh = paramiko.SSHClient()
        self.ssh.load_system_host_keys()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        pprint(dict(host=self.host, hostname=self.hostname, username=self.username, password="***", port=self.port, key_filename=self.sshkey, ve=self.ve), title="SSHing To")

        sock = paramiko.ProxyCommand(self.proxycommand) if self.proxycommand is not None else None
        try:
            if pwd is None:
                allow_agent = True
                look_for_keys = True
            else:
                allow_agent = False
                look_for_keys = False
            self.ssh.connect(hostname=self.hostname, username=self.username, password=self.pwd, port=self.port, key_filename=self.sshkey, compress=self.compress, sock=sock,
                             allow_agent=allow_agent, look_for_keys=look_for_keys)  # type: ignore
        except Exception as _err:
            rich.console.Console().print_exception()
            self.pwd = getpass.getpass(f"Enter password for {self.username}@{self.hostname}: ")
            self.ssh.connect(hostname=self.hostname, username=self.username, password=self.pwd, port=self.port, key_filename=self.sshkey, compress=self.compress, sock=sock,
                             allow_agent=False, look_for_keys=False)  # type: ignore

        try: self.sftp: Optional[paramiko.SFTPClient] = self.ssh.open_sftp()
        except Exception as err:
            self.sftp = None
            print(f"""⚠️ WARNING: Failed to open SFTP connection to {hostname}.
Error Details: {err}\nData transfer may be affected!""")
        def view_bar(slf: Any, a: Any, b: Any):
            slf.total = int(b)
            slf.update(int(a - slf.n))  # update pbar with increment
        from tqdm import tqdm
        self.tqdm_wrap = type('TqdmWrap', (tqdm,), {'view_bar': view_bar})
        self._local_distro: Optional[str] = None
        self._remote_distro: Optional[str] = None
        self._remote_machine: Optional[MACHINE] = None
        self.terminal_responses: list[Response] = []
        self.platform = platform
        self.remote_env_cmd = rf"""~/venvs/{self.ve}/Scripts/Activate.ps1""" if self.get_remote_machine() == "Windows" else rf"""source ~/venvs/{self.ve}/bin/activate"""
        self.local_env_cmd = rf"""~/venvs/{self.ve}/Scripts/Activate.ps1""" if self.platform.system() == "Windows" else rf"""source ~/venvs/{self.ve}/bin/activate"""  # works for both cmd and pwsh
    def __getstate__(self): return {attr: self.__getattribute__(attr) for attr in ["username", "hostname", "host", "port", "sshkey", "compress", "pwd", "ve"]}
    def __setstate__(self, state: dict[str, Any]): SSH(**state)
    def get_remote_machine(self) -> MACHINE:
        if self._remote_machine is None:
            if (self.run("$env:OS", verbose=False, desc="Testing Remote OS Type").op == "Windows_NT" or self.run("echo %OS%", verbose=False, desc="Testing Remote OS Type Again").op == "Windows_NT"): self._remote_machine = "Windows"
            else: self._remote_machine = "Linux"
        return self._remote_machine  # echo %OS% TODO: uname on linux
    def get_local_distro(self) -> str:
        if self._local_distro is None:
            command = """uv run --with distro python -c "import distro; print(distro.name(pretty=True))" """
            import subprocess
            res = subprocess.run(command, shell=True, capture_output=True, text=True).stdout.strip()
            self._local_distro = res
            return res
        return self._local_distro
    def get_remote_distro(self):
        if self._remote_distro is None:
            self._remote_distro = self.run_py("print(install_n_import('distro').name(pretty=True))", verbose=False).op_if_successfull_or_default() or ""
            # q.run("""~/.local/bin/uv run --with distro python -c "import distro; print(distro.name(pretty=True))" """)
        return self._remote_distro
    def restart_computer(self): self.run("Restart-Computer -Force" if self.get_remote_machine() == "Windows" else "sudo reboot")
    def send_ssh_key(self):
        self.copy_from_here("~/.ssh/id_rsa.pub")
        assert self.get_remote_machine() == "Windows"
        code_url = "https://raw.githubusercontent.com/thisismygitrepo/machineconfig/refs/heads/main/src/machineconfig/setup_windows/openssh-server_add-sshkey.ps1"
        code = P(code_url).download().read_text(encoding="utf-8")
        self.run(code)
    def copy_env_var(self, name: str):
        assert self.get_remote_machine() == "Linux"
        return self.run(f"{name} = {os.environ[name]}; export {name}")
    def get_remote_repr(self, add_machine: bool = False) -> str: return f"{self.username}@{self.hostname}:{self.port}" + (f" [{self.get_remote_machine()}][{self.get_remote_distro()}]" if add_machine else "")
    def get_local_repr(self, add_machine: bool = False) -> str:
        import getpass
        return f"{getpass.getuser()}@{self.platform.node()}" + (f" [{self.platform.system()}][{self.get_local_distro()}]" if add_machine else "")
    def __repr__(self): return f"local {self.get_local_repr(add_machine=True)} >>> SSH TO >>> remote {self.get_remote_repr(add_machine=True)}"
    def run_locally(self, command: str):
        print(f"""💻 [LOCAL EXECUTION] Running command on node: {self.platform.node()} Command: {command}""")
        res = Response(cmd=command)
        res.output.returncode = os.system(command)
        return res
    def get_ssh_conn_str(self, cmd: str = ""): return "ssh " + (f" -i {self.sshkey}" if self.sshkey else "") + self.get_remote_repr().replace(':', ' -p ') + (f' -t {cmd} ' if cmd != '' else ' ')
    # def open_console(self, cmd: str = '', new_window: bool = True, terminal: Optional[str] = None, shell: str = "pwsh"): Terminal().run_async(*(self.get_ssh_conn_str(cmd=cmd).split(" ")), new_window=new_window, terminal=terminal, shell=shell)
    def run(self, cmd: str, verbose: bool = True, desc: str = "", strict_err: bool = False, strict_returncode: bool = False, env_prefix: bool = False) -> Response:  # most central method.
        cmd = (self.remote_env_cmd + "; " + cmd) if env_prefix else cmd
        raw = self.ssh.exec_command(cmd)
        res = Response(stdin=raw[0], stdout=raw[1], stderr=raw[2], cmd=cmd, desc=desc)  # type: ignore
        if not verbose: res.capture().print_if_unsuccessful(desc=desc, strict_err=strict_err, strict_returncode=strict_returncode, assert_success=False)
        else: res.print()
        self.terminal_responses.append(res)
        return res
    def run_py(self, cmd: str, desc: str = "", return_obj: bool = False, verbose: bool = True, strict_err: bool = False, strict_returncode: bool = False) -> Union[Any, Response]:
        assert '"' not in cmd, 'Avoid using `"` in your command. I dont know how to handle this when passing is as command to python in pwsh command.'
        if not return_obj: return self.run(cmd=f"""{self.remote_env_cmd}; python -c "{Terminal.get_header(wdir=None, toolbox=True)}{cmd}\n""" + '"', desc=desc or f"run_py on {self.get_remote_repr()}", verbose=verbose, strict_err=strict_err, strict_returncode=strict_returncode)
        assert "obj=" in cmd, "The command sent to run_py must have `obj=` statement if return_obj is set to True"
        source_file = self.run_py(f"""{cmd}\npath = Save.pickle(obj=obj, path=P.tmpfile(suffix='.pkl'))\nprint(path)""", desc=desc, verbose=verbose, strict_err=True, strict_returncode=True).op.split('\n')[-1]
        res = self.copy_to_here(source=source_file, target=P.tmpfile(suffix='.pkl'))
        import pickle
        res_bytes = res.read_bytes()
        return pickle.loads(res_bytes)
    def copy_from_here(self, source: PLike, target: OPLike = None, z: bool = False, r: bool = False, overwrite: bool = False, init: bool = True) -> Union[P, list[P]]:
        if init: print(f"{'⬆️' * 5} [SFTP UPLOAD] FROM `{source}` TO `{target}`")  # TODO: using return_obj do all tests required in one go.
        source_obj = P(source).expanduser().absolute()
        if not source_obj.exists(): raise RuntimeError(f"Meta.SSH Error: source `{source_obj}` does not exist!")
        if target is None:
            target = P(source_obj).expanduser().absolute().collapseuser(strict=True)
            assert target.is_relative_to("~"), "If target is not specified, source must be relative to home."
            if z: target += ".zip"
        if not z and source_obj.is_dir():
            if r is False: raise RuntimeError(f"Meta.SSH Error: source `{source_obj}` is a directory! either set `r=True` for recursive sending or raise `z=True` flag to zip it first.")
            source_list: list[P] = source_obj.search("*", folders=False, files=True, r=True)
            remote_root = self.run_py(f"path=P(r'{P(target).as_posix()}').expanduser()\n{'path.delete(sure=True)' if overwrite else ''}\nprint(path.create())", desc=f"Creating Target directory `{P(target).as_posix()}` @ {self.get_remote_repr()}", verbose=False).op or ''
            for idx, item in enumerate(source_list): print(f" {idx+1:03d}. {item}")
            for item in source_list:
                a__target = P(remote_root).joinpath(item.relative_to(source_obj))
                self.copy_from_here(source=item, target=a__target)
            return list(source_list)
        if z:
            print("🗜️ ZIPPING ...")
            source_obj = P(source_obj).expanduser().zip(content=True)  # .append(f"_{randstr()}", inplace=True)  # eventually, unzip will raise content flag, so this name doesn't matter.
        remotepath = self.run_py(f"path=P(r'{P(target).as_posix()}').expanduser()\n{'path.delete(sure=True)' if overwrite else ''}\nprint(path.parent.create())", desc=f"Creating Target directory `{P(target).parent.as_posix()}` @ {self.get_remote_repr()}", verbose=False).op or ''
        remotepath = P(remotepath.split("\n")[-1]).joinpath(P(target).name)
        print(f"""📤 [SFTP UPLOAD] Sending file: {repr(P(source_obj))} ==> Remote Path: {remotepath.as_posix()}""")
        with self.tqdm_wrap(ascii=True, unit='b', unit_scale=True) as pbar: self.sftp.put(localpath=P(source_obj).expanduser(), remotepath=remotepath.as_posix(), callback=pbar.view_bar)  # type: ignore # pylint: disable=E1129
        if z:
            _resp = self.run_py(f"""P(r'{remotepath.as_posix()}').expanduser().unzip(content=False, inplace=True, overwrite={overwrite})""", desc=f"UNZIPPING {remotepath.as_posix()}", verbose=False, strict_err=True, strict_returncode=True)
            source_obj.delete(sure=True)
        print("\n")
        return source_obj
    def copy_to_here(self, source: PLike, target: OPLike = None, z: bool = False, r: bool = False, init: bool = True) -> P:
        if init: print(f"{'⬇️' * 5} SFTP DOWNLOADING FROM `{source}` TO `{target}`")
        if not z and self.run_py(f"print(P(r'{source}').expanduser().absolute().is_dir())", desc=f"Check if source `{source}` is a dir", verbose=False, strict_returncode=True, strict_err=True).op.split("\n")[-1] == 'True':
            if r is False: raise RuntimeError(f"source `{source}` is a directory! either set r=True for recursive sending or raise zip_first flag.")
            source_list = self.run_py(f"obj=P(r'{source}').search(folders=False, r=True).collapseuser(strict=False)", desc="Searching for files in source", return_obj=True, verbose=False)
            assert isinstance(source_list, List), f"Could not resolve source path {source} due to error"
            for file in source_list:
                self.copy_to_here(source=file.as_posix(), target=P(target).joinpath(P(file).relative_to(source)) if target else None, r=False)
        if z:
            tmp: Response = self.run_py(f"print(P(r'{source}').expanduser().zip(inplace=False, verbose=False))", desc=f"Zipping source file {source}", verbose=False)
            tmp2 = tmp.op2path(strict_returncode=True, strict_err=True)
            if not isinstance(tmp2, P): raise RuntimeError(f"Could not zip {source} due to {tmp.err}")
            else: source = tmp2
        if target is None:
            tmpx = self.run_py(f"print(P(r'{P(source).as_posix()}').collapseuser(strict=False).as_posix())", desc="Finding default target via relative source path", strict_returncode=True, strict_err=True, verbose=False).op2path()
            if isinstance(tmpx, P): target = tmpx
            else: raise RuntimeError(f"Could not resolve target path {target} due to error")
            assert target.is_relative_to("~"), f"If target is not specified, source must be relative to home.\n{target=}"
        target_obj = P(target).expanduser().absolute()
        target_obj.parent.mkdir(parents=True, exist_ok=True)
        if z and '.zip' not in target_obj.suffix: target_obj += '.zip'
        if "~" in str(source):
            tmp3 = self.run_py(f"print(P(r'{source}').expanduser())", desc="# Resolving source path address by expanding user", strict_returncode=True, strict_err=True, verbose=False).op2path()
            if isinstance(tmp3, P): source = tmp3
            else: raise RuntimeError(f"Could not resolve source path {source} due to")
        else: source = P(source)
        print(f"""📥 [DOWNLOAD] Receiving: {source} ==> Local Path: {target_obj}""")
        with self.tqdm_wrap(ascii=True, unit='b', unit_scale=True) as pbar:  # type: ignore # pylint: disable=E1129
            assert self.sftp is not None, f"Could not establish SFTP connection to {self.hostname}."
            self.sftp.get(remotepath=source.as_posix(), localpath=str(target_obj), callback=pbar.view_bar)  # type: ignore
        if z:
            target_obj = target_obj.unzip(inplace=True, content=True)
            self.run_py(f"P(r'{source.as_posix()}').delete(sure=True)", desc="Cleaning temp zip files @ remote.", strict_returncode=True, strict_err=True, verbose=False)
        print("\n")
        return target_obj
    def receieve(self, source: PLike, target: OPLike = None, z: bool = False, r: bool = False) -> P:
        scout = self.run_py(cmd=f"obj=scout(r'{source}', z={z}, r={r})", desc=f"Scouting source `{source}` path on remote", return_obj=True, verbose=False)
        assert isinstance(scout, Scout)
        if not z and scout.is_dir and scout.files is not None:
            if r:
                tmp: list[P] = [self.receieve(source=file.as_posix(), target=P(target).joinpath(P(file).relative_to(source)) if target else None, r=False) for file in scout.files]
                return tmp[0]
            else: print("Source is a directory! either set `r=True` for recursive sending or raise `zip_first=True` flag.")
        if target:
            target = P(target).expanduser().absolute()
        else:
            target = scout.source_rel2home.expanduser().absolute()
        target.parent.mkdir(parents=True, exist_ok=True)
        if z and '.zip' not in target.suffix: target += '.zip'
        source = scout.source_full
        with self.tqdm_wrap(ascii=True, unit='b', unit_scale=True) as pbar: self.sftp.get(remotepath=source.as_posix(), localpath=target.as_posix(), callback=pbar.view_bar)  # type: ignore # pylint: disable=E1129
        if z:
            target = target.unzip(inplace=True, content=True)
            self.run_py(f"P(r'{source.as_posix()}').delete(sure=True)", desc="Cleaning temp zip files @ remote.", strict_returncode=True, strict_err=True)
        print("\n")
        return target
    # def print_summary(self):
    #     import polars as pl
    #     df = pl.DataFrame(List(self.terminal_responses).apply(lambda rsp: dict(desc=rsp.desc, err=rsp.err, returncode=rsp.returncode)).list)
    #     print("\nSummary of operations performed:")
    #     print(df.to_pandas().to_markdown())
    #     if ((df.select('returncode').to_series().to_list()[2:] == [None] * (len(df) - 2)) and (df.select('err').to_series().to_list()[2:] == [''] * (len(df) - 2))): print("\nAll operations completed successfully.\n")
    #     else: print("\nSome operations failed. \n")
    #     return df
```
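The new `machineconfig/utils/ssh.py` wraps paramiko behind a small `SSH` class (plus the `Scout` helper) with `run`, `run_py`, `copy_from_here`, `copy_to_here`, and `receieve` methods. A minimal usage sketch, assuming a host alias `myserver` exists in `~/.ssh/config` and `~/notes.txt` exists on the remote; only the class, constructor arguments, and method names come from the diff above:

```python
# Usage sketch only. "myserver" and "~/notes.txt" are hypothetical examples.
from machineconfig.utils.ssh import SSH

ssh = SSH(host="myserver")               # user/hostname/port/key resolved from ~/.ssh/config
print(ssh)                               # local ... >>> SSH TO >>> remote ...
ssh.run("uname -a", desc="Check remote kernel")
ssh.copy_from_here("~/.ssh/id_rsa.pub")  # upload; target defaults to the same path under the remote home
ssh.copy_to_here("~/notes.txt")          # download back into the local home directory
```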