machineconfig 1.97__py3-none-any.whl → 2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of machineconfig might be problematic.
- machineconfig/cluster/cloud_manager.py +22 -29
- machineconfig/cluster/data_transfer.py +2 -3
- machineconfig/cluster/distribute.py +0 -2
- machineconfig/cluster/file_manager.py +4 -5
- machineconfig/cluster/job_params.py +1 -4
- machineconfig/cluster/loader_runner.py +8 -11
- machineconfig/cluster/remote_machine.py +4 -5
- machineconfig/cluster/script_execution.py +2 -2
- machineconfig/cluster/script_notify_upon_completion.py +0 -1
- machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +4 -6
- machineconfig/cluster/sessions_managers/archive/session_managers.py +0 -1
- machineconfig/cluster/sessions_managers/enhanced_command_runner.py +35 -75
- machineconfig/cluster/sessions_managers/wt_local.py +113 -185
- machineconfig/cluster/sessions_managers/wt_local_manager.py +127 -197
- machineconfig/cluster/sessions_managers/wt_remote.py +60 -67
- machineconfig/cluster/sessions_managers/wt_remote_manager.py +110 -149
- machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +61 -64
- machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +72 -172
- machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +27 -60
- machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +58 -137
- machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +46 -74
- machineconfig/cluster/sessions_managers/zellij_local.py +91 -147
- machineconfig/cluster/sessions_managers/zellij_local_manager.py +165 -190
- machineconfig/cluster/sessions_managers/zellij_remote.py +51 -58
- machineconfig/cluster/sessions_managers/zellij_remote_manager.py +40 -46
- machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +19 -17
- machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +30 -31
- machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +64 -134
- machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +7 -11
- machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +27 -55
- machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +14 -13
- machineconfig/cluster/templates/cli_click.py +0 -1
- machineconfig/cluster/templates/cli_gooey.py +0 -2
- machineconfig/cluster/templates/cli_trogon.py +0 -1
- machineconfig/cluster/templates/run_cloud.py +0 -1
- machineconfig/cluster/templates/run_cluster.py +0 -1
- machineconfig/cluster/templates/run_remote.py +0 -1
- machineconfig/cluster/templates/utils.py +27 -11
- machineconfig/jobs/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/linux/msc/cli_agents.sh +16 -0
- machineconfig/jobs/python/check_installations.py +9 -9
- machineconfig/jobs/python/create_bootable_media.py +0 -2
- machineconfig/jobs/python/python_cargo_build_share.py +2 -2
- machineconfig/jobs/python/python_ve_symlink.py +9 -11
- machineconfig/jobs/python/tasks.py +0 -1
- machineconfig/jobs/python/vscode/api.py +5 -5
- machineconfig/jobs/python/vscode/link_ve.py +20 -21
- machineconfig/jobs/python/vscode/select_interpreter.py +28 -29
- machineconfig/jobs/python/vscode/sync_code.py +14 -18
- machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_custom_installers/archive/ngrok.py +15 -15
- machineconfig/jobs/python_custom_installers/dev/aider.py +10 -18
- machineconfig/jobs/python_custom_installers/dev/alacritty.py +12 -21
- machineconfig/jobs/python_custom_installers/dev/brave.py +13 -22
- machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +13 -20
- machineconfig/jobs/python_custom_installers/dev/code.py +17 -24
- machineconfig/jobs/python_custom_installers/dev/cursor.py +10 -21
- machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +12 -11
- machineconfig/jobs/python_custom_installers/dev/espanso.py +19 -23
- machineconfig/jobs/python_custom_installers/dev/goes.py +9 -16
- machineconfig/jobs/python_custom_installers/dev/lvim.py +13 -21
- machineconfig/jobs/python_custom_installers/dev/nerdfont.py +15 -22
- machineconfig/jobs/python_custom_installers/dev/redis.py +15 -23
- machineconfig/jobs/python_custom_installers/dev/wezterm.py +15 -22
- machineconfig/jobs/python_custom_installers/dev/winget.py +32 -50
- machineconfig/jobs/python_custom_installers/docker.py +15 -24
- machineconfig/jobs/python_custom_installers/gh.py +18 -26
- machineconfig/jobs/python_custom_installers/hx.py +33 -17
- machineconfig/jobs/python_custom_installers/warp-cli.py +15 -23
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_generic_installers/config.json +412 -389
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_windows_installers/dev/config.json +1 -1
- machineconfig/jobs/windows/archive/archive_pygraphviz.ps1 +1 -1
- machineconfig/jobs/windows/msc/cli_agents.bat +0 -0
- machineconfig/jobs/windows/msc/cli_agents.ps1 +0 -0
- machineconfig/jobs/windows/start_terminal.ps1 +1 -1
- machineconfig/logger.py +50 -0
- machineconfig/profile/create.py +50 -36
- machineconfig/profile/create_hardlinks.py +33 -26
- machineconfig/profile/shell.py +87 -60
- machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/cloud/init.sh +2 -2
- machineconfig/scripts/linux/checkout_versions +1 -1
- machineconfig/scripts/linux/choose_wezterm_theme +1 -1
- machineconfig/scripts/linux/cloud_copy +1 -1
- machineconfig/scripts/linux/cloud_manager +1 -1
- machineconfig/scripts/linux/cloud_mount +1 -1
- machineconfig/scripts/linux/cloud_repo_sync +1 -1
- machineconfig/scripts/linux/cloud_sync +1 -1
- machineconfig/scripts/linux/croshell +1 -1
- machineconfig/scripts/linux/devops +3 -5
- machineconfig/scripts/linux/fire +2 -1
- machineconfig/scripts/linux/fire_agents +3 -3
- machineconfig/scripts/linux/ftpx +1 -1
- machineconfig/scripts/linux/gh_models +1 -1
- machineconfig/scripts/linux/kill_process +1 -1
- machineconfig/scripts/linux/mcinit +2 -2
- machineconfig/scripts/linux/repos +1 -1
- machineconfig/scripts/linux/scheduler +1 -1
- machineconfig/scripts/linux/start_slidev +1 -1
- machineconfig/scripts/linux/start_terminals +1 -1
- machineconfig/scripts/linux/url2md +1 -1
- machineconfig/scripts/linux/warp-cli.sh +122 -0
- machineconfig/scripts/linux/wifi_conn +1 -1
- machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/croshell.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__init__.py +0 -0
- machineconfig/scripts/python/ai/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/generate_files.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-313.pyc +0 -0
- machineconfig/scripts/python/ai/chatmodes/Thinking-Beast-Mode.chatmode.md +337 -0
- machineconfig/scripts/python/ai/chatmodes/Ultimate-Transparent-Thinking-Beast-Mode.chatmode.md +644 -0
- machineconfig/scripts/python/ai/chatmodes/deepResearch.chatmode.md +81 -0
- machineconfig/scripts/python/ai/configs/.gemini/settings.json +81 -0
- machineconfig/scripts/python/ai/generate_files.py +84 -0
- machineconfig/scripts/python/ai/instructions/python/dev.instructions.md +45 -0
- machineconfig/scripts/python/ai/mcinit.py +107 -0
- machineconfig/scripts/python/ai/prompts/allLintersAndTypeCheckers.prompt.md +5 -0
- machineconfig/scripts/python/ai/prompts/research-report-skeleton.prompt.md +38 -0
- machineconfig/scripts/python/ai/scripts/lint_and_type_check.sh +52 -0
- machineconfig/scripts/python/archive/tmate_conn.py +5 -5
- machineconfig/scripts/python/archive/tmate_start.py +3 -3
- machineconfig/scripts/python/choose_wezterm_theme.py +2 -2
- machineconfig/scripts/python/cloud_copy.py +20 -19
- machineconfig/scripts/python/cloud_mount.py +10 -8
- machineconfig/scripts/python/cloud_repo_sync.py +15 -15
- machineconfig/scripts/python/cloud_sync.py +1 -1
- machineconfig/scripts/python/croshell.py +18 -16
- machineconfig/scripts/python/devops.py +6 -6
- machineconfig/scripts/python/devops_add_identity.py +9 -7
- machineconfig/scripts/python/devops_add_ssh_key.py +19 -19
- machineconfig/scripts/python/devops_backup_retrieve.py +14 -14
- machineconfig/scripts/python/devops_devapps_install.py +3 -3
- machineconfig/scripts/python/devops_update_repos.py +141 -53
- machineconfig/scripts/python/dotfile.py +3 -3
- machineconfig/scripts/python/fire_agents.py +202 -41
- machineconfig/scripts/python/fire_jobs.py +20 -21
- machineconfig/scripts/python/ftpx.py +4 -3
- machineconfig/scripts/python/gh_models.py +94 -94
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-313.pyc +0 -0
- machineconfig/scripts/python/helpers/cloud_helpers.py +3 -3
- machineconfig/scripts/python/helpers/helpers2.py +3 -3
- machineconfig/scripts/python/helpers/helpers4.py +8 -7
- machineconfig/scripts/python/helpers/helpers5.py +7 -7
- machineconfig/scripts/python/helpers/repo_sync_helpers.py +2 -2
- machineconfig/scripts/python/mount_nfs.py +4 -3
- machineconfig/scripts/python/mount_nw_drive.py +4 -4
- machineconfig/scripts/python/mount_ssh.py +4 -3
- machineconfig/scripts/python/repos.py +9 -9
- machineconfig/scripts/python/scheduler.py +1 -1
- machineconfig/scripts/python/start_slidev.py +9 -8
- machineconfig/scripts/python/start_terminals.py +1 -1
- machineconfig/scripts/python/viewer.py +40 -40
- machineconfig/scripts/python/wifi_conn.py +65 -66
- machineconfig/scripts/python/wsl_windows_transfer.py +2 -2
- machineconfig/scripts/windows/checkout_version.ps1 +1 -3
- machineconfig/scripts/windows/choose_wezterm_theme.ps1 +1 -3
- machineconfig/scripts/windows/cloud_copy.ps1 +2 -6
- machineconfig/scripts/windows/cloud_manager.ps1 +1 -1
- machineconfig/scripts/windows/cloud_repo_sync.ps1 +1 -2
- machineconfig/scripts/windows/cloud_sync.ps1 +2 -2
- machineconfig/scripts/windows/croshell.ps1 +2 -2
- machineconfig/scripts/windows/devops.ps1 +1 -4
- machineconfig/scripts/windows/dotfile.ps1 +1 -3
- machineconfig/scripts/windows/fire.ps1 +1 -1
- machineconfig/scripts/windows/ftpx.ps1 +2 -2
- machineconfig/scripts/windows/gpt.ps1 +1 -1
- machineconfig/scripts/windows/kill_process.ps1 +1 -2
- machineconfig/scripts/windows/mcinit.ps1 +2 -2
- machineconfig/scripts/windows/mount_nfs.ps1 +1 -1
- machineconfig/scripts/windows/mount_ssh.ps1 +1 -1
- machineconfig/scripts/windows/pomodoro.ps1 +1 -1
- machineconfig/scripts/windows/py2exe.ps1 +1 -3
- machineconfig/scripts/windows/repos.ps1 +1 -1
- machineconfig/scripts/windows/scheduler.ps1 +1 -1
- machineconfig/scripts/windows/snapshot.ps1 +2 -2
- machineconfig/scripts/windows/start_slidev.ps1 +1 -1
- machineconfig/scripts/windows/start_terminals.ps1 +1 -1
- machineconfig/scripts/windows/wifi_conn.ps1 +1 -1
- machineconfig/scripts/windows/wsl_windows_transfer.ps1 +1 -3
- machineconfig/settings/lf/linux/lfrc +1 -1
- machineconfig/settings/linters/.ruff.toml +2 -2
- machineconfig/settings/linters/.ruff_cache/.gitignore +2 -0
- machineconfig/settings/linters/.ruff_cache/CACHEDIR.TAG +1 -0
- machineconfig/settings/lvim/windows/archive/config_additional.lua +1 -1
- machineconfig/settings/shells/ipy/profiles/default/startup/playext.py +71 -71
- machineconfig/settings/shells/wt/settings.json +8 -8
- machineconfig/settings/svim/linux/init.toml +1 -1
- machineconfig/settings/svim/windows/init.toml +1 -1
- machineconfig/setup_linux/web_shortcuts/croshell.sh +0 -54
- machineconfig/setup_linux/web_shortcuts/interactive.sh +6 -6
- machineconfig/setup_linux/web_shortcuts/tmp.sh +2 -0
- machineconfig/setup_windows/web_shortcuts/all.ps1 +2 -2
- machineconfig/setup_windows/web_shortcuts/ascii_art.ps1 +1 -1
- machineconfig/setup_windows/web_shortcuts/croshell.ps1 +1 -1
- machineconfig/setup_windows/web_shortcuts/interactive.ps1 +5 -5
- machineconfig/setup_windows/wt_and_pwsh/install_fonts.ps1 +51 -15
- machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +75 -18
- machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +52 -42
- machineconfig/utils/ai/browser_user_wrapper.py +5 -5
- machineconfig/utils/ai/generate_file_checklist.py +19 -22
- machineconfig/utils/ai/url2md.py +5 -3
- machineconfig/utils/cloud/onedrive/setup_oauth.py +5 -4
- machineconfig/utils/cloud/onedrive/transaction.py +192 -227
- machineconfig/utils/code.py +71 -43
- machineconfig/utils/installer.py +77 -85
- machineconfig/utils/installer_utils/installer_abc.py +29 -17
- machineconfig/utils/installer_utils/installer_class.py +188 -83
- machineconfig/utils/io_save.py +3 -15
- machineconfig/utils/links.py +22 -11
- machineconfig/utils/notifications.py +197 -0
- machineconfig/utils/options.py +38 -25
- machineconfig/utils/path.py +18 -6
- machineconfig/utils/path_reduced.py +637 -316
- machineconfig/utils/procs.py +69 -63
- machineconfig/utils/scheduling.py +11 -13
- machineconfig/utils/ssh.py +351 -0
- machineconfig/utils/terminal.py +225 -0
- machineconfig/utils/utils.py +13 -12
- machineconfig/utils/utils2.py +43 -10
- machineconfig/utils/utils5.py +242 -46
- machineconfig/utils/ve.py +11 -6
- {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/METADATA +15 -9
- {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/RECORD +232 -235
- machineconfig/cluster/self_ssh.py +0 -57
- machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python/archive/python_tools.txt +0 -12
- machineconfig/jobs/python/vscode/__pycache__/select_interpreter.cpython-311.pyc +0 -0
- machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/jobs/python_generic_installers/update.py +0 -3
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
- machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
- machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/linux/activate_ve +0 -87
- machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_copy.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_mount.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/cloud_sync.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_backup_retrieve.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/get_zellij_cmd.cpython-311.pyc +0 -0
- machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
- machineconfig/scripts/python/ai/init.py +0 -56
- machineconfig/scripts/python/ai/rules/python/dev.md +0 -31
- machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
- machineconfig/scripts/python/helpers/__pycache__/repo_sync_helpers.cpython-311.pyc +0 -0
- machineconfig/scripts/windows/activate_ve.ps1 +0 -54
- {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/WHEEL +0 -0
- {machineconfig-1.97.dist-info → machineconfig-2.1.dist-info}/top_level.txt +0 -0
machineconfig/cluster/cloud_manager.py

@@ -1,6 +1,3 @@
-
-
-
 # from machineconfig.utils.utils2 import read_ini
 # from machineconfig.utils.io_save import save_pickle
 

@@ -19,18 +16,18 @@
 # """Convert list of dictionaries to markdown table format."""
 # if not data:
 # return ""
-
+
 # # Get all unique keys from all dictionaries
 # all_keys = set()
 # for row in data:
 # all_keys.update(row.keys())
-
+
 # keys = sorted(all_keys)
-
+
 # # Create header
 # header = "|" + "|".join(f" {key} " for key in keys) + "|"
 # separator = "|" + "|".join(" --- " for _ in keys) + "|"
-
+
 # # Create rows
 # rows = []
 # for row in data:

@@ -44,7 +41,7 @@
 # value = str(value)
 # row_values.append(f" {value} ")
 # rows.append("|" + "|".join(row_values) + "|")
-
+
 # return "\n".join([header, separator] + rows)
 
 

@@ -100,7 +97,6 @@
 # @staticmethod
 # def prepare_servers_report(cloud_root: PathExtended) -> list[dict[str, Any]]:
 # from machineconfig.cluster.remote_machine import RemoteMachine
-# # Replace crocodile List usage with plain Python list
 # workers_root = [p for p in cloud_root.joinpath("workers").iterdir()]
 # res: dict[str, list[RemoteMachine]] = {}
 # times: dict[str, timedelta] = {}

@@ -109,7 +105,7 @@
 # file_mod_time = datetime.fromtimestamp(running_jobs.stat().st_mtime) if running_jobs.exists() else datetime.min
 # times[a_worker.name] = datetime.now() - file_mod_time
 # res[a_worker.name] = pickle.loads(running_jobs.read_bytes()) if running_jobs.exists() else []
-
+
 # # Create list of dictionaries instead of DataFrame
 # servers_report = []
 # for machine in res.keys():

@@ -127,7 +123,7 @@
 # alternative_base = self.fetch_cloud_live()
 # assert alternative_base is not None
 # lock_path = alternative_base.expanduser().joinpath("lock.txt")
-# if lock_path.exists(): lock_owner: str = lock_path.read_text()
+# if lock_path.exists(): lock_owner: str = lock_path.read_text(encoding="utf-8")
 # else: lock_owner = "None"
 # self.console.print(f"đ Lock is held by: {lock_owner}")
 # self.console.print("đ§ž Log File:")

@@ -139,7 +135,7 @@
 # for item_name, item_list in log.items():
 # self.console.rule(f"{item_name} Jobs (Latest {'10' if len(item_list) > 10 else len(item_list)} / {len(item_list)})")
 # print() # empty line after the rule helps keeping the rendering clean in the terminal while zooming in and out.
-
+
 # # Add duration calculation for non-queued items
 # display_items = []
 # for item in item_list:

@@ -162,13 +158,13 @@
 # if item_name == "running": excluded_cols.update({"submission_time", "source_machine", "end_time"})
 # if item_name == "completed": excluded_cols.update({"submission_time", "source_machine", "start_time", "pid"})
 # if item_name == "failed": excluded_cols.update({"submission_time", "source_machine", "start_time"})
-
+
 # # Filter items and take last 10
 # filtered_items = []
 # for item in display_items[-10:]:
 # filtered_item = {k: v for k, v in item.items() if k not in excluded_cols}
 # filtered_items.append(filtered_item)
-
+
 # if filtered_items:
 # pprint(format_table_markdown(filtered_items))
 # pprint("\n\n")

@@ -246,9 +242,6 @@
 # """This method involves manual selection but has all-files scope (failed and running) and can be used for both local and remote machines.
 # The reason it is not automated for remotes is because even though the server might have failed, the processes therein might be running, so there is no automated way to tell."""
 # log = self.read_log()
-# from machineconfig.cluster.remote_machine import RemoteMachine
-# from machineconfig.utils.utils import display_options
-# # Replace crocodile List usage with plain Python list comprehension
 # jobs_all: list[str] = [p.name for p in self.base_path.expanduser().joinpath("jobs").iterdir()]
 # jobs_selected = display_options(options=jobs_all, msg="Select Jobs to Redo", multi=True, fzf=True)
 # for a_job in jobs_selected:

@@ -263,13 +256,13 @@
 # break
 # if found_log_type:
 # break
-
+
 # if not found_log_type:
 # raise ValueError(f"Job `{a_job}` is not found in any of the log lists.")
-
+
 # if found_entry_data is None:
 # raise ValueError(f"Job `{a_job}` has no entry data.")
-
+
 # entry = LogEntry.from_dict(found_entry_data)
 # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
 # entry.note += f"| Job failed @ {entry.run_machine}"

@@ -308,14 +301,14 @@
 # elif status == "completed" or status == "failed":
 # job_name = a_rm.config.job_id
 # log = self.read_log()
-
+
 # # Find the entry in running jobs
 # entry_data = None
 # for job_data in log["running"]:
 # if job_data.get("name") == job_name:
 # entry_data = job_data
 # break
-
+
 # if entry_data:
 # entry = LogEntry.from_dict(entry_data)
 # entry.end_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

@@ -344,7 +337,7 @@
 # while len(self.running_jobs) < self.max_jobs:
 # if idx >= len(log["queued"]):
 # break # looked at all jobs in the queue
-
+
 # queue_entry = LogEntry.from_dict(log["queued"][idx])
 # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{queue_entry.name}")
 # rm: RemoteMachine = pickle.loads(a_job_path.joinpath("data/remote_machine.Machine.pkl").read_bytes())

@@ -352,14 +345,14 @@
 # print(f"Job `{queue_entry.name}` is not allowed to run on this machine. Skipping ...")
 # idx += 1
 # continue # look at the next job in the queue.
-
+
 # pid, _process_cmd = rm.fire(run=True)
 # queue_entry.pid = pid
 # # queue_entry.cmd = process_cmd
 # queue_entry.run_machine = f"{getpass.getuser()}@{platform.node()}"
 # queue_entry.start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
 # queue_entry.session_name = rm.job_params.session_name
-
+
 # # Remove from queued and add to running
 # log["queued"] = [job for job in log["queued"] if job.get("name") != queue_entry.name]
 # log["running"].append(queue_entry.__dict__)

@@ -374,7 +367,7 @@
 # base_path.mkdir(parents=True, exist_ok=True)
 # base_path.sync_to_cloud(cloud=self.cloud, rel2home=True, sync_up=True, verbose=True, transfers=100)
 # self.release_lock()
-# def reset_lock(self):
+# def reset_lock(self):
 # base_path = CloudManager.base_path.expanduser()
 # base_path.mkdir(parents=True, exist_ok=True)
 # base_path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)

@@ -402,7 +395,7 @@
 # path.joinpath("lock.txt").write_text(this_machine).to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
 # return self.claim_lock(first_call=False)
 
-# locking_machine = lock_path.read_text()
+# locking_machine = lock_path.read_text(encoding="utf-8")
 # if locking_machine != "" and locking_machine != this_machine:
 # lock_mod_time = datetime.fromtimestamp(lock_path.stat().st_mtime)
 # if (datetime.now() - lock_mod_time).total_seconds() > 3600:

@@ -424,7 +417,7 @@
 # while counter < self.num_claim_checks:
 # lock_path_tmp = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
 # assert lock_path_tmp is not None
-# lock_data_tmp = lock_path_tmp.read_text()
+# lock_data_tmp = lock_path_tmp.read_text(encoding="utf-8")
 # if lock_data_tmp != this_machine:
 # print(f"CloudManager: Lock already claimed by `{lock_data_tmp}`. đ¤ˇââī¸")
 # print(f"sleeping for {self.inter_check_interval_sec} seconds and trying again.")

@@ -450,7 +443,7 @@
 # path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
 # self.lock_claimed = False
 # return NoReturn
-# data = lock_path.read_text()
+# data = lock_path.read_text(encoding="utf-8")
 # this_machine = f"{getpass.getuser()}@{platform.node()}"
 # if data != this_machine:
 # raise ValueError(f"CloudManager: Lock already claimed by `{data}`. đ¤ˇââī¸ Can't release a lock not owned! This shouldn't happen. Consider increasing trails before confirming the claim.")
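A recurring change in these hunks (and in most of the modules below) is replacing bare `read_text()` calls with `read_text(encoding="utf-8")`. A minimal sketch of why that matters for a tool that runs on both Windows and Linux workers; the path used here is illustrative, not taken from the package:

```python
from pathlib import Path

lock_path = Path("~/example/lock.txt").expanduser()  # illustrative path, not part of machineconfig

# Before: the file is decoded with the platform's locale encoding (often cp1252 on Windows),
# so the same lock file can read differently on different workers.
owner = lock_path.read_text() if lock_path.exists() else "None"

# After: the bytes are always decoded as UTF-8, matching how the files are written.
owner = lock_path.read_text(encoding="utf-8") if lock_path.exists() else "None"
```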
machineconfig/cluster/data_transfer.py

@@ -2,7 +2,6 @@
 # """
 
 
-
 # from machineconfig.cluster.remote_machine import RemoteMachine, FileManager
 
 

@@ -24,7 +23,7 @@
 # rm.file_manager.cloud_download_py_script_path.expanduser().write_text(cloud_download_py_script, encoding="utf-8")
 # # modify and save shell_script to including running of cloud_download_py_script before job script.
 # shell_file = rm.file_manager.shell_script_path.expanduser()
-# shell_script = shell_file.read_text().replace("# EXTRA-PLACEHOLDER-POST", f"cd ~; python {rm.file_manager.cloud_download_py_script_path.rel2home().as_posix()}")
+# shell_script = shell_file.read_text(encoding="utf-8").replace("# EXTRA-PLACEHOLDER-POST", f"cd ~; python {rm.file_manager.cloud_download_py_script_path.rel2home().as_posix()}")
 # download_url = rm.file_manager.job_root.zip().share_on_cloud()
 # target = rm.file_manager.job_root.rel2home().parent.joinpath(download_url.name).as_posix()
 # tmp = f"cd ~; curl -o '{target}' '{download_url.as_url_str()}'; unzip '{target}' -d {rm.file_manager.job_root.rel2home().parent.as_posix()}"

@@ -40,7 +39,7 @@
 # downloads = '\n'.join([f"cloud_copy {cloud}: '{a_path.collapseuser().as_posix()} -r" for a_path in rm.data])
 # if not rm.config.copy_repo: downloads += f"""\n cloud_copy {cloud}: {P(rm.job_params.repo_path_rh).collapseuser().as_posix()} -zer """
 # downloads += f"\ncloud_copy {cloud}: {rm.file_manager.job_root} -zr"
-# rm.file_manager.shell_script_path.expanduser().write_text(downloads + rm.file_manager.shell_script_path.expanduser().read_text(), encoding='utf-8') # newline={"Windows": None, "Linux": "\n"}[rm.ssh.get_remote_machine()]
+# rm.file_manager.shell_script_path.expanduser().write_text(downloads + rm.file_manager.shell_script_path.expanduser().read_text(encoding="utf-8"), encoding='utf-8') # newline={"Windows": None, "Linux": "\n"}[rm.ssh.get_remote_machine()]
 # PathExtended(rm.file_manager.job_root).to_cloud(cloud=cloud, zip=True, rel2home=True)
 
 # @staticmethod
machineconfig/cluster/distribute.py

@@ -190,8 +190,6 @@
 # def viz_load_ratios(self) -> None:
 # if not self.workload_params: raise RuntimeError("func_kwargs_list is None. You need to run generate_standard_kwargs() first.")
 # import plottext
-# names = L(self.sshz).apply(lambda x: x.get_remote_repr(add_machine=True)).list
-
 # plt.simple_multiple_bar(names, [[machine_specs.cpu for machine_specs in self.machines_specs], [machine_specs.ram for machine_specs in self.machines_specs]], title="Resources per machine", labels=["#cpu threads", "memory size"])
 # plt.show()
 # print("")
machineconfig/cluster/file_manager.py

@@ -1,4 +1,3 @@
-
 # from rich.console import Console
 # from machineconfig.utils.utils2 import pprint
 # from datetime import datetime

@@ -68,16 +67,16 @@
 # # else:
 # if platform.system() == "Windows" and script_path.name.endswith(".sh"):
 # tmp = script_path.with_suffix(".ps1")
-# tmp.write_text(script_path.read_text(), encoding="utf-8", newline=None)
+# tmp.write_text(script_path.read_text(encoding="utf-8"), encoding="utf-8", newline=None)
 # script_path = tmp
 # if platform.system() == "Linux" and script_path.name.endswith(".ps1"):
 # tmp = script_path.with_suffix(".sh")
-# tmp.write_text(script_path.read_text(), encoding="utf-8", newline='\n')
+# tmp.write_text(script_path.read_text(encoding="utf-8"), encoding="utf-8", newline='\n')
 # script_path = tmp
 # return f". {script_path}"
 # def get_job_status(self, session_name: str, tab_name: str) -> JOB_STATUS:
 # pid_path = self.execution_log_dir.expanduser().joinpath("pid.txt")
-# tmp = self.execution_log_dir.expanduser().joinpath("status.txt").read_text()
+# tmp = self.execution_log_dir.expanduser().joinpath("status.txt").read_text(encoding="utf-8")
 # status: JOB_STATUS = tmp # type: ignore
 # if status == "running":
 # if not pid_path.exists():

@@ -85,7 +84,7 @@
 # status = 'failed'
 # self.execution_log_dir.expanduser().joinpath("status.txt").write_text(status)
 # return status
-# pid: int = int(pid_path.read_text().rstrip())
+# pid: int = int(pid_path.read_text(encoding="utf-8").rstrip())
 # import psutil
 # try: proc = psutil.Process(pid=pid)
 # except psutil.NoSuchProcess:
machineconfig/cluster/job_params.py

@@ -1,6 +1,3 @@
-
-
-
 # import git
 
 # from typing import Optional, Callable, Union, Any

@@ -113,7 +110,7 @@
 # else: base = f"""
 # res = None # in case the file did not define it.
 # # --------------------------------- SCRIPT AS IS
-# {P(self.file_path_rh).expanduser().read_text()}
+# {P(self.file_path_rh).expanduser().read_text(encoding="utf-8")}
 # # --------------------------------- END OF SCRIPT AS IS
 # """
 
machineconfig/cluster/loader_runner.py

@@ -1,4 +1,3 @@
-
 # """
 # Runner
 # """

@@ -111,20 +110,19 @@
 # @staticmethod
 # def from_dict(a_dict: dict[str, Any]):
 # return LogEntry(
-# name=a_dict["name"],
-# submission_time=str(a_dict["submission_time"]),
-# start_time=str(a_dict["start_time"]) if a_dict.get("start_time") else None,
+# name=a_dict["name"],
+# submission_time=str(a_dict["submission_time"]),
+# start_time=str(a_dict["start_time"]) if a_dict.get("start_time") else None,
 # end_time=str(a_dict["end_time"]) if a_dict.get("end_time") else None,
-# run_machine=a_dict.get("run_machine"),
-# source_machine=a_dict.get("source_machine", ""),
-# note=a_dict.get("note", ""),
-# pid=a_dict.get("pid"),
-# cmd=a_dict.get("cmd"),
+# run_machine=a_dict.get("run_machine"),
+# source_machine=a_dict.get("source_machine", ""),
+# note=a_dict.get("note", ""),
+# pid=a_dict.get("pid"),
+# cmd=a_dict.get("cmd"),
 # session_name=a_dict.get("session_name")
 # )
 
 
-
 # @dataclass
 # class RemoteMachineConfig:
 # # conn

@@ -179,6 +177,5 @@
 # except (FileNotFoundError, KeyError, IndexError) as err: raise ValueError(f"Email config name is not provided. đ¤ˇââī¸ & default could not be read @ `{DEFAULTS_PATH}`") from err
 
 
-
 # if __name__ == '__main__':
 # pass
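The `LogEntry.from_dict` hunk above is an indentation-only cleanup, but it spells out the keys the cluster log entries carry. A hedged sketch of a dictionary that would satisfy the constructor, with illustrative values (only the field names come from the diff):

```python
# Illustrative log-entry payload; field names taken from the from_dict keyword arguments above.
entry_data = {
    "name": "example_job",                     # required
    "submission_time": "2024-01-01 09:00:00",  # required, coerced to str
    "start_time": None,                        # optional, coerced to str when present
    "end_time": None,
    "run_machine": None,                       # set once a worker fires the job
    "source_machine": "user@laptop",
    "note": "",
    "pid": None,
    "cmd": None,
    "session_name": None,
}
# entry = LogEntry.from_dict(entry_data)  # LogEntry is defined in loader_runner.py
```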
machineconfig/cluster/remote_machine.py

@@ -79,7 +79,7 @@
 # print(f"đ§âđģ Waiting for Python process to start and declare its pid @ `{pid_path}` as dictated in python script ... ")
 # time.sleep(3)
 # try:
-# pid = int(pid_path.read_text())
+# pid = int(pid_path.read_text(encoding="utf-8"))
 # import psutil
 # process_command = " ".join(psutil.Process(pid).cmdline())
 # print(f"đ Python process started running @ {pid=} & {process_command=}")

@@ -133,7 +133,7 @@
 # email_script = PathExtended(cluster.__file__).parent.joinpath("script_notify_upon_completion.py").read_text(encoding="utf-8").replace("email_params = EmailParams.from_empty()", f"email_params = {email_params}").replace('manager = FileManager.from_pickle(params.file_manager_path)', '')
 # py_script = py_script.replace("# NOTIFICATION-CODE-PLACEHOLDER", email_script)
 # ve_path = PathExtended(self.job_params.repo_path_rh).expanduser().joinpath(".ve_path")
-# if ve_path.exists(): ve_name = PathExtended(ve_path.read_text()).expanduser().name
+# if ve_path.exists(): ve_name = PathExtended(ve_path.read_text(encoding="utf-8")).expanduser().name
 # else:
 # import sys
 # ve_name = PathExtended(sys.executable).parent.parent.name

@@ -144,7 +144,6 @@
 # echo "~~~~~~~~~~~~~~~~SHELL START~~~~~~~~~~~~~~~"
 # {'~/scripts/devops -w update' if self.config.update_essential_repos else ''}
 # {f'cd {P(self.job_params.repo_path_rh).collapseuser().as_posix()}'}
-# . activate_ve {ve_name}
 # {'git pull' if self.config.update_repo else ''}
 # {'pip install -e .' if self.config.install_repo else ''}
 # echo "~~~~~~~~~~~~~~~~SHELL END ~~~~~~~~~~~~~~~"

@@ -216,7 +215,7 @@
 # if not start_time_file.exists():
 # print(f"Job {self.config.job_id} is still in the queue. đ¯")
 # else:
-# start_time = start_time_file.read_text()
+# start_time = start_time_file.read_text(encoding="utf-8")
 # txt = f"Machine {self.ssh.get_remote_repr(add_machine=True)} has not yet finished job `{self.config.job_id}`. đ"
 # txt += f"\nIt started at {start_time}. đ, and is still running. đââī¸"
 # try:

@@ -229,7 +228,7 @@
 # print("\n")
 # else:
 # results_folder_file = base.joinpath("results_folder_path.txt") # it could be one returned by function executed or one made up by the running context.
-# results_folder = results_folder_file.read_text()
+# results_folder = results_folder_file.read_text(encoding="utf-8")
 # print("\n" * 2)
 # console.rule("Job Completed đđĨŗđđĨđžđđĒ")
 # print(f"""Machine {self.ssh.get_remote_repr(add_machine=True)} has finished job `{self.config.job_id}`. đ
machineconfig/cluster/script_execution.py

@@ -63,9 +63,9 @@
 # â° Time: {time_at_execution_start_local}
 # """, style="bold blue")
 
-# if isinstance(func_kwargs, dict):
+# if isinstance(func_kwargs, dict):
 # pprint(func_kwargs, "đ Function Arguments")
-# else:
+# else:
 # pprint(func_kwargs, f"đ Function Arguments from `{manager.kwargs_path.collapseuser().as_posix()}`")
 
 # print("\n" + "âĸ" * 60 + "\n")
machineconfig/cluster/sessions_managers/archive/create_zellij_template.py

@@ -1,10 +1,8 @@
-
-"""ZT
-"""
+"""ZT"""
 
 import socket
 from machineconfig.utils.utils import choose_ssh_host, write_shell_script_to_default_program_path
-from machineconfig.utils.path_reduced import
+from machineconfig.utils.path_reduced import PathExtended as PathExtended
 
 prefix = """
 

@@ -44,7 +42,7 @@ def build_template(tabs: list[str]):
 res += suffix.replace("THISMACHINE", socket.gethostname())
 file = PathExtended.tmp().joinpath("tmp_files/templates/zellij_template.kdl")
 file.parent.mkdir(parents=True, exist_ok=True)
-file.write_text(res)
+file.write_text(res, encoding="utf-8")
 res = f"zellij --layout {file}"
 return res
 

@@ -57,5 +55,5 @@ def launch_from_ssh_config():
 return None
 
 
-if __name__ ==
+if __name__ == "__main__":
 launch_from_ssh_config()
machineconfig/cluster/sessions_managers/enhanced_command_runner.py

@@ -13,148 +13,108 @@ from rich import box
 
 console = Console()
 
-
-
-description: Optional[str] = None,
-show_progress: bool = True,
-timeout: Optional[int] = None
-) -> Dict[str, Any]:
+
+def run_enhanced_command(command: str, description: Optional[str] = None, show_progress: bool = True, timeout: Optional[int] = None) -> Dict[str, Any]:
 """
 Run a command with enhanced Rich formatting and user feedback.
-
+
 Args:
 command: The command to execute
 description: Optional description for progress display
 show_progress: Whether to show a progress spinner
 timeout: Optional timeout in seconds
-
+
 Returns:
 Dictionary with success status, output, and error information
 """
-
+
 if description is None:
 description = f"Executing: {command[:50]}..."
-
+
 try:
 if show_progress:
-with Progress(
-SpinnerColumn(),
-TextColumn("[progress.description]{task.description}"),
-console=console,
-transient=True
-) as progress:
+with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=console, transient=True) as progress:
 task = progress.add_task(f"[cyan]{description}[/cyan]", total=None)
-
-result = subprocess.run(
-
-shell=True,
-capture_output=True,
-text=True,
-timeout=timeout
-)
-
+
+result = subprocess.run(command, shell=True, capture_output=True, text=True, timeout=timeout)
+
 progress.update(task, completed=True)
 else:
-result = subprocess.run(
-
-shell=True,
-capture_output=True,
-text=True,
-timeout=timeout
-)
-
+result = subprocess.run(command, shell=True, capture_output=True, text=True, timeout=timeout)
+
 # Enhanced output processing
 stdout = result.stdout.strip() if result.stdout else ""
 stderr = result.stderr.strip() if result.stderr else ""
-
+
 # Process common Zellij messages with enhanced formatting
 if "Session:" in stdout and "successfully deleted" in stdout:
 session_match = re.search(r'Session: "([^"]+)" successfully deleted', stdout)
 if session_match:
 session_name = session_match.group(1)
 console.print(f"[bold red]đī¸ Session[/bold red] [yellow]'{session_name}'[/yellow] [red]successfully deleted[/red]")
-
+
 if "zellij layout is running" in stdout:
-console.print(stdout.replace("zellij layout is running @",
-
-
+console.print(stdout.replace("zellij layout is running @", "[bold green]đ Zellij layout is running[/bold green] [yellow]@[/yellow]"))
+
 # Handle pseudo-terminal warnings with less alarming appearance
 if "Pseudo-terminal will not be allocated" in stderr:
 console.print("[dim yellow]âšī¸ Note: Running in non-interactive mode[/dim yellow]")
 stderr = stderr.replace("Pseudo-terminal will not be allocated because stdin is not a terminal.\n", "")
-
+
 if result.returncode == 0:
 if stdout and not any(msg in stdout for msg in ["Session:", "zellij layout is running"]):
 console.print(f"[green]{stdout}[/green]")
-return {
-"success": True,
-"returncode": result.returncode,
-"stdout": stdout,
-"stderr": stderr
-}
+return {"success": True, "returncode": result.returncode, "stdout": stdout, "stderr": stderr}
 else:
 if stderr:
 console.print(f"[bold red]Error:[/bold red] [red]{stderr}[/red]")
-return {
-
-"returncode": result.returncode,
-"stdout": stdout,
-"stderr": stderr
-}
-
+return {"success": False, "returncode": result.returncode, "stdout": stdout, "stderr": stderr}
+
 except subprocess.TimeoutExpired:
 console.print(f"[bold red]â° Command timed out after {timeout} seconds[/bold red]")
-return {
-"success": False,
-"error": "Timeout",
-"timeout": timeout
-}
+return {"success": False, "error": "Timeout", "timeout": timeout}
 except Exception as e:
 console.print(f"[bold red]đĨ Unexpected error:[/bold red] [red]{str(e)}[/red]")
-return {
-
-"error": str(e)
-}
+return {"success": False, "error": str(e)}
+
 
 def enhanced_zellij_session_start(session_name: str, layout_path: str) -> Dict[str, Any]:
 """
 Start a Zellij session with enhanced visual feedback.
 """
 console.print()
-console.print(Panel.fit(f"đ Starting Zellij Session: [bold cyan]{session_name}[/bold cyan]",
-
-
+console.print(Panel.fit(f"đ Starting Zellij Session: [bold cyan]{session_name}[/bold cyan]", style="green", box=box.ROUNDED))
+
 # Delete existing session first (suppress normal output)
 delete_cmd = f"zellij delete-session --force {session_name}"
 run_enhanced_command(
-delete_cmd,
+delete_cmd,
 f"Cleaning up existing session '{session_name}'",
 show_progress=False,
-timeout=5 # Quick timeout for cleanup
+timeout=5, # Quick timeout for cleanup
 )
-
+
 # Start new session (use -b for background to avoid hanging)
 start_cmd = f"zellij --layout {layout_path} a -b {session_name}"
 start_result = run_enhanced_command(
 start_cmd,
 f"Starting session '{session_name}' with layout",
 show_progress=False,
-timeout=10 # Add timeout to prevent hanging
+timeout=10, # Add timeout to prevent hanging
 )
-
+
 if start_result["success"]:
-console.print(Panel(f"[bold green]â Session '{session_name}' is now running![/bold green]\n[dim]Layout: {layout_path}[/dim]",
-style="green", title="đ Success"))
+console.print(Panel(f"[bold green]â Session '{session_name}' is now running![/bold green]\n[dim]Layout: {layout_path}[/dim]", style="green", title="đ Success"))
 else:
-console.print(Panel(f"[bold red]â Failed to start session '{session_name}'[/bold red]\n[red]{start_result.get('stderr', 'Unknown error')}[/red]",
-
-
+console.print(Panel(f"[bold red]â Failed to start session '{session_name}'[/bold red]\n[red]{start_result.get('stderr', 'Unknown error')}[/red]", style="red", title="đĨ Error"))
+
 return start_result
 
+
 if __name__ == "__main__":
 # Demo the enhanced command execution
 console.print(Panel.fit("đ¨ Enhanced Command Execution Demo", style="bold cyan"))
-
+
 # Test with a simple command
 result = run_enhanced_command("echo 'Hello, Rich world!'", "Testing enhanced output")
 console.print(f"Result: {result}")