machineconfig 1.96__py3-none-any.whl → 2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of machineconfig might be problematic. Click here for more details.

Files changed (164)
  1. machineconfig/cluster/cloud_manager.py +22 -26
  2. machineconfig/cluster/data_transfer.py +2 -2
  3. machineconfig/cluster/distribute.py +0 -2
  4. machineconfig/cluster/file_manager.py +4 -4
  5. machineconfig/cluster/job_params.py +1 -1
  6. machineconfig/cluster/loader_runner.py +8 -8
  7. machineconfig/cluster/remote_machine.py +4 -4
  8. machineconfig/cluster/script_execution.py +2 -2
  9. machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +1 -1
  10. machineconfig/cluster/sessions_managers/enhanced_command_runner.py +23 -23
  11. machineconfig/cluster/sessions_managers/wt_local.py +78 -76
  12. machineconfig/cluster/sessions_managers/wt_local_manager.py +91 -91
  13. machineconfig/cluster/sessions_managers/wt_remote.py +39 -39
  14. machineconfig/cluster/sessions_managers/wt_remote_manager.py +94 -91
  15. machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +56 -54
  16. machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +49 -49
  17. machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +18 -18
  18. machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +42 -42
  19. machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +36 -36
  20. machineconfig/cluster/sessions_managers/zellij_local.py +43 -46
  21. machineconfig/cluster/sessions_managers/zellij_local_manager.py +139 -120
  22. machineconfig/cluster/sessions_managers/zellij_remote.py +35 -35
  23. machineconfig/cluster/sessions_managers/zellij_remote_manager.py +33 -33
  24. machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +15 -15
  25. machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +25 -26
  26. machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +49 -49
  27. machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +5 -5
  28. machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +15 -15
  29. machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +11 -11
  30. machineconfig/cluster/templates/utils.py +3 -3
  31. machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
  32. machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
  33. machineconfig/jobs/python/__pycache__/python_ve_symlink.cpython-311.pyc +0 -0
  34. machineconfig/jobs/python/check_installations.py +8 -9
  35. machineconfig/jobs/python/python_cargo_build_share.py +2 -2
  36. machineconfig/jobs/python/vscode/link_ve.py +7 -7
  37. machineconfig/jobs/python/vscode/select_interpreter.py +7 -7
  38. machineconfig/jobs/python/vscode/sync_code.py +5 -5
  39. machineconfig/jobs/python_custom_installers/archive/ngrok.py +2 -2
  40. machineconfig/jobs/python_custom_installers/dev/aider.py +3 -3
  41. machineconfig/jobs/python_custom_installers/dev/alacritty.py +3 -3
  42. machineconfig/jobs/python_custom_installers/dev/brave.py +3 -3
  43. machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +5 -5
  44. machineconfig/jobs/python_custom_installers/dev/code.py +3 -3
  45. machineconfig/jobs/python_custom_installers/dev/cursor.py +9 -9
  46. machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +4 -4
  47. machineconfig/jobs/python_custom_installers/dev/espanso.py +4 -4
  48. machineconfig/jobs/python_custom_installers/dev/goes.py +4 -4
  49. machineconfig/jobs/python_custom_installers/dev/lvim.py +4 -4
  50. machineconfig/jobs/python_custom_installers/dev/nerdfont.py +3 -3
  51. machineconfig/jobs/python_custom_installers/dev/redis.py +3 -3
  52. machineconfig/jobs/python_custom_installers/dev/wezterm.py +3 -3
  53. machineconfig/jobs/python_custom_installers/dev/winget.py +27 -27
  54. machineconfig/jobs/python_custom_installers/docker.py +3 -3
  55. machineconfig/jobs/python_custom_installers/gh.py +7 -7
  56. machineconfig/jobs/python_custom_installers/hx.py +1 -1
  57. machineconfig/jobs/python_custom_installers/warp-cli.py +3 -3
  58. machineconfig/jobs/python_generic_installers/config.json +412 -389
  59. machineconfig/jobs/python_windows_installers/dev/config.json +1 -1
  60. machineconfig/logger.py +50 -0
  61. machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
  62. machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
  63. machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
  64. machineconfig/profile/create.py +23 -16
  65. machineconfig/profile/create_hardlinks.py +8 -8
  66. machineconfig/profile/shell.py +41 -37
  67. machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
  68. machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
  69. machineconfig/scripts/linux/devops +2 -2
  70. machineconfig/scripts/linux/fire +1 -0
  71. machineconfig/scripts/linux/fire_agents +0 -1
  72. machineconfig/scripts/linux/mcinit +27 -0
  73. machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
  74. machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
  75. machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
  76. machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
  77. machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
  78. machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
  79. machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
  80. machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
  81. machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
  82. machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
  83. machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
  84. machineconfig/scripts/python/ai/__pycache__/mcinit.cpython-311.pyc +0 -0
  85. machineconfig/scripts/python/ai/chatmodes/Thinking-Beast-Mode.chatmode.md +337 -0
  86. machineconfig/scripts/python/ai/chatmodes/Ultimate-Transparent-Thinking-Beast-Mode.chatmode.md +644 -0
  87. machineconfig/scripts/python/ai/chatmodes/deepResearch.chatmode.md +81 -0
  88. machineconfig/scripts/python/ai/configs/.gemini/settings.json +81 -0
  89. machineconfig/scripts/python/ai/instructions/python/dev.instructions.md +45 -0
  90. machineconfig/scripts/python/ai/mcinit.py +103 -0
  91. machineconfig/scripts/python/ai/prompts/allLintersAndTypeCheckers.prompt.md +5 -0
  92. machineconfig/scripts/python/ai/prompts/research-report-skeleton.prompt.md +38 -0
  93. machineconfig/scripts/python/ai/scripts/lint_and_type_check.sh +47 -0
  94. machineconfig/scripts/python/archive/tmate_conn.py +5 -5
  95. machineconfig/scripts/python/archive/tmate_start.py +3 -3
  96. machineconfig/scripts/python/choose_wezterm_theme.py +2 -2
  97. machineconfig/scripts/python/cloud_copy.py +19 -18
  98. machineconfig/scripts/python/cloud_mount.py +9 -7
  99. machineconfig/scripts/python/cloud_repo_sync.py +11 -11
  100. machineconfig/scripts/python/cloud_sync.py +1 -1
  101. machineconfig/scripts/python/croshell.py +14 -14
  102. machineconfig/scripts/python/devops.py +6 -6
  103. machineconfig/scripts/python/devops_add_identity.py +8 -6
  104. machineconfig/scripts/python/devops_add_ssh_key.py +18 -18
  105. machineconfig/scripts/python/devops_backup_retrieve.py +13 -13
  106. machineconfig/scripts/python/devops_devapps_install.py +3 -3
  107. machineconfig/scripts/python/devops_update_repos.py +1 -1
  108. machineconfig/scripts/python/dotfile.py +2 -2
  109. machineconfig/scripts/python/fire_agents.py +183 -41
  110. machineconfig/scripts/python/fire_jobs.py +17 -11
  111. machineconfig/scripts/python/ftpx.py +2 -2
  112. machineconfig/scripts/python/gh_models.py +94 -94
  113. machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
  114. machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
  115. machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
  116. machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
  117. machineconfig/scripts/python/helpers/cloud_helpers.py +3 -3
  118. machineconfig/scripts/python/helpers/helpers2.py +1 -1
  119. machineconfig/scripts/python/helpers/helpers4.py +8 -6
  120. machineconfig/scripts/python/helpers/helpers5.py +7 -7
  121. machineconfig/scripts/python/helpers/repo_sync_helpers.py +1 -1
  122. machineconfig/scripts/python/mount_nfs.py +3 -2
  123. machineconfig/scripts/python/mount_nw_drive.py +4 -4
  124. machineconfig/scripts/python/mount_ssh.py +3 -2
  125. machineconfig/scripts/python/repos.py +8 -8
  126. machineconfig/scripts/python/scheduler.py +1 -1
  127. machineconfig/scripts/python/start_slidev.py +8 -7
  128. machineconfig/scripts/python/start_terminals.py +1 -1
  129. machineconfig/scripts/python/viewer.py +40 -40
  130. machineconfig/scripts/python/wifi_conn.py +65 -66
  131. machineconfig/scripts/python/wsl_windows_transfer.py +1 -1
  132. machineconfig/scripts/windows/mcinit.ps1 +4 -0
  133. machineconfig/settings/linters/.ruff.toml +2 -2
  134. machineconfig/settings/shells/ipy/profiles/default/startup/playext.py +71 -71
  135. machineconfig/settings/shells/wt/settings.json +8 -8
  136. machineconfig/setup_linux/web_shortcuts/tmp.sh +2 -0
  137. machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +10 -7
  138. machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +9 -7
  139. machineconfig/utils/ai/browser_user_wrapper.py +5 -5
  140. machineconfig/utils/ai/generate_file_checklist.py +11 -12
  141. machineconfig/utils/ai/url2md.py +1 -1
  142. machineconfig/utils/cloud/onedrive/setup_oauth.py +4 -4
  143. machineconfig/utils/cloud/onedrive/transaction.py +129 -129
  144. machineconfig/utils/code.py +13 -6
  145. machineconfig/utils/installer.py +51 -53
  146. machineconfig/utils/installer_utils/installer_abc.py +21 -10
  147. machineconfig/utils/installer_utils/installer_class.py +42 -16
  148. machineconfig/utils/io_save.py +3 -15
  149. machineconfig/utils/options.py +10 -3
  150. machineconfig/utils/path.py +5 -0
  151. machineconfig/utils/path_reduced.py +201 -149
  152. machineconfig/utils/procs.py +23 -23
  153. machineconfig/utils/scheduling.py +11 -12
  154. machineconfig/utils/ssh.py +270 -0
  155. machineconfig/utils/terminal.py +180 -0
  156. machineconfig/utils/utils.py +1 -2
  157. machineconfig/utils/utils2.py +43 -0
  158. machineconfig/utils/utils5.py +163 -34
  159. machineconfig/utils/ve.py +2 -2
  160. {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/METADATA +13 -8
  161. {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/RECORD +163 -144
  162. machineconfig/cluster/self_ssh.py +0 -57
  163. {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/WHEEL +0 -0
  164. {machineconfig-1.96.dist-info → machineconfig-2.0.dist-info}/top_level.txt +0 -0
@@ -19,18 +19,18 @@
19
19
  # """Convert list of dictionaries to markdown table format."""
20
20
  # if not data:
21
21
  # return ""
22
-
22
+
23
23
  # # Get all unique keys from all dictionaries
24
24
  # all_keys = set()
25
25
  # for row in data:
26
26
  # all_keys.update(row.keys())
27
-
27
+
28
28
  # keys = sorted(all_keys)
29
-
29
+
30
30
  # # Create header
31
31
  # header = "|" + "|".join(f" {key} " for key in keys) + "|"
32
32
  # separator = "|" + "|".join(" --- " for _ in keys) + "|"
33
-
33
+
34
34
  # # Create rows
35
35
  # rows = []
36
36
  # for row in data:
@@ -44,7 +44,7 @@
44
44
  # value = str(value)
45
45
  # row_values.append(f" {value} ")
46
46
  # rows.append("|" + "|".join(row_values) + "|")
47
-
47
+
48
48
  # return "\n".join([header, separator] + rows)
49
49
 
50
50
 
@@ -100,7 +100,6 @@
100
100
  # @staticmethod
101
101
  # def prepare_servers_report(cloud_root: PathExtended) -> list[dict[str, Any]]:
102
102
  # from machineconfig.cluster.remote_machine import RemoteMachine
103
- # # Replace crocodile List usage with plain Python list
104
103
  # workers_root = [p for p in cloud_root.joinpath("workers").iterdir()]
105
104
  # res: dict[str, list[RemoteMachine]] = {}
106
105
  # times: dict[str, timedelta] = {}
@@ -109,7 +108,7 @@
109
108
  # file_mod_time = datetime.fromtimestamp(running_jobs.stat().st_mtime) if running_jobs.exists() else datetime.min
110
109
  # times[a_worker.name] = datetime.now() - file_mod_time
111
110
  # res[a_worker.name] = pickle.loads(running_jobs.read_bytes()) if running_jobs.exists() else []
112
-
111
+
113
112
  # # Create list of dictionaries instead of DataFrame
114
113
  # servers_report = []
115
114
  # for machine in res.keys():
@@ -127,7 +126,7 @@
127
126
  # alternative_base = self.fetch_cloud_live()
128
127
  # assert alternative_base is not None
129
128
  # lock_path = alternative_base.expanduser().joinpath("lock.txt")
130
- # if lock_path.exists(): lock_owner: str = lock_path.read_text()
129
+ # if lock_path.exists(): lock_owner: str = lock_path.read_text(encoding="utf-8")
131
130
  # else: lock_owner = "None"
132
131
  # self.console.print(f"🔒 Lock is held by: {lock_owner}")
133
132
  # self.console.print("🧾 Log File:")
@@ -139,7 +138,7 @@
139
138
  # for item_name, item_list in log.items():
140
139
  # self.console.rule(f"{item_name} Jobs (Latest {'10' if len(item_list) > 10 else len(item_list)} / {len(item_list)})")
141
140
  # print() # empty line after the rule helps keeping the rendering clean in the terminal while zooming in and out.
142
-
141
+
143
142
  # # Add duration calculation for non-queued items
144
143
  # display_items = []
145
144
  # for item in item_list:
@@ -162,13 +161,13 @@
162
161
  # if item_name == "running": excluded_cols.update({"submission_time", "source_machine", "end_time"})
163
162
  # if item_name == "completed": excluded_cols.update({"submission_time", "source_machine", "start_time", "pid"})
164
163
  # if item_name == "failed": excluded_cols.update({"submission_time", "source_machine", "start_time"})
165
-
164
+
166
165
  # # Filter items and take last 10
167
166
  # filtered_items = []
168
167
  # for item in display_items[-10:]:
169
168
  # filtered_item = {k: v for k, v in item.items() if k not in excluded_cols}
170
169
  # filtered_items.append(filtered_item)
171
-
170
+
172
171
  # if filtered_items:
173
172
  # pprint(format_table_markdown(filtered_items))
174
173
  # pprint("\n\n")
@@ -246,9 +245,6 @@
246
245
  # """This method involves manual selection but has all-files scope (failed and running) and can be used for both local and remote machines.
247
246
  # The reason it is not automated for remotes is because even though the server might have failed, the processes therein might be running, so there is no automated way to tell."""
248
247
  # log = self.read_log()
249
- # from machineconfig.cluster.remote_machine import RemoteMachine
250
- # from machineconfig.utils.utils import display_options
251
- # # Replace crocodile List usage with plain Python list comprehension
252
248
  # jobs_all: list[str] = [p.name for p in self.base_path.expanduser().joinpath("jobs").iterdir()]
253
249
  # jobs_selected = display_options(options=jobs_all, msg="Select Jobs to Redo", multi=True, fzf=True)
254
250
  # for a_job in jobs_selected:
@@ -263,13 +259,13 @@
263
259
  # break
264
260
  # if found_log_type:
265
261
  # break
266
-
262
+
267
263
  # if not found_log_type:
268
264
  # raise ValueError(f"Job `{a_job}` is not found in any of the log lists.")
269
-
265
+
270
266
  # if found_entry_data is None:
271
267
  # raise ValueError(f"Job `{a_job}` has no entry data.")
272
-
268
+
273
269
  # entry = LogEntry.from_dict(found_entry_data)
274
270
  # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
275
271
  # entry.note += f"| Job failed @ {entry.run_machine}"
@@ -308,14 +304,14 @@
308
304
  # elif status == "completed" or status == "failed":
309
305
  # job_name = a_rm.config.job_id
310
306
  # log = self.read_log()
311
-
307
+
312
308
  # # Find the entry in running jobs
313
309
  # entry_data = None
314
310
  # for job_data in log["running"]:
315
311
  # if job_data.get("name") == job_name:
316
312
  # entry_data = job_data
317
313
  # break
318
-
314
+
319
315
  # if entry_data:
320
316
  # entry = LogEntry.from_dict(entry_data)
321
317
  # entry.end_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@@ -344,7 +340,7 @@
344
340
  # while len(self.running_jobs) < self.max_jobs:
345
341
  # if idx >= len(log["queued"]):
346
342
  # break # looked at all jobs in the queue
347
-
343
+
348
344
  # queue_entry = LogEntry.from_dict(log["queued"][idx])
349
345
  # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{queue_entry.name}")
350
346
  # rm: RemoteMachine = pickle.loads(a_job_path.joinpath("data/remote_machine.Machine.pkl").read_bytes())
@@ -352,14 +348,14 @@
352
348
  # print(f"Job `{queue_entry.name}` is not allowed to run on this machine. Skipping ...")
353
349
  # idx += 1
354
350
  # continue # look at the next job in the queue.
355
-
351
+
356
352
  # pid, _process_cmd = rm.fire(run=True)
357
353
  # queue_entry.pid = pid
358
354
  # # queue_entry.cmd = process_cmd
359
355
  # queue_entry.run_machine = f"{getpass.getuser()}@{platform.node()}"
360
356
  # queue_entry.start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
361
357
  # queue_entry.session_name = rm.job_params.session_name
362
-
358
+
363
359
  # # Remove from queued and add to running
364
360
  # log["queued"] = [job for job in log["queued"] if job.get("name") != queue_entry.name]
365
361
  # log["running"].append(queue_entry.__dict__)
@@ -374,7 +370,7 @@
374
370
  # base_path.mkdir(parents=True, exist_ok=True)
375
371
  # base_path.sync_to_cloud(cloud=self.cloud, rel2home=True, sync_up=True, verbose=True, transfers=100)
376
372
  # self.release_lock()
377
- # def reset_lock(self):
373
+ # def reset_lock(self):
378
374
  # base_path = CloudManager.base_path.expanduser()
379
375
  # base_path.mkdir(parents=True, exist_ok=True)
380
376
  # base_path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
@@ -402,7 +398,7 @@
402
398
  # path.joinpath("lock.txt").write_text(this_machine).to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
403
399
  # return self.claim_lock(first_call=False)
404
400
 
405
- # locking_machine = lock_path.read_text()
401
+ # locking_machine = lock_path.read_text(encoding="utf-8")
406
402
  # if locking_machine != "" and locking_machine != this_machine:
407
403
  # lock_mod_time = datetime.fromtimestamp(lock_path.stat().st_mtime)
408
404
  # if (datetime.now() - lock_mod_time).total_seconds() > 3600:
@@ -424,7 +420,7 @@
424
420
  # while counter < self.num_claim_checks:
425
421
  # lock_path_tmp = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
426
422
  # assert lock_path_tmp is not None
427
- # lock_data_tmp = lock_path_tmp.read_text()
423
+ # lock_data_tmp = lock_path_tmp.read_text(encoding="utf-8")
428
424
  # if lock_data_tmp != this_machine:
429
425
  # print(f"CloudManager: Lock already claimed by `{lock_data_tmp}`. 🤷‍♂️")
430
426
  # print(f"sleeping for {self.inter_check_interval_sec} seconds and trying again.")
@@ -450,7 +446,7 @@
450
446
  # path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
451
447
  # self.lock_claimed = False
452
448
  # return NoReturn
453
- # data = lock_path.read_text()
449
+ # data = lock_path.read_text(encoding="utf-8")
454
450
  # this_machine = f"{getpass.getuser()}@{platform.node()}"
455
451
  # if data != this_machine:
456
452
  # raise ValueError(f"CloudManager: Lock already claimed by `{data}`. 🤷‍♂️ Can't release a lock not owned! This shouldn't happen. Consider increasing trails before confirming the claim.")
@@ -24,7 +24,7 @@
24
24
  # rm.file_manager.cloud_download_py_script_path.expanduser().write_text(cloud_download_py_script, encoding="utf-8")
25
25
  # # modify and save shell_script to including running of cloud_download_py_script before job script.
26
26
  # shell_file = rm.file_manager.shell_script_path.expanduser()
27
- # shell_script = shell_file.read_text().replace("# EXTRA-PLACEHOLDER-POST", f"cd ~; python {rm.file_manager.cloud_download_py_script_path.rel2home().as_posix()}")
27
+ # shell_script = shell_file.read_text(encoding="utf-8").replace("# EXTRA-PLACEHOLDER-POST", f"cd ~; python {rm.file_manager.cloud_download_py_script_path.rel2home().as_posix()}")
28
28
  # download_url = rm.file_manager.job_root.zip().share_on_cloud()
29
29
  # target = rm.file_manager.job_root.rel2home().parent.joinpath(download_url.name).as_posix()
30
30
  # tmp = f"cd ~; curl -o '{target}' '{download_url.as_url_str()}'; unzip '{target}' -d {rm.file_manager.job_root.rel2home().parent.as_posix()}"
@@ -40,7 +40,7 @@
40
40
  # downloads = '\n'.join([f"cloud_copy {cloud}: '{a_path.collapseuser().as_posix()} -r" for a_path in rm.data])
41
41
  # if not rm.config.copy_repo: downloads += f"""\n cloud_copy {cloud}: {P(rm.job_params.repo_path_rh).collapseuser().as_posix()} -zer """
42
42
  # downloads += f"\ncloud_copy {cloud}: {rm.file_manager.job_root} -zr"
43
- # rm.file_manager.shell_script_path.expanduser().write_text(downloads + rm.file_manager.shell_script_path.expanduser().read_text(), encoding='utf-8') # newline={"Windows": None, "Linux": "\n"}[rm.ssh.get_remote_machine()]
43
+ # rm.file_manager.shell_script_path.expanduser().write_text(downloads + rm.file_manager.shell_script_path.expanduser().read_text(encoding="utf-8"), encoding='utf-8') # newline={"Windows": None, "Linux": "\n"}[rm.ssh.get_remote_machine()]
44
44
  # PathExtended(rm.file_manager.job_root).to_cloud(cloud=cloud, zip=True, rel2home=True)
45
45
 
46
46
  # @staticmethod
@@ -190,8 +190,6 @@
190
190
  # def viz_load_ratios(self) -> None:
191
191
  # if not self.workload_params: raise RuntimeError("func_kwargs_list is None. You need to run generate_standard_kwargs() first.")
192
192
  # import plottext
193
- # names = L(self.sshz).apply(lambda x: x.get_remote_repr(add_machine=True)).list
194
-
195
193
  # plt.simple_multiple_bar(names, [[machine_specs.cpu for machine_specs in self.machines_specs], [machine_specs.ram for machine_specs in self.machines_specs]], title="Resources per machine", labels=["#cpu threads", "memory size"])
196
194
  # plt.show()
197
195
  # print("")
@@ -68,16 +68,16 @@
68
68
  # # else:
69
69
  # if platform.system() == "Windows" and script_path.name.endswith(".sh"):
70
70
  # tmp = script_path.with_suffix(".ps1")
71
- # tmp.write_text(script_path.read_text(), encoding="utf-8", newline=None)
71
+ # tmp.write_text(script_path.read_text(encoding="utf-8"), encoding="utf-8", newline=None)
72
72
  # script_path = tmp
73
73
  # if platform.system() == "Linux" and script_path.name.endswith(".ps1"):
74
74
  # tmp = script_path.with_suffix(".sh")
75
- # tmp.write_text(script_path.read_text(), encoding="utf-8", newline='\n')
75
+ # tmp.write_text(script_path.read_text(encoding="utf-8"), encoding="utf-8", newline='\n')
76
76
  # script_path = tmp
77
77
  # return f". {script_path}"
78
78
  # def get_job_status(self, session_name: str, tab_name: str) -> JOB_STATUS:
79
79
  # pid_path = self.execution_log_dir.expanduser().joinpath("pid.txt")
80
- # tmp = self.execution_log_dir.expanduser().joinpath("status.txt").read_text()
80
+ # tmp = self.execution_log_dir.expanduser().joinpath("status.txt").read_text(encoding="utf-8")
81
81
  # status: JOB_STATUS = tmp # type: ignore
82
82
  # if status == "running":
83
83
  # if not pid_path.exists():
@@ -85,7 +85,7 @@
85
85
  # status = 'failed'
86
86
  # self.execution_log_dir.expanduser().joinpath("status.txt").write_text(status)
87
87
  # return status
88
- # pid: int = int(pid_path.read_text().rstrip())
88
+ # pid: int = int(pid_path.read_text(encoding="utf-8").rstrip())
89
89
  # import psutil
90
90
  # try: proc = psutil.Process(pid=pid)
91
91
  # except psutil.NoSuchProcess:
@@ -113,7 +113,7 @@
113
113
  # else: base = f"""
114
114
  # res = None # in case the file did not define it.
115
115
  # # --------------------------------- SCRIPT AS IS
116
- # {P(self.file_path_rh).expanduser().read_text()}
116
+ # {P(self.file_path_rh).expanduser().read_text(encoding="utf-8")}
117
117
  # # --------------------------------- END OF SCRIPT AS IS
118
118
  # """
119
119
 
@@ -111,15 +111,15 @@
111
111
  # @staticmethod
112
112
  # def from_dict(a_dict: dict[str, Any]):
113
113
  # return LogEntry(
114
- # name=a_dict["name"],
115
- # submission_time=str(a_dict["submission_time"]),
116
- # start_time=str(a_dict["start_time"]) if a_dict.get("start_time") else None,
114
+ # name=a_dict["name"],
115
+ # submission_time=str(a_dict["submission_time"]),
116
+ # start_time=str(a_dict["start_time"]) if a_dict.get("start_time") else None,
117
117
  # end_time=str(a_dict["end_time"]) if a_dict.get("end_time") else None,
118
- # run_machine=a_dict.get("run_machine"),
119
- # source_machine=a_dict.get("source_machine", ""),
120
- # note=a_dict.get("note", ""),
121
- # pid=a_dict.get("pid"),
122
- # cmd=a_dict.get("cmd"),
118
+ # run_machine=a_dict.get("run_machine"),
119
+ # source_machine=a_dict.get("source_machine", ""),
120
+ # note=a_dict.get("note", ""),
121
+ # pid=a_dict.get("pid"),
122
+ # cmd=a_dict.get("cmd"),
123
123
  # session_name=a_dict.get("session_name")
124
124
  # )
125
125
 
@@ -79,7 +79,7 @@
79
79
  # print(f"🧑‍💻 Waiting for Python process to start and declare its pid @ `{pid_path}` as dictated in python script ... ")
80
80
  # time.sleep(3)
81
81
  # try:
82
- # pid = int(pid_path.read_text())
82
+ # pid = int(pid_path.read_text(encoding="utf-8"))
83
83
  # import psutil
84
84
  # process_command = " ".join(psutil.Process(pid).cmdline())
85
85
  # print(f"🎉 Python process started running @ {pid=} & {process_command=}")
@@ -133,7 +133,7 @@
133
133
  # email_script = PathExtended(cluster.__file__).parent.joinpath("script_notify_upon_completion.py").read_text(encoding="utf-8").replace("email_params = EmailParams.from_empty()", f"email_params = {email_params}").replace('manager = FileManager.from_pickle(params.file_manager_path)', '')
134
134
  # py_script = py_script.replace("# NOTIFICATION-CODE-PLACEHOLDER", email_script)
135
135
  # ve_path = PathExtended(self.job_params.repo_path_rh).expanduser().joinpath(".ve_path")
136
- # if ve_path.exists(): ve_name = PathExtended(ve_path.read_text()).expanduser().name
136
+ # if ve_path.exists(): ve_name = PathExtended(ve_path.read_text(encoding="utf-8")).expanduser().name
137
137
  # else:
138
138
  # import sys
139
139
  # ve_name = PathExtended(sys.executable).parent.parent.name
@@ -216,7 +216,7 @@
216
216
  # if not start_time_file.exists():
217
217
  # print(f"Job {self.config.job_id} is still in the queue. 😯")
218
218
  # else:
219
- # start_time = start_time_file.read_text()
219
+ # start_time = start_time_file.read_text(encoding="utf-8")
220
220
  # txt = f"Machine {self.ssh.get_remote_repr(add_machine=True)} has not yet finished job `{self.config.job_id}`. 😟"
221
221
  # txt += f"\nIt started at {start_time}. 🕒, and is still running. 🏃‍♂️"
222
222
  # try:
@@ -229,7 +229,7 @@
229
229
  # print("\n")
230
230
  # else:
231
231
  # results_folder_file = base.joinpath("results_folder_path.txt") # it could be one returned by function executed or one made up by the running context.
232
- # results_folder = results_folder_file.read_text()
232
+ # results_folder = results_folder_file.read_text(encoding="utf-8")
233
233
  # print("\n" * 2)
234
234
  # console.rule("Job Completed 🎉🥳🎆🥂🍾🎊🪅")
235
235
  # print(f"""Machine {self.ssh.get_remote_repr(add_machine=True)} has finished job `{self.config.job_id}`. 😁
@@ -63,9 +63,9 @@
63
63
  # ⏰ Time: {time_at_execution_start_local}
64
64
  # """, style="bold blue")
65
65
 
66
- # if isinstance(func_kwargs, dict):
66
+ # if isinstance(func_kwargs, dict):
67
67
  # pprint(func_kwargs, "📋 Function Arguments")
68
- # else:
68
+ # else:
69
69
  # pprint(func_kwargs, f"📋 Function Arguments from `{manager.kwargs_path.collapseuser().as_posix()}`")
70
70
 
71
71
  # print("\n" + "•" * 60 + "\n")
@@ -44,7 +44,7 @@ def build_template(tabs: list[str]):
44
44
  res += suffix.replace("THISMACHINE", socket.gethostname())
45
45
  file = PathExtended.tmp().joinpath("tmp_files/templates/zellij_template.kdl")
46
46
  file.parent.mkdir(parents=True, exist_ok=True)
47
- file.write_text(res)
47
+ file.write_text(res, encoding="utf-8")
48
48
  res = f"zellij --layout {file}"
49
49
  return res
50
50
 
@@ -14,27 +14,27 @@ from rich import box
14
14
  console = Console()
15
15
 
16
16
  def run_enhanced_command(
17
- command: str,
17
+ command: str,
18
18
  description: Optional[str] = None,
19
19
  show_progress: bool = True,
20
20
  timeout: Optional[int] = None
21
21
  ) -> Dict[str, Any]:
22
22
  """
23
23
  Run a command with enhanced Rich formatting and user feedback.
24
-
24
+
25
25
  Args:
26
26
  command: The command to execute
27
27
  description: Optional description for progress display
28
28
  show_progress: Whether to show a progress spinner
29
29
  timeout: Optional timeout in seconds
30
-
30
+
31
31
  Returns:
32
32
  Dictionary with success status, output, and error information
33
33
  """
34
-
34
+
35
35
  if description is None:
36
36
  description = f"Executing: {command[:50]}..."
37
-
37
+
38
38
  try:
39
39
  if show_progress:
40
40
  with Progress(
@@ -44,7 +44,7 @@ def run_enhanced_command(
44
44
  transient=True
45
45
  ) as progress:
46
46
  task = progress.add_task(f"[cyan]{description}[/cyan]", total=None)
47
-
47
+
48
48
  result = subprocess.run(
49
49
  command,
50
50
  shell=True,
@@ -52,7 +52,7 @@ def run_enhanced_command(
52
52
  text=True,
53
53
  timeout=timeout
54
54
  )
55
-
55
+
56
56
  progress.update(task, completed=True)
57
57
  else:
58
58
  result = subprocess.run(
@@ -62,27 +62,27 @@ def run_enhanced_command(
62
62
  text=True,
63
63
  timeout=timeout
64
64
  )
65
-
65
+
66
66
  # Enhanced output processing
67
67
  stdout = result.stdout.strip() if result.stdout else ""
68
68
  stderr = result.stderr.strip() if result.stderr else ""
69
-
69
+
70
70
  # Process common Zellij messages with enhanced formatting
71
71
  if "Session:" in stdout and "successfully deleted" in stdout:
72
72
  session_match = re.search(r'Session: "([^"]+)" successfully deleted', stdout)
73
73
  if session_match:
74
74
  session_name = session_match.group(1)
75
75
  console.print(f"[bold red]🗑️ Session[/bold red] [yellow]'{session_name}'[/yellow] [red]successfully deleted[/red]")
76
-
76
+
77
77
  if "zellij layout is running" in stdout:
78
- console.print(stdout.replace("zellij layout is running @",
78
+ console.print(stdout.replace("zellij layout is running @",
79
79
  "[bold green]🚀 Zellij layout is running[/bold green] [yellow]@[/yellow]"))
80
-
80
+
81
81
  # Handle pseudo-terminal warnings with less alarming appearance
82
82
  if "Pseudo-terminal will not be allocated" in stderr:
83
83
  console.print("[dim yellow]ℹ️ Note: Running in non-interactive mode[/dim yellow]")
84
84
  stderr = stderr.replace("Pseudo-terminal will not be allocated because stdin is not a terminal.\n", "")
85
-
85
+
86
86
  if result.returncode == 0:
87
87
  if stdout and not any(msg in stdout for msg in ["Session:", "zellij layout is running"]):
88
88
  console.print(f"[green]{stdout}[/green]")
@@ -101,7 +101,7 @@ def run_enhanced_command(
101
101
  "stdout": stdout,
102
102
  "stderr": stderr
103
103
  }
104
-
104
+
105
105
  except subprocess.TimeoutExpired:
106
106
  console.print(f"[bold red]⏰ Command timed out after {timeout} seconds[/bold red]")
107
107
  return {
@@ -121,18 +121,18 @@ def enhanced_zellij_session_start(session_name: str, layout_path: str) -> Dict[s
121
121
  Start a Zellij session with enhanced visual feedback.
122
122
  """
123
123
  console.print()
124
- console.print(Panel.fit(f"🚀 Starting Zellij Session: [bold cyan]{session_name}[/bold cyan]",
124
+ console.print(Panel.fit(f"🚀 Starting Zellij Session: [bold cyan]{session_name}[/bold cyan]",
125
125
  style="green", box=box.ROUNDED))
126
-
126
+
127
127
  # Delete existing session first (suppress normal output)
128
128
  delete_cmd = f"zellij delete-session --force {session_name}"
129
129
  run_enhanced_command(
130
- delete_cmd,
130
+ delete_cmd,
131
131
  f"Cleaning up existing session '{session_name}'",
132
132
  show_progress=False,
133
133
  timeout=5 # Quick timeout for cleanup
134
134
  )
135
-
135
+
136
136
  # Start new session (use -b for background to avoid hanging)
137
137
  start_cmd = f"zellij --layout {layout_path} a -b {session_name}"
138
138
  start_result = run_enhanced_command(
@@ -141,20 +141,20 @@ def enhanced_zellij_session_start(session_name: str, layout_path: str) -> Dict[s
141
141
  show_progress=False,
142
142
  timeout=10 # Add timeout to prevent hanging
143
143
  )
144
-
144
+
145
145
  if start_result["success"]:
146
- console.print(Panel(f"[bold green]✅ Session '{session_name}' is now running![/bold green]\n[dim]Layout: {layout_path}[/dim]",
146
+ console.print(Panel(f"[bold green]✅ Session '{session_name}' is now running![/bold green]\n[dim]Layout: {layout_path}[/dim]",
147
147
  style="green", title="🎉 Success"))
148
148
  else:
149
- console.print(Panel(f"[bold red]❌ Failed to start session '{session_name}'[/bold red]\n[red]{start_result.get('stderr', 'Unknown error')}[/red]",
149
+ console.print(Panel(f"[bold red]❌ Failed to start session '{session_name}'[/bold red]\n[red]{start_result.get('stderr', 'Unknown error')}[/red]",
150
150
  style="red", title="💥 Error"))
151
-
151
+
152
152
  return start_result
153
153
 
154
154
  if __name__ == "__main__":
155
155
  # Demo the enhanced command execution
156
156
  console.print(Panel.fit("🎨 Enhanced Command Execution Demo", style="bold cyan"))
157
-
157
+
158
158
  # Test with a simple command
159
159
  result = run_enhanced_command("echo 'Hello, Rich world!'", "Testing enhanced output")
160
160
  console.print(f"Result: {result}")