machineconfig 1.95__py3-none-any.whl → 1.97__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of machineconfig might be problematic; see the registry's advisory page for more details.

Files changed (504)
  1. machineconfig/cluster/cloud_manager.py +445 -343
  2. machineconfig/cluster/data_transfer.py +63 -57
  3. machineconfig/cluster/distribute.py +284 -280
  4. machineconfig/cluster/file_manager.py +234 -237
  5. machineconfig/cluster/job_params.py +133 -133
  6. machineconfig/cluster/loader_runner.py +183 -149
  7. machineconfig/cluster/remote_machine.py +269 -252
  8. machineconfig/cluster/script_execution.py +215 -209
  9. machineconfig/cluster/script_notify_upon_completion.py +50 -43
  10. machineconfig/cluster/self_ssh.py +52 -54
  11. machineconfig/cluster/sessions_managers/__init__.py +0 -0
  12. machineconfig/cluster/sessions_managers/archive/__init__.py +0 -0
  13. machineconfig/{jobs/python → cluster/sessions_managers/archive}/create_zellij_template.py +5 -3
  14. machineconfig/cluster/sessions_managers/archive/session_managers.py +184 -0
  15. machineconfig/cluster/sessions_managers/demo_rich_zellij.py +0 -0
  16. machineconfig/cluster/sessions_managers/enhanced_command_runner.py +160 -0
  17. machineconfig/cluster/sessions_managers/wt_local.py +494 -0
  18. machineconfig/cluster/sessions_managers/wt_local_manager.py +577 -0
  19. machineconfig/cluster/sessions_managers/wt_remote.py +288 -0
  20. machineconfig/cluster/sessions_managers/wt_remote_manager.py +483 -0
  21. machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +196 -0
  22. machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +418 -0
  23. machineconfig/cluster/sessions_managers/wt_utils/remote_executor.py +175 -0
  24. machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +300 -0
  25. machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +228 -0
  26. machineconfig/cluster/sessions_managers/zellij_local.py +418 -0
  27. machineconfig/cluster/sessions_managers/zellij_local_manager.py +533 -0
  28. machineconfig/cluster/sessions_managers/zellij_remote.py +229 -0
  29. machineconfig/cluster/sessions_managers/zellij_remote_manager.py +188 -0
  30. machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +64 -0
  31. machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +126 -0
  32. machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +334 -0
  33. machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +68 -0
  34. machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +119 -0
  35. machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +85 -0
  36. machineconfig/cluster/templates/cli_click.py +0 -1
  37. machineconfig/cluster/templates/cli_gooey.py +102 -104
  38. machineconfig/cluster/templates/run_cloud.py +51 -51
  39. machineconfig/cluster/templates/run_cluster.py +103 -59
  40. machineconfig/cluster/templates/run_remote.py +57 -58
  41. machineconfig/cluster/templates/utils.py +69 -36
  42. machineconfig/jobs/__pycache__/__init__.cpython-311.pyc +0 -0
  43. machineconfig/jobs/python/__pycache__/__init__.cpython-311.pyc +0 -0
  44. machineconfig/jobs/python/check_installations.py +258 -190
  45. machineconfig/jobs/python/create_bootable_media.py +7 -3
  46. machineconfig/jobs/python/python_cargo_build_share.py +50 -50
  47. machineconfig/jobs/python/python_ve_symlink.py +6 -6
  48. machineconfig/jobs/python/vscode/__pycache__/select_interpreter.cpython-311.pyc +0 -0
  49. machineconfig/jobs/python/vscode/api.py +1 -1
  50. machineconfig/jobs/python/vscode/link_ve.py +2 -2
  51. machineconfig/jobs/python/vscode/select_interpreter.py +9 -5
  52. machineconfig/jobs/python/vscode/sync_code.py +8 -5
  53. machineconfig/jobs/python_custom_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  54. machineconfig/jobs/python_custom_installers/archive/ngrok.py +1 -1
  55. machineconfig/jobs/python_custom_installers/dev/alacritty.py +3 -2
  56. machineconfig/jobs/python_custom_installers/dev/brave.py +7 -3
  57. machineconfig/jobs/python_custom_installers/dev/bypass_paywall.py +3 -4
  58. machineconfig/jobs/python_custom_installers/dev/code.py +3 -1
  59. machineconfig/jobs/python_custom_installers/dev/cursor.py +66 -5
  60. machineconfig/jobs/python_custom_installers/dev/docker_desktop.py +0 -1
  61. machineconfig/jobs/python_custom_installers/dev/espanso.py +13 -9
  62. machineconfig/jobs/python_custom_installers/dev/goes.py +2 -8
  63. machineconfig/jobs/python_custom_installers/dev/lvim.py +3 -2
  64. machineconfig/jobs/python_custom_installers/dev/nerdfont.py +1 -1
  65. machineconfig/jobs/python_custom_installers/dev/redis.py +7 -3
  66. machineconfig/jobs/python_custom_installers/dev/wezterm.py +8 -4
  67. machineconfig/jobs/python_custom_installers/dev/winget.py +194 -0
  68. machineconfig/jobs/python_custom_installers/{dev/docker.py → docker.py} +8 -3
  69. machineconfig/jobs/python_custom_installers/gh.py +4 -3
  70. machineconfig/jobs/python_custom_installers/hx.py +9 -8
  71. machineconfig/jobs/python_custom_installers/scripts/linux/vscode.sh +97 -30
  72. machineconfig/jobs/python_custom_installers/{dev/warp-cli.py → warp-cli.py} +1 -1
  73. machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  74. machineconfig/jobs/python_generic_installers/config.json +133 -9
  75. machineconfig/jobs/python_generic_installers/dev/config.json +208 -37
  76. machineconfig/jobs/python_generic_installers/update.py +3 -0
  77. machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  78. machineconfig/jobs/python_linux_installers/config.json +42 -6
  79. machineconfig/jobs/python_linux_installers/dev/config.json +79 -11
  80. machineconfig/jobs/python_windows_installers/config.json +6 -0
  81. machineconfig/profile/__pycache__/__init__.cpython-311.pyc +0 -0
  82. machineconfig/profile/__pycache__/create.cpython-311.pyc +0 -0
  83. machineconfig/profile/__pycache__/shell.cpython-311.pyc +0 -0
  84. machineconfig/profile/create.py +5 -5
  85. machineconfig/profile/create_hardlinks.py +5 -5
  86. machineconfig/profile/shell.py +44 -17
  87. machineconfig/scripts/__pycache__/__init__.cpython-311.pyc +0 -0
  88. machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
  89. machineconfig/scripts/linux/fire_agents +27 -0
  90. machineconfig/scripts/linux/mcinit +27 -0
  91. machineconfig/scripts/linux/wifi_conn +24 -0
  92. machineconfig/scripts/python/__pycache__/__init__.cpython-311.pyc +0 -0
  93. machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
  94. machineconfig/scripts/python/__pycache__/cloud_copy.cpython-311.pyc +0 -0
  95. machineconfig/scripts/python/__pycache__/cloud_mount.cpython-311.pyc +0 -0
  96. machineconfig/scripts/python/__pycache__/cloud_sync.cpython-311.pyc +0 -0
  97. machineconfig/scripts/python/__pycache__/croshell.cpython-311.pyc +0 -0
  98. machineconfig/scripts/python/__pycache__/devops.cpython-311.pyc +0 -0
  99. machineconfig/scripts/python/__pycache__/devops_backup_retrieve.cpython-311.pyc +0 -0
  100. machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-311.pyc +0 -0
  101. machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-311.pyc +0 -0
  102. machineconfig/scripts/python/__pycache__/fire_agents.cpython-311.pyc +0 -0
  103. machineconfig/scripts/python/__pycache__/fire_jobs.cpython-311.pyc +0 -0
  104. machineconfig/scripts/python/__pycache__/fire_jobs.cpython-313.pyc +0 -0
  105. machineconfig/scripts/python/__pycache__/get_zellij_cmd.cpython-311.pyc +0 -0
  106. machineconfig/scripts/python/__pycache__/repos.cpython-311.pyc +0 -0
  107. machineconfig/scripts/python/ai/__pycache__/init.cpython-311.pyc +0 -0
  108. machineconfig/scripts/python/ai/init.py +56 -0
  109. machineconfig/scripts/python/ai/rules/python/dev.md +31 -0
  110. machineconfig/scripts/python/archive/im2text.py +1 -3
  111. machineconfig/scripts/python/choose_wezterm_theme.py +3 -3
  112. machineconfig/scripts/python/cloud_copy.py +10 -10
  113. machineconfig/scripts/python/cloud_manager.py +77 -99
  114. machineconfig/scripts/python/cloud_mount.py +13 -12
  115. machineconfig/scripts/python/cloud_repo_sync.py +14 -11
  116. machineconfig/scripts/python/croshell.py +24 -21
  117. machineconfig/scripts/python/devops.py +12 -17
  118. machineconfig/scripts/python/devops_add_identity.py +32 -10
  119. machineconfig/scripts/python/devops_add_ssh_key.py +10 -10
  120. machineconfig/scripts/python/devops_backup_retrieve.py +9 -8
  121. machineconfig/scripts/python/devops_devapps_install.py +6 -6
  122. machineconfig/scripts/python/devops_update_repos.py +4 -3
  123. machineconfig/scripts/python/dotfile.py +10 -7
  124. machineconfig/scripts/python/fire_agents.py +69 -0
  125. machineconfig/scripts/python/fire_jobs.py +62 -65
  126. machineconfig/scripts/python/ftpx.py +8 -8
  127. machineconfig/scripts/python/get_zellij_cmd.py +3 -3
  128. machineconfig/scripts/python/gh_models.py +6 -4
  129. machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-311.pyc +0 -0
  130. machineconfig/scripts/python/helpers/__pycache__/__init__.cpython-313.pyc +0 -0
  131. machineconfig/scripts/python/helpers/__pycache__/cloud_helpers.cpython-311.pyc +0 -0
  132. machineconfig/scripts/python/helpers/__pycache__/helpers2.cpython-311.pyc +0 -0
  133. machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-311.pyc +0 -0
  134. machineconfig/scripts/python/helpers/__pycache__/helpers4.cpython-313.pyc +0 -0
  135. machineconfig/scripts/python/helpers/__pycache__/repo_sync_helpers.cpython-311.pyc +0 -0
  136. machineconfig/scripts/python/helpers/cloud_helpers.py +12 -12
  137. machineconfig/scripts/python/helpers/helpers2.py +9 -8
  138. machineconfig/scripts/python/helpers/helpers4.py +23 -35
  139. machineconfig/scripts/python/helpers/repo_sync_helpers.py +17 -16
  140. machineconfig/scripts/python/mount_nfs.py +8 -11
  141. machineconfig/scripts/python/mount_nw_drive.py +4 -4
  142. machineconfig/scripts/python/mount_ssh.py +2 -2
  143. machineconfig/scripts/python/onetimeshare.py +56 -57
  144. machineconfig/scripts/python/pomodoro.py +55 -55
  145. machineconfig/scripts/python/repos.py +26 -18
  146. machineconfig/scripts/python/scheduler.py +70 -53
  147. machineconfig/scripts/python/snapshot.py +21 -24
  148. machineconfig/scripts/python/start_slidev.py +6 -5
  149. machineconfig/scripts/python/start_terminals.py +3 -1
  150. machineconfig/scripts/python/viewer.py +5 -4
  151. machineconfig/scripts/python/viewer_template.py +138 -140
  152. machineconfig/scripts/python/wifi_conn.py +412 -60
  153. machineconfig/scripts/python/wsl_windows_transfer.py +18 -3
  154. machineconfig/scripts/windows/mcinit.ps1 +4 -0
  155. machineconfig/settings/linters/.pylintrc +6 -7
  156. machineconfig/settings/lvim/windows/config.lua +0 -0
  157. machineconfig/settings/shells/bash/init.sh +6 -0
  158. machineconfig/settings/shells/ipy/profiles/default/startup/playext.py +7 -6
  159. machineconfig/settings/shells/pwsh/init.ps1 +6 -6
  160. machineconfig/settings/shells/wt/settings.json +51 -266
  161. machineconfig/setup_linux/web_shortcuts/interactive.sh +5 -2
  162. machineconfig/setup_windows/wt_and_pwsh/set_pwsh_theme.py +3 -6
  163. machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +11 -9
  164. machineconfig/utils/ai/url2md.py +2 -2
  165. machineconfig/utils/cloud/onedrive/setup_oauth.py +59 -0
  166. machineconfig/utils/cloud/onedrive/transaction.py +796 -0
  167. machineconfig/utils/code.py +22 -13
  168. machineconfig/utils/installer.py +78 -35
  169. machineconfig/utils/installer_utils/installer_abc.py +7 -6
  170. machineconfig/utils/installer_utils/installer_class.py +44 -25
  171. machineconfig/utils/io_save.py +107 -0
  172. machineconfig/utils/links.py +19 -15
  173. machineconfig/utils/options.py +4 -8
  174. machineconfig/utils/path.py +91 -78
  175. machineconfig/utils/path_reduced.py +608 -0
  176. machineconfig/utils/procs.py +110 -45
  177. machineconfig/utils/scheduling.py +312 -222
  178. machineconfig/utils/utils.py +7 -7
  179. machineconfig/utils/utils2.py +42 -0
  180. machineconfig/utils/utils5.py +84 -0
  181. machineconfig/utils/ve.py +49 -87
  182. {machineconfig-1.95.dist-info → machineconfig-1.97.dist-info}/METADATA +2 -2
  183. machineconfig-1.97.dist-info/RECORD +442 -0
  184. machineconfig/cluster/session_managers.py +0 -183
  185. machineconfig/cluster/templates/f.py +0 -4
  186. machineconfig/jobs/python/__pycache__/check_installations.cpython-311.pyc +0 -0
  187. machineconfig/jobs/python/__pycache__/checkout_version.cpython-311.pyc +0 -0
  188. machineconfig/jobs/python/__pycache__/python_ve_symlink.cpython-311.pyc +0 -0
  189. machineconfig/jobs/python/checkout_version.py +0 -123
  190. machineconfig/jobs/python/vscode/__pycache__/api.cpython-311.pyc +0 -0
  191. machineconfig/jobs/python/vscode/__pycache__/link_ve.cpython-311.pyc +0 -0
  192. machineconfig/jobs/python_custom_installers/__pycache__/hx.cpython-311.pyc +0 -0
  193. machineconfig/jobs/python_windows_installers/__pycache__/__init__.cpython-311.pyc +0 -0
  194. machineconfig/scripts/python/.mypy_cache/.gitignore +0 -2
  195. machineconfig/scripts/python/.mypy_cache/3.11/@plugins_snapshot.json +0 -1
  196. machineconfig/scripts/python/.mypy_cache/3.11/__future__.data.json +0 -1
  197. machineconfig/scripts/python/.mypy_cache/3.11/__future__.meta.json +0 -1
  198. machineconfig/scripts/python/.mypy_cache/3.11/_ast.data.json +0 -1
  199. machineconfig/scripts/python/.mypy_cache/3.11/_ast.meta.json +0 -1
  200. machineconfig/scripts/python/.mypy_cache/3.11/_bz2.data.json +0 -1
  201. machineconfig/scripts/python/.mypy_cache/3.11/_bz2.meta.json +0 -1
  202. machineconfig/scripts/python/.mypy_cache/3.11/_codecs.data.json +0 -1
  203. machineconfig/scripts/python/.mypy_cache/3.11/_codecs.meta.json +0 -1
  204. machineconfig/scripts/python/.mypy_cache/3.11/_collections_abc.data.json +0 -1
  205. machineconfig/scripts/python/.mypy_cache/3.11/_collections_abc.meta.json +0 -1
  206. machineconfig/scripts/python/.mypy_cache/3.11/_compression.data.json +0 -1
  207. machineconfig/scripts/python/.mypy_cache/3.11/_compression.meta.json +0 -1
  208. machineconfig/scripts/python/.mypy_cache/3.11/_decimal.data.json +0 -1
  209. machineconfig/scripts/python/.mypy_cache/3.11/_decimal.meta.json +0 -1
  210. machineconfig/scripts/python/.mypy_cache/3.11/_frozen_importlib.data.json +0 -1
  211. machineconfig/scripts/python/.mypy_cache/3.11/_frozen_importlib.meta.json +0 -1
  212. machineconfig/scripts/python/.mypy_cache/3.11/_frozen_importlib_external.data.json +0 -1
  213. machineconfig/scripts/python/.mypy_cache/3.11/_frozen_importlib_external.meta.json +0 -1
  214. machineconfig/scripts/python/.mypy_cache/3.11/_io.data.json +0 -1
  215. machineconfig/scripts/python/.mypy_cache/3.11/_io.meta.json +0 -1
  216. machineconfig/scripts/python/.mypy_cache/3.11/_locale.data.json +0 -1
  217. machineconfig/scripts/python/.mypy_cache/3.11/_locale.meta.json +0 -1
  218. machineconfig/scripts/python/.mypy_cache/3.11/_stat.data.json +0 -1
  219. machineconfig/scripts/python/.mypy_cache/3.11/_stat.meta.json +0 -1
  220. machineconfig/scripts/python/.mypy_cache/3.11/_struct.data.json +0 -1
  221. machineconfig/scripts/python/.mypy_cache/3.11/_struct.meta.json +0 -1
  222. machineconfig/scripts/python/.mypy_cache/3.11/_thread.data.json +0 -1
  223. machineconfig/scripts/python/.mypy_cache/3.11/_thread.meta.json +0 -1
  224. machineconfig/scripts/python/.mypy_cache/3.11/_typeshed/__init__.data.json +0 -1
  225. machineconfig/scripts/python/.mypy_cache/3.11/_typeshed/__init__.meta.json +0 -1
  226. machineconfig/scripts/python/.mypy_cache/3.11/_typeshed/importlib.data.json +0 -1
  227. machineconfig/scripts/python/.mypy_cache/3.11/_typeshed/importlib.meta.json +0 -1
  228. machineconfig/scripts/python/.mypy_cache/3.11/_warnings.data.json +0 -1
  229. machineconfig/scripts/python/.mypy_cache/3.11/_warnings.meta.json +0 -1
  230. machineconfig/scripts/python/.mypy_cache/3.11/_weakref.data.json +0 -1
  231. machineconfig/scripts/python/.mypy_cache/3.11/_weakref.meta.json +0 -1
  232. machineconfig/scripts/python/.mypy_cache/3.11/_weakrefset.data.json +0 -1
  233. machineconfig/scripts/python/.mypy_cache/3.11/_weakrefset.meta.json +0 -1
  234. machineconfig/scripts/python/.mypy_cache/3.11/abc.data.json +0 -1
  235. machineconfig/scripts/python/.mypy_cache/3.11/abc.meta.json +0 -1
  236. machineconfig/scripts/python/.mypy_cache/3.11/argparse.data.json +0 -1
  237. machineconfig/scripts/python/.mypy_cache/3.11/argparse.meta.json +0 -1
  238. machineconfig/scripts/python/.mypy_cache/3.11/ast.data.json +0 -1
  239. machineconfig/scripts/python/.mypy_cache/3.11/ast.meta.json +0 -1
  240. machineconfig/scripts/python/.mypy_cache/3.11/binascii.data.json +0 -1
  241. machineconfig/scripts/python/.mypy_cache/3.11/binascii.meta.json +0 -1
  242. machineconfig/scripts/python/.mypy_cache/3.11/builtins.data.json +0 -1
  243. machineconfig/scripts/python/.mypy_cache/3.11/builtins.meta.json +0 -1
  244. machineconfig/scripts/python/.mypy_cache/3.11/bz2.data.json +0 -1
  245. machineconfig/scripts/python/.mypy_cache/3.11/bz2.meta.json +0 -1
  246. machineconfig/scripts/python/.mypy_cache/3.11/calendar.data.json +0 -1
  247. machineconfig/scripts/python/.mypy_cache/3.11/calendar.meta.json +0 -1
  248. machineconfig/scripts/python/.mypy_cache/3.11/codecs.data.json +0 -1
  249. machineconfig/scripts/python/.mypy_cache/3.11/codecs.meta.json +0 -1
  250. machineconfig/scripts/python/.mypy_cache/3.11/collections/__init__.data.json +0 -1
  251. machineconfig/scripts/python/.mypy_cache/3.11/collections/__init__.meta.json +0 -1
  252. machineconfig/scripts/python/.mypy_cache/3.11/collections/abc.data.json +0 -1
  253. machineconfig/scripts/python/.mypy_cache/3.11/collections/abc.meta.json +0 -1
  254. machineconfig/scripts/python/.mypy_cache/3.11/configparser.data.json +0 -1
  255. machineconfig/scripts/python/.mypy_cache/3.11/configparser.meta.json +0 -1
  256. machineconfig/scripts/python/.mypy_cache/3.11/contextlib.data.json +0 -1
  257. machineconfig/scripts/python/.mypy_cache/3.11/contextlib.meta.json +0 -1
  258. machineconfig/scripts/python/.mypy_cache/3.11/dataclasses.data.json +0 -1
  259. machineconfig/scripts/python/.mypy_cache/3.11/dataclasses.meta.json +0 -1
  260. machineconfig/scripts/python/.mypy_cache/3.11/datetime.data.json +0 -1
  261. machineconfig/scripts/python/.mypy_cache/3.11/datetime.meta.json +0 -1
  262. machineconfig/scripts/python/.mypy_cache/3.11/decimal.data.json +0 -1
  263. machineconfig/scripts/python/.mypy_cache/3.11/decimal.meta.json +0 -1
  264. machineconfig/scripts/python/.mypy_cache/3.11/dis.data.json +0 -1
  265. machineconfig/scripts/python/.mypy_cache/3.11/dis.meta.json +0 -1
  266. machineconfig/scripts/python/.mypy_cache/3.11/email/__init__.data.json +0 -1
  267. machineconfig/scripts/python/.mypy_cache/3.11/email/__init__.meta.json +0 -1
  268. machineconfig/scripts/python/.mypy_cache/3.11/email/_policybase.data.json +0 -1
  269. machineconfig/scripts/python/.mypy_cache/3.11/email/_policybase.meta.json +0 -1
  270. machineconfig/scripts/python/.mypy_cache/3.11/email/charset.data.json +0 -1
  271. machineconfig/scripts/python/.mypy_cache/3.11/email/charset.meta.json +0 -1
  272. machineconfig/scripts/python/.mypy_cache/3.11/email/contentmanager.data.json +0 -1
  273. machineconfig/scripts/python/.mypy_cache/3.11/email/contentmanager.meta.json +0 -1
  274. machineconfig/scripts/python/.mypy_cache/3.11/email/errors.data.json +0 -1
  275. machineconfig/scripts/python/.mypy_cache/3.11/email/errors.meta.json +0 -1
  276. machineconfig/scripts/python/.mypy_cache/3.11/email/header.data.json +0 -1
  277. machineconfig/scripts/python/.mypy_cache/3.11/email/header.meta.json +0 -1
  278. machineconfig/scripts/python/.mypy_cache/3.11/email/message.data.json +0 -1
  279. machineconfig/scripts/python/.mypy_cache/3.11/email/message.meta.json +0 -1
  280. machineconfig/scripts/python/.mypy_cache/3.11/email/policy.data.json +0 -1
  281. machineconfig/scripts/python/.mypy_cache/3.11/email/policy.meta.json +0 -1
  282. machineconfig/scripts/python/.mypy_cache/3.11/enum.data.json +0 -1
  283. machineconfig/scripts/python/.mypy_cache/3.11/enum.meta.json +0 -1
  284. machineconfig/scripts/python/.mypy_cache/3.11/fnmatch.data.json +0 -1
  285. machineconfig/scripts/python/.mypy_cache/3.11/fnmatch.meta.json +0 -1
  286. machineconfig/scripts/python/.mypy_cache/3.11/functools.data.json +0 -1
  287. machineconfig/scripts/python/.mypy_cache/3.11/functools.meta.json +0 -1
  288. machineconfig/scripts/python/.mypy_cache/3.11/gc.data.json +0 -1
  289. machineconfig/scripts/python/.mypy_cache/3.11/gc.meta.json +0 -1
  290. machineconfig/scripts/python/.mypy_cache/3.11/genericpath.data.json +0 -1
  291. machineconfig/scripts/python/.mypy_cache/3.11/genericpath.meta.json +0 -1
  292. machineconfig/scripts/python/.mypy_cache/3.11/getpass.data.json +0 -1
  293. machineconfig/scripts/python/.mypy_cache/3.11/getpass.meta.json +0 -1
  294. machineconfig/scripts/python/.mypy_cache/3.11/git/__init__.data.json +0 -1
  295. machineconfig/scripts/python/.mypy_cache/3.11/git/__init__.meta.json +0 -1
  296. machineconfig/scripts/python/.mypy_cache/3.11/git/cmd.data.json +0 -1
  297. machineconfig/scripts/python/.mypy_cache/3.11/git/cmd.meta.json +0 -1
  298. machineconfig/scripts/python/.mypy_cache/3.11/git/compat.data.json +0 -1
  299. machineconfig/scripts/python/.mypy_cache/3.11/git/compat.meta.json +0 -1
  300. machineconfig/scripts/python/.mypy_cache/3.11/git/config.data.json +0 -1
  301. machineconfig/scripts/python/.mypy_cache/3.11/git/config.meta.json +0 -1
  302. machineconfig/scripts/python/.mypy_cache/3.11/git/db.data.json +0 -1
  303. machineconfig/scripts/python/.mypy_cache/3.11/git/db.meta.json +0 -1
  304. machineconfig/scripts/python/.mypy_cache/3.11/git/diff.data.json +0 -1
  305. machineconfig/scripts/python/.mypy_cache/3.11/git/diff.meta.json +0 -1
  306. machineconfig/scripts/python/.mypy_cache/3.11/git/exc.data.json +0 -1
  307. machineconfig/scripts/python/.mypy_cache/3.11/git/exc.meta.json +0 -1
  308. machineconfig/scripts/python/.mypy_cache/3.11/git/index/__init__.data.json +0 -1
  309. machineconfig/scripts/python/.mypy_cache/3.11/git/index/__init__.meta.json +0 -1
  310. machineconfig/scripts/python/.mypy_cache/3.11/git/index/base.data.json +0 -1
  311. machineconfig/scripts/python/.mypy_cache/3.11/git/index/base.meta.json +0 -1
  312. machineconfig/scripts/python/.mypy_cache/3.11/git/index/fun.data.json +0 -1
  313. machineconfig/scripts/python/.mypy_cache/3.11/git/index/fun.meta.json +0 -1
  314. machineconfig/scripts/python/.mypy_cache/3.11/git/index/typ.data.json +0 -1
  315. machineconfig/scripts/python/.mypy_cache/3.11/git/index/typ.meta.json +0 -1
  316. machineconfig/scripts/python/.mypy_cache/3.11/git/index/util.data.json +0 -1
  317. machineconfig/scripts/python/.mypy_cache/3.11/git/index/util.meta.json +0 -1
  318. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/__init__.data.json +0 -1
  319. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/__init__.meta.json +0 -1
  320. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/base.data.json +0 -1
  321. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/base.meta.json +0 -1
  322. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/blob.data.json +0 -1
  323. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/blob.meta.json +0 -1
  324. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/commit.data.json +0 -1
  325. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/commit.meta.json +0 -1
  326. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/fun.data.json +0 -1
  327. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/fun.meta.json +0 -1
  328. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/__init__.data.json +0 -1
  329. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/__init__.meta.json +0 -1
  330. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/base.data.json +0 -1
  331. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/base.meta.json +0 -1
  332. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/root.data.json +0 -1
  333. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/root.meta.json +0 -1
  334. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/util.data.json +0 -1
  335. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/submodule/util.meta.json +0 -1
  336. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/tag.data.json +0 -1
  337. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/tag.meta.json +0 -1
  338. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/tree.data.json +0 -1
  339. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/tree.meta.json +0 -1
  340. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/util.data.json +0 -1
  341. machineconfig/scripts/python/.mypy_cache/3.11/git/objects/util.meta.json +0 -1
  342. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/__init__.data.json +0 -1
  343. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/__init__.meta.json +0 -1
  344. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/head.data.json +0 -1
  345. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/head.meta.json +0 -1
  346. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/log.data.json +0 -1
  347. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/log.meta.json +0 -1
  348. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/reference.data.json +0 -1
  349. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/reference.meta.json +0 -1
  350. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/remote.data.json +0 -1
  351. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/remote.meta.json +0 -1
  352. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/symbolic.data.json +0 -1
  353. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/symbolic.meta.json +0 -1
  354. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/tag.data.json +0 -1
  355. machineconfig/scripts/python/.mypy_cache/3.11/git/refs/tag.meta.json +0 -1
  356. machineconfig/scripts/python/.mypy_cache/3.11/git/remote.data.json +0 -1
  357. machineconfig/scripts/python/.mypy_cache/3.11/git/remote.meta.json +0 -1
  358. machineconfig/scripts/python/.mypy_cache/3.11/git/repo/__init__.data.json +0 -1
  359. machineconfig/scripts/python/.mypy_cache/3.11/git/repo/__init__.meta.json +0 -1
  360. machineconfig/scripts/python/.mypy_cache/3.11/git/repo/base.data.json +0 -1
  361. machineconfig/scripts/python/.mypy_cache/3.11/git/repo/base.meta.json +0 -1
  362. machineconfig/scripts/python/.mypy_cache/3.11/git/repo/fun.data.json +0 -1
  363. machineconfig/scripts/python/.mypy_cache/3.11/git/repo/fun.meta.json +0 -1
  364. machineconfig/scripts/python/.mypy_cache/3.11/git/types.data.json +0 -1
  365. machineconfig/scripts/python/.mypy_cache/3.11/git/types.meta.json +0 -1
  366. machineconfig/scripts/python/.mypy_cache/3.11/git/util.data.json +0 -1
  367. machineconfig/scripts/python/.mypy_cache/3.11/git/util.meta.json +0 -1
  368. machineconfig/scripts/python/.mypy_cache/3.11/glob.data.json +0 -1
  369. machineconfig/scripts/python/.mypy_cache/3.11/glob.meta.json +0 -1
  370. machineconfig/scripts/python/.mypy_cache/3.11/gzip.data.json +0 -1
  371. machineconfig/scripts/python/.mypy_cache/3.11/gzip.meta.json +0 -1
  372. machineconfig/scripts/python/.mypy_cache/3.11/importlib/__init__.data.json +0 -1
  373. machineconfig/scripts/python/.mypy_cache/3.11/importlib/__init__.meta.json +0 -1
  374. machineconfig/scripts/python/.mypy_cache/3.11/importlib/_abc.data.json +0 -1
  375. machineconfig/scripts/python/.mypy_cache/3.11/importlib/_abc.meta.json +0 -1
  376. machineconfig/scripts/python/.mypy_cache/3.11/importlib/_bootstrap.data.json +0 -1
  377. machineconfig/scripts/python/.mypy_cache/3.11/importlib/_bootstrap.meta.json +0 -1
  378. machineconfig/scripts/python/.mypy_cache/3.11/importlib/_bootstrap_external.data.json +0 -1
  379. machineconfig/scripts/python/.mypy_cache/3.11/importlib/_bootstrap_external.meta.json +0 -1
  380. machineconfig/scripts/python/.mypy_cache/3.11/importlib/abc.data.json +0 -1
  381. machineconfig/scripts/python/.mypy_cache/3.11/importlib/abc.meta.json +0 -1
  382. machineconfig/scripts/python/.mypy_cache/3.11/importlib/machinery.data.json +0 -1
  383. machineconfig/scripts/python/.mypy_cache/3.11/importlib/machinery.meta.json +0 -1
  384. machineconfig/scripts/python/.mypy_cache/3.11/importlib/metadata/__init__.data.json +0 -1
  385. machineconfig/scripts/python/.mypy_cache/3.11/importlib/metadata/__init__.meta.json +0 -1
  386. machineconfig/scripts/python/.mypy_cache/3.11/importlib/metadata/_meta.data.json +0 -1
  387. machineconfig/scripts/python/.mypy_cache/3.11/importlib/metadata/_meta.meta.json +0 -1
  388. machineconfig/scripts/python/.mypy_cache/3.11/importlib/readers.data.json +0 -1
  389. machineconfig/scripts/python/.mypy_cache/3.11/importlib/readers.meta.json +0 -1
  390. machineconfig/scripts/python/.mypy_cache/3.11/importlib/resources/__init__.data.json +0 -1
  391. machineconfig/scripts/python/.mypy_cache/3.11/importlib/resources/__init__.meta.json +0 -1
  392. machineconfig/scripts/python/.mypy_cache/3.11/importlib/resources/_common.data.json +0 -1
  393. machineconfig/scripts/python/.mypy_cache/3.11/importlib/resources/_common.meta.json +0 -1
  394. machineconfig/scripts/python/.mypy_cache/3.11/importlib/resources/abc.data.json +0 -1
  395. machineconfig/scripts/python/.mypy_cache/3.11/importlib/resources/abc.meta.json +0 -1
  396. machineconfig/scripts/python/.mypy_cache/3.11/inspect.data.json +0 -1
  397. machineconfig/scripts/python/.mypy_cache/3.11/inspect.meta.json +0 -1
  398. machineconfig/scripts/python/.mypy_cache/3.11/io.data.json +0 -1
  399. machineconfig/scripts/python/.mypy_cache/3.11/io.meta.json +0 -1
  400. machineconfig/scripts/python/.mypy_cache/3.11/itertools.data.json +0 -1
  401. machineconfig/scripts/python/.mypy_cache/3.11/itertools.meta.json +0 -1
  402. machineconfig/scripts/python/.mypy_cache/3.11/locale.data.json +0 -1
  403. machineconfig/scripts/python/.mypy_cache/3.11/locale.meta.json +0 -1
  404. machineconfig/scripts/python/.mypy_cache/3.11/logging/__init__.data.json +0 -1
  405. machineconfig/scripts/python/.mypy_cache/3.11/logging/__init__.meta.json +0 -1
  406. machineconfig/scripts/python/.mypy_cache/3.11/mimetypes.data.json +0 -1
  407. machineconfig/scripts/python/.mypy_cache/3.11/mimetypes.meta.json +0 -1
  408. machineconfig/scripts/python/.mypy_cache/3.11/mmap.data.json +0 -1
  409. machineconfig/scripts/python/.mypy_cache/3.11/mmap.meta.json +0 -1
  410. machineconfig/scripts/python/.mypy_cache/3.11/numbers.data.json +0 -1
  411. machineconfig/scripts/python/.mypy_cache/3.11/numbers.meta.json +0 -1
  412. machineconfig/scripts/python/.mypy_cache/3.11/opcode.data.json +0 -1
  413. machineconfig/scripts/python/.mypy_cache/3.11/opcode.meta.json +0 -1
  414. machineconfig/scripts/python/.mypy_cache/3.11/os/__init__.data.json +0 -1
  415. machineconfig/scripts/python/.mypy_cache/3.11/os/__init__.meta.json +0 -1
  416. machineconfig/scripts/python/.mypy_cache/3.11/os/path.data.json +0 -1
  417. machineconfig/scripts/python/.mypy_cache/3.11/os/path.meta.json +0 -1
  418. machineconfig/scripts/python/.mypy_cache/3.11/pathlib.data.json +0 -1
  419. machineconfig/scripts/python/.mypy_cache/3.11/pathlib.meta.json +0 -1
  420. machineconfig/scripts/python/.mypy_cache/3.11/platform.data.json +0 -1
  421. machineconfig/scripts/python/.mypy_cache/3.11/platform.meta.json +0 -1
  422. machineconfig/scripts/python/.mypy_cache/3.11/posixpath.data.json +0 -1
  423. machineconfig/scripts/python/.mypy_cache/3.11/posixpath.meta.json +0 -1
  424. machineconfig/scripts/python/.mypy_cache/3.11/re.data.json +0 -1
  425. machineconfig/scripts/python/.mypy_cache/3.11/re.meta.json +0 -1
  426. machineconfig/scripts/python/.mypy_cache/3.11/resource.data.json +0 -1
  427. machineconfig/scripts/python/.mypy_cache/3.11/resource.meta.json +0 -1
  428. machineconfig/scripts/python/.mypy_cache/3.11/shlex.data.json +0 -1
  429. machineconfig/scripts/python/.mypy_cache/3.11/shlex.meta.json +0 -1
  430. machineconfig/scripts/python/.mypy_cache/3.11/shutil.data.json +0 -1
  431. machineconfig/scripts/python/.mypy_cache/3.11/shutil.meta.json +0 -1
  432. machineconfig/scripts/python/.mypy_cache/3.11/signal.data.json +0 -1
  433. machineconfig/scripts/python/.mypy_cache/3.11/signal.meta.json +0 -1
  434. machineconfig/scripts/python/.mypy_cache/3.11/src/__init__.data.json +0 -1
  435. machineconfig/scripts/python/.mypy_cache/3.11/src/__init__.meta.json +0 -1
  436. machineconfig/scripts/python/.mypy_cache/3.11/src/machineconfig/__init__.data.json +0 -1
  437. machineconfig/scripts/python/.mypy_cache/3.11/src/machineconfig/__init__.meta.json +0 -1
  438. machineconfig/scripts/python/.mypy_cache/3.11/src/machineconfig/scripts/__init__.data.json +0 -1
  439. machineconfig/scripts/python/.mypy_cache/3.11/src/machineconfig/scripts/__init__.meta.json +0 -1
  440. machineconfig/scripts/python/.mypy_cache/3.11/src/machineconfig/scripts/python/__init__.data.json +0 -1
  441. machineconfig/scripts/python/.mypy_cache/3.11/src/machineconfig/scripts/python/__init__.meta.json +0 -1
  442. machineconfig/scripts/python/.mypy_cache/3.11/sre_compile.data.json +0 -1
  443. machineconfig/scripts/python/.mypy_cache/3.11/sre_compile.meta.json +0 -1
  444. machineconfig/scripts/python/.mypy_cache/3.11/sre_constants.data.json +0 -1
  445. machineconfig/scripts/python/.mypy_cache/3.11/sre_constants.meta.json +0 -1
  446. machineconfig/scripts/python/.mypy_cache/3.11/sre_parse.data.json +0 -1
  447. machineconfig/scripts/python/.mypy_cache/3.11/sre_parse.meta.json +0 -1
  448. machineconfig/scripts/python/.mypy_cache/3.11/stat.data.json +0 -1
  449. machineconfig/scripts/python/.mypy_cache/3.11/stat.meta.json +0 -1
  450. machineconfig/scripts/python/.mypy_cache/3.11/string.data.json +0 -1
  451. machineconfig/scripts/python/.mypy_cache/3.11/string.meta.json +0 -1
  452. machineconfig/scripts/python/.mypy_cache/3.11/struct.data.json +0 -1
  453. machineconfig/scripts/python/.mypy_cache/3.11/struct.meta.json +0 -1
  454. machineconfig/scripts/python/.mypy_cache/3.11/subprocess.data.json +0 -1
  455. machineconfig/scripts/python/.mypy_cache/3.11/subprocess.meta.json +0 -1
  456. machineconfig/scripts/python/.mypy_cache/3.11/sys/__init__.data.json +0 -1
  457. machineconfig/scripts/python/.mypy_cache/3.11/sys/__init__.meta.json +0 -1
  458. machineconfig/scripts/python/.mypy_cache/3.11/tarfile.data.json +0 -1
  459. machineconfig/scripts/python/.mypy_cache/3.11/tarfile.meta.json +0 -1
  460. machineconfig/scripts/python/.mypy_cache/3.11/tempfile.data.json +0 -1
  461. machineconfig/scripts/python/.mypy_cache/3.11/tempfile.meta.json +0 -1
  462. machineconfig/scripts/python/.mypy_cache/3.11/textwrap.data.json +0 -1
  463. machineconfig/scripts/python/.mypy_cache/3.11/textwrap.meta.json +0 -1
  464. machineconfig/scripts/python/.mypy_cache/3.11/threading.data.json +0 -1
  465. machineconfig/scripts/python/.mypy_cache/3.11/threading.meta.json +0 -1
  466. machineconfig/scripts/python/.mypy_cache/3.11/time.data.json +0 -1
  467. machineconfig/scripts/python/.mypy_cache/3.11/time.meta.json +0 -1
  468. machineconfig/scripts/python/.mypy_cache/3.11/types.data.json +0 -1
  469. machineconfig/scripts/python/.mypy_cache/3.11/types.meta.json +0 -1
  470. machineconfig/scripts/python/.mypy_cache/3.11/typing.data.json +0 -1
  471. machineconfig/scripts/python/.mypy_cache/3.11/typing.meta.json +0 -1
  472. machineconfig/scripts/python/.mypy_cache/3.11/typing_extensions.data.json +0 -1
  473. machineconfig/scripts/python/.mypy_cache/3.11/typing_extensions.meta.json +0 -1
  474. machineconfig/scripts/python/.mypy_cache/3.11/urllib/__init__.data.json +0 -1
  475. machineconfig/scripts/python/.mypy_cache/3.11/urllib/__init__.meta.json +0 -1
  476. machineconfig/scripts/python/.mypy_cache/3.11/urllib/parse.data.json +0 -1
  477. machineconfig/scripts/python/.mypy_cache/3.11/urllib/parse.meta.json +0 -1
  478. machineconfig/scripts/python/.mypy_cache/3.11/uuid.data.json +0 -1
  479. machineconfig/scripts/python/.mypy_cache/3.11/uuid.meta.json +0 -1
  480. machineconfig/scripts/python/.mypy_cache/3.11/warnings.data.json +0 -1
  481. machineconfig/scripts/python/.mypy_cache/3.11/warnings.meta.json +0 -1
  482. machineconfig/scripts/python/.mypy_cache/3.11/weakref.data.json +0 -1
  483. machineconfig/scripts/python/.mypy_cache/3.11/weakref.meta.json +0 -1
  484. machineconfig/scripts/python/.mypy_cache/3.11/zipfile/__init__.data.json +0 -1
  485. machineconfig/scripts/python/.mypy_cache/3.11/zipfile/__init__.meta.json +0 -1
  486. machineconfig/scripts/python/.mypy_cache/3.11/zlib.data.json +0 -1
  487. machineconfig/scripts/python/.mypy_cache/3.11/zlib.meta.json +0 -1
  488. machineconfig/scripts/python/.mypy_cache/CACHEDIR.TAG +0 -3
  489. machineconfig/scripts/python/__pycache__/cloud_repo_sync.cpython-311.pyc +0 -0
  490. machineconfig/scripts/python/__pycache__/gh_models.cpython-311.pyc +0 -0
  491. machineconfig/scripts/python/__pycache__/url2md.cpython-311.pyc +0 -0
  492. machineconfig/scripts/python/__pycache__/viewer.cpython-311.pyc +0 -0
  493. machineconfig/scripts/python/__pycache__/vscode_api.cpython-311.pyc +0 -0
  494. machineconfig/settings/__pycache__/__init__.cpython-311.pyc +0 -0
  495. machineconfig/settings/linters/.ruff_cache/.gitignore +0 -2
  496. machineconfig/settings/linters/.ruff_cache/CACHEDIR.TAG +0 -1
  497. machineconfig/settings/shells/ipy/profiles/default/__pycache__/__init__.cpython-311.pyc +0 -0
  498. machineconfig/settings/shells/ipy/profiles/default/startup/__pycache__/__init__.cpython-311.pyc +0 -0
  499. machineconfig/settings/shells/ipy/profiles/default/startup/__pycache__/playext.cpython-311.pyc +0 -0
  500. machineconfig/utils/ve_utils/ve1.py +0 -111
  501. machineconfig/utils/ve_utils/ve2.py +0 -155
  502. machineconfig-1.95.dist-info/RECORD +0 -712
  503. {machineconfig-1.95.dist-info → machineconfig-1.97.dist-info}/WHEEL +0 -0
  504. {machineconfig-1.95.dist-info → machineconfig-1.97.dist-info}/top_level.txt +0 -0
@@ -1,359 +1,461 @@
1
1
 
2
2
 
3
- import pandas as pd
4
3
 
5
- from crocodile.file_management import P, Save, Read
6
- from crocodile.meta import Scheduler
7
- from machineconfig.cluster.loader_runner import JOB_STATUS, LogEntry
8
- from typing import Optional, Any, NoReturn
9
- from rich.console import Console
10
- import time
11
- from dataclasses import fields
12
- import getpass
13
- import random
14
- import platform
4
+ # from machineconfig.utils.utils2 import read_ini
5
+ # from machineconfig.utils.io_save import save_pickle
15
6
 
7
+ # from machineconfig.cluster.loader_runner import JOB_STATUS, LogEntry
8
+ # from typing import Optional, Any, NoReturn
9
+ # from rich.console import Console
10
+ # import pickle
11
+ # import time
12
+ # import getpass
13
+ # import random
14
+ # import platform
15
+ # from datetime import datetime, timedelta
16
16
 
17
- class CloudManager:
18
- base_path = P("~/tmp_results/remote_machines/cloud")
19
- server_interval_sec: int = 60 * 5
20
- num_claim_checks: int = 3
21
- inter_check_interval_sec: int = 15
22
- def __init__(self, max_jobs: int, cloud: Optional[str] = None, reset_local: bool = False) -> None:
23
- if reset_local:
24
- print("☠️ Resetting local cloud cache ☠️. Locally created / completed jobs not yet synced will not make it to the cloud.")
25
- P(self.base_path).expanduser().delete(sure=True)
26
- self.status_root: P = self.base_path.expanduser().joinpath("workers", f"{getpass.getuser()}@{platform.node()}").create()
27
- self.max_jobs: int = max_jobs
28
- if cloud is None:
29
- from machineconfig.utils.utils import DEFAULTS_PATH
30
- self.cloud = Read.ini(DEFAULTS_PATH)['general']['rclone_config_name']
31
- else: self.cloud = cloud
32
- self.lock_claimed = False
33
- from machineconfig.cluster.remote_machine import RemoteMachine
34
- self.running_jobs: list[RemoteMachine] = []
35
- self.console = Console()
36
17
 
37
- # =================== READ WRITE OF LOGS ===================
38
- def read_log(self) -> dict[JOB_STATUS, 'pd.DataFrame']:
39
- # assert self.claim_lock, f"method should never be called without claiming the lock first. This is a cloud-wide file."
40
- if not self.lock_claimed: self.claim_lock()
41
- path = self.base_path.joinpath("logs.pkl").expanduser()
42
- if not path.exists():
43
- cols = [a_field.name for a_field in fields(LogEntry)]
44
- log: dict[JOB_STATUS, 'pd.DataFrame'] = {}
45
- log['queued'] = pd.DataFrame(columns=cols)
46
- log['running'] = pd.DataFrame(columns=cols)
47
- log['completed'] = pd.DataFrame(columns=cols)
48
- log['failed'] = pd.DataFrame(columns=cols)
49
- Save.pickle(obj=log, path=path.create(parents_only=True), verbose=False)
50
- return log
51
- return Read.pickle(path=path)
52
- def write_log(self, log: dict[JOB_STATUS, 'pd.DataFrame']):
53
- # assert self.claim_lock, f"method should never be called without claiming the lock first. This is a cloud-wide file."
54
- if not self.lock_claimed: self.claim_lock()
55
- Save.pickle(obj=log, path=self.base_path.joinpath("logs.pkl").expanduser(), verbose=False)
56
- return NoReturn
18
+ # def format_table_markdown(data: list[dict[str, Any]]) -> str:
19
+ # """Convert list of dictionaries to markdown table format."""
20
+ # if not data:
21
+ # return ""
22
+
23
+ # # Get all unique keys from all dictionaries
24
+ # all_keys = set()
25
+ # for row in data:
26
+ # all_keys.update(row.keys())
27
+
28
+ # keys = sorted(all_keys)
29
+
30
+ # # Create header
31
+ # header = "|" + "|".join(f" {key} " for key in keys) + "|"
32
+ # separator = "|" + "|".join(" --- " for _ in keys) + "|"
33
+
34
+ # # Create rows
35
+ # rows = []
36
+ # for row in data:
37
+ # row_values = []
38
+ # for key in keys:
39
+ # value = row.get(key, "")
40
+ # # Convert to string and handle None values
41
+ # if value is None:
42
+ # value = ""
43
+ # else:
44
+ # value = str(value)
45
+ # row_values.append(f" {value} ")
46
+ # rows.append("|" + "|".join(row_values) + "|")
47
+
48
+ # return "\n".join([header, separator] + rows)
57
49
 
58
- # =================== CLOUD MONITORING ===================
59
- def fetch_cloud_live(self):
60
- remote = CloudManager.base_path
61
- localpath = P.tmp().joinpath("tmp_dirs/cloud_manager_live").create()
62
- alternative_base = localpath.delete(sure=True).from_cloud(cloud=self.cloud, remotepath=remote.get_remote_path(root="myhome", rel2home=True), verbose=False)
63
- return alternative_base
64
- @staticmethod
65
- def prepare_servers_report(cloud_root: P):
66
- from machineconfig.cluster.remote_machine import RemoteMachine
67
- workers_root = cloud_root.joinpath("workers").search("*")
68
- res: dict[str, list[RemoteMachine]] = {}
69
- times: dict[str, pd.Timedelta] = {}
70
- for a_worker in workers_root:
71
- running_jobs = a_worker.joinpath("running_jobs.pkl")
72
- times[a_worker.name] = pd.Timestamp.now() - pd.to_datetime(running_jobs.time("m"))
73
- res[a_worker.name] = Read.pickle(path=running_jobs) if running_jobs.exists() else []
74
- servers_report = pd.DataFrame({"machine": list(res.keys()), "#RJobs": [len(x) for x in res.values()], "LastUpdate": list(times.values())})
75
- return servers_report
76
- def run_monitor(self):
77
- """Without syncing, bring the latest from the cloud to random local path (not the default path, as that would require the lock)"""
78
- from rich import print as pprint
79
- def routine(sched: Any):
80
- _ = sched
81
- alternative_base = self.fetch_cloud_live()
82
- assert alternative_base is not None
83
- lock_path = alternative_base.expanduser().joinpath("lock.txt")
84
- if lock_path.exists(): lock_owner: str = lock_path.read_text()
85
- else: lock_owner = "None"
86
- self.console.print(f"🔒 Lock is held by: {lock_owner}")
87
- self.console.print("🧾 Log File:")
88
- log_path = alternative_base.joinpath("logs.pkl")
89
- if log_path.exists(): log: dict[JOB_STATUS, 'pd.DataFrame'] = Read.pickle(path=log_path)
90
- else:
91
- self.console.print("Log file doesn't exist! 🫤 must be that cloud is getting purged or something 🤔 ")
92
- log = {}
93
- for item_name, item_df in log.items():
94
- self.console.rule(f"{item_name} DataFrame (Latest {'10' if len(item_df) > 10 else len(item_df)} / {len(item_df)})")
95
- print() # empty line after the rule helps keeping the rendering clean in the terminal while zooming in and out.
96
- if item_name != "queued":
97
- t2 = pd.to_datetime(item_df["end_time"]) if item_name != "running" else pd.Series([pd.Timestamp.now()] * len(item_df))
98
- if len(t2) == 0 and len(item_df) == 0: pass # the subtraction below gives an error if both are empty. TypeError: cannot subtract DatetimeArray from ndarray
99
- else: item_df["duration"] = t2 - pd.to_datetime(item_df["start_time"])
100
50
 
101
- cols = item_df.columns
102
- cols = [a_col for a_col in cols if a_col not in {"cmd", "note"}]
103
- if item_name == "queued": cols = [a_col for a_col in cols if a_col not in {"pid", "start_time", "end_time", "run_machine"}]
104
- if item_name == "running": cols = [a_col for a_col in cols if a_col not in {"submission_time", "source_machine", "end_time"}]
105
- if item_name == "completed": cols = [a_col for a_col in cols if a_col not in {"submission_time", "source_machine", "start_time", "pid"}]
106
- if item_name == "failed": cols = [a_col for a_col in cols if a_col not in {"submission_time", "source_machine", "start_time"}]
107
- pprint(item_df[cols][-10:].to_markdown())
108
- pprint("\n\n")
109
- print("👷 Workers:")
110
- servers_report = self.prepare_servers_report(cloud_root=alternative_base)
111
- pprint(servers_report.to_markdown())
112
- sched = Scheduler(routine=routine, wait="5m")
113
- sched.run()
51
+ # class CloudManager:
52
+ # base_path = PathExtended("~/tmp_results/remote_machines/cloud")
53
+ # server_interval_sec: int = 60 * 5
54
+ # num_claim_checks: int = 3
55
+ # inter_check_interval_sec: int = 15
56
+ # def __init__(self, max_jobs: int, cloud: Optional[str] = None, reset_local: bool = False) -> None:
57
+ # if reset_local:
58
+ # print("☠️ Resetting local cloud cache ☠️. Locally created / completed jobs not yet synced will not make it to the cloud.")
59
+ # PathExtended(self.base_path).expanduser().delete(sure=True)
60
+ # status_root_path = self.base_path.expanduser().joinpath("workers", f"{getpass.getuser()}@{platform.node()}")
61
+ # status_root_path.mkdir(parents=True, exist_ok=True)
62
+ # self.status_root: P = status_root_path
63
+ # self.max_jobs: int = max_jobs
64
+ # if cloud is None:
65
+ # from machineconfig.utils.utils import DEFAULTS_PATH
66
+ # self.cloud = read_ini(DEFAULTS_PATH)['general']['rclone_config_name']
67
+ # else: self.cloud = cloud
68
+ # self.lock_claimed = False
69
+ # from machineconfig.cluster.remote_machine import RemoteMachine
70
+ # self.running_jobs: list[RemoteMachine] = []
71
+ # self.console = Console()
114
72
 
115
- # ================== CLEARNING METHODS ===================
116
- def clean_interrupted_jobs_mess(self, return_to_queue: bool = True):
117
- """Clean jobs that failed but in logs show running by looking at the pid.
118
- If you want to do the same for remote machines, you will need to do it manually using `rerun_jobs`"""
119
- assert len(self.running_jobs) == 0, "method should never be called while there are running jobs. This can only be called at the beginning of the run."
120
- from machineconfig.cluster.remote_machine import RemoteMachine
121
- this_machine = f"{getpass.getuser()}@{platform.node()}"
122
- log = self.read_log()
123
- # servers_report = self.prepare_servers_report(cloud_root=CloudManager.base_path.expanduser())
124
- dirt: list[str] = []
125
- for _idx, row in log["running"].iterrows():
126
- entry = LogEntry.from_dict(row.to_dict())
127
- if entry.run_machine != this_machine: continue
128
- a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
129
- rm: RemoteMachine = Read.pickle(path=a_job_path.joinpath("data/remote_machine.Machine.pkl"))
130
- status = rm.file_manager.get_job_status(session_name=rm.job_params.session_name, tab_name=rm.job_params.tab_name)
131
- if status == "running":
132
- print(f"Job `{entry.name}` is still running, added to running jobs.")
133
- self.running_jobs.append(rm)
134
- else:
135
- entry.pid = None
136
- entry.cmd = None
137
- entry.start_time = None
138
- entry.end_time = None
139
- entry.run_machine = None
140
- entry.session_name = None
141
- rm.file_manager.execution_log_dir.expanduser().joinpath("status.txt").delete(sure=True)
142
- rm.file_manager.execution_log_dir.expanduser().joinpath("pid.txt").delete(sure=True)
143
- entry.note += f"| Job was interrupted by a crash of the machine `{this_machine}`."
144
- dirt.append(entry.name)
145
- print(f"Job `{entry.name}` is not running, removing it from log of running jobs.")
146
- if return_to_queue:
147
- log["queued"] = pd.concat([log["queued"], pd.DataFrame([entry.__dict__])], ignore_index=True)
148
- print(f"Job `{entry.name}` is not running, returning it to the queue.")
149
- else:
150
- log["failed"] = pd.concat([log["failed"], pd.DataFrame([entry.__dict__])], ignore_index=True)
151
- print(f"Job `{entry.name}` is not running, moving it to failed jobs.")
152
- log["running"] = log["running"][~log["running"]["name"].isin(dirt)]
153
- self.write_log(log=log)
154
- def clean_failed_jobs_mess(self):
155
- """If you want to do it for remote machine, use `rerun_jobs` (manual selection)"""
156
- print("⚠️ Cleaning failed jobs mess for this machine ⚠️")
157
- from machineconfig.cluster.remote_machine import RemoteMachine
158
- log = self.read_log()
159
- for _idx, row in log["failed"].iterrows():
160
- entry = LogEntry.from_dict(row.to_dict())
161
- a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
162
- rm: RemoteMachine = Read.pickle(path=a_job_path.joinpath("data/remote_machine.Machine.pkl"))
163
- entry.note += f"| Job failed @ {entry.run_machine}"
164
- entry.pid = None
165
- entry.cmd = None
166
- entry.start_time = None
167
- entry.end_time = None
168
- entry.run_machine = None
169
- entry.session_name = None
170
- rm.file_manager.execution_log_dir.expanduser().joinpath("status.txt").delete(sure=True)
171
- rm.file_manager.execution_log_dir.expanduser().joinpath("pid.txt").delete(sure=True)
172
- print(f"Job `{entry.name}` is not running, removing it from log of running jobs.")
173
- log["queued"] = pd.concat([log["queued"], pd.DataFrame([entry.__dict__])], ignore_index=True)
174
- print(f"Job `{entry.name}` is not running, returning it to the queue.")
175
- log["failed"] = pd.DataFrame(columns=log["failed"].columns)
176
- self.write_log(log=log)
177
- self.release_lock()
178
- def rerun_jobs(self):
179
- """This method involves manual selection but has all-files scope (failed and running) and can be used for both local and remote machines.
180
- The reason it is not automated for remotes is because even though the server might have failed, the processes therein might be running, so there is no automated way to tell."""
181
- log = self.read_log()
182
- from machineconfig.cluster.remote_machine import RemoteMachine
183
- from machineconfig.utils.utils import display_options
184
- jobs_all: list[str] = self.base_path.expanduser().joinpath("jobs").search("*").apply(lambda x: x.name).list
185
- jobs_selected = display_options(options=jobs_all, msg="Select Jobs to Redo", multi=True, fzf=True)
186
- for a_job in jobs_selected:
187
- # find in which dataframe does this job lives:
188
- for log_type, log_df in log.items():
189
- if a_job in log_df["name"].values: break
190
- else: raise ValueError(f"Job `{a_job}` is not found in any of the log dataframes.")
191
- entry = LogEntry.from_dict(log_df[log_df["name"] == a_job].iloc[0].to_dict())
192
- a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
193
- entry.note += f"| Job failed @ {entry.run_machine}"
194
- entry.pid = None
195
- entry.cmd = None
196
- entry.start_time = None
197
- entry.end_time = None
198
- entry.run_machine = None
199
- entry.session_name = None
200
- rm: RemoteMachine = Read.pickle(path=a_job_path.joinpath("data/remote_machine.Machine.pkl"))
201
- rm.file_manager.execution_log_dir.expanduser().joinpath("status.txt").delete(sure=True)
202
- rm.file_manager.execution_log_dir.expanduser().joinpath("pid.txt").delete(sure=True)
203
- log["queued"] = pd.concat([log["queued"], pd.DataFrame([entry.__dict__])], ignore_index=True)
204
- log[log_type] = log[log_type][log[log_type]["name"] != a_job]
205
- print(f"Job `{entry.name}` was removed from {log_type} and added to the queue in order to be re-run.")
206
- self.write_log(log=log)
207
- self.release_lock()
73
+ # # =================== READ WRITE OF LOGS ===================
74
+ # def read_log(self) -> dict[JOB_STATUS, list[dict[str, Any]]]:
75
+ # # assert self.claim_lock, f"method should never be called without claiming the lock first. This is a cloud-wide file."
76
+ # if not self.lock_claimed: self.claim_lock()
77
+ # path = self.base_path.joinpath("logs.pkl").expanduser()
78
+ # if not path.exists():
79
+ # log: dict[JOB_STATUS, list[dict[str, Any]]] = {}
80
+ # log['queued'] = []
81
+ # log['running'] = []
82
+ # log['completed'] = []
83
+ # log['failed'] = []
84
+ # path.parent.mkdir(parents=True, exist_ok=True)
85
+ # save_pickle(obj=log, path=path, verbose=False)
86
+ # return log
87
+ # return pickle.loads(path.read_bytes())
88
+ # def write_log(self, log: dict[JOB_STATUS, list[dict[str, Any]]]) -> None:
89
+ # # assert self.claim_lock, f"method should never be called without claiming the lock first. This is a cloud-wide file."
90
+ # if not self.lock_claimed: self.claim_lock()
91
+ # save_pickle(obj=log, path=self.base_path.joinpath("logs.pkl").expanduser(), verbose=False)
208
92
 
209
- def serve(self):
210
- self.clean_interrupted_jobs_mess()
211
- def routine(sched: Any):
212
- _ = sched
213
- self.start_jobs_if_possible()
214
- self.get_running_jobs_statuses()
215
- self.release_lock()
216
- sched = Scheduler(routine=routine, wait=f"{self.server_interval_sec}s")
217
- return sched.run()
93
+ # # =================== CLOUD MONITORING ===================
94
+ # def fetch_cloud_live(self):
95
+ # remote = CloudManager.base_path
96
+ # localpath = PathExtended.tmp().joinpath("tmp_dirs/cloud_manager_live")
97
+ # localpath.mkdir(parents=True, exist_ok=True)
98
+ # alternative_base = localpath.delete(sure=True).from_cloud(cloud=self.cloud, remotepath=remote.get_remote_path(root="myhome", rel2home=True), verbose=False)
99
+ # return alternative_base
100
+ # @staticmethod
101
+ # def prepare_servers_report(cloud_root: PathExtended) -> list[dict[str, Any]]:
102
+ # from machineconfig.cluster.remote_machine import RemoteMachine
103
+ # # Replace crocodile List usage with plain Python list
104
+ # workers_root = [p for p in cloud_root.joinpath("workers").iterdir()]
105
+ # res: dict[str, list[RemoteMachine]] = {}
106
+ # times: dict[str, timedelta] = {}
107
+ # for a_worker in workers_root:
108
+ # running_jobs = a_worker.joinpath("running_jobs.pkl")
109
+ # file_mod_time = datetime.fromtimestamp(running_jobs.stat().st_mtime) if running_jobs.exists() else datetime.min
110
+ # times[a_worker.name] = datetime.now() - file_mod_time
111
+ # res[a_worker.name] = pickle.loads(running_jobs.read_bytes()) if running_jobs.exists() else []
112
+
113
+ # # Create list of dictionaries instead of DataFrame
114
+ # servers_report = []
115
+ # for machine in res.keys():
116
+ # servers_report.append({
117
+ # "machine": machine,
118
+ # "#RJobs": len(res[machine]),
119
+ # "LastUpdate": times[machine]
120
+ # })
121
+ # return servers_report
122
+ # def run_monitor(self):
123
+ # """Without syncing, bring the latest from the cloud to random local path (not the default path, as that would require the lock)"""
124
+ # from rich import print as pprint
125
+ # def routine(sched: Any):
126
+ # _ = sched
127
+ # alternative_base = self.fetch_cloud_live()
128
+ # assert alternative_base is not None
129
+ # lock_path = alternative_base.expanduser().joinpath("lock.txt")
130
+ # if lock_path.exists(): lock_owner: str = lock_path.read_text()
131
+ # else: lock_owner = "None"
132
+ # self.console.print(f"🔒 Lock is held by: {lock_owner}")
133
+ # self.console.print("🧾 Log File:")
134
+ # log_path = alternative_base.joinpath("logs.pkl")
135
+ # if log_path.exists(): log: dict[JOB_STATUS, list[dict[str, Any]]] = pickle.loads(log_path.read_bytes())
136
+ # else:
137
+ # self.console.print("Log file doesn't exist! 🫤 must be that cloud is getting purged or something 🤔 ")
138
+ # log = {}
139
+ # for item_name, item_list in log.items():
140
+ # self.console.rule(f"{item_name} Jobs (Latest {'10' if len(item_list) > 10 else len(item_list)} / {len(item_list)})")
141
+ # print() # empty line after the rule helps keeping the rendering clean in the terminal while zooming in and out.
142
+
143
+ # # Add duration calculation for non-queued items
144
+ # display_items = []
145
+ # for item in item_list:
146
+ # display_item = item.copy()
147
+ # if item_name != "queued" and "start_time" in item and item["start_time"]:
148
+ # try:
149
+ # if item_name == "running":
150
+ # end_time = datetime.now()
151
+ # else:
152
+ # end_time = datetime.fromisoformat(item["end_time"]) if item.get("end_time") else datetime.now()
153
+ # start_time = datetime.fromisoformat(item["start_time"])
154
+ # display_item["duration"] = end_time - start_time
155
+ # except Exception:
156
+ # display_item["duration"] = "unknown"
157
+ # display_items.append(display_item)
218
158
 
219
- def get_running_jobs_statuses(self):
220
- """This is the only authority responsible for moving jobs from running df to failed df or completed df."""
221
- jobs_ids_to_be_removed_from_running: list[str] = []
222
- for a_rm in self.running_jobs:
223
- status = a_rm.file_manager.get_job_status(session_name=a_rm.job_params.session_name, tab_name=a_rm.job_params.tab_name)
224
- if status == "running": pass
225
- elif status == "completed" or status == "failed":
226
- job_name = a_rm.config.job_id
227
- log = self.read_log()
228
- df_to_add = log[status]
229
- df_to_take = log["running"]
230
- entry = LogEntry.from_dict(df_to_take[df_to_take["name"] == job_name].iloc[0].to_dict())
231
- entry.end_time = pd.Timestamp.now().strftime("%Y-%m-%d %H:%M:%S")
232
- df_to_add = pd.concat([df_to_add, pd.DataFrame([entry.__dict__])], ignore_index=True)
233
- df_to_take = df_to_take[df_to_take["name"] != job_name]
234
- log[status] = df_to_add
235
- log["running"] = df_to_take
236
- self.write_log(log=log)
237
- # self.running_jobs.remove(a_rm)
238
- jobs_ids_to_be_removed_from_running.append(a_rm.config.job_id)
239
- elif status == "queued": raise RuntimeError("I thought I'm working strictly with running jobs, and I encountered unexpected a job with `queued` status.")
240
- else: raise ValueError(f"I receieved a status that I don't know how to handle `{status}`")
241
- self.running_jobs = [a_rm for a_rm in self.running_jobs if a_rm.config.job_id not in jobs_ids_to_be_removed_from_running]
242
- Save.pickle(obj=self.running_jobs, path=self.status_root.joinpath("running_jobs.pkl"), verbose=False)
243
- self.status_root.to_cloud(cloud=self.cloud, rel2home=True, verbose=False) # no need for lock as this writes to a folder specific to this machine.
244
- def start_jobs_if_possible(self):
245
- """This is the only authority responsible for moving jobs from queue df to running df."""
246
- if len(self.running_jobs) == self.max_jobs:
247
- print(f"⚠️ No more capacity to run more jobs ({len(self.running_jobs)} / {self.max_jobs=})")
248
- return
249
- from machineconfig.cluster.remote_machine import RemoteMachine
250
- log = self.read_log() # ask for the log file.
251
- if len(log["queued"]) == 0:
252
- print("No queued jobs found.")
253
- return None
254
- idx: int = 0
255
- while len(self.running_jobs) < self.max_jobs:
256
- queue_entry = LogEntry.from_dict(log["queued"].iloc[idx].to_dict())
257
- a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{queue_entry.name}")
258
- rm: RemoteMachine = Read.pickle(path=a_job_path.joinpath("data/remote_machine.Machine.pkl"))
259
- if rm.config.allowed_remotes is not None and f"{getpass.getuser()}@{platform.node()}" not in rm.config.allowed_remotes:
260
- print(f"Job `{queue_entry.name}` is not allowed to run on this machine. Skipping ...")
261
- idx += 1
262
- if idx >= len(log["queued"]):
263
- break # looked at all jobs in the queue and none is allowed to run on this machine.
264
- continue # look at the next job in the queue.
265
- pid, _process_cmd = rm.fire(run=True)
266
- queue_entry.pid = pid
267
- # queue_entry.cmd = process_cmd
268
- queue_entry.run_machine = f"{getpass.getuser()}@{platform.node()}"
269
- queue_entry.start_time = pd.Timestamp.now().strftime("%Y-%m-%d %H:%M:%S")
270
- queue_entry.session_name = rm.job_params.session_name
271
- log["queued"] = log["queued"][log["queued"]["name"] != queue_entry.name]
272
- # log["queued"] = log["queued"].iloc[1:] if len(log["queued"]) > 0 else pd.DataFrame(columns=log["queued"].column)
273
- log["running"] = pd.concat([log["running"], pd.DataFrame([queue_entry.__dict__])], ignore_index=True)
274
- self.running_jobs.append(rm)
275
- self.write_log(log=log)
276
- return None
159
+ # # Filter columns based on item type
160
+ # excluded_cols = {"cmd", "note"}
161
+ # if item_name == "queued": excluded_cols.update({"pid", "start_time", "end_time", "run_machine"})
162
+ # if item_name == "running": excluded_cols.update({"submission_time", "source_machine", "end_time"})
163
+ # if item_name == "completed": excluded_cols.update({"submission_time", "source_machine", "start_time", "pid"})
164
+ # if item_name == "failed": excluded_cols.update({"submission_time", "source_machine", "start_time"})
165
+
166
+ # # Filter items and take last 10
167
+ # filtered_items = []
168
+ # for item in display_items[-10:]:
169
+ # filtered_item = {k: v for k, v in item.items() if k not in excluded_cols}
170
+ # filtered_items.append(filtered_item)
171
+
172
+ # if filtered_items:
173
+ # pprint(format_table_markdown(filtered_items))
174
+ # pprint("\n\n")
175
+ # print("👷 Workers:")
176
+ # servers_report = self.prepare_servers_report(cloud_root=alternative_base)
177
+ # pprint(format_table_markdown(servers_report))
178
+ # sched = Scheduler(routine=routine, wait="5m")
179
+ # sched.run()
277
180
 
278
- def reset_cloud(self, unsafe: bool = False):
279
- print("☠️ Resetting cloud server ☠️")
280
- if not unsafe: self.claim_lock() # it is unsafe to ignore the lock since other workers thinnk they own the lock and will push their data and overwrite the reset. Do so only when knowing that other
281
- CloudManager.base_path.expanduser().delete(sure=True).create().sync_to_cloud(cloud=self.cloud, rel2home=True, sync_up=True, verbose=True, transfers=100)
282
- self.release_lock()
283
- def reset_lock(self): CloudManager.base_path.expanduser().create().joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
284
- @staticmethod
285
- def run_clean_trial():
286
- self = CloudManager(max_jobs=1)
287
- self.base_path.expanduser().delete(sure=True).create().sync_to_cloud(cloud=self.cloud, rel2home=True, sync_up=True, transfers=20)
288
- from machineconfig.cluster.templates.run_remote import run_on_cloud
289
- run_on_cloud()
290
- self.serve()
291
- def claim_lock(self, first_call: bool = True):
292
- """
293
- Note: If the parameters of the class are messed with, there is no gaurantee of zero collision by this method.
294
- It takes at least inter_check_interval_sec * num_claims_check to claim the lock.
295
- """
296
- if first_call: print("Claiming lock 🔒 ...")
297
- this_machine = f"{getpass.getuser()}@{platform.node()}"
298
- path = CloudManager.base_path.expanduser().create()
299
- lock_path = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
300
- if lock_path is None:
301
- print("Lock doesn't exist on remote, uploading for the first time.")
302
- path.joinpath("lock.txt").write_text(this_machine).to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
303
- return self.claim_lock(first_call=False)
181
+ # # ================== CLEARNING METHODS ===================
182
+ # def clean_interrupted_jobs_mess(self, return_to_queue: bool = True):
183
+ # """Clean jobs that failed but in logs show running by looking at the pid.
184
+ # If you want to do the same for remote machines, you will need to do it manually using `rerun_jobs`"""
185
+ # assert len(self.running_jobs) == 0, "method should never be called while there are running jobs. This can only be called at the beginning of the run."
186
+ # from machineconfig.cluster.remote_machine import RemoteMachine
187
+ # this_machine = f"{getpass.getuser()}@{platform.node()}"
188
+ # log = self.read_log()
189
+ # # servers_report = self.prepare_servers_report(cloud_root=CloudManager.base_path.expanduser())
190
+ # dirt: list[str] = []
191
+ # for job_data in log["running"]:
192
+ # entry = LogEntry.from_dict(job_data)
193
+ # if entry.run_machine != this_machine: continue
194
+ # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
195
+ # rm: RemoteMachine = pickle.loads(a_job_path.joinpath("data/remote_machine.Machine.pkl").read_bytes())
196
+ # status = rm.file_manager.get_job_status(session_name=rm.job_params.session_name, tab_name=rm.job_params.tab_name)
197
+ # if status == "running":
198
+ # print(f"Job `{entry.name}` is still running, added to running jobs.")
199
+ # self.running_jobs.append(rm)
200
+ # else:
201
+ # entry.pid = None
202
+ # entry.cmd = None
203
+ # entry.start_time = None
204
+ # entry.end_time = None
205
+ # entry.run_machine = None
206
+ # entry.session_name = None
207
+ # rm.file_manager.execution_log_dir.expanduser().joinpath("status.txt").delete(sure=True)
208
+ # rm.file_manager.execution_log_dir.expanduser().joinpath("pid.txt").delete(sure=True)
209
+ # entry.note += f"| Job was interrupted by a crash of the machine `{this_machine}`."
210
+ # dirt.append(entry.name)
211
+ # print(f"Job `{entry.name}` is not running, removing it from log of running jobs.")
212
+ # if return_to_queue:
213
+ # log["queued"].append(entry.__dict__)
214
+ # print(f"Job `{entry.name}` is not running, returning it to the queue.")
215
+ # else:
216
+ # log["failed"].append(entry.__dict__)
217
+ # print(f"Job `{entry.name}` is not running, moving it to failed jobs.")
218
+ # # Remove entries that are in dirt list
219
+ # log["running"] = [job for job in log["running"] if job.get("name") not in dirt]
220
+ # self.write_log(log=log)
221
+ # def clean_failed_jobs_mess(self):
222
+ # """If you want to do it for remote machine, use `rerun_jobs` (manual selection)"""
223
+ # print("⚠️ Cleaning failed jobs mess for this machine ⚠️")
224
+ # from machineconfig.cluster.remote_machine import RemoteMachine
225
+ # log = self.read_log()
226
+ # for job_data in log["failed"]:
227
+ # entry = LogEntry.from_dict(job_data)
228
+ # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
229
+ # rm: RemoteMachine = pickle.loads(a_job_path.joinpath("data/remote_machine.Machine.pkl").read_bytes())
230
+ # entry.note += f"| Job failed @ {entry.run_machine}"
231
+ # entry.pid = None
232
+ # entry.cmd = None
233
+ # entry.start_time = None
234
+ # entry.end_time = None
235
+ # entry.run_machine = None
236
+ # entry.session_name = None
237
+ # rm.file_manager.execution_log_dir.expanduser().joinpath("status.txt").delete(sure=True)
238
+ # rm.file_manager.execution_log_dir.expanduser().joinpath("pid.txt").delete(sure=True)
239
+ # print(f"Job `{entry.name}` is not running, removing it from log of running jobs.")
240
+ # log["queued"].append(entry.__dict__)
241
+ # print(f"Job `{entry.name}` is not running, returning it to the queue.")
242
+ # log["failed"] = []
243
+ # self.write_log(log=log)
244
+ # self.release_lock()
245
+ # def rerun_jobs(self):
246
+ # """This method involves manual selection but has all-files scope (failed and running) and can be used for both local and remote machines.
247
+ # The reason it is not automated for remotes is because even though the server might have failed, the processes therein might be running, so there is no automated way to tell."""
248
+ # log = self.read_log()
249
+ # from machineconfig.cluster.remote_machine import RemoteMachine
250
+ # from machineconfig.utils.utils import display_options
251
+ # # Replace crocodile List usage with plain Python list comprehension
252
+ # jobs_all: list[str] = [p.name for p in self.base_path.expanduser().joinpath("jobs").iterdir()]
253
+ # jobs_selected = display_options(options=jobs_all, msg="Select Jobs to Redo", multi=True, fzf=True)
254
+ # for a_job in jobs_selected:
255
+ # # find in which log list does this job live:
256
+ # found_log_type = None
257
+ # found_entry_data = None
258
+ # for log_type, log_list in log.items():
259
+ # for job_data in log_list:
260
+ # if job_data.get("name") == a_job:
261
+ # found_log_type = log_type
262
+ # found_entry_data = job_data
263
+ # break
264
+ # if found_log_type:
265
+ # break
266
+
267
+ # if not found_log_type:
268
+ # raise ValueError(f"Job `{a_job}` is not found in any of the log lists.")
269
+
270
+ # if found_entry_data is None:
271
+ # raise ValueError(f"Job `{a_job}` has no entry data.")
272
+
273
+ # entry = LogEntry.from_dict(found_entry_data)
274
+ # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{entry.name}")
275
+ # entry.note += f"| Job failed @ {entry.run_machine}"
276
+ # entry.pid = None
277
+ # entry.cmd = None
278
+ # entry.start_time = None
279
+ # entry.end_time = None
280
+ # entry.run_machine = None
281
+ # entry.session_name = None
282
+ # rm: RemoteMachine = pickle.loads(a_job_path.joinpath("data/remote_machine.Machine.pkl").read_bytes())
283
+ # rm.file_manager.execution_log_dir.expanduser().joinpath("status.txt").delete(sure=True)
284
+ # rm.file_manager.execution_log_dir.expanduser().joinpath("pid.txt").delete(sure=True)
285
+ # log["queued"].append(entry.__dict__)
286
+ # # Remove from original log type
287
+ # log[found_log_type] = [job for job in log[found_log_type] if job.get("name") != a_job]
288
+ # print(f"Job `{entry.name}` was removed from {found_log_type} and added to the queue in order to be re-run.")
289
+ # self.write_log(log=log)
290
+ # self.release_lock()
304
291
 
305
- locking_machine = lock_path.read_text()
306
- if locking_machine != "" and locking_machine != this_machine:
307
- if (pd.Timestamp.now() - lock_path.time("m")).total_seconds() > 3600:
308
- print(f"⚠️ Lock was claimed by `{locking_machine}` for more than an hour. Something wrong happened there. Resetting the lock!")
309
- self.reset_lock()
310
- return self.claim_lock(first_call=False)
311
- print(f"CloudManager: Lock already claimed by `{locking_machine}`. 🤷‍♂️")
312
- wait = int(random.random() * 30)
313
- print(f"💤 sleeping for {wait} seconds and trying again.")
314
- time.sleep(wait)
315
- return self.claim_lock(first_call=False)
292
+ # def serve(self):
293
+ # self.clean_interrupted_jobs_mess()
294
+ # def routine(sched: Any):
295
+ # _ = sched
296
+ # self.start_jobs_if_possible()
297
+ # self.get_running_jobs_statuses()
298
+ # self.release_lock()
299
+ # sched = Scheduler(routine=routine, wait=f"{self.server_interval_sec}s")
300
+ # return sched.run()
316
301
 
317
- if locking_machine == this_machine: print("Lock already claimed by this machine. 🤭")
318
- elif locking_machine == "": print("No claims on lock, claiming it ... 🙂")
319
- else: raise ValueError("Unexpected value of lock_data at this point of code.")
302
+ # def get_running_jobs_statuses(self):
303
+ # """This is the only authority responsible for moving jobs from running df to failed df or completed df."""
304
+ # jobs_ids_to_be_removed_from_running: list[str] = []
305
+ # for a_rm in self.running_jobs:
306
+ # status = a_rm.file_manager.get_job_status(session_name=a_rm.job_params.session_name, tab_name=a_rm.job_params.tab_name)
307
+ # if status == "running": pass
308
+ # elif status == "completed" or status == "failed":
309
+ # job_name = a_rm.config.job_id
310
+ # log = self.read_log()
311
+
312
+ # # Find the entry in running jobs
313
+ # entry_data = None
314
+ # for job_data in log["running"]:
315
+ # if job_data.get("name") == job_name:
316
+ # entry_data = job_data
317
+ # break
318
+
319
+ # if entry_data:
320
+ # entry = LogEntry.from_dict(entry_data)
321
+ # entry.end_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
322
+ # log[status].append(entry.__dict__)
323
+ # # Remove from running
324
+ # log["running"] = [job for job in log["running"] if job.get("name") != job_name]
325
+ # self.write_log(log=log)
326
+ # # self.running_jobs.remove(a_rm)
327
+ # jobs_ids_to_be_removed_from_running.append(a_rm.config.job_id)
328
+ # elif status == "queued": raise RuntimeError("I thought I'm working strictly with running jobs, and I encountered unexpected a job with `queued` status.")
329
+ # else: raise ValueError(f"I receieved a status that I don't know how to handle `{status}`")
330
+ # self.running_jobs = [a_rm for a_rm in self.running_jobs if a_rm.config.job_id not in jobs_ids_to_be_removed_from_running]
331
+ # save_pickle(obj=self.running_jobs, path=self.status_root.joinpath("running_jobs.pkl"), verbose=False)
332
+ # self.status_root.to_cloud(cloud=self.cloud, rel2home=True, verbose=False) # no need for lock as this writes to a folder specific to this machine.
333
+ # def start_jobs_if_possible(self):
334
+ # """This is the only authority responsible for moving jobs from queue df to running df."""
335
+ # if len(self.running_jobs) == self.max_jobs:
336
+ # print(f"⚠️ No more capacity to run more jobs ({len(self.running_jobs)} / {self.max_jobs=})")
337
+ # return
338
+ # from machineconfig.cluster.remote_machine import RemoteMachine
339
+ # log = self.read_log() # ask for the log file.
340
+ # if len(log["queued"]) == 0:
341
+ # print("No queued jobs found.")
342
+ # return None
343
+ # idx: int = 0
344
+ # while len(self.running_jobs) < self.max_jobs:
345
+ # if idx >= len(log["queued"]):
346
+ # break # looked at all jobs in the queue
347
+
348
+ # queue_entry = LogEntry.from_dict(log["queued"][idx])
349
+ # a_job_path = CloudManager.base_path.expanduser().joinpath(f"jobs/{queue_entry.name}")
350
+ # rm: RemoteMachine = pickle.loads(a_job_path.joinpath("data/remote_machine.Machine.pkl").read_bytes())
351
+ # if rm.config.allowed_remotes is not None and f"{getpass.getuser()}@{platform.node()}" not in rm.config.allowed_remotes:
352
+ # print(f"Job `{queue_entry.name}` is not allowed to run on this machine. Skipping ...")
353
+ # idx += 1
354
+ # continue # look at the next job in the queue.
355
+
356
+ # pid, _process_cmd = rm.fire(run=True)
357
+ # queue_entry.pid = pid
358
+ # # queue_entry.cmd = process_cmd
359
+ # queue_entry.run_machine = f"{getpass.getuser()}@{platform.node()}"
360
+ # queue_entry.start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
361
+ # queue_entry.session_name = rm.job_params.session_name
362
+
363
+ # # Remove from queued and add to running
364
+ # log["queued"] = [job for job in log["queued"] if job.get("name") != queue_entry.name]
365
+ # log["running"].append(queue_entry.__dict__)
366
+ # self.running_jobs.append(rm)
367
+ # self.write_log(log=log)
368
+ # return None
320
369
 
321
- path.joinpath("lock.txt").write_text(this_machine).to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
322
- counter: int = 1
323
- while counter < self.num_claim_checks:
324
- lock_path_tmp = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
325
- assert lock_path_tmp is not None
326
- lock_data_tmp = lock_path_tmp.read_text()
327
- if lock_data_tmp != this_machine:
328
- print(f"CloudManager: Lock already claimed by `{lock_data_tmp}`. 🤷‍♂️")
329
- print(f"sleeping for {self.inter_check_interval_sec} seconds and trying again.")
330
- time.sleep(self.inter_check_interval_sec)
331
- return self.claim_lock(first_call=False)
332
- counter += 1
333
- print(f"‼️ Claim laid, waiting for 10 seconds and checking if this is challenged: #{counter}-{self.num_claim_checks} ❓")
334
- time.sleep(10)
335
- CloudManager.base_path.expanduser().sync_to_cloud(cloud=self.cloud, rel2home=True, verbose=False, sync_down=True)
336
- print("✅ Lock Claimed 🔒")
337
- self.lock_claimed = True
370
+ # def reset_cloud(self, unsafe: bool = False):
371
+ # print("☠️ Resetting cloud server ☠️")
372
+ # if not unsafe: self.claim_lock() # it is unsafe to ignore the lock since other workers thinnk they own the lock and will push their data and overwrite the reset. Do so only when knowing that other
373
+ # base_path = CloudManager.base_path.expanduser().delete(sure=True)
374
+ # base_path.mkdir(parents=True, exist_ok=True)
375
+ # base_path.sync_to_cloud(cloud=self.cloud, rel2home=True, sync_up=True, verbose=True, transfers=100)
376
+ # self.release_lock()
377
+ # def reset_lock(self):
378
+ # base_path = CloudManager.base_path.expanduser()
379
+ # base_path.mkdir(parents=True, exist_ok=True)
380
+ # base_path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
381
+ # @staticmethod
382
+ # def run_clean_trial():
383
+ # self = CloudManager(max_jobs=1)
384
+ # base_path = self.base_path.expanduser().delete(sure=True)
385
+ # base_path.mkdir(parents=True, exist_ok=True)
386
+ # base_path.sync_to_cloud(cloud=self.cloud, rel2home=True, sync_up=True, transfers=20)
387
+ # from machineconfig.cluster.templates.run_remote import run_on_cloud
388
+ # run_on_cloud()
389
+ # self.serve()
390
+ # def claim_lock(self, first_call: bool = True) -> None:
391
+ # """
392
+ # Note: If the parameters of the class are messed with, there is no gaurantee of zero collision by this method.
393
+ # It takes at least inter_check_interval_sec * num_claims_check to claim the lock.
394
+ # """
395
+ # if first_call: print("Claiming lock 🔒 ...")
396
+ # this_machine = f"{getpass.getuser()}@{platform.node()}"
397
+ # path = CloudManager.base_path.expanduser()
398
+ # path.mkdir(parents=True, exist_ok=True)
399
+ # lock_path = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
400
+ # if lock_path is None:
401
+ # print("Lock doesn't exist on remote, uploading for the first time.")
402
+ # path.joinpath("lock.txt").write_text(this_machine).to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
403
+ # return self.claim_lock(first_call=False)
338
404
 
339
- def release_lock(self):
340
- if not self.lock_claimed:
341
- print("⚠️ Lock is not claimed, nothing to release.")
342
- return
343
- print("Releasing Lock")
344
- path = CloudManager.base_path.expanduser().create()
345
- lock_path = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
346
- if lock_path is None:
347
- print("Lock doesn't exist on remote, uploading for the first time.")
348
- path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
349
- self.lock_claimed = False
350
- return NoReturn
351
- data = lock_path.read_text()
352
- this_machine = f"{getpass.getuser()}@{platform.node()}"
353
- if data != this_machine:
354
- raise ValueError(f"CloudManager: Lock already claimed by `{data}`. 🤷‍♂️ Can't release a lock not owned! This shouldn't happen. Consider increasing trails before confirming the claim.")
355
- # self.lock_claimed = False
356
- path.joinpath("lock.txt").write_text("")
357
- CloudManager.base_path.expanduser().sync_to_cloud(cloud=self.cloud, rel2home=True, verbose=False, sync_up=True) # .to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
358
- self.lock_claimed = False
359
- return NoReturn
405
+ # locking_machine = lock_path.read_text()
406
+ # if locking_machine != "" and locking_machine != this_machine:
407
+ # lock_mod_time = datetime.fromtimestamp(lock_path.stat().st_mtime)
408
+ # if (datetime.now() - lock_mod_time).total_seconds() > 3600:
409
+ # print(f"⚠️ Lock was claimed by `{locking_machine}` for more than an hour. Something wrong happened there. Resetting the lock!")
410
+ # self.reset_lock()
411
+ # return self.claim_lock(first_call=False)
412
+ # print(f"CloudManager: Lock already claimed by `{locking_machine}`. 🤷‍♂️")
413
+ # wait = int(random.random() * 30)
414
+ # print(f"💤 sleeping for {wait} seconds and trying again.")
415
+ # time.sleep(wait)
416
+ # return self.claim_lock(first_call=False)
417
+
418
+ # if locking_machine == this_machine: print("Lock already claimed by this machine. 🤭")
419
+ # elif locking_machine == "": print("No claims on lock, claiming it ... 🙂")
420
+ # else: raise ValueError("Unexpected value of lock_data at this point of code.")
421
+
422
+ # path.joinpath("lock.txt").write_text(this_machine).to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
423
+ # counter: int = 1
424
+ # while counter < self.num_claim_checks:
425
+ # lock_path_tmp = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
426
+ # assert lock_path_tmp is not None
427
+ # lock_data_tmp = lock_path_tmp.read_text()
428
+ # if lock_data_tmp != this_machine:
429
+ # print(f"CloudManager: Lock already claimed by `{lock_data_tmp}`. 🤷‍♂️")
430
+ # print(f"sleeping for {self.inter_check_interval_sec} seconds and trying again.")
431
+ # time.sleep(self.inter_check_interval_sec)
432
+ # return self.claim_lock(first_call=False)
433
+ # counter += 1
434
+ # print(f"‼️ Claim laid, waiting for 10 seconds and checking if this is challenged: #{counter}-{self.num_claim_checks} ❓")
435
+ # time.sleep(10)
436
+ # CloudManager.base_path.expanduser().sync_to_cloud(cloud=self.cloud, rel2home=True, verbose=False, sync_down=True)
437
+ # print("✅ Lock Claimed 🔒")
438
+ # self.lock_claimed = True
439
+
440
+ # def release_lock(self):
441
+ # if not self.lock_claimed:
442
+ # print("⚠️ Lock is not claimed, nothing to release.")
443
+ # return
444
+ # print("Releasing Lock")
445
+ # path = CloudManager.base_path.expanduser()
446
+ # path.mkdir(parents=True, exist_ok=True)
447
+ # lock_path = path.joinpath("lock.txt").from_cloud(cloud=self.cloud, rel2home=True, verbose=False)
448
+ # if lock_path is None:
449
+ # print("Lock doesn't exist on remote, uploading for the first time.")
450
+ # path.joinpath("lock.txt").write_text("").to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
451
+ # self.lock_claimed = False
452
+ # return NoReturn
453
+ # data = lock_path.read_text()
454
+ # this_machine = f"{getpass.getuser()}@{platform.node()}"
455
+ # if data != this_machine:
456
+ # raise ValueError(f"CloudManager: Lock already claimed by `{data}`. 🤷‍♂️ Can't release a lock not owned! This shouldn't happen. Consider increasing trails before confirming the claim.")
457
+ # # self.lock_claimed = False
458
+ # path.joinpath("lock.txt").write_text("")
459
+ # CloudManager.base_path.expanduser().sync_to_cloud(cloud=self.cloud, rel2home=True, verbose=False, sync_up=True) # .to_cloud(cloud=self.cloud, rel2home=True, verbose=False)
460
+ # self.lock_claimed = False
461
+ # return NoReturn