machineconfig 2.2-py3-none-any.whl → 2.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of machineconfig might be problematic.
- machineconfig/cluster/sessions_managers/enhanced_command_runner.py +0 -2
- machineconfig/cluster/sessions_managers/layout_types.py +29 -0
- machineconfig/cluster/sessions_managers/wt_local.py +68 -62
- machineconfig/cluster/sessions_managers/wt_local_manager.py +51 -22
- machineconfig/cluster/sessions_managers/wt_remote.py +30 -108
- machineconfig/cluster/sessions_managers/wt_remote_manager.py +14 -11
- machineconfig/cluster/sessions_managers/wt_utils/layout_generator.py +33 -37
- machineconfig/cluster/sessions_managers/wt_utils/process_monitor.py +22 -17
- machineconfig/cluster/sessions_managers/wt_utils/session_manager.py +59 -10
- machineconfig/cluster/sessions_managers/wt_utils/status_reporter.py +16 -14
- machineconfig/cluster/sessions_managers/zellij_local.py +75 -57
- machineconfig/cluster/sessions_managers/zellij_local_manager.py +51 -23
- machineconfig/cluster/sessions_managers/zellij_remote.py +47 -27
- machineconfig/cluster/sessions_managers/zellij_remote_manager.py +13 -12
- machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +14 -10
- machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +31 -15
- machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +47 -21
- machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +1 -1
- machineconfig/cluster/sessions_managers/zellij_utils/status_reporter.py +8 -7
- machineconfig/cluster/templates/utils.py +1 -1
- machineconfig/profile/create.py +4 -0
- machineconfig/scripts/python/devops.py +1 -0
- machineconfig/scripts/python/devops_devapps_install.py +1 -0
- machineconfig/scripts/python/fire_agents.py +6 -6
- machineconfig/scripts/python/fire_jobs.py +16 -64
- machineconfig/scripts/python/fire_jobs_args_helper.py +84 -0
- machineconfig/scripts/python/fire_jobs_layout_helper.py +66 -0
- machineconfig/scripts/python/helpers/helpers4.py +0 -1
- machineconfig/scripts/python/wifi_conn.py +0 -1
- machineconfig/utils/code.py +0 -1
- machineconfig/utils/installer_utils/installer_abc.py +0 -1
- machineconfig/utils/options.py +7 -7
- machineconfig/utils/path.py +12 -12
- machineconfig/utils/path_reduced.py +6 -1
- machineconfig/utils/ssh.py +11 -1
- machineconfig/utils/upgrade_packages.py +12 -12
- {machineconfig-2.2.dist-info → machineconfig-2.3.dist-info}/METADATA +1 -1
- {machineconfig-2.2.dist-info → machineconfig-2.3.dist-info}/RECORD +51 -59
- machineconfig-2.3.dist-info/entry_points.txt +2 -0
- machineconfig/cluster/sessions_managers/archive/create_zellij_template.py +0 -60
- machineconfig/cluster/sessions_managers/archive/session_managers.py +0 -183
- machineconfig/cluster/sessions_managers/demo_rich_zellij.py +0 -0
- machineconfig/jobs/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_generic_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/jobs/python_linux_installers/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/__init__.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_devapps_install.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/devops_update_repos.cpython-313.pyc +0 -0
- machineconfig/scripts/python/__pycache__/fire_agents.cpython-313.pyc +0 -0
- /machineconfig/cluster/{cloud_manager.py → remote/cloud_manager.py} +0 -0
- /machineconfig/cluster/{data_transfer.py → remote/data_transfer.py} +0 -0
- /machineconfig/cluster/{distribute.py → remote/distribute.py} +0 -0
- /machineconfig/cluster/{file_manager.py → remote/file_manager.py} +0 -0
- /machineconfig/cluster/{job_params.py → remote/job_params.py} +0 -0
- /machineconfig/cluster/{loader_runner.py → remote/loader_runner.py} +0 -0
- /machineconfig/cluster/{remote_machine.py → remote/remote_machine.py} +0 -0
- /machineconfig/cluster/{script_execution.py → remote/script_execution.py} +0 -0
- /machineconfig/cluster/{script_notify_upon_completion.py → remote/script_notify_upon_completion.py} +0 -0
- /machineconfig/{cluster/sessions_managers/archive/__init__.py → scripts/python/fire_jobs_streamlit_helper.py} +0 -0
- {machineconfig-2.2.dist-info → machineconfig-2.3.dist-info}/WHEEL +0 -0
- {machineconfig-2.2.dist-info → machineconfig-2.3.dist-info}/top_level.txt +0 -0
machineconfig/scripts/python/fire_jobs_args_helper.py
ADDED
@@ -0,0 +1,84 @@
+from dataclasses import dataclass
+from typing import Optional
+import argparse
+
+
+@dataclass
+class FireJobArgs:
+    """Type-safe dataclass for fire_jobs command line arguments."""
+
+    path: str = "."
+    function: Optional[str] = None
+    ve: str = ""
+    cmd: bool = False
+    interactive: bool = False
+    debug: bool = False
+    choose_function: bool = False
+    loop: bool = False
+    jupyter: bool = False
+    submit_to_cloud: bool = False
+    remote: bool = False
+    module: bool = False
+    streamlit: bool = False
+    environment: str = ""
+    holdDirectory: bool = False
+    PathExport: bool = False
+    git_pull: bool = False
+    optimized: bool = False
+    Nprocess: int = 1
+    zellij_tab: Optional[str] = None
+    watch: bool = False
+    kw: Optional[list[str]] = None
+    layout: bool = False
+
+
+def get_args() -> FireJobArgs:
+    parser = argparse.ArgumentParser()
+    parser.add_argument("path", nargs="?", type=str, help="The directory containing the jobs", default=".")
+    parser.add_argument("function", nargs="?", type=str, help="Fuction to run", default=None)
+    parser.add_argument("--ve", "-v", type=str, help="virtual enviroment name", default="")
+    parser.add_argument("--cmd", "-B", action="store_true", help="Create a cmd fire command to launch the the job asynchronously.")
+    parser.add_argument("--interactive", "-i", action="store_true", help="Whether to run the job interactively using IPython")
+    parser.add_argument("--debug", "-d", action="store_true", help="debug")
+    parser.add_argument("--choose_function", "-c", action="store_true", help="debug")
+    parser.add_argument("--loop", "-l", action="store_true", help="infinite recusion (runs again after completion/interruption)")
+    parser.add_argument("--jupyter", "-j", action="store_true", help="open in a jupyter notebook")
+    parser.add_argument("--submit_to_cloud", "-C", action="store_true", help="submit to cloud compute")
+    parser.add_argument("--remote", "-r", action="store_true", help="launch on a remote machine")
+    parser.add_argument("--module", "-m", action="store_true", help="launch the main file")
+    parser.add_argument("--streamlit", "-S", action="store_true", help="run as streamlit app")
+    parser.add_argument("--environment", "-E", type=str, help="Choose ip, localhost, hostname or arbitrary url", default="")
+    parser.add_argument("--holdDirectory", "-D", action="store_true", help="hold current directory and avoid cd'ing to the script directory")
+    parser.add_argument("--PathExport", "-P", action="store_true", help="augment the PYTHONPATH with repo root.")
+    parser.add_argument("--git_pull", "-g", action="store_true", help="Start by pulling the git repo")
+    parser.add_argument("--optimized", "-O", action="store_true", help="Run the optimized version of the function")
+    parser.add_argument("--Nprocess", "-p", type=int, help="Number of processes to use", default=1)
+    parser.add_argument("--zellij_tab", "-z", type=str, dest="zellij_tab", help="open in a new zellij tab")
+    parser.add_argument("--watch", "-w", action="store_true", help="watch the file for changes")
+    parser.add_argument("--kw", nargs="*", default=None, help="keyword arguments to pass to the function in the form of k1 v1 k2 v2 ... (meaning k1=v1, k2=v2, etc)")
+    parser.add_argument("--layout", "-L", action="store_true", help="use layout configuration (Zellij Or WindowsTerminal)")
+
+    try:
+        args_raw = parser.parse_args()
+    except Exception as ex:
+        print(f"❌ Failed to parse arguments: {ex}")
+        parser.print_help()
+        raise ex
+    args = FireJobArgs(**vars(args_raw))
+    return args
+
+
+def extract_kwargs(args: FireJobArgs) -> dict[str, object]:
+    str2obj = {"True": True, "False": False, "None": None}
+    if args.kw is not None:
+        assert len(args.kw) % 2 == 0, f"args.kw must be a list of even length. Got {len(args.kw)}"
+        kwargs = dict(zip(args.kw[::2], args.kw[1::2]))
+        kwargs: dict[str, object]
+        for key, value in kwargs.items():
+            if value in str2obj:
+                kwargs[key] = str2obj[str(value)]
+        if args.function is None:  # if user passed arguments and forgot to pass function, then assume they want to run the main function.
+            args.choose_function = True
+    else:
+        kwargs = {}
+    return kwargs
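For orientation, here is a minimal sketch of how this new helper is presumably driven from the refactored fire_jobs.py (which this release touches with +16 -64 but whose call site is not shown here), so the wiring below is an assumption based only on the signatures above:

```python
# Hypothetical call site; fire_jobs.py itself is refactored in this release but not shown in this diff.
from machineconfig.scripts.python.fire_jobs_args_helper import get_args, extract_kwargs

args = get_args()              # parse sys.argv into the FireJobArgs dataclass
kwargs = extract_kwargs(args)  # e.g. `--kw lr 0.1 debug True` -> {"lr": "0.1", "debug": True}
# Only the literals "True"/"False"/"None" are converted; every other value stays a string.
```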
machineconfig/scripts/python/fire_jobs_layout_helper.py
ADDED
@@ -0,0 +1,66 @@
+from pathlib import Path
+from machineconfig.cluster.sessions_managers.layout_types import LayoutConfig, LayoutsFile
+from typing import Optional, TYPE_CHECKING
+from machineconfig.scripts.python.helpers.helpers4 import search_for_files_of_interest
+from machineconfig.utils.options import choose_one_option
+from machineconfig.utils.path import match_file_name, sanitize_path
+from machineconfig.utils.path_reduced import PathExtended as PathExtended
+
+if TYPE_CHECKING:
+    from machineconfig.scripts.python.fire_jobs_args_helper import FireJobArgs
+
+
+def select_layout(layouts_json_file: Path, layout_name: Optional[str]):
+    import json
+
+    layout_file: LayoutsFile = json.loads(layouts_json_file.read_text(encoding="utf-8"))
+    if len(layout_file["layouts"]) == 0:
+        raise ValueError(f"No layouts found in {layouts_json_file}")
+    if layout_name is None:
+        options = [layout["layoutName"] for layout in layout_file["layouts"]]
+        from machineconfig.utils.options import choose_one_option
+
+        layout_name = choose_one_option(options=options, prompt="Choose a layout configuration:", fzf=True)
+    print(f"Selected layout: {layout_name}")
+    layout_chosen = next((layout for layout in layout_file["layouts"] if layout["layoutName"] == layout_name), None)
+    if layout_chosen is None:
+        layout_chosen = next((layout for layout in layout_file["layouts"] if layout["layoutName"].lower() == layout_name.lower()), None)
+    if layout_chosen is None:
+        available_layouts = [layout["layoutName"] for layout in layout_file["layouts"]]
+        raise ValueError(f"Layout '{layout_name}' not found. Available layouts: {available_layouts}")
+    return layout_chosen
+
+
+def launch_layout(layout_config: LayoutConfig) -> Optional[Exception]:
+    import platform
+
+    if platform.system() == "Linux" or platform.system() == "Darwin":
+        print("🧑💻 Launching layout using Zellij terminal multiplexer...")
+        from machineconfig.cluster.sessions_managers.zellij_local import run_zellij_layout
+
+        run_zellij_layout(layout_config=layout_config)
+    elif platform.system() == "Windows":
+        print("🧑💻 Launching layout using Windows Terminal...")
+        from machineconfig.cluster.sessions_managers.wt_local import run_wt_layout
+
+        run_wt_layout(layout_config=layout_config)
+    else:
+        print(f"❌ Unsupported platform: {platform.system()}")
+    return None
+
+
+def handle_layout_args(args: "FireJobArgs") -> None:
+    # args.function = args.path
+    # args.path = "layout.json"
+    path_obj = sanitize_path(args.path)
+    if not path_obj.exists():
+        choice_file = match_file_name(sub_string=args.path, search_root=PathExtended.cwd(), suffixes={".json"})
+    elif path_obj.is_dir():
+        print(f"🔍 Searching recursively for Python, PowerShell and Shell scripts in directory `{path_obj}`")
+        files = search_for_files_of_interest(path_obj)
+        print(f"🔍 Got #{len(files)} results.")
+        choice_file = choose_one_option(options=files, fzf=True)
+        choice_file = PathExtended(choice_file)
+    else:
+        choice_file = path_obj
+    launch_layout(layout_config=select_layout(layouts_json_file=choice_file, layout_name=args.function))
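select_layout only touches two keys of the file it reads: a top-level "layouts" array and each entry's "layoutName". The full schema lives in the new layout_types.py (listed above but not shown), so in the sketch below everything beyond those two keys is an assumption:

```python
# Minimal layouts file that select_layout() could consume; only "layouts" and
# "layoutName" are confirmed by the code above, the rest of LayoutConfig is not shown.
import json
from pathlib import Path

layouts_file = {
    "layouts": [
        {"layoutName": "jobs"},
        {"layoutName": "monitoring"},
    ]
}
Path("layout.json").write_text(json.dumps(layouts_file, indent=2), encoding="utf-8")

# select_layout(Path("layout.json"), layout_name="jobs") returns the matching entry;
# layout_name=None drops into the fzf chooser, and launch_layout() then dispatches to
# Zellij on Linux/macOS or Windows Terminal on Windows.
```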
machineconfig/scripts/python/helpers/helpers4.py
CHANGED
@@ -43,7 +43,6 @@ def parse_pyfile(file_path: str):
 
     args_spec = NamedTuple("args_spec", [("name", str), ("type", str), ("default", Optional[str])])
     func_args: list[list[args_spec]] = [[]]  # this firt prepopulated dict is for the option 'RUN AS MAIN' which has no args
-
     import ast
 
     parsed_ast = ast.parse(PathExtended(file_path).read_text(encoding="utf-8"))
machineconfig/scripts/python/wifi_conn.py
CHANGED
@@ -187,7 +187,6 @@ def connect_to_new_network(ssid: str, password: str):
 def display_available_networks():
     """Display available networks (legacy function for compatibility)"""
     console.print("\n[blue]📡 Scanning for available networks...[/blue]")
-
     try:
         if platform.system() == "Windows":
             subprocess.run(["netsh", "wlan", "show", "networks", "interface=Wi-Fi"], check=True)
machineconfig/utils/code.py
CHANGED
machineconfig/utils/options.py
CHANGED
@@ -5,25 +5,25 @@ from rich.console import Console
 import platform
 import subprocess
 from typing import Optional, Union, TypeVar, Iterable
+from machineconfig.utils.source_of_truth import WINDOWS_INSTALL_PATH, LINUX_INSTALL_PATH
+
 
 T = TypeVar("T")
 
 
 def check_tool_exists(tool_name: str) -> bool:
-    if platform.system() == "Windows": tool_name = tool_name.replace(".exe", "") + ".exe"
-
-    from machineconfig.utils.source_of_truth import WINDOWS_INSTALL_PATH, LINUX_INSTALL_PATH
-
     if platform.system() == "Windows":
+        tool_name = tool_name.replace(".exe", "") + ".exe"
         cmd = "where.exe"
         root_path = Path(WINDOWS_INSTALL_PATH)
     elif platform.system() in ["Linux", "Darwin"]:
         cmd = "which"
         root_path = Path(LINUX_INSTALL_PATH)
-
-
+        return any([Path("/usr/local/bin").joinpath(tool_name).is_file(), Path("/usr/bin").joinpath(tool_name).is_file(), root_path.joinpath(tool_name).is_file()])
+    else:
+        raise NotImplementedError(f"platform {platform.system()} not implemented")
     _ = cmd
-    # try:
+    # try: # talking to terminal is too slow.
    #     _tmp = subprocess.check_output([cmd, tool_name], stderr=subprocess.DEVNULL)
    #     res: bool = True
    # except (subprocess.CalledProcessError, FileNotFoundError):
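The refactor keeps the lookup purely filesystem-based (the retained comment notes that shelling out to where/which is too slow), so usage remains a plain boolean check. A small sketch, with the caveat that the Windows branch continues past the lines shown above:

```python
from machineconfig.utils.options import check_tool_exists

# On Linux/macOS this now checks /usr/local/bin, /usr/bin and the configured
# LINUX_INSTALL_PATH; unsupported platforms raise NotImplementedError.
if not check_tool_exists("fzf"):
    print("fzf was not found in any of the expected install locations")
```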
machineconfig/utils/path.py
CHANGED
@@ -12,7 +12,7 @@ T = TypeVar("T")
 console = Console()
 
 
-def sanitize_path(a_path:
+def sanitize_path(a_path: str) -> PathExtended:
     path = PathExtended(a_path)
     if Path.cwd() == Path.home() and not path.exists():
         result = input("Current working directory is home, and passed path is not full path, are you sure you want to continue, [y]/n? ") or "y"
@@ -26,14 +26,14 @@ def sanitize_path(a_path: PathExtended) -> PathExtended:
         skip_parts = 3 if path.as_posix().startswith("/home") else 3  # Both have 3 parts to skip
         path = PathExtended.home().joinpath(*path.parts[skip_parts:])
         assert path.exists(), f"File not found: {path}"
-        source_os = "Linux" if
+        source_os = "Linux" if path.as_posix().startswith("/home") else "macOS"
         console.print(Panel(f"🔗 PATH MAPPING | {source_os} → Windows: `{a_path}` ➡️ `{path}`", title="Path Mapping", expand=False))
     elif platform.system() in ["Linux", "Darwin"] and PathExtended.home().as_posix() not in path.as_posix():  # copied between Unix-like systems with different username
         skip_parts = 3  # Both /home/username and /Users/username have 3 parts to skip
         path = PathExtended.home().joinpath(*path.parts[skip_parts:])
         assert path.exists(), f"File not found: {path}"
         current_os = "Linux" if platform.system() == "Linux" else "macOS"
-        source_os = "Linux" if
+        source_os = "Linux" if path.as_posix().startswith("/home") else "macOS"
         console.print(Panel(f"🔗 PATH MAPPING | {source_os} → {current_os}: `{a_path}` ➡️ `{path}`", title="Path Mapping", expand=False))
     elif path.as_posix().startswith("C:"):
         if platform.system() in ["Linux", "Darwin"]:  # path copied from Windows to Linux/Mac
@@ -49,18 +49,18 @@ def sanitize_path(a_path: PathExtended) -> PathExtended:
     return path.expanduser().absolute()
 
 
-def find_scripts(root: Path, name_substring: str) -> tuple[list[Path], list[Path]]:
+def find_scripts(root: Path, name_substring: str, suffixes: set[str]) -> tuple[list[Path], list[Path]]:
     filename_matches = []
     partial_path_matches = []
     for entry in root.iterdir():
         if entry.is_dir():
-            if entry.name in {".links", ".venv", ".git", ".idea", ".vscode", "node_modules", "__pycache__"}:
+            if entry.name in {".links", ".venv", ".git", ".idea", ".vscode", "node_modules", "__pycache__", ".mypy_cache"}:
                 # prune this entire subtree
                 continue
-            tmp1, tmp2 = find_scripts(entry, name_substring)
+            tmp1, tmp2 = find_scripts(entry, name_substring, suffixes)
             filename_matches.extend(tmp1)
             partial_path_matches.extend(tmp2)
-        elif entry.is_file() and entry.suffix in
+        elif entry.is_file() and entry.suffix in suffixes:
             if name_substring.lower() in entry.name.lower():
                 filename_matches.append(entry)
             elif name_substring.lower() in entry.as_posix().lower():
@@ -68,14 +68,14 @@ def find_scripts(root: Path, name_substring: str) -> tuple[list[Path], list[Path]]:
     return filename_matches, partial_path_matches
 
 
-def match_file_name(sub_string: str, search_root: PathExtended) -> PathExtended:
+def match_file_name(sub_string: str, search_root: PathExtended, suffixes: set[str]) -> PathExtended:
     search_root_obj = search_root.absolute()
     # assume subscript is filename only, not a sub_path. There is no need to fzf over the paths.
-    filename_matches, partial_path_matches = find_scripts(search_root_obj, sub_string)
+    filename_matches, partial_path_matches = find_scripts(search_root_obj, sub_string, suffixes)
     if len(filename_matches) == 1:
         return PathExtended(filename_matches[0])
-    console.print(Panel(f"Partial filename match with case-insensitivity failed. This generated #{len(filename_matches)} results.", title="Search", expand=False))
-    if len(filename_matches) <
+    console.print(Panel(f"Partial filename {search_root_obj} match with case-insensitivity failed. This generated #{len(filename_matches)} results.", title="Search", expand=False))
+    if len(filename_matches) < 20:
         print("\n".join([a_potential_match.as_posix() for a_potential_match in filename_matches]))
     if len(filename_matches) > 1:
         print("Try to narrow down filename_matches search by case-sensitivity.")
@@ -112,7 +112,7 @@ def match_file_name(sub_string: str, search_root: PathExtended) -> PathExtended:
     if len(search_res) == 1:
         return search_root_obj.joinpath(search_res_raw)
 
-    print(f"⚠️ WARNING | Multiple search results found for `{sub_string}
+    print(f"⚠️ WARNING | Multiple search results found for `{sub_string}`:\n'{search_res}'")
     cmd = f"cd '{search_root_obj}'; fd --type file | fzf --select-1 --query={sub_string}"
     console.print(Panel(f"🔍 SEARCH STRATEGY | Trying with raw fzf search ...\n{cmd}", title="Search Strategy", expand=False))
     try:
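find_scripts and match_file_name now take an explicit suffixes set instead of a hard-coded extension list; the new layout helper, for instance, calls match_file_name with suffixes={".json"}. A small usage sketch with an illustrative suffix set:

```python
from machineconfig.utils.path import match_file_name
from machineconfig.utils.path_reduced import PathExtended

# Find a script whose name contains "fire" anywhere under the current directory,
# restricted to the suffixes the caller cares about (the set here is illustrative).
script = match_file_name(sub_string="fire", search_root=PathExtended.cwd(), suffixes={".py", ".sh", ".ps1"})
print(script)
```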
machineconfig/utils/path_reduced.py
CHANGED
@@ -1,4 +1,3 @@
-
 from machineconfig.utils.utils2 import randstr
 
 from datetime import datetime
@@ -18,13 +17,16 @@ SHUTIL_FORMATS: TypeAlias = Literal["zip", "tar", "gztar", "bztar", "xztar"]
 
 def pwd2key(password: str, salt: Optional[bytes] = None, iterations: int = 10) -> bytes:  # Derive a secret key from a given password and salt"""
     import base64
+
     if salt is None:
         import hashlib
+
         m = hashlib.sha256()
         m.update(password.encode(encoding="utf-8"))
         return base64.urlsafe_b64encode(s=m.digest())  # make url-safe bytes required by Ferent.
     from cryptography.hazmat.primitives import hashes
     from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+
     return base64.urlsafe_b64encode(PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=iterations, backend=None).derive(password.encode()))
 
 
@@ -394,6 +396,7 @@ class PathExtended(type(Path()), Path):  # type: ignore # pylint: disable=E0241
 
         if system() == "Windows" and not Terminal.is_user_admin():  # you cannot create symlink without priviliages.
             import win32com.shell.shell
+
             _proce_info = win32com.shell.shell.ShellExecuteEx(lpVerb="runas", lpFile=sys.executable, lpParameters=f" -c \"from pathlib import Path; Path(r'{self.expanduser()}').symlink_to(r'{str(target_obj)}')\"")
             # TODO update PATH for this to take effect immediately.
             time.sleep(1)  # wait=True equivalent
@@ -443,6 +446,7 @@ class PathExtended(type(Path()), Path):  # type: ignore # pylint: disable=E0241
         if ".zip" in str(slf) and compressed:  # the root (self) is itself a zip archive (as opposed to some search results are zip archives)
             import zipfile
             import fnmatch
+
             root = slf.as_zip_path()
             if not r:
                 raw = list(root.iterdir())
@@ -665,6 +669,7 @@ class PathExtended(type(Path()), Path):  # type: ignore # pylint: disable=E0241
     def ungz(self, folder: OPLike = None, name: Optional[str] = None, path: OPLike = None, inplace: bool = False, orig: bool = False, verbose: bool = True) -> "PathExtended":
         op_path = self._resolve_path(folder, name, path, self.name.replace(".gz", "")).expanduser().resolve()
         import gzip
+
         PathExtended(str(op_path)).write_bytes(gzip.decompress(PathExtended(str(self.expanduser().resolve())).read_bytes()))
         msg = f"UNGZED {repr(self)} ==> {repr(op_path)}"
         ret = self if orig else PathExtended(op_path)
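pwd2key itself only gains blank lines here; for context, with salt=None it returns a urlsafe-base64 SHA-256 digest of the password, which matches the 32-byte key format Fernet expects (the comment's "Ferent" appears to mean the cryptography library's Fernet). A sketch, assuming the cryptography package is installed and that Fernet is indeed the intended consumer:

```python
from machineconfig.utils.path_reduced import pwd2key
from cryptography.fernet import Fernet

key = pwd2key(password="correct horse battery staple")  # salt=None -> plain SHA-256 derivation
token = Fernet(key).encrypt(b"secret payload")
assert Fernet(key).decrypt(token) == b"secret payload"
```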
machineconfig/utils/ssh.py
CHANGED
@@ -119,13 +119,16 @@ class SSH: # inferior alternative: https://github.com/fabric/fabric
         def view_bar(slf: Any, a: Any, b: Any):
             slf.total = int(b)
             slf.update(int(a - slf.n))  # update pbar with increment
+
         from tqdm import tqdm
+
         self.tqdm_wrap = type("TqdmWrap", (tqdm,), {"view_bar": view_bar})
         self._local_distro: Optional[str] = None
         self._remote_distro: Optional[str] = None
         self._remote_machine: Optional[MACHINE] = None
         self.terminal_responses: list[Response] = []
         self.platform = platform
+
     def get_remote_machine(self) -> MACHINE:
         if self._remote_machine is None:
             if self.run("$env:OS", verbose=False, desc="Testing Remote OS Type").op == "Windows_NT" or self.run("echo %OS%", verbose=False, desc="Testing Remote OS Type Again").op == "Windows_NT":
@@ -138,10 +141,12 @@ class SSH: # inferior alternative: https://github.com/fabric/fabric
         if self._local_distro is None:
             command = """uv run --with distro python -c "import distro; print(distro.name(pretty=True))" """
             import subprocess
+
             res = subprocess.run(command, shell=True, capture_output=True, text=True).stdout.strip()
             self._local_distro = res
             return res
         return self._local_distro
+
     def get_remote_distro(self):
         if self._remote_distro is None:
             res = self.run("""~/.local/bin/uv run --with distro python -c "import distro; print(distro.name(pretty=True))" """)
@@ -178,8 +183,10 @@ class SSH: # inferior alternative: https://github.com/fabric/fabric
         res = Response(cmd=command)
         res.output.returncode = os.system(command)
         return res
+
     def get_ssh_conn_str(self, cmd: str = ""):
         return "ssh " + (f" -i {self.sshkey}" if self.sshkey else "") + self.get_remote_repr().replace(":", " -p ") + (f" -t {cmd} " if cmd != "" else " ")
+
     def run(self, cmd: str, verbose: bool = True, desc: str = "", strict_err: bool = False, strict_returncode: bool = False) -> Response:
         raw = self.ssh.exec_command(cmd)
         res = Response(stdin=raw[0], stdout=raw[1], stderr=raw[2], cmd=cmd, desc=desc)  # type: ignore
@@ -189,15 +196,18 @@ class SSH: # inferior alternative: https://github.com/fabric/fabric
             res.print()
         self.terminal_responses.append(res)
         return res
+
     def run_py(self, cmd: str, desc: str = "", return_obj: bool = False, verbose: bool = True, strict_err: bool = False, strict_returncode: bool = False) -> Union[Any, Response]:
         assert '"' not in cmd, 'Avoid using `"` in your command. I dont know how to handle this when passing is as command to python in pwsh command.'
         if not return_obj:
             return self.run(
-                cmd=f"""uv run --with machineconfig -c "{Terminal.get_header(wdir=None, toolbox=True)}{cmd}\n""" + '"', desc=desc or f"run_py on {self.get_remote_repr()}", verbose=verbose, strict_err=strict_err, strict_returncode=strict_returncode
+                cmd=f"""uv run --with machineconfig -c "{Terminal.get_header(wdir=None, toolbox=True)}{cmd}\n""" + '"', desc=desc or f"run_py on {self.get_remote_repr()}", verbose=verbose, strict_err=strict_err, strict_returncode=strict_returncode
+            )
         assert "obj=" in cmd, "The command sent to run_py must have `obj=` statement if return_obj is set to True"
         source_file = self.run_py(f"""{cmd}\npath = Save.pickle(obj=obj, path=P.tmpfile(suffix='.pkl'))\nprint(path)""", desc=desc, verbose=verbose, strict_err=True, strict_returncode=True).op.split("\n")[-1]
         res = self.copy_to_here(source=source_file, target=PathExtended.tmpfile(suffix=".pkl"))
         import pickle
+
         return pickle.loads(res.read_bytes())
 
     def copy_from_here(self, source: PLike, target: OPLike = None, z: bool = False, r: bool = False, overwrite: bool = False, init: bool = True) -> Union[PathExtended, list[PathExtended]]:
machineconfig/utils/upgrade_packages.py
CHANGED
@@ -10,7 +10,7 @@ from typing import Any
 def generate_uv_add_commands(pyproject_path: Path, output_path: Path) -> None:
     """
     Generate uv add commands for each dependency group in pyproject.toml.
-
+
     Args:
         pyproject_path: Path to the pyproject.toml file
         output_path: Path where to write the uv add commands
@@ -18,9 +18,9 @@ def generate_uv_add_commands(pyproject_path: Path, output_path: Path) -> None:
     # Read pyproject.toml
     with open(pyproject_path, "rb") as f:
         pyproject_data: dict[str, Any] = tomllib.load(f)
-
+
     commands: list[str] = []
-
+
     # Handle main dependencies (no group)
     if "project" in pyproject_data and "dependencies" in pyproject_data["project"]:
         main_deps = pyproject_data["project"]["dependencies"]
@@ -28,7 +28,7 @@ def generate_uv_add_commands(pyproject_path: Path, output_path: Path) -> None:
         # Extract package names without version constraints
         package_names = [extract_package_name(dep) for dep in main_deps]
         commands.append(f"uv add {' '.join(package_names)}")
-
+
     # Handle optional dependencies as groups
     if "project" in pyproject_data and "optional-dependencies" in pyproject_data["project"]:
         optional_deps = pyproject_data["project"]["optional-dependencies"]
@@ -36,7 +36,7 @@ def generate_uv_add_commands(pyproject_path: Path, output_path: Path) -> None:
             if deps:
                 package_names = [extract_package_name(dep) for dep in deps]
                 commands.append(f"uv add {' '.join(package_names)} --group {group_name}")
-
+
     # Handle dependency-groups (like dev)
     if "dependency-groups" in pyproject_data:
         dep_groups = pyproject_data["dependency-groups"]
@@ -47,19 +47,19 @@ def generate_uv_add_commands(pyproject_path: Path, output_path: Path) -> None:
                     commands.append(f"uv add {' '.join(package_names)} --dev")
                 else:
                     commands.append(f"uv add {' '.join(package_names)} --group {group_name}")
-
+
     # Write commands to output file
     with open(output_path, "w") as f:
         for command in commands:
             f.write(command + "\n")
-
+
     print(f"Generated {len(commands)} uv add commands in {output_path}")
 
 
 def extract_package_name(dependency_spec: str) -> str:
     """
     Extract package name from dependency specification.
-
+
     Examples:
         "rich>=14.0.0" -> "rich"
         "requests>=2.32.5" -> "requests"
@@ -69,12 +69,12 @@ def extract_package_name(dependency_spec: str) -> str:
     # Handle extras like "package[extra]>=1.0" first
     if "[" in dependency_spec:
         dependency_spec = dependency_spec.split("[")[0].strip()
-
+
     # Split on common version operators and take the first part
     for operator in [">=", "<=", "==", "!=", ">", "<", "~=", "===", "@"]:
         if operator in dependency_spec:
             return dependency_spec.split(operator)[0].strip()
-
+
     # Return as-is if no version constraint found
     return dependency_spec.strip()
 
@@ -84,8 +84,8 @@ if __name__ == "__main__":
     current_dir = Path.cwd()
     pyproject_file = current_dir / "pyproject.toml"
     output_file = current_dir / "uv_add_commands.txt"
-
+
     if pyproject_file.exists():
         generate_uv_add_commands(pyproject_file, output_file)
     else:
-        print(f"pyproject.toml not found at {pyproject_file}")
+        print(f"pyproject.toml not found at {pyproject_file}")
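All of the upgrade_packages.py changes above appear to be whitespace-only, but the script's behaviour is easy to illustrate; the package names and groups below are invented for the example:

```python
from pathlib import Path
from machineconfig.utils.upgrade_packages import generate_uv_add_commands

# Reads pyproject.toml and writes one `uv add` line per dependency group.
generate_uv_add_commands(pyproject_path=Path("pyproject.toml"), output_path=Path("uv_add_commands.txt"))
# For a hypothetical project, uv_add_commands.txt would end up containing lines such as:
#   uv add rich requests
#   uv add streamlit plotly --group viz
#   uv add pytest mypy --dev
```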