machineconfig 7.53__py3-none-any.whl → 7.69__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of machineconfig might be problematic.
- machineconfig/cluster/sessions_managers/utils/maker.py +21 -9
- machineconfig/jobs/installer/custom/boxes.py +2 -2
- machineconfig/jobs/installer/custom/hx.py +15 -12
- machineconfig/jobs/installer/custom_dev/cloudflare_warp_cli.py +23 -0
- machineconfig/jobs/installer/custom_dev/dubdb_adbc.py +1 -1
- machineconfig/jobs/installer/custom_dev/nerfont_windows_helper.py +1 -1
- machineconfig/jobs/installer/custom_dev/sysabc.py +39 -34
- machineconfig/jobs/installer/custom_dev/wezterm.py +0 -4
- machineconfig/jobs/installer/installer_data.json +103 -35
- machineconfig/jobs/installer/package_groups.py +28 -13
- machineconfig/scripts/__init__.py +0 -4
- machineconfig/scripts/linux/wrap_mcfg +1 -1
- machineconfig/scripts/python/ai/solutions/copilot/instructions/python/dev.instructions.md +3 -0
- machineconfig/scripts/python/croshell.py +22 -17
- machineconfig/scripts/python/devops.py +3 -4
- machineconfig/scripts/python/devops_navigator.py +0 -4
- machineconfig/scripts/python/env_manager/path_manager_tui.py +1 -1
- machineconfig/scripts/python/fire_jobs.py +17 -15
- machineconfig/scripts/python/ftpx.py +13 -11
- machineconfig/scripts/python/helpers/ast_search.py +74 -0
- machineconfig/scripts/python/helpers/repo_rag.py +325 -0
- machineconfig/scripts/python/helpers/symantic_search.py +25 -0
- machineconfig/scripts/python/helpers_cloud/cloud_copy.py +28 -21
- machineconfig/scripts/python/helpers_cloud/cloud_helpers.py +1 -1
- machineconfig/scripts/python/helpers_cloud/cloud_sync.py +8 -7
- machineconfig/scripts/python/helpers_croshell/crosh.py +2 -2
- machineconfig/scripts/python/helpers_devops/cli_config_dotfile.py +22 -13
- machineconfig/scripts/python/helpers_devops/cli_self.py +7 -6
- machineconfig/scripts/python/helpers_devops/cli_share_file.py +2 -2
- machineconfig/scripts/python/helpers_devops/cli_share_server.py +1 -1
- machineconfig/scripts/python/helpers_devops/cli_terminal.py +1 -1
- machineconfig/scripts/python/helpers_devops/cli_utils.py +2 -73
- machineconfig/scripts/python/helpers_devops/devops_backup_retrieve.py +4 -4
- machineconfig/scripts/python/helpers_fire_command/file_wrangler.py +2 -3
- machineconfig/scripts/python/helpers_fire_command/fire_jobs_route_helper.py +3 -4
- machineconfig/scripts/python/helpers_navigator/command_tree.py +50 -18
- machineconfig/scripts/python/helpers_repos/cloud_repo_sync.py +13 -5
- machineconfig/scripts/python/helpers_repos/count_lines_frontend.py +1 -1
- machineconfig/scripts/python/helpers_repos/entrypoint.py +2 -1
- machineconfig/scripts/python/helpers_repos/record.py +2 -1
- machineconfig/scripts/python/helpers_sessions/sessions_multiprocess.py +5 -5
- machineconfig/scripts/python/helpers_utils/download.py +152 -0
- machineconfig/scripts/python/helpers_utils/path.py +4 -2
- machineconfig/scripts/python/interactive.py +11 -14
- machineconfig/scripts/python/{machineconfig.py → mcfg_entry.py} +4 -0
- machineconfig/scripts/python/msearch.py +21 -2
- machineconfig/scripts/python/nw/devops_add_ssh_key.py +21 -5
- machineconfig/scripts/python/nw/ssh_debug_linux.py +7 -7
- machineconfig/scripts/python/nw/ssh_debug_windows.py +4 -4
- machineconfig/scripts/python/nw/wsl_windows_transfer.py +3 -2
- machineconfig/scripts/python/sessions.py +35 -20
- machineconfig/scripts/python/terminal.py +2 -2
- machineconfig/scripts/python/utils.py +12 -10
- machineconfig/scripts/windows/mounts/mount_ssh.ps1 +1 -1
- machineconfig/settings/lf/windows/lfcd.ps1 +1 -1
- machineconfig/settings/shells/pwsh/init.ps1 +1 -0
- machineconfig/settings/shells/wezterm/wezterm.lua +2 -0
- machineconfig/settings/shells/zsh/init.sh +0 -7
- machineconfig/settings/yazi/shell/yazi_cd.ps1 +29 -5
- machineconfig/setup_linux/web_shortcuts/interactive.sh +12 -11
- machineconfig/setup_windows/uv.ps1 +8 -1
- machineconfig/setup_windows/web_shortcuts/interactive.ps1 +12 -11
- machineconfig/setup_windows/web_shortcuts/quick_init.ps1 +4 -2
- machineconfig/utils/accessories.py +7 -4
- machineconfig/utils/code.py +6 -4
- machineconfig/utils/files/headers.py +2 -2
- machineconfig/utils/installer_utils/install_from_url.py +180 -0
- machineconfig/utils/installer_utils/installer_class.py +56 -46
- machineconfig/utils/installer_utils/{installer.py → installer_cli.py} +71 -65
- machineconfig/utils/{installer.py → installer_utils/installer_runner.py} +1 -25
- machineconfig/utils/meta.py +28 -15
- machineconfig/utils/options.py +4 -4
- machineconfig/utils/path_extended.py +40 -19
- machineconfig/utils/path_helper.py +33 -31
- machineconfig/utils/schemas/layouts/layout_types.py +1 -1
- machineconfig/utils/ssh.py +330 -99
- machineconfig/utils/ve.py +11 -4
- machineconfig-7.69.dist-info/METADATA +124 -0
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/RECORD +85 -83
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/entry_points.txt +2 -2
- machineconfig/jobs/installer/linux_scripts/pgsql.sh +0 -41
- machineconfig/scripts/python/explore.py +0 -49
- machineconfig/scripts/python/nw/add_ssh_key.py +0 -148
- machineconfig/settings/lf/linux/exe/fzf_nano.sh +0 -16
- machineconfig-7.53.dist-info/METADATA +0 -94
- /machineconfig/jobs/installer/linux_scripts/{warp-cli.sh → cloudflare_warp_cli.sh} +0 -0
- /machineconfig/scripts/{Restore-ThunderbirdProfile.ps1 → windows/mounts/Restore-ThunderbirdProfile.ps1} +0 -0
- /machineconfig/utils/installer_utils/{installer_abc.py → installer_locator_utils.py} +0 -0
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/WHEEL +0 -0
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/top_level.txt +0 -0
@@ -2,78 +2,6 @@
 
 import typer
 from typing import Annotated, Optional
-from pathlib import Path
-
-def download(
-    url: Annotated[Optional[str], typer.Argument(..., help="The URL to download the file from.")] = None,
-    decompress: Annotated[bool, typer.Option("--decompress", "-d", help="Decompress the file if it's an archive.")] = False,
-    output: Annotated[Optional[str], typer.Option("--output", "-o", help="The output file path.")] = None,
-) -> None:
-    if url is None:
-        typer.echo("❌ Error: URL is required.", err=True)
-        raise typer.Exit(code=1)
-    typer.echo(f"📥 Downloading from: {url}")
-    download_path = Path(output) if output else Path(url.split("/")[-1])
-    import requests
-    import subprocess
-    try:
-        response = requests.get(url, allow_redirects=True, stream=True, timeout=60)
-        response.raise_for_status()
-
-        total_size = int(response.headers.get('content-length', 0))
-
-        with open(download_path, 'wb') as f:
-            if total_size == 0:
-                f.write(response.content)
-            else:
-                downloaded = 0
-                chunk_size = 8192
-                for chunk in response.iter_content(chunk_size=chunk_size):
-                    if chunk:
-                        f.write(chunk)
-                        downloaded += len(chunk)
-                        progress = (downloaded / total_size) * 100
-                        typer.echo(f"\r⏬ Progress: {progress:.1f}% ({downloaded}/{total_size} bytes)", nl=False)
-                typer.echo()
-
-        typer.echo(f"✅ Downloaded to: {download_path}")
-    except requests.exceptions.RequestException as e:
-        typer.echo(f"❌ Download failed: {e}", err=True)
-        raise typer.Exit(code=1)
-    except OSError as e:
-        typer.echo(f"❌ File write error: {e}", err=True)
-        raise typer.Exit(code=1)
-
-    if decompress:
-        typer.echo(f"📦 Decompressing: {download_path}")
-
-        base_name = download_path.name
-        parts = base_name.split('.')
-        base_name = parts[0] if parts else download_path.stem
-
-        extract_dir = download_path.parent / base_name
-        extract_dir.mkdir(parents=True, exist_ok=True)
-
-        try:
-            subprocess.run(
-                ["ouch", "decompress", str(download_path), "--dir", str(extract_dir)],
-                check=True,
-                capture_output=True,
-                text=True
-            )
-            typer.echo(f"✅ Decompressed to: {extract_dir}")
-
-            if download_path.exists():
-                download_path.unlink()
-                typer.echo(f"🗑️ Removed archive: {download_path}")
-
-        except subprocess.CalledProcessError as e:
-            typer.echo(f"❌ Decompression failed: {e.stderr}", err=True)
-            raise typer.Exit(code=1)
-        except FileNotFoundError:
-            typer.echo("❌ Error: ouch command not found. Please install ouch.", err=True)
-            typer.echo("💡 Install with: cargo install ouch", err=True)
-            raise typer.Exit(code=1)
 
 
 def merge_pdfs(
@@ -108,7 +36,8 @@ def merge_pdfs(
     writer.write(output_path)
     print(f"✅ Merged PDF saved to: {output_path}")
    from machineconfig.utils.meta import lambda_to_python_script
-    code = lambda_to_python_script(lambda : merge_pdfs_internal(pdfs=pdfs, output=output, compress=compress),
+    code = lambda_to_python_script(lambda : merge_pdfs_internal(pdfs=pdfs, output=output, compress=compress),
+                                   in_global=True, import_module=False)
     from machineconfig.utils.code import run_shell_script, get_uv_command_executing_python_script
     uv_command, _py_file = get_uv_command_executing_python_script(python_script=code, uv_with=["pypdf"], uv_project_dir=None)
     run_shell_script(uv_command)
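Note: the pattern in this hunk (serialize a closure into a standalone script, then execute it through uv) recurs in several hunks below, including cloud_repo_sync.py. A hedged, minimal sketch of that call chain, copied from the diff; `work` is an illustrative stand-in for merge_pdfs_internal:

# Hedged sketch of the serialize-and-run-under-uv pattern shown above; not a file from the package.
from machineconfig.utils.meta import lambda_to_python_script
from machineconfig.utils.code import run_shell_script, get_uv_command_executing_python_script

def work(pdfs: list[str], output: str, compress: bool) -> None:
    ...  # placeholder body; the real logic lives in the package

def run_in_isolated_env(pdfs: list[str], output: str, compress: bool) -> None:
    # Turn the closure into standalone Python source (kwargs mirror the new call in the hunk).
    code = lambda_to_python_script(lambda: work(pdfs=pdfs, output=output, compress=compress),
                                   in_global=True, import_module=False)
    # Wrap that source in a `uv run --with pypdf ...` command and execute it.
    uv_command, _py_file = get_uv_command_executing_python_script(python_script=code, uv_with=["pypdf"], uv_project_dir=None)
    run_shell_script(uv_command)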
@@ -2,7 +2,6 @@
 
 # import subprocess
 from machineconfig.utils.io import read_ini
-from machineconfig.utils.path_extended import PathExtended
 from machineconfig.utils.source_of_truth import LIBRARY_ROOT, DEFAULTS_PATH
 from machineconfig.utils.code import print_code
 from machineconfig.utils.options import choose_cloud_interactively, choose_from_options
@@ -11,6 +10,7 @@ from platform import system
 from typing import Any, Literal, Optional
 from rich.console import Console
 from rich.panel import Panel
+from pathlib import Path
 import tomllib
 
 
@@ -56,13 +56,13 @@ def main_backup_retrieve(direction: OPTIONS, which: Optional[str], cloud: Option
         flags += "e" if item["encrypt"] == "True" else ""
         flags += "r" if item["rel2home"] == "True" else ""
         flags += "o" if system().lower() in item_name else ""
-        console.print(Panel(f"📦 PROCESSING: {item_name}\n📂 Path: {
+        console.print(Panel(f"📦 PROCESSING: {item_name}\n📂 Path: {Path(item['path']).as_posix()}\n🏳️ Flags: {flags or 'None'}", title=f"[bold blue]Processing Item: {item_name}[/bold blue]", border_style="blue"))
         if flags:
             flags = "-" + flags
         if direction == "BACKUP":
-            program += f"""\ncloud_copy "{
+            program += f"""\ncloud_copy "{Path(item["path"]).as_posix()}" $cloud {flags}\n"""
         elif direction == "RETRIEVE":
-            program += f"""\ncloud_copy $cloud "{
+            program += f"""\ncloud_copy $cloud "{Path(item["path"]).as_posix()}" {flags}\n"""
         else:
             console.print(Panel('❌ ERROR: INVALID DIRECTION\n⚠️ Direction must be either "BACKUP" or "RETRIEVE"', title="[bold red]Error: Invalid Direction[/bold red]", border_style="red"))
             raise RuntimeError(f"Unknown direction: {direction}")
@@ -1,6 +1,6 @@
 from typing import Optional
 import os
-from
+from pathlib import Path
 import platform
 
 
@@ -12,7 +12,7 @@ def parse_pyfile(file_path: str):
     func_args: list[list[args_spec]] = [[]] # this firt prepopulated dict is for the option 'RUN AS MAIN' which has no args
     import ast
 
-    parsed_ast = ast.parse(
+    parsed_ast = ast.parse(Path(file_path).read_text(encoding="utf-8"))
     functions = [node for node in ast.walk(parsed_ast) if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef))]
     module__doc__ = ast.get_docstring(parsed_ast)
     main_option = f"RUN AS MAIN -- {module__doc__ if module__doc__ is not None else 'NoDocs'}"
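Note: the hunk above leans entirely on the standard-library ast module; a minimal, self-contained sketch of the same idea (the file name is illustrative):

# Sketch of parsing a Python file and listing its functions with `ast`, mirroring the calls above.
import ast
from pathlib import Path

source = Path("example_module.py").read_text(encoding="utf-8")  # hypothetical file
parsed_ast = ast.parse(source)
functions = [node for node in ast.walk(parsed_ast) if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef))]
print(ast.get_docstring(parsed_ast) or "NoDocs")
for func in functions:
    print(func.name, [arg.arg for arg in func.args.args])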
@@ -102,7 +102,6 @@ def wrap_import_in_try_except(import_line: str, pyfile: str, repo_root: Optional
     print(fr"❌ Failed to import `{pyfile}` as a module: {ex} ")
     print("⚠️ Attempting import with ad-hoc `$PATH` manipulation. DO NOT pickle any objects in this session as correct deserialization cannot be guaranteed.")
     import sys
-    from pathlib import Path
     sys.path.append(str(Path(pyfile).parent))
     if repo_root is not None:
         sys.path.append(repo_root)
@@ -5,11 +5,10 @@ from typing import Optional
 import tomllib
 from pathlib import Path
 from machineconfig.utils.accessories import randstr
-from machineconfig.utils.path_extended import PathExtended
 from machineconfig.utils.options import choose_from_options
 
 
-def choose_function_or_lines(choice_file:
+def choose_function_or_lines(choice_file: Path, kwargs_dict: dict[str, object]) -> tuple[Optional[str], Path, dict[str, object]]:
     """
     Choose a function to run from a Python file or lines from a shell script.
 
@@ -46,7 +45,7 @@ def choose_function_or_lines(choice_file: PathExtended, kwargs_dict: dict[str, o
                 continue
             options.append(line)
         chosen_lines = choose_from_options(msg="Choose a line to run", options=options, fzf=True, multi=True)
-        choice_file =
+        choice_file = Path.home().joinpath(f"tmp_results/tmp_scripts/shell/{randstr(10)}.sh")
         choice_file.parent.mkdir(parents=True, exist_ok=True)
         choice_file.write_text("\n".join(chosen_lines), encoding="utf-8")
         choice_function = None
@@ -82,7 +81,7 @@ def get_command_streamlit(choice_file: Path, environment: str, repo_root: Option
     port = config["server"]["port"]
     secrets_path = toml_path.with_name("secrets.toml")
     if repo_root is not None:
-        secrets_template_path = Path.home().joinpath(f"dotfiles/creds/streamlit/{
+        secrets_template_path = Path.home().joinpath(f"dotfiles/creds/streamlit/{Path(repo_root).name}/{choice_file.name}/secrets.toml")
         if environment != "" and not secrets_path.exists() and secrets_template_path.exists():
             secrets_template = tomllib.loads(secrets_template_path.read_text(encoding="utf-8"))
             if environment == "ip":
@@ -188,15 +188,7 @@ class CommandTree(Tree[CommandInfo]):
             description="Configure your shell profile",
             command="devops config shell",
             parent="config",
-            help_text="devops config shell <
-        ))
-
-        config_node.add("🔗 path - Navigate PATH variable", data=CommandInfo(
-            name="path",
-            description="Navigate PATH variable with TUI",
-            command="devops config path",
-            parent="config",
-            help_text="devops config path"
+            help_text="devops config shell --which <default|nushell>"
         ))
 
         config_node.add("🔗 starship-theme - Select starship theme", data=CommandInfo(
@@ -560,7 +552,23 @@ class CommandTree(Tree[CommandInfo]):
             description="Choose a process to kill interactively",
             command="utils kill-process",
             parent="utils",
-            help_text="utils kill-process"
+            help_text="utils kill-process --interactive"
+        ))
+
+        utils_node.add("📚 path - Navigate PATH variable", data=CommandInfo(
+            name="path",
+            description="Navigate PATH variable with TUI",
+            command="utils path",
+            parent="utils",
+            help_text="utils path"
+        ))
+
+        utils_node.add("⬆️ upgrade-packages - Upgrade dependencies", data=CommandInfo(
+            name="upgrade-packages",
+            description="Upgrade project dependencies",
+            command="utils upgrade-packages",
+            parent="utils",
+            help_text="utils upgrade-packages"
         ))
 
         utils_node.add("⬇️ download - Download file", data=CommandInfo(
@@ -571,18 +579,42 @@ class CommandTree(Tree[CommandInfo]):
             help_text="utils download <url> --destination <path> --decompress"
         ))
 
-        utils_node.add("📄 merge-pdfs - Merge PDF files", data=CommandInfo(
-            name="merge-pdfs",
-            description="Merge two PDF files into one",
-            command="utils merge-pdfs",
-            parent="utils",
-            help_text="utils merge-pdfs <file1> <file2> --output <file>"
-        ))
-
         utils_node.add("🖥️ get-machine-specs - Get machine specifications", data=CommandInfo(
             name="get-machine-specs",
             description="Get machine specifications",
             command="utils get-machine-specs",
             parent="utils",
             help_text="utils get-machine-specs"
+        ))
+
+        utils_node.add("🚀 init-project - Initialize project", data=CommandInfo(
+            name="init-project",
+            description="Initialize a project with a uv virtual environment and install dev packages",
+            command="utils init-project",
+            parent="utils",
+            help_text="utils init-project"
+        ))
+
+        utils_node.add("✏️ edit - Open file in editor", data=CommandInfo(
+            name="edit",
+            description="Open a file in the default editor",
+            command="utils edit",
+            parent="utils",
+            help_text="utils edit <file>"
+        ))
+
+        utils_node.add("📄 pdf-merge - Merge PDF files", data=CommandInfo(
+            name="pdf-merge",
+            description="Merge two PDF files into one",
+            command="utils pdf-merge",
+            parent="utils",
+            help_text="utils pdf-merge <file1> <file2> --output <file>"
+        ))
+
+        utils_node.add("� pdf-compress - Compress PDF file", data=CommandInfo(
+            name="pdf-compress",
+            description="Compress a PDF file",
+            command="utils pdf-compress",
+            parent="utils",
+            help_text="utils pdf-compress <file> --output <file>"
         ))
@@ -54,10 +54,16 @@ def main(
     else:
         cloud_resolved = cloud
     repo_local_root = PathExtended.cwd() if repo is None else PathExtended(repo).expanduser().absolute()
-
+    try:
+        repo_local_obj = git.Repo(repo_local_root, search_parent_directories=True)
+    except git.InvalidGitRepositoryError:
+        typer.echo(f"[red]Error:[/] The specified path '{repo_local_root}' is not a valid git repository.")
+        typer.Exit(code=1)
+        return ""
     repo_local_root = PathExtended(repo_local_obj.working_dir) # cwd might have been in a sub directory of repo_root, so its better to redefine it.
+    local_relative_home = PathExtended(repo_local_root.expanduser().absolute().relative_to(Path.home()))
     PathExtended(CONFIG_ROOT).joinpath("remote").mkdir(parents=True, exist_ok=True)
-    repo_remote_root = PathExtended(CONFIG_ROOT).joinpath("remote",
+    repo_remote_root = PathExtended(CONFIG_ROOT).joinpath("remote", local_relative_home)
     repo_remote_root.delete(sure=True)
     try:
         console.print(Panel("📥 DOWNLOADING REMOTE REPOSITORY", title_align="left", border_style="blue"))
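Note: the guard added above relies on GitPython's parent-directory search; a minimal sketch of that behaviour outside the package (the starting path is illustrative):

# Sketch of the GitPython pattern used in the hunk above.
import git

def find_repo_root(start: str) -> str | None:
    try:
        repo = git.Repo(start, search_parent_directories=True)  # walks upward until a .git directory is found
    except git.InvalidGitRepositoryError:
        return None  # `start` is not inside a git repository
    return repo.working_tree_dir  # checkout root, even if `start` was a subdirectory

print(find_repo_root("."))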
@@ -99,7 +105,7 @@ git pull originEnc master
         uv_project_dir = f"""{str(Path.home().joinpath("code/machineconfig"))}"""
         uv_with = None
     else:
-        uv_with = ["machineconfig>=7.
+        uv_with = ["machineconfig>=7.69"]
         uv_project_dir = None
 
     import tempfile
@@ -130,7 +136,8 @@ git pull originEnc master
     def func2(remote_repo: str, local_repo: str, cloud: str):
         from machineconfig.scripts.python.helpers_repos.sync import delete_remote_repo_copy_and_push_local
         delete_remote_repo_copy_and_push_local(remote_repo=remote_repo, local_repo=local_repo, cloud=cloud)
-    program_1_py = lambda_to_python_script(lambda: func2(remote_repo=str(repo_remote_root), local_repo=str(repo_local_root), cloud=str(cloud_resolved)),
+    program_1_py = lambda_to_python_script(lambda: func2(remote_repo=str(repo_remote_root), local_repo=str(repo_local_root), cloud=str(cloud_resolved)),
+                                           in_global=True, import_module=False)
     program1, _pyfile1 = get_uv_command_executing_python_script(python_script=program_1_py, uv_with=uv_with, uv_project_dir=uv_project_dir)
     # ================================================================================
     option2 = "Delete local repo and replace it with remote copy:"
@@ -155,7 +162,8 @@ sudo chmod +x $HOME/dotfiles/scripts/linux -R
     inspect_repos(repo_local_root=repo_local_root, repo_remote_root=repo_remote_root)
     # program_3_py = function_to_script(func=func, call_with_kwargs={"repo_local_root": str(repo_local_root), "repo_remote_root": str(repo_remote_root)})
     # shell_file_3 = get_shell_file_executing_python_script(python_script=program_3_py, ve_path=None, executable=executable)
-    program_3_py = lambda_to_python_script(lambda: func(repo_local_root=str(repo_local_root), repo_remote_root=str(repo_remote_root)),
+    program_3_py = lambda_to_python_script(lambda: func(repo_local_root=str(repo_local_root), repo_remote_root=str(repo_remote_root)),
+                                           in_global=True, import_module=False)
     program3, _pyfile3 = get_uv_command_executing_python_script(python_script=program_3_py, uv_with=uv_with, uv_project_dir=uv_project_dir)
     # ================================================================================
 
@@ -8,7 +8,7 @@ def analyze_repo_development(repo_path: Annotated[str, typer.Argument(..., help=
     from pathlib import Path
     count_lines_path = Path(count_lines.__file__)
     # --project $HOME/code/ machineconfig --group plot
-    cmd = f"""uv run --python 3.14 --with "machineconfig[plot]>=7.
+    cmd = f"""uv run --python 3.14 --with "machineconfig[plot]>=7.69" {count_lines_path} analyze-over-time {repo_path}"""
     from machineconfig.utils.code import run_shell_script
     run_shell_script(cmd)
 
@@ -38,7 +38,8 @@ def resolve_spec_path(directory: Optional[str], cloud: Optional[str]) -> Path:
     repos_root = resolve_directory(directory)
     from machineconfig.utils.path_extended import PathExtended
     if not repos_root.exists() or repos_root.name != "repos.json":
-
+        relative_repos_root = PathExtended(repos_root).expanduser().absolute().relative_to(Path.home())
+        candidate = Path(CONFIG_ROOT).joinpath("repos").joinpath(relative_repos_root).joinpath("repos.json")
         repos_root = candidate
     if not repos_root.exists():
         cloud_name: Optional[str]
@@ -242,7 +242,8 @@ def main_record(repos_root: Path):
     tree_structure = build_tree_structure(repo_records, repos_root)
     print(tree_structure)
 
-
+    relative_repos_root = PathExtended(repos_root).expanduser().absolute().relative_to(Path.home())
+    save_path = CONFIG_ROOT.joinpath("repos").joinpath(relative_repos_root).joinpath("repos.json")
     save_json(obj=res, path=save_path, indent=4)
     pprint(f"📁 Result saved at {PathExtended(save_path)}")
     print(">>>>>>>>> Finished Recording")
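Note: both hunks above map a path under the home directory into a mirrored tree under the config root; a standalone sketch of the idea (CONFIG_ROOT and the repo path are illustrative stand-ins, not the package's values):

# Sketch of the home-relative mapping used in the two hunks above; names are illustrative.
from pathlib import Path

CONFIG_ROOT = Path.home() / ".config" / "machineconfig"    # assumption: an example config root
repos_root = Path("~/code/myrepos").expanduser().absolute()
relative_repos_root = repos_root.relative_to(Path.home())  # raises ValueError if not under $HOME
save_path = CONFIG_ROOT / "repos" / relative_repos_root / "repos.json"
print(save_path)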
@@ -1,7 +1,6 @@
 
 
 from typing import Optional, Annotated
-from pathlib import Path
 import typer
 
 
@@ -13,20 +12,21 @@ def create_from_function(
     from machineconfig.utils.ve import get_ve_activate_line, get_ve_path_and_ipython_profile
     from machineconfig.utils.options import choose_from_options
     from machineconfig.utils.path_helper import match_file_name, sanitize_path
-    from machineconfig.utils.path_extended import PathExtended
     from machineconfig.utils.accessories import get_repo_root
+    from pathlib import Path
+
 
     path_obj = sanitize_path(path)
     if not path_obj.exists():
         suffixes = {".py"}
-        choice_file = match_file_name(sub_string=path, search_root=
+        choice_file = match_file_name(sub_string=path, search_root=Path.cwd(), suffixes=suffixes)
     elif path_obj.is_dir():
         from machineconfig.utils.path_helper import search_for_files_of_interest
         print(f"🔍 Searching recursively for Python, PowerShell and Shell scripts in directory `{path_obj}`")
         files = search_for_files_of_interest(path_obj, suffixes={".py", ".sh", ".ps1"})
         print(f"🔍 Got #{len(files)} results.")
         choice_file = choose_from_options(multi=False, options=files, fzf=True, msg="Choose one option")
-        choice_file =
+        choice_file = Path(choice_file)
     else:
         choice_file = path_obj
 
@@ -52,7 +52,7 @@ def create_from_function(
     from machineconfig.utils.schemas.layouts.layout_types import LayoutConfig
     layout: LayoutConfig = {"layoutName": "fireNprocess", "layoutTabs": []}
     for an_arg in range(num_process):
-        layout["layoutTabs"].append({"tabName": f"tab{an_arg}", "startDir": str(
+        layout["layoutTabs"].append({"tabName": f"tab{an_arg}", "startDir": str(Path.cwd()), "command": f"uv run -m fire {choice_file} {choice_function} --idx={an_arg} --idx_max={num_process}"})
     print(layout)
     run_zellij_layout(layout_config=layout)
 
@@ -0,0 +1,152 @@
+
+
+from typing import Annotated, Optional
+import typer
+from pathlib import Path
+
+
+def download(
+    url: Annotated[Optional[str], typer.Argument(..., help="The URL to download the file from.")] = None,
+    decompress: Annotated[bool, typer.Option(..., "--decompress", "-d", help="Decompress the file if it's an archive.")] = False,
+    output: Annotated[Optional[str], typer.Option("--output", "-o", help="The output file path.")] = None,
+    output_dir: Annotated[Optional[str], typer.Option("--output-dir", help="Directory to place the downloaded file in.")] = None,
+) -> Optional["Path"]:
+
+    import subprocess
+    from urllib.parse import parse_qs, unquote, urlparse
+    from requests import Response
+    import requests
+    from pathlib import Path
+    if url is None:
+        typer.echo("❌ Error: URL is required.", err=True)
+        return None
+    if output is not None and output_dir is not None:
+        typer.echo("❌ Error: --output and --output-dir cannot be used together.", err=True)
+        return None
+    typer.echo(f"📥 Downloading from: {url}")
+
+    def _sanitize_candidate_filename(name: str) -> Optional[str]:
+        candidate = Path(name).name.strip()
+        if not candidate or candidate in {".", ".."}:
+            return None
+        return candidate
+
+    def _filename_from_content_disposition(header_value: Optional[str]) -> Optional[str]:
+        if header_value is None:
+            return None
+        parts = [segment.strip() for segment in header_value.split(";")]
+        for part in parts:
+            lower = part.lower()
+            if lower.startswith("filename*="):
+                value = part.split("=", 1)[1]
+                value = value.strip().strip('"')
+                if "''" in value:
+                    value = value.split("''", 1)[1]
+                decoded = unquote(value)
+                sanitized = _sanitize_candidate_filename(decoded)
+                if sanitized is not None:
+                    return sanitized
+            if lower.startswith("filename="):
+                value = part.split("=", 1)[1].strip().strip('"')
+                decoded = unquote(value)
+                sanitized = _sanitize_candidate_filename(decoded)
+                if sanitized is not None:
+                    return sanitized
+        return None
+
+    def _filename_from_url(source_url: str) -> Optional[str]:
+        parsed = urlparse(source_url)
+        url_candidate = _sanitize_candidate_filename(unquote(Path(parsed.path).name))
+        if url_candidate is not None:
+            return url_candidate
+        query_params = parse_qs(parsed.query, keep_blank_values=True)
+        for key, values in query_params.items():
+            lower_key = key.lower()
+            if "name" in lower_key or "file" in lower_key:
+                for value in values:
+                    sanitized = _sanitize_candidate_filename(unquote(value))
+                    if sanitized is not None:
+                        return sanitized
+        return None
+
+    def _resolve_download_path(request_url: str, response: Response, requested_output: Optional[str], requested_output_dir: Optional[str]) -> Path:
+        if requested_output is not None:
+            return Path(requested_output)
+        header_candidate = _filename_from_content_disposition(response.headers.get("content-disposition"))
+        if header_candidate is None:
+            header_candidate = _filename_from_url(response.url)
+        if header_candidate is None:
+            header_candidate = _filename_from_url(request_url)
+        if header_candidate is None:
+            header_candidate = "downloaded_file"
+        if requested_output_dir is not None:
+            return Path(requested_output_dir) / header_candidate
+        return Path(header_candidate)
+
+    try:
+        with requests.get(url, allow_redirects=True, stream=True, timeout=60) as response:
+            response.raise_for_status()
+            download_path = _resolve_download_path(url, response, output, output_dir)
+            download_path.parent.mkdir(parents=True, exist_ok=True)
+            total_size_header = response.headers.get("content-length", "0")
+            try:
+                total_size = int(total_size_header)
+            except (TypeError, ValueError):
+                total_size = 0
+            if total_size <= 0:
+                with open(download_path, "wb") as file_handle:
+                    file_handle.write(response.content)
+            else:
+                downloaded = 0
+                chunk_size = 8192 * 4
+                with open(download_path, "wb") as file_handle:
+                    for chunk in response.iter_content(chunk_size=chunk_size):
+                        if not chunk:
+                            continue
+                        file_handle.write(chunk)
+                        downloaded += len(chunk)
+                        progress = (downloaded / total_size) * 100
+                        typer.echo(f"\r⏬ Progress: {progress:.1f}% ({downloaded}/{total_size} bytes)", nl=False)
+                typer.echo()
+    except requests.exceptions.RequestException as exception:
+        typer.echo(f"❌ Download failed: {exception}", err=True)
+        return None
+    except OSError as exception:
+        typer.echo(f"❌ File write error: {exception}", err=True)
+        return None
+
+    typer.echo(f"✅ Downloaded to: {download_path}")
+    result_path: Path = download_path
+
+    if decompress:
+        typer.echo(f"📦 Decompressing: {download_path}")
+        base_name = download_path.stem
+        if base_name in {"", ".", ".."}:
+            base_name = "extracted"
+        extract_dir = download_path.parent / base_name
+        extract_dir.mkdir(parents=True, exist_ok=True)
+        try:
+            subprocess.run(
+                ["ouch", "decompress", str(download_path), "--dir", str(extract_dir)],
+                check=True,
+                capture_output=True,
+                text=True,
+            )
+            typer.echo(f"✅ Decompressed to: {extract_dir}")
+            if download_path.exists():
+                download_path.unlink()
+                typer.echo(f"🗑️ Removed archive: {download_path}")
+            result_path = extract_dir
+        except subprocess.CalledProcessError as exception:
+            typer.echo(f"❌ Decompression failed: {exception.stderr}", err=True)
+            return None
+        except FileNotFoundError:
+            typer.echo("❌ Error: ouch command not found. Please install ouch.", err=True)
+            typer.echo("💡 Install with: cargo install ouch", err=True)
+            return None
+
+    return result_path.resolve()
+
+
+if __name__ == "__main__":
+    pass
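Note: a hedged usage sketch of the new helper added above (the URL is illustrative; the import path follows the new module location shown in the file list):

# Programmatic use of the new download helper; it returns the file (or extraction directory) Path, or None on failure.
from machineconfig.scripts.python.helpers_utils.download import download

result = download(url="https://example.com/archive.tar.gz", decompress=True, output_dir="/tmp/downloads")
if result is not None:
    print(f"Saved under: {result}")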
@@ -4,7 +4,6 @@
 import typer
 
 from typing import Optional
-from pathlib import Path
 from typing import Annotated, Literal, TypedDict
 
 
@@ -17,7 +16,7 @@ def path():
     uv_with = ["textual"]
     uv_project_dir = None
     if not Path.home().joinpath("code/machineconfig").exists():
-        uv_with.append("machineconfig>=7.
+        uv_with.append("machineconfig>=7.69")
     else:
         uv_project_dir = str(Path.home().joinpath("code/machineconfig"))
     run_shell_script(get_uv_command_executing_python_script(python_script=path.read_text(encoding="utf-8"), uv_with=uv_with, uv_project_dir=uv_project_dir)[0])
@@ -25,6 +24,7 @@ def path():
 
 def init_project(python: Annotated[Literal["3.13", "3.14"], typer.Option("--python", "-p", help="Python version for the uv virtual environment.")]= "3.13") -> None:
     _ = python
+    from pathlib import Path
     repo_root = Path.cwd()
     if not (repo_root / "pyproject.toml").exists():
         typer.echo("❌ Error: pyproject.toml not found.", err=True)
@@ -54,6 +54,7 @@ uv add --group plot \
 
 
 def edit_file_with_hx(path: Annotated[Optional[str], typer.Argument(..., help="The root directory of the project to edit, or a file path.")] = None) -> None:
+    from pathlib import Path
     if path is None:
         root_path = Path.cwd()
         print(f"No path provided. Using current working directory: {root_path}")
@@ -90,6 +91,7 @@ def get_machine_specs() -> MachineSpecs:
     UV_RUN_CMD = "$HOME/.local/bin/uv run" if platform.system() != "Windows" else """& "$env:USERPROFILE/.local/bin/uv" run"""
     command = f"""{UV_RUN_CMD} --with distro python -c "import distro; print(distro.name(pretty=True))" """
     import subprocess
+    from pathlib import Path
     distro = subprocess.run(command, shell=True, capture_output=True, text=True).stdout.strip()
     specs: MachineSpecs = {
         "system": platform.system(),