machineconfig 6.86-py3-none-any.whl → 6.88-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of machineconfig might be problematic.

Files changed (31)
  1. machineconfig/jobs/installer/installer_data.json +18 -1
  2. machineconfig/scripts/linux/mcfgs +44 -35
  3. machineconfig/scripts/python/agents.py +2 -15
  4. machineconfig/scripts/python/env_manager/path_manager_tui.py +1 -1
  5. machineconfig/scripts/python/fire_jobs.py +7 -11
  6. machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_crush.py +6 -7
  7. machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_cursor_agents.py +3 -4
  8. machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_gemini.py +9 -10
  9. machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_qwen.py +4 -5
  10. machineconfig/scripts/python/helpers_agents/fire_agents_help_launch.py +9 -6
  11. machineconfig/scripts/python/helpers_agents/fire_agents_helper_types.py +4 -14
  12. machineconfig/scripts/python/helpers_devops/cli_config.py +1 -1
  13. machineconfig/scripts/python/helpers_devops/cli_self.py +19 -12
  14. machineconfig/scripts/python/helpers_fire_command/file_wrangler.py +40 -1
  15. machineconfig/scripts/python/helpers_repos/cloud_repo_sync.py +1 -1
  16. machineconfig/scripts/python/nw/mount_nfs +1 -1
  17. machineconfig/scripts/windows/mcfgs.ps1 +50 -13
  18. machineconfig/scripts/windows/mounts/mount_ssh.ps1 +1 -1
  19. machineconfig/settings/lf/windows/lfrc +14 -16
  20. machineconfig/settings/shells/bash/init.sh +11 -10
  21. machineconfig/settings/shells/pwsh/init.ps1 +38 -16
  22. machineconfig/setup_linux/web_shortcuts/interactive.sh +1 -1
  23. machineconfig/setup_mac/apps_gui.sh +248 -0
  24. machineconfig/setup_windows/web_shortcuts/interactive.ps1 +1 -1
  25. machineconfig/utils/code.py +18 -10
  26. machineconfig/utils/ssh.py +1 -1
  27. {machineconfig-6.86.dist-info → machineconfig-6.88.dist-info}/METADATA +1 -1
  28. {machineconfig-6.86.dist-info → machineconfig-6.88.dist-info}/RECORD +31 -30
  29. {machineconfig-6.86.dist-info → machineconfig-6.88.dist-info}/WHEEL +0 -0
  30. {machineconfig-6.86.dist-info → machineconfig-6.88.dist-info}/entry_points.txt +0 -0
  31. {machineconfig-6.86.dist-info → machineconfig-6.88.dist-info}/top_level.txt +0 -0

machineconfig/jobs/installer/installer_data.json
@@ -208,7 +208,7 @@
 {
   "appName": "croc",
   "repoURL": "https://github.com/schollz/croc",
-  "doc": "🦎 Easily and securely send things from one computer to another",
+  "doc": "🦎 Easily and securely send and share things from one computer to another",
   "fileNamePattern": {
     "amd64": {
       "linux": "croc_{version}_Linux-64bit.tar.gz",
@@ -273,6 +273,23 @@
     }
   }
 },
+{
+  "appName": "asciinema",
+  "repoURL": "https://github.com/asciinema/asciinema",
+  "doc": "🎥 Terminal session recorder",
+  "fileNamePattern": {
+    "amd64": {
+      "linux": "asciinema-x86_64-unknown-linux-gnu",
+      "windows": null,
+      "macos": "asciinema-x86_64-apple-darwin"
+    },
+    "arm64": {
+      "linux": null,
+      "windows": null,
+      "macos": null
+    }
+  }
+},
 {
   "appName": "lolcatjs",
   "repoURL": "CMD",

machineconfig/scripts/linux/mcfgs
@@ -1,39 +1,48 @@
 #!/usr/bin/env bash
-set -euo pipefail
-
-RANDOM_NAME=$(date +%s%N | sha256sum | head -c 16)
-OP_DIR="$HOME/tmp_results/tmp_scripts/machineconfig"
-OP_PROGRAM_PATH="$OP_DIR/${RANDOM_NAME}.sh"
-export OP_PROGRAM_PATH
-
-# ANSI color/style codes
-BOLD="\033[1m"
-RESET="\033[0m"
-GREEN="\033[32m"
-YELLOW="\033[33m"
-BLUE="\033[34m"
-RED="\033[31m"
-
-timestamp=$(date -u +"%Y-%m-%d %H:%M:%SZ")
-
-printf "%b\n" "${BOLD}${BLUE}🛠️ machineconfig — running mcfg${RESET}"
-printf "%b\n" "${BLUE}Timestamp:${RESET} ${timestamp}"
-printf "%b\n" "${BLUE}Op program path:${RESET} ${OP_PROGRAM_PATH}"
-
-# Forward arguments to the mcfg command
-mcfg "$@"
-
-if [[ -f "$OP_PROGRAM_PATH" ]]; then
-    printf "%b\n" "${GREEN}✅ Found op program:${RESET} ${OP_PROGRAM_PATH}"
-    bat --style=plain --paging=never "$OP_PROGRAM_PATH"
-    printf "%b\n" "${GREEN}▶ Running...${RESET}"
-    . "$OP_PROGRAM_PATH"
-    status=$?
-    if [[ $status -eq 0 ]]; then
-        printf "%b\n" "${GREEN}✅ Completed successfully (exit ${status})${RESET}"
+
+
+wrap_in_op_program() {
+    # set -euo pipefail
+
+    # ANSI color/style codes
+    BOLD="\033[1m"
+    RESET="\033[0m"
+    GREEN="\033[32m"
+    YELLOW="\033[33m"
+    BLUE="\033[34m"
+    RED="\033[31m"
+
+    local command="$1"
+    shift
+    local RANDOM_NAME=$(date +%s%N | sha256sum | head -c 16)
+    local OP_DIR="$HOME/tmp_results/tmp_scripts/machineconfig"
+    local OP_PROGRAM_PATH="$OP_DIR/${RANDOM_NAME}.sh"
+    export OP_PROGRAM_PATH
+    local timestamp=$(date -u +"%Y-%m-%d %H:%M:%SZ")
+    printf "%b\n" "${BOLD}${BLUE}🛠️ machineconfig — running ${command}${RESET}"
+    printf "%b\n" "${BLUE}Timestamp:${RESET} ${timestamp}"
+    printf "%b\n" "${BLUE}Op program path:${RESET} ${OP_PROGRAM_PATH}"
+
+    # Forward arguments to the command
+    "$command" "$@"
+
+    if [[ -f "$OP_PROGRAM_PATH" ]]; then
+        printf "%b\n" "${GREEN}✅ Found op program:${RESET} ${OP_PROGRAM_PATH}"
+        bat --style=plain --paging=never "$OP_PROGRAM_PATH"
+        printf "%b\n" "${GREEN}▶ Running...${RESET}"
+        . "$OP_PROGRAM_PATH"
+        status=$?
+        if [[ $status -eq 0 ]]; then
+            printf "%b\n" "${GREEN}✅ Completed successfully (exit ${status})${RESET}"
+        else
+            printf "%b\n" "${YELLOW}⚠️ Program exited with status ${status}${RESET}"
+        fi
     else
-        printf "%b\n" "${YELLOW}⚠️ Program exited with status ${status}${RESET}"
+        printf "%b\n" "${YELLOW}⚠️ No op program found at: ${OP_PROGRAM_PATH}${RESET}"
     fi
-else
-    printf "%b\n" "${YELLOW}⚠️ No op program found at: ${OP_PROGRAM_PATH}${RESET}"
+}
+
+if [[ $# -gt 0 ]]; then
+    wrap_in_op_program "$@"
 fi
+
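
Note: the rewritten wrapper turns the old top-level script into a reusable wrap_in_op_program function that exports OP_PROGRAM_PATH, runs the wrapped command, and then sources whatever script that command left at the exported path. A sketch of the producer side of that handshake, assuming the wrapped CLI simply writes its follow-up shell commands to the path; write_op_program is a hypothetical helper, not machineconfig's actual implementation:

    # Producer side of the OP_PROGRAM_PATH handshake (illustrative only).
    import os
    from pathlib import Path

    def write_op_program(commands: str) -> None:
        op_path = os.environ.get("OP_PROGRAM_PATH")
        if op_path is None:
            return  # not launched through the wrapper; nothing to hand back
        target = Path(op_path)
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(commands + "\n", encoding="utf-8")

    # The wrapper will later `. "$OP_PROGRAM_PATH"` so these commands run in the caller's shell.
    write_op_program("cd ~/code/machineconfig && git pull")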

machineconfig/scripts/python/agents.py
@@ -5,13 +5,13 @@
 from pathlib import Path
 from typing import cast, Optional, get_args, Annotated
 import typer
-from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AGENTS, HOST, MODEL, PROVIDER
+from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AGENTS, HOST, PROVIDER
 
 
 def create(
     agent: Annotated[AGENTS, typer.Option(..., "--agents", "-a", help=f"Agent type. One of {', '.join(get_args(AGENTS)[:3])}")],
     host: Annotated[HOST, typer.Option(..., "--host", "-h", help=f"Machine to run agents on. One of {', '.join(get_args(HOST))}")],
-    model: Annotated[MODEL, typer.Option(..., "--model", "-m", help=f"Model to use (for crush agent). One of {', '.join(get_args(MODEL)[:3])}")],
+    model: Annotated[str, typer.Option(..., "--model", "-m", help="Model to use (for crush agent).")],
     provider: Annotated[PROVIDER, typer.Option(..., "--provider", "-p", help=f"Provider to use (for crush agent). One of {', '.join(get_args(PROVIDER)[:3])}")],
     context_path: Annotated[Optional[Path], typer.Option(..., "--context-path", "-c", help="Path to the context file/folder, defaults to .ai/todo/")] = None,
     separator: Annotated[str, typer.Option(..., "--separator", "-s", help="Separator for context")] = "\n",
@@ -26,20 +26,9 @@ def create(
 
     from machineconfig.scripts.python.helpers_agents.fire_agents_help_launch import prep_agent_launch, get_agents_launch_layout
     from machineconfig.scripts.python.helpers_agents.fire_agents_load_balancer import chunk_prompts
-    from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import PROVIDER2MODEL
     from machineconfig.utils.accessories import get_repo_root, randstr
     import json
 
-    # validate model is valid for the provider
-    valid_models_for_provider = PROVIDER2MODEL.get(provider, [])
-    if model not in valid_models_for_provider:
-        available_models = "\n ".join(valid_models_for_provider) if valid_models_for_provider else "(none configured)"
-        raise typer.BadParameter(
-            f"Model '{model}' is not valid for provider '{provider}'.\n"
-            f"Valid models for '{provider}':\n {available_models}\n"
-            f"All available models: {', '.join(get_args(MODEL))}"
-        )
-
     # validate mutual exclusive
     prompt_options = [prompt, prompt_path]
     provided_prompt = [opt for opt in prompt_options if opt is not None]
@@ -179,8 +168,6 @@ def get_app():
     PROVIDER options: {', '.join(get_args(PROVIDER))}
     {sep}
     AGENT options: {', '.join(get_args(AGENTS))}
-    {sep}
-    MODEL options: {sep.join(get_args(MODEL))}
     """
     agents_app.command("create", no_args_is_help=True, help=agents_full_help)(create)
     agents_app.command("c", no_args_is_help=True, help="Create agents layout file, ready to run.", hidden=True)(create)

machineconfig/scripts/python/env_manager/path_manager_tui.py
@@ -2,7 +2,7 @@
 # /// script
 # requires-python = ">=3.13"
 # dependencies = [
-#     "machineconfig>=6.86",
+#     "machineconfig>=6.88",
 #     "textual",
 #     "pyperclip",
 # ]
@@ -35,7 +35,6 @@ def route(args: FireJobArgs, fire_args: str = "") -> None:
35
35
  else:
36
36
  choice_file = path_obj
37
37
 
38
-
39
38
  repo_root = get_repo_root(Path(choice_file))
40
39
  print(f"💾 Selected file: {choice_file}.\nRepo root: {repo_root}")
41
40
  if args.marimo:
@@ -67,18 +66,19 @@ uv run --project {repo_root} --with marimo marimo edit --host 0.0.0.0 marimo_nb.
67
66
 
68
67
  if choice_file.suffix == ".py":
69
68
  from machineconfig.scripts.python.helpers_fire_command.fire_jobs_route_helper import get_command_streamlit
69
+ with_project = f"--project {repo_root} " if repo_root is not None else ""
70
70
  if args.streamlit:
71
71
  exe = get_command_streamlit(choice_file=choice_file, environment=args.environment, repo_root=repo_root)
72
- exe = f"uv run {exe} "
73
- elif args.jupyter: exe = "uv run jupyter-lab"
72
+ exe = f"uv run {with_project} {exe} "
73
+ elif args.jupyter: exe = f"uv run {with_project} jupyter-lab"
74
74
  else:
75
75
  if args.interactive:
76
76
  _ve_root_from_file, ipy_profile = get_ve_path_and_ipython_profile(choice_file)
77
77
  if ipy_profile is None:
78
78
  ipy_profile = "default"
79
- exe = f"uv run ipython -i --no-banner --profile {ipy_profile} "
79
+ exe = f"uv run {with_project} ipython -i --no-banner --profile {ipy_profile} "
80
80
  else:
81
- exe = "uv run python "
81
+ exe = f"uv run {with_project} python "
82
82
  elif choice_file.suffix == ".ps1" or choice_file.suffix == ".sh": exe = "."
83
83
  elif choice_file.suffix == "": exe = ""
84
84
  else: raise NotImplementedError(f"File type {choice_file.suffix} not supported, in the sense that I don't know how to fire it.")
@@ -155,12 +155,8 @@ uv run --project {repo_root} --with marimo marimo edit --host 0.0.0.0 marimo_nb.
155
155
  if args.git_pull:
156
156
  command = f"\ngit -C {choice_file.parent} pull\n" + command
157
157
  if args.PathExport:
158
- if platform.system() in ["Linux", "Darwin"]:
159
- export_line = f"""export PYTHONPATH="{repo_root}""" + """:${PYTHONPATH}" """
160
- elif platform.system() == "Windows":
161
- export_line = f"""$env:PYTHONPATH="{repo_root}""" + """:$env:PYTHONPATH" """
162
- else:
163
- raise NotImplementedError(f"Platform {platform.system()} not supported.")
158
+ from machineconfig.scripts.python.helpers_fire_command.file_wrangler import add_to_path
159
+ export_line = add_to_path(path_variable="PYTHONPATH", directory=str(repo_root))
164
160
  command = export_line + "\n" + command
165
161
  if args.loop:
166
162
  if platform.system() in ["Linux", "Darwin"]:

machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_crush.py
@@ -1,21 +1,20 @@
 
 from pathlib import Path
 # import shlex
-from typing import Optional
-from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import HOST, PROVIDER, MODEL
+from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AI_SPEC
 
 
-def fire_crush(api_key: Optional[str], model: MODEL, provider: PROVIDER, machine: HOST, prompt_path: Path, repo_root: Path) -> str:
-    match machine:
+def fire_crush(ai_spec: AI_SPEC, prompt_path: Path, repo_root: Path) -> str:
+    match ai_spec["machine"]:
         case "local":
             cmd = f"""
 crush run {prompt_path}
 """
         case "docker":
-            assert api_key is not None, "API key is required for Crush agent in docker mode."
+            assert ai_spec["api_key"] is not None, "API key is required for Crush agent in docker mode."
             json_path = Path(__file__).parent / "fire_crush.json"
             json_template = json_path.read_text(encoding="utf-8")
-            json_filled = json_template.replace("{api_key}", api_key).replace("{model}", model).replace("{provider}", provider)
+            json_filled = json_template.replace("{api_key}", ai_spec["api_key"]).replace("{model}", ai_spec["model"]).replace("{provider}", ai_spec["provider"])
             from machineconfig.utils.accessories import randstr
             temp_config_file_local = Path.home().joinpath("tmp_results/tmp_files/crush_" + randstr(8) + ".json")
             temp_config_file_local.parent.mkdir(parents=True, exist_ok=True)
@@ -23,7 +22,7 @@ crush run {prompt_path}
             cmd = f"""
 
 # -e "PATH_PROMPT=$PATH_PROMPT"
-# opencode --model "{provider}/{model}" run {prompt_path}
+# opencode --model "{ai_spec["provider"]}/{ai_spec["model"]}" run {prompt_path}
 
 
 echo "Running prompt @ {prompt_path.relative_to(repo_root)} using Docker with Crush..."

machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_cursor_agents.py
@@ -2,11 +2,10 @@
 
 from pathlib import Path
 # import shlex
-from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import HOST
-from typing import Optional
+from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AI_SPEC
 
-def fire_cursor(api_key: Optional[str], prompt_path: Path, machine: HOST) -> str:
-    match machine:
+def fire_cursor(ai_spec: AI_SPEC, prompt_path: Path) -> str:
+    match ai_spec["machine"]:
         case "local":
             # Export the environment variable so it's available to subshells
             cmd = f"""

machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_gemini.py
@@ -1,26 +1,25 @@
 
 from pathlib import Path
 import shlex
-from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import HOST
-from typing import Optional, Literal
+from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AI_SPEC
 
 
-def fire_gemini(api_key: Optional[str], model: Literal["gemini-2.5-pro"], provider: Literal["google"], machine: HOST, prompt_path: Path, repo_root: Path) -> str:
-    _ = provider
+def fire_gemini(ai_spec: AI_SPEC, prompt_path: Path, repo_root: Path) -> str:
+    _ = ai_spec["provider"]
     # model = "gemini-2.5-flash-lite"
    # model = None  # auto-select
     # if model is None:
     #     model_arg = ""
     # else:
-    model_arg = f"--model {shlex.quote(model)}"
+    model_arg = f"--model {shlex.quote(ai_spec['model'])}"
     # Need a real shell for the pipeline; otherwise '| gemini ...' is passed as args to 'cat'
     safe_path = shlex.quote(str(prompt_path))
 
-    match machine:
+    match ai_spec["machine"]:
         case "local":
             # Export the environment variable so it's available to subshells
-            if api_key is not None:
-                define_api_key = f"""export GEMINI_API_KEY="{shlex.quote(api_key)}" """
+            if ai_spec["api_key"] is not None:
+                define_api_key = f"""export GEMINI_API_KEY="{shlex.quote(ai_spec['api_key'])}" """
             else:
                 define_api_key = "echo 'Warning: No GEMINI_API_KEY provided, hoping it is set in the environment.'"
             cmd = f"""
@@ -31,10 +30,10 @@ gemini {model_arg} --yolo --prompt {safe_path}
 
 
         case "docker":
-            assert api_key is not None, "When using docker, api_key must be provided."
+            assert ai_spec["api_key"] is not None, "When using docker, api_key must be provided."
             cmd = f"""
 docker run -it --rm \
-    -e GEMINI_API_KEY="{api_key}" \
+    -e GEMINI_API_KEY="{ai_spec['api_key']}" \
     -v "{repo_root}:/workspace/{repo_root.name}" \
     -w "/workspace/{repo_root.name}" \
     statistician/machineconfig-ai:latest \

machineconfig/scripts/python/helpers_agents/agentic_frameworks/fire_qwen.py
@@ -1,11 +1,10 @@
 
 from pathlib import Path
 import shlex
-from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import HOST
-from typing import Optional, Literal
+from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AI_SPEC
 
 
-def fire_qwen(config_dir: Optional[str], model: Literal["qwen"], provider: Literal["qwen"], machine: HOST, prompt_path: Path, repo_root: Path) -> str:
+def fire_qwen(ai_spec: AI_SPEC, prompt_path: Path, repo_root: Path, config_dir: str | None) -> str:
     # assert model == "qwen", "Only qwen is supported currently."
     # assert provider == "qwen", "Only qwen is supported currently."
     # model = "qwen"
@@ -14,12 +13,12 @@ def fire_qwen(config_dir: Optional[str], model: Literal["qwen"], provider: Liter
     # if model is None:
     #     model_arg = ""
     # else:
-    _ = provider
+    _ = ai_spec["provider"]
     # model_arg = f"--model {shlex.quote(model)}"
     # Need a real shell for the pipeline; otherwise '| gemini ...' is passed as args to 'cat'
     safe_path = shlex.quote(str(prompt_path))
 
-    match machine:
+    match ai_spec["machine"]:
         case "local":
             # Export the environment variable so it's available to subshells
             cmd = f"""

machineconfig/scripts/python/helpers_agents/fire_agents_help_launch.py
@@ -2,7 +2,7 @@
 import random
 import shlex
 from pathlib import Path
-from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AGENTS, AGENT_NAME_FORMATTER, HOST, PROVIDER, MODEL
+from machineconfig.scripts.python.helpers_agents.fire_agents_helper_types import AGENTS, AGENT_NAME_FORMATTER, HOST, PROVIDER, AI_SPEC
 
 
 def get_api_keys(provider: PROVIDER) -> list[str]:
@@ -20,7 +20,7 @@ def get_api_keys(provider: PROVIDER) -> list[str]:
 
 
 def prep_agent_launch(repo_root: Path, agents_dir: Path, prompts_material: list[str], prompt_prefix: str, keep_material_in_separate_file: bool,
-                      machine: HOST, model: MODEL, provider: PROVIDER, agent: AGENTS, *, job_name: str) -> None:
+                      machine: HOST, model: str, provider: PROVIDER, agent: AGENTS, *, job_name: str) -> None:
     agents_dir.mkdir(parents=True, exist_ok=True)
     prompt_folder = agents_dir / "prompts"
     prompt_folder.mkdir(parents=True, exist_ok=True)
@@ -66,17 +66,20 @@ sleep 0.1
             assert provider == "google", "Gemini agent only works with google provider."
             api_keys = get_api_keys(provider="google")
             api_key = api_keys[idx % len(api_keys)] if len(api_keys) > 0 else None
+            ai_spec: AI_SPEC = AI_SPEC(provider=provider, model="gemini-2.5-pro", agent=agent, machine=machine, api_key=api_key, api_name="gemini")
             from machineconfig.scripts.python.helpers_agents.agentic_frameworks.fire_gemini import fire_gemini
-            cmd = fire_gemini(api_key=api_key, prompt_path=prompt_path, machine=machine, model="gemini-2.5-pro", provider="google", repo_root=repo_root)
+            cmd = fire_gemini(ai_spec=ai_spec, prompt_path=prompt_path, repo_root=repo_root)
         case "cursor-agent":
+            ai_spec: AI_SPEC = AI_SPEC(provider=provider, model=model, agent=agent, machine=machine, api_key=None, api_name="cursor")
             from machineconfig.scripts.python.helpers_agents.agentic_frameworks.fire_cursor_agents import fire_cursor
-            cmd = fire_cursor(prompt_path=prompt_path, machine=machine, api_key=None)
+            cmd = fire_cursor(ai_spec=ai_spec, prompt_path=prompt_path)
             raise NotImplementedError("Cursor agent is not implemented yet, api key missing")
         case "crush":
-            from machineconfig.scripts.python.helpers_agents.agentic_frameworks.fire_crush import fire_crush
             api_keys = get_api_keys(provider=provider)
             api_key = api_keys[idx % len(api_keys)] if len(api_keys) > 0 else None
-            cmd = fire_crush(api_key=api_key, prompt_path=prompt_path, machine=machine, repo_root=repo_root, model=model, provider=provider)
+            ai_spec: AI_SPEC = AI_SPEC(provider=provider, model=model, agent=agent, machine=machine, api_key=api_key, api_name="crush")
+            from machineconfig.scripts.python.helpers_agents.agentic_frameworks.fire_crush import fire_crush
+            cmd = fire_crush(ai_spec=ai_spec, prompt_path=prompt_path, repo_root=repo_root)
         # case "q":
         #     from machineconfig.scripts.python.helpers_fire.fire_q import fire_q
         #     cmd = fire_q(api_key="", prompt_path=prompt_path, machine=machine)

machineconfig/scripts/python/helpers_agents/fire_agents_helper_types.py
@@ -19,24 +19,14 @@ from typing import Literal, TypeAlias, TypedDict
 AGENTS: TypeAlias = Literal["cursor-agent", "gemini", "qwen-code", "copilot", "crush", "q", "opencode", "kilocode", "cline", "auggie", "warp", "droid"]
 HOST: TypeAlias = Literal["local", "docker"]
 PROVIDER: TypeAlias = Literal["azure", "google", "aws", "openai", "anthropic", "openrouter", "xai"]
-MODEL: TypeAlias = Literal["zai/glm-4.6", "anthropic/sonnet-4.5", "google/gemini-2.5-pro", "openai/gpt-5-codex",
-                           "openrouter/supernova", "openrouter/andromeda-alpha", "x-ai/grok-4-fast:free",
-                           ]
-PROVIDER2MODEL: dict[PROVIDER, list[MODEL]] = {
-    "azure": ["zai/glm-4.6"],
-    "google": ["google/gemini-2.5-pro"],
-    "aws": [],
-    "openai": ["openai/gpt-5-codex"],
-    "anthropic": ["anthropic/sonnet-4.5"],
-    "openrouter": ["openrouter/supernova", "openrouter/andromeda-alpha"],
-    "xai": ["x-ai/grok-4-fast:free"]
-}
 
 class AI_SPEC(TypedDict):
     provider: PROVIDER
-    model: MODEL
+    model: str
     agent: AGENTS
-    machine: HOST
+    machine: HOST
+    api_key: str | None
+    api_name: str
 
 
 AGENT_NAME_FORMATTER = "agent_{idx}_cmd.sh"  # e.g., agent_0_cmd.sh

machineconfig/scripts/python/helpers_devops/cli_config.py
@@ -46,7 +46,7 @@ def path():
     uv_with = ["textual"]
     uv_project_dir = None
     if not Path.home().joinpath("code/machineconfig").exists():
-        uv_with.append("machineconfig>=6.86")
+        uv_with.append("machineconfig>=6.88")
     else:
         uv_project_dir = str(Path.home().joinpath("code/machineconfig"))
     run_shell_script(get_uv_command_executing_python_script(python_script=path.read_text(encoding="utf-8"), uv_with=uv_with, uv_project_dir=uv_project_dir)[0])

machineconfig/scripts/python/helpers_devops/cli_self.py
@@ -3,35 +3,42 @@ import typer
 from typing import Optional, Annotated
 
 
+def copy_both_assets():
+    import machineconfig.profile.create_helper as create_helper
+    create_helper.copy_assets_to_machine(which="scripts")
+    create_helper.copy_assets_to_machine(which="settings")
+
+
 def update(no_copy_assets: Annotated[bool, typer.Option("--no-assets-copy", "-na", help="Copy (overwrite) assets to the machine after the update")] = False):
     """🔄 UPDATE uv and machineconfig"""
     # from machineconfig.utils.source_of_truth import LIBRARY_ROOT
     # repo_root = LIBRARY_ROOT.parent.parent
     from pathlib import Path
     if Path.home().joinpath("code", "machineconfig").exists():
-        code = """
+        shell_script = """
 uv self update
 cd ~/code/machineconfig
 git pull
 uv tool install --upgrade --editable $HOME/code/machineconfig
 """
     else:
-        code = """
+        shell_script = """
 uv self update
 uv tool install --upgrade machineconfig
 """
     import platform
     if platform.system() == "Windows":
-        # from machineconfig.utils.code import run_shell_script_after_exit
-        # run_shell_script_after_exit(code)
-        print(f'please run {code} in powershell to update machineconfig')
+        from machineconfig.utils.code import exit_then_run_shell_script, get_uv_command_executing_python_script
+        from machineconfig.utils.meta import lambda_to_python_script
+        python_script = lambda_to_python_script(lambda: copy_both_assets(), in_global=True, import_module=False)
+        uv_command, _py_file = get_uv_command_executing_python_script(python_script=python_script, uv_with=["machineconfig"], uv_project_dir=None)
+        exit_then_run_shell_script(shell_script + "\n" + uv_command, strict=True)
     else:
         from machineconfig.utils.code import run_shell_script
-        run_shell_script(code)
+        run_shell_script(shell_script)
     if not no_copy_assets:
-        import machineconfig.profile.create_helper as create_helper
-        create_helper.copy_assets_to_machine(which="scripts")
-        create_helper.copy_assets_to_machine(which="settings")
+        copy_both_assets()
+
 def install(no_copy_assets: Annotated[bool, typer.Option("--no-assets-copy", "-na", help="Copy (overwrite) assets to the machine after the update")] = False):
     """📋 CLONE machienconfig locally and incorporate to shell profile for faster execution and nightly updates."""
     from machineconfig.utils.code import run_shell_script
@@ -41,9 +48,9 @@ def install(no_copy_assets: Annotated[bool, typer.Option("--no-assets-copy", "-n
     else:
         import platform
         if platform.system() == "Windows":
-            run_shell_script(r"""& "$HOME\.local\bin\uv.exe" tool install --upgrade "machineconfig>=6.86" """)
+            run_shell_script(r"""& "$HOME\.local\bin\uv.exe" tool install --upgrade "machineconfig>=6.88" """)
         else:
-            run_shell_script("""$HOME/.local/bin/uv tool install --upgrade "machineconfig>=6.86" """)
+            run_shell_script("""$HOME/.local/bin/uv tool install --upgrade "machineconfig>=6.88" """)
     from machineconfig.profile.create_shell_profile import create_default_shell_profile
     if not no_copy_assets:
         create_default_shell_profile()  # involves copying assets too
@@ -68,7 +75,7 @@ def navigate():
     path = Path(navigator.__file__).resolve().parent.joinpath("devops_navigator.py")
     from machineconfig.utils.code import run_shell_script
     if Path.home().joinpath("code/machineconfig").exists(): executable = f"""--project "{str(Path.home().joinpath("code/machineconfig"))}" --with textual"""
-    else: executable = """--with "machineconfig>=6.86,textual" """
+    else: executable = """--with "machineconfig>=6.88,textual" """
     run_shell_script(f"""uv run {executable} {path}""")
 
 

machineconfig/scripts/python/helpers_fire_command/file_wrangler.py
@@ -1,7 +1,7 @@
-
 from typing import Optional
 import os
 from machineconfig.utils.path_extended import PathExtended
+import platform
 
 
 def search_for_files_of_interest(path_obj: PathExtended):
@@ -125,3 +125,42 @@ def wrap_import_in_try_except(import_line: str, pyfile: str, repo_root: Optional
         sys.path.append(repo_root)
     exec(f"from {Path(pyfile).stem} import *")
     print(fr"✅ Successfully imported `{pyfile}`")
+
+
+def add_to_path(path_variable: str, directory: str) -> str:
+    """
+    Generate shell script to add directory to path_variable.
+    Handles both Windows (cmd) and Unix-like systems (bash/zsh).
+    Checks if variable exists before appending, otherwise creates it.
+    """
+    system = platform.system()
+
+    if system == "Windows":
+        script = f"""# Check if {path_variable} is defined
+if (Test-Path env:{path_variable}) {{
+    Write-Host "Adding {directory} to existing {path_variable}"
+    $currentValue = [Environment]::GetEnvironmentVariable("{path_variable}", "User")
+    $newValue = "$currentValue;{directory}"
+    [Environment]::SetEnvironmentVariable("{path_variable}", $newValue, "User")
+    $env:{path_variable} = $newValue
+}} else {{
+    Write-Host "Creating new {path_variable} variable"
+    [Environment]::SetEnvironmentVariable("{path_variable}", "{directory}", "User")
+    $env:{path_variable} = "{directory}"
+}}
+Write-Host "{path_variable} is now: $env:{path_variable}\""""
+        return script
+    else:
+        script = f"""#!/bin/bash
+# Check if {path_variable} is defined and not empty
+if [ -z "${{{path_variable}}}" ]; then
+    echo "Creating new {path_variable} variable"
+    export {path_variable}="{directory}"
+else
+    echo "Adding {directory} to existing {path_variable}"
+    export {path_variable}="${{{path_variable}}}:{directory}"
+fi
+echo "{path_variable} is now: ${{{path_variable}}}"
+"""
+        return script
+
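
Note: fire_jobs.py (see above) consumes this helper by prepending its return value to the command it builds, so add_to_path emits shell text for the target platform rather than mutating os.environ in-process. A usage sketch; the directory and the downstream command are placeholders:

    # Using the new helper the same way fire_jobs.py does.
    from machineconfig.scripts.python.helpers_fire_command.file_wrangler import add_to_path

    export_line = add_to_path(path_variable="PYTHONPATH", directory="/home/alex/code/myrepo")
    command = export_line + "\n" + "python -m mypackage"  # mypackage is a placeholder module
    print(command)  # bash text on Linux/macOS, PowerShell text on Windows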

machineconfig/scripts/python/helpers_repos/cloud_repo_sync.py
@@ -80,7 +80,7 @@ git pull originEnc master
         uv_project_dir = f"""{str(Path.home().joinpath("code/machineconfig"))}"""
         uv_with = None
     else:
-        uv_with = ["machineconfig>=6.86"]
+        uv_with = ["machineconfig>=6.88"]
         uv_project_dir = None
 
     import tempfile

machineconfig/scripts/python/nw/mount_nfs
@@ -5,7 +5,7 @@
 # mkdir ~/data/local
 # sudo mount -o nolock,noatime,nodiratime,proto=tcp,timeo=600,retrans=2,noac alex-p51s-5:/home/alex/data/local ./data/local
 
-uv run --python 3.14 --with "machineconfig>=6.86" python -m machineconfig.scripts.python.mount_nfs
+uv run --python 3.14 --with "machineconfig>=6.88" python -m machineconfig.scripts.python.mount_nfs
 # Check if remote server is reachable and share folder exists
 if ! ping -c 1 "$remote_server" &> /dev/null; then
     echo "💥 Error: Remote server $remote_server is not reachable."