machineconfig 8.45__py3-none-any.whl → 8.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of machineconfig might be problematic. Click here for more details.

Files changed (32) hide show
  1. machineconfig/jobs/installer/python_scripts/sysabc.py +1 -1
  2. machineconfig/jobs/scripts/powershell_scripts/mount_ssh.ps1 +1 -1
  3. machineconfig/profile/create_links_export.py +36 -20
  4. machineconfig/profile/mapper_dotfiles.toml +31 -31
  5. machineconfig/scripts/python/devops_navigator.py +1 -1
  6. machineconfig/scripts/python/helper_env/path_manager_tui.py +1 -1
  7. machineconfig/scripts/python/helpers/helper_env/env_manager_tui.py +1 -1
  8. machineconfig/scripts/python/helpers/helper_env/path_manager_tui.py +1 -1
  9. machineconfig/scripts/python/helpers/helpers_croshell/croshell_impl.py +4 -4
  10. machineconfig/scripts/python/helpers/helpers_devops/backup_config.py +3 -3
  11. machineconfig/scripts/python/helpers/helpers_devops/cli_backup_retrieve.py +3 -3
  12. machineconfig/scripts/python/helpers/helpers_devops/cli_config_dotfile.py +13 -13
  13. machineconfig/scripts/python/helpers/helpers_devops/cli_repos.py +27 -18
  14. machineconfig/scripts/python/helpers/helpers_devops/cli_self.py +3 -2
  15. machineconfig/scripts/python/helpers/helpers_repos/cloud_repo_sync.py +2 -1
  16. machineconfig/scripts/python/helpers/helpers_sessions/sessions_impl.py +10 -1
  17. machineconfig/scripts/python/helpers/helpers_utils/python.py +2 -1
  18. machineconfig/scripts/python/sessions.py +2 -2
  19. machineconfig/settings/shells/zsh/init.sh +9 -18
  20. machineconfig/setup_linux/web_shortcuts/interactive.sh +10 -10
  21. machineconfig/setup_windows/web_shortcuts/interactive.ps1 +10 -10
  22. machineconfig/utils/installer_utils/github_release_bulk.py +11 -92
  23. machineconfig/utils/installer_utils/github_release_scraper.py +99 -0
  24. machineconfig/utils/io.py +25 -8
  25. machineconfig/utils/ssh_utils/abc.py +1 -1
  26. machineconfig/utils/ssh_utils/copy_from_here.py +17 -12
  27. machineconfig/utils/ssh_utils/utils.py +21 -5
  28. {machineconfig-8.45.dist-info → machineconfig-8.50.dist-info}/METADATA +1 -1
  29. {machineconfig-8.45.dist-info → machineconfig-8.50.dist-info}/RECORD +32 -31
  30. {machineconfig-8.45.dist-info → machineconfig-8.50.dist-info}/WHEEL +0 -0
  31. {machineconfig-8.45.dist-info → machineconfig-8.50.dist-info}/entry_points.txt +0 -0
  32. {machineconfig-8.45.dist-info → machineconfig-8.50.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,6 @@
1
1
  import typer
2
2
  from typing import Optional, Annotated, Literal, TypedDict, cast
3
+ from machineconfig.utils.ssh_utils.abc import MACHINECONFIG_VERSION
3
4
 
4
5
 
5
6
  def tui_env(which: Annotated[Literal["PATH", "p", "ENV", "e"], typer.Argument(help="Which environment variable to display.")] = "ENV") -> None:
@@ -17,7 +18,7 @@ def tui_env(which: Annotated[Literal["PATH", "p", "ENV", "e"], typer.Argument(he
17
18
  uv_with = ["textual"]
18
19
  uv_project_dir = None
19
20
  if not Path.home().joinpath("code/machineconfig").exists():
20
- uv_with.append("machineconfig>=8.45")
21
+ uv_with.append(MACHINECONFIG_VERSION)
21
22
  else:
22
23
  uv_project_dir = str(Path.home().joinpath("code/machineconfig"))
23
24
  run_shell_script(
@@ -19,8 +19,6 @@ def balance_load(
19
19
  def run(
20
20
  ctx: typer.Context,
21
21
  layout_path: Annotated[Optional[str], typer.Argument(..., help="Path to the layout.json file")] = None,
22
- max_tabs: Annotated[int, typer.Option(..., "--max-tabs", "-mt", help="A Sanity checker that throws an error if any layout exceeds the maximum number of tabs to launch.")] = 10,
23
- max_layouts: Annotated[int, typer.Option(..., "--max-layouts", "-ml", help="A Sanity checker that throws an error if the total number of *parallel layouts exceeds this number.")] = 10,
24
22
  sleep_inbetween: Annotated[float, typer.Option(..., "--sleep-inbetween", "-si", help="Sleep time in seconds between launching layouts")] = 1.0,
25
23
  monitor: Annotated[bool, typer.Option(..., "--monitor", "-m", help="Monitor the layout sessions for completion")] = False,
26
24
  parallel: Annotated[bool, typer.Option(..., "--parallel", "-p", help="Launch multiple layouts in parallel")] = False,
@@ -28,6 +26,8 @@ def run(
28
26
  choose: Annotated[Optional[str], typer.Option(..., "--choose", "-c", help="Comma separated names of layouts to be selected from the layout file passed")] = None,
29
27
  choose_interactively: Annotated[bool, typer.Option(..., "--choose-interactively", "-i", help="Select layouts interactively")] = False,
30
28
  subsitute_home: Annotated[bool, typer.Option(..., "--substitute-home", "-sh", help="Substitute ~ and $HOME in layout file with actual home directory path")] = False,
29
+ max_tabs: Annotated[int, typer.Option(..., "--max-tabs", "-mt", help="A Sanity checker that throws an error if any layout exceeds the maximum number of tabs to launch.")] = 25,
30
+ max_layouts: Annotated[int, typer.Option(..., "--max-layouts", "-ml", help="A Sanity checker that throws an error if the total number of *parallel layouts exceeds this number.")] = 25,
31
31
  ) -> None:
32
32
  """Launch terminal sessions based on a layout configuration file."""
33
33
  if layout_path is None:
@@ -88,32 +88,23 @@ eval "$(starship init zsh)"
88
88
 
89
89
  # LEAVE THIS AT THE END TO AVOID EXECUTION FAILURE OF THE REST OF THE SCRIPT
90
90
  if command -v mcfly &> /dev/null; then
91
- eval "$(mcfly init bash)"
91
+ eval "$(mcfly init zsh)"
92
92
  elif command -v atuin &> /dev/null; then
93
- eval "$(atuin init bash)"
93
+ eval "$(atuin init zsh)"
94
94
  else
95
- # eval "$(tv init bash)"
96
95
  tv_shell_history() {
97
- # _disable_bracketed_paste
98
- local current_prompt="${READLINE_LINE:0:$READLINE_POINT}"
96
+ local current_prompt="$LBUFFER"
99
97
  local output
100
- # move to the next line so that the prompt is not overwritten
101
98
  printf "\n"
102
- # Get history using tv with the same arguments as zsh version
103
- output=$(tv bash-history --input "$current_prompt" --inline)
104
-
99
+ output=$(tv zsh-history --input "$current_prompt" --inline)
105
100
  if [[ -n "$output" ]]; then
106
- # Clear the right side of cursor and set new line
107
- READLINE_LINE="$output"
108
- READLINE_POINT=${#READLINE_LINE}
109
- # Uncomment this to automatically accept the line
110
- # (i.e. run the command without having to press enter twice)
111
- # accept-line() { echo; }; accept-line
101
+ BUFFER="$output"
102
+ CURSOR=${#BUFFER}
112
103
  fi
113
- # move the cursor back to the previous line
114
104
  printf "\033[A"
115
- # _enable_bracketed_paste
105
+ zle reset-prompt
116
106
  }
117
- bind -x '"\C-r": tv_shell_history'
107
+ zle -N tv_shell_history
108
+ bindkey '^R' tv_shell_history
118
109
  fi
119
110
 
@@ -2,16 +2,16 @@
2
2
  . <( curl -sSL "https://raw.githubusercontent.com/thisismygitrepo/machineconfig/main/src/machineconfig/setup_linux/uv.sh")
3
3
  . <( curl -sSL "https://raw.githubusercontent.com/thisismygitrepo/machineconfig/main/src/machineconfig/scripts/linux/wrap_mcfg")
4
4
 
5
- alias devops='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" devops'
6
- alias cloud='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" cloud'
7
- alias agents='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" agents'
8
- alias sessions='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" sessions'
9
- alias ftpx='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" ftpx'
10
- alias fire='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" fire'
11
- alias croshell='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" croshell'
12
- alias utils='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" utils'
13
- alias terminal='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" terminal'
14
- alias msearch='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.45" msearch'
5
+ alias devops='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" devops'
6
+ alias cloud='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" cloud'
7
+ alias agents='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" agents'
8
+ alias sessions='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" sessions'
9
+ alias ftpx='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" ftpx'
10
+ alias fire='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" fire'
11
+ alias croshell='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" croshell'
12
+ alias utils='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" utils'
13
+ alias terminal='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" terminal'
14
+ alias msearch='$HOME/.local/bin/uvx --python 3.14 --from "machineconfig>=8.50" msearch'
15
15
 
16
16
  alias d='wrap_in_shell_script devops'
17
17
  alias c='wrap_in_shell_script cloud'
@@ -4,16 +4,16 @@
4
4
  irm "https://raw.githubusercontent.com/thisismygitrepo/machineconfig/main/src/machineconfig/setup_windows/uv.ps1" | iex
5
5
  irm "https://raw.githubusercontent.com/thisismygitrepo/machineconfig/main/src/machineconfig/scripts/windows/wrap_mcfg.ps1" | iex
6
6
 
7
- function devops { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" devops $args }
8
- function cloud { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" cloud $args }
9
- function agents { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" agents $args }
10
- function sessions { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" sessions $args }
11
- function ftpx { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" ftpx $args }
12
- function fire { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" fire $args }
13
- function croshell { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" croshell $args }
14
- function utils { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" utils $args }
15
- function terminal { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" terminal $args }
16
- function msearch { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.45" msearch $args }
7
+ function devops { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" devops $args }
8
+ function cloud { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" cloud $args }
9
+ function agents { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" agents $args }
10
+ function sessions { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" sessions $args }
11
+ function ftpx { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" ftpx $args }
12
+ function fire { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" fire $args }
13
+ function croshell { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" croshell $args }
14
+ function utils { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" utils $args }
15
+ function terminal { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" terminal $args }
16
+ function msearch { & "$HOME\.local\bin\uvx.exe" --python 3.14 --from "machineconfig>=8.50" msearch $args }
17
17
 
18
18
  function d { wrap_in_shell_script devops @args }
19
19
  function c { wrap_in_shell_script cloud @args }
@@ -10,6 +10,8 @@ from pathlib import Path
10
10
  from typing import Any, Dict, Optional, Set, TypedDict
11
11
  from urllib.parse import urlparse
12
12
 
13
+ from machineconfig.utils.installer_utils.github_release_scraper import scrape_github_release_page
14
+
13
15
 
14
16
  class AssetInfo(TypedDict):
15
17
  """Type definition for GitHub release asset information."""
@@ -74,7 +76,7 @@ def fetch_github_release_data(
74
76
  repo_name: str,
75
77
  version: Optional[str] = None,
76
78
  ) -> Optional[Dict[str, Any]]:
77
- """Fetch GitHub release data for the latest or a specific tag."""
79
+ """Fetch GitHub release data for the latest or a specific tag. Falls back to HTML scraping if API fails."""
78
80
 
79
81
  try:
80
82
  requested_version = (version or "").strip()
@@ -85,25 +87,24 @@ def fetch_github_release_data(
85
87
 
86
88
  response = requests.get(url, timeout=30)
87
89
  if response.status_code != 200:
88
- print(f" Failed to fetch data for {username}/{repo_name}: HTTP {response.status_code}")
89
- print(f" URL: {url}")
90
- return None
90
+ print(f"⚠️ API failed for {username}/{repo_name}: HTTP {response.status_code}, trying HTML scraper...")
91
+ return scrape_github_release_page(username, repo_name, version)
91
92
 
92
93
  response_data = response.json()
93
94
  message = response_data.get("message")
94
95
  if isinstance(message, str):
95
96
  if "API rate limit exceeded" in message:
96
- print(f"🚫 Rate limit exceeded for {username}/{repo_name}")
97
- return None
97
+ print(f"🚫 Rate limit exceeded for {username}/{repo_name}, trying HTML scraper...")
98
+ return scrape_github_release_page(username, repo_name, version)
98
99
  if "Not Found" in message:
99
- print(f"🔍 No releases found for {username}/{repo_name}")
100
- return None
100
+ print(f"🔍 No releases found via API for {username}/{repo_name}, trying HTML scraper...")
101
+ return scrape_github_release_page(username, repo_name, version)
101
102
 
102
103
  return response_data
103
104
 
104
105
  except (requests.RequestException, requests.Timeout, json.JSONDecodeError) as error:
105
- print(f" Error fetching {username}/{repo_name}: {error}")
106
- return None
106
+ print(f"⚠️ API error for {username}/{repo_name}: {error}, trying HTML scraper...")
107
+ return scrape_github_release_page(username, repo_name, version)
107
108
 
108
109
 
109
110
  def get_release_info(
@@ -142,85 +143,3 @@ def extract_release_info(release_data: Dict[str, Any]) -> Optional[ReleaseInfo]:
142
143
  "assets_count": len(assets)
143
144
  }
144
145
 
145
-
146
- # def main() -> None:
147
- # """Main function to process installer JSON files and fetch GitHub release data."""
148
- # # Define paths
149
- # current_dir = Path(__file__).parent
150
- # installer_dir = current_dir.parent.parent / "jobs" / "installer"
151
-
152
- # standard_json = installer_dir / "installer_data.json"
153
- # output_json = current_dir / "github_releases.json"
154
-
155
- # print("🔍 Starting GitHub release data extraction...")
156
- # print(f"📁 Processing files from: {installer_dir}")
157
-
158
- # # Extract GitHub repositories from both files
159
- # all_github_repos: Set[str] = set()
160
-
161
- # if standard_json.exists():
162
- # print(f"📄 Reading {standard_json.name}...")
163
- # repos = extract_github_repos_from_json(standard_json)
164
- # all_github_repos.update(repos)
165
- # print(f" Found {len(repos)} GitHub repos")
166
- # else:
167
- # print(f"⚠️ File not found: {standard_json}")
168
- # print(f"🎯 Total unique GitHub repositories found: {len(all_github_repos)}")
169
-
170
- # if not all_github_repos:
171
- # print("❌ No GitHub repositories found. Exiting.")
172
- # return
173
-
174
- # # Fetch release data with rate limiting
175
- # release_mapping: Dict[str, Optional[ReleaseInfo]] = {}
176
- # total_repos = len(all_github_repos)
177
-
178
- # print(f"\n🚀 Fetching release data for {total_repos} repositories...")
179
- # print("⏰ Rate limiting: 5 seconds between requests")
180
- # print("-" * 60)
181
-
182
- # for i, repo_url in enumerate(sorted(all_github_repos), 1):
183
- # repo_info = get_repo_name_from_url(repo_url)
184
-
185
- # if not repo_info:
186
- # print(f"⚠️ [{i:3d}/{total_repos}] Invalid repo URL: {repo_url}")
187
- # continue
188
-
189
- # username, repo_name = repo_info
190
- # repo_full_name = f"{username}/{repo_name}"
191
-
192
- # print(f"📡 [{i:3d}/{total_repos}] Fetching: {repo_full_name}", end=" ... ")
193
-
194
- # release_info = get_release_info(username, repo_name)
195
-
196
- # if release_info:
197
- # release_mapping[repo_url] = release_info
198
- # assets_count = release_info["assets_count"]
199
- # tag = release_info["tag_name"]
200
- # print(f"✅ {tag} ({assets_count} assets)")
201
- # else:
202
- # release_mapping[repo_url] = None
203
- # print("❌ No data")
204
-
205
- # # Rate limiting - wait 5 seconds between requests (except for the last one)
206
- # if i < total_repos:
207
- # time.sleep(5)
208
-
209
- # # Save results
210
- # output_data: OutputData = {
211
- # "generated_at": time.strftime("%Y-%m-%d %H:%M:%S UTC", time.gmtime()),
212
- # "total_repositories": len(all_github_repos),
213
- # "successful_fetches": len([v for v in release_mapping.values() if v]),
214
- # "releases": release_mapping
215
- # }
216
-
217
- # with open(output_json, 'w', encoding='utf-8') as f:
218
- # json.dump(output_data, f, indent=2, ensure_ascii=False)
219
-
220
- # successful = len([v for v in release_mapping.values() if v])
221
- # print("\n📊 Summary:")
222
- # print(f" Total repositories processed: {len(all_github_repos)}")
223
- # print(f" Successful fetches: {successful}")
224
- # print(f" Failed fetches: {len(all_github_repos) - successful}")
225
- # print(f" Output saved to: {output_json}")
226
- # print("✅ Done!")
@@ -0,0 +1,99 @@
1
+ #!/usr/bin/env python3
2
+ """HTML scraper for GitHub release pages as fallback when API rate limit is exceeded."""
3
+
4
+ import re
5
+ from typing import Any, Optional
6
+ import requests
7
+
8
+
9
def extract_tag_from_html(html: str, owner: str, repo: str) -> str:
    """Best-effort extraction of a release tag from a GitHub HTML page.

    Tries progressively looser regex candidates (release-tag link, tree link,
    then a styled <span>) and returns the first plausible capture. A capture is
    rejected when it is empty or looks like a full URL. Returns "" on no match.
    """
    owner_quoted = re.escape(owner)
    repo_quoted = re.escape(repo)
    candidates = (
        rf'/{owner_quoted}/{repo_quoted}/releases/tag/([^"\'<>\s]+)',
        rf'/{owner_quoted}/{repo_quoted}/tree/([^"\'<>\s]+)',
        r'<span[^>]*class="[^"]*ml-1[^"]*"[^>]*>([^<]+)</span>',
    )
    for candidate in candidates:
        hit = re.search(candidate, html, re.IGNORECASE)
        if hit is None:
            continue
        candidate_tag = hit.group(1).strip()
        # Guard against capturing an absolute URL instead of a bare tag.
        if candidate_tag and not candidate_tag.startswith("http"):
            return candidate_tag
    return ""
22
+
23
+
24
def extract_release_name(html: str) -> str:
    """Extract a human-readable release title from GitHub release HTML.

    Probes a few markup shapes in order of specificity (styled <h1>, <bdi>,
    then any <h1>) and returns the first non-empty, stripped text found.
    Returns "" when no candidate matches.
    """
    probes = (
        r'<h1[^>]*class="[^"]*d-inline[^"]*"[^>]*>([^<]+)</h1>',
        r'<bdi[^>]*class="[^"]*mr-2[^"]*"[^>]*>([^<]+)</bdi>',
        r'<h1[^>]*>([^<]+)</h1>',
    )
    for probe in probes:
        hit = re.search(probe, html)
        if hit is None:
            continue
        title = hit.group(1).strip()
        if title:
            return title
    return ""
37
+
38
+
39
def extract_published_at(html: str) -> str:
    """Return the datetime attribute of the first <relative-time> element, or ""."""
    hit = re.search(r'<relative-time[^>]*datetime="([^"]+)"', html)
    return hit.group(1) if hit else ""
45
+
46
+
47
def fetch_expanded_assets(username: str, repo_name: str, tag_name: str, headers: dict[str, str]) -> list[dict[str, Any]]:
    """Scrape the release's expanded_assets HTML fragment for downloadable files.

    GitHub lazy-loads release assets from a separate endpoint; this fetches that
    fragment and regex-extracts (download URL, asset name) pairs. Each asset dict
    mimics the API shape, with size/count/timestamps left as zero/empty because
    the fragment does not expose them. Returns an empty list on any failure.
    """
    collected: list[dict[str, Any]] = []
    fragment_url = f"https://github.com/{username}/{repo_name}/releases/expanded_assets/{tag_name}"
    try:
        page = requests.get(fragment_url, timeout=30, headers=headers)
        if page.status_code != 200:
            print(f"⚠️ [Scraper] Could not fetch expanded assets for {username}/{repo_name}: HTTP {page.status_code}")
            return collected
        # Anchor to a /releases/download/ href followed by the bold truncated-name span.
        anchor_pattern = r'href="([^"]*?/releases/download/[^"]+)"[^>]*>.*?<span[^>]*class="[^"]*Truncate-text[^"]*text-bold[^"]*"[^>]*>([^<]+)</span>'
        already_seen: set[str] = set()
        for href, raw_name in re.findall(anchor_pattern, page.text, re.DOTALL):
            title = raw_name.strip()
            if not title or title.isspace():
                continue
            # Relative hrefs need the github.com origin prepended.
            full_url = f"https://github.com{href}" if href.startswith("/") else href
            if full_url in already_seen:
                continue
            already_seen.add(full_url)
            collected.append({"name": title, "size": 0, "download_count": 0, "content_type": "", "created_at": "", "updated_at": "", "browser_download_url": full_url})
    except requests.RequestException as error:
        print(f"⚠️ [Scraper] Error fetching expanded assets for {username}/{repo_name}: {error}")
    return collected
72
+
73
+
74
def scrape_github_release_page(username: str, repo_name: str, version: Optional[str] = None) -> Optional[dict[str, Any]]:
    """Fallback release discovery: parse the public GitHub release HTML page.

    Used when the REST API is unavailable (e.g. rate-limited). Fetches either
    the tag-specific page (when *version* names a tag) or /releases/latest,
    then delegates to the extract_* helpers. Returns a dict shaped like the
    API payload ("tag_name", "name", "published_at", "assets"), or None when
    the page cannot be fetched or no tag can be identified.
    """
    try:
        wanted = (version or "").strip()
        explicit_tag = bool(wanted) and wanted.lower() != "latest"
        if explicit_tag:
            page_url = f"https://github.com/{username}/{repo_name}/releases/tag/{wanted}"
        else:
            page_url = f"https://github.com/{username}/{repo_name}/releases/latest"
        # A browser-like UA/Accept avoids GitHub serving a degraded response.
        browser_headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"}
        page = requests.get(page_url, timeout=30, headers=browser_headers, allow_redirects=True)
        if page.status_code != 200:
            print(f"❌ [Scraper] Failed to fetch page for {username}/{repo_name}: HTTP {page.status_code}")
            return None
        markup = page.text
        found_tag = extract_tag_from_html(markup, username, repo_name)
        if not found_tag:
            print(f"🔍 [Scraper] No tag found for {username}/{repo_name}")
            return None
        title = extract_release_name(markup) or found_tag
        timestamp = extract_published_at(markup)
        asset_list = fetch_expanded_assets(username, repo_name, found_tag, browser_headers)
        print(f"✅ [Scraper] Found {len(asset_list)} assets for {username}/{repo_name} @ {found_tag}")
        return {"tag_name": found_tag, "name": title, "published_at": timestamp, "assets": asset_list}
    except requests.RequestException as error:
        print(f"❌ [Scraper] Error fetching {username}/{repo_name}: {error}")
        return None
machineconfig/utils/io.py CHANGED
@@ -55,19 +55,36 @@ def read_ini(path: "Path", encoding: Optional[str] = None):
55
55
  return res
56
56
 
57
57
 
58
def remove_c_style_comments(text: str) -> str:
    """Strip C-style comments (// line and /* */ block) from *text*.

    URLs are shielded first so that the "//" inside "https://..." is not
    mistaken for a line comment: each URL is swapped for a unique placeholder,
    comments are removed, and the placeholders are swapped back.
    """
    import re
    # Map every URL occurrence to a unique placeholder token.
    url_hits = re.findall(r'https?://[^\s]*', text)
    stand_ins = {hit: f"__URL{slot}__" for slot, hit in enumerate(url_hits)}
    for original_url, token in stand_ins.items():
        text = text.replace(original_url, token)
    # With URLs shielded, remove // line comments, then /* ... */ blocks (non-greedy).
    without_line_comments = re.sub(r'//.*', '', text)
    cleaned = re.sub(r'/\*.*?\*/', '', without_line_comments, flags=re.DOTALL)
    # Restore the shielded URLs.
    for original_url, token in stand_ins.items():
        cleaned = cleaned.replace(token, original_url)
    return cleaned
80
+
81
+
58
82
  def read_json(path: "Path", r: bool = False, **kwargs: Any) -> Any: # return could be list or dict etc
59
83
  import json
60
84
  try:
61
85
  mydict = json.loads(Path(path).read_text(encoding="utf-8"), **kwargs)
62
86
  except Exception:
63
- import re
64
- def remove_comments(text: str) -> str:
65
- # remove all // single-line comments
66
- text = re.sub(r'//.*', '', text)
67
- # remove all /* … */ block comments (non-greedy)
68
- text = re.sub(r'/\*.*?\*/', '', text, flags=re.DOTALL)
69
- return text
70
- mydict = json.loads(remove_comments(Path(path).read_text(encoding="utf-8")), **kwargs)
87
+ mydict = json.loads(remove_c_style_comments(Path(path).read_text(encoding="utf-8")), **kwargs)
71
88
  _ = r
72
89
  return mydict
73
90
 
@@ -1,5 +1,5 @@
1
1
 
2
2
 
3
- MACHINECONFIG_VERSION = "machineconfig>=8.45"
3
+ MACHINECONFIG_VERSION = "machineconfig>=8.50"
4
4
  DEFAULT_PICKLE_SUBDIR = "tmp_results/tmp_scripts/ssh"
5
5
 
@@ -1,12 +1,21 @@
1
1
 
2
2
 
3
- from typing import Optional
4
- from pathlib import Path
3
+ from typing import Optional, TYPE_CHECKING
4
+ from pathlib import Path, PurePosixPath, PureWindowsPath
5
5
  from machineconfig.utils.accessories import randstr
6
6
  from machineconfig.utils.meta import lambda_to_python_script
7
7
  from machineconfig.utils.ssh_utils.abc import DEFAULT_PICKLE_SUBDIR
8
8
  from machineconfig.utils.code import get_uv_command
9
9
 
10
+ if TYPE_CHECKING:
11
+ from machineconfig.utils.ssh import SSH
12
+
13
+
14
+ def _build_remote_path(self: "SSH", home_dir: str, rel_path: str) -> str:
15
+ if self.remote_specs["system"] == "Windows":
16
+ return str(PureWindowsPath(home_dir) / rel_path)
17
+ return str(PurePosixPath(home_dir) / PurePosixPath(rel_path.replace("\\", "/")))
18
+
10
19
 
11
20
  def copy_from_here(
12
21
  self: "SSH", source_path: str, target_rel2home: Optional[str], compress_with_zip: bool, recursive: bool, overwrite_existing: bool
@@ -54,15 +63,14 @@ def copy_from_here(
54
63
  target_rel2home = target_rel2home + ".zip"
55
64
  if Path(target_rel2home).parent.as_posix() not in {"", "."}:
56
65
  self.create_parent_dir_and_check_if_exists(path_rel2home=target_rel2home, overwrite_existing=overwrite_existing)
57
- print(f"""📤 [SFTP UPLOAD] Sending file: {repr(source_obj)} ==> Remote Path: {target_rel2home}""")
66
+ remote_target_full = _build_remote_path(self, self.remote_specs["home_dir"], target_rel2home)
67
+ print(f"""📤 [SFTP UPLOAD] Sending file: {repr(source_obj)} ==> Remote Path: {remote_target_full}""")
58
68
  try:
59
69
  with self.tqdm_wrap(ascii=True, unit="b", unit_scale=True) as pbar:
60
70
  if self.sftp is None: # type: ignore[unreachable]
61
71
  raise RuntimeError(f"SFTP connection lost for {self.hostname}")
62
- print(f"Uploading {source_obj} to\n{Path(self.remote_specs['home_dir']).joinpath(target_rel2home)}")
63
- self.sftp.put(
64
- localpath=str(source_obj), remotepath=str(Path(self.remote_specs["home_dir"]).joinpath(target_rel2home)), callback=pbar.view_bar
65
- )
72
+ print(f"Uploading {source_obj} to\n{remote_target_full}")
73
+ self.sftp.put(localpath=str(source_obj), remotepath=remote_target_full, callback=pbar.view_bar)
66
74
  except Exception:
67
75
  if compress_with_zip and source_obj.exists() and str(source_obj).endswith("_archive.zip"):
68
76
  source_obj.unlink()
@@ -83,12 +91,9 @@ def copy_from_here(
83
91
  archive_handle.extractall(extraction_directory)
84
92
  archive_path.unlink()
85
93
 
94
+ remote_zip_path = _build_remote_path(self, self.remote_specs["home_dir"], target_rel2home)
86
95
  command = lambda_to_python_script(
87
- lambda: unzip_archive(
88
- zip_file_path=str(Path(self.remote_specs["home_dir"]).joinpath(target_rel2home)), overwrite_flag=overwrite_existing
89
- ),
90
- in_global=True,
91
- import_module=False,
96
+ lambda: unzip_archive(zip_file_path=remote_zip_path, overwrite_flag=overwrite_existing), in_global=True, import_module=False
92
97
  )
93
98
  tmp_py_file = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/create_target_dir_{randstr()}.py")
94
99
  tmp_py_file.parent.mkdir(parents=True, exist_ok=True)
@@ -1,15 +1,30 @@
1
1
 
2
- from pathlib import Path
2
+ from pathlib import Path, PurePosixPath, PureWindowsPath
3
3
  from machineconfig.utils.accessories import randstr
4
4
  from machineconfig.utils.meta import lambda_to_python_script
5
5
  from machineconfig.utils.ssh_utils.abc import MACHINECONFIG_VERSION, DEFAULT_PICKLE_SUBDIR
6
6
  from machineconfig.utils.code import get_uv_command
7
- from typing import Union
7
+ from typing import Union, TYPE_CHECKING
8
+
9
+ if TYPE_CHECKING:
10
+ from machineconfig.utils.ssh import SSH
11
+
12
+
13
+ def _build_remote_path(self: "SSH", home_dir: str, rel_path: str) -> str:
14
+ if self.remote_specs["system"] == "Windows":
15
+ return str(PureWindowsPath(home_dir) / rel_path)
16
+ return str(PurePosixPath(home_dir) / PurePosixPath(rel_path.replace("\\", "/")))
17
+
18
+
19
+ def _normalize_rel_path_for_remote(self: "SSH", rel_path: str) -> str:
20
+ if self.remote_specs["system"] == "Windows":
21
+ return str(PureWindowsPath(rel_path))
22
+ return rel_path.replace("\\", "/")
8
23
 
9
24
 
10
25
  def create_dir_and_check_if_exists(self: "SSH", path_rel2home: str, overwrite_existing: bool) -> None:
11
26
  """Helper to create a directory on remote machine and return its path."""
12
-
27
+ path_rel2home_normalized = _normalize_rel_path_for_remote(self, path_rel2home)
13
28
  def create_target_dir(target_rel2home: str, overwrite: bool):
14
29
  from pathlib import Path
15
30
  import shutil
@@ -26,7 +41,7 @@ def create_dir_and_check_if_exists(self: "SSH", path_rel2home: str, overwrite_ex
26
41
  print(f"Creating directory for path: {target_path_abs}")
27
42
  target_path_abs.parent.mkdir(parents=True, exist_ok=True)
28
43
  command = lambda_to_python_script(
29
- lambda: create_target_dir(target_rel2home=path_rel2home, overwrite=overwrite_existing),
44
+ lambda: create_target_dir(target_rel2home=path_rel2home_normalized, overwrite=overwrite_existing),
30
45
  in_global=True, import_module=False
31
46
  )
32
47
  tmp_py_file = Path.home().joinpath(f"{DEFAULT_PICKLE_SUBDIR}/create_target_dir_{randstr()}.py")
@@ -34,7 +49,8 @@ def create_dir_and_check_if_exists(self: "SSH", path_rel2home: str, overwrite_ex
34
49
  tmp_py_file.write_text(command, encoding="utf-8")
35
50
  assert self.sftp is not None
36
51
  tmp_remote_path = ".tmp_pyfile.py"
37
- self.sftp.put(localpath=str(tmp_py_file), remotepath=str(Path(self.remote_specs["home_dir"]).joinpath(tmp_remote_path)))
52
+ remote_tmp_full = _build_remote_path(self, self.remote_specs["home_dir"], tmp_remote_path)
53
+ self.sftp.put(localpath=str(tmp_py_file), remotepath=remote_tmp_full)
38
54
  resp = self.run_shell_cmd_on_remote(
39
55
  command=f"""{get_uv_command(platform=self.remote_specs['system'])} run python {tmp_remote_path}""",
40
56
  verbose_output=False,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: machineconfig
3
- Version: 8.45
3
+ Version: 8.50
4
4
  Summary: Dotfiles management package
5
5
  Author-email: Alex Al-Saffar <programmer@usa.com>
6
6
  License: Apache 2.0