machineconfig 5.11__py3-none-any.whl → 5.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



@@ -1,19 +1,6 @@
- # """CI
- # """
 
-
- # import time
  import platform
-
- # from typing import Any
- # from rich.console import Console
- # from machineconfig.utils.utils2 import pprint
- # # from rich.progress import track
  from machineconfig.utils.source_of_truth import LIBRARY_ROOT
- # from machineconfig.utils.installer import get_installed_cli_apps
- # from typing import Optional
- # from datetime import datetime
- # import csv
 
 
  APP_SUMMARY_PATH = LIBRARY_ROOT.joinpath(f"profile/records/{platform.system().lower()}/apps_summary_report.csv")
@@ -8,6 +8,7 @@ from rich.console import Console
  from rich.panel import Panel
  from rich.pretty import Pretty
  from rich.text import Text
+ from rich.table import Table
 
  from machineconfig.utils.path_extended import PathExtended
  from machineconfig.utils.links import symlink_func, symlink_copy
@@ -20,7 +21,7 @@ import os
  import ctypes
  import subprocess
  import tomllib
- from typing import Optional, Any, TypedDict
+ from typing import Optional, Any, TypedDict, Literal
 
  system = platform.system() # Linux or Windows
  ERROR_LIST: list[Any] = [] # append to this after every exception captured.
@@ -43,10 +44,35 @@ class SymlinkMapper(TypedDict):
  contents: Optional[bool]
 
 
- def apply_mapper(choice: Optional[str] = None):
+ class OperationRecord(TypedDict):
+ program: str
+ file_key: str
+ source: str
+ target: str
+ operation: str
+ action: Literal[
+ "already_linked",
+ "relinking",
+ "fixing_broken_link",
+ "identical_files",
+ "backing_up_source",
+ "backing_up_target",
+ "relinking_to_new_target",
+ "moving_to_target",
+ "new_link",
+ "new_link_and_target",
+ "linking",
+ "copying",
+ "error"
+ ]
+ details: str
+ status: str
+
+
+ def apply_mapper(choice: Optional[str], prioritize_to_this: bool):
  symlink_mapper: dict[str, dict[str, SymlinkMapper]] = tomllib.loads(LIBRARY_ROOT.joinpath("profile/mapper.toml").read_text(encoding="utf-8"))
- prioritize_to_this = True
  exclude: list[str] = [] # "wsl_linux", "wsl_windows"
+ operation_records: list[OperationRecord] = []
 
  program_keys_raw: list[str] = list(symlink_mapper.keys())
  program_keys: list[str] = []
@@ -64,10 +90,6 @@ def apply_mapper(choice: Optional[str] = None):
  return # terminate function.
  elif len(choice_selected) == 1 and choice_selected[0] == "all":
  choice_selected = "all" # i.e. program_keys = program_keys
- # overwrite = choose_from_options(msg="Overwrite existing source file?", options=["yes", "no"], default="yes") == "yes"
- from rich.prompt import Confirm
-
- prioritize_to_this = Confirm.ask("Overwrite existing source file?", default=True)
  else:
  choice_selected = choice
 
@@ -114,22 +136,85 @@ def apply_mapper(choice: Optional[str] = None):
  for file_key, file_map in symlink_mapper[program_key].items():
  this = PathExtended(file_map["this"])
  to_this = PathExtended(file_map["to_this"].replace("REPO_ROOT", REPO_ROOT.as_posix()).replace("LIBRARY_ROOT", LIBRARY_ROOT.as_posix()))
+
  if "contents" in file_map:
  try:
- for a_target in to_this.expanduser().search("*"):
- symlink_func(this=this.joinpath(a_target.name), to_this=a_target, prioritize_to_this=prioritize_to_this)
+ targets = list(to_this.expanduser().search("*"))
+ for a_target in targets:
+ result = symlink_func(this=this.joinpath(a_target.name), to_this=a_target, prioritize_to_this=prioritize_to_this)
+ operation_records.append({
+ "program": program_key,
+ "file_key": file_key,
+ "source": str(this.joinpath(a_target.name)),
+ "target": str(a_target),
+ "operation": "contents_symlink",
+ "action": result["action"],
+ "details": result["details"],
+ "status": "success"
+ })
  except Exception as ex:
  console.print(f"❌ [red]Config error[/red]: {program_key} | {file_key} | missing keys 'this ==> to_this'. {ex}")
- if "copy" in file_map:
+ operation_records.append({
+ "program": program_key,
+ "file_key": file_key,
+ "source": str(this),
+ "target": str(to_this),
+ "operation": "contents_symlink",
+ "action": "error",
+ "details": f"Failed to process contents: {str(ex)}",
+ "status": f"error: {str(ex)}"
+ })
+
+ elif "copy" in file_map:
  try:
- symlink_copy(this=this, to_this=to_this, prioritize_to_this=prioritize_to_this)
+ result = symlink_copy(this=this, to_this=to_this, prioritize_to_this=prioritize_to_this)
+ operation_records.append({
+ "program": program_key,
+ "file_key": file_key,
+ "source": str(this),
+ "target": str(to_this),
+ "operation": "copy",
+ "action": result["action"],
+ "details": result["details"],
+ "status": "success"
+ })
  except Exception as ex:
  console.print(f"❌ [red]Config error[/red]: {program_key} | {file_key} | {ex}")
+ operation_records.append({
+ "program": program_key,
+ "file_key": file_key,
+ "source": str(this),
+ "target": str(to_this),
+ "operation": "copy",
+ "action": "error",
+ "details": f"Failed to copy: {str(ex)}",
+ "status": f"error: {str(ex)}"
+ })
  else:
  try:
- symlink_func(this=this, to_this=to_this, prioritize_to_this=prioritize_to_this)
+ result = symlink_func(this=this, to_this=to_this, prioritize_to_this=prioritize_to_this)
+ operation_records.append({
+ "program": program_key,
+ "file_key": file_key,
+ "source": str(this),
+ "target": str(to_this),
+ "operation": "symlink",
+ "action": result["action"],
+ "details": result["details"],
+ "status": "success"
+ })
  except Exception as ex:
  console.print(f"❌ [red]Config error[/red]: {program_key} | {file_key} | missing keys 'this ==> to_this'. {ex}")
+ operation_records.append({
+ "program": program_key,
+ "file_key": file_key,
+ "source": str(this),
+ "target": str(to_this),
+ "operation": "symlink",
+ "action": "error",
+ "details": f"Failed to create symlink: {str(ex)}",
+ "status": f"error: {str(ex)}"
+ })
 
  if program_key == "ssh" and system == "Linux": # permissions of ~/dotfiles/.ssh should be adjusted
  try:
@@ -147,6 +232,35 @@ def apply_mapper(choice: Optional[str] = None):
  subprocess.run(f"chmod +x {LIBRARY_ROOT.joinpath(f'scripts/{system.lower()}')} -R", shell=True, capture_output=True, text=True)
  console.print("[green]✅ Script permissions updated[/green]")
 
+ # Display operation summary table
+ if operation_records:
+ table = Table(title="🔗 Symlink Operations Summary", show_header=True, header_style="bold magenta")
+ table.add_column("Program", style="cyan", no_wrap=True)
+ table.add_column("File Key", style="blue", no_wrap=True)
+ table.add_column("Source", style="green")
+ table.add_column("Target", style="yellow")
+ table.add_column("Operation", style="magenta", no_wrap=True)
+ table.add_column("Action", style="red", no_wrap=True)
+ table.add_column("Details", style="white")
+ table.add_column("Status", style="red", no_wrap=True)
+
+ for record in operation_records:
+ status_style = "green" if record["status"] == "success" else "red"
+ action_style = "green" if record["action"] != "error" else "red"
+ table.add_row(
+ record["program"],
+ record["file_key"],
+ record["source"],
+ record["target"],
+ record["operation"],
+ f"[{action_style}]{record['action']}[/{action_style}]",
+ record["details"],
+ f"[{status_style}]{record['status']}[/{status_style}]"
+ )
+
+ console.print("\n")
+ console.print(table)
+
  if len(ERROR_LIST) > 0:
  console.print(
  Panel(
@@ -169,7 +283,7 @@ def apply_mapper(choice: Optional[str] = None):
  def main_symlinks():
  console.print("")
  console.rule("[bold blue]🔗 CREATING SYMLINKS 🔗")
- apply_mapper(choice="all")
+ apply_mapper(choice="all", prioritize_to_this=True)
 
 
  def main_profile():
@@ -1,4 +1,3 @@
- from __future__ import annotations
 
  from typing import TYPE_CHECKING
  from git import Repo
@@ -7,14 +6,12 @@ from datetime import datetime
 
  from pathlib import Path
  from rich.progress import track
- import polars as pl
- import plotly.graph_objects as go
- import plotly.express as px
  import typer
 
 
  if TYPE_CHECKING:
  from typing import Any, Dict, List, Optional, Union
+ import polars as pl
 
 
  app = typer.Typer()
@@ -109,6 +106,10 @@ def analyze_over_time(repo_path: str = typer.Argument(..., help="Path to the git
  except Exception as e:
  print(f"❌ Error analyzing commits: {str(e)}")
  return
+
+ import polars as pl
+ import plotly.graph_objects as go
+
  df = pl.DataFrame(commit_data)
  df = df.sort("dtmExit")
  # Create interactive plotly figure with dark theme and all bells and whistles
@@ -183,6 +184,10 @@ def analyze_over_time(repo_path: str = typer.Argument(..., help="Path to the git
 
 
  def _print_python_files_by_size_impl(repo_path: str) -> "Union[pl.DataFrame, Exception]":
+ import polars as pl
+ import plotly.graph_objects as go
+ import plotly.express as px
+
  try:
  import os
  if not os.path.exists(repo_path):
@@ -3,7 +3,11 @@ import typer
 
 
  def analyze_repo_development(repo_path: str = typer.Argument(..., help="Path to the git repository")):
- cmd = f"""uv run --python 3.13 --with machineconfig machineconfig.scripts.python.count_lines analyze-over-time {repo_path}"""
+ from machineconfig.scripts.python import count_lines
+ from pathlib import Path
+ count_lines_path = Path(count_lines.__file__).resolve().parent.joinpath("count_lines.py")
+ # --project $HOME/code/machineconfig
+ cmd = f"""uv run --python 3.13 --with machineconfig --group plot {count_lines_path} analyze-over-time {repo_path}"""
  from machineconfig.utils.code import run_script
  run_script(cmd)
 
@@ -9,7 +9,6 @@ import typer
 
 
  app = typer.Typer(help=f"🛠️ DevOps operations @ machineconfig {__version__}", no_args_is_help=True)
-
  app.command(name="install", help="📦 Install essential packages")(installer_entry_point.main)
  app.command(name="share-terminal", help="📡 Share terminal via web browser")(share_terminal.main)
  app.add_typer(repos.app, name="repos", help="📁 Manage git repositories")
@@ -186,8 +186,8 @@ Set-Service -Name sshd -StartupType 'Automatic'"""
 
  if "retrieve_repositories" in selected_options:
  console.print(Panel("📚 [bold bright_magenta]REPOSITORIES[/bold bright_magenta]\n[italic]Project code retrieval[/italic]", border_style="bright_magenta"))
- from machineconfig.scripts.python import repos as module
- module.main(directory=str(Path.home() / "code"), capture=True, cloud="odg1")
+ from machineconfig.scripts.python import repos
+ repos.clone(directory=str(Path.home() / "code"), cloud="odg1")
 
  if "retrieve_data" in selected_options:
  console.print(Panel("💾 [bold bright_cyan]DATA RETRIEVAL[/bold bright_cyan]\n[italic]Backup restoration[/italic]", border_style="bright_cyan"))
@@ -6,22 +6,10 @@ in the event that username@github.com is not mentioned in the remote url.
  """
 
 
- from pathlib import Path
  from typing import Annotated, Optional
-
  import typer
 
 
- from machineconfig.utils.source_of_truth import CONFIG_PATH, DEFAULTS_PATH
- from pathlib import Path
-
-
- def _print_banner() -> None:
- typer.echo("\n" + "=" * 50)
- typer.echo("📂 Welcome to the Repository Manager")
- typer.echo("=" * 50 + "\n")
-
-
 
  app = typer.Typer(help="� Manage development repositories", no_args_is_help=True)
  sync_app = typer.Typer(help="� Manage repository specifications and syncing", no_args_is_help=True)
@@ -47,93 +35,14 @@ CloudOption = Annotated[
 
 
 
-
- def _resolve_directory(directory: Optional[str]) -> Path:
- if directory is None:
- directory = Path.cwd().as_posix()
- typer.echo(f"📁 Using directory: {directory}")
- return Path(directory).expanduser().absolute()
-
-
- def _git_operations(
- directory: Optional[str],
- *,
- pull: bool,
- commit: bool,
- push: bool,
- recursive: bool,
- no_sync: bool,
- ) -> None:
- _print_banner()
- repos_root = _resolve_directory(directory)
- auto_sync = not no_sync
- from machineconfig.scripts.python.repos_helper_action import perform_git_operations
- from machineconfig.utils.path_extended import PathExtended
- perform_git_operations(
- repos_root=PathExtended(repos_root),
- pull=pull,
- commit=commit,
- push=push,
- recursive=recursive,
- auto_sync=auto_sync,
- )
-
-
- def _resolve_spec_path(directory: Optional[str], cloud: Optional[str]) -> Path:
- repos_root = _resolve_directory(directory)
- from machineconfig.utils.path_extended import PathExtended
- if not repos_root.exists() or repos_root.name != "repos.json":
- candidate = Path(CONFIG_PATH).joinpath("repos").joinpath(PathExtended(repos_root).rel2home()).joinpath("repos.json")
- repos_root = candidate
- if not repos_root.exists():
- cloud_name: Optional[str]
- if cloud is None:
- from machineconfig.utils.io import read_ini
- cloud_name = read_ini(DEFAULTS_PATH)["general"]["rclone_config_name"]
- typer.echo(f"⚠️ Using default cloud: {cloud_name}")
- else:
- cloud_name = cloud
- assert cloud_name is not None, (
- f"Path {repos_root} does not exist and cloud was not passed. You can't clone without one of them."
- )
- from machineconfig.utils.path_extended import PathExtended
- PathExtended(repos_root).from_cloud(cloud=cloud_name, rel2home=True)
- assert repos_root.exists() and repos_root.name == "repos.json", (
- f"Path {repos_root} does not exist and cloud was not passed. You can't clone without one of them."
- )
- return repos_root
-
-
- def _clone_from_specs(
- directory: Optional[str],
- cloud: Optional[str],
- *,
- checkout_branch_flag: bool,
- checkout_commit_flag: bool,
- ) -> None:
- _print_banner()
- typer.echo("\n📥 Cloning or checking out repositories...")
- spec_path = _resolve_spec_path(directory, cloud)
- from machineconfig.scripts.python.repos_helper_clone import clone_repos
-
- clone_repos(
- spec_path=spec_path,
- preferred_remote=None,
- checkout_branch_flag=checkout_branch_flag,
- checkout_commit_flag=checkout_commit_flag,
- )
-
-
  @app.command()
- def push(
- directory: DirectoryArgument = None,
+ def push(directory: DirectoryArgument = None,
  recursive: RecursiveOption = False,
  no_sync: NoSyncOption = False,
  ) -> None:
  """🚀 Push changes across repositories."""
- _git_operations(directory, pull=False, commit=False, push=True, recursive=recursive, no_sync=no_sync)
-
-
+ from machineconfig.scripts.python.repos_helper import git_operations
+ git_operations(directory, pull=False, commit=False, push=True, recursive=recursive, no_sync=no_sync)
  @app.command()
  def pull(
  directory: DirectoryArgument = None,
@@ -141,9 +50,8 @@ def pull(
  no_sync: NoSyncOption = False,
  ) -> None:
  """⬇️ Pull changes across repositories."""
- _git_operations(directory, pull=True, commit=False, push=False, recursive=recursive, no_sync=no_sync)
-
-
+ from machineconfig.scripts.python.repos_helper import git_operations
+ git_operations(directory, pull=True, commit=False, push=False, recursive=recursive, no_sync=no_sync)
  @app.command()
  def commit(
  directory: DirectoryArgument = None,
@@ -151,9 +59,8 @@ def commit(
  no_sync: NoSyncOption = False,
  ) -> None:
  """💾 Commit changes across repositories."""
- _git_operations(directory, pull=False, commit=True, push=False, recursive=recursive, no_sync=no_sync)
-
-
+ from machineconfig.scripts.python.repos_helper import git_operations
+ git_operations(directory, pull=False, commit=True, push=False, recursive=recursive, no_sync=no_sync)
  @app.command()
  def all(
  directory: DirectoryArgument = None,
@@ -161,40 +68,44 @@ def all(
  no_sync: NoSyncOption = False,
  ) -> None:
  """🔄 Pull, commit, and push changes across repositories."""
- _git_operations(directory, pull=True, commit=True, push=True, recursive=recursive, no_sync=no_sync)
+ from machineconfig.scripts.python.repos_helper import git_operations
+ git_operations(directory, pull=True, commit=True, push=True, recursive=recursive, no_sync=no_sync)
 
 
  @sync_app.command()
- def record(
+ def capture(
  directory: DirectoryArgument = None,
  cloud: CloudOption = None,
  ) -> None:
  """📝 Record repositories into a repos.json specification."""
- _print_banner()
- repos_root = _resolve_directory(directory)
+ from machineconfig.scripts.python.repos_helper import print_banner, resolve_directory
+ print_banner()
+ repos_root = resolve_directory(directory)
  from machineconfig.scripts.python.repos_helper_record import main as record_repos
  save_path = record_repos(repos_root=repos_root)
  from machineconfig.utils.path_extended import PathExtended
  if cloud is not None:
  PathExtended(save_path).to_cloud(rel2home=True, cloud=cloud)
-
-
  @sync_app.command()
- def capture(
+ def clone(
  directory: DirectoryArgument = None,
  cloud: CloudOption = None,
  ) -> None:
  """📥 Clone repositories described by a repos.json specification."""
- _clone_from_specs(directory, cloud, checkout_branch_flag=False, checkout_commit_flag=False)
+ from machineconfig.scripts.python.repos_helper import print_banner, clone_from_specs
+ print_banner()
+ clone_from_specs(directory, cloud, checkout_branch_flag=False, checkout_commit_flag=False)
 
 
- @sync_app.command(name="checkout")
+ @sync_app.command(name="checkout-to-commit")
  def checkout_command(
  directory: DirectoryArgument = None,
  cloud: CloudOption = None,
  ) -> None:
  """🔀 Check out specific commits listed in the specification."""
- _clone_from_specs(directory, cloud, checkout_branch_flag=False, checkout_commit_flag=True)
+ from machineconfig.scripts.python.repos_helper import print_banner, clone_from_specs
+ print_banner()
+ clone_from_specs(directory, cloud, checkout_branch_flag=False, checkout_commit_flag=True)
 
 
  @sync_app.command(name="checkout-to-branch")
@@ -203,7 +114,9 @@ def checkout_to_branch_command(
  cloud: CloudOption = None,
  ) -> None:
  """🔀 Check out to the main branch defined in the specification."""
- _clone_from_specs(directory, cloud, checkout_branch_flag=True, checkout_commit_flag=False)
+ from machineconfig.scripts.python.repos_helper import print_banner, clone_from_specs
+ print_banner()
+ clone_from_specs(directory, cloud, checkout_branch_flag=True, checkout_commit_flag=False)
 
 
  @app.command()
@@ -211,9 +124,9 @@ def analyze(
  directory: DirectoryArgument = None,
  ) -> None:
  """📊 Analyze repository development over time."""
- _print_banner()
+ from machineconfig.scripts.python.repos_helper import print_banner
+ print_banner()
  repo_path = directory if directory is not None else "."
- from machineconfig.scripts.python.count_lines_frontend import analyze_repo_development as _analyze
-
- _analyze(repo_path=repo_path)
+ from machineconfig.scripts.python.count_lines_frontend import analyze_repo_development
+ analyze_repo_development(repo_path=repo_path)
 
@@ -0,0 +1,85 @@
+
+
+
+ from typing import Optional
+ from pathlib import Path
+ from machineconfig.utils.source_of_truth import CONFIG_PATH, DEFAULTS_PATH
+
+ import typer
+
+
+
+ def print_banner() -> None:
+ typer.echo("\n" + "=" * 50)
+ typer.echo("📂 Welcome to the Repository Manager")
+ typer.echo("=" * 50 + "\n")
+
+
+
+ def resolve_directory(directory: Optional[str]) -> Path:
+ if directory is None:
+ directory = Path.cwd().as_posix()
+ typer.echo(f"📁 Using directory: {directory}")
+ return Path(directory).expanduser().absolute()
+ def git_operations(
+ directory: Optional[str],
+ *,
+ pull: bool,
+ commit: bool,
+ push: bool,
+ recursive: bool,
+ no_sync: bool,
+ ) -> None:
+ print_banner()
+ repos_root = resolve_directory(directory)
+ auto_sync = not no_sync
+ from machineconfig.scripts.python.repos_helper_action import perform_git_operations
+ from machineconfig.utils.path_extended import PathExtended
+ perform_git_operations(
+ repos_root=PathExtended(repos_root),
+ pull=pull,
+ commit=commit,
+ push=push,
+ recursive=recursive,
+ auto_sync=auto_sync,
+ )
+ def resolve_spec_path(directory: Optional[str], cloud: Optional[str]) -> Path:
+ repos_root = resolve_directory(directory)
+ from machineconfig.utils.path_extended import PathExtended
+ if not repos_root.exists() or repos_root.name != "repos.json":
+ candidate = Path(CONFIG_PATH).joinpath("repos").joinpath(PathExtended(repos_root).rel2home()).joinpath("repos.json")
+ repos_root = candidate
+ if not repos_root.exists():
+ cloud_name: Optional[str]
+ if cloud is None:
+ from machineconfig.utils.io import read_ini
+ cloud_name = read_ini(DEFAULTS_PATH)["general"]["rclone_config_name"]
+ typer.echo(f"⚠️ Using default cloud: {cloud_name}")
+ else:
+ cloud_name = cloud
+ assert cloud_name is not None, (
+ f"Path {repos_root} does not exist and cloud was not passed. You can't clone without one of them."
+ )
+ from machineconfig.utils.path_extended import PathExtended
+ PathExtended(repos_root).from_cloud(cloud=cloud_name, rel2home=True)
+ assert repos_root.exists() and repos_root.name == "repos.json", (
+ f"Path {repos_root} does not exist and cloud was not passed. You can't clone without one of them."
+ )
+ return repos_root
+ def clone_from_specs(
+ directory: Optional[str],
+ cloud: Optional[str],
+ *,
+ checkout_branch_flag: bool,
+ checkout_commit_flag: bool,
+ ) -> None:
+ print_banner()
+ typer.echo("\n📥 Cloning or checking out repositories...")
+ spec_path = resolve_spec_path(directory, cloud)
+ from machineconfig.scripts.python.repos_helper_clone import clone_repos
+ clone_repos(
+ spec_path=spec_path,
+ preferred_remote=None,
+ checkout_branch_flag=checkout_branch_flag,
+ checkout_commit_flag=checkout_commit_flag,
+ )
@@ -7,6 +7,9 @@ from dataclasses import dataclass
  from enum import Enum
 
  from rich import print as pprint
+ from rich.table import Table
+ from rich.panel import Panel
+ from rich.columns import Columns
 
 
  class GitAction(Enum):
@@ -191,87 +194,127 @@ def git_action(path: PathExtended, action: GitAction, mess: Optional[str] = None
 
 
  def print_git_operations_summary(summary: GitOperationSummary, operations_performed: list[str]) -> None:
- """Print a detailed summary of git operations similar to repos_helper_record.py."""
- print("\n📊 Git Operations Summary:")
- print(f" Total paths processed: {summary.total_paths_processed}")
- print(f" Git repositories found: {summary.git_repos_found}")
- print(f" Non-git paths skipped: {summary.non_git_paths}")
-
- # Show per-operation statistics
+ """Print a detailed summary of git operations with rich formatting and tables."""
+ from rich.console import Console
+ console = Console()
+
+ # Main summary panel
+ summary_stats = [
+ f"Total paths processed: {summary.total_paths_processed}",
+ f"Git repositories found: {summary.git_repos_found}",
+ f"Non-git paths skipped: {summary.non_git_paths}"
+ ]
+
+ console.print(Panel.fit(
+ "\n".join(summary_stats),
+ title="[bold blue]📊 Git Operations Summary[/bold blue]",
+ border_style="blue"
+ ))
+
+ # Statistics panels in columns
+ stat_panels = []
+
  if "commit" in operations_performed:
- print("\n💾 Commit Operations:")
- print(f" Attempted: {summary.commits_attempted}")
- print(f" Successful: {summary.commits_successful}")
- print(f" No changes: {summary.commits_no_changes}")
- print(f" Failed: {summary.commits_failed}")
-
+ commit_stats = [
+ f"Attempted: {summary.commits_attempted}",
+ f"Successful: {summary.commits_successful}",
+ f"No changes: {summary.commits_no_changes}",
+ f"Failed: {summary.commits_failed}"
+ ]
+ stat_panels.append(Panel.fit(
+ "\n".join(commit_stats),
+ title="[bold green]💾 Commit Operations[/bold green]",
+ border_style="green"
+ ))
+
  if "pull" in operations_performed:
- print("\n⬇️ Pull Operations:")
- print(f" Attempted: {summary.pulls_attempted}")
- print(f" Successful: {summary.pulls_successful}")
- print(f" Failed: {summary.pulls_failed}")
-
+ pull_stats = [
+ f"Attempted: {summary.pulls_attempted}",
+ f"Successful: {summary.pulls_successful}",
+ f"Failed: {summary.pulls_failed}"
+ ]
+ stat_panels.append(Panel.fit(
+ "\n".join(pull_stats),
+ title="[bold cyan]⬇️ Pull Operations[/bold cyan]",
+ border_style="cyan"
+ ))
+
  if "push" in operations_performed:
- print("\n🚀 Push Operations:")
- print(f" Attempted: {summary.pushes_attempted}")
- print(f" Successful: {summary.pushes_successful}")
- print(f" Failed: {summary.pushes_failed}")
+ push_stats = [
+ f"Attempted: {summary.pushes_attempted}",
+ f"Successful: {summary.pushes_successful}",
+ f"Failed: {summary.pushes_failed}"
+ ]
+ stat_panels.append(Panel.fit(
+ "\n".join(push_stats),
+ title="[bold magenta]🚀 Push Operations[/bold magenta]",
+ border_style="magenta"
+ ))
 
- # Show repositories without remotes (important for push operations)
+ if stat_panels:
+ console.print(Columns(stat_panels, equal=True, expand=True))
+
+ # Repositories without remotes warning
  if summary.repos_without_remotes:
- print(f"\n⚠️ WARNING: {len(summary.repos_without_remotes)} repositories have no remote configurations:")
+ repos_table = Table(title="[bold yellow]⚠️ Repositories Without Remotes[/bold yellow]")
+ repos_table.add_column("Repository Name", style="cyan", no_wrap=True)
+ repos_table.add_column("Full Path", style="dim")
+
  for repo_path in summary.repos_without_remotes:
- print(f" • {repo_path.name} ({repo_path})")
- print(" These repositories cannot be pushed to remote servers.")
- else:
- if "push" in operations_performed:
- print("\n✅ All repositories have remote configurations.")
+ repos_table.add_row(repo_path.name, str(repo_path))
 
- # Show failed operations
+ console.print(repos_table)
+ console.print("[yellow]These repositories cannot be pushed to remote servers.[/yellow]")
+ elif "push" in operations_performed:
+ console.print("[green]✅ All repositories have remote configurations.[/green]")
+
+ # Failed operations table
  if summary.failed_operations:
- print(f"\nFAILED OPERATIONS ({len(summary.failed_operations)} total):")
-
- # Group failed operations by type
+ failed_table = Table(title=f"[bold red]Failed Operations ({len(summary.failed_operations)} total)[/bold red]")
+ failed_table.add_column("Action", style="bold red", no_wrap=True)
+ failed_table.add_column("Repository", style="cyan", no_wrap=True)
+ failed_table.add_column("Problem", style="red")
+
+ # Group failed operations by type for better organization
  failed_by_action = {}
  for failed_op in summary.failed_operations:
  if failed_op.action not in failed_by_action:
  failed_by_action[failed_op.action] = []
  failed_by_action[failed_op.action].append(failed_op)
-
+
  for action, failures in failed_by_action.items():
- print(f"\n {action.upper()} failures ({len(failures)}):")
  for failure in failures:
- if not failure.is_git_repo:
- print(f" • {failure.repo_path.name} ({failure.repo_path}) - Not a git repository")
- else:
- print(f" • {failure.repo_path.name} ({failure.repo_path}) - {failure.message}")
+ repo_name = failure.repo_path.name if failure.is_git_repo else f"{failure.repo_path.name} (not git repo)"
+ problem = failure.message if failure.is_git_repo else "Not a git repository"
+ failed_table.add_row(action.upper(), repo_name, problem)
+
+ console.print(failed_table)
  else:
- print("\n✅ All git operations completed successfully!")
+ console.print("[green]✅ All git operations completed successfully![/green]")
 
  # Overall success assessment
  total_failed = len(summary.failed_operations)
- total_operations = (summary.commits_attempted + summary.pulls_attempted +
+ total_operations = (summary.commits_attempted + summary.pulls_attempted +
  summary.pushes_attempted)
-
+
  if total_failed == 0 and total_operations > 0:
- print(f"\n🎉 SUCCESS: All {total_operations} operations completed successfully!")
+ console.print(f"\n[bold green]🎉 SUCCESS: All {total_operations} operations completed successfully![/bold green]")
  elif total_operations == 0:
- print("\n📝 No git operations were performed.")
+ console.print("\n[blue]📝 No git operations were performed.[/blue]")
  else:
  success_rate = ((total_operations - total_failed) / total_operations * 100) if total_operations > 0 else 0
- print(f"\n⚖️ SUMMARY: {total_operations - total_failed}/{total_operations} operations succeeded ({success_rate:.1f}% success rate)")
  if total_failed > 0:
- print(" Review the failed operations above for details on what needs attention.")
+ console.print(f"\n[bold yellow]⚖️ SUMMARY: {total_operations - total_failed}/{total_operations} operations succeeded ({success_rate:.1f}% success rate)[/bold yellow]")
+ console.print("[yellow]Review the failed operations table above for details on what needs attention.[/yellow]")
+ else:
+ console.print(f"\n[bold green]⚖️ SUMMARY: {total_operations}/{total_operations} operations succeeded (100% success rate)[/bold green]")
 
 
  def perform_git_operations(repos_root: PathExtended, pull: bool, commit: bool, push: bool, recursive: bool, auto_sync: bool) -> None:
  """Perform git operations on all repositories and provide detailed summary."""
  print(f"\n🔄 Performing Git actions on repositories @ `{repos_root}`...")
-
- # Initialize summary tracking
  summary = GitOperationSummary()
- operations_performed = []
-
+ operations_performed = []
  # Determine which operations to perform
  if pull:
  operations_performed.append("pull")
@@ -2,11 +2,62 @@ from machineconfig.utils.path_extended import PathExtended, PLike
  from machineconfig.utils.accessories import randstr
  from rich.console import Console
  from rich.panel import Panel
-
+ import hashlib
+ from typing import TypedDict, Literal
 
  console = Console()
 
 
+ class SymlinkResult(TypedDict):
+ action: Literal[
+ "already_linked",
+ "relinking",
+ "fixing_broken_link",
+ "identical_files",
+ "backing_up_source",
+ "backing_up_target",
+ "relinking_to_new_target",
+ "moving_to_target",
+ "new_link",
+ "new_link_and_target",
+ "linking",
+ "error"
+ ]
+ details: str
+
+
+ class CopyResult(TypedDict):
+ action: Literal[
+ "already_linked",
+ "relinking",
+ "fixing_broken_link",
+ "backing_up_source",
+ "backing_up_target",
+ "relinking_to_new_target",
+ "moving_to_target",
+ "new_link",
+ "new_link_and_target",
+ "copying",
+ "error"
+ ]
+ details: str
+
+
+ def files_are_identical(file1: PathExtended, file2: PathExtended) -> bool:
+ """Check if two files are identical by comparing their SHA256 hashes."""
+ def get_file_hash(path: PathExtended) -> str:
+ hash_sha256 = hashlib.sha256()
+ with open(path, "rb") as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ hash_sha256.update(chunk)
+ return hash_sha256.hexdigest()
+
+ try:
+ return get_file_hash(file1) == get_file_hash(file2)
+ except (OSError, IOError):
+ return False
+
+
  def build_links(target_paths: list[tuple[PLike, str]], repo_root: PLike):
  """Build symboic links from various relevant paths (e.g. data) to `repo_root/links/<name>` to facilitate easy access from
  tree explorer of the IDE.
@@ -33,12 +84,14 @@ def build_links(target_paths: list[tuple[PLike, str]], repo_root: PLike):
  links_path.symlink_to(target=a_target_path)
 
 
- def symlink_func(this: PathExtended, to_this: PathExtended, prioritize_to_this: bool):
+ def symlink_func(this: PathExtended, to_this: PathExtended, prioritize_to_this: bool) -> SymlinkResult:
  """helper function. creates a symlink from `this` to `to_this`.
 
+ Returns a dict with 'action' and 'details' keys describing what was done.
+
  this: exists AND to_this exists AND this is a symlink pointing to to_this ===> Resolution: AUTO: do nothing, already linked correctly.
  this: exists AND to_this exists AND this is a symlink pointing to somewhere else ===> Resolution: AUTO: delete this symlink, create symlink to to_this
- this: exists AND to_this exists AND this is a concrete path ===> Resolution: DANGER: require user input to decide (param prioritize_to_this). Give two options: 1) prioritize `this`: to_this is backed up as to_this.orig_<randstr()>, to_this is deleted, and symlink is created from this to to_this as normal; 2) prioritize `to_this`: `this` is backed up as this.orig_<randstr()>, `this` is deleted, and symlink is created from this to to_this as normal.
+ this: exists AND to_this exists AND this is a concrete path ===> Resolution: DANGER: If files are identical (same hash), delete `this` and create symlink to `to_this`. Otherwise, two options: 1) prioritize `this`: to_this is backed up as to_this.orig_<randstr()>, to_this is deleted, and symlink is created from this to to_this as normal; 2) prioritize `to_this`: `this` is backed up as this.orig_<randstr()>, `this` is deleted, and symlink is created from this to to_this as normal.
 
  this: exists AND to_this doesn't exist AND this is a symlink pointing to somewhere else ===> Resolution: AUTO: delete this symlink, create symlink to to_this (touch to_this)
  this: exists AND to_this doesn't exist AND this is a symlink pointing to to_this ===> Resolution: AUTO: delete this symlink, create symlink to to_this (touch to_this)
@@ -50,6 +103,9 @@ def symlink_func(this: PathExtended, to_this: PathExtended, prioritize_to_this:
  """
  this = PathExtended(this).expanduser().absolute()
  to_this = PathExtended(to_this).expanduser().absolute()
+ action_taken = ""
+ details = ""
+
  # Case analysis based on docstring
  if this.exists():
  if to_this.exists():
@@ -58,33 +114,53 @@ def symlink_func(this: PathExtended, to_this: PathExtended, prioritize_to_this:
  try:
  if this.readlink().resolve() == to_this.resolve():
  # Case: this exists AND to_this exists AND this is a symlink pointing to to_this
+ action_taken = "already_linked"
+ details = "Symlink already correctly points to target"
  console.print(Panel(f"✅ ALREADY LINKED | {this} ➡️ {to_this}", title="Already Linked", expand=False))
- return
+ return {"action": action_taken, "details": details}
  else:
  # Case: this exists AND to_this exists AND this is a symlink pointing to somewhere else
+ action_taken = "relinking"
+ details = "Updated existing symlink to point to new target"
  console.print(Panel(f"🔄 RELINKING | Updating symlink from {this} ➡️ {to_this}", title="Relinking", expand=False))
  this.delete(sure=True)
  except OSError:
  # Broken symlink case
+ action_taken = "fixing_broken_link"
+ details = "Removed broken symlink and will create new one"
  console.print(Panel(f"🔄 FIXING BROKEN LINK | Fixing broken symlink from {this} ➡️ {to_this}", title="Fixing Broken Link", expand=False))
  this.delete(sure=True)
  else:
  # Case: this exists AND to_this exists AND this is a concrete path
- if prioritize_to_this:
- # prioritize `to_this`: `this` is backed up, `this` is deleted, symlink created
- backup_name = f"{this}.orig_{randstr()}"
- console.print(Panel(f"📦 BACKING UP | Moving {this} to {backup_name}, prioritizing {to_this}", title="Backing Up", expand=False))
- this.move(path=backup_name)
+ if files_are_identical(this, to_this):
+ # Files are identical, just delete this and create symlink
+ action_taken = "identical_files"
+ details = "Files identical, removed source and will create symlink"
+ console.print(Panel(f"🔗 IDENTICAL FILES | Files are identical, deleting {this} and creating symlink to {to_this}", title="Identical Files", expand=False))
+ this.delete(sure=True)
  else:
- # prioritize `this`: to_this is backed up, to_this is deleted, this content moved to to_this location
- backup_name = f"{to_this}.orig_{randstr()}"
- console.print(Panel(f"📦 BACKING UP | Moving {to_this} to {backup_name}, prioritizing {this}", title="Backing Up", expand=False))
- to_this.move(path=backup_name)
- this.move(path=to_this)
+ # Files are different, use prioritization logic
+ if prioritize_to_this:
+ # prioritize `to_this`: `this` is backed up, `this` is deleted, symlink created
+ backup_name = f"{this}.orig_{randstr()}"
+ action_taken = "backing_up_source"
+ details = f"Backed up source to {backup_name}, prioritizing target"
+ console.print(Panel(f"📦 BACKING UP | Moving {this} to {backup_name}, prioritizing {to_this}", title="Backing Up", expand=False))
+ this.move(path=backup_name)
+ else:
+ # prioritize `this`: to_this is backed up, to_this is deleted, this content moved to to_this location
+ backup_name = f"{to_this}.orig_{randstr()}"
+ action_taken = "backing_up_target"
+ details = f"Backed up target to {backup_name}, prioritizing source"
+ console.print(Panel(f"📦 BACKING UP | Moving {to_this} to {backup_name}, prioritizing {this}", title="Backing Up", expand=False))
+ to_this.move(path=backup_name)
+ this.move(path=to_this)
  else:
  # to_this doesn't exist
  if this.is_symlink():
  # Case: this exists AND to_this doesn't exist AND this is a symlink (pointing anywhere)
+ action_taken = "relinking_to_new_target"
+ details = "Removed existing symlink, will create target and new symlink"
  console.print(Panel(f"🔄 RELINKING | Updating symlink from {this} ➡️ {to_this}", title="Relinking", expand=False))
  this.delete(sure=True)
  # Create to_this
@@ -92,70 +168,111 @@ def symlink_func(this: PathExtended, to_this: PathExtended, prioritize_to_this:
  to_this.touch()
  else:
  # Case: this exists AND to_this doesn't exist AND this is a concrete path
+ action_taken = "moving_to_target"
+ details = "Moved source to target location, will create symlink"
  console.print(Panel(f"📁 MOVING | Moving {this} to {to_this}, then creating symlink", title="Moving", expand=False))
  this.move(path=to_this)
  else:
  # this doesn't exist
  if to_this.exists():
  # Case: this doesn't exist AND to_this exists
+ action_taken = "new_link"
+ details = "Creating new symlink to existing target"
  console.print(Panel(f"🆕 NEW LINK | Creating new symlink from {this} ➡️ {to_this}", title="New Link", expand=False))
  else:
  # Case: this doesn't exist AND to_this doesn't exist
+ action_taken = "new_link_and_target"
+ details = "Creating target file and new symlink"
  console.print(Panel(f"🆕 NEW LINK & TARGET | Creating {to_this} and symlink from {this} ➡️ {to_this}", title="New Link & Target", expand=False))
  to_this.parent.mkdir(parents=True, exist_ok=True)
  to_this.touch()
+
  # Create the symlink
  try:
+ action_taken = action_taken or "linking"
+ details = details or "Creating symlink"
  console.print(Panel(f"🔗 LINKING | Creating symlink from {this} ➡️ {to_this}", title="Linking", expand=False))
  PathExtended(this).symlink_to(target=to_this, verbose=True, overwrite=True)
+ return {"action": action_taken, "details": details}
  except Exception as ex:
+ action_taken = "error"
+ details = f"Failed to create symlink: {str(ex)}"
  console.print(Panel(f"❌ ERROR | Failed at linking {this} ➡️ {to_this}. Reason: {ex}", title="Error", expand=False))
+ return {"action": action_taken, "details": details}
 
 
- def symlink_copy(this: PathExtended, to_this: PathExtended, prioritize_to_this: bool):
+ def symlink_copy(this: PathExtended, to_this: PathExtended, prioritize_to_this: bool) -> CopyResult:
  this = PathExtended(this).expanduser().absolute()
  to_this = PathExtended(to_this).expanduser().absolute()
+ action_taken = ""
+ details = ""
+
  if this.exists():
  if to_this.exists():
  if this.is_symlink():
  try:
  if this.readlink().resolve() == to_this.resolve():
+ action_taken = "already_linked"
+ details = "Symlink already correctly points to target"
  console.print(Panel(f"✅ ALREADY LINKED | {this} ➡️ {to_this}", title="Already Linked", expand=False))
- return
+ return {"action": action_taken, "details": details}
  else:
+ action_taken = "relinking"
+ details = "Updated existing symlink to point to new target"
  console.print(Panel(f"🔄 RELINKING | Updating symlink from {this} ➡️ {to_this}", title="Relinking", expand=False))
  this.delete(sure=True)
  except OSError:
+ action_taken = "fixing_broken_link"
+ details = "Removed broken symlink and will create new one"
  console.print(Panel(f"🔄 FIXING BROKEN LINK | Fixing broken symlink from {this} ➡️ {to_this}", title="Fixing Broken Link", expand=False))
  this.delete(sure=True)
  else:
  if prioritize_to_this:
  backup_name = f"{this}.orig_{randstr()}"
+ action_taken = "backing_up_source"
+ details = f"Backed up source to {backup_name}, prioritizing target"
  console.print(Panel(f"📦 BACKING UP | Moving {this} to {backup_name}, prioritizing {to_this}", title="Backing Up", expand=False))
  this.move(path=backup_name)
  else:
  backup_name = f"{to_this}.orig_{randstr()}"
+ action_taken = "backing_up_target"
+ details = f"Backed up target to {backup_name}, prioritizing source"
  console.print(Panel(f"📦 BACKING UP | Moving {to_this} to {backup_name}, prioritizing {this}", title="Backing Up", expand=False))
  to_this.move(path=backup_name)
  this.move(path=to_this)
  else:
  if this.is_symlink():
+ action_taken = "relinking_to_new_target"
+ details = "Removed existing symlink, will create target and new symlink"
  console.print(Panel(f"🔄 RELINKING | Updating symlink from {this} ➡️ {to_this}", title="Relinking", expand=False))
  this.delete(sure=True)
  to_this.parent.mkdir(parents=True, exist_ok=True)
  to_this.touch()
  else:
+ action_taken = "moving_to_target"
+ details = "Moved source to target location, will copy"
  console.print(Panel(f"📁 MOVING | Moving {this} to {to_this}, then copying", title="Moving", expand=False))
  this.move(path=to_this)
  else:
  if to_this.exists():
+ action_taken = "new_link"
+ details = "Copying existing target to source location"
  console.print(Panel(f"🆕 NEW LINK | Copying {to_this} to {this}", title="New Link", expand=False))
  else:
+ action_taken = "new_link_and_target"
+ details = "Creating target file and copying to source"
  console.print(Panel(f"🆕 NEW LINK & TARGET | Creating {to_this} and copying to {this}", title="New Link & Target", expand=False))
  to_this.parent.mkdir(parents=True, exist_ok=True)
  to_this.touch()
+
  try:
+ action_taken = action_taken or "copying"
+ details = details or "Copying file"
  console.print(Panel(f"📋 COPYING | Copying {to_this} to {this}", title="Copying", expand=False))
  to_this.copy(path=this, overwrite=True, verbose=True)
+ return {"action": action_taken, "details": details}
  except Exception as ex:
+ action_taken = "error"
+ details = f"Failed to copy file: {str(ex)}"
  console.print(Panel(f"❌ ERROR | Failed at copying {to_this} to {this}. Reason: {ex}", title="Error", expand=False))
+ return {"action": action_taken, "details": details}
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: machineconfig
- Version: 5.11
+ Version: 5.13
  Summary: Dotfiles management package
  Author-email: Alex Al-Saffar <programmer@usa.com>
  License: Apache 2.0
@@ -78,11 +78,8 @@ machineconfig/jobs/linux/msc/cli_agents.sh,sha256=MMa_cd4yijI69c7tztTY1b0tl9I1EC
  machineconfig/jobs/linux/msc/lid.sh,sha256=09LeoSaXCGjCn7YxPcIFQpHroYdglJlEtFU2agarh3I,1302
  machineconfig/jobs/linux/msc/network.sh,sha256=dmISsh0hioDheinqee3qHfo2k7ClFx6G_GfGDxuflmc,1796
  machineconfig/jobs/python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- machineconfig/jobs/python/check_installations.py,sha256=ksuENfJg0NAo1mWLRnsj1MpZw7Ee7eKw1nWyUQeO7fg,11096
- machineconfig/jobs/python/create_bootable_media.py,sha256=KKtcPk0rFLQc4eNVP6nbeYX-P7Gpqi0HvfIcUM6rVVs,827
- machineconfig/jobs/python/python_cargo_build_share.py,sha256=vy1v32-7Tui4NK4wG5XC5hxavQ4BeMpKprUtqzBjut0,2081
+ machineconfig/jobs/python/check_installations.py,sha256=wOtvWzyJSxbuFueFfcOc4gX_UbTRWv6tWpRcG-3Ml_8,10780
  machineconfig/jobs/python/python_ve_symlink.py,sha256=Mw2SK_TDLK5Ct_mEESh_Pd-Rn-B1oBSp7a_9y_eZbqw,1140
- machineconfig/jobs/python/tasks.py,sha256=hrBDQOnBmcXtauTkicVgC8J2AOGcfdFfyx0K8eI6Coc,150
  machineconfig/jobs/python/vscode/api.py,sha256=Et0G-VUj13D1rshYMdDrw_CUYSO7Q6XRrEQO0WjVIKU,1683
  machineconfig/jobs/python/vscode/sync_code.py,sha256=f9hxMg_nkIsC0xvfQMboJbc-Jhap9YQrV7k7a5YSI1c,2333
  machineconfig/jobs/windows/start_terminal.ps1,sha256=wy0fGwgb4U7xaHsONDrR4V5u9JEkG5vtt4NZUBx0ro8,473
@@ -93,7 +90,7 @@ machineconfig/jobs/windows/archive/openssh-server_copy-ssh-id.ps1,sha256=-7pElYi
  machineconfig/jobs/windows/msc/cli_agents.bat,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  machineconfig/jobs/windows/msc/cli_agents.ps1,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  machineconfig/profile/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- machineconfig/profile/create.py,sha256=-BsFb-5-4ZweLGirRtTshBTtGuTtl1hCAC3tD12xgMQ,7745
+ machineconfig/profile/create.py,sha256=QtPOj0aXLTd7XoAu-jBOQyZ_UM0Axc-OC-LCPMZ1Q7U,12390
  machineconfig/profile/shell.py,sha256=eAAmYoROXX1V3vk9-jcRSnv03P2Wx3_N4UgFtUDLtKU,9199
  machineconfig/profile/records/generic/shares.toml,sha256=FduDztfyQtZcr5bfx-RSKhEEweweQSWfVXkKWnx8hCY,143
  machineconfig/profile/records/linux/apps_summary_report.csv,sha256=pw9djvaRUPalKDLn2sl3odcbD2_Zx3aEupsQ8UPfaaY,2738
@@ -145,10 +142,10 @@ machineconfig/scripts/python/cloud_manager.py,sha256=YN0DYLzPKtMBaks-EAVwFmkCu3X
  machineconfig/scripts/python/cloud_mount.py,sha256=GwcXbd5ohoHGESfX5edtCEl2-umDDxH_AZapmFSzc9E,6740
  machineconfig/scripts/python/cloud_repo_sync.py,sha256=8dnlHbQqRymPRU0v01pNIuaIvFeY4fReP7ewNSSCt34,9765
  machineconfig/scripts/python/cloud_sync.py,sha256=RWGpAfJ9fnN18yNBSgN44dzA38Hmd4879JL5r2pcyrM,3514
- machineconfig/scripts/python/count_lines.py,sha256=aVg91ArHg73swKNGMQzi_WlPnTLEbc8rkNZkCv_qpvI,15894
- machineconfig/scripts/python/count_lines_frontend.py,sha256=1DQn9YUbl5IYjjJ1fS5qEe60X-5ez6zZiXMQXVTA4-8,359
+ machineconfig/scripts/python/count_lines.py,sha256=ZexMRsV70pe9fhLbGuens9EP5gCf078EwTDRHRZo5A0,15960
+ machineconfig/scripts/python/count_lines_frontend.py,sha256=SCFCCYgGy96uVFAo8tcnAv-P_4ytqJOpWe0nzoPi8H8,572
  machineconfig/scripts/python/croshell.py,sha256=parFHSL859H00ExDpDBPHBFe_E_DrfVq6P8CpCGVK9A,8571
- machineconfig/scripts/python/devops.py,sha256=c5URta0jxlxi7fyNpUit5w7eZbQUaXpN59C6ZB_06Xk,3487
+ machineconfig/scripts/python/devops.py,sha256=JB4_M6S-nO3yqas8wtAlU2r6jsmHu_nlq7aoEOH-54Y,3486
  machineconfig/scripts/python/devops_add_identity.py,sha256=wvjNgqsLmqD2SxbNCW_usqfp0LI-TDvcJJKGOWt2oFw,3775
  machineconfig/scripts/python/devops_add_ssh_key.py,sha256=BXB-9RvuSZO0YTbnM2azeABW2ngLW4SKhhAGAieMzfw,6873
  machineconfig/scripts/python/devops_backup_retrieve.py,sha256=JLJHmi8JmZ_qVTeMW-qBEAYGt1fmfWXzZ7Gm-Q-GDcU,5585
@@ -165,14 +162,15 @@ machineconfig/scripts/python/fire_jobs_streamlit_helper.py,sha256=47DEQpj8HBSa-_
  machineconfig/scripts/python/ftpx.py,sha256=QfQTp-6jQP6yxfbLc5sKxiMtTgAgc8sjN7d17_uLiZc,9400
  machineconfig/scripts/python/get_zellij_cmd.py,sha256=e35-18hoXM9N3PFbvbizfkNY_-63iMicieWE3TbGcCQ,576
  machineconfig/scripts/python/gh_models.py,sha256=3BLfW25mBRiPO5VKtVm-nMlKLv-PaZDw7mObajq6F6M,5538
- machineconfig/scripts/python/interactive.py,sha256=Tmqes57K0Z1svEcxM6uOd6nSivwwQCthrupToeubDAo,11793
+ machineconfig/scripts/python/interactive.py,sha256=wjxwxU5KtCh8MgujCQjEQctZPpKfPc71lMVFLhODQFE,11769
  machineconfig/scripts/python/mount_nfs.py,sha256=aECrL64j9g-9rF49sVJAjGmzaoGgcMnl3g9v17kQF4c,3239
  machineconfig/scripts/python/mount_nw_drive.py,sha256=iru6AtnTyvyuk6WxlK5R4lDkuliVpPV5_uBTVVhXtjQ,1550
  machineconfig/scripts/python/mount_ssh.py,sha256=k2fKq3f5dKq_7anrFOlqvJoI_3U4EWNHLRZ1o3Lsy6M,2268
  machineconfig/scripts/python/onetimeshare.py,sha256=bmGsNnskym5OWfIhpOfZG5jq3m89FS0a6dF5Sb8LaZM,2539
  machineconfig/scripts/python/pomodoro.py,sha256=SPkfeoZGv8rylGiOyzQ7UK3aXZ3G2FIOuGkSuBUggOI,2019
- machineconfig/scripts/python/repos.py,sha256=QPmtDq1gkzWGMduHpDHPMUe-7qPO_GemjQZLNAU-SYo,7157
- machineconfig/scripts/python/repos_helper_action.py,sha256=6bQln9x2L_lOnvWwnTM_nJjkugl5LDDGHedVsz2zuI4,13320
+ machineconfig/scripts/python/repos.py,sha256=IidAfUx6jFs4dB8Wjq8ems8mS8X8jYFgvEhtCYdLs-A,4917
+ machineconfig/scripts/python/repos_helper.py,sha256=3jLdnNf1canpzi3JXiz5VA6UTUmLeNHuhjOWVl_thP0,3006
+ machineconfig/scripts/python/repos_helper_action.py,sha256=sXeOw5uHaK2GJixYW8qU_PD24mruGcQ59uf68ELC76A,14846
  machineconfig/scripts/python/repos_helper_clone.py,sha256=9vGb9NCXT0lkerPzOJjmFfhU8LSzE-_1LDvjkhgnal0,5461
  machineconfig/scripts/python/repos_helper_record.py,sha256=dtnnInQPn00u1cyr0oOgJ_jB12O3bSiNctwzC3W7_3w,10994
  machineconfig/scripts/python/repos_helper_update.py,sha256=AYyKIB7eQ48yoYmFjydIhRI1lV39TBv_S4_LCa-oKuQ,11042
@@ -381,7 +379,7 @@ machineconfig/utils/accessories.py,sha256=W_9dLzjwNTW5JQk_pe3B2ijQ1nA2-8Kdg2r7VB
  machineconfig/utils/code.py,sha256=S7uY5kLPxLcLlR7B2KHeYkenlysAYSPcxFiUYHXSxX8,5646
  machineconfig/utils/installer.py,sha256=xYM6tyctqLmr2lLXUKWgobTRufGIua31uspMXP4HGjY,9945
  machineconfig/utils/io.py,sha256=ZXB3aataS1IZ_0WMcCRSmoN1nbkvEO-bWYcs-TpngqU,2872
- machineconfig/utils/links.py,sha256=CndE3K0dRtEjp8qfjffs-DE3WzwNUJLRHikZE_dkWm0,10237
+ machineconfig/utils/links.py,sha256=S0XICdbcFESUqm5RINDrOf3O8G1b7QEADncXXcC8IQc,15520
  machineconfig/utils/notifications.py,sha256=vvdsY5IX6XEiILTnt5lNyHxhCi0ljdGX2T_67VRfrG4,9009
  machineconfig/utils/options.py,sha256=8pG-apcc28xxJ5BQiACsGNTKwWtkQyH3hCtzBEhokK8,8366
  machineconfig/utils/path_extended.py,sha256=Xjdn2AVnB8p1jfNMNe2kJutVa5zGnFFJVGZbw-Bp_hg,53200
@@ -407,8 +405,8 @@ machineconfig/utils/schemas/fire_agents/fire_agents_input.py,sha256=pTxvLzIpD5RF
  machineconfig/utils/schemas/installer/installer_types.py,sha256=QClRY61QaduBPJoSpdmTIdgS9LS-RvE-QZ-D260tD3o,1214
  machineconfig/utils/schemas/layouts/layout_types.py,sha256=TcqlZdGVoH8htG5fHn1KWXhRdPueAcoyApppZsPAPto,2020
  machineconfig/utils/schemas/repos/repos_types.py,sha256=ECVr-3IVIo8yjmYmVXX2mnDDN1SLSwvQIhx4KDDQHBQ,405
- machineconfig-5.11.dist-info/METADATA,sha256=HPKfljpArui1ViEtQWTnWtLDiyLzZTfrqnopro65cXY,8030
- machineconfig-5.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- machineconfig-5.11.dist-info/entry_points.txt,sha256=2afE1mw-o4MUlfxyX73SV02XaQI4SV_LdL2r6_CzhPU,1074
- machineconfig-5.11.dist-info/top_level.txt,sha256=porRtB8qms8fOIUJgK-tO83_FeH6Bpe12oUVC670teA,14
- machineconfig-5.11.dist-info/RECORD,,
+ machineconfig-5.13.dist-info/METADATA,sha256=a6BFT3NbESfTyf5oY3DJv-eiv3Nn24gYHgrIRydpaN8,8030
+ machineconfig-5.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ machineconfig-5.13.dist-info/entry_points.txt,sha256=2afE1mw-o4MUlfxyX73SV02XaQI4SV_LdL2r6_CzhPU,1074
+ machineconfig-5.13.dist-info/top_level.txt,sha256=porRtB8qms8fOIUJgK-tO83_FeH6Bpe12oUVC670teA,14
+ machineconfig-5.13.dist-info/RECORD,,
@@ -1,16 +0,0 @@
- # try ventory or netboot.xyz
-
- # # one can either install rufus: https://rufus.ie/en/
- # # however, to create bootable media with multiple OSs to choose from:
-
- # PathExtended(r'https://github.com/ventoy/Ventoy/archive/refs/tags/v1.0.78.zip').download().unzip().search[0]()
- # download_folder = PathExtended.home().joinpath("Downloads/os")
- # download_folder.mkdir(parents=True, exist_ok=True)
- # PathExtended(r'https://mirrors.layeronline.com/linuxmint/stable/21/linuxmint-21-cinnamon-64bit.iso').download(folder=download_folder)
- # download_folder2 = PathExtended.home().joinpath("Downloads/os")
- # download_folder2.mkdir(parents=True, exist_ok=True)
- # PathExtended(r'https://download.manjaro.org/kde/21.3.7/manjaro-kde-21.3.7-minimal-220816-linux515.iso').download(folder=download_folder2)
-
-
- # if __name__ == '__main__':
- # pass
@@ -1,58 +0,0 @@
- """
- cargo install
- """
-
- # from machineconfig.utils.path_reduced import P as PathExtended
- # import platform
-
-
- # def build_rust_executable(url: str=r"https://github.com/atanunq/viu"):
- # tool_name = url.split('/')[-1]
-
- # # move command is not required since tool will go to .cargo/bin which is in PATH by default.
- # # move_command = f"mv {exe} {tb.get_env().WindowsApps.as_posix()}/" if platform.platform() == "Windows" else f"sudo mv {exe} /usr/local/bin/"
- # # {move_command}
-
- # script = f"""
- # cd ~
- # git clone --depth 1 {url}
- # cd {tool_name}
- # cargo install --path .
- # """
- # print(f"""
- # {'=' * 150}
- # 🦀 CARGO BUILD | Building Rust project: {tool_name}
- # 📦 Source: {url}
- # {'=' * 150}
- # """)
- # if platform.system() == "Windows":
- # Terminal(stdout=None).run(f". {PathExtended.tmpfile(suffix='.ps1').write_text(script, encoding="utf-8")}", shell="pwsh").print()
- # else:
- # Terminal(stdout=None).run(script, shell="pwsh")
-
- # exe = PathExtended.home().joinpath(f".cargo/bin/{tool_name}" + (".exe" if platform.system() == "Windows" else ""))
-
- # try:
- # PathExtended.home().joinpath(tool_name).delete(sure=True)
- # except PermissionError:
- # print(f"""
- # {'⚠️' * 20}
- # ⚠️ WARNING | Permission error when cleaning up
- # 📂 Path: {PathExtended.home().joinpath(tool_name)}
- # {'⚠️' * 20}
- # """)
-
- # if platform.system() == "Windows":
- # exe = exe.move(folder=PathExtended.get_env().WindowsPaths().WindowsApps)
- # elif platform.system() in ["Linux", "Darwin"]:
- # Terminal().run(f"sudo mv {exe} /usr/local/bin")
- # exe = PathExtended(r"/usr/local/bin").joinpath(exe.name)
- # else:
- # raise NotImplementedError(f"🚫 Platform {platform.system()} not supported.")
- # share_link = exe.to_cloud("gdpo", share=True)
- # return share_link
-
-
- # after cargo install diskonaut
- # then mv ~/.cargo/bin/diskonaut.exe ~/AppData/Local/Microsoft/WindowsApps/
- # then bu_gdrive_sx.ps1 .\diskonaut.exe -sRz # zipping is vital to avoid security layers and keep file metadata.
@@ -1,3 +0,0 @@
- from machineconfig.scripts.python.devops_backup_retrieve import main_backup_retrieve
-
- program = main_backup_retrieve(direction="BACKUP", which="all")