machineconfig-3.0-py3-none-any.whl → machineconfig-3.2-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release: this version of machineconfig might be problematic.
- machineconfig/__init__.py +16 -17
- machineconfig/cluster/sessions_managers/enhanced_command_runner.py +6 -6
- machineconfig/cluster/sessions_managers/wt_local.py +7 -7
- machineconfig/cluster/sessions_managers/wt_local_manager.py +8 -8
- machineconfig/cluster/sessions_managers/wt_remote.py +8 -8
- machineconfig/cluster/sessions_managers/wt_remote_manager.py +6 -6
- machineconfig/cluster/sessions_managers/zellij_local.py +156 -77
- machineconfig/cluster/sessions_managers/zellij_local_manager.py +18 -15
- machineconfig/cluster/sessions_managers/zellij_remote.py +14 -61
- machineconfig/cluster/sessions_managers/zellij_remote_manager.py +15 -12
- machineconfig/cluster/sessions_managers/zellij_utils/example_usage.py +9 -3
- machineconfig/cluster/sessions_managers/zellij_utils/layout_generator.py +2 -2
- machineconfig/cluster/sessions_managers/zellij_utils/monitoring_types.py +40 -67
- machineconfig/cluster/sessions_managers/zellij_utils/process_monitor.py +19 -17
- machineconfig/cluster/sessions_managers/zellij_utils/remote_executor.py +2 -2
- machineconfig/cluster/sessions_managers/zellij_utils/session_manager.py +1 -1
- machineconfig/jobs/python/python_ve_symlink.py +1 -1
- machineconfig/profile/create.py +8 -0
- machineconfig/scripts/linux/kill_process +1 -1
- machineconfig/scripts/python/ai/mcinit.py +1 -2
- machineconfig/scripts/python/cloud_mount.py +1 -1
- machineconfig/scripts/python/cloud_repo_sync.py +1 -0
- machineconfig/scripts/python/cloud_sync.py +1 -0
- machineconfig/scripts/python/croshell.py +1 -0
- machineconfig/scripts/python/devops.py +11 -1
- machineconfig/scripts/python/devops_add_identity.py +1 -0
- machineconfig/scripts/python/devops_add_ssh_key.py +1 -0
- machineconfig/scripts/python/devops_backup_retrieve.py +1 -0
- machineconfig/scripts/python/devops_devapps_install.py +5 -7
- machineconfig/scripts/python/devops_update_repos.py +21 -20
- machineconfig/scripts/python/fire_agents.py +21 -11
- machineconfig/scripts/python/fire_agents_help_launch.py +4 -3
- machineconfig/scripts/python/fire_agents_help_search.py +1 -2
- machineconfig/scripts/python/fire_agents_load_balancer.py +8 -10
- machineconfig/scripts/python/fire_jobs.py +1 -0
- machineconfig/scripts/python/helpers/cloud_helpers.py +1 -0
- machineconfig/scripts/python/mount_nfs.py +2 -0
- machineconfig/scripts/python/mount_nw_drive.py +1 -1
- machineconfig/scripts/python/mount_ssh.py +1 -0
- machineconfig/scripts/python/repos.py +0 -2
- machineconfig/scripts/python/repos_helper_record.py +43 -66
- machineconfig/scripts/python/repos_helper_update.py +3 -2
- machineconfig/scripts/python/start_slidev.py +1 -0
- machineconfig/scripts/python/start_terminals.py +1 -1
- machineconfig/setup_windows/wt_and_pwsh/set_wt_settings.py +1 -0
- machineconfig/utils/code.py +0 -1
- machineconfig/utils/notifications.py +2 -2
- machineconfig/utils/procs.py +10 -15
- machineconfig/utils/schemas/fire_agents/fire_agents_input.py +9 -4
- machineconfig/utils/schemas/layouts/layout_types.py +2 -0
- machineconfig/utils/schemas/repos/repos_types.py +0 -3
- machineconfig/utils/ssh.py +9 -11
- machineconfig/utils/utils2.py +1 -2
- machineconfig/utils/ve.py +0 -1
- {machineconfig-3.0.dist-info → machineconfig-3.2.dist-info}/METADATA +1 -1
- {machineconfig-3.0.dist-info → machineconfig-3.2.dist-info}/RECORD +59 -59
- {machineconfig-3.0.dist-info → machineconfig-3.2.dist-info}/entry_points.txt +1 -0
- {machineconfig-3.0.dist-info → machineconfig-3.2.dist-info}/WHEEL +0 -0
- {machineconfig-3.0.dist-info → machineconfig-3.2.dist-info}/top_level.txt +0 -0
```diff
@@ -1,4 +1,3 @@
-
 from machineconfig.utils.source_of_truth import EXCLUDE_DIRS
 import fnmatch
 from pathlib import Path
@@ -79,4 +78,4 @@ def search_python_files(repo_root: Path, keyword: str) -> list[Path]:
             return
 
     _walk_and_filter(repo_root)
-    return matches
+    return matches
```
```diff
@@ -1,5 +1,3 @@
-
-
 from typing import Literal, TypeAlias
 from math import ceil
 from pathlib import Path
@@ -7,36 +5,36 @@ from pathlib import Path
 
 SPLITTING_STRATEGY: TypeAlias = Literal[
     "agent_cap",  # User decides number of agents, rows/tasks determined automatically
-    "task_rows"
+    "task_rows",  # User decides number of rows/tasks, number of agents determined automatically
 ]
 DEFAULT_AGENT_CAP = 6
 
 
 def chunk_prompts(prompt_material_path: Path, strategy: SPLITTING_STRATEGY, joiner: str, *, agent_cap: int | None, task_rows: int | None) -> list[str]:
     """Chunk prompts based on splitting strategy.
-
+
     Args:
         prompts: List of prompts to chunk
-        strategy: Either 'agent_cap' or 'task_rows'
+        strategy: Either 'agent_cap' or 'task_rows'
         agent_cap: Maximum number of agents (used with 'agent_cap' strategy)
         task_rows: Number of rows/tasks per agent (used with 'task_rows' strategy)
     """
     prompts = [p for p in prompt_material_path.read_text(encoding="utf-8", errors="ignore").split(joiner) if p.strip() != ""]  # drop blank entries
-
+
     if strategy == "agent_cap":
         if agent_cap is None:
             raise ValueError("agent_cap must be provided when using 'agent_cap' strategy")
-
+
         if len(prompts) <= agent_cap:
             return prompts
-
+
         print(f"Chunking {len(prompts)} prompts into groups for up to {agent_cap} agents because it exceeds the cap.")
         chunk_size = ceil(len(prompts) / agent_cap)
         grouped: list[str] = []
         for i in range(0, len(prompts), chunk_size):
             grouped.append(joiner.join(prompts[i : i + chunk_size]))
         return grouped
-
+
     elif strategy == "task_rows":
         if task_rows is None:
             raise ValueError("task_rows must be provided when using 'task_rows' strategy")
@@ -47,6 +45,6 @@ def chunk_prompts(prompt_material_path: Path, strategy: SPLITTING_STRATEGY, join
         for i in range(0, len(prompts), task_rows):
             grouped.append(joiner.join(prompts[i : i + task_rows]))
         return grouped
-
+
     else:
         raise ValueError(f"Unknown splitting strategy: {strategy}")
```
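For orientation, here is a minimal standalone sketch of the `agent_cap` arithmetic used by `chunk_prompts` above, assuming an in-memory prompt list and a newline joiner (both hypothetical, not read from a real prompt file):

```python
# Minimal sketch (not part of the package): the agent_cap chunking arithmetic.
from math import ceil

prompts = [f"task {i}" for i in range(1, 15)]  # 14 hypothetical prompts
agent_cap = 6
joiner = "\n"

chunk_size = ceil(len(prompts) / agent_cap)  # ceil(14 / 6) -> 3 prompts per agent
grouped = [joiner.join(prompts[i : i + chunk_size]) for i in range(0, len(prompts), chunk_size)]

print(len(grouped))              # 5 groups, i.e. at most agent_cap agents are spawned
print(grouped[0].splitlines())   # ['task 1', 'task 2', 'task 3']
```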
```diff
@@ -324,6 +324,7 @@ python -m machineconfig.cluster.templates.cli_click --file {choice_file} """
     # program_path.parent.mkdir(parents=True, exist_ok=True)
     # program_path.write_text(command, encoding="utf-8")
     import subprocess
+
     subprocess.run(command, shell=True, check=True)
 
 
@@ -50,6 +50,7 @@ local_mount_point={local_mount_point}
 """
     # PROGRAM_PATH.write_text(txt)
     import subprocess
+
     subprocess.run(txt, shell=True, check=True)
 
     print("✅ Mount paths prepared successfully!\n")
@@ -65,6 +66,7 @@ $driveLetter = "{driver_letter}"
 """
     # PROGRAM_PATH.write_text(txt)
     import subprocess
+
     subprocess.run(txt, shell=True, check=True)
     print("✅ Drive letter selected and configuration saved!\n")
 
@@ -1,4 +1,3 @@
-
 from pathlib import Path
 import platform
 
@@ -34,6 +33,7 @@ password='{password}'
 """
     # PROGRAM_PATH.write_text(txt, encoding="utf-8",)
     import subprocess
+
     subprocess.run(txt, shell=True, check=True)
     print("✅ Configuration saved successfully!\n")
 
```
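These hunks repeat one pattern: a shell snippet is assembled as a string and executed directly with `subprocess.run` instead of being written to `PROGRAM_PATH` first. A harmless, self-contained sketch of that pattern (the share name and mount point are placeholders):

```python
# Sketch of the "build script text, then run it" pattern seen above.
import subprocess

remote = "server:/export/data"    # hypothetical NFS share
local_mount_point = "/mnt/data"   # hypothetical mount point

txt = f"""
echo "would create {local_mount_point}"
echo "would mount {remote} at {local_mount_point}"
"""
subprocess.run(txt, shell=True, check=True)  # check=True raises CalledProcessError on a non-zero exit
```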
```diff
@@ -116,7 +116,6 @@ def main():
     if args.cloud is not None:
         PathExtended(save_path).to_cloud(rel2home=True, cloud=args.cloud)
 
-
     elif args.clone or args.checkout or args.checkout_to_branch:
         print("\n📥 Cloning or checking out repositories...")
         print(">>>>>>>>> Cloning Repos")
@@ -154,6 +153,5 @@ def main():
         print("❌ No action specified. Try passing --push, --pull, --commit, or --all.")
 
 
-
 if __name__ == "__main__":
     main()
```
```diff
@@ -1,4 +1,3 @@
-
 from machineconfig.utils.path_reduced import PathExtended as PathExtended
 from machineconfig.utils.schemas.repos.repos_types import GitVersionInfo, RepoRecordDict, RepoRemote
 
@@ -16,11 +15,11 @@ def build_tree_structure(repos: list[RepoRecordDict], repos_root: PathExtended)
     """Build a tree structure representation of all repositories."""
     if not repos:
         return "No repositories found."
-
+
     # Group repos by their parent directories relative to repos_root
     tree_dict: dict[str, list[RepoRecordDict]] = {}
     repos_root_abs = repos_root.expanduser().absolute()
-
+
     for repo in repos:
         parent_path = PathExtended(repo["parentDir"]).expanduser().absolute()
         try:
@@ -29,35 +28,35 @@ def build_tree_structure(repos: list[RepoRecordDict], repos_root: PathExtended)
         except ValueError:
             # If the path is not relative to repos_root, use the full path
             relative_str = str(parent_path)
-
+
         if relative_str not in tree_dict:
             tree_dict[relative_str] = []
         tree_dict[relative_str].append(repo)
-
+
     # Sort directories for consistent output
     sorted_dirs = sorted(tree_dict.keys())
-
+
     tree_lines: list[str] = []
     tree_lines.append(f"📂 {repos_root.name}/ ({repos_root_abs})")
-
+
     for i, dir_path in enumerate(sorted_dirs):
         is_last_dir = i == len(sorted_dirs) - 1
         dir_prefix = "└── " if is_last_dir else "├── "
-
+
         if dir_path:
             tree_lines.append(f"│ {dir_prefix}📁 {dir_path}/")
             repo_prefix_base = "│ │ " if not is_last_dir else " "
         else:
             repo_prefix_base = "│ "
-
+
         repos_in_dir = tree_dict[dir_path]
         # Sort repos by name
         repos_in_dir.sort(key=lambda x: x["name"])
-
+
         for j, repo in enumerate(repos_in_dir):
             is_last_repo = j == len(repos_in_dir) - 1
             repo_prefix = f"{repo_prefix_base}└── " if is_last_repo else f"{repo_prefix_base}├── "
-
+
             # Create status indicators
             status_indicators = []
             if repo["isDirty"]:
@@ -66,20 +65,20 @@ def build_tree_structure(repos: list[RepoRecordDict], repos_root: PathExtended)
                 status_indicators.append("⚠️ NO_REMOTE")
             if repo["currentBranch"] == "DETACHED":
                 status_indicators.append("🔀 DETACHED")
-
+
             status_str = f"[{' | '.join(status_indicators)}]" if status_indicators else "[✅ CLEAN]"
-            branch_info = f" ({repo['currentBranch']})" if repo[
-
+            branch_info = f" ({repo['currentBranch']})" if repo["currentBranch"] != "DETACHED" else ""
+
             # Build the base string without status
             base_str = f"{repo_prefix}📦 {repo['name']}{branch_info}"
-
+
             # Calculate padding to align status at 75 characters
             target_width = 45
             current_length = len(base_str)
             padding = max(1, target_width - current_length)  # At least 1 space
-
+
             tree_lines.append(f"{base_str}{' ' * padding}{status_str}")
-
+
     return "\n".join(tree_lines)
 
 
```
```diff
@@ -111,19 +110,9 @@ def record_a_repo(path: PathExtended, search_parent_directories: bool, preferred
     # Check if repo is dirty (has uncommitted changes)
     is_dirty = repo.is_dirty(untracked_files=True)
 
-    version_info: GitVersionInfo = {
-
-
-    }
-
-    res: RepoRecordDict = {
-        "name": repo_root.name,
-        "parentDir": repo_root.parent.collapseuser().as_posix(),
-        "currentBranch": current_branch,
-        "remotes": remotes,
-        "version": version_info,
-        "isDirty": is_dirty
-    }
+    version_info: GitVersionInfo = {"branch": current_branch, "commit": commit}
+
+    res: RepoRecordDict = {"name": repo_root.name, "parentDir": repo_root.parent.collapseuser().as_posix(), "currentBranch": current_branch, "remotes": remotes, "version": version_info, "isDirty": is_dirty}
     return res
 
 
```
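For reference, a record assembled by `record_a_repo` now looks roughly like the sketch below. The keys come from the diff; the values and the `RepoRemote` shape are hypothetical examples (the real TypedDicts live in `machineconfig/utils/schemas/repos/repos_types.py`):

```python
# Sketch of a single repository record as assembled above; values are made up.
version_info = {"branch": "main", "commit": "a1b2c3d"}

repo_record = {
    "name": "machineconfig",
    "parentDir": "~/code",
    "currentBranch": "main",
    "remotes": [{"name": "origin", "url": "https://example.com/user/machineconfig"}],  # assumed RepoRemote shape
    "version": version_info,
    "isDirty": False,
}
print(repo_record["name"], repo_record["version"]["commit"])
```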
```diff
@@ -132,16 +121,16 @@ def count_git_repositories(repos_root: str, r: bool) -> int:
     path_obj = PathExtended(repos_root).expanduser().absolute()
     if path_obj.is_file():
         return 0
-
+
     search_res = path_obj.search("*", files=False, folders=True)
     count = 0
-
+
     for a_search_res in search_res:
         if a_search_res.joinpath(".git").exists():
             count += 1
         elif r:
             count += count_git_repositories(str(a_search_res), r=r)
-
+
     return count
 
 
@@ -150,15 +139,15 @@ def count_total_directories(repos_root: str, r: bool) -> int:
     path_obj = PathExtended(repos_root).expanduser().absolute()
     if path_obj.is_file():
         return 0
-
+
     search_res = path_obj.search("*", files=False, folders=True)
     count = len(search_res)
-
+
     if r:
         for a_search_res in search_res:
             if not a_search_res.joinpath(".git").exists():
                 count += count_total_directories(str(a_search_res), r=r)
-
+
     return count
 
 
@@ -166,22 +155,22 @@ def record_repos_recursively(repos_root: str, r: bool, progress: Progress | None
     path_obj = PathExtended(repos_root).expanduser().absolute()
     if path_obj.is_file():
         return []
-
+
     search_res = path_obj.search("*", files=False, folders=True)
     res: list[RepoRecordDict] = []
-
+
     for a_search_res in search_res:
         if progress and scan_task_id:
             progress.update(scan_task_id, description=f"Scanning: {a_search_res.name}")
-
+
         if a_search_res.joinpath(".git").exists():
             try:
                 if progress and process_task_id:
                     progress.update(process_task_id, description=f"Recording: {a_search_res.name}")
-
+
                 repo_record = record_a_repo(a_search_res, search_parent_directories=False, preferred_remote=None)
                 res.append(repo_record)
-
+
                 if progress and process_task_id:
                     progress.update(process_task_id, advance=1, description=f"Recorded: {repo_record['name']}")
             except Exception as e:
@@ -189,57 +178,45 @@ def record_repos_recursively(repos_root: str, r: bool, progress: Progress | None
         else:
             if r:
                 res += record_repos_recursively(str(a_search_res), r=r, progress=progress, scan_task_id=scan_task_id, process_task_id=process_task_id)
-
+
         if progress and scan_task_id:
             progress.update(scan_task_id, advance=1)
-
+
     return res
 
 
 def main(repos_root: PathExtended):
     print("\n📝 Recording repositories...")
-
+
     # Count total directories and repositories for accurate progress tracking
     print("🔍 Analyzing directory structure...")
     total_dirs = count_total_directories(str(repos_root), r=True)
     total_repos = count_git_repositories(str(repos_root), r=True)
     print(f"📊 Found {total_dirs} directories to scan and {total_repos} git repositories to record")
-
+
     # Setup progress bars
-    with Progress(
-        SpinnerColumn(),
-        TextColumn("[progress.description]{task.description}"),
-        BarColumn(),
-        MofNCompleteColumn(),
-        TimeElapsedColumn(),
-    ) as progress:
+    with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), BarColumn(), MofNCompleteColumn(), TimeElapsedColumn()) as progress:
         scan_task = progress.add_task("Scanning directories...", total=total_dirs)
         process_task = progress.add_task("Recording repositories...", total=total_repos)
-
-        repo_records = record_repos_recursively(
-
-            r=True,
-            progress=progress,
-            scan_task_id=scan_task,
-            process_task_id=process_task
-        )
-
+
+        repo_records = record_repos_recursively(repos_root=str(repos_root), r=True, progress=progress, scan_task_id=scan_task, process_task_id=process_task)
+
     res: RepoRecordFile = {"version": "0.1", "repos": repo_records}
-
+
     # Summary with warnings
     total_repos = len(repo_records)
     repos_with_no_remotes = [repo for repo in repo_records if len(repo["remotes"]) == 0]
     repos_with_remotes = [repo for repo in repo_records if len(repo["remotes"]) > 0]
     dirty_repos = [repo for repo in repo_records if repo["isDirty"]]
     clean_repos = [repo for repo in repo_records if not repo["isDirty"]]
-
+
     print("\n📊 Repository Summary:")
     print(f" Total repositories found: {total_repos}")
     print(f" Repositories with remotes: {len(repos_with_remotes)}")
     print(f" Repositories without remotes: {len(repos_with_no_remotes)}")
     print(f" Clean repositories: {len(clean_repos)}")
     print(f" Dirty repositories: {len(dirty_repos)}")
-
+
     if repos_with_no_remotes:
         print(f"\n⚠️ WARNING: {len(repos_with_no_remotes)} repositories have no remotes configured:")
         for repo in repos_with_no_remotes:
@@ -257,12 +234,12 @@ def main(repos_root: PathExtended):
         print(" These repositories have uncommitted changes that may need attention.")
     else:
         print("\n✅ All repositories are clean (no uncommitted changes).")
-
+
     # Display repository tree structure
     print("\n🌳 Repository Tree Structure:")
     tree_structure = build_tree_structure(repo_records, repos_root)
     print(tree_structure)
-
+
     save_path = CONFIG_PATH.joinpath("repos").joinpath(repos_root.rel2home()).joinpath("repos.json")
     save_json(obj=res, path=save_path, indent=4)
     pprint(f"📁 Result saved at {PathExtended(save_path)}")
```
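The change above collapses the multi-line rich `Progress` constructor onto one line. A self-contained sketch of the same column setup with placeholder work:

```python
# Sketch of the rich progress-bar setup used above, with dummy work.
import time
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, MofNCompleteColumn, TimeElapsedColumn

with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), BarColumn(), MofNCompleteColumn(), TimeElapsedColumn()) as progress:
    scan_task = progress.add_task("Scanning directories...", total=10)
    for _ in range(10):
        time.sleep(0.05)  # stand-in for scanning one directory
        progress.update(scan_task, advance=1)
```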
```diff
@@ -4,9 +4,9 @@ import subprocess
 import git
 
 
-
 class RepositoryUpdateResult(TypedDict):
     """Result of updating a single repository."""
+
     repo_path: str
     status: str  # "success", "error", "skipped", "auth_failed"
     had_uncommitted_changes: bool
@@ -61,6 +61,7 @@ def get_file_hash(file_path: Path) -> str | None:
     if not file_path.exists():
         return None
     import hashlib
+
     return hashlib.sha256(file_path.read_bytes()).hexdigest()
 
 
```
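`get_file_hash` reduces to a single hashlib call; a standalone sketch:

```python
# Standalone sketch of the file-hashing helper shown above.
import hashlib
from pathlib import Path

def file_sha256(file_path: Path) -> str | None:
    """Return the SHA-256 hex digest of a file, or None if it does not exist."""
    if not file_path.exists():
        return None
    return hashlib.sha256(file_path.read_bytes()).hexdigest()

print(file_sha256(Path(__file__)))  # hash this script itself as a quick check
```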
```diff
@@ -262,4 +263,4 @@ def update_repository(repo: git.Repo, auto_sync: bool, allow_password_prompt: bo
         result["status"] = "error"
         result["error_message"] = str(e)
         print(f"❌ Error updating repository {repo_path}: {e}")
-    return result
+    return result
```
```diff
@@ -103,6 +103,7 @@ def main() -> None:
     program = "npm run dev slides.md -- --remote"
     # PROGRAM_PATH.write_text(program, encoding="utf-8")
     import subprocess
+
     subprocess.run(program, shell=True, cwd=SLIDEV_REPO)
     print_code(code=program, lexer="bash", desc="Run the following command to start the presentation")
 
@@ -1,6 +1,5 @@
 """Script to start terminals on windows and wsl"""
 
-
 from machineconfig.utils.options import display_options, get_ssh_hosts
 import platform
 from itertools import cycle
@@ -113,6 +112,7 @@ def main():
 
     # PROGRAM_PATH.write_text(cmd, encoding="utf-8")
     import subprocess
+
     subprocess.run(cmd, shell=True)
     print("✅ Command saved successfully!\n")
 
@@ -4,6 +4,7 @@ from machineconfig.utils.utils2 import randstr, read_json
 from machineconfig.utils.path_reduced import PathExtended as PathExtended
 from machineconfig.utils.io_save import save_json
 import platform
+
 # from uuid import uuid4
 import os
 from typing import Any
```
machineconfig/utils/code.py (CHANGED)
```diff
@@ -10,7 +10,6 @@ from machineconfig.utils.ve import get_ve_activate_line
 from machineconfig.utils.path_reduced import PathExtended as PathExtended
 
 
-
 def get_shell_script_executing_python_file(python_file: str, func: Optional[str], ve_path: str, strict_execution: bool = True):
     if func is None:
         exec_line = f"""python {python_file}"""
```
```diff
@@ -38,7 +38,7 @@ def md2html(body: str) -> str:
     markdown_obj = Markdown(body)
     console.print(markdown_obj)
     html_output = console.export_html(inline_styles=True)
-
+
     # Try to load GitHub CSS style, fallback to basic style if not found
     gh_style_path = Path(__file__).parent.joinpath("gh_style.css")
     if gh_style_path.exists():
@@ -51,7 +51,7 @@ def md2html(body: str) -> str:
         code { background-color: #f6f8fa; padding: 2px 4px; border-radius: 3px; }
         pre { background-color: #f6f8fa; padding: 16px; border-radius: 6px; overflow: auto; }
         """
-
+
     return f"""
     <!DOCTYPE html>
     <html>
```
machineconfig/utils/procs.py (CHANGED)
```diff
@@ -20,13 +20,10 @@ def get_processes_accessing_file(path: str):
     title = "🔍 SEARCHING FOR PROCESSES ACCESSING FILE"
     console.print(Panel(title, title="[bold blue]Process Info[/bold blue]", border_style="blue"))
     res: dict[int, list[str]] = {}
-
-    with Progress(
-        SpinnerColumn(),
-        TextColumn("[progress.description]{task.description}"),
-    ) as progress:
+
+    with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}")) as progress:
         progress.add_task("🔎 Scanning processes...", total=None)
-
+
         for proc in psutil.process_iter():
             try:
                 files = proc.open_files()
@@ -35,7 +32,7 @@ def get_processes_accessing_file(path: str):
                 tmp = [file.path for file in files if path in file.path]
                 if len(tmp) > 0:
                     res[proc.pid] = tmp
-
+
     # Convert to list of dictionaries for consistent data structure
     result_data = [{"pid": pid, "files": files} for pid, files in res.items()]
     console.print(Panel(f"✅ Found {len(res)} processes accessing the specified file", title="[bold blue]Process Info[/bold blue]", border_style="blue"))
@@ -61,13 +58,10 @@ class ProcessManager:
         title = "📊 INITIALIZING PROCESS MANAGER"
         console.print(Panel(title, title="[bold blue]Process Info[/bold blue]", border_style="blue"))
         process_info = []
-
-        with Progress(
-            SpinnerColumn(),
-            TextColumn("[progress.description]{task.description}"),
-        ) as progress:
+
+        with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}")) as progress:
            progress.add_task("🔍 Reading system processes...", total=None)
-
+
             for proc in psutil.process_iter():
                 try:
                     mem_usage_mb = proc.memory_info().rss / (1024 * 1024)
@@ -246,8 +240,9 @@ def get_age(create_time: Any) -> str:
 
 
 def main():
-    from machineconfig.utils.procs import ProcessManager
+    from machineconfig.utils.procs import ProcessManager
+    ProcessManager().choose_and_kill()
 
 
 if __name__ == "__main__":
-
+    pass
```
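A self-contained sketch of the psutil scan performed by `get_processes_accessing_file`, without the rich progress bar and with a placeholder target path:

```python
# Minimal sketch of the psutil scan above: find processes holding a given path open.
# The target path is a hypothetical example.
import psutil

target = "/tmp"
matches: dict[int, list[str]] = {}

for proc in psutil.process_iter():
    try:
        open_paths = [f.path for f in proc.open_files() if target in f.path]
    except (psutil.AccessDenied, psutil.NoSuchProcess):
        continue  # some processes cannot be inspected without elevated permissions
    if open_paths:
        matches[proc.pid] = open_paths

print(f"{len(matches)} processes have files under {target} open")
```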
```diff
@@ -14,33 +14,38 @@ SEARCH_STRATEGIES = Literal["file_path", "keyword_search", "filename_pattern"]
 
 class FilePathSearchInput(TypedDict):
     """Input for file_path search strategy."""
+
     file_path: str
     separator: str  # Default: "\n"
 
 
 class KeywordSearchInput(TypedDict):
     """Input for keyword_search strategy."""
+
     keyword: str
 
 
 class FilenamePatternSearchInput(TypedDict):
     """Input for filename_pattern search strategy."""
+
     pattern: str  # e.g., '*.py', '*test*', 'config.*'
 
 
 class AgentCapSplittingInput(TypedDict):
     """Input for agent_cap splitting strategy."""
+
     agent_cap: int  # Default: 6
 
 
 class TaskRowsSplittingInput(TypedDict):
     """Input for task_rows splitting strategy."""
+
     task_rows: int  # Default: 13
 
 
 class FireAgentsMainInput(TypedDict):
     """Complete input structure for fire_agents main function."""
-
+
     # Core configuration
     repo_root: Path
     search_strategy: SEARCH_STRATEGIES
@@ -50,12 +55,12 @@ class FireAgentsMainInput(TypedDict):
     job_name: str  # Default: "AI_Agents"
     keep_material_in_separate_file: bool  # Default: False
     max_agents: int  # Default: 25
-
+
     # Search strategy specific inputs (only one will be present based on search_strategy)
     file_path_input: NotRequired[FilePathSearchInput]
     keyword_search_input: NotRequired[KeywordSearchInput]
     filename_pattern_input: NotRequired[FilenamePatternSearchInput]
-
+
     # Splitting strategy specific inputs (only one will be present based on splitting_strategy)
     agent_cap_input: NotRequired[AgentCapSplittingInput]
     task_rows_input: NotRequired[TaskRowsSplittingInput]
@@ -63,7 +68,7 @@ class FireAgentsMainInput(TypedDict):
 
 class FireAgentsRuntimeData(TypedDict):
     """Runtime data derived from inputs during execution."""
-
+
     prompt_material: str
     separator: str
     prompt_material_re_splitted: list[str]
```
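These schemas mark the strategy-specific blocks as optional with `typing.NotRequired`; a minimal standalone sketch of that pattern (field names are illustrative and do not reproduce the package's full schema):

```python
# Sketch of the TypedDict + NotRequired pattern used by these schemas.
from typing import TypedDict, NotRequired  # NotRequired is in typing on Python 3.11+ (typing_extensions before that)

class KeywordSearch(TypedDict):
    keyword: str

class MainInput(TypedDict):
    job_name: str
    max_agents: int
    keyword_search_input: NotRequired[KeywordSearch]  # present only for the keyword_search strategy

ok: MainInput = {"job_name": "AI_Agents", "max_agents": 25}  # optional block omitted
also_ok: MainInput = {"job_name": "AI_Agents", "max_agents": 25, "keyword_search_input": {"keyword": "TODO"}}
```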
```diff
@@ -17,11 +17,13 @@ class TabConfig(TypedDict):
 
 class LayoutConfig(TypedDict):
     """Configuration for a complete layout with its tabs."""
+
     layoutName: str
     layoutTabs: List[TabConfig]
 
 
 class LayoutsFile(TypedDict):
     """Complete layout file structure."""
+
     version: str
     layouts: List[LayoutConfig]
```