machineconfig 7.53__py3-none-any.whl → 7.69__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of machineconfig might be problematic.
- machineconfig/cluster/sessions_managers/utils/maker.py +21 -9
- machineconfig/jobs/installer/custom/boxes.py +2 -2
- machineconfig/jobs/installer/custom/hx.py +15 -12
- machineconfig/jobs/installer/custom_dev/cloudflare_warp_cli.py +23 -0
- machineconfig/jobs/installer/custom_dev/dubdb_adbc.py +1 -1
- machineconfig/jobs/installer/custom_dev/nerfont_windows_helper.py +1 -1
- machineconfig/jobs/installer/custom_dev/sysabc.py +39 -34
- machineconfig/jobs/installer/custom_dev/wezterm.py +0 -4
- machineconfig/jobs/installer/installer_data.json +103 -35
- machineconfig/jobs/installer/package_groups.py +28 -13
- machineconfig/scripts/__init__.py +0 -4
- machineconfig/scripts/linux/wrap_mcfg +1 -1
- machineconfig/scripts/python/ai/solutions/copilot/instructions/python/dev.instructions.md +3 -0
- machineconfig/scripts/python/croshell.py +22 -17
- machineconfig/scripts/python/devops.py +3 -4
- machineconfig/scripts/python/devops_navigator.py +0 -4
- machineconfig/scripts/python/env_manager/path_manager_tui.py +1 -1
- machineconfig/scripts/python/fire_jobs.py +17 -15
- machineconfig/scripts/python/ftpx.py +13 -11
- machineconfig/scripts/python/helpers/ast_search.py +74 -0
- machineconfig/scripts/python/helpers/repo_rag.py +325 -0
- machineconfig/scripts/python/helpers/symantic_search.py +25 -0
- machineconfig/scripts/python/helpers_cloud/cloud_copy.py +28 -21
- machineconfig/scripts/python/helpers_cloud/cloud_helpers.py +1 -1
- machineconfig/scripts/python/helpers_cloud/cloud_sync.py +8 -7
- machineconfig/scripts/python/helpers_croshell/crosh.py +2 -2
- machineconfig/scripts/python/helpers_devops/cli_config_dotfile.py +22 -13
- machineconfig/scripts/python/helpers_devops/cli_self.py +7 -6
- machineconfig/scripts/python/helpers_devops/cli_share_file.py +2 -2
- machineconfig/scripts/python/helpers_devops/cli_share_server.py +1 -1
- machineconfig/scripts/python/helpers_devops/cli_terminal.py +1 -1
- machineconfig/scripts/python/helpers_devops/cli_utils.py +2 -73
- machineconfig/scripts/python/helpers_devops/devops_backup_retrieve.py +4 -4
- machineconfig/scripts/python/helpers_fire_command/file_wrangler.py +2 -3
- machineconfig/scripts/python/helpers_fire_command/fire_jobs_route_helper.py +3 -4
- machineconfig/scripts/python/helpers_navigator/command_tree.py +50 -18
- machineconfig/scripts/python/helpers_repos/cloud_repo_sync.py +13 -5
- machineconfig/scripts/python/helpers_repos/count_lines_frontend.py +1 -1
- machineconfig/scripts/python/helpers_repos/entrypoint.py +2 -1
- machineconfig/scripts/python/helpers_repos/record.py +2 -1
- machineconfig/scripts/python/helpers_sessions/sessions_multiprocess.py +5 -5
- machineconfig/scripts/python/helpers_utils/download.py +152 -0
- machineconfig/scripts/python/helpers_utils/path.py +4 -2
- machineconfig/scripts/python/interactive.py +11 -14
- machineconfig/scripts/python/{machineconfig.py → mcfg_entry.py} +4 -0
- machineconfig/scripts/python/msearch.py +21 -2
- machineconfig/scripts/python/nw/devops_add_ssh_key.py +21 -5
- machineconfig/scripts/python/nw/ssh_debug_linux.py +7 -7
- machineconfig/scripts/python/nw/ssh_debug_windows.py +4 -4
- machineconfig/scripts/python/nw/wsl_windows_transfer.py +3 -2
- machineconfig/scripts/python/sessions.py +35 -20
- machineconfig/scripts/python/terminal.py +2 -2
- machineconfig/scripts/python/utils.py +12 -10
- machineconfig/scripts/windows/mounts/mount_ssh.ps1 +1 -1
- machineconfig/settings/lf/windows/lfcd.ps1 +1 -1
- machineconfig/settings/shells/pwsh/init.ps1 +1 -0
- machineconfig/settings/shells/wezterm/wezterm.lua +2 -0
- machineconfig/settings/shells/zsh/init.sh +0 -7
- machineconfig/settings/yazi/shell/yazi_cd.ps1 +29 -5
- machineconfig/setup_linux/web_shortcuts/interactive.sh +12 -11
- machineconfig/setup_windows/uv.ps1 +8 -1
- machineconfig/setup_windows/web_shortcuts/interactive.ps1 +12 -11
- machineconfig/setup_windows/web_shortcuts/quick_init.ps1 +4 -2
- machineconfig/utils/accessories.py +7 -4
- machineconfig/utils/code.py +6 -4
- machineconfig/utils/files/headers.py +2 -2
- machineconfig/utils/installer_utils/install_from_url.py +180 -0
- machineconfig/utils/installer_utils/installer_class.py +56 -46
- machineconfig/utils/installer_utils/{installer.py → installer_cli.py} +71 -65
- machineconfig/utils/{installer.py → installer_utils/installer_runner.py} +1 -25
- machineconfig/utils/meta.py +28 -15
- machineconfig/utils/options.py +4 -4
- machineconfig/utils/path_extended.py +40 -19
- machineconfig/utils/path_helper.py +33 -31
- machineconfig/utils/schemas/layouts/layout_types.py +1 -1
- machineconfig/utils/ssh.py +330 -99
- machineconfig/utils/ve.py +11 -4
- machineconfig-7.69.dist-info/METADATA +124 -0
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/RECORD +85 -83
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/entry_points.txt +2 -2
- machineconfig/jobs/installer/linux_scripts/pgsql.sh +0 -41
- machineconfig/scripts/python/explore.py +0 -49
- machineconfig/scripts/python/nw/add_ssh_key.py +0 -148
- machineconfig/settings/lf/linux/exe/fzf_nano.sh +0 -16
- machineconfig-7.53.dist-info/METADATA +0 -94
- /machineconfig/jobs/installer/linux_scripts/{warp-cli.sh → cloudflare_warp_cli.sh} +0 -0
- /machineconfig/scripts/{Restore-ThunderbirdProfile.ps1 → windows/mounts/Restore-ThunderbirdProfile.ps1} +0 -0
- /machineconfig/utils/installer_utils/{installer_abc.py → installer_locator_utils.py} +0 -0
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/WHEEL +0 -0
- {machineconfig-7.53.dist-info → machineconfig-7.69.dist-info}/top_level.txt +0 -0

machineconfig/scripts/python/helpers/repo_rag.py (new file)

@@ -0,0 +1,325 @@
+# #!/usr/bin/env python3
+# from pathlib import Path
+# from typing import Annotated
+# import subprocess
+# import typer
+# from rich.console import Console
+# from rich.progress import Progress, SpinnerColumn, TextColumn
+# from rich.table import Table
+# from rich.syntax import Syntax
+
+# import chromadb
+# from chromadb.config import Settings
+# from sentence_transformers import SentenceTransformer
+
+
+# app = typer.Typer(help="Semantic search over your repository using local RAG")
+# console = Console()
+
+
+# DEFAULT_EXTENSIONS = [".py", ".sh", ".ps1", ".md", ".toml", ".yaml", ".yml", ".json"]
+# DEFAULT_MODEL = "all-MiniLM-L6-v2"
+# DEFAULT_DB_PATH = Path.home() / ".cache" / "repo_rag"
+
+
+# class RepoRAG:
+#     def __init__(self, db_path: Path, model_name: str = DEFAULT_MODEL) -> None:
+#         self.db_path = db_path
+#         self.db_path.mkdir(parents=True, exist_ok=True)
+
+#         with console.status(f"[bold green]Loading embedding model: {model_name}..."):
+#             self.model = SentenceTransformer(model_name)
+
+#         self.client = chromadb.PersistentClient(
+#             path=str(db_path),
+#             settings=Settings(anonymized_telemetry=False)
+#         )
+
+#     def _get_or_create_collection(self, repo_path: Path) -> chromadb.Collection:
+#         collection_name = f"repo_{repo_path.name}".replace("-", "_").replace(".", "_")
+#         return self.client.get_or_create_collection(
+#             name=collection_name,
+#             metadata={"repo_path": str(repo_path)}
+#         )
+
+#     def _chunk_file_content(self, content: str, chunk_size: int = 500, overlap: int = 50) -> list[str]:
+#         lines = content.split("\n")
+#         chunks: list[str] = []
+#         current_chunk: list[str] = []
+#         current_size = 0
+
+#         for line in lines:
+#             line_size = len(line)
+#             if current_size + line_size > chunk_size and current_chunk:
+#                 chunks.append("\n".join(current_chunk))
+#                 overlap_lines = current_chunk[-overlap:] if len(current_chunk) > overlap else current_chunk
+#                 current_chunk = overlap_lines
+#                 current_size = sum(len(line_text) for line_text in current_chunk)
+
+#             current_chunk.append(line)
+#             current_size += line_size
+
+#         if current_chunk:
+#             chunks.append("\n".join(current_chunk))
+
+#         return chunks if chunks else [content]
+
+#     def index_repo(self, repo_path: Path, extensions: list[str], max_file_size_kb: int = 500) -> None:
+#         collection = self._get_or_create_collection(repo_path)
+
+#         files_to_index: list[Path] = []
+#         for ext in extensions:
+#             files_to_index.extend(repo_path.rglob(f"*{ext}"))
+
+#         files_to_index = [
+#             f for f in files_to_index
+#             if not any(part.startswith('.') for part in f.relative_to(repo_path).parts[:-1])
+#             and f.stat().st_size < max_file_size_kb * 1024
+#         ]
+
+#         console.print(f"[bold cyan]Found {len(files_to_index)} files to index")
+
+#         with Progress(
+#             SpinnerColumn(),
+#             TextColumn("[progress.description]{task.description}"),
+#             console=console
+#         ) as progress:
+#             task = progress.add_task("Indexing files...", total=len(files_to_index))
+
+#             for file_path in files_to_index:
+#                 try:
+#                     content = file_path.read_text(encoding="utf-8", errors="ignore")
+#                     rel_path = str(file_path.relative_to(repo_path))
+
+#                     chunks = self._chunk_file_content(content)
+
+#                     for i, chunk in enumerate(chunks):
+#                         doc_id = f"{rel_path}::chunk_{i}"
+#                         embedding = self.model.encode(chunk).tolist()
+
+#                         collection.upsert(
+#                             ids=[doc_id],
+#                             embeddings=[embedding],
+#                             documents=[chunk],
+#                             metadatas=[{
+#                                 "file_path": rel_path,
+#                                 "chunk_index": i,
+#                                 "total_chunks": len(chunks),
+#                                 "extension": file_path.suffix
+#                             }]
+#                         )
+
+#                 except Exception as e:
+#                     console.print(f"[yellow]Warning: Failed to index {file_path}: {e}")
+
+#                 progress.advance(task)
+
+#         console.print("[bold green]✓ Indexing complete!")
+
+#     def search(self, repo_path: Path, query: str, n_results: int = 20) -> list[dict[str, str | dict[str, str | int]]]:
+#         collection = self._get_or_create_collection(repo_path)
+
+#         with console.status("[bold green]Searching..."):
+#             query_embedding = self.model.encode(query).tolist()
+#             results = collection.query(
+#                 query_embeddings=[query_embedding],
+#                 n_results=n_results
+#             )
+
+#         if not results["ids"] or not results["ids"][0]:
+#             return []
+
+#         search_results: list[dict[str, str | dict[str, str | int]]] = []
+#         for i, doc_id in enumerate(results["ids"][0]):
+#             search_results.append({
+#                 "id": doc_id,
+#                 "file_path": results["metadatas"][0][i]["file_path"],
+#                 "content": results["documents"][0][i],
+#                 "distance": results["distances"][0][i] if results.get("distances") else 0.0,
+#                 "metadata": results["metadatas"][0][i]
+#             })
+
+#         return search_results
+
+#     def delete_index(self, repo_path: Path) -> None:
+#         collection_name = f"repo_{repo_path.name}".replace("-", "_").replace(".", "_")
+#         try:
+#             self.client.delete_collection(name=collection_name)
+#             console.print(f"[bold green]✓ Deleted index for {repo_path.name}")
+#         except Exception as e:
+#             console.print(f"[bold red]Error deleting index: {e}")
+
+
+# @app.command()
+# def index(
+#     repo_path: Annotated[Path, typer.Argument(help="Path to repository to index")] = Path.cwd(),
+#     extensions: Annotated[str, typer.Option("--ext", "-e", help="Comma-separated list of extensions")] = ",".join(DEFAULT_EXTENSIONS),
+#     model: Annotated[str, typer.Option("--model", "-m", help="Sentence transformer model name")] = DEFAULT_MODEL,
+#     db_path: Annotated[Path, typer.Option("--db", help="Database path")] = DEFAULT_DB_PATH,
+#     max_size_kb: Annotated[int, typer.Option("--max-size", help="Max file size in KB")] = 500,
+# ) -> None:
+#     repo_path = repo_path.resolve()
+
+#     if not repo_path.exists():
+#         console.print(f"[bold red]Error: Repository path does not exist: {repo_path}")
+#         raise typer.Exit(1)
+
+#     ext_list = [ext.strip() if ext.startswith(".") else f".{ext.strip()}" for ext in extensions.split(",")]
+
+#     console.print(f"[bold cyan]Repository:[/] {repo_path}")
+#     console.print(f"[bold cyan]Extensions:[/] {', '.join(ext_list)}")
+#     console.print(f"[bold cyan]Model:[/] {model}")
+#     console.print(f"[bold cyan]Database:[/] {db_path}")
+#     console.print()
+
+#     rag = RepoRAG(db_path, model)
+#     rag.index_repo(repo_path, ext_list, max_size_kb)
+
+
+# @app.command()
+# def search(
+#     query: Annotated[str, typer.Argument(help="Search query")],
+#     repo_path: Annotated[Path, typer.Option("--repo", "-r", help="Path to repository")] = Path.cwd(),
+#     n_results: Annotated[int, typer.Option("--num", "-n", help="Number of results")] = 20,
+#     model: Annotated[str, typer.Option("--model", "-m", help="Sentence transformer model name")] = DEFAULT_MODEL,
+#     db_path: Annotated[Path, typer.Option("--db", help="Database path")] = DEFAULT_DB_PATH,
+#     use_fzf: Annotated[bool, typer.Option("--fzf", help="Use fzf for interactive selection")] = True,
+#     show_content: Annotated[bool, typer.Option("--content", "-c", help="Show content snippets")] = True,
+# ) -> None:
+#     repo_path = repo_path.resolve()
+
+#     if not repo_path.exists():
+#         console.print(f"[bold red]Error: Repository path does not exist: {repo_path}")
+#         raise typer.Exit(1)
+
+#     rag = RepoRAG(db_path, model)
+#     results = rag.search(repo_path, query, n_results)
+
+#     if not results:
+#         console.print("[yellow]No results found")
+#         raise typer.Exit(0)
+
+#     if use_fzf:
+#         _search_with_fzf(results, repo_path, show_content)
+#     else:
+#         _display_results(results, show_content)
+
+
+# def _display_results(results: list[dict[str, str | dict[str, str | int]]], show_content: bool) -> None:
+#     table = Table(title="Search Results", show_header=True, header_style="bold magenta")
+#     table.add_column("#", style="cyan", width=4)
+#     table.add_column("File", style="green")
+#     table.add_column("Chunk", style="yellow", width=8)
+#     table.add_column("Score", style="blue", width=8)
+#     if show_content:
+#         table.add_column("Content Preview", style="white", width=60)
+
+#     for i, result in enumerate(results, 1):
+#         file_path = str(result["file_path"])
+#         metadata = result["metadata"]
+#         chunk_info = f"{int(metadata['chunk_index']) + 1}/{int(metadata['total_chunks'])}"
+#         score = f"{float(result['distance']):.3f}"
+
+#         row = [str(i), file_path, chunk_info, score]
+
+#         if show_content:
+#             content = str(result["content"])
+#             preview = content[:200].replace("\n", " ") + ("..." if len(content) > 200 else "")
+#             row.append(preview)
+
+#         table.add_row(*row)
+
+#     console.print(table)
+
+
+# def _search_with_fzf(results: list[dict[str, str | dict[str, str | int]]], repo_path: Path, show_content: bool) -> None:
+#     try:
+#         fzf_input_lines: list[str] = []
+#         for i, result in enumerate(results, 1):
+#             file_path = str(result["file_path"])
+#             metadata = result["metadata"]
+#             chunk_info = f"{int(metadata['chunk_index']) + 1}/{int(metadata['total_chunks'])}"
+#             score = f"{float(result['distance']):.3f}"
+
+#             if show_content:
+#                 content = str(result["content"]).replace("\n", " ")[:100]
+#                 line = f"{i:3d} │ {score:6s} │ {chunk_info:5s} │ {file_path:60s} │ {content}"
+#             else:
+#                 line = f"{i:3d} │ {score:6s} │ {chunk_info:5s} │ {file_path}"
+
+#             fzf_input_lines.append(line)
+
+#         fzf_input = "\n".join(fzf_input_lines)
+
+#         result = subprocess.run(
+#             ["fzf", "--ansi", "--multi", "--reverse", "--header=Select files to open (TAB for multi-select)"],
+#             input=fzf_input.encode(),
+#             capture_output=True
+#         )
+
+#         if result.returncode != 0:
+#             console.print("[yellow]Selection cancelled")
+#             return
+
+#         selected_lines = result.stdout.decode().strip().split("\n")
+
+#         for line in selected_lines:
+#             if not line:
+#                 continue
+
+#             parts = line.split("│")
+#             if len(parts) < 4:
+#                 continue
+
+#             file_path_str = parts[3].strip().split()[0]
+
+#             idx = int(parts[0].strip()) - 1
+#             if idx < len(results):
+#                 content = str(results[idx]["content"])
+
+#                 console.print(f"\n[bold green]File:[/] {file_path_str}")
+#                 console.print("[bold cyan]Content:[/]")
+
+#                 syntax = Syntax(content, "python", theme="monokai", line_numbers=True)
+#                 console.print(syntax)
+#                 console.print("\n" + "─" * 80 + "\n")
+
+#     except FileNotFoundError:
+#         console.print("[bold red]Error: fzf not found. Install fzf or use --no-fzf flag")
+#         _display_results(results, show_content)
+
+
+# @app.command()
+# def delete(
+#     repo_path: Annotated[Path, typer.Argument(help="Path to repository")] = Path.cwd(),
+#     db_path: Annotated[Path, typer.Option("--db", help="Database path")] = DEFAULT_DB_PATH,
+#     model: Annotated[str, typer.Option("--model", "-m", help="Sentence transformer model name")] = DEFAULT_MODEL,
+# ) -> None:
+#     repo_path = repo_path.resolve()
+#     rag = RepoRAG(db_path, model)
+#     rag.delete_index(repo_path)
+
+
+# @app.command()
+# def info() -> None:
+#     console.print("[bold cyan]Repo RAG - Semantic Code Search[/]\n")
+
+#     info_table = Table(show_header=False, box=None)
+#     info_table.add_column("Key", style="cyan")
+#     info_table.add_column("Value", style="white")
+
+#     info_table.add_row("Embedding Model", DEFAULT_MODEL)
+#     info_table.add_row("Vector Store", "ChromaDB (embedded)")
+#     info_table.add_row("Default Extensions", ", ".join(DEFAULT_EXTENSIONS))
+#     info_table.add_row("Database Path", str(DEFAULT_DB_PATH))
+
+#     console.print(info_table)
+
+#     console.print("\n[bold green]Quick Start:[/]")
+#     console.print("1. Index your repo: [yellow]repo-rag index[/]")
+#     console.print("2. Search semantically: [yellow]repo-rag search 'your query'[/]")
+#     console.print("3. Delete index: [yellow]repo-rag delete[/]")
+
+
+# if __name__ == "__main__":
+#     app()

machineconfig/scripts/python/helpers/symantic_search.py (new file)

@@ -0,0 +1,25 @@
+"""
+please fully read docs of https://github.com/meilisearch/meilisearch-python
+
+then make a cli using typer in this file
+
+I want commands to
+create-index (pass name of index)
+command to populate the index with files in folder
+command is add-to-index
+--directory [str] and --extensions .py,.ps1,.sh
+
+command to rebuild index
+
+coimmand to search
+command show stats and rop index
+
+learn from my style of building apps like this #file:msearch.py
+
+and add option when building index to say --symantic (means use ai locall embedding to build the index)
+
+in all cases we should be able to pass
+MEILI_URL="http://localhost:7700" (default)
+MEILI_MASTER_KEY="YOUR_MASTER_KEY"
+
+"""
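
The new symantic_search.py ships only this prompt-style docstring; there is no implementation behind it in 7.69. For orientation, a minimal, hypothetical sketch of the typer CLI the spec describes might look like the following, assuming the documented meilisearch-python client API (meilisearch.Client, Index.add_documents, Index.search) and a Meilisearch server reachable at MEILI_URL. The command names mirror the spec, while the _client helper and the id/path/content document shape are illustrative; the requested --symantic embedding mode and the rebuild/stats commands are omitted.

# Hypothetical sketch, not part of the package: the shipped symantic_search.py contains only the spec above.
# Assumes meilisearch-python's documented client API and a running Meilisearch server.
import os
from pathlib import Path

import meilisearch
import typer

app = typer.Typer(help="Index and search local files with Meilisearch")
MEILI_URL = os.environ.get("MEILI_URL", "http://localhost:7700")
MEILI_MASTER_KEY = os.environ.get("MEILI_MASTER_KEY", "")


def _client(url: str, key: str) -> meilisearch.Client:
    # api_key=None means an unsecured local instance
    return meilisearch.Client(url, key or None)


@app.command()
def create_index(name: str, url: str = MEILI_URL, key: str = MEILI_MASTER_KEY) -> None:
    """Create an index whose documents are keyed by an integer id."""
    _client(url, key).create_index(name, {"primaryKey": "id"})


@app.command()
def add_to_index(name: str, directory: str = ".", extensions: str = ".py,.ps1,.sh",
                 url: str = MEILI_URL, key: str = MEILI_MASTER_KEY) -> None:
    """Walk --directory and push every matching file as one document."""
    exts = {e.strip() for e in extensions.split(",")}
    docs = [
        {"id": i, "path": str(p), "content": p.read_text(encoding="utf-8", errors="ignore")}
        for i, p in enumerate(sorted(Path(directory).rglob("*")))
        if p.is_file() and p.suffix in exts
    ]
    _client(url, key).index(name).add_documents(docs)


@app.command()
def search(name: str, query: str, url: str = MEILI_URL, key: str = MEILI_MASTER_KEY) -> None:
    """Keyword search; prints the paths of matching documents."""
    for hit in _client(url, key).index(name).search(query)["hits"]:
        typer.echo(hit["path"])


if __name__ == "__main__":
    app()

With typer's underscore-to-dash conversion this would be driven roughly as: create-index notes, then add-to-index notes --directory ./src --extensions .py,.sh, then search notes "query".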

machineconfig/scripts/python/helpers_cloud/cloud_copy.py

@@ -2,26 +2,24 @@
 CC
 """
 
-from machineconfig.utils.path_extended import PathExtended
-from tenacity import retry, stop_after_attempt, wait_chain, wait_fixed
-import getpass
-import os
 from typing import Optional, Annotated
-
 import typer
 
-from
-from machineconfig.scripts.python.helpers_cloud.cloud_helpers import ArgsDefaults, Args
-from rich.console import Console
-from rich.panel import Panel
-from rich.progress import Progress
-from machineconfig.utils.accessories import pprint
+from tenacity import retry, stop_after_attempt, wait_chain, wait_fixed
 
-console = Console()
 
 
 @retry(stop=stop_after_attempt(3), wait=wait_chain(wait_fixed(1), wait_fixed(4), wait_fixed(9)))
 def get_securely_shared_file(url: Optional[str] = None, folder: Optional[str] = None) -> None:
+    from rich.console import Console
+    from rich.panel import Panel
+    from rich.progress import Progress
+    import getpass
+    import os
+    from machineconfig.utils.path_extended import PathExtended
+
+    console = Console()
+
     console.print(Panel("🚀 Secure File Downloader", title="[bold blue]Downloader[/bold blue]", border_style="blue"))
 
     folder_obj = PathExtended.cwd() if folder is None else PathExtended(folder)
@@ -62,21 +60,30 @@ def get_securely_shared_file(url: Optional[str] = None, folder: Optional[str] =
     tmp_folder.delete()
 
 
+
 def main(
     source: Annotated[str, typer.Argument(help="📂 file/folder path to be taken from here.")],
     target: Annotated[str, typer.Argument(help="🎯 file/folder path to be be sent to here.")],
-    overwrite: Annotated[bool, typer.Option("--overwrite", "-o", help="✍️ Overwrite existing file.")] =
-    share: Annotated[bool, typer.Option("--share", "-s", help="🔗 Share file / directory")] =
-    rel2home: Annotated[bool, typer.Option("--relative2home", "-r", help="🏠 Relative to `myhome` folder")] =
-    root: Annotated[Optional[str], typer.Option("--root", "-R", help="🌳 Remote root. None is the default, unless rel2home is raied, making the default `myhome`.")] =
-    key: Annotated[Optional[str], typer.Option("--key", "-k", help="🔑 Key for encryption")] =
-    pwd: Annotated[Optional[str], typer.Option("--password", "-p", help="🔒 Password for encryption")] =
-    encrypt: Annotated[bool, typer.Option("--encrypt", "-e", help="🔐 Encrypt before sending.")] =
-    zip_: Annotated[bool, typer.Option("--zip", "-z", help="📦 unzip after receiving.")] =
-    os_specific: Annotated[bool, typer.Option("--os-specific", "-O", help="💻 choose path specific for this OS.")] =
+    overwrite: Annotated[bool, typer.Option("--overwrite", "-o", help="✍️ Overwrite existing file.")] = False,
+    share: Annotated[bool, typer.Option("--share", "-s", help="🔗 Share file / directory")] = False,
+    rel2home: Annotated[bool, typer.Option("--relative2home", "-r", help="🏠 Relative to `myhome` folder")] = False,
+    root: Annotated[Optional[str], typer.Option("--root", "-R", help="🌳 Remote root. None is the default, unless rel2home is raied, making the default `myhome`.")] = None,
+    key: Annotated[Optional[str], typer.Option("--key", "-k", help="🔑 Key for encryption")] = None,
+    pwd: Annotated[Optional[str], typer.Option("--password", "-p", help="🔒 Password for encryption")] = None,
+    encrypt: Annotated[bool, typer.Option("--encrypt", "-e", help="🔐 Encrypt before sending.")] = False,
+    zip_: Annotated[bool, typer.Option("--zip", "-z", help="📦 unzip after receiving.")] = False,
+    os_specific: Annotated[bool, typer.Option("--os-specific", "-O", help="💻 choose path specific for this OS.")] = False,
     config: Annotated[Optional[str], typer.Option("--config", "-c", help="⚙️ path to cloud.json file.")] = None,
 ) -> None:
     """📤 Upload or 📥 Download files/folders to/from cloud storage services like Google Drive, Dropbox, OneDrive, etc."""
+    from rich.console import Console
+    from rich.panel import Panel
+    from machineconfig.utils.path_extended import PathExtended
+    from machineconfig.scripts.python.helpers_cloud.helpers2 import parse_cloud_source_target
+    from machineconfig.scripts.python.helpers_cloud.cloud_helpers import Args
+    from machineconfig.utils.accessories import pprint
+
+    console = Console()
     console.print(Panel("☁️ Cloud Copy Utility", title="[bold blue]Cloud Copy[/bold blue]", border_style="blue", width=152))
     args_obj = Args(
         overwrite=overwrite,

machineconfig/scripts/python/helpers_cloud/cloud_helpers.py

@@ -12,6 +12,7 @@ from dataclasses import dataclass
 console = Console()
 
 
+
 class ArgsDefaults:
     # source: str=None
     # target: str=None
@@ -25,7 +26,6 @@ class ArgsDefaults:
     key = None
     pwd = None
 
-
 @dataclass
 class Args:
     cloud: Optional[str] = None

machineconfig/scripts/python/helpers_cloud/cloud_sync.py

@@ -2,16 +2,9 @@
 TODO: use typer or typed-argument-parser to parse args
 """
 
-from machineconfig.scripts.python.helpers_cloud.helpers2 import parse_cloud_source_target
-from machineconfig.scripts.python.helpers_cloud.cloud_helpers import Args
-from machineconfig.scripts.python.helpers_cloud.cloud_mount import get_mprocs_mount_txt
 
 from typing import Annotated, Optional
 import typer
-from rich.console import Console
-from rich.panel import Panel
-
-console = Console()
 
 
 def main(
@@ -27,6 +20,14 @@ def main(
     delete: Annotated[bool, typer.Option("--delete", "-D", help="Delete files in remote that are not in local.")] = False,
     verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Verbosity of mprocs to show details of syncing.")] = False,
 ) -> None:
+
+    from machineconfig.scripts.python.helpers_cloud.helpers2 import parse_cloud_source_target
+    from machineconfig.scripts.python.helpers_cloud.cloud_helpers import Args
+    from machineconfig.scripts.python.helpers_cloud.cloud_mount import get_mprocs_mount_txt
+    from rich.console import Console
+    from rich.panel import Panel
+    console = Console()
+
     title = "☁️ Cloud Sync Utility"
     console.print(Panel(title, title_align="left", border_style="blue"))
 

machineconfig/scripts/python/helpers_croshell/crosh.py

@@ -20,9 +20,9 @@ def get_read_data_pycode(path: str):
     from rich.panel import Panel
     from rich.text import Text
     from rich.console import Console
-    from
+    from pathlib import Path
     console = Console()
-    p =
+    p = Path(path).absolute()
     try:
         from machineconfig.utils.files.read import Read
         from machineconfig.utils.accessories import pprint

machineconfig/scripts/python/helpers_devops/cli_config_dotfile.py

@@ -12,7 +12,9 @@ def main(
     method: Annotated[Literal["symlink", "s", "copy", "c"], typer.Option(..., "--method", "-m", help="Method to use for linking files")] = "copy",
     on_conflict: Annotated[ON_CONFLICT_LOOSE, typer.Option(..., "--on-conflict", "-o", help="Action to take on conflict")] = "throw-error",
     sensitivity: Annotated[Literal["private", "v", "public", "b"], typer.Option(..., "--sensitivity", "-s", help="Sensitivity of the config file.")] = "private",
-    destination: Annotated[str, typer.Option("--destination", "-d", help="destination folder (override the default, use at your own risk)")] = "",
+    destination: Annotated[str, typer.Option("--destination", "-d", help="destination folder (override the default, use at your own risk)")] = "",
+    shared: Annotated[bool, typer.Option("--shared", "-sh", help="Whether the config file is shared across destinations directory.")] = False,
+) -> None:
     from rich.console import Console
     from rich.panel import Panel
     from machineconfig.utils.links import symlink_map, copy_map
@@ -27,26 +29,33 @@ def main(
     console = Console()
     orig_path = Path(file).expanduser().absolute()
     if destination == "":
-
-
+        if shared:
+            new_path = backup_root.joinpath("shared").joinpath(orig_path.name)
+            new_path.parent.mkdir(parents=True, exist_ok=True)
+        else:
+            new_path = backup_root.joinpath(orig_path.relative_to(Path.home()))
+            new_path.parent.mkdir(parents=True, exist_ok=True)
     else:
-
-
-
-
-
-
+        if shared:
+            dest_path = Path(destination).expanduser().absolute()
+            dest_path.mkdir(parents=True, exist_ok=True)
+            new_path = dest_path.joinpath("shared").joinpath(orig_path.name)
+            new_path.parent.mkdir(parents=True, exist_ok=True)
+        else:
+            dest_path = Path(destination).expanduser().absolute()
+            dest_path.mkdir(parents=True, exist_ok=True)
+            new_path = dest_path.joinpath(orig_path.name)
     match method:
         case "copy" | "c":
             try:
-                copy_map(config_file_default_path=
+                copy_map(config_file_default_path=orig_path, self_managed_config_file_path=new_path, on_conflict=ON_CONFLICT_MAPPER[on_conflict]) # type: ignore[arg-type]
             except Exception as e:
                 typer.echo(f"[red]Error:[/] {e}")
                 typer.Exit(code=1)
             return
         case "symlink" | "s":
             try:
-                symlink_map(config_file_default_path=
+                symlink_map(config_file_default_path=orig_path, self_managed_config_file_path=new_path, on_conflict=ON_CONFLICT_MAPPER[on_conflict]) # type: ignore[arg-type]
             except Exception as e:
                 typer.echo(f"[red]Error:[/] {e}")
                 typer.Exit(code=1)
@@ -56,10 +65,10 @@ def main(
 
     # mapper_snippet = "\n".join(
    #     [
-    #         f"[bold]📝 Edit configuration file:[/] [cyan]nano {
+    #         f"[bold]📝 Edit configuration file:[/] [cyan]nano {Path(CONFIG_ROOT)}/symlinks/mapper.toml[/cyan]",
    #         "",
    #         f"[{new_path.parent.name}]",
-    #         f"{orig_path.name.split('.')[0]} = {{ this = '{orig_path.
+    #         f"{orig_path.name.split('.')[0]} = {{ this = '{orig_path.as_posix()}', to_this = '{new_path.as_posix()}' }}",
    #     ]
    # )
    # console.print(

machineconfig/scripts/python/helpers_devops/cli_self.py

@@ -30,7 +30,8 @@ uv tool install --upgrade machineconfig
     if platform.system() == "Windows":
         from machineconfig.utils.code import exit_then_run_shell_script, get_uv_command_executing_python_script
         from machineconfig.utils.meta import lambda_to_python_script
-        python_script = lambda_to_python_script(lambda: copy_both_assets(),
+        python_script = lambda_to_python_script(lambda: copy_both_assets(),
+                                                in_global=True, import_module=False)
         uv_command, _py_file = get_uv_command_executing_python_script(python_script=python_script, uv_with=["machineconfig"], uv_project_dir=None)
         exit_then_run_shell_script(shell_script + "\n" + uv_command, strict=True)
     else:
@@ -52,9 +53,9 @@ def install(no_copy_assets: Annotated[bool, typer.Option("--no-assets-copy", "-n
     else:
         import platform
         if platform.system() == "Windows":
-            run_shell_script(r"""& "$HOME\.local\bin\uv.exe" tool install --upgrade "machineconfig>=7.
+            run_shell_script(r"""& "$HOME\.local\bin\uv.exe" tool install --upgrade "machineconfig>=7.69" """)
         else:
-            run_shell_script("""$HOME/.local/bin/uv tool install --upgrade "machineconfig>=7.
+            run_shell_script("""$HOME/.local/bin/uv tool install --upgrade "machineconfig>=7.69" """)
     from machineconfig.profile.create_shell_profile import create_default_shell_profile
     if not no_copy_assets:
         create_default_shell_profile() # involves copying assets too
@@ -77,10 +78,10 @@ def navigate():
     import machineconfig.scripts.python as navigator
     from pathlib import Path
     path = Path(navigator.__file__).resolve().parent.joinpath("devops_navigator.py")
-    from machineconfig.utils.code import
+    from machineconfig.utils.code import exit_then_run_shell_script
     if Path.home().joinpath("code/machineconfig").exists(): executable = f"""--project "{str(Path.home().joinpath("code/machineconfig"))}" --with textual"""
-    else: executable = """--with "machineconfig>=7.
-
+    else: executable = """--with "machineconfig>=7.69,textual" """
+    exit_then_run_shell_script(f"""uv run {executable} {path}""")
 
 
 def run_python(ip: Annotated[str, typer.Argument(..., help="Python command to run in the machineconfig environment")],

machineconfig/scripts/python/helpers_devops/cli_share_file.py

@@ -12,7 +12,7 @@ Usage examples:
 devops network receive -- --relay 10.17.62.206:443 7121-donor-olympic-bicycle
 devops network receive -- croc --relay 10.17.62.206:443 7121-donor-olympic-bicycle
 """
-from machineconfig.utils.installer_utils.
+from machineconfig.utils.installer_utils.installer_cli import install_if_missing
 install_if_missing(which="croc")
 import platform
 import sys
@@ -100,7 +100,7 @@ def share_file_send(path: Annotated[str, typer.Argument(help="Path to the file o
     qrcode: Annotated[bool, typer.Option("--qrcode", "--qr", help="Show receive code as a qrcode")] = False,
 ) -> None:
     """Send a file using croc with relay server."""
-    from machineconfig.utils.installer_utils.
+    from machineconfig.utils.installer_utils.installer_cli import install_if_missing
     install_if_missing(which="croc")
     # Get relay server IP from environment or use default
     import socket

machineconfig/scripts/python/helpers_devops/cli_share_server.py

@@ -40,7 +40,7 @@ def web_file_explorer(
     over_internet: Annotated[bool, typer.Option("--over-internet", "-i", help="Expose the share server over the internet using ngrok")] = False,
     backend: Annotated[str, typer.Option("--backend", "-b", help="Backend to use: filebrowser (default), miniserve, or easy-sharing")] = "filebrowser"
 ) -> None:
-    from machineconfig.utils.installer_utils.
+    from machineconfig.utils.installer_utils.installer_cli import install_if_missing
 
     if backend not in ["filebrowser", "miniserve", "easy-sharing"]:
         typer.echo(f"❌ ERROR: Invalid backend '{backend}'. Must be one of: filebrowser, miniserve, easy-sharing", err=True)

machineconfig/scripts/python/helpers_devops/cli_terminal.py

@@ -60,7 +60,7 @@ def main(
     ssl_ca: Annotated[Optional[str], typer.Option("--ssl-ca", "-A", help="SSL CA file path for client certificate verification")] = None,
     over_internet: Annotated[bool, typer.Option("--over-internet", "-i", help="Expose the terminal over the internet using ngrok")] = False
 ) -> None:
-    from machineconfig.utils.installer_utils.
+    from machineconfig.utils.installer_utils.installer_cli import install_if_missing
     install_if_missing("ttyd")
     if over_internet: install_if_missing("ngrok")
 