machineconfig 4.99__py3-none-any.whl → 5.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of machineconfig has been flagged as potentially problematic; see the registry listing for details.
- machineconfig/scripts/python/count_lines.py +343 -0
- machineconfig/scripts/python/count_lines_frontend.py +12 -0
- machineconfig/scripts/python/devops.py +4 -2
- machineconfig/scripts/python/interactive.py +1 -1
- machineconfig/scripts/python/repos.py +203 -71
- machineconfig/scripts/python/repos_helper_record.py +3 -1
- {machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/METADATA +21 -1
- {machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/RECORD +11 -11
- {machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/entry_points.txt +0 -1
- machineconfig/scripts/linux/repos +0 -2
- machineconfig/scripts/windows/repos.ps1 +0 -1
- {machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/WHEEL +0 -0
- {machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/top_level.txt +0 -0
machineconfig/scripts/python/count_lines.py
@@ -0,0 +1,343 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from git import Repo
+from collections import defaultdict
+from datetime import datetime
+
+from pathlib import Path
+from rich.progress import track
+import polars as pl
+import plotly.graph_objects as go
+import plotly.express as px
+import typer
+
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, List, Optional, Union
+
+
+app = typer.Typer()
+
+
+def count_lines_in_commit(commit: "Any") -> int:
+    total_lines = 0
+    for file in commit.stats.files:
+        if str(file).endswith(".py"):
+            blob = commit.tree / file
+            total_lines += len(blob.data_stream.read().decode("utf-8").splitlines())
+    return total_lines
+
+
+def count_historical_loc(repo_path: str) -> int:
+    repo = Repo(repo_path)
+    file_line_counts: "Dict[str, int]" = defaultdict(int)
+    total_commits: int = sum(1 for _ in repo.iter_commits())
+    print(f"Total commits to process: {total_commits}")
+    for i, commit in enumerate(repo.iter_commits(), 1):
+        if i % 100 == 0 or i == total_commits:
+            print(f"Processing commit {i}/{total_commits} ({i / total_commits:.1%})")
+        try:
+            # Handle initial commits that have no parents
+            if not commit.parents:
+                # For initial commit, count all lines in Python files
+                for file in commit.stats.files:
+                    if str(file).endswith(".py"):
+                        file_line_counts[str(file)] += commit.stats.files[file]["insertions"]
+            else:
+                # For commits with parents, use stats
+                for file in commit.stats.files:
+                    if str(file).endswith(".py"):
+                        file_line_counts[str(file)] += commit.stats.files[file]["insertions"]
+        except Exception:
+            # If stats fail (e.g., corrupted parent), skip this commit
+            print(f"Warning: Could not get stats for commit {commit.hexsha[:8]}, skipping")
+            continue
+
+    print(f"\nProcessed files: {len(file_line_counts)}")
+    return sum(file_line_counts.values())
+
+def count_python_lines(commit: "Any") -> int:
+    """Count total lines in Python files for a specific commit"""
+    total_lines = 0
+    try:
+        for blob in commit.tree.traverse():
+            if blob.path.endswith(".py"):
+                try:
+                    content = blob.data_stream.read().decode("utf-8")
+                    total_lines += len(content.splitlines())
+                except Exception as _e:
+                    continue
+    except Exception as _e:
+        return 0
+    return total_lines
+def get_default_branch(repo: Repo) -> str:
+    """Get the default branch name of the repository"""
+    try:
+        _ = repo.refs["main"]
+        return "main"  # First try 'main'
+    except IndexError:
+        try:
+            _ = repo.refs["master"]
+            return "master"  # Then try 'master'
+        except IndexError:
+            return repo.head.reference.name  # If neither exists, get the branch the HEAD is pointing to
+
+
+@app.command()
+def count_historical(repo_path: str = typer.Argument(..., help="Path to the git repository")):
+    """Count total historical lines of Python code in the repository."""
+    print(f"Analyzing repository: {repo_path}")
+    total_loc: int = count_historical_loc(repo_path)
+    print(f"\nTotal historical lines of Python code: {total_loc}")
+
+
+@app.command()
+def analyze_over_time(repo_path: str = typer.Argument(..., help="Path to the git repository")):
+    """Analyze a git repository to track Python code size over time with visualization."""
+    repo: Repo = Repo(repo_path)
+    branch_name: str = get_default_branch(repo)
+    print(f"🔍 Using branch: {branch_name}")
+    commit_data: "List[Dict[str, Any]]" = []
+    print("⏳ Analyzing commits...")
+    try:
+        commits = list(repo.iter_commits(branch_name))
+        from datetime import timezone
+
+        for commit in track(commits, description="Processing commits..."):
+            commit_data.append({"hash": commit.hexsha, "dtmExit": datetime.fromtimestamp(commit.committed_date, tz=timezone.utc), "lines": count_python_lines(commit)})
+    except Exception as e:
+        print(f"❌ Error analyzing commits: {str(e)}")
+        return
+    df = pl.DataFrame(commit_data)
+    df = df.sort("dtmExit")
+    # Create interactive plotly figure with dark theme and all bells and whistles
+    fig = go.Figure()
+    # Add line chart with gradient fill and sparkle effect
+    fig.add_trace(go.Scatter(x=df["dtmExit"], y=df["lines"], mode="lines", line={"width": 3, "color": "#00b4ff"}, fill="tozeroy", fillcolor="rgba(0, 180, 255, 0.2)", name="Lines of Code", hovertemplate="<b>Date:</b> %{x}<br><b>Lines:</b> %{y:,}<extra></extra>"))
+    # Add markers for significant points (min, max, last)
+    min_idx = df["lines"].arg_min()
+    max_idx = df["lines"].arg_max()
+    min_point = df.slice(min_idx, 1).to_dicts()[0] if min_idx is not None else {}
+    max_point = df.slice(max_idx, 1).to_dicts()[0] if max_idx is not None else {}
+    last_point = df.slice(-1, 1).to_dicts()[0]
+
+    # Add markers for significant points
+    fig.add_trace(
+        go.Scatter(
+            x=[min_point["dtmExit"], max_point["dtmExit"], last_point["dtmExit"]],
+            y=[min_point["lines"], max_point["lines"], last_point["lines"]],
+            mode="markers",
+            marker={"size": [10, 14, 12], "color": ["#ff4f4f", "#4fff4f", "#4f4fff"], "line": {"width": 2, "color": "white"}, "symbol": ["circle", "star", "diamond"]},
+            name="Key Points",
+            hovertemplate="<b>%{text}</b><br>Date: %{x}<br>Lines: %{y:,}<extra></extra>",
+            text=[f"🔽 Min: {min_point['lines']:,} lines", f"🔼 Max: {max_point['lines']:,} lines", f"📊 Current: {last_point['lines']:,} lines"],
+        )
+    )
+
+    # Add annotation only for current point
+    # annotations = [
+    #     {"x": last_point['date'], "y": last_point['lines'], "text": f"📊 Current: {last_point['lines']:,} lines", "showarrow": True, "arrowhead": 2, "arrowsize": 1,
+    #      "arrowwidth": 2, "arrowcolor": "#ffffff", "font": {"size": 14, "color": "#ffffff"}, "bgcolor": "#00b4ff", "bordercolor": "#ffffff",
+    #      "borderwidth": 1, "borderpad": 4, "ax": 40, "ay": -40}
+    # ]
+
+    # Update layout with dark theme and customizations
+    fig.update_layout(
+        title={"text": "✨ Python Code Base Size Over Time ✨", "y": 0.95, "x": 0.5, "xanchor": "center", "yanchor": "top", "font": {"size": 24, "color": "white"}},
+        xaxis_title="Date 📅",
+        yaxis_title="Lines of Code 📝",
+        hovermode="closest",
+        template="plotly_dark",
+        plot_bgcolor="rgba(25, 25, 35, 1)",
+        paper_bgcolor="rgba(15, 15, 25, 1)",
+        font={"family": "Arial, sans-serif", "size": 14, "color": "white"},  # annotations=annotations,
+        autosize=True,
+        height=700,
+        margin={"l": 80, "r": 80, "t": 100, "b": 80},
+        xaxis={"showgrid": True, "gridcolor": "rgba(80, 80, 100, 0.2)", "showline": True, "linecolor": "rgba(200, 200, 255, 0.2)", "tickfont": {"size": 12}},
+        yaxis={"showgrid": True, "gridcolor": "rgba(80, 80, 100, 0.2)", "showline": True, "linecolor": "rgba(200, 200, 255, 0.2)", "tickformat": ",", "tickfont": {"size": 12}},
+    )
+
+    # Add range slider for date selection
+    fig.update_xaxes(rangeslider_visible=True, rangeslider_thickness=0.05)
+
+    # Save as interactive HTML and static image
+    plot_dir = Path.home().joinpath("tmp_results", "tmp_images", Path(repo_path).name)
+    plot_dir.mkdir(parents=True, exist_ok=True)
+
+    html_path = plot_dir.joinpath("code_size_evolution.html")
+    png_path = plot_dir.joinpath("code_size_evolution.png")
+
+    fig.write_html(html_path, include_plotlyjs="cdn")
+    fig.write_image(png_path, width=1200, height=700, scale=2)
+
+    print(f"🖼️ Interactive plot saved as {html_path}")
+    print(f"🖼️ Static image saved as {png_path}")
+    # Print statistics
+    print("\n📊 Repository Statistics:")
+    print(f"📚 Total commits analyzed: {len(df)}")
+    print(f"🔙 Initial line count: {df['lines'][-1]:,}")
+    print(f"🔜 Final line count: {df['lines'][0]:,}")
+    print(f"📈 Net change: {df['lines'][0] - df['lines'][-1]:,} lines")
+
+
+def _print_python_files_by_size_impl(repo_path: str) -> "Union[pl.DataFrame, Exception]":
+    try:
+        import os
+        if not os.path.exists(repo_path):
+            return ValueError(f"Repository path does not exist: {repo_path}")
+        # Initialize data storage
+        file_data: "List[Dict[str, Union[str, int]]]" = []
+
+        # Walk through the repository
+        for root, _, files in os.walk(repo_path):
+            # Skip .git directory and other hidden directories
+            if ".git" in Path(root).parts or any(part.startswith(".") for part in Path(root).parts):
+                continue
+
+            for file in files:
+                if file.endswith(".py"):
+                    file_path = os.path.join(root, file)
+                    try:
+                        # Count lines in the file
+                        with open(file_path, "r", encoding="utf-8", errors="replace") as f:
+                            line_count = sum(1 for _ in f)
+
+                        # Make path relative to repo_path for better display
+                        rel_path = os.path.relpath(file_path, repo_path)
+                        file_data.append({"filename": rel_path, "lines": line_count})
+                    except Exception as e:
+                        print(f"⚠️ Warning: Could not read {file_path}: {str(e)}")
+                        continue
+
+        # Check if any files were found
+        if not file_data:
+            return ValueError("❌ No Python files found in the repository")
+
+        # Convert to DataFrame
+        df = pl.DataFrame(file_data)
+
+        # Sort DataFrame by line count (descending)
+        df = df.sort("lines", descending=True)
+        df = df.filter(pl.col("lines") > 0)  # Filter out empty files
+
+        # Add total count
+        total_lines = int(df["lines"].sum())
+        file_count: int = len(df)
+
+        # Print the DataFrame
+        print("\n📊 Python Files Line Count (sorted max to min):")
+        print(df)
+        print(f"\n📁 Total Python files: {file_count}")
+        print(f"📝 Total lines of Python code: {total_lines:,}")
+
+        # Create visualizations with Plotly
+        # Only visualize top files (too many files make the chart unreadable)
+        top_n: int = min(20, len(df))
+        top_files_df = df.head(top_n).clone()
+
+        # Calculate percentage of total for top files
+        top_files_df = top_files_df.with_columns((pl.col("lines") / total_lines * 100).round(1).alias("percentage"))
+
+        # Shorten filenames for better display
+        import os
+
+        top_files_df = top_files_df.with_columns(pl.col("filename").map_elements(lambda x: os.path.basename(x) if len(x) > 25 else x, return_dtype=pl.Utf8).alias("short_name"))
+
+        # Create bar chart with hover info showing full path
+        fig = go.Figure()
+
+        # Add bars with gradient color based on line count
+        fig.add_trace(
+            go.Bar(
+                x=top_files_df["short_name"].to_list(),
+                y=top_files_df["lines"].to_list(),
+                text=[f"{x}%" for x in top_files_df["percentage"].to_list()],
+                textposition="auto",
+                hovertemplate="<b>%{customdata}</b><br>Lines: %{y:,}<br>Percentage: %{text}<extra></extra>",
+                customdata=top_files_df["filename"],
+                marker={"color": top_files_df["lines"], "colorscale": "Viridis", "showscale": True, "colorbar": {"title": "Lines", "thickness": 20, "tickformat": ","}, "line": {"width": 1, "color": "white"}},
+                opacity=0.9,
+            )
+        )
+
+        # Update layout with dark theme
+        fig.update_layout(
+            title={"text": f"🏆 Top {top_n} Python Files by Size", "y": 0.95, "x": 0.5, "xanchor": "center", "yanchor": "top", "font": {"size": 24, "color": "white"}},
+            xaxis_title="File Name 📄",
+            yaxis_title="Lines of Code 📝",
+            template="plotly_dark",
+            plot_bgcolor="rgba(25, 25, 35, 1)",
+            paper_bgcolor="rgba(15, 15, 25, 1)",
+            font={"family": "Arial, sans-serif", "size": 14, "color": "white"},
+            height=700,
+            margin={"l": 80, "r": 80, "t": 100, "b": 100},
+            xaxis={"tickangle": 45, "showgrid": False, "showline": True, "linecolor": "rgba(200, 200, 255, 0.2)", "tickfont": {"size": 12}},
+            yaxis={"showgrid": True, "gridcolor": "rgba(80, 80, 100, 0.2)", "showline": True, "linecolor": "rgba(200, 200, 255, 0.2)", "tickformat": ",", "tickfont": {"size": 12}},
+        )
+
+        # Define pie chart figure before conditionally using it
+        fig2: "Optional[go.Figure]" = None
+
+        # Add pie chart showing distribution
+        if len(df) > top_n:
+            # Prepare data for pie chart - top files plus "Others"
+            others_lines = df.slice(top_n)["lines"].sum()
+            pie_labels = list(top_files_df["short_name"]) + ["Others"]
+            pie_values = list(top_files_df["lines"]) + [others_lines]
+            pie_customdata = list(top_files_df["filename"]) + [f"Other {len(df) - top_n} files"]
+
+            fig2 = go.Figure()
+            fig2.add_trace(go.Pie(labels=pie_labels, values=pie_values, customdata=pie_customdata, textinfo="percent", hovertemplate="<b>%{customdata}</b><br>Lines: %{value:,}<br>Percentage: %{percent}<extra></extra>", marker={"colors": px.colors.sequential.Viridis, "line": {"color": "white", "width": 1}}, hole=0.4, sort=False))

+            fig2.update_layout(
+                title={"text": "🍩 Python Code Distribution by File", "y": 0.95, "x": 0.5, "xanchor": "center", "yanchor": "top", "font": {"size": 24, "color": "white"}},
+                template="plotly_dark",
+                plot_bgcolor="rgba(25, 25, 35, 1)",
+                paper_bgcolor="rgba(15, 15, 25, 1)",
+                font={"family": "Arial, sans-serif", "size": 14, "color": "white"},
+                height=700,
+                annotations=[{"text": f"Total<br>{total_lines:,}<br>lines", "x": 0.5, "y": 0.5, "font": {"size": 18, "color": "white"}, "showarrow": False}],
+            )
+
+        # Save visualizations
+        plot_dir = Path.home().joinpath("tmp_results", "tmp_images", Path(repo_path).name)
+        plot_dir.mkdir(parents=True, exist_ok=True)
+
+        # Bar chart
+        bar_html_path = plot_dir.joinpath("top_files_by_size.html")
+        bar_png_path = plot_dir.joinpath("top_files_by_size.png")
+        fig.write_html(bar_html_path, include_plotlyjs="cdn")
+        fig.write_image(bar_png_path, width=1200, height=700, scale=2)
+
+        print(f"\n🖼️ Interactive bar chart saved as {bar_html_path}")
+        print(f"🖼️ Static bar chart saved as {bar_png_path}")
+
+        # Pie chart if available
+        if fig2 is not None:
+            pie_html_path = plot_dir.joinpath("files_distribution_pie.html")
+            pie_png_path = plot_dir.joinpath("files_distribution_pie.png")
+            fig2.write_html(pie_html_path, include_plotlyjs="cdn")
+            fig2.write_image(pie_png_path, width=1200, height=700, scale=2)
+
+            print(f"🖼️ Interactive pie chart saved as {pie_html_path}")
+            print(f"🖼️ Static pie chart saved as {pie_png_path}")
+
+        return df
+
+    except Exception as e:
+        return Exception(f"❌ Error analyzing repository: {str(e)}")
+
+
+@app.command()
+def print_python_files_by_size(repo_path: str = typer.Argument(..., help="Path to the git repository")):
+    """Print Python files sorted by size with visualizations."""
+    result = _print_python_files_by_size_impl(repo_path)
+    if isinstance(result, Exception):
+        print(f"Error: {result}")
+        return
+
+
+if __name__ == "__main__":
+    app()
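The new `count_lines.py` registers its functions as Typer commands, so with Typer's default naming the underscores become dashes on the command line (the `analyze-over-time` spelling is confirmed by the frontend wrapper in the next file). A minimal usage sketch, assuming GitPython, polars, plotly, and rich are available; the repository path is only an example:

```bash
# Sketch: invoking the new line-counting commands directly (paths are examples)
python -m machineconfig.scripts.python.count_lines count-historical ~/code/machineconfig
python -m machineconfig.scripts.python.count_lines analyze-over-time ~/code/machineconfig
python -m machineconfig.scripts.python.count_lines print-python-files-by-size ~/code/machineconfig
```

The plotting commands write their HTML/PNG output under `~/tmp_results/tmp_images/<repo name>/`; note that `fig.write_image` typically also requires the `kaleido` package alongside plotly.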
machineconfig/scripts/python/count_lines_frontend.py
@@ -0,0 +1,12 @@
+
+import typer
+
+
+def analyze_repo_development(repo_path: str = typer.Argument(..., help="Path to the git repository")):
+    cmd = f"""uv run --python 3.13 --with machineconfig machineconfig.scripts.python.count_lines analyze-over-time {repo_path}"""
+    from machineconfig.utils.code import run_script
+    run_script(cmd)
+
+
+if __name__ == "__main__":
+    pass
machineconfig/scripts/python/devops.py
@@ -4,13 +4,15 @@ import machineconfig.utils.installer_utils.installer as installer_entry_point
 import machineconfig.scripts.python.share_terminal as share_terminal
 import machineconfig.scripts.python.repos as repos
 
+from machineconfig import __version__
 import typer
 
-
+
+app = typer.Typer(help=f"🛠️ DevOps operations @ machineconfig {__version__}", no_args_is_help=True)
 
 app.command(name="install", help="📦 Install essential packages")(installer_entry_point.main)
 app.command(name="share-terminal", help="📡 Share terminal via web browser")(share_terminal.main)
-app.
+app.add_typer(repos.app, name="repos", help="📁 Manage git repositories")
 
 ssh_app = typer.Typer(help="🔐 SSH operations subcommands", no_args_is_help=True)
 app.add_typer(ssh_app, name="ssh")
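Because `repos.app` is now mounted with `add_typer`, the repository commands appear as a `repos` group of the DevOps CLI. A short sketch of the resulting invocation, matching the README section further down (the directory is an example):

```bash
# Sketch: the repos command group hangs off the devops CLI after this change
python -m machineconfig.scripts.python.devops repos --help
python -m machineconfig.scripts.python.devops repos all ~/code --recursive
```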
machineconfig/scripts/python/interactive.py
@@ -187,7 +187,7 @@ Set-Service -Name sshd -StartupType 'Automatic'"""
     if "retrieve_repositories" in selected_options:
         console.print(Panel("📚 [bold bright_magenta]REPOSITORIES[/bold bright_magenta]\n[italic]Project code retrieval[/italic]", border_style="bright_magenta"))
         from machineconfig.scripts.python import repos as module
-        module.main(directory=str(Path.home() / "code"),
+        module.main(directory=str(Path.home() / "code"), capture=True, cloud="odg1")
 
     if "retrieve_data" in selected_options:
         console.print(Panel("💾 [bold bright_cyan]DATA RETRIEVAL[/bold bright_cyan]\n[italic]Backup restoration[/italic]", border_style="bright_cyan"))
machineconfig/scripts/python/repos.py
@@ -1,4 +1,4 @@
-"""Repos
+"""Repos CLI powered by Typer.
 
 # TODO use gh api user --jq '.login' to get the username and use it to clone the repos.
 in the event that username@github.com is not mentioned in the remote url.
@@ -6,82 +6,214 @@ in the event that username@github.com is not mentioned in the remote url.
 """
 
 
-import
+from pathlib import Path
 from typing import Annotated, Optional
+
+import typer
+
+
+from machineconfig.utils.source_of_truth import CONFIG_PATH, DEFAULTS_PATH
 from pathlib import Path
 
 
-def
[… 12 removed old lines follow here; their content is not rendered in this diff view …]
+def _print_banner() -> None:
+    typer.echo("\n" + "=" * 50)
+    typer.echo("📂 Welcome to the Repository Manager")
+    typer.echo("=" * 50 + "\n")
+
+
+
+app = typer.Typer(help="� Manage development repositories", no_args_is_help=True)
+sync_app = typer.Typer(help="� Manage repository specifications and syncing", no_args_is_help=True)
+app.add_typer(sync_app, name="sync", help="� Sync repositories using saved specs")
+
+
+DirectoryArgument = Annotated[
+    Optional[str],
+    typer.Argument(help="📁 Folder containing repos or the specs JSON file to use."),
+]
+RecursiveOption = Annotated[
+    bool,
+    typer.Option("--recursive", "-r", help="🔍 Recurse into nested repositories."),
+]
+NoSyncOption = Annotated[
+    bool,
+    typer.Option("--no-sync", help="🚫 Disable automatic uv sync after pulls."),
+]
+CloudOption = Annotated[
+    Optional[str],
+    typer.Option("--cloud", "-c", help="☁️ Upload to or download from this cloud remote."),
+]
+
+
+
+
+def _resolve_directory(directory: Optional[str]) -> Path:
+    if directory is None:
+        directory = Path.cwd().as_posix()
+        typer.echo(f"📁 Using directory: {directory}")
+    return Path(directory).expanduser().absolute()
+
+
+def _git_operations(
+    directory: Optional[str],
+    *,
+    pull: bool,
+    commit: bool,
+    push: bool,
+    recursive: bool,
+    no_sync: bool,
 ) -> None:
[… 3 removed old lines follow here; their content is not rendered in this diff view …]
+    _print_banner()
+    repos_root = _resolve_directory(directory)
+    auto_sync = not no_sync
+    from machineconfig.scripts.python.repos_helper_action import perform_git_operations
+    from machineconfig.utils.path_extended import PathExtended
+    perform_git_operations(
+        repos_root=PathExtended(repos_root),
+        pull=pull,
+        commit=commit,
+        push=push,
+        recursive=recursive,
+        auto_sync=auto_sync,
+    )
 
[… 2 removed old lines follow here; their content is not rendered in this diff view …]
+
+def _resolve_spec_path(directory: Optional[str], cloud: Optional[str]) -> Path:
+    repos_root = _resolve_directory(directory)
     from machineconfig.utils.path_extended import PathExtended
-
+    if not repos_root.exists() or repos_root.name != "repos.json":
+        candidate = Path(CONFIG_PATH).joinpath("repos").joinpath(PathExtended(repos_root).rel2home()).joinpath("repos.json")
+        repos_root = candidate
+    if not repos_root.exists():
+        cloud_name: Optional[str]
+        if cloud is None:
+            from machineconfig.utils.io import read_ini
+            cloud_name = read_ini(DEFAULTS_PATH)["general"]["rclone_config_name"]
+            typer.echo(f"⚠️ Using default cloud: {cloud_name}")
+        else:
+            cloud_name = cloud
+        assert cloud_name is not None, (
+            f"Path {repos_root} does not exist and cloud was not passed. You can't clone without one of them."
+        )
+        from machineconfig.utils.path_extended import PathExtended
+        PathExtended(repos_root).from_cloud(cloud=cloud_name, rel2home=True)
+    assert repos_root.exists() and repos_root.name == "repos.json", (
+        f"Path {repos_root} does not exist and cloud was not passed. You can't clone without one of them."
+    )
+    return repos_root
+
+
+def _clone_from_specs(
+    directory: Optional[str],
+    cloud: Optional[str],
+    *,
+    checkout_branch_flag: bool,
+    checkout_commit_flag: bool,
+) -> None:
+    _print_banner()
+    typer.echo("\n📥 Cloning or checking out repositories...")
+    spec_path = _resolve_spec_path(directory, cloud)
     from machineconfig.scripts.python.repos_helper_clone import clone_repos
-    from machineconfig.scripts.python.repos_helper_action import perform_git_operations
 
[… 49 removed old lines follow here; their content is not rendered in this diff view …]
+    clone_repos(
+        spec_path=spec_path,
+        preferred_remote=None,
+        checkout_branch_flag=checkout_branch_flag,
+        checkout_commit_flag=checkout_commit_flag,
+    )
+
+
+@app.command()
+def push(
+    directory: DirectoryArgument = None,
+    recursive: RecursiveOption = False,
+    no_sync: NoSyncOption = False,
+) -> None:
+    """🚀 Push changes across repositories."""
+    _git_operations(directory, pull=False, commit=False, push=True, recursive=recursive, no_sync=no_sync)
+
+
+@app.command()
+def pull(
+    directory: DirectoryArgument = None,
+    recursive: RecursiveOption = False,
+    no_sync: NoSyncOption = False,
+) -> None:
+    """⬇️ Pull changes across repositories."""
+    _git_operations(directory, pull=True, commit=False, push=False, recursive=recursive, no_sync=no_sync)
+
+
+@app.command()
+def commit(
+    directory: DirectoryArgument = None,
+    recursive: RecursiveOption = False,
+    no_sync: NoSyncOption = False,
+) -> None:
+    """💾 Commit changes across repositories."""
+    _git_operations(directory, pull=False, commit=True, push=False, recursive=recursive, no_sync=no_sync)
+
+
+@app.command()
+def all(
+    directory: DirectoryArgument = None,
+    recursive: RecursiveOption = False,
+    no_sync: NoSyncOption = False,
+) -> None:
+    """🔄 Pull, commit, and push changes across repositories."""
+    _git_operations(directory, pull=True, commit=True, push=True, recursive=recursive, no_sync=no_sync)
+
+
+@sync_app.command()
+def record(
+    directory: DirectoryArgument = None,
+    cloud: CloudOption = None,
+) -> None:
+    """📝 Record repositories into a repos.json specification."""
+    _print_banner()
+    repos_root = _resolve_directory(directory)
+    from machineconfig.scripts.python.repos_helper_record import main as record_repos
+    save_path = record_repos(repos_root=repos_root)
+    from machineconfig.utils.path_extended import PathExtended
+    if cloud is not None:
+        PathExtended(save_path).to_cloud(rel2home=True, cloud=cloud)
+
+
+@sync_app.command()
+def capture(
+    directory: DirectoryArgument = None,
+    cloud: CloudOption = None,
+) -> None:
+    """📥 Clone repositories described by a repos.json specification."""
+    _clone_from_specs(directory, cloud, checkout_branch_flag=False, checkout_commit_flag=False)
+
+
+@sync_app.command(name="checkout")
+def checkout_command(
+    directory: DirectoryArgument = None,
+    cloud: CloudOption = None,
+) -> None:
+    """🔀 Check out specific commits listed in the specification."""
+    _clone_from_specs(directory, cloud, checkout_branch_flag=False, checkout_commit_flag=True)
+
+
+@sync_app.command(name="checkout-to-branch")
+def checkout_to_branch_command(
+    directory: DirectoryArgument = None,
+    cloud: CloudOption = None,
+) -> None:
+    """🔀 Check out to the main branch defined in the specification."""
+    _clone_from_specs(directory, cloud, checkout_branch_flag=True, checkout_commit_flag=False)
+
+
+@app.command()
+def analyze(
+    directory: DirectoryArgument = None,
+) -> None:
+    """📊 Analyze repository development over time."""
+    _print_banner()
+    repo_path = directory if directory is not None else "."
+    from machineconfig.scripts.python.count_lines_frontend import analyze_repo_development as _analyze
+
+    _analyze(repo_path=repo_path)
+
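Taken together, the new command hierarchy in `repos.py` enables a record-on-one-machine, capture-on-another workflow driven by a `repos.json` spec. A hedged sketch, assuming an rclone remote named `myremote` has been configured; the remote name and paths are placeholders:

```bash
# On the source machine: record ~/code into a repos.json spec and upload it
python -m machineconfig.scripts.python.devops repos sync record ~/code --cloud myremote

# On the target machine: fetch the spec and clone the repositories it lists
python -m machineconfig.scripts.python.devops repos sync capture ~/code --cloud myremote

# Optionally pin repos to the recorded commits or switch to the tracked branch
python -m machineconfig.scripts.python.devops repos sync checkout ~/code
python -m machineconfig.scripts.python.devops repos sync checkout-to-branch ~/code
```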
machineconfig/scripts/python/repos_helper_record.py
@@ -1,4 +1,5 @@
 from machineconfig.utils.path_extended import PathExtended
+from pathlib import Path
 from machineconfig.utils.schemas.repos.repos_types import GitVersionInfo, RepoRecordDict, RepoRemote
 
 from machineconfig.utils.schemas.repos.repos_types import RepoRecordFile
@@ -185,8 +186,9 @@ def record_repos_recursively(repos_root: str, r: bool, progress: Progress | None
     return res
 
 
-def main(repos_root:
+def main(repos_root: Path):
     print("\n📝 Recording repositories...")
+    repos_root = PathExtended(repos_root).expanduser().absolute()
 
     # Count total directories and repositories for accurate progress tracking
     print("🔍 Analyzing directory structure...")
{machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: machineconfig
-Version:
+Version: 5.11
 Summary: Dotfiles management package
 Author-email: Alex Al-Saffar <programmer@usa.com>
 License: Apache 2.0
@@ -161,6 +161,26 @@ curl https://raw.githubusercontent.com/thisismygitrepo/machineconfig/main/src/ma
 short `curl bit.ly/cfgcroshelllinux -L | bash`
 
 
+## Repository management CLI
+
+The DevOps CLI now exposes rich subcommands for working with git repositories. Run `python -m machineconfig.scripts.python.devops repos --help` to explore the hierarchy:
+
+* Top-level actions: `push`, `pull`, `commit`, `all`, and `analyze`.
+* These commands accept `--recursive/-r` and `--no-sync` to control nested repos and automatic `uv sync`.
+* Sync workflows live under `sync`:
+  * `record` captures the current machine state into a `repos.json`.
+  * `capture` clones repos from a specification without changing commits.
+  * `checkout` aligns repositories to commits stored in the spec.
+  * `checkout-to-branch` switches repositories to the tracked branch.
+* Each sync subcommand accepts `--cloud/-c` for fetching/saving specs from remote storage.
+
+Example:
+
+```bash
+python -m machineconfig.scripts.python.devops repos sync record ~/code --cloud my_remote
+python -m machineconfig.scripts.python.devops repos pull ~/code --recursive
+```
+
 # Author
 Alex Al-Saffar. [email](mailto:programmer@usa.com)
 
{machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/RECORD
@@ -124,7 +124,6 @@ machineconfig/scripts/linux/mount_drive,sha256=zemKofv7hOmRN_V3qK0q580GkfWw3Vdik
 machineconfig/scripts/linux/mount_nfs,sha256=kpIbAse3igReEGgnXngez2ytWucLwmb_xo6e6KeO_rs,1870
 machineconfig/scripts/linux/mount_nw_drive,sha256=pNzHc7yZn5YIzn2BkpKvd5530PqbestkzrdoXaChyqY,2338
 machineconfig/scripts/linux/mount_smb,sha256=7UN5EP1kuxYL_-CnyaH4f9Wuu2CgALDZpJ0mPcdvCiY,94
-machineconfig/scripts/linux/repos,sha256=1qbmIemZjkjcPmiL1Bp8pD46E83OXsR5EJ0XQt29Bhc,96
 machineconfig/scripts/linux/scheduler,sha256=Z9Wu0N9vWRbi4FoRbpcc4ydq4bVaDjZOXESR35ZN0rI,100
 machineconfig/scripts/linux/sessions,sha256=A4vxUDHnDhyph833iy-tBprgQ7av_DZ5t031PRrbqVQ,98
 machineconfig/scripts/linux/share_cloud.sh,sha256=75IzCm7Nob1wO-zlfaNyPPod1IjAsVCG5lcMFdXmiI4,3010
@@ -146,8 +145,10 @@ machineconfig/scripts/python/cloud_manager.py,sha256=YN0DYLzPKtMBaks-EAVwFmkCu3X
 machineconfig/scripts/python/cloud_mount.py,sha256=GwcXbd5ohoHGESfX5edtCEl2-umDDxH_AZapmFSzc9E,6740
 machineconfig/scripts/python/cloud_repo_sync.py,sha256=8dnlHbQqRymPRU0v01pNIuaIvFeY4fReP7ewNSSCt34,9765
 machineconfig/scripts/python/cloud_sync.py,sha256=RWGpAfJ9fnN18yNBSgN44dzA38Hmd4879JL5r2pcyrM,3514
+machineconfig/scripts/python/count_lines.py,sha256=aVg91ArHg73swKNGMQzi_WlPnTLEbc8rkNZkCv_qpvI,15894
+machineconfig/scripts/python/count_lines_frontend.py,sha256=1DQn9YUbl5IYjjJ1fS5qEe60X-5ez6zZiXMQXVTA4-8,359
 machineconfig/scripts/python/croshell.py,sha256=parFHSL859H00ExDpDBPHBFe_E_DrfVq6P8CpCGVK9A,8571
-machineconfig/scripts/python/devops.py,sha256=
+machineconfig/scripts/python/devops.py,sha256=c5URta0jxlxi7fyNpUit5w7eZbQUaXpN59C6ZB_06Xk,3487
 machineconfig/scripts/python/devops_add_identity.py,sha256=wvjNgqsLmqD2SxbNCW_usqfp0LI-TDvcJJKGOWt2oFw,3775
 machineconfig/scripts/python/devops_add_ssh_key.py,sha256=BXB-9RvuSZO0YTbnM2azeABW2ngLW4SKhhAGAieMzfw,6873
 machineconfig/scripts/python/devops_backup_retrieve.py,sha256=JLJHmi8JmZ_qVTeMW-qBEAYGt1fmfWXzZ7Gm-Q-GDcU,5585
@@ -164,16 +165,16 @@ machineconfig/scripts/python/fire_jobs_streamlit_helper.py,sha256=47DEQpj8HBSa-_
 machineconfig/scripts/python/ftpx.py,sha256=QfQTp-6jQP6yxfbLc5sKxiMtTgAgc8sjN7d17_uLiZc,9400
 machineconfig/scripts/python/get_zellij_cmd.py,sha256=e35-18hoXM9N3PFbvbizfkNY_-63iMicieWE3TbGcCQ,576
 machineconfig/scripts/python/gh_models.py,sha256=3BLfW25mBRiPO5VKtVm-nMlKLv-PaZDw7mObajq6F6M,5538
-machineconfig/scripts/python/interactive.py,sha256=
+machineconfig/scripts/python/interactive.py,sha256=Tmqes57K0Z1svEcxM6uOd6nSivwwQCthrupToeubDAo,11793
 machineconfig/scripts/python/mount_nfs.py,sha256=aECrL64j9g-9rF49sVJAjGmzaoGgcMnl3g9v17kQF4c,3239
 machineconfig/scripts/python/mount_nw_drive.py,sha256=iru6AtnTyvyuk6WxlK5R4lDkuliVpPV5_uBTVVhXtjQ,1550
 machineconfig/scripts/python/mount_ssh.py,sha256=k2fKq3f5dKq_7anrFOlqvJoI_3U4EWNHLRZ1o3Lsy6M,2268
 machineconfig/scripts/python/onetimeshare.py,sha256=bmGsNnskym5OWfIhpOfZG5jq3m89FS0a6dF5Sb8LaZM,2539
 machineconfig/scripts/python/pomodoro.py,sha256=SPkfeoZGv8rylGiOyzQ7UK3aXZ3G2FIOuGkSuBUggOI,2019
-machineconfig/scripts/python/repos.py,sha256=
+machineconfig/scripts/python/repos.py,sha256=QPmtDq1gkzWGMduHpDHPMUe-7qPO_GemjQZLNAU-SYo,7157
 machineconfig/scripts/python/repos_helper_action.py,sha256=6bQln9x2L_lOnvWwnTM_nJjkugl5LDDGHedVsz2zuI4,13320
 machineconfig/scripts/python/repos_helper_clone.py,sha256=9vGb9NCXT0lkerPzOJjmFfhU8LSzE-_1LDvjkhgnal0,5461
-machineconfig/scripts/python/repos_helper_record.py,sha256=
+machineconfig/scripts/python/repos_helper_record.py,sha256=dtnnInQPn00u1cyr0oOgJ_jB12O3bSiNctwzC3W7_3w,10994
 machineconfig/scripts/python/repos_helper_update.py,sha256=AYyKIB7eQ48yoYmFjydIhRI1lV39TBv_S4_LCa-oKuQ,11042
 machineconfig/scripts/python/scheduler.py,sha256=rKhssuxkD697EY6qaV6CSdNhxpAQLDWO4fE8GMCQ9FA,3061
 machineconfig/scripts/python/sessions.py,sha256=e8gL0fVWOZ5WcJsA3ZWfqJBc5c7g-rMlVf0SF63rIaU,8547
@@ -244,7 +245,6 @@ machineconfig/scripts/windows/mount_ssh.ps1,sha256=zvU1737vR0f0S7Si1tXMb3ys_I9KV
 machineconfig/scripts/windows/nano.ps1,sha256=H1PNN1x3UnOCGwijgMij-K2ZM2E20sfsLTEEap-W5dQ,50
 machineconfig/scripts/windows/pomodoro.ps1,sha256=9r61cwRy4M2_1A-NFb0fxUuUONxXBLJmLYtY3apkyQA,80
 machineconfig/scripts/windows/reload_path.ps1,sha256=81hQY18LFApVZWFiUfgMzzPH2pJ1WD1fHInfmicBZFA,217
-machineconfig/scripts/windows/repos.ps1,sha256=sjUcrURmYuxdcrdhwLHeWxwByyLgY4k13i8VYSFDKuo,76
 machineconfig/scripts/windows/scheduler.ps1,sha256=YfOlBxCkPfeQPeyCiNw0g3kIpdbjjf6daLEWuyHSaXY,81
 machineconfig/scripts/windows/sessions.ps1,sha256=cQdgSS3rVWvhthsUi5lyFI05_GKiRGI-j4FB1SZNKpM,80
 machineconfig/scripts/windows/share_cloud.cmd,sha256=exD7JCdxw2LqVjw2MKCYHbVZlEqmelXtwnATng-dhJ4,1028
@@ -407,8 +407,8 @@ machineconfig/utils/schemas/fire_agents/fire_agents_input.py,sha256=pTxvLzIpD5RF
 machineconfig/utils/schemas/installer/installer_types.py,sha256=QClRY61QaduBPJoSpdmTIdgS9LS-RvE-QZ-D260tD3o,1214
 machineconfig/utils/schemas/layouts/layout_types.py,sha256=TcqlZdGVoH8htG5fHn1KWXhRdPueAcoyApppZsPAPto,2020
 machineconfig/utils/schemas/repos/repos_types.py,sha256=ECVr-3IVIo8yjmYmVXX2mnDDN1SLSwvQIhx4KDDQHBQ,405
-machineconfig-
-machineconfig-
-machineconfig-
-machineconfig-
-machineconfig-
+machineconfig-5.11.dist-info/METADATA,sha256=HPKfljpArui1ViEtQWTnWtLDiyLzZTfrqnopro65cXY,8030
+machineconfig-5.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+machineconfig-5.11.dist-info/entry_points.txt,sha256=2afE1mw-o4MUlfxyX73SV02XaQI4SV_LdL2r6_CzhPU,1074
+machineconfig-5.11.dist-info/top_level.txt,sha256=porRtB8qms8fOIUJgK-tO83_FeH6Bpe12oUVC670teA,14
+machineconfig-5.11.dist-info/RECORD,,
{machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/entry_points.txt
@@ -13,7 +13,6 @@ initai = machineconfig.scripts.python.ai.initai:main
 kill_process = machineconfig.utils.procs:main
 mount_nfs = machineconfig.scripts.python.mount_nfs:main
 mount_nw_drive = machineconfig.scripts.python.mount_nw_drive:main
-repos = machineconfig.scripts.python.repos:main_from_parser
 sessions = machineconfig.scripts.python.sessions:main_from_parser
 start_slidev = machineconfig.scripts.python.start_slidev:arg_parser
 wifi_conn = machineconfig.scripts.python.wifi_conn:arg_parser
machineconfig/scripts/windows/repos.ps1
@@ -1 +0,0 @@
-uv run --python 3.13 --no-dev --project $HOME/code/machineconfig repos $args

{machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/WHEEL: file without changes
{machineconfig-4.99.dist-info → machineconfig-5.11.dist-info}/top_level.txt: file without changes