bitp 1.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bitbake_project/__init__.py +88 -0
- bitbake_project/__main__.py +14 -0
- bitbake_project/cli.py +1580 -0
- bitbake_project/commands/__init__.py +60 -0
- bitbake_project/commands/branch.py +889 -0
- bitbake_project/commands/common.py +2372 -0
- bitbake_project/commands/config.py +1515 -0
- bitbake_project/commands/deps.py +903 -0
- bitbake_project/commands/explore.py +2269 -0
- bitbake_project/commands/export.py +1030 -0
- bitbake_project/commands/fragment.py +884 -0
- bitbake_project/commands/init.py +515 -0
- bitbake_project/commands/projects.py +1505 -0
- bitbake_project/commands/recipe.py +1374 -0
- bitbake_project/commands/repos.py +154 -0
- bitbake_project/commands/search.py +313 -0
- bitbake_project/commands/update.py +181 -0
- bitbake_project/core.py +1811 -0
- bitp-1.0.6.dist-info/METADATA +401 -0
- bitp-1.0.6.dist-info/RECORD +24 -0
- bitp-1.0.6.dist-info/WHEEL +5 -0
- bitp-1.0.6.dist-info/entry_points.txt +3 -0
- bitp-1.0.6.dist-info/licenses/COPYING +338 -0
- bitp-1.0.6.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright (C) 2025 Bruce Ashfield <bruce.ashfield@gmail.com>
|
|
3
|
+
#
|
|
4
|
+
# SPDX-License-Identifier: GPL-2.0-only
|
|
5
|
+
#
|
|
6
|
+
"""Repos command - list layer repositories."""
|
|
7
|
+
|
|
8
|
+
import os
|
|
9
|
+
import subprocess
|
|
10
|
+
import sys
|
|
11
|
+
from typing import List, Optional, Tuple
|
|
12
|
+
|
|
13
|
+
from ..core import Colors, current_branch, load_defaults, repo_is_clean, terminal_color
|
|
14
|
+
from .common import (
|
|
15
|
+
resolve_base_and_layers,
|
|
16
|
+
repo_display_name,
|
|
17
|
+
get_upstream_count_ls_remote,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
from .update import fetch_repo, get_upstream_commits
|
|
21
|
+
|
|
22
|
+
def run_repos(args) -> int:
    """List layer repos, optionally with a one-line status per repo.

    In plain list mode (any subcommand other than "status") each repo path
    is printed one per line.  In status mode each repo gets a one-liner:
    local commits ahead of origin, pending upstream commits, current branch
    and worktree cleanliness.  Repos that were discovered rather than
    explicitly configured are marked with " (?)".

    Returns 0 unconditionally.
    """
    defaults = load_defaults(args.defaults_file)
    pairs, repo_sets = resolve_base_and_layers(args.bblayers, defaults)

    # Group layers by their containing repo.  NOTE: the annotation uses the
    # builtin generic `dict` — this module never imports typing.Dict, so the
    # previous `Dict[...]` annotation raised NameError at call time.
    repo_layers: dict[str, List[str]] = {}
    for layer, repo in pairs:
        repo_layers.setdefault(repo, []).append(layer)

    show_status = args.repos_command == "status"
    do_fetch = show_status and getattr(args, "fetch", False)

    # Optionally fetch first so the ahead/behind counts are current.
    if do_fetch:
        print("Fetching from origin...")
        for repo in repo_layers:
            if defaults.get(repo, "rebase") != "skip":
                fetch_repo(repo)

    for repo in repo_layers:
        is_discovered = repo in repo_sets.discovered
        discovered_marker = " (?)" if is_discovered else ""

        if not show_status:
            # Simple list mode
            print(f"{repo}{discovered_marker}")
            continue

        # Status mode: one-liner per repo
        default_action = defaults.get(repo, "rebase")
        if default_action == "skip":
            print(f"→ {repo}{discovered_marker}: default=skip")
            continue

        branch = current_branch(repo)
        if not branch:
            # Detached HEAD: no branch to compare against, only cleanliness.
            is_clean = repo_is_clean(repo)
            worktree_status = Colors.green("[clean]") if is_clean else Colors.red("[DIRTY]")
            print(f"→ {repo}{discovered_marker}: detached HEAD {worktree_status}")
            continue

        remote_ref = f"origin/{branch}"
        remote_exists = (
            subprocess.run(
                ["git", "-C", repo, "rev-parse", "--verify", remote_ref],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            ).returncode
            == 0
        )

        # Get commit counts
        local_count = 0
        upstream_count = 0
        upstream_unknown = False

        if remote_exists:
            # Local commits ahead of the remote ref (always via local refs).
            try:
                out = subprocess.check_output(
                    ["git", "-C", repo, "rev-list", "--count", f"{remote_ref}..HEAD"],
                    text=True,
                    stderr=subprocess.DEVNULL,
                )
                local_count = int(out.strip())
            except (subprocess.CalledProcessError, ValueError):
                pass

            # Upstream commits: after a fetch local refs are current, so
            # rev-list is accurate; otherwise fall back to ls-remote probing.
            if do_fetch:
                try:
                    out = subprocess.check_output(
                        ["git", "-C", repo, "rev-list", "--count", f"HEAD..{remote_ref}"],
                        text=True,
                        stderr=subprocess.DEVNULL,
                    )
                    upstream_count = int(out.strip())
                except (subprocess.CalledProcessError, ValueError):
                    pass
            else:
                result = get_upstream_count_ls_remote(repo, branch)
                if result is None:
                    pass  # No remote tracking
                elif result == -1:
                    upstream_unknown = True  # Has changes but can't count
                else:
                    upstream_count = result

        is_clean = repo_is_clean(repo)

        # Build status parts
        status_parts = []
        if local_count:
            status_parts.append(f"{local_count} local commit(s)")
        if upstream_unknown:
            status_parts.append(terminal_color("upstream", "upstream has changes"))
        elif upstream_count:
            status_parts.append(terminal_color("upstream", f"{upstream_count} to pull"))
        if not status_parts:
            status_parts.append("up-to-date")

        # Format with colors.  Clean repos are colorized by kind; the old
        # code duplicated the [clean] assignment in both branches.
        if is_clean:
            color_key = "repo_discovered" if is_discovered else "repo"
            repo_display = terminal_color(color_key, f"{repo}{discovered_marker}")
            worktree_status = terminal_color("clean", "[clean]")
        else:
            repo_display = f"{repo}{discovered_marker}"
            worktree_status = terminal_color("dirty", "[DIRTY]")

        branch_display = Colors.bold(branch)
        print(f"→ {repo_display}: {', '.join(status_parts)} on {branch_display} {worktree_status}")

    return 0
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def diffstat_for_range(repo: str, range_spec: str) -> str:
    """Return `git diff --stat` output for *range_spec* in *repo*.

    The special spec "--root" diffs the canonical empty tree against HEAD,
    i.e. a diffstat of the entire history.  Returns "" when git fails.
    """
    git_prefix = ["git", "-C", repo]
    try:
        if range_spec != "--root":
            stat = subprocess.check_output(
                git_prefix + ["diff", "--stat", range_spec], text=True
            )
            return stat.strip()
        # Hash /dev/null as a tree object to obtain the empty-tree id,
        # then diff that against HEAD to cover every commit.
        empty_tree = subprocess.check_output(
            git_prefix + ["hash-object", "-t", "tree", "/dev/null"], text=True
        ).strip()
        stat = subprocess.check_output(
            git_prefix + ["diff", "--stat", f"{empty_tree}..HEAD"], text=True
        )
        return stat.strip()
    except subprocess.CalledProcessError:
        return ""
|
|
153
|
+
|
|
154
|
+
|
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright (C) 2025 Bruce Ashfield <bruce.ashfield@gmail.com>
|
|
3
|
+
#
|
|
4
|
+
# SPDX-License-Identifier: GPL-2.0-only
|
|
5
|
+
#
|
|
6
|
+
"""Search command - search OpenEmbedded Layer Index."""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import os
|
|
10
|
+
import shutil
|
|
11
|
+
import subprocess
|
|
12
|
+
import sys
|
|
13
|
+
import urllib.request
|
|
14
|
+
from typing import List, Optional
|
|
15
|
+
|
|
16
|
+
from ..core import Colors, get_fzf_color_args, get_fzf_preview_resize_bindings
|
|
17
|
+
from .init import _fetch_layer_index, _fetch_layer_dependencies
|
|
18
|
+
|
|
19
|
+
def run_search(args) -> int:
    """Search OpenEmbedded Layer Index for layers.

    Behavior depends on flags carried by *args*:
      --clone : resolve one layer from the query and git-clone it.
      --info  : print machine-readable key=value details for one layer.
      default : interactive fzf picker when available (and no query was
                given), otherwise an aligned text listing.

    Returns 0 on success, 1 on any error (no match, ambiguous match,
    missing URL, existing target, failed clone).
    """
    branch = args.branch
    query = args.query
    force = getattr(args, "force", False)

    # Fetch layers (from cache or API); None signals a fetch failure.
    data = _fetch_layer_index(force=force)
    if data is None:
        return 1

    # Filter by branch and deduplicate by layer name, flattening each index
    # entry down to just the fields this command displays.
    seen = set()
    layers = []
    for entry in data:
        entry_branch = entry.get("branch", {}).get("name", "")
        if entry_branch != branch:
            continue
        layer_info = entry.get("layer", {})
        name = layer_info.get("name", "")
        if name in seen:
            continue
        seen.add(name)
        layers.append({
            "name": name,
            "summary": layer_info.get("summary", ""),
            "description": layer_info.get("description", ""),
            "vcs_url": layer_info.get("vcs_url", ""),
            # NOTE: vcs_subdir lives on the branch-level entry, not on
            # entry["layer"], unlike the other fields.
            "vcs_subdir": entry.get("vcs_subdir", ""),
        })

    # Filter by query if provided (case-insensitive substring match over
    # name, summary and description).
    if query:
        query_lower = query.lower()
        layers = [
            l for l in layers
            if query_lower in l["name"].lower()
            or query_lower in l["summary"].lower()
            or query_lower in l["description"].lower()
        ]

    if not layers:
        if query:
            print(f"No layers found matching '{query}' on branch '{branch}'")
        else:
            print(f"No layers found on branch '{branch}'")
        return 1

    # Handle --clone flag: needs exactly one resolvable layer.
    do_clone = getattr(args, "clone", False)
    clone_target = getattr(args, "target", None)
    if do_clone:
        # Try exact match first, then accept a unique fuzzy match.
        exact = next((l for l in layers if l["name"].lower() == query.lower()), None) if query else None
        if exact:
            layer = exact
        elif len(layers) == 1:
            layer = layers[0]
        else:
            print(f"Multiple layers match '{query}' - be more specific or use exact name:")
            for l in sorted(layers, key=lambda x: x["name"])[:10]:
                print(f" {l['name']}")
            if len(layers) > 10:
                print(f" ... and {len(layers) - 10} more")
            return 1

        if not layer["vcs_url"]:
            print(f"No VCS URL for {layer['name']}")
            return 1

        # Show dependencies (required vs optional) before cloning.
        deps = _fetch_layer_dependencies(layer["name"], branch)
        if deps:
            required = [d["name"] for d in deps if d["required"]]
            optional = [d["name"] for d in deps if not d["required"]]
            if required:
                print(f"{Colors.yellow('Dependencies')}: {', '.join(required)}")
            if optional:
                print(f"{Colors.dim('Optional')}: {', '.join(optional)}")

        # Determine target directory; refuse to clobber an existing path.
        target = clone_target or f"layers/{layer['name']}"
        if os.path.exists(target):
            print(f"Target already exists: {target}")
            return 1

        print(f"Cloning {layer['name']} ({layer['vcs_url']}) -> {target}")
        try:
            subprocess.run(
                ["git", "clone", "-b", branch, layer["vcs_url"], target],
                check=True,
            )
            print(f"{Colors.green('Done.')}")
            if layer["vcs_subdir"]:
                print(f"Layer path: {target}/{layer['vcs_subdir']}")
            return 0
        except subprocess.CalledProcessError as e:
            print(f"{Colors.red('Clone failed')}: {e}")
            return 1

    # Handle --info flag (scriptable key=value output on stdout; errors
    # go to stderr so pipelines stay clean).
    do_info = getattr(args, "info", False)
    if do_info:
        if not query:
            print("Error: --info requires a layer name", file=sys.stderr)
            return 1
        # Try exact match first
        exact = next((l for l in layers if l["name"].lower() == query.lower()), None)
        if exact:
            layer = exact
        elif len(layers) == 1:
            layer = layers[0]
        else:
            print(f"Error: Multiple layers match '{query}' - be more specific:", file=sys.stderr)
            for l in sorted(layers, key=lambda x: x["name"])[:10]:
                print(f" {l['name']}", file=sys.stderr)
            return 1

        # Output machine-readable info
        print(f"name={layer['name']}")
        print(f"url={layer['vcs_url']}")
        if layer["vcs_subdir"]:
            print(f"subdir={layer['vcs_subdir']}")
        deps = _fetch_layer_dependencies(layer["name"], branch)
        if deps:
            required = [d["name"] for d in deps if d["required"]]
            optional = [d["name"] for d in deps if not d["required"]]
            if required:
                print(f"depends={','.join(required)}")
            if optional:
                print(f"optional={','.join(optional)}")
        return 0

    # Calculate max name length for column alignment, capped at 35 chars.
    max_name_len = max(len(l["name"]) for l in layers)
    max_name_len = min(max_name_len, 35)

    # Interactive selection with fzf when available and no query was given.
    if shutil.which("fzf") and not query:
        # Build tab-separated menu rows; only field 2 (the aligned display)
        # is shown, the rest feed the preview and the ctrl-y clone binding.
        # Format: name\tdisplay\tsummary\tdescription\tvcs_url\tvcs_subdir
        menu_lines = []
        for l in sorted(layers, key=lambda x: x["name"]):
            name = l["name"][:35]
            summary = l["summary"][:50] if l["summary"] else ""
            display = f"{name:<{max_name_len}} {summary}"
            # Escape single quotes / newlines so the description survives
            # interpolation into the shell preview command.
            desc = l["description"].replace("'", "'\\''").replace("\n", " ") if l["description"] else ""
            vcs_url = l["vcs_url"] or ""
            vcs_subdir = l["vcs_subdir"] or ""
            menu_lines.append(f"{l['name']}\t{display}\t{desc}\t{vcs_url}\t{vcs_subdir}")

        # Preview command ({N} placeholders are expanded by fzf itself).
        preview_cmd = r'''
echo -e "\033[1m{1}\033[0m"
echo
echo "{3}" | fold -s -w 70
echo
if [ -n "{4}" ]; then
echo -e "\033[36mClone:\033[0m git clone {4}"
fi
if [ -n "{5}" ]; then
echo -e "\033[36mLayer:\033[0m {5}"
fi
'''

        # Loop so the user can inspect/clone several layers per session;
        # Esc or empty selection exits.
        while True:
            try:
                result = subprocess.run(
                    [
                        "fzf",
                        "--height", "~70%",
                        "--header", f"OpenEmbedded Layers ({branch}) | Enter=copy clone cmd | Ctrl-y=clone | ?=preview | Esc=quit",
                        "--prompt", "Search: ",
                        "--with-nth", "2",
                        "--delimiter", "\t",
                        "--preview", preview_cmd,
                        "--preview-window", "right:50%:wrap",
                        "--bind", "?:toggle-preview",
                        # ctrl-y replaces fzf's output with a CLONE sentinel
                        # line carrying the layer name and URL.
                        "--bind", r"ctrl-y:become(printf 'CLONE\t%s\t%s\n' {1} {4})",
                    ] + get_fzf_preview_resize_bindings() + get_fzf_color_args(),
                    input="\n".join(menu_lines),
                    stdout=subprocess.PIPE,
                    text=True,
                )
            except FileNotFoundError:
                break

            if result.returncode != 0 or not result.stdout.strip():
                break

            output = result.stdout.strip()

            if output.startswith("CLONE\t"):
                # Clone the layer repo - output is "CLONE\t<name>\t<url>"
                parts = output.split("\t")
                layer_name = parts[1] if len(parts) > 1 else ""
                vcs_url = parts[2] if len(parts) > 2 else ""
                if not vcs_url:
                    print(f"\n No VCS URL for {layer_name}")
                    continue

                # Show dependencies
                print()
                deps = _fetch_layer_dependencies(layer_name, branch)
                if deps:
                    required = [d["name"] for d in deps if d["required"]]
                    optional = [d["name"] for d in deps if not d["required"]]
                    if required:
                        print(f" {Colors.yellow('Dependencies')}: {', '.join(required)}")
                    if optional:
                        print(f" {Colors.dim('Optional')}: {', '.join(optional)}")

                # Prompt for target directory
                default_target = f"layers/{layer_name}"
                try:
                    target = input(f"Clone {layer_name} to [{default_target}]: ").strip()
                except (EOFError, KeyboardInterrupt):
                    print("\nCancelled.")
                    continue
                if not target:
                    target = default_target

                if os.path.exists(target):
                    print(f" {Colors.yellow('exists')}: {target}")
                    continue

                print(f" Cloning {vcs_url} -> {target}...")
                try:
                    subprocess.run(
                        ["git", "clone", "-b", branch, vcs_url, target],
                        check=True,
                    )
                    print(f" {Colors.green('done')}")
                except subprocess.CalledProcessError as e:
                    print(f" {Colors.red('failed')}: {e}")
                    continue
                # NOTE(review): a successful clone falls through to the
                # Enter-handling below; selected_name becomes "CLONE" which
                # matches no layer, so nothing extra happens — confirm this
                # fall-through (rather than a `continue`) is intended.

            # Enter was pressed - show details
            selected_name = output.split("\t")[0]
            selected = next((l for l in layers if l["name"] == selected_name), None)
            if selected:
                _show_layer_details(selected, branch)

        return 0

    # Text output with alignment (non-interactive fallback).
    print(f"\n{Colors.bold(f'Layers on {branch} branch')} ({len(layers)} results)\n")
    for l in sorted(layers, key=lambda x: x["name"]):
        name = l["name"][:35]
        summary = l["summary"][:60] if l["summary"] else ""
        print(f" {Colors.green(f'{name:<{max_name_len}}')} {summary}")
        if l["vcs_url"]:
            subdir = f" (subdir: {l['vcs_subdir']})" if l["vcs_subdir"] else ""
            print(f" {' ' * max_name_len} {Colors.dim(l['vcs_url'])}{Colors.dim(subdir)}")

    return 0
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def _show_layer_details(layer: dict, branch: str) -> None:
    """Show detailed info for a layer and offer to clone.

    Prints name, summary, description and clone hints, then makes a
    best-effort attempt to place the clone command on the clipboard.
    """
    name = layer["name"]
    vcs_url = layer["vcs_url"]
    vcs_subdir = layer["vcs_subdir"]

    print()
    print(f" {Colors.bold(name)}")
    print()
    if layer["summary"]:
        print(f" {layer['summary']}")
        print()
    if layer["description"]:
        # Print the description line by line, indented to match.
        for text_line in layer["description"].split("\n"):
            print(f" {text_line}")
        print()
    if vcs_url:
        print(f" {Colors.cyan('Clone:')} git clone {vcs_url}")
        if vcs_subdir:
            print(f" {Colors.cyan('Layer path:')} {vcs_subdir}")
        print()

    # Assemble the clone command, annotating the in-repo layer path.
    clone_cmd = f"git clone {vcs_url}"
    if vcs_subdir:
        clone_cmd += f" # layer in: {vcs_subdir}"

    # Best-effort clipboard copy: use the first available tool, in this
    # preference order, and stay silent on any failure.
    clipboard_tools = (
        ("xclip", ["xclip", "-selection", "clipboard"]),
        ("xsel", ["xsel", "--clipboard", "--input"]),
    )
    try:
        for tool, cmd in clipboard_tools:
            if shutil.which(tool):
                subprocess.run(cmd, input=clone_cmd.encode(), check=True)
                print(Colors.dim(" (clone command copied to clipboard)"))
                break
    except Exception:
        pass
|
|
312
|
+
|
|
313
|
+
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright (C) 2025 Bruce Ashfield <bruce.ashfield@gmail.com>
|
|
3
|
+
#
|
|
4
|
+
# SPDX-License-Identifier: GPL-2.0-only
|
|
5
|
+
#
|
|
6
|
+
"""Update command - git pull/rebase layer repositories."""
|
|
7
|
+
|
|
8
|
+
import os
import subprocess
import sys
from typing import List, Optional, Tuple
|
|
11
|
+
|
|
12
|
+
from ..core import (
|
|
13
|
+
Colors,
|
|
14
|
+
current_branch,
|
|
15
|
+
load_defaults,
|
|
16
|
+
save_defaults,
|
|
17
|
+
)
|
|
18
|
+
from .common import (
|
|
19
|
+
collect_repos,
|
|
20
|
+
find_repo_by_identifier,
|
|
21
|
+
load_resume,
|
|
22
|
+
save_resume,
|
|
23
|
+
prompt_action,
|
|
24
|
+
run_cmd,
|
|
25
|
+
repo_display_name,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
def run_update(args) -> int:
    """Interactively pull/rebase each layer repository.

    Walks every repo derived from bblayers (or just args.repo when given),
    prompts for an action per repo, and runs the chosen git operation.
    With args.resume the position is checkpointed to args.resume_file so
    an interrupted run can be resumed later.

    Returns 0 on success or user abort, non-zero when a git command fails.
    """
    defaults = load_defaults(args.defaults_file)
    discover_all = getattr(args, 'all', False)
    repos, _repo_sets = collect_repos(args.bblayers, defaults, discover_all=discover_all)

    # If specific repo requested, filter to just that one
    if getattr(args, 'repo', None):
        target_repo = find_repo_by_identifier(repos, args.repo, defaults)
        if not target_repo:
            print(f"Repo not found: {args.repo}")
            return 1
        repos = [target_repo]

    # Resume support: only honor the checkpoint if the saved repo list
    # still matches what we computed this run.
    resume_state = load_resume(args.resume_file) if args.resume else None
    next_idx = 0
    if resume_state:
        saved_idx, saved_repos = resume_state
        if saved_repos == repos and 0 <= saved_idx < len(repos):
            next_idx = saved_idx
            print(f"Resuming from index {next_idx+1}/{len(repos)} using {args.resume_file}.")
        else:
            print("Resume data does not match current layer repos; starting over.")

    if args.resume:
        save_resume(args.resume_file, next_idx, repos)

    completed = False
    try:
        for idx in range(next_idx, len(repos)):
            repo = repos[idx]
            default_action = defaults.get(repo, "rebase")
            branch = current_branch(repo)
            action = prompt_action(repo, branch, default_action, use_fzf=not args.plain)
            if action is None:
                continue
            op, target, new_default = action

            # Persist a newly chosen default action immediately.
            if new_default:
                defaults[repo] = new_default
                save_defaults(args.defaults_file, defaults)

            if op == "quit":
                print("Aborting on user request.")
                break
            if op == "skip":
                if args.resume:
                    save_resume(args.resume_file, idx + 1, repos)
                continue

            if op == "custom":
                print(f"Custom command in {repo}")
                run_cmd(repo, target, args.dry_run, shell=True)
            else:
                remote_ref = f"origin/{target}"
                verb = "pull --rebase" if op == "rebase" else "pull"
                print(f"Updating {repo}: git {verb} {remote_ref}")
                if op == "rebase":
                    run_cmd(repo, ["git", "pull", "--rebase", "origin", target], args.dry_run)
                elif op == "merge":
                    run_cmd(repo, ["git", "pull", "origin", target], args.dry_run)

            if args.resume:
                save_resume(args.resume_file, idx + 1, repos)
        else:
            # for/else: reached only when the loop was not broken out of,
            # i.e. every repo was processed (or skipped) without a "quit".
            completed = True
    except subprocess.CalledProcessError as exc:
        print(f"Command failed in {repo}: {exc}")
        return exc.returncode or 1

    # Drop the checkpoint file once a resumed run finished cleanly.
    if args.resume and completed and os.path.exists(args.resume_file):
        os.remove(args.resume_file)

    return 0
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def get_repo_log(repo: str, branch: str, remote_exists: bool, max_commits: int, show_all: bool) -> Tuple[str, List[str]]:
    """Return a (description, log lines) pair of one-line commits for *repo*.

    When a remote-tracking ref exists, lists commits ahead of
    origin/<branch>; otherwise lists the most recent commits, capped at
    *max_commits* unless *show_all* is set.
    """
    remote_ref = f"origin/{branch}"
    cmd = ["git", "-C", repo, "log", "--oneline"]
    if remote_exists:
        cmd.append(f"{remote_ref}..HEAD")
        caption = f"local commits vs {remote_ref}"
    else:
        # No remote ref to diff against: fall back to a bounded recent log.
        limit = 1000000 if show_all else max_commits
        cmd.extend(["-n", str(limit)])
        caption = f"recent commits (no {remote_ref})"

    try:
        raw = subprocess.check_output(cmd, text=True)
    except subprocess.CalledProcessError:
        return "failed to read log", []

    commits = [entry for entry in raw.strip().splitlines() if entry.strip()]
    return caption, commits
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def get_upstream_commits(repo: str, branch: str) -> List[str]:
    """Get commits in origin/<branch> that are not in HEAD (pending upstream changes)."""
    try:
        raw = subprocess.check_output(
            ["git", "-C", repo, "log", "--oneline", f"HEAD..origin/{branch}"],
            text=True,
            stderr=subprocess.DEVNULL,
        )
    except subprocess.CalledProcessError:
        # No such ref / not a repo: report no pending commits.
        return []
    return [entry for entry in raw.strip().splitlines() if entry.strip()]
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def fetch_repo(repo: str) -> bool:
    """Fetch from origin. Returns True on success."""
    # All output is silenced; only the exit status matters here.
    proc = subprocess.run(
        ["git", "-C", repo, "fetch", "origin"],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return proc.returncode == 0
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def run_single_repo_update(repo: str, branch: str, action: str) -> bool:
    """Run update for a single repo. action: 'rebase' or 'merge'. Returns True on success.

    Returns False on a detached HEAD (empty *branch*), an unrecognized
    *action*, or a failed git command.
    """
    if not branch:
        print(" No branch (detached HEAD)")
        return False

    # Map the requested action to the actual git invocation.
    if action == "rebase":
        pull_cmd = ["git", "-C", repo, "pull", "--rebase", "origin", branch]
    elif action == "merge":
        pull_cmd = ["git", "-C", repo, "pull", "origin", branch]
    else:
        return False

    # Echo the command as it is really executed.  The previous message
    # printed "git pull origin/<branch>", which is not the invocation run
    # (git pull takes "origin <branch>" as separate arguments).
    shown = " ".join(["git"] + pull_cmd[3:])
    print(f" {shown}")
    try:
        subprocess.run(pull_cmd, check=True)
    except subprocess.CalledProcessError as e:
        print(f" Failed: {e}")
        return False

    print(" Done.")
    return True
|
|
180
|
+
|
|
181
|
+
|