bitp 1.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bitbake_project/__init__.py +88 -0
- bitbake_project/__main__.py +14 -0
- bitbake_project/cli.py +1580 -0
- bitbake_project/commands/__init__.py +60 -0
- bitbake_project/commands/branch.py +889 -0
- bitbake_project/commands/common.py +2372 -0
- bitbake_project/commands/config.py +1515 -0
- bitbake_project/commands/deps.py +903 -0
- bitbake_project/commands/explore.py +2269 -0
- bitbake_project/commands/export.py +1030 -0
- bitbake_project/commands/fragment.py +884 -0
- bitbake_project/commands/init.py +515 -0
- bitbake_project/commands/projects.py +1505 -0
- bitbake_project/commands/recipe.py +1374 -0
- bitbake_project/commands/repos.py +154 -0
- bitbake_project/commands/search.py +313 -0
- bitbake_project/commands/update.py +181 -0
- bitbake_project/core.py +1811 -0
- bitp-1.0.7.dist-info/METADATA +401 -0
- bitp-1.0.7.dist-info/RECORD +24 -0
- bitp-1.0.7.dist-info/WHEEL +5 -0
- bitp-1.0.7.dist-info/entry_points.txt +3 -0
- bitp-1.0.7.dist-info/licenses/COPYING +338 -0
- bitp-1.0.7.dist-info/top_level.txt +1 -0
bitbake_project/commands/explore.py
@@ -0,0 +1,2269 @@
#
# Copyright (C) 2025 Bruce Ashfield <bruce.ashfield@gmail.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Explore command - interactively explore commits in layer repos."""

import json
import os
import re
import shutil
import signal
import socket
import subprocess
import sys
import tempfile
import threading
import time
import urllib.request
import uuid
from typing import Dict, List, Optional, Set, Tuple

from ..core import (
    Colors,
    current_branch,
    current_head,
    fzf_available,
    get_fzf_color_args,
    get_fzf_preview_resize_bindings,
    git_toplevel,
    load_defaults,
    repo_is_clean,
    save_defaults,
    terminal_color,
)
from .common import (
    resolve_bblayers_path,
    resolve_base_and_layers,
    collect_repos,
    dedupe_preserve_order,
    repo_display_name,
    layer_display_name,
    discover_layers,
    add_layer_to_bblayers,
    remove_layer_from_bblayers,
    build_layer_collection_map,
    prompt_action,
    run_cmd,
    add_extra_repo,
    add_hidden_repo,
    remove_hidden_repo,
    get_hidden_repos,
    get_upstream_count_ls_remote,
    find_repo_by_identifier,
)
from .update import fetch_repo, get_upstream_commits
from .branch import (
    fzf_branch_repos,
    get_local_commits,
    get_upstream_context_commits,
    get_upstream_to_pull,
    fzf_multiselect_commits,
    fzf_select_insertion_point,
    prompt_branch_name,
)
from .config import (
    show_repo_status_detail,
    fzf_repo_config,
    fzf_build_config,
)
from .common import copy_to_clipboard, export_commits_from_explore
from .projects import get_preferred_git_viewer, get_preview_window_arg

def run_status(args) -> int:
    defaults = load_defaults(args.defaults_file)
    discover_all = getattr(args, 'all', False)
    pairs, repo_sets = resolve_base_and_layers(args.bblayers, defaults, discover_all=discover_all)

    repo_layers: Dict[str, List[str]] = {}
    for layer, repo in pairs:
        repo_layers.setdefault(repo, []).append(layer)

    # Optionally fetch first
    if args.fetch:
        print("Fetching from origin...")
        for repo in repo_layers.keys():
            if defaults.get(repo, "rebase") != "skip":
                fetch_repo(repo)

    repo_cache: Dict[str, Tuple[str, bool, str, List[str], List[str]]] = {}

    for repo, layers in repo_layers.items():
        default_action = defaults.get(repo, "rebase")
        layer_list = ", ".join(layers)
        is_discovered = repo in repo_sets.discovered
        discovered_marker = " (?)" if is_discovered else ""

        # Format layers for multi-line display
        def format_layers(layer_paths: List[str]) -> str:
            if len(layer_paths) == 1:
                return f" layer: {layer_display_name(layer_paths[0])}"
            lines = [" layers:"]
            for lp in layer_paths:
                lines.append(f" {layer_display_name(lp)}")
            return "\n".join(lines)

        if default_action == "skip":
            if args.verbose == 0:
                layer_names = ", ".join(layer_display_name(lp) for lp in layers)
                print(f"→ {layer_names}{discovered_marker}: default=skip")
            else:
                print(f"→ {repo}: default=skip (skipping status)")
                print(format_layers(layers))
            continue

        if repo not in repo_cache:
            branch = current_branch(repo)
            if not branch:
                repo_cache[repo] = ("(detached)", False, "detached head", [], [])
            else:
                remote_ref = f"origin/{branch}"
                remote_exists = (
                    subprocess.run(
                        ["git", "-C", repo, "rev-parse", "--verify", remote_ref],
                        stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL,
                    ).returncode
                    == 0
                )
                show_all = args.verbose >= 2
                desc, lines = get_repo_log(repo, branch, remote_exists, args.max_commits, show_all)
                upstream = get_upstream_commits(repo, branch) if remote_exists else []
                repo_cache[repo] = (branch, remote_exists, desc, lines, upstream)

        branch, remote_exists, desc, lines, upstream = repo_cache[repo]
        is_clean = repo_is_clean(repo)

        # Format worktree status with color
        if is_clean:
            worktree_status = terminal_color("clean", "[clean]")
            worktree_status_plain = "[clean]"
        else:
            worktree_status = terminal_color("dirty", "[DIRTY]")
            worktree_status_plain = "[DIRTY]"

        if branch == "(detached)":
            if args.verbose == 0:
                layer_names = ", ".join(layer_display_name(lp) for lp in layers)
                print(f"→ {layer_names}{discovered_marker}: detached HEAD {worktree_status}")
            else:
                print(f"→ {repo}: detached HEAD or no branch {worktree_status}; skipping")
                print(format_layers(layers))
            continue

        local_count = len(lines)
        upstream_count = len(upstream)

        if args.verbose == 0:
            # Summary mode (default)
            layer_names = ", ".join(layer_display_name(lp) for lp in layers)
            status_parts = []
            if local_count:
                status_parts.append(f"{local_count} local commit(s)")
            # Use ls-remote for upstream if not fetched (like repos status does)
            if args.fetch:
                if upstream_count:
                    status_parts.append(terminal_color("upstream", f"{upstream_count} to pull"))
            else:
                ls_result = get_upstream_count_ls_remote(repo, branch)
                if ls_result == -1:
                    status_parts.append(terminal_color("upstream", "upstream has changes"))
                elif ls_result and ls_result > 0:
                    status_parts.append(terminal_color("upstream", f"{ls_result} to pull"))
            if not status_parts:
                status_parts.append("up-to-date")
            print(f"→ {layer_names}{discovered_marker}: {', '.join(status_parts)} on {Colors.bold(branch)} {worktree_status}")
            continue

        # Build status line with colors
        status_parts = []
        if local_count:
            status_parts.append(f"{local_count} local commit(s)")
        if upstream_count:
            status_parts.append(terminal_color("upstream", f"{upstream_count} upstream commit(s) to pull"))
        if not status_parts:
            status_parts.append("up-to-date")

        # Color repo path green if clean
        repo_display = Colors.green(repo) if is_clean else repo
        branch_display = Colors.bold(branch)
        print(f"→ {repo_display}: {', '.join(status_parts)} on {branch_display} {worktree_status}")
        print(format_layers(layers))

        # Show local commits
        show_all = args.verbose >= 2
        if lines:
            print(" local:")
            limit = len(lines) if show_all else args.max_commits
            for line in lines[:limit]:
                print(f" {line}")
            if remote_exists and not show_all and len(lines) > args.max_commits:
                print(f" ... ({len(lines) - args.max_commits} more)")

        # Show upstream commits (yellow to indicate pending pulls)
        if upstream:
            print(f" {terminal_color('upstream', 'upstream:')}")
            limit = len(upstream) if show_all else args.max_commits
            for line in upstream[:limit]:
                print(f" {Colors.yellow(line)}")
            if not show_all and len(upstream) > args.max_commits:
                print(f" {terminal_color('upstream', f'... ({len(upstream) - args.max_commits} more)')}")

    return 0

def _build_layers_view_menu(
    repos: List[str],
    repo_info: Dict[str, Dict],
    repo_layers: Dict[str, List[str]],
    discovered_repos: Set[str],
    external_repos: Set[str],
    configured_layers: Set[str],
) -> str:
    """Build fzf menu showing individual layers grouped by repo."""
    menu_lines = []

    # Build list of all layers with their info
    layer_entries = []
    for repo in repos:
        layers = repo_layers.get(repo, [])
        is_discovered_repo = repo in discovered_repos
        is_external = repo in external_repos

        if not layers:
            # External repo with no layers - show repo itself
            if is_external:
                layer_entries.append({
                    "path": repo,
                    "name": repo_display_name(repo),
                    "repo": repo,
                    "repo_name": repo_display_name(repo),
                    "is_configured": False,
                    "is_external": True,
                    "branch": repo_info.get(repo, {}).get("branch", ""),
                    "is_dirty": repo_info.get(repo, {}).get("is_dirty", False),
                })
            continue

        for layer in layers:
            layer_name = os.path.basename(layer)
            is_configured = layer in configured_layers
            layer_entries.append({
                "path": layer,
                "name": layer_name,
                "repo": repo,
                "repo_name": repo_display_name(repo),
                "is_configured": is_configured,
                "is_external": False,
                "branch": repo_info.get(repo, {}).get("branch", ""),
                "is_dirty": repo_info.get(repo, {}).get("is_dirty", False),
            })

    if not layer_entries:
        return ""

    # Calculate column widths
    max_layer_len = max(len(e["name"]) for e in layer_entries)
    max_layer_len = min(max_layer_len + 6, 40)  # +6 for markers, cap at 40
    max_repo_len = max(len(e["repo_name"]) for e in layer_entries)
    max_repo_len = min(max_repo_len, 25)
    max_branch_len = max(len(e["branch"]) for e in layer_entries) if layer_entries else 15
    max_branch_len = max(max_branch_len, 15)  # minimum 15 chars

    # Build menu lines
    for entry in layer_entries:
        layer_path = entry["path"]
        layer_name = entry["name"]
        repo_name = entry["repo_name"]
        is_configured = entry["is_configured"]
        is_external = entry["is_external"]
        branch = entry["branch"]
        is_dirty = entry["is_dirty"]

        # Markers
        if is_external:
            marker = " (ext)"
        elif not is_configured:
            marker = " (?)"
        else:
            marker = ""

        display_name = f"{layer_name}{marker}"

        # Status
        if is_dirty:
            status = terminal_color("dirty", "[DIRTY]")
        else:
            status = terminal_color("clean", "[clean]")

        # Color based on status
        if is_dirty:
            colored_name = Colors.red(f"{display_name:<{max_layer_len}}")
            colored_repo = Colors.red(f"{repo_name:<{max_repo_len}}")
        elif is_external:
            colored_name = terminal_color("repo_external", f"{display_name:<{max_layer_len}}")
            colored_repo = terminal_color("repo_external", f"{repo_name:<{max_repo_len}}")
        elif not is_configured:
            colored_name = terminal_color("repo_discovered", f"{display_name:<{max_layer_len}}")
            colored_repo = terminal_color("repo_discovered", f"{repo_name:<{max_repo_len}}")
        else:
            colored_name = terminal_color("repo", f"{display_name:<{max_layer_len}}")
            colored_repo = terminal_color("repo", f"{repo_name:<{max_repo_len}}")

        line = f"LAYER:{layer_path}\t{colored_name} {colored_repo} {branch:<{max_branch_len}} {status}"
        menu_lines.append(line)

    # Reverse so first layer appears at top
    menu_lines = list(reversed(menu_lines))

    # Column header and separator
    header_line = f"HEADER\t{'Layer':<{max_layer_len}} {'Repository':<{max_repo_len}} {'Branch':<{max_branch_len}} Status"
    sep_len = max_layer_len + 2 + max_repo_len + 2 + max_branch_len + 1 + 8
    separator = f"SEPARATOR_HEADER\t{'─' * sep_len}"
    menu_lines.append(separator)
    menu_lines.append(header_line)

    return "\n".join(menu_lines)

def _build_explore_menu_lines(
    repos: List[str],
    repo_info: Dict[str, Dict],
    repo_layers: Dict[str, List[str]],
    verbose_mode: bool,
    discovered_repos: Optional[Set[str]] = None,
    external_repos: Optional[Set[str]] = None,
    expanded_repos: Optional[Set[str]] = None,
    layers_mode: bool = False,
    configured_layers: Optional[Set[str]] = None,
    bblayers_conf: Optional[str] = None,
) -> str:
    """Build fzf menu input for explore repo/layers list. Returns newline-joined string."""
    menu_lines = []
    discovered_repos = discovered_repos or set()
    external_repos = external_repos or set()
    expanded_repos = expanded_repos or set()
    configured_layers = configured_layers or set()

    # Layers mode: show individual layers instead of repos
    if layers_mode:
        return _build_layers_view_menu(
            repos, repo_info, repo_layers, discovered_repos,
            external_repos, configured_layers
        )

    def get_marker_len(repo: str) -> int:
        """Get marker length: 4 for '(?)', 6 for '(ext)', 0 otherwise."""
        if repo in external_repos:
            return 6  # " (ext)"
        if repo in discovered_repos:
            return 4  # " (?)"
        return 0

    # In verbose mode, show layer names; otherwise show display name
    # Account for markers on discovered/external repos
    if verbose_mode:
        max_name_len = 0
        for repo in repos:
            layers = repo_layers.get(repo, [])
            layer_names = ", ".join(layer_display_name(lp) for lp in layers) if layers else repo_info[repo]["display_name"]
            name_len = len(layer_names) + get_marker_len(repo)
            if name_len > max_name_len:
                max_name_len = name_len
        max_name_len = min(max_name_len, 56)  # 50 + 6 for marker
    else:
        max_name_len = max(
            len(repo_info[r]["display_name"]) + get_marker_len(r)
            for r in repos
        ) if repos else 20

    # Calculate max length of info column for alignment
    max_info_len = 0
    for repo in repos:
        info = repo_info[repo]
        local_count = info["local_count"]
        branch = info["branch"]
        upstream_count = info.get("upstream_count", 0)

        count_str = f"{local_count} local"
        if upstream_count == -1:
            upstream_str = ", ↓ upstream"
        elif upstream_count > 0:
            upstream_str = f", ↓ {upstream_count}"
        else:
            upstream_str = ""
        info_str = f"{count_str}{upstream_str}, {branch}"
        if len(info_str) > max_info_len:
            max_info_len = len(info_str)

    for repo in repos:
        info = repo_info[repo]
        local_count = info["local_count"]
        branch = info["branch"]
        is_dirty = info["is_dirty"]
        upstream_count = info.get("upstream_count", 0)

        if verbose_mode:
            layers = repo_layers.get(repo, [])
            name = ", ".join(layer_display_name(lp) for lp in layers) if layers else info["display_name"]
            if len(name) > 50:
                name = name[:47] + "..."
        else:
            name = info["display_name"]

        # Add marker for discovered/external repos
        is_discovered = repo in discovered_repos
        is_external = repo in external_repos
        if is_external:
            name = f"{name} (ext)"
        elif is_discovered:
            name = f"{name} (?)"

        # Add + prefix for repos with multiple layers (expandable)
        num_layers = len(repo_layers.get(repo, []))
        expand_marker = "+ " if num_layers > 1 else " "

        if is_dirty:
            status = terminal_color("dirty", "[DIRTY]")
            colored_name = Colors.red(f"{name:<{max_name_len}}")
        elif is_external:
            status = terminal_color("clean", "[clean]")
            colored_name = terminal_color("repo_external", f"{name:<{max_name_len}}")
        elif is_discovered:
            status = terminal_color("clean", "[clean]")
            colored_name = terminal_color("repo_discovered", f"{name:<{max_name_len}}")
        else:
            status = terminal_color("clean", "[clean]")
            colored_name = terminal_color("repo", f"{name:<{max_name_len}}")

        count_str = f"{local_count} local"

        if upstream_count == -1:
            upstream_str_plain = ", ↓ upstream"
            upstream_str = terminal_color("upstream", upstream_str_plain)
        elif upstream_count > 0:
            upstream_str_plain = f", ↓ {upstream_count}"
            upstream_str = terminal_color("upstream", upstream_str_plain)
        else:
            upstream_str_plain = ""
            upstream_str = ""

        info_str_plain = f"{count_str}{upstream_str_plain}, {branch}"
        padding = max_info_len - len(info_str_plain)
        info_str = f"{count_str}{upstream_str}, {branch}{' ' * padding}"

        line = f"{repo}\t{expand_marker}{colored_name} {info_str} {status}"
        menu_lines.append(line)

        # Add layer lines if repo is expanded
        if repo in expanded_repos:
            layers = repo_layers.get(repo, [])
            # Filter out layer that matches the repo display name (redundant)
            display = repo_display_name(repo)
            layers = [l for l in layers if os.path.basename(l) != display]
            for i, layer in enumerate(layers):
                layer_name = os.path.basename(layer)
                is_last = (i == len(layers) - 1)
                prefix = " └─ " if is_last else " ├─ "
                # Color based on whether layer is in bblayers.conf
                if layer in configured_layers:
                    layer_display = f"{prefix}{terminal_color('repo', layer_name)}"
                else:
                    layer_display = f"{prefix}{terminal_color('repo_discovered', layer_name)} {Colors.dim('(?)')}"
                # Use LAYER: prefix to identify layer entries
                layer_line = f"LAYER:{layer}\t{layer_display}"
                menu_lines.append(layer_line)

    # Add project entry if bblayers.conf is known (with separator)
    if bblayers_conf:
        conf_dir = os.path.dirname(bblayers_conf)
        # Add separator before project (unique ID to distinguish from header separator)
        menu_lines.append(f"SEPARATOR_PROJECT\t")  # Empty separator line
        # Pad plain text, then apply color
        project_name = f"{'project':<{max_name_len}}"
        project_line = f"PROJECT\t {Colors.cyan(project_name)} {'config':<{max_info_len}} {conf_dir}"
        menu_lines.append(project_line)

    # Reverse so first repo appears at top
    menu_lines = list(reversed(menu_lines))

    # Column header and separator (unique ID to distinguish from project separator)
    header_line = f"HEADER\t{'Name':<{max_name_len}} {'Commits, Branch':<{max_info_len}} Status"
    sep_len = max_name_len + 2 + max_info_len + 1 + 8
    separator = f"SEPARATOR_HEADER\t{'─' * sep_len}"
    menu_lines.append(separator)
    menu_lines.append(header_line)

    return "\n".join(menu_lines)

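The menu builders above all follow the same convention: each line is tab-delimited, field 1 is a machine-readable key (a repo path, LAYER:<path>, HEADER, SEPARATOR_*, or PROJECT) and fzf hides it with --with-nth 2.. so only the formatted columns are visible. A minimal sketch of that round trip, with invented paths that are not taken from the package:

# Illustrative sketch only; the paths and column text are made up.
menu_lines = [
    "HEADER\tName                 Commits, Branch      Status",
    "/src/poky\t  poky               3 local, master      [clean]",
    "LAYER:/src/meta-virtualization\t   └─ meta-virtualization",
]
menu_input = "\n".join(menu_lines)

# fzf would be driven roughly like this; the chosen line's hidden key is
# recovered afterwards with output.split("\t", 1)[0].
# result = subprocess.run(
#     ["fzf", "--ansi", "--delimiter", "\t", "--with-nth", "2.."],
#     input=menu_input, stdout=subprocess.PIPE, text=True,
# )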
def _find_free_port() -> int:
    """Find a free port for fzf --listen."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(('127.0.0.1', 0))
        return s.getsockname()[1]

def _background_upstream_check(
    repos: List[str],
    repo_info: Dict[str, Dict],
    repo_layers: Dict[str, List[str]],
    verbose_mode: bool,
    defaults: Dict[str, str],
    temp_file: str,
    fzf_port: int,
    stop_event: threading.Event,
    discovered_repos: Optional[Set[str]] = None,
    external_repos: Optional[Set[str]] = None,
    expanded_repos: Optional[Set[str]] = None,
    bblayers_conf: Optional[str] = None,
) -> None:
    """Background thread: check upstream status via ls-remote and reload fzf."""
    any_updates = False
    for repo in repos:
        if stop_event.is_set():
            return
        info = repo_info[repo]
        branch = info["branch"]
        if branch == "(detached)" or defaults.get(repo, "rebase") == "skip":
            continue
        if info.get("upstream_count", 0) != 0:
            # Already have upstream info
            continue
        try:
            ls_result = get_upstream_count_ls_remote(repo, branch)
            if ls_result == -1:
                repo_info[repo]["upstream_count"] = -1
                any_updates = True
            elif ls_result and ls_result > 0:
                repo_info[repo]["upstream_count"] = ls_result
                any_updates = True
        except Exception:
            pass

    if stop_event.is_set():
        return

    if any_updates:
        # Rebuild menu and write to temp file
        menu_input = _build_explore_menu_lines(repos, repo_info, repo_layers, verbose_mode, discovered_repos, external_repos, expanded_repos, bblayers_conf=bblayers_conf)
        try:
            with open(temp_file, 'w') as f:
                f.write(menu_input)
            # Tell fzf to reload
            req = urllib.request.Request(
                f"http://127.0.0.1:{fzf_port}",
                data=f"reload(cat {temp_file})".encode(),
                method='POST',
            )
            urllib.request.urlopen(req, timeout=2)
        except Exception:
            pass  # fzf may have exited

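The background refresh relies on fzf's --listen server: the menu is rewritten to a temp file and a reload(...) action is POSTed to the port fzf was started with. A stripped-down sketch of just that handshake; the port and file name are examples, not values from the package:

# Illustrative sketch only; assumes fzf was started with --listen <port>.
import urllib.request

def post_fzf_action(port: int, action: str) -> None:
    # fzf accepts action strings POSTed to its --listen endpoint,
    # e.g. "reload(cat /tmp/menu.txt)" re-runs the command and swaps the list.
    req = urllib.request.Request(
        f"http://127.0.0.1:{port}",
        data=action.encode(),
        method="POST",
    )
    urllib.request.urlopen(req, timeout=2)

# post_fzf_action(4455, "reload(cat /tmp/menu.txt)")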
def fzf_explore_repo_list(
    repos: List[str],
    repo_info: Dict[str, Dict],
    repo_layers: Dict[str, List[str]] = None,
    verbose_mode: bool = False,
    defaults: Dict[str, str] = None,
    enable_background_refresh: bool = False,
    discovered_repos: Optional[Set[str]] = None,
    external_repos: Optional[Set[str]] = None,
    expanded_repos: Optional[Set[str]] = None,
    layers_mode: bool = False,
    configured_layers: Optional[Set[str]] = None,
    initial_selection: Optional[str] = None,
    bblayers_conf: Optional[str] = None,
) -> Tuple[str, Optional[str]]:
    """
    Show repo/layer list with local commit counts for selection.
    Returns: (action, repo_path)
      action: "explore", "rebase", "merge", "refresh", "refresh_all", "verbose", "status", "quit", "layers_toggle", "interrupt", or "cancelled"
      repo_path: selected repo/layer path (None for quit/cancelled/refresh_all/verbose/layers_toggle/interrupt)
    """
    if not repos:
        return "cancelled", None

    repo_layers = repo_layers or {}
    defaults = defaults or {}
    discovered_repos = discovered_repos or set()
    external_repos = external_repos or set()
    expanded_repos = expanded_repos or set()
    configured_layers = configured_layers or set()

    # Don't reorder - we'll jump to the selected repo's position instead
    display_repos = repos

    # Build menu input using helper
    menu_input = _build_explore_menu_lines(
        display_repos, repo_info, repo_layers, verbose_mode,
        discovered_repos, external_repos, expanded_repos,
        layers_mode, configured_layers, bblayers_conf
    )

    if layers_mode:
        header = "LAYERS VIEW | Enter=explore | a=add | d=remove | L=repos view | A=discover | q=quit\n+=track | -=hide | H=show hidden"
    else:
        header = "REPOS VIEW | Enter=explore | q=quit | L=layers view\nUpdate: u=single | U=all | m=merge | r=refresh | R=refresh all | B=branch all\nView: v=verbose | \\=expand | s=status | t=history\nManage: a/d=add/remove | +/-=track/hide | c=config | A=discover"

    # Setup for background refresh if enabled
    temp_file = None
    bg_thread = None
    stop_event = None
    fzf_port = None

    if enable_background_refresh:
        # Write initial menu to temp file (for reload command)
        fd, temp_file = tempfile.mkstemp(prefix="bitbake-explore-", suffix=".txt")
        try:
            with os.fdopen(fd, 'w') as f:
                f.write(menu_input)
        except Exception:
            os.close(fd)
            temp_file = None

        if temp_file:
            fzf_port = _find_free_port()
            stop_event = threading.Event()
            bg_thread = threading.Thread(
                target=_background_upstream_check,
                args=(repos, repo_info, repo_layers, verbose_mode, defaults, temp_file, fzf_port, stop_event, discovered_repos, external_repos, expanded_repos, bblayers_conf),
                daemon=True,
            )
            bg_thread.start()

    try:
        fzf_cmd = [
            "fzf",
            "--no-multi",
            "--no-sort",
            "--no-info",
            "--ansi",
            "--header", header,
            "--prompt", "Select repo: ",
            "--with-nth", "2..",  # Display from field 2 onwards (hide repo path/HEADER)
            "--delimiter", "\t",
            "--bind", "right:accept",
            "--bind", "q:become(echo QUIT)",
            "--bind", "u:become(echo REBASE {1})",
            "--bind", "m:become(echo MERGE {1})",
            "--bind", "r:become(echo REFRESH {1})",
            "--bind", "R:become(echo REFRESH_ALL)",
            "--bind", "v:become(echo VERBOSE)",
            "--bind", "s:become(echo STATUS {1})",
            "--bind", "t:become(echo TIG {1})",
            "--bind", "+:become(echo ADD_TRACKED {1})",
            "--bind", "-:become(echo HIDE {1})",
            "--bind", "H:become(echo TOGGLE_HIDDEN)",
            "--bind", "a:become(echo ADD_LAYER {1})",
            "--bind", "d:become(echo REMOVE_LAYER {1})",
            "--bind", "\\:become(echo EXPAND {1})",
            "--bind", "L:become(echo LAYERS_TOGGLE)",
            "--bind", "c:become(echo CONFIG {1})",
            "--bind", "A:become(echo DISCOVER_TOGGLE)",
            "--bind", "U:become(echo UPDATE_ALL)",
            "--bind", "B:become(echo BRANCH_ALL)",
        ] + get_fzf_color_args()

        # Add --listen for background refresh support
        if fzf_port:
            fzf_cmd.extend(["--listen", str(fzf_port)])

        # Jump to selected repo's position (calculate line number from menu)
        if initial_selection:
            # Find line number of selected repo in menu (it's in field 1, tab-separated)
            lines = menu_input.split('\n')
            target_line = None
            search_for = initial_selection
            # If selection was a LAYER: entry that may no longer exist (collapsed),
            # fall back to finding the parent repo
            parent_repo = None
            if initial_selection.startswith("LAYER:"):
                layer_path = initial_selection[6:]
                # Find parent repo by checking which repo contains this layer
                for repo in repo_layers:
                    if layer_path in repo_layers.get(repo, []):
                        parent_repo = repo
                        break
            for i, line in enumerate(lines):
                if line.startswith(search_for + '\t'):
                    target_line = i + 1  # 1-indexed for fzf pos()
                    break
            # If LAYER: entry not found, try parent repo
            if target_line is None and parent_repo:
                for i, line in enumerate(lines):
                    if line.startswith(parent_repo + '\t'):
                        target_line = i + 1  # 1-indexed
                        break
            if target_line:
                fzf_cmd.extend(["--bind", f"load:pos({target_line})"])

        result = subprocess.run(
            fzf_cmd,
            input=menu_input,
            stdout=subprocess.PIPE,
            text=True,
        )
    except FileNotFoundError:
        print("fzf not found. Please install fzf for the explore command.")
        return "cancelled", None
    finally:
        # Cleanup background refresh resources
        if stop_event:
            stop_event.set()
        if temp_file:
            try:
                os.unlink(temp_file)
            except Exception:
                pass

    if result.returncode == 130:
        # Ctrl+C pressed - return special action for double-tap detection
        return "interrupt", None
    if result.returncode != 0 or not result.stdout.strip():
        return "cancelled", None

    output = result.stdout.strip()

    if output == "QUIT":
        return "quit", None
    elif output == "REFRESH_ALL":
        return "refresh_all", None
    elif output.startswith("REFRESH "):
        repo_path = output[8:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "refresh", repo_path
    elif output == "VERBOSE":
        return "verbose", None
    elif output.startswith("REBASE "):
        repo_path = output[7:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "rebase", repo_path
    elif output.startswith("MERGE "):
        repo_path = output[6:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "merge", repo_path
    elif output.startswith("STATUS "):
        repo_path = output[7:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "status", repo_path
    elif output.startswith("TIG "):
        repo_path = output[4:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "tig", repo_path
    elif output.startswith("ADD_TRACKED "):
        repo_path = output[12:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        # Handle LAYER: prefix for layers view
        if repo_path.startswith("LAYER:"):
            layer_path = repo_path[6:]
            # Find the git repo for this layer
            try:
                repo_path = subprocess.check_output(
                    ["git", "-C", layer_path, "rev-parse", "--show-toplevel"],
                    text=True, stderr=subprocess.DEVNULL
                ).strip()
            except subprocess.CalledProcessError:
                repo_path = layer_path  # Fallback to layer path
        return "add_tracked", repo_path
    elif output.startswith("HIDE "):
        repo_path = output[5:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "hide", repo_path
    elif output == "TOGGLE_HIDDEN":
        return "toggle_hidden", None
    elif output.startswith("ADD_LAYER "):
        repo_path = output[10:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        # Handle LAYER: prefix for expanded layer entries
        if repo_path.startswith("LAYER:"):
            return "add_layer", repo_path[6:]  # Strip LAYER: prefix
        return "add_layer", repo_path
    elif output.startswith("REMOVE_LAYER "):
        repo_path = output[13:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        # Handle LAYER: prefix for expanded layer entries
        if repo_path.startswith("LAYER:"):
            return "remove_layer", repo_path[6:]  # Strip LAYER: prefix
        return "remove_layer", repo_path
    elif output.startswith("EXPAND "):
        repo_path = output[7:].strip()
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "expand_all", repo_path  # Expand/collapse all, preserve selection
        # Handle LAYER: prefix - expand parent repo instead
        if repo_path.startswith("LAYER:"):
            return "expand_all", repo_path  # On a layer, expand/collapse all, preserve selection
        return "expand", repo_path
    elif output == "LAYERS_TOGGLE":
        return "layers_toggle", None
    elif output == "DISCOVER_TOGGLE":
        return "discover_toggle", None
    elif output == "UPDATE_ALL":
        return "update_all", None
    elif output == "BRANCH_ALL":
        return "branch_all", None
    elif output.startswith("CONFIG "):
        repo_path = output[7:].strip()  # After "CONFIG "
        if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
            return "cancelled", None
        return "config", repo_path

    # Default: explore (Enter or right arrow)
    # Extract repo path (first field before tab), ignore header/separator lines
    parts = output.split("\t", 1)
    repo_path = parts[0] if parts else None
    if repo_path == "HEADER" or repo_path.startswith("SEPARATOR"):
        return "cancelled", None
    # PROJECT entry: go to config instead of explore
    if repo_path == "PROJECT":
        return "config", "PROJECT"
    # Handle LAYER: prefix from layers view - strip it to get layer path
    if repo_path and repo_path.startswith("LAYER:"):
        repo_path = repo_path[6:]
    return "explore", repo_path

def fzf_explore_commits(
    repo: str,
    branch: str,
    local_commits: List[Tuple[str, str]],
    upstream_context: List[Tuple[str, str]],
    upstream_to_pull: List[Tuple[str, str]],
    base_ref: str,
) -> Tuple[str, List[str]]:
    """
    Browse commits with preview pane and optional range selection for export.
    Returns: (action, commit_hashes)
      action: "back", "quit", "copy", "export"
      commit_hashes: list of commit hashes (for export may be multiple)
    """
    display_name = repo_display_name(repo)
    local_count = len(local_commits)
    # Get actual upstream count (not limited by display cap)
    try:
        out = subprocess.check_output(
            ["git", "-C", repo, "rev-list", "--count", f"HEAD..origin/{branch}"],
            text=True,
            stderr=subprocess.DEVNULL,
        )
        upstream_count = int(out.strip())
    except (subprocess.CalledProcessError, ValueError):
        upstream_count = len(upstream_to_pull)  # Fallback to displayed count

    # Build hash list for range filling and export (local + upstream)
    all_hashes = [h for h, _ in local_commits]
    all_upstream_hashes = [h for h, _ in upstream_to_pull] + [h for h, _ in upstream_context]

    # Build display: local commits + separator + upstream to pull + context
    # (reversed order because fzf shows last input at top)
    menu_lines = []

    # Upstream context commits (will appear at very bottom)
    if upstream_context:
        for hash_val, subject in reversed(upstream_context):
            menu_lines.append(f"{hash_val[:12]}\t {Colors.yellow(hash_val[:12])} {subject[:70]}")

    # Separator with base_ref (merge base between local and upstream)
    separator = f"──────────── {base_ref} ────────────"
    menu_lines.append(f"---\t{separator}")

    # Upstream commits to pull section
    if upstream_to_pull:
        for hash_val, subject in reversed(upstream_to_pull):
            menu_lines.append(f"{hash_val[:12]}\t {Colors.cyan(hash_val[:12])} {subject[:70]}")
        menu_lines.append(f"---\t{'─' * 50}")
        upstream_header = f"UPSTREAM ({upstream_count} to pull from origin/{branch})"
        menu_lines.append(f"---\t{Colors.cyan(upstream_header)}")
        menu_lines.append(f"---\t{'─' * 50}")

    # Local commits header
    local_header = f"LOCAL ({local_count} ahead of origin/{branch})"
    menu_lines.append(f"---\t{Colors.bold(local_header)}")
    menu_lines.append(f"---\t{'─' * 50}")

    # Local commits (oldest first in input, so newest appears at top after fzf reversal)
    for hash_val, subject in local_commits:
        menu_lines.append(f"{hash_val[:12]}\t {hash_val[:12]} {subject[:70]}")

    menu_input = "\n".join(menu_lines)

    # Temp files for state
    diff_file = f"/tmp/fzf_explore_diff_{os.getpid()}"
    range_file = f"/tmp/fzf_explore_range_{os.getpid()}"

    # Preview command - check for diff mode toggle
    preview_cmd = (
        f'hash={{1}}; '
        f'if [ "$hash" = "---" ]; then echo "Header line"; exit 0; fi; '
        f'if [ -f {diff_file} ]; then '
        f'git -C {repo} show --color=always $hash 2>/dev/null || echo "No commit selected"; '
        f'else '
        f'git -C {repo} show --stat --color=always $hash 2>/dev/null || echo "No commit selected"; '
        f'fi'
    )

    # Dynamic prompt showing selection count
    prompt_script = (
        f'sel=0; rng=0; '
        f'[ -f {range_file} ] && rng=$(wc -l < {range_file}); '
        f'if [ $rng -gt 0 ]; then echo "Browse [range:$rng]: "; else echo "Browse: "; fi'
    )

    header = f"{Colors.bold(display_name)} ({local_count} local"
    if upstream_count > 0:
        header += f", {Colors.cyan(f'{upstream_count} to pull')}"
    header += f") on {Colors.bold(branch)}\n"
    header += "Tab=mark | Space=range | ?=preview | d=diff | c=copy | e=export | t=history | ←/b=back | q=quit"

    try:
        result = subprocess.run(
            [
                "fzf",
                "--multi",
                "--no-sort",
                "--ansi",
                "--height", "100%",
                "--header", header,
                "--prompt", "Browse: ",
                "--with-nth", "2..",  # Display from field 2 onwards (hide hash field)
                "--delimiter", "\t",
                "--preview", preview_cmd,
                "--preview-window", get_preview_window_arg("50%"),
                "--bind", "?:toggle-preview",
                "--bind", f"d:execute-silent(if [ -f {diff_file} ]; then rm {diff_file}; else touch {diff_file}; fi)+refresh-preview",
                "--bind", "ctrl-d:preview-half-page-down",
                "--bind", "ctrl-u:preview-half-page-up",
                "--bind", "page-down:preview-page-down",
                "--bind", "page-up:preview-page-up",
                "--bind", "tab:toggle",
                "--bind", f"space:toggle+execute-silent(echo {{1}} >> {range_file})+transform-prompt({prompt_script})",
                "--bind", "esc:become(echo BACK)",
                "--bind", "b:become(echo BACK)",
                "--bind", "left:become(echo BACK)",
                "--bind", "q:become(echo QUIT)",
                "--bind", "c:become(echo COPY {1})",
                "--bind", "e:become(echo EXPORT {+1})",  # {+1} = selected or current if none
                "--bind", "t:become(echo TIG {1})",
            ] + get_fzf_preview_resize_bindings() + get_fzf_color_args(),
            input=menu_input,
            stdout=subprocess.PIPE,
            text=True,
        )
    except FileNotFoundError:
        return "back", []

    # Read range markers before cleanup
    range_markers = []
    if os.path.exists(range_file):
        with open(range_file) as f:
            range_markers = [line.strip() for line in f if line.strip() and line.strip() != "---"]

    # Clean up temp files
    for f in [diff_file, range_file]:
        if os.path.exists(f):
            os.remove(f)

    if result.returncode != 0 or not result.stdout.strip():
        return "back", []

    output = result.stdout.strip()

    if output == "BACK":
        return "back", []
    elif output == "QUIT":
        return "quit", []
    elif output.startswith("COPY "):
        commit = output[5:].strip()
        return "copy", [commit] if commit and commit != "---" else []
    elif output.startswith("EXPORT "):
        # May have multiple hashes separated by space
        hashes_str = output[7:].strip()
        hashes = [h for h in hashes_str.split() if h and h != "---"]

        # If we have range markers, fill in the range
        if len(range_markers) >= 2:
            first_marker = range_markers[0]
            last_marker = range_markers[-1]
            try:
                idx1 = next(i for i, h in enumerate(all_hashes) if h.startswith(first_marker))
                idx2 = next(i for i, h in enumerate(all_hashes) if h.startswith(last_marker))
                start_idx, end_idx = min(idx1, idx2), max(idx1, idx2)
                range_hashes = all_hashes[start_idx:end_idx + 1]
                # Merge with individually selected
                all_selected = set(hashes) | set(range_hashes)
                # Return in original order (oldest first)
                hashes = [h for h in all_hashes if h in all_selected or any(h.startswith(sel) for sel in all_selected)]
            except StopIteration:
                pass

        # Filter to full hashes (check local and upstream)
        full_hashes = []
        combined_hashes = all_hashes + all_upstream_hashes
        for h in hashes:
            found = False
            for full_h in combined_hashes:
                if full_h.startswith(h):
                    if full_h not in full_hashes:
                        full_hashes.append(full_h)
                    found = True
                    break
            # If not found in our lists, it might be a full hash already
            if not found and len(h) >= 7:
                full_hashes.append(h)

        return "export", full_hashes
    elif output.startswith("TIG "):
        commit = output[4:].strip()
        if commit and commit != "---":
            return "tig", [commit]
        return "tig", []

    return "back", []

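The Space-marked range in fzf_explore_commits is expanded only after fzf exits: the first and last marker bound a slice of all_hashes (kept oldest-first), and that slice is merged with any Tab-selected commits. A small worked example on invented hashes (the real code matches markers by prefix via startswith, since the menu stores 12-character abbreviations):

# Illustrative sketch only; hashes are invented.
all_hashes = ["aaa111", "bbb222", "ccc333", "ddd444", "eee555"]   # oldest first
range_markers = ["ddd444", "bbb222"]                              # marked in any order
idx1 = all_hashes.index(range_markers[0])
idx2 = all_hashes.index(range_markers[-1])
start, end = min(idx1, idx2), max(idx1, idx2)
print(all_hashes[start:end + 1])   # ['bbb222', 'ccc333', 'ddd444']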
def text_select_insertion_point(
    repo: str,
    branch: str,
    base_ref: str,
    remaining_commits: List[Tuple[str, str]],
) -> Optional[str]:
    """Text-based insertion point selection."""
    display_name = repo_display_name(repo)

    # Get upstream commits for context
    upstream_commits = get_upstream_context_commits(repo, base_ref)

    print(f"\nSelect insertion point for {Colors.bold(display_name)}:")
    print("(upstream commits will be placed AFTER the selected point)")
    print()

    # Local commits newest first (reversed), with index numbers
    if remaining_commits:
        for idx, (hash_val, subject) in enumerate(reversed(remaining_commits), 1):
            print(f" {idx}. {hash_val[:12]} {subject[:60]}")

    # Separator and default
    print(" ────────────────────────────────────────")
    print(f" 0. {base_ref} (default - insert here)")

    # Upstream context
    if upstream_commits:
        for hash_val, subject in upstream_commits:
            print(f" {hash_val[:12]} {subject[:60]}")

    print()

    try:
        choice = input("Selection [0]: ").strip()
    except EOFError:
        return None

    if not choice or choice == "0":
        return base_ref

    try:
        idx = int(choice)
        if 1 <= idx <= len(remaining_commits):
            # Convert from display order (newest first) to storage order (oldest first)
            real_idx = len(remaining_commits) - idx
            return remaining_commits[real_idx][0]
    except ValueError:
        pass

    return base_ref  # Default on invalid input

def text_multiselect_commits(
    repo: str,
    branch: str,
    commits: List[Tuple[str, str]]
) -> Optional[Tuple[List[str], str]]:
    """Text-based commit selection fallback."""
    if not commits:
        return ([], "skip")

    display_name = repo_display_name(repo)
    print(f"\n{Colors.bold(display_name)} on {Colors.bold(branch)}")
    print(f"Local commits ({len(commits)}):")

    # Display newest-first with indices (user sees 1=newest)
    for idx, (hash_val, subject) in enumerate(reversed(commits), 1):
        print(f" {idx:3}. {hash_val[:12]} {subject[:60]}")

    print()
    print("Enter commit numbers for UPSTREAM (comma-separated, e.g., 1,3,5)")
    print(" 'a' = all, 's' = skip repo, 'S' = skip remaining repos, 'q' = quit")

    try:
        choice = input("Selection: ").strip()
    except EOFError:
        return None

    # Check the case-sensitive 'S' (skip remaining repos) before the
    # case-insensitive 's' (skip this repo), otherwise it can never match.
    if choice == 'S':
        return ([], "skip_rest")
    if not choice or choice.lower() == 's':
        return ([], "skip")
    if choice.lower() == 'q':
        return None
    if choice.lower() == 'a':
        return ([h for h, _ in commits], "selected")

    # Parse indices
    try:
        indices = [int(x.strip()) for x in choice.split(',') if x.strip()]
        # Convert to 0-based, accounting for reversed display
        n = len(commits)
        selected_hashes = []
        for idx in indices:
            if 1 <= idx <= n:
                # idx 1 = newest = commits[n-1], idx n = oldest = commits[0]
                real_idx = n - idx
                selected_hashes.append(commits[real_idx][0])
        # Return in original order (oldest first)
        selected_ordered = [h for h, _ in commits if h in selected_hashes]
        return (selected_ordered, "selected")
    except ValueError:
        print("Invalid input, skipping repo.")
        return ([], "skip")

def reorder_commits_via_cherrypick(
|
|
1150
|
+
repo: str,
|
|
1151
|
+
branch: str,
|
|
1152
|
+
base_ref: str,
|
|
1153
|
+
selected_commits: List[str],
|
|
1154
|
+
remaining_commits: List[str],
|
|
1155
|
+
commits_info: List[Tuple[str, str]], # (hash, subject) for display
|
|
1156
|
+
insertion_point: str = None, # Where to insert selected commits (default: base_ref)
|
|
1157
|
+
backup_branch: Optional[str] = None,
|
|
1158
|
+
dry_run: bool = False
|
|
1159
|
+
) -> Tuple[bool, str, Optional[str]]:
|
|
1160
|
+
"""
|
|
1161
|
+
Reorder commits using cherry-pick approach.
|
|
1162
|
+
If insertion_point is base_ref: base_ref -> selected -> remaining
|
|
1163
|
+
If insertion_point is a commit: commits up to insertion_point -> selected -> rest
|
|
1164
|
+
Returns (success, message, cut_point_hash).
|
|
1165
|
+
cut_point_hash is the commit hash at the end of selected commits (for PR branch creation).
|
|
1166
|
+
"""
|
|
1167
|
+
display_name = repo_display_name(repo)
|
|
1168
|
+
|
|
1169
|
+
if insertion_point is None:
|
|
1170
|
+
insertion_point = base_ref
|
|
1171
|
+
|
|
1172
|
+
# Build lookup for commit subjects
|
|
1173
|
+
subject_map = {h: s for h, s in commits_info}
|
|
1174
|
+
|
|
1175
|
+
# Determine final commit order
|
|
1176
|
+
if insertion_point == base_ref:
|
|
1177
|
+
all_commits = selected_commits + remaining_commits
|
|
1178
|
+
insert_desc = base_ref
|
|
1179
|
+
cut_point_idx = len(selected_commits)
|
|
1180
|
+
else:
|
|
1181
|
+
# Find where to split remaining_commits
|
|
1182
|
+
insertion_idx = None
|
|
1183
|
+
for i, h in enumerate(remaining_commits):
|
|
1184
|
+
if h == insertion_point:
|
|
1185
|
+
insertion_idx = i
|
|
1186
|
+
break
|
|
1187
|
+
if insertion_idx is not None:
|
|
1188
|
+
before = remaining_commits[:insertion_idx + 1]
|
|
1189
|
+
after = remaining_commits[insertion_idx + 1:]
|
|
1190
|
+
all_commits = before + selected_commits + after
|
|
1191
|
+
insert_desc = f"after {insertion_point[:12]}"
|
|
1192
|
+
cut_point_idx = len(before) + len(selected_commits)
|
|
1193
|
+
else:
|
|
1194
|
+
all_commits = selected_commits + remaining_commits
|
|
1195
|
+
insert_desc = base_ref
|
|
1196
|
+
cut_point_idx = len(selected_commits)
|
|
1197
|
+
|
|
1198
|
+
# Get current commit order from base_ref to HEAD
|
|
1199
|
+
try:
|
|
1200
|
+
current_commits = subprocess.check_output(
|
|
1201
|
+
["git", "-C", repo, "rev-list", "--reverse", f"{base_ref}..HEAD"],
|
|
1202
|
+
text=True,
|
|
1203
|
+
).strip().splitlines()
|
|
1204
+    except subprocess.CalledProcessError:
+        current_commits = []
+
+    # Check if commits are already in the correct order
+    if current_commits == all_commits:
+        # Already in correct order - compute cut point and return
+        if cut_point_idx > 0 and cut_point_idx <= len(current_commits):
+            cut_point = current_commits[cut_point_idx - 1]
+        else:
+            cut_point = None
+        msg = f"Already in order: {len(selected_commits)} upstream + {len(remaining_commits)} local"
+        return True, msg, cut_point
+
+    # Dry run mode - show what would happen
+    if dry_run:
+        # Get upstream commits for context
+        upstream_context = get_upstream_context_commits(repo, base_ref, count=3)
+
+        print(f"\n Selected for upstream ({len(selected_commits)}):")
+        # Show selected commits newest-first
+        for h in reversed(selected_commits):
+            print(f" {h[:12]} {subject_map.get(h, '')[:60]}")
+
+        print(f" ────────────────────────────────────────")
+        print(f" {base_ref}")
+
+        # Show upstream context
+        if upstream_context:
+            for h, subj in upstream_context:
+                print(f" {h[:12]} {subj[:60]}")
+
+        msg = f"Would reorder {len(selected_commits)} upstream + {len(remaining_commits)} local"
+        return True, msg, None # No cut point in dry run
+
+    # Record original HEAD for recovery
+    try:
+        original_head = subprocess.check_output(
+            ["git", "-C", repo, "rev-parse", "HEAD"],
+            text=True,
+        ).strip()
+    except subprocess.CalledProcessError:
+        return False, "Failed to get current HEAD", None
+
+    # Create backup branch if requested
+    if backup_branch:
+        result = subprocess.run(
+            ["git", "-C", repo, "branch", backup_branch, "HEAD"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        if result.returncode != 0:
+            return False, f"Failed to create backup branch '{backup_branch}'", None
+        print(f" Created backup: {backup_branch}")
+
+    # Create temp branch at base_ref
+    temp_branch = f"_prepare-export-temp-{uuid.uuid4().hex[:8]}"
+
+    try:
+        # Create and checkout temp branch at base_ref
+        subprocess.run(
+            ["git", "-C", repo, "checkout", "-b", temp_branch, base_ref],
+            check=True,
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+    except subprocess.CalledProcessError:
+        return False, f"Failed to create temp branch at {base_ref}", None
+
+    # Cherry-pick all commits in the computed order
+    # (cut_point_idx already computed above)
+    cherry_pick_failed = False
+    failed_commit = None
+    cut_point = None
+
+    for i, commit in enumerate(all_commits):
+        result = subprocess.run(
+            ["git", "-C", repo, "cherry-pick", commit],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        if result.returncode != 0:
+            cherry_pick_failed = True
+            failed_commit = commit[:12]
+            break
+
+        # Capture cut point (HEAD after last selected commit)
+        if i + 1 == cut_point_idx:
+            cut_point = subprocess.check_output(
+                ["git", "-C", repo, "rev-parse", "HEAD"],
+                text=True,
+            ).strip()
+
+    if cherry_pick_failed:
+        # Abort cherry-pick and restore
+        subprocess.run(
+            ["git", "-C", repo, "cherry-pick", "--abort"],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        subprocess.run(
+            ["git", "-C", repo, "checkout", branch],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        subprocess.run(
+            ["git", "-C", repo, "branch", "-D", temp_branch],
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+        )
+        return False, f"Cherry-pick failed on {failed_commit}, original branch restored", None
+
+    # Get new HEAD
+    new_head = subprocess.check_output(
+        ["git", "-C", repo, "rev-parse", "HEAD"],
+        text=True,
+    ).strip()
+
+    # Switch back to original branch and reset to new HEAD
+    subprocess.run(
+        ["git", "-C", repo, "checkout", branch],
+        check=True,
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+    subprocess.run(
+        ["git", "-C", repo, "reset", "--hard", new_head],
+        check=True,
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+
+    # Cleanup temp branch
+    subprocess.run(
+        ["git", "-C", repo, "branch", "-D", temp_branch],
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+
+    return True, f"Reordered: {len(selected_commits)} upstream + {len(remaining_commits)} local", cut_point
+
+
+
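The block above is a "replay, then swap" recipe: the candidate history is rebuilt on a throwaway branch, and the real branch is only moved once every cherry-pick has succeeded, so a conflict can never leave the user's branch half-rewritten. A stripped-down sketch of the same recipe follows; the function and argument names here are hypothetical and not part of this module.

import subprocess
import uuid

def replay_on_temp_branch(repo: str, branch: str, base_ref: str, commits: list) -> bool:
    """Cherry-pick `commits` onto `base_ref`, then point `branch` at the result (sketch)."""
    git = ["git", "-C", repo]
    temp = f"_replay-{uuid.uuid4().hex[:8]}"
    subprocess.run(git + ["checkout", "-b", temp, base_ref], check=True)
    try:
        for commit in commits:
            if subprocess.run(git + ["cherry-pick", commit]).returncode != 0:
                # Conflict: abort the pick and return to the untouched branch
                subprocess.run(git + ["cherry-pick", "--abort"])
                subprocess.run(git + ["checkout", branch])
                return False
        new_head = subprocess.check_output(git + ["rev-parse", "HEAD"], text=True).strip()
        subprocess.run(git + ["checkout", branch], check=True)
        subprocess.run(git + ["reset", "--hard", new_head], check=True)  # the "swap"
        return True
    finally:
        subprocess.run(git + ["branch", "-D", temp])  # always drop the temp branch

The code above layers two refinements on this skeleton: an optional backup branch created before anything moves, and a recorded cut point so later export logic knows where the selected upstream commits end.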
+def run_explore(args) -> int:
+    """Main entry point for explore subcommand."""
+    bblayers_path = resolve_bblayers_path(args.bblayers)
+    defaults = load_defaults(args.defaults_file)
+    discover_all = getattr(args, 'all', False)
+    pairs, repo_sets = resolve_base_and_layers(
+        bblayers_path, defaults, include_external=discover_all, discover_all=discover_all
+    )
+    # Filter out hidden repos
+    repos = dedupe_preserve_order(
+        repo for _, repo in pairs if repo not in repo_sets.hidden
+    )
+
+    # Add external repos to the list (they're not in pairs since they're not layers)
+    for ext_repo in repo_sets.external:
+        if ext_repo not in repos and ext_repo not in repo_sets.hidden:
+            repos.append(ext_repo)
+
+    # For backward compat with UI functions that expect simple sets
+    discovered_repos = repo_sets.discovered
+    external_repos = repo_sets.external
+    configured_layers = repo_sets.configured_layers
+
+    if not repos and not external_repos:
+        print("No repos found.")
+        return 1
+
+    # Build layer mapping for repos (needed for status output)
+    repo_layers: Dict[str, List[str]] = {}
+    for layer, repo in pairs:
+        repo_layers.setdefault(repo, []).append(layer)
+
+    # Optionally fetch first
+    if getattr(args, 'refresh', False) or getattr(args, 'fetch', False):
+        print("Fetching from origin...")
+        for repo in repos:
+            if defaults.get(repo, "rebase") != "skip":
+                fetch_repo(repo)
+        print()
+
+    # Pre-collect repo info for display
+    verbose = getattr(args, 'verbose', 0)
+    max_commits = getattr(args, 'max_commits', 10)
+    did_fetch = getattr(args, 'refresh', False) or getattr(args, 'fetch', False)
+    status_mode = getattr(args, 'status', False)
+
+    repo_info: Dict[str, Dict] = {}
+    for repo in repos:
+        default_action = defaults.get(repo, "rebase")
+        branch = current_branch(repo)
+        if not branch:
+            branch = "(detached)"
+
+        # Get local commits
+        commits = []
+        base_ref = None
+        upstream_count = 0
+        if branch != "(detached)" and default_action != "skip":
+            try:
+                commits_list, base_ref = get_local_commits(repo, branch)
+                commits = commits_list if commits_list else []
+                # Get upstream count:
+                # - If we fetched: use local refs (accurate count)
+                # - If --status mode: do ls-remote (user expects to wait for accurate info)
+                # - Otherwise: skip for fast startup (user can press r/R to refresh)
+                if did_fetch:
+                    upstream = get_upstream_commits(repo, branch)
+                    upstream_count = len(upstream)
+                elif status_mode:
+                    ls_result = get_upstream_count_ls_remote(repo, branch)
+                    if ls_result == -1:
+                        upstream_count = -1 # indicates "has changes" but unknown count
+                    elif ls_result and ls_result > 0:
+                        upstream_count = ls_result
+                    # else: leave upstream_count = 0 (unknown) for fast interactive startup
+            except Exception:
+                pass
+
+        repo_info[repo] = {
+            "display_name": repo_display_name(repo),
+            "local_count": len(commits),
+            "commits": commits,
+            "branch": branch,
+            "base_ref": base_ref,
+            "upstream_count": upstream_count,
+            "is_dirty": not repo_is_clean(repo),
+            "default_action": default_action,
+        }
+
+    # Handle --status mode: print text output instead of fzf
+    if getattr(args, 'status', False):
+        def format_layers(layer_paths: List[str]) -> str:
+            if len(layer_paths) == 1:
+                return f" layer: {layer_display_name(layer_paths[0])}"
+            lines = [" layers:"]
+            for lp in layer_paths:
+                lines.append(f" {layer_display_name(lp)}")
+            return "\n".join(lines)
+
+        for repo in repos:
+            info = repo_info[repo]
+            layers = repo_layers.get(repo, [])
+            display_name = info["display_name"]
+            branch = info["branch"]
+            local_count = info["local_count"]
+            upstream_count = info["upstream_count"]
+            is_dirty = info["is_dirty"]
+            default_action = info["default_action"]
+            commits = info["commits"]
+
+            # Format worktree status with color
+            if is_dirty:
+                worktree_status = terminal_color("dirty", "[DIRTY]")
+            else:
+                worktree_status = terminal_color("clean", "[clean]")
+
+            if default_action == "skip":
+                if verbose == 0:
+                    layer_names = ", ".join(layer_display_name(lp) for lp in layers)
+                    print(f"→ {layer_names}: default=skip")
+                else:
+                    print(f"→ {repo}: default=skip (skipping status)")
+                    print(format_layers(layers))
+                continue
+
+            if branch == "(detached)":
+                if verbose == 0:
+                    layer_names = ", ".join(layer_display_name(lp) for lp in layers)
+                    print(f"→ {layer_names}: detached HEAD {worktree_status}")
+                else:
+                    print(f"→ {repo}: detached HEAD or no branch {worktree_status}; skipping")
+                    print(format_layers(layers))
+                continue
+
+            if verbose == 0:
+                # Summary mode (default)
+                layer_names = ", ".join(layer_display_name(lp) for lp in layers)
+                status_parts = []
+                if local_count:
+                    status_parts.append(f"{local_count} local commit(s)")
+                if upstream_count == -1:
+                    status_parts.append(terminal_color("upstream", "upstream has changes"))
+                elif upstream_count > 0:
+                    status_parts.append(terminal_color("upstream", f"{upstream_count} to pull"))
+                if not status_parts:
+                    status_parts.append("up-to-date")
+                print(f"→ {layer_names}: {', '.join(status_parts)} on {Colors.bold(branch)} {worktree_status}")
+            else:
+                # Detailed mode (verbose >= 1)
+                status_parts = []
+                if local_count:
+                    status_parts.append(f"{local_count} local commit(s)")
+                if upstream_count == -1:
+                    status_parts.append(terminal_color("upstream", "upstream has changes"))
+                elif upstream_count > 0:
+                    status_parts.append(terminal_color("upstream", f"{upstream_count} upstream commit(s) to pull"))
+                if not status_parts:
+                    status_parts.append("up-to-date")
+
+                repo_display = Colors.green(repo) if not is_dirty else repo
+                branch_display = Colors.bold(branch)
+                print(f"→ {repo_display}: {', '.join(status_parts)} on {branch_display} {worktree_status}")
+                print(format_layers(layers))
+
+                # Show local commits
+                show_all = verbose >= 2
+                if commits:
+                    print(" local:")
+                    limit = len(commits) if show_all else max_commits
+                    for hash_val, subject in commits[:limit]:
+                        print(f" {hash_val[:12]} {subject}")
+                    if not show_all and len(commits) > max_commits:
+                        print(f" ... ({len(commits) - max_commits} more)")
+
+                # Show upstream count hint
+                if upstream_count > 0 and did_fetch:
+                    print(f" {terminal_color('upstream', f'upstream: {upstream_count} commit(s) to pull')}")
+
+        return 0
+
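For reference, the --status report above is rendered entirely from the per-repo dictionaries collected into repo_info. An illustrative entry is sketched below; the keys come from the code, but the path, hashes, and subjects are made-up values.

repo_info = {}
repo_info["/work/layers/meta-example"] = {
    "display_name": "meta-example",      # from repo_display_name()
    "local_count": 2,                    # commits on the branch but not upstream
    "commits": [
        ("a1b2c3d4e5f6", "example: add custom bbappend"),
        ("0f9e8d7c6b5a", "example: tweak defconfig"),
    ],
    "branch": "master",
    "base_ref": "origin/master",         # base used when walking local commits
    "upstream_count": 0,                 # 0 = none/unknown, -1 = "has changes"
    "is_dirty": False,                   # from repo_is_clean()
    "default_action": "rebase",          # per-repo default from the defaults file
}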
+    # fzf mode requires fzf
+    if not fzf_available():
+        print("fzf is required for the explore command. Please install fzf.")
+        return 1
+
+    # If specific repo given, jump directly to it
+    if args.repo:
+        repo = find_repo_by_identifier(repos, args.repo, defaults)
+        if not repo:
+            print(f"Repo not found: {args.repo}")
+            return 1
+
+        # Browse single repo until back/quit
+        while True:
+            branch = current_branch(repo) or "(detached)"
+            commits, base_ref = get_local_commits(repo, branch)
+            if not commits:
+                commits = []
+            upstream_context = get_upstream_context_commits(repo, base_ref, args.upstream_count) if base_ref else []
+            upstream_to_pull = get_upstream_to_pull(repo, branch, count=500) if branch != "(detached)" else []
+
+            action, selected_hashes = fzf_explore_commits(
+                repo, branch, commits, upstream_context, upstream_to_pull, base_ref or "HEAD"
+            )
+
+            if action == "quit" or action == "back":
+                break
+            elif action == "copy" and selected_hashes:
+                commit = selected_hashes[0]
+                if copy_to_clipboard(commit):
+                    print(f"Copied: {commit}")
+                else:
+                    print(f"Clipboard not available. Hash: {commit}")
+            elif action == "export" and selected_hashes:
+                export_commits_from_explore(repo, selected_hashes)
+            elif action == "tig":
+                viewer = get_preferred_git_viewer()
+                if not viewer:
+                    print(f"\n{Colors.yellow('No git history viewer found.')}")
+                    print("Install one of: tig, lazygit, or gitk")
+                    print(" apt install tig (or brew install tig)")
+                    input("Press Enter to continue...")
+                else:
+                    # Launch the preferred viewer
+                    if selected_hashes and viewer == "tig":
+                        subprocess.run(["tig", "show", selected_hashes[0]], cwd=repo)
+                    elif selected_hashes and viewer == "lazygit":
+                        subprocess.run(["lazygit"], cwd=repo)
+                    elif selected_hashes and viewer == "gitk":
+                        subprocess.run(["gitk", selected_hashes[0]], cwd=repo)
+                    else:
+                        subprocess.run([viewer], cwd=repo)
+
+        return 0
+
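run_explore() only reads plain attributes from its args object (optional ones via getattr with a default), so the single-repo path above can be exercised without the command-line front end by passing any object that carries the expected fields. A minimal sketch; the attribute values below are placeholders and "meta-example" is an invented repo name, since normally these attributes come from the CLI's argument parser.

from argparse import Namespace

args = Namespace(
    bblayers=None,        # let resolve_bblayers_path() locate bblayers.conf
    defaults_file=None,   # location of the per-repo defaults store
    repo="meta-example",  # jump straight into the commit browser for this repo
    upstream_count=3,     # how many upstream context commits to show
    all=False,
    refresh=False,
    fetch=False,
    verbose=0,
    max_commits=10,
    status=False,
)
rc = run_explore(args)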
+    # Two-level navigation loop
+    explore_verbose_mode = False
+    explore_layers_mode = False
+    expanded_repos: Set[str] = set()
+    first_iteration = True
+    next_selection: Optional[str] = None # Track repo to select after expand/collapse
+    last_interrupt_time: float = 0 # Track Ctrl+C for double-tap exit
+    while True:
+        # Level 1: Repo/Layers list
+        # Enable background upstream check on first iteration only
+        list_action, selected_repo = fzf_explore_repo_list(
+            repos, repo_info, repo_layers, explore_verbose_mode,
+            defaults=defaults,
+            enable_background_refresh=first_iteration,
+            discovered_repos=discovered_repos,
+            external_repos=external_repos,
+            expanded_repos=expanded_repos,
+            layers_mode=explore_layers_mode,
+            configured_layers=configured_layers,
+            initial_selection=next_selection,
+            bblayers_conf=bblayers_path,
+        )
+        first_iteration = False
+        next_selection = None # Clear after use
+
+        if list_action == "quit":
+            break
+        elif list_action == "interrupt":
+            # Double Ctrl+C to exit
+            now = time.time()
+            if now - last_interrupt_time < 1.0:
+                break # Exit on double Ctrl+C
+            last_interrupt_time = now
+            print("\nPress Ctrl+C again to exit")
+            continue
+        elif list_action == "cancelled":
+            continue
+        elif list_action in ("rebase", "merge") and selected_repo:
+            branch = current_branch(selected_repo)
+            display_name = repo_display_name(selected_repo)
+            if list_action == "rebase":
+                cmd_desc = f"git pull --rebase origin/{branch}"
+            else:
+                cmd_desc = f"git pull origin/{branch}"
+            try:
+                confirm = input(f"\n→ {display_name}: {cmd_desc} [y/N] ").strip().lower()
+            except (EOFError, KeyboardInterrupt):
+                print()
+                continue
+            if confirm != "y":
+                print(" Cancelled.")
+                continue
+            run_single_repo_update(selected_repo, branch, list_action)
+            print()
+            # Refresh repo info after update
+            try:
+                commits, _ = get_local_commits(selected_repo, branch)
+                repo_info[selected_repo]["local_count"] = len(commits) if commits else 0
+            except Exception:
+                pass
+            continue
+        elif list_action == "refresh" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Fetch from single repo and refresh its info
+            display_name = repo_display_name(selected_repo)
+            print(f"\nRefreshing {display_name}...", end=" ", flush=True)
+            try:
+                subprocess.run(
+                    ["git", "-C", selected_repo, "fetch", "--quiet"],
+                    check=True,
+                    stderr=subprocess.DEVNULL,
+                    timeout=30,
+                )
+                print("done")
+            except subprocess.TimeoutExpired:
+                print("timeout")
+            except subprocess.CalledProcessError:
+                print("failed")
+            # Refresh this repo's info
+            branch = current_branch(selected_repo) or "(detached)"
+            try:
+                commits, _ = get_local_commits(selected_repo, branch)
+                repo_info[selected_repo]["local_count"] = len(commits) if commits else 0
+                upstream = get_upstream_commits(selected_repo, branch)
+                repo_info[selected_repo]["upstream_count"] = len(upstream)
+            except Exception:
+                pass
+            repo_info[selected_repo]["is_dirty"] = not repo_is_clean(selected_repo)
+            continue
+        elif list_action == "refresh_all":
+            # Fetch from all repos and refresh info
+            print("\nRefreshing from origin...")
+            for repo in repos:
+                display_name = repo_display_name(repo)
+                try:
+                    subprocess.run(
+                        ["git", "-C", repo, "fetch", "--quiet"],
+                        check=True,
+                        stderr=subprocess.DEVNULL,
+                    )
+                    print(f" {display_name}: fetched")
+                except subprocess.CalledProcessError:
+                    print(f" {display_name}: fetch failed")
+            # Rebuild repo info
+            for repo in repos:
+                branch = current_branch(repo) or "(detached)"
+                try:
+                    commits, _ = get_local_commits(repo, branch)
+                    repo_info[repo]["local_count"] = len(commits) if commits else 0
+                    upstream = get_upstream_commits(repo, branch)
+                    repo_info[repo]["upstream_count"] = len(upstream)
+                except Exception:
+                    pass
+                repo_info[repo]["is_dirty"] = not repo_is_clean(repo)
+            print()
+            continue
+        elif list_action == "verbose":
+            # Toggle verbose mode (show layer names instead of display names)
+            explore_verbose_mode = not explore_verbose_mode
+            continue
+        elif list_action == "update_all":
+            # Run update flow for all repos (same as 'bit update')
+            print()
+            for repo in repos:
+                default_action = defaults.get(repo, "rebase")
+                branch = current_branch(repo)
+                action = prompt_action(repo, branch, default_action, use_fzf=True)
+                if action is None:
+                    continue
+                op, target, new_default = action
+
+                if new_default:
+                    defaults[repo] = new_default
+                    save_defaults(args.defaults_file, defaults)
+
+                if op == "quit":
+                    print("Aborting update on user request.")
+                    break
+                if op == "skip":
+                    continue
+
+                if op == "custom":
+                    print(f"Custom command in {repo}")
+                    run_cmd(repo, target, dry_run=False, shell=True)
+                else:
+                    remote_ref = f"origin/{target}"
+                    verb = "pull --rebase" if op == "rebase" else "pull"
+                    print(f"Updating {repo}: git {verb} {remote_ref}")
+                    try:
+                        if op == "rebase":
+                            run_cmd(repo, ["git", "pull", "--rebase", "origin", target], dry_run=False)
+                        elif op == "merge":
+                            run_cmd(repo, ["git", "pull", "origin", target], dry_run=False)
+                    except subprocess.CalledProcessError as exc:
+                        print(f"Update failed: {exc}")
+
+                # Update repo_info after update
+                info = repo_info.get(repo, {})
+                commits, base = get_local_commits(repo, branch)
+                info["local_count"] = len(commits) if commits else 0
+                info["commits"] = commits or []
+                info["base_ref"] = base
+                info["is_dirty"] = not repo_is_clean(repo)
+                info["upstream_count"] = 0
+                repo_info[repo] = info
+
+            print()
+            continue
+        elif list_action == "branch_all":
+            # Jump to branch menu (same as bit branch)
+            fzf_branch_repos(repos)
+            # Update repo_info with new branch info
+            for repo in repos:
+                info = repo_info.get(repo, {})
+                info["branch"] = current_branch(repo) or "(detached)"
+                repo_info[repo] = info
+            continue
+        elif list_action == "layers_toggle":
+            # Toggle between repos view and layers view
+            explore_layers_mode = not explore_layers_mode
+            continue
+        elif list_action == "discover_toggle":
+            # Toggle discovery mode and reload layers
+            discover_all = not discover_all
+            pairs, repo_sets = resolve_base_and_layers(
+                bblayers_path, defaults, include_external=discover_all, discover_all=discover_all
+            )
+            repos = dedupe_preserve_order(repo for _, repo in pairs)
+            discovered_repos = repo_sets.discovered
+            external_repos = repo_sets.external
+            configured_layers = repo_sets.configured_layers
+            # Rebuild repo_layers
+            repo_layers = {}
+            for layer, repo in pairs:
+                repo_layers.setdefault(repo, []).append(layer)
+            # Rebuild repo_info for new repos
+            for repo in repos:
+                if repo not in repo_info:
+                    branch = current_branch(repo) or "(detached)"
+                    commits, _ = get_local_commits(repo, branch)
+                    repo_info[repo] = {
+                        "display_name": repo_display_name(repo),
+                        "branch": branch,
+                        "local_count": len(commits) if commits else 0,
+                        "is_dirty": not repo_is_clean(repo),
+                        "upstream_count": 0,
+                    }
+            continue
+        elif list_action == "config" and selected_repo:
+            # Skip special entries (but PROJECT is handled specially below)
+            if selected_repo in ("HEADER", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Show config menu for selected repo (or project config if PROJECT)
+            if selected_repo == "PROJECT":
+                fzf_build_config(args.bblayers)
+                # Reload layer configuration after returning (bblayers.conf may have been edited)
+                pairs, repo_sets = resolve_base_and_layers(
+                    bblayers_path, defaults, include_external=discover_all, discover_all=discover_all
+                )
+                repos = dedupe_preserve_order(repo for _, repo in pairs)
+                discovered_repos = repo_sets.discovered
+                external_repos = repo_sets.external
+                configured_layers = repo_sets.configured_layers
+                # Rebuild repo_layers
+                repo_layers = {}
+                for layer, repo in pairs:
+                    repo_layers.setdefault(repo, []).append(layer)
+                # Rebuild repo_info
+                for repo in repos:
+                    if repo not in repo_info:
+                        branch = current_branch(repo) or "(detached)"
+                        commits, _ = get_local_commits(repo, branch)
+                        repo_info[repo] = {
+                            "display_name": repo_display_name(repo),
+                            "branch": branch,
+                            "local_count": len(commits) if commits else 0,
+                            "is_dirty": not repo_is_clean(repo),
+                            "upstream_count": 0,
+                        }
+            else:
+                fzf_repo_config(selected_repo, defaults, args.defaults_file)
+            continue
+        elif list_action == "expand" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Toggle expansion of repo to show layers
+            if selected_repo in expanded_repos:
+                expanded_repos.discard(selected_repo)
+            else:
+                # Only expand if repo has multiple layers
+                layers = repo_layers.get(selected_repo, [])
+                if len(layers) > 1:
+                    expanded_repos.add(selected_repo)
+            # Preserve position at the expanded/collapsed repo
+            next_selection = selected_repo
+            continue
+        elif list_action == "expand_all":
+            # Toggle expand/collapse all repos with multiple layers
+            # If any are expanded, collapse all; otherwise expand all
+            expandable_repos = [r for r in repos if len(repo_layers.get(r, [])) > 1]
+            if expanded_repos:
+                # Some expanded - collapse all
+                expanded_repos.clear()
+            else:
+                # None expanded - expand all
+                expanded_repos.update(expandable_repos)
+            # Preserve exact selection position (including HEADER/SEPARATOR)
+            if selected_repo:
+                next_selection = selected_repo
+            continue
+        elif list_action == "status" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Show status for this repo
+            display_name = repo_display_name(selected_repo)
+            branch = current_branch(selected_repo)
+            print(f"\n→ {display_name} on {branch}")
+            show_repo_status_detail(selected_repo, branch)
+            print()
+            input("Press Enter to continue...")
+            continue
+        elif list_action == "tig" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Launch git history viewer for this repo
+            viewer = get_preferred_git_viewer()
+            if not viewer:
+                print(f"\n{Colors.yellow('No git history viewer found.')}")
+                print("Install one of: tig, lazygit, or gitk")
+                print(" apt install tig (or brew install tig)")
+                input("Press Enter to continue...")
+                continue
+            display_name = repo_display_name(selected_repo)
+            print(f"\nLaunching {viewer} for {display_name}...")
+            try:
+                subprocess.run([viewer], cwd=selected_repo)
+            except Exception as e:
+                print(f"Error launching {viewer}: {e}")
+                input("Press Enter to continue...")
+            continue
+        elif list_action == "add_tracked" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Add repo to tracked list AND add layer to bblayers.conf
+            # Works for external, discovered, or hidden repos
+            hidden_repos = set(get_hidden_repos(defaults))
+            is_hidden = selected_repo in hidden_repos
+            is_external = selected_repo in external_repos
+            is_discovered = selected_repo in discovered_repos
+
+            if is_external or is_discovered or is_hidden:
+                # Add to extra repos (makes it permanently tracked)
+                add_extra_repo(args.defaults_file, defaults, selected_repo)
+                # Remove from external/discovered sets (now it's tracked)
+                external_repos.discard(selected_repo)
+                discovered_repos.discard(selected_repo)
+                # If it was hidden, unhide it
+                if is_hidden:
+                    remove_hidden_repo(args.defaults_file, defaults, selected_repo)
+
+                # Also add layer to bblayers.conf if it's a layer
+                # Check if selected_repo is a layer or contains layers
+                layers_to_check = []
+                if os.path.isfile(os.path.join(selected_repo, "conf", "layer.conf")):
+                    layers_to_check = [selected_repo]
+                else:
+                    layers_to_check = repo_layers.get(selected_repo, [])
+
+                # Filter to unconfigured layers only
+                unconfigured = [l for l in layers_to_check if os.path.realpath(l) not in configured_layers]
+
+                if unconfigured:
+                    bblayers_conf = resolve_bblayers_path(args.bblayers)
+                    if bblayers_conf:
+                        # Build collection map
+                        all_layer_paths = []
+                        for layer_list in repo_layers.values():
+                            all_layer_paths.extend(layer_list)
+                        collection_map = build_layer_collection_map(all_layer_paths)
+
+                        # Add layers (if single layer, add directly; if multiple, use fzf)
+                        if len(unconfigured) == 1:
+                            layer_to_add = unconfigured[0]
+                            layer_name = os.path.basename(layer_to_add)
+                            print(f"\nAdding {layer_name} to bblayers.conf...")
+                            success, message, added = add_layer_to_bblayers(
+                                layer_to_add, bblayers_conf, collection_map
+                            )
+                            print(f"{message}")
+                            if success and added:
+                                for added_layer in added:
+                                    configured_layers.add(os.path.realpath(added_layer))
+                        else:
+                            # Multiple layers - prompt which to add
+                            print(f"\nMultiple unconfigured layers in {repo_display_name(selected_repo)}:")
+                            for i, layer in enumerate(unconfigured, 1):
+                                print(f" {i}. {os.path.basename(layer)}")
+                            print(" a. Add all")
+                            print(" s. Skip (just track repo)")
+                            try:
+                                choice = input("\nSelect (number/a/s): ").strip().lower()
+                                if choice == "a":
+                                    for layer in unconfigured:
+                                        layer_name = os.path.basename(layer)
+                                        print(f"Adding {layer_name}...")
+                                        success, message, added = add_layer_to_bblayers(
+                                            layer, bblayers_conf, collection_map
+                                        )
+                                        if success and added:
+                                            for added_layer in added:
+                                                configured_layers.add(os.path.realpath(added_layer))
+                                        else:
+                                            print(f" {message}")
+                                elif choice and choice != "s":
+                                    idx = int(choice) - 1
+                                    if 0 <= idx < len(unconfigured):
+                                        layer_to_add = unconfigured[idx]
+                                        layer_name = os.path.basename(layer_to_add)
+                                        print(f"Adding {layer_name}...")
+                                        success, message, added = add_layer_to_bblayers(
+                                            layer_to_add, bblayers_conf, collection_map
+                                        )
+                                        print(f"{message}")
+                                        if success and added:
+                                            for added_layer in added:
+                                                configured_layers.add(os.path.realpath(added_layer))
+                            except (ValueError, EOFError, KeyboardInterrupt):
+                                pass
+            continue
+        elif list_action == "hide" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Hide repo (silently - repo disappears from list)
+            if selected_repo in repos:
+                add_hidden_repo(args.defaults_file, defaults, selected_repo)
+                repos.remove(selected_repo)
+                repo_info.pop(selected_repo, None)
+            continue
+        elif list_action == "toggle_hidden":
+            # Toggle showing hidden repos (silently - fzf will show the updated list)
+            hidden_repos = set(get_hidden_repos(defaults))
+            if hidden_repos:
+                # Check if we're currently showing hidden repos
+                showing_hidden = any(r in repos for r in hidden_repos)
+                if showing_hidden:
+                    # Hide them again
+                    for repo in list(hidden_repos):
+                        if repo in repos:
+                            repos.remove(repo)
+                            repo_info.pop(repo, None)
+                else:
+                    # Show them
+                    for repo in hidden_repos:
+                        if repo not in repos and os.path.isdir(repo):
+                            repos.append(repo)
+                            # Add repo_info for newly shown repo
+                            branch = current_branch(repo) or "(detached)"
+                            is_dirty = not repo_is_clean(repo)
+                            commits, _ = get_local_commits(repo, branch) if branch != "(detached)" else ([], None)
+                            repo_info[repo] = {
+                                "display_name": repo_display_name(repo),
+                                "branch": branch,
+                                "local_count": len(commits) if commits else 0,
+                                "is_dirty": is_dirty,
+                                "upstream_count": 0,
+                            }
+            continue
+        elif list_action == "add_layer" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Add a layer to bblayers.conf using bitbake-layers
+            # Check if selected_repo is actually a layer path (from expanded view)
+            is_direct_layer = os.path.isfile(os.path.join(selected_repo, "conf", "layer.conf"))
+
+            if is_direct_layer:
+                # Direct layer selection from expanded view
+                layers_in_repo = [selected_repo]
+            else:
+                # Repo selection - get layers from it
+                layers_in_repo = repo_layers.get(selected_repo, [])
+
+            if not layers_in_repo:
+                print(f"\nNo layers found in {repo_display_name(selected_repo)}")
+                input("Press Enter to continue...")
+                continue
+
+            # Find bblayers.conf path
+            bblayers_conf = resolve_bblayers_path(args.bblayers)
+            if not bblayers_conf:
+                print("\nCould not find bblayers.conf")
+                input("Press Enter to continue...")
+                continue
+
+            # Build collection map from all known layers
+            all_layer_paths = []
+            for layer_list in repo_layers.values():
+                all_layer_paths.extend(layer_list)
+            # Also add layers from other discovered repos
+            for repo in discovered_repos | external_repos:
+                if repo not in repo_layers:
+                    # Check if repo itself is a layer
+                    if os.path.isfile(os.path.join(repo, "conf", "layer.conf")):
+                        all_layer_paths.append(repo)
+            # Discover all layers for collection map
+            peer_dirs = {os.path.dirname(r) for r in repos if r}
+            all_discovered = discover_layers(peer_dirs=peer_dirs)
+            for layer_path, _ in all_discovered:
+                if layer_path not in all_layer_paths:
+                    all_layer_paths.append(layer_path)
+
+            collection_map = build_layer_collection_map(all_layer_paths)
+
+            # Determine layer to add
+            layer_to_add = None
+            if len(layers_in_repo) == 1:
+                layer_to_add = layers_in_repo[0]
+            else:
+                # Use fzf to select layer
+                display_name = repo_display_name(selected_repo)
+                menu_lines = []
+                for layer in layers_in_repo:
+                    layer_name = os.path.basename(layer)
+                    menu_lines.append(f"{layer}\t{layer_name}")
+                menu_input = "\n".join(menu_lines)
+
+                try:
+                    result = subprocess.run(
+                        [
+                            "fzf",
+                            "--no-multi",
+                            "--no-sort",
+                            "--no-info",
+                            "--ansi",
+                            "--height", "~50%",
+                            "--header", f"Select layer from {display_name} (Enter=select, Esc=cancel)",
+                            "--prompt", "Layer: ",
+                            "--with-nth", "2..",
+                            "--delimiter", "\t",
+                        ] + get_fzf_color_args(),
+                        input=menu_input,
+                        stdout=subprocess.PIPE,
+                        text=True,
+                    )
+                    if result.returncode == 0 and result.stdout.strip():
+                        parts = result.stdout.strip().split("\t", 1)
+                        if parts:
+                            layer_to_add = parts[0]
+                except FileNotFoundError:
+                    # Fallback to numbered menu if fzf not available
+                    print(f"\nLayers in {display_name}:")
+                    for i, layer in enumerate(layers_in_repo, 1):
+                        print(f" {i}. {os.path.basename(layer)}")
+                    try:
+                        choice = input("\nSelect layer number (or Enter to cancel): ").strip()
+                        if choice:
+                            idx = int(choice) - 1
+                            if 0 <= idx < len(layers_in_repo):
+                                layer_to_add = layers_in_repo[idx]
+                    except (ValueError, IndexError):
+                        pass
+
+            if not layer_to_add:
+                # User cancelled layer selection - go back to list
+                continue
+
+            layer_name = os.path.basename(layer_to_add)
+            print(f"\nAdding {layer_name} to bblayers.conf...")
+            success, message, added = add_layer_to_bblayers(
+                layer_to_add, bblayers_conf, collection_map
+            )
+            print(f"{message}\n")
+            if success and added:
+                # Update configured_layers with newly added layers
+                for added_layer in added:
+                    configured_layers.add(os.path.realpath(added_layer))
+                # Also track the repo (add to __extra_repos__ if not already tracked)
+                # Find the repo for this layer
+                try:
+                    layer_repo = subprocess.check_output(
+                        ["git", "-C", layer_to_add, "rev-parse", "--show-toplevel"],
+                        text=True, stderr=subprocess.DEVNULL
+                    ).strip()
+                    if layer_repo in discovered_repos or layer_repo in external_repos:
+                        add_extra_repo(args.defaults_file, defaults, layer_repo)
+                        discovered_repos.discard(layer_repo)
+                        external_repos.discard(layer_repo)
+                except subprocess.CalledProcessError:
+                    pass
+            continue
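The layer picker above relies on a handy fzf idiom: every menu line carries a hidden machine-readable field ("full-path<TAB>display-name"), --with-nth 2.. hides the first field from the display, and the selected line printed on stdout still contains it, so no reverse lookup is needed. The same trick in isolation (the paths here are invented for illustration):

import subprocess

layers = {
    "/work/layers/meta-alpha": "meta-alpha",
    "/work/layers/meta-beta": "meta-beta",
}
menu = "\n".join(f"{path}\t{name}" for path, name in layers.items())

result = subprocess.run(
    ["fzf", "--delimiter", "\t", "--with-nth", "2..", "--prompt", "Layer: "],
    input=menu,
    stdout=subprocess.PIPE,
    text=True,
)
if result.returncode == 0 and result.stdout.strip():
    selected_path = result.stdout.strip().split("\t", 1)[0]  # hidden first column
    print(selected_path)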
+        elif list_action == "remove_layer" and selected_repo:
+            # Skip special entries
+            if selected_repo in ("HEADER", "PROJECT", "SETTINGS") or selected_repo.startswith("SEPARATOR"):
+                continue
+            # Remove a layer from bblayers.conf
+            # Determine which layer to remove
+            layer_to_remove = None
+
+            # Check if selected_repo is actually a layer path
+            if os.path.isfile(os.path.join(selected_repo, "conf", "layer.conf")):
+                layer_to_remove = selected_repo
+            else:
+                # It's a repo - get configured layers in this repo
+                repo_layer_list = repo_layers.get(selected_repo, [])
+                configured_in_repo = [l for l in repo_layer_list if os.path.realpath(l) in configured_layers]
+
+                if not configured_in_repo:
+                    print(f"\nNo configured layers in this repo to remove")
+                    input("Press Enter to continue...")
+                    continue
+                elif len(configured_in_repo) == 1:
+                    layer_to_remove = configured_in_repo[0]
+                else:
+                    # Multiple layers - let user choose with fzf
+                    layer_names = [os.path.basename(l) for l in configured_in_repo]
+                    fzf_input = "\n".join(layer_names)
+                    try:
+                        result = subprocess.run(
+                            ["fzf", "--height", "~30%", "--prompt", "Remove layer: ", "--header", "Select layer to remove (Esc to cancel)"] + get_fzf_color_args(),
+                            input=fzf_input,
+                            capture_output=True,
+                            text=True,
+                        )
+                        if result.returncode == 0 and result.stdout.strip():
+                            selected_name = result.stdout.strip()
+                            for l in configured_in_repo:
+                                if os.path.basename(l) == selected_name:
+                                    layer_to_remove = l
+                                    break
+                    except FileNotFoundError:
+                        pass
+
+            if not layer_to_remove:
+                continue
+
+            # Check layer is actually configured
+            layer_realpath = os.path.realpath(layer_to_remove)
+            if layer_realpath not in configured_layers:
+                print(f"\n{os.path.basename(layer_to_remove)} is not in bblayers.conf")
+                input("Press Enter to continue...")
+                continue
+
+            # Find bblayers.conf
+            bblayers_conf = resolve_bblayers_path(args.bblayers)
+            if not bblayers_conf:
+                print("\nCould not find bblayers.conf")
+                input("Press Enter to continue...")
+                continue
+
+            layer_name = os.path.basename(layer_to_remove)
+            print(f"\nRemoving {layer_name} from bblayers.conf...")
+            success, message = remove_layer_from_bblayers(layer_to_remove, bblayers_conf)
+            print(f"{message}\n")
+
+            if success:
+                # Update configured_layers
+                configured_layers.discard(layer_realpath)
+                # If no layers remain configured in repo, mark as discovered
+                repo_layer_list = repo_layers.get(selected_repo, [])
+                if repo_layer_list and not any(os.path.realpath(l) in configured_layers for l in repo_layer_list):
+                    # Find the repo for this layer
+                    layer_repo = git_toplevel(layer_to_remove)
+                    if layer_repo:
+                        discovered_repos.add(layer_repo)
+            continue
+        elif list_action != "explore" or not selected_repo:
+            continue
+
+        # If selected_repo is a layer path (from layers view), resolve to its git repo
+        explore_repo = selected_repo
+        if not os.path.isdir(os.path.join(selected_repo, ".git")):
+            # Not a repo root - might be a layer, find parent repo
+            repo_path = git_toplevel(selected_repo)
+            if repo_path:
+                explore_repo = repo_path
+
+        # Level 2: Commit browser (loop until back/quit)
+        while True:
+            branch = current_branch(explore_repo) or "(detached)"
+            commits, base_ref = get_local_commits(explore_repo, branch)
+            if not commits:
+                commits = []
+            upstream_context = get_upstream_context_commits(explore_repo, base_ref, args.upstream_count) if base_ref else []
+            upstream_to_pull = get_upstream_to_pull(explore_repo, branch, count=500) if branch != "(detached)" else []
+
+            action, selected_hashes = fzf_explore_commits(
+                explore_repo, branch, commits, upstream_context, upstream_to_pull, base_ref or "HEAD"
+            )
+
+            if action == "back":
+                break # Return to repo list
+            elif action == "quit":
+                return 0 # Exit entirely
+            elif action == "copy" and selected_hashes:
+                commit = selected_hashes[0]
+                if copy_to_clipboard(commit):
+                    print(f"Copied: {commit}")
+                else:
+                    print(f"Clipboard not available. Hash: {commit}")
+            elif action == "export" and selected_hashes:
+                export_commits_from_explore(explore_repo, selected_hashes)
+            elif action == "tig":
+                viewer = get_preferred_git_viewer()
+                if not viewer:
+                    print(f"\n{Colors.yellow('No git history viewer found.')}")
+                    print("Install one of: tig, lazygit, or gitk")
+                    print(" apt install tig (or brew install tig)")
+                    input("Press Enter to continue...")
+                else:
+                    # Launch the preferred viewer
+                    if selected_hashes and viewer == "tig":
+                        subprocess.run(["tig", "show", selected_hashes[0]], cwd=explore_repo)
+                    elif selected_hashes and viewer == "lazygit":
+                        subprocess.run(["lazygit"], cwd=explore_repo)
+                    elif selected_hashes and viewer == "gitk":
+                        subprocess.run(["gitk", selected_hashes[0]], cwd=explore_repo)
+                    else:
+                        subprocess.run([viewer], cwd=explore_repo)
+
+    return 0
+
+
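Both commit browsers fall back through the same set of history viewers. get_preferred_git_viewer() is used above and presumably defined earlier in this file; one stand-alone way to express the same fallback, which is not necessarily how that helper is implemented, is:

import shutil
import subprocess

def first_available_viewer(candidates=("tig", "lazygit", "gitk")):
    """Return the first git history viewer found on PATH, or None."""
    for tool in candidates:
        if shutil.which(tool):
            return tool
    return None

viewer = first_available_viewer()
if viewer:
    subprocess.run([viewer], cwd=".")  # open the viewer in the current repository
else:
    print("Install one of: tig, lazygit, or gitk")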