bitp-1.0.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bitbake_project/__init__.py +88 -0
- bitbake_project/__main__.py +14 -0
- bitbake_project/cli.py +1580 -0
- bitbake_project/commands/__init__.py +60 -0
- bitbake_project/commands/branch.py +889 -0
- bitbake_project/commands/common.py +2372 -0
- bitbake_project/commands/config.py +1515 -0
- bitbake_project/commands/deps.py +903 -0
- bitbake_project/commands/explore.py +2269 -0
- bitbake_project/commands/export.py +1030 -0
- bitbake_project/commands/fragment.py +884 -0
- bitbake_project/commands/init.py +515 -0
- bitbake_project/commands/projects.py +1505 -0
- bitbake_project/commands/recipe.py +1374 -0
- bitbake_project/commands/repos.py +154 -0
- bitbake_project/commands/search.py +313 -0
- bitbake_project/commands/update.py +181 -0
- bitbake_project/core.py +1811 -0
- bitp-1.0.7.dist-info/METADATA +401 -0
- bitp-1.0.7.dist-info/RECORD +24 -0
- bitp-1.0.7.dist-info/WHEEL +5 -0
- bitp-1.0.7.dist-info/entry_points.txt +3 -0
- bitp-1.0.7.dist-info/licenses/COPYING +338 -0
- bitp-1.0.7.dist-info/top_level.txt +1 -0
bitbake_project/commands/common.py
@@ -0,0 +1,2372 @@
#
# Copyright (C) 2025 Bruce Ashfield <bruce.ashfield@gmail.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
Shared utilities for bit commands.

This module contains all run_* functions and their supporting utilities.
"""

import json
import os
import re
import shlex
import shutil
import signal
import socket
import subprocess
import sys
import tempfile
import threading
import time
import urllib.request
import uuid
from dataclasses import dataclass
from datetime import datetime
from typing import Dict, Iterable, List, Optional, Set, Tuple

from ..core import (
    Colors,
    GitRepo,
    FzfMenu,
    fzf_available,
    fzf_expandable_menu,
    get_fzf_color_args,
    parse_help_options,
    git_toplevel,
    current_branch,
    current_head,
    repo_is_clean,
    load_defaults,
    save_defaults,
    load_prep_state,
    save_prep_state,
    load_export_state,
    save_export_state,
    terminal_color,
    get_terminal_color,
    ANSI_COLORS,
)

def resolve_bblayers_path(path_opt: Optional[str]) -> str:
    if path_opt:
        return path_opt
    candidates = ["conf/bblayers.conf", "build/conf/bblayers.conf"]
    for cand in candidates:
        if os.path.exists(cand):
            return cand
    sys.exit("bblayers.conf not found (tried conf/bblayers.conf and build/conf/bblayers.conf). Use --bblayers to specify.")


class BblayersParser:
    """
    Parse bblayers.conf with proper BitBake syntax support.

    Handles:
    - Line continuations (backslash)
    - Assignment operators: =, ?=, ??=, :=, ::=
    - Append/prepend: +=, .=, =+, =., :append, :prepend
    - Remove: :remove
    - Variable expansion: ${VAR}
    - Include/require statements
    - Comments (# lines)
    """

    def __init__(self, conf_path: str):
        self.conf_path = os.path.abspath(conf_path)
        self.conf_dir = os.path.dirname(self.conf_path)
        # Build dir is typically parent of conf/
        self.build_dir = os.path.dirname(self.conf_dir)

        # Variable storage
        self.variables: Dict[str, str] = {}
        self.bblayers: List[str] = []
        self.bblayers_remove: Set[str] = set()

        # Initialize common variables
        self._init_common_variables()

    def _init_common_variables(self) -> None:
        """Initialize commonly used variables that we can infer."""
        # TOPDIR is the build directory (where conf/ lives)
        self.variables['TOPDIR'] = self.build_dir

        # Try to find OEROOT/COREBASE by looking for oe-init-build-env
        # Usually it's a sibling or parent directory
        for search_dir in [
            os.path.dirname(self.build_dir),  # Parent of build
            self.build_dir,
        ]:
            if os.path.isfile(os.path.join(search_dir, 'oe-init-build-env')):
                self.variables['OEROOT'] = search_dir
                self.variables['COREBASE'] = search_dir
                break

        # BSPDIR is often the project root (parent of build dir)
        self.variables['BSPDIR'] = os.path.dirname(self.build_dir)

        # Environment variables can also be referenced
        for env_var in ['HOME', 'PWD', 'USER']:
            if env_var in os.environ:
                self.variables[env_var] = os.environ[env_var]

    def _expand_variables(self, value: str) -> str:
        """Expand ${VAR} references in a string."""
        if '${' not in value:
            return value

        # Iteratively expand variables (handles nested refs)
        max_iterations = 10
        for _ in range(max_iterations):
            original = value
            for var_name, var_value in self.variables.items():
                value = value.replace(f'${{{var_name}}}', var_value)
            if value == original:
                break

        return value

    def _join_continued_lines(self, content: str) -> List[str]:
        """Join lines that end with backslash continuation."""
        lines = content.splitlines()
        result = []
        current_line = ""

        for line in lines:
            # Strip trailing whitespace but preserve leading
            line_stripped = line.rstrip()

            if line_stripped.endswith('\\'):
                # Continuation - append without the backslash
                current_line += line_stripped[:-1]
            else:
                current_line += line_stripped
                result.append(current_line)
                current_line = ""

        # Don't forget last line if file doesn't end with newline
        if current_line:
            result.append(current_line)

        return result

    def _extract_quoted_value(self, value_part: str) -> str:
        """Extract value from quoted string (handles both " and ')."""
        value_part = value_part.strip()

        # Try double quotes first
        match = re.match(r'^"(.*)"', value_part, re.DOTALL)
        if match:
            return match.group(1)

        # Try single quotes
        match = re.match(r"^'(.*)'", value_part, re.DOTALL)
        if match:
            return match.group(1)

        # Unquoted value (take until comment or end)
        match = re.match(r'^([^#\s]+)', value_part)
        if match:
            return match.group(1)

        return value_part

    def _parse_assignment(self, line: str) -> Optional[Tuple[str, str, str]]:
        """
        Parse a variable assignment line.

        Returns: (var_name, operator, value) or None if not an assignment.
        Operator includes any suffix like :append, :remove, etc.
        """
        # Skip comments and empty lines
        stripped = line.strip()
        if not stripped or stripped.startswith('#'):
            return None

        # Match variable assignment patterns
        # Handles: VAR =, VAR ?=, VAR ??=, VAR +=, VAR .=, VAR :=, VAR ::=
        # Also: VAR:append =, VAR:prepend =, VAR:remove =
        pattern = r'^([A-Za-z_][A-Za-z0-9_]*)((?::[a-z]+)?)\s*(\?\?=|\?=|:=|::=|\+=|\.=|=\+|=\.|=)\s*(.*)$'
        match = re.match(pattern, stripped)

        if not match:
            return None

        var_name = match.group(1)
        var_suffix = match.group(2)  # :append, :remove, etc.
        operator = match.group(3)
        value_part = match.group(4)

        # Extract the actual value (may be quoted)
        value = self._extract_quoted_value(value_part)

        # Combine suffix into operator for easier handling
        full_operator = var_suffix + operator if var_suffix else operator

        return (var_name, full_operator, value)

    def _apply_assignment(self, var_name: str, operator: str, value: str) -> None:
        """Apply a variable assignment based on the operator."""
        # Expand variables in the value
        expanded_value = self._expand_variables(value)

        if var_name == 'BBLAYERS':
            self._apply_bblayers_assignment(operator, expanded_value)
        else:
            # Track other variables for potential expansion
            self._apply_variable_assignment(var_name, operator, expanded_value)

    def _apply_variable_assignment(self, var_name: str, operator: str, value: str) -> None:
        """Apply assignment to a regular variable."""
        if operator in ('=', ':=', '::='):
            # Direct assignment
            self.variables[var_name] = value
        elif operator == '?=':
            # Default assignment (only if not set)
            if var_name not in self.variables:
                self.variables[var_name] = value
        elif operator == '??=':
            # Weak default (we treat same as ?= for simplicity)
            if var_name not in self.variables:
                self.variables[var_name] = value
        elif operator in ('+=', ':append='):
            # Append with space
            current = self.variables.get(var_name, '')
            self.variables[var_name] = current + ' ' + value if current else value
        elif operator in ('.=', ):
            # Append without space
            current = self.variables.get(var_name, '')
            self.variables[var_name] = current + value
        elif operator in ('=+', ':prepend='):
            # Prepend with space
            current = self.variables.get(var_name, '')
            self.variables[var_name] = value + ' ' + current if current else value
        elif operator == '=.':
            # Prepend without space
            current = self.variables.get(var_name, '')
            self.variables[var_name] = value + current

    def _apply_bblayers_assignment(self, operator: str, value: str) -> None:
        """Apply assignment specifically to BBLAYERS."""
        # Extract paths from the value
        paths = self._extract_paths_from_value(value)

        if operator == ':remove=':
            # Add to remove set
            for path in paths:
                self.bblayers_remove.add(path)
        elif operator in ('=', ':=', '::='):
            # Direct assignment - replace
            self.bblayers = paths
        elif operator == '?=':
            # Default - only if empty
            if not self.bblayers:
                self.bblayers = paths
        elif operator == '??=':
            # Weak default
            if not self.bblayers:
                self.bblayers = paths
        elif operator in ('+=', ':append='):
            # Append
            self.bblayers.extend(paths)
        elif operator in ('.=',):
            # Append (same effect for lists)
            self.bblayers.extend(paths)
        elif operator in ('=+', ':prepend='):
            # Prepend
            self.bblayers = paths + self.bblayers
        elif operator == '=.':
            # Prepend
            self.bblayers = paths + self.bblayers

    def _extract_paths_from_value(self, value: str) -> List[str]:
        """Extract layer paths from a BBLAYERS value string."""
        paths = []
        for token in value.split():
            # Skip empty tokens
            if not token:
                continue
            # Skip obvious non-paths
            if '/' not in token and not token.startswith('$'):
                continue
            # Expand any remaining variables
            expanded = self._expand_variables(token)
            # Skip if still has unexpanded variables
            if '${' in expanded:
                # Try to warn but still include partial path
                # This helps with debugging
                pass
            if expanded and '/' in expanded:
                # Normalize the path
                if os.path.isabs(expanded):
                    paths.append(os.path.normpath(expanded))
                else:
                    # Relative to build dir
                    paths.append(os.path.normpath(os.path.join(self.build_dir, expanded)))
        return paths

    def _process_include(self, line: str) -> None:
        """Process include or require statement."""
        stripped = line.strip()

        match = re.match(r'^(include|require)\s+(.+)$', stripped)
        if not match:
            return

        directive = match.group(1)
        include_path = match.group(2).strip()

        # Expand variables in include path
        include_path = self._expand_variables(include_path)

        # Still has unexpanded variables - skip
        if '${' in include_path:
            return

        # Resolve relative paths
        if not os.path.isabs(include_path):
            include_path = os.path.join(self.conf_dir, include_path)

        # For 'include', file is optional; for 'require', it must exist
        if os.path.isfile(include_path):
            self._parse_file(include_path)
        elif directive == 'require':
            # Required file missing - in real bitbake this is an error
            # We just skip it silently
            pass

    def _parse_file(self, file_path: str) -> None:
        """Parse a single conf file."""
        if not os.path.isfile(file_path):
            return

        try:
            with open(file_path, encoding='utf-8') as f:
                content = f.read()
        except (IOError, OSError):
            return

        # Join continued lines
        lines = self._join_continued_lines(content)

        for line in lines:
            stripped = line.strip()

            # Skip empty and comment lines
            if not stripped or stripped.startswith('#'):
                continue

            # Check for include/require
            if stripped.startswith('include ') or stripped.startswith('require '):
                self._process_include(line)
                continue

            # Try to parse as assignment
            assignment = self._parse_assignment(line)
            if assignment:
                var_name, operator, value = assignment
                self._apply_assignment(var_name, operator, value)

    def parse(self) -> List[str]:
        """Parse bblayers.conf and return list of layer paths."""
        if not os.path.exists(self.conf_path):
            return []

        self._parse_file(self.conf_path)

        # Apply removes
        final_layers = []
        seen = set()
        for layer in self.bblayers:
            # Normalize for comparison
            normalized = os.path.realpath(layer) if os.path.exists(layer) else layer
            if normalized in self.bblayers_remove:
                continue
            if normalized in seen:
                continue
            seen.add(normalized)
            final_layers.append(layer)

        return final_layers


def extract_layer_paths(conf_path: str) -> List[str]:
    """
    Extract layer paths from bblayers.conf.

    Uses BblayersParser for robust parsing with support for:
    - Variable expansion (${TOPDIR}, etc.)
    - Append/prepend operators (+=, :append, etc.)
    - Remove operator (:remove)
    - Include/require statements
    """
    if not os.path.exists(conf_path):
        sys.exit(f"bblayers.conf not found: {conf_path}")

    parser = BblayersParser(conf_path)
    return parser.parse()
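
# --- Editor's note: a minimal usage sketch, not part of the released file. ---
# Shows how the parser above might be driven; the path is hypothetical, and
# extract_layer_paths() exits the process if the file does not exist.
def _example_extract_layer_paths() -> None:
    conf = os.path.expanduser("~/poky/build/conf/bblayers.conf")  # hypothetical path
    for layer in extract_layer_paths(conf):
        print(layer)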


def dedupe_preserve_order(items: Iterable[str]) -> List[str]:
    seen: Set[str] = set()
    out: List[str] = []
    for item in items:
        if item in seen:
            continue
        seen.add(item)
        out.append(item)
    return out


def discover_layers(
    base_dir: str = ".",
    max_depth: int = 3,
    exclude_layers: Optional[Set[str]] = None,
    peer_dirs: Optional[Set[str]] = None,
) -> List[Tuple[str, str]]:
    """
    Discover layers by finding conf/layer.conf files.

    Args:
        base_dir: Directory to start searching from
        max_depth: Maximum directory depth to search (default 3)
        exclude_layers: Set of layer paths to skip (e.g., already in bblayers.conf)
        peer_dirs: Additional directories to search (e.g., parent dirs of known repos)

    Returns:
        List of (layer_path, repo_path) tuples
    """
    exclude_layers = exclude_layers or set()
    peer_dirs = peer_dirs or set()
    results = []
    seen_layers: Set[str] = set()

    # Directories to skip (won't contain layers)
    skip_dirs = {
        '.git', 'build', 'builds', 'tmp', 'tmp-glibc', 'tmp-musl',
        'downloads', 'sstate-cache', 'cache', 'node_modules',
        '__pycache__', '.tox', '.venv', 'venv',
    }

    def search_from(start_dir: str) -> None:
        """Search for layers starting from a directory."""
        base = os.path.abspath(start_dir)
        if not os.path.isdir(base):
            return

        for root, dirs, _files in os.walk(base):
            # Calculate depth from base
            rel_path = root[len(base):]
            if rel_path:
                depth = rel_path.count(os.sep)
            else:
                depth = 0

            if depth >= max_depth:
                dirs[:] = []  # Don't descend further
            else:
                # Prune directories that won't contain layers
                dirs[:] = [d for d in dirs if d not in skip_dirs]

            # Check for layer.conf (standard BitBake layer structure)
            layer_conf = os.path.join(root, "conf", "layer.conf")
            if os.path.isfile(layer_conf):
                # Use realpath to resolve symlinks and match bblayers.conf paths
                layer_path = os.path.realpath(root)
                if layer_path in exclude_layers or layer_path in seen_layers:
                    continue
                repo_path = git_toplevel(layer_path)
                if repo_path:
                    results.append((layer_path, repo_path))
                    seen_layers.add(layer_path)

    # Search from base directory
    search_from(base_dir)

    # Also search from peer directories
    for peer_dir in peer_dirs:
        search_from(peer_dir)

    return results
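
# --- Editor's note: an illustrative sketch, not part of the released file. ---
# discover_layers() walks at most max_depth levels, prunes build/tmp/cache
# directories, and pairs each conf/layer.conf hit with its git toplevel.
# The bblayers.conf path below is hypothetical.
def _example_discover_layers() -> None:
    already = set(extract_layer_paths("build/conf/bblayers.conf"))  # hypothetical path
    for layer_path, repo_path in discover_layers(".", max_depth=3, exclude_layers=already):
        print(f"{os.path.basename(layer_path)} (repo: {repo_path})")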


def discover_git_repos(
    base_dir: str = ".",
    max_depth: int = 3,
    exclude_repos: Optional[Set[str]] = None,
    peer_dirs: Optional[Set[str]] = None,
) -> List[str]:
    """
    Discover git repositories that are NOT layers (no conf/layer.conf).

    Args:
        base_dir: Directory to start searching from
        max_depth: Maximum directory depth to search (default 3)
        exclude_repos: Set of repo paths to skip (e.g., already known repos)
        peer_dirs: Additional directories to check for repos (e.g., parents of known repos)

    Returns:
        List of repo paths (git toplevels)
    """
    exclude_repos = exclude_repos or set()
    peer_dirs = peer_dirs or set()
    results: Set[str] = set()

    # Directories to skip
    skip_dirs = {
        '.git', 'build', 'builds', 'tmp', 'tmp-glibc', 'tmp-musl',
        'downloads', 'sstate-cache', 'cache', 'node_modules',
        '__pycache__', '.tox', '.venv', 'venv',
    }

    def is_non_layer_repo(path: str) -> Optional[str]:
        """Check if path is a git repo without conf/layer.conf. Returns repo path or None."""
        git_dir = os.path.join(path, ".git")
        if os.path.exists(git_dir):
            repo_path = git_toplevel(path)
            if repo_path and repo_path not in exclude_repos:
                # Check it's NOT a layer
                layer_conf = os.path.join(repo_path, "conf", "layer.conf")
                if not os.path.isfile(layer_conf):
                    return repo_path
        return None

    # First, check peer directories (one level deep)
    for peer_dir in peer_dirs:
        if not os.path.isdir(peer_dir):
            continue
        # Check the peer dir itself
        repo = is_non_layer_repo(peer_dir)
        if repo:
            results.add(repo)
        # Check immediate children
        try:
            for entry in os.listdir(peer_dir):
                if entry in skip_dirs:
                    continue
                child = os.path.join(peer_dir, entry)
                if os.path.isdir(child):
                    repo = is_non_layer_repo(child)
                    if repo:
                        results.add(repo)
        except PermissionError:
            pass

    # Then do the normal walk from base_dir
    base = os.path.abspath(base_dir)
    for root, dirs, _files in os.walk(base):
        # Calculate depth from base
        rel_path = root[len(base):]
        if rel_path:
            depth = rel_path.count(os.sep)
        else:
            depth = 0

        if depth >= max_depth:
            dirs[:] = []  # Don't descend further
        else:
            # Prune directories that won't contain repos
            dirs[:] = [d for d in dirs if d not in skip_dirs]

        # Check if this is a git repo
        repo = is_non_layer_repo(root)
        if repo:
            results.add(repo)
            # Don't descend into git repos
            dirs[:] = []
        elif os.path.exists(os.path.join(root, ".git")):
            # It's a git repo but excluded or is a layer - still don't descend
            dirs[:] = []

    return list(results)


def build_layer_collection_map(layer_paths: List[str]) -> Dict[str, str]:
    """
    Build a map from layer collection names to layer paths.

    Parses BBFILE_COLLECTIONS from each layer's conf/layer.conf.

    Args:
        layer_paths: List of layer directory paths

    Returns:
        Dict mapping collection name to layer path
    """
    collection_map: Dict[str, str] = {}

    for layer_path in layer_paths:
        layer_conf = os.path.join(layer_path, "conf", "layer.conf")
        if not os.path.isfile(layer_conf):
            continue

        try:
            with open(layer_conf, 'r') as f:
                content = f.read()
            # Parse BBFILE_COLLECTIONS += "name" or BBFILE_COLLECTIONS = "name"
            for match in re.finditer(r'BBFILE_COLLECTIONS\s*\+?=\s*"([^"]+)"', content):
                collection_name = match.group(1).strip()
                if collection_name:
                    collection_map[collection_name] = layer_path
        except (IOError, OSError):
            pass

    return collection_map
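
# --- Editor's note: an illustrative sketch, not part of the released file. ---
# A layer.conf typically carries a line such as:
#   BBFILE_COLLECTIONS += "meta-python"
# which the regex above turns into {"meta-python": "/path/to/meta-python"}.
# The bblayers.conf path below is hypothetical.
def _example_collection_map() -> None:
    layers = extract_layer_paths("build/conf/bblayers.conf")  # hypothetical path
    for name, path in build_layer_collection_map(layers).items():
        print(f"{name} -> {path}")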


def add_layer_to_bblayers(
    layer_path: str,
    bblayers_conf: str,
    collection_map: Dict[str, str],
    init_script: Optional[str] = None,
) -> Tuple[bool, str, List[str]]:
    """
    Add a layer to bblayers.conf using bitbake-layers, resolving dependencies.

    Args:
        layer_path: Path to the layer to add
        bblayers_conf: Path to bblayers.conf
        collection_map: Map from collection names to layer paths
        init_script: Optional path to oe-init-build-env script

    Returns:
        Tuple of (success, message, layers_added)
    """
    # Ensure absolute path for bblayers.conf to correctly compute build_dir
    bblayers_conf = os.path.abspath(bblayers_conf)
    build_dir = os.path.dirname(os.path.dirname(bblayers_conf))  # conf/ -> build/

    if not build_dir or not os.path.isdir(build_dir):
        return False, f"Invalid build directory: {build_dir}", []

    layers_added: List[str] = []
    layers_to_add: List[str] = [os.path.abspath(layer_path)]
    max_iterations = 20  # Prevent infinite loops

    # Check if bitbake-layers is already available in PATH
    bitbake_layers_cmd = shutil.which("bitbake-layers")
    use_direct = bitbake_layers_cmd is not None

    # If not in PATH, find init script
    if not use_direct and not init_script:
        # Look for oe-init-build-env in parent directories
        search_dir = os.path.dirname(build_dir)
        for _ in range(3):
            candidate = os.path.join(search_dir, "oe-init-build-env")
            if os.path.isfile(candidate):
                init_script = candidate
                break
            # Also check in layers subdirectory
            for subdir in ["layers", "poky", "openembedded-core"]:
                candidate = os.path.join(search_dir, subdir, "oe-init-build-env")
                if os.path.isfile(candidate):
                    init_script = candidate
                    break
                # Check deeper
                candidate = os.path.join(search_dir, subdir, "openembedded-core", "oe-init-build-env")
                if os.path.isfile(candidate):
                    init_script = candidate
                    break
            if init_script:
                break
            search_dir = os.path.dirname(search_dir)

    if not use_direct and not init_script:
        return False, "Could not find oe-init-build-env script or bitbake-layers in PATH", []

    iteration = 0
    attempted: Set[str] = set()  # Track layers we've attempted to avoid infinite loops

    while layers_to_add and iteration < max_iterations:
        iteration += 1
        current_layer = layers_to_add.pop(0)

        if current_layer in layers_added:
            continue

        # Track how many times we've tried this layer
        attempt_key = current_layer
        if attempt_key in attempted:
            # We've already tried this layer once, something is wrong
            continue
        attempted.add(attempt_key)

        try:
            # Use direct bitbake-layers if available, otherwise source init script
            if use_direct:
                result = subprocess.run(
                    ["bitbake-layers", "add-layer", current_layer],
                    capture_output=True,
                    text=True,
                    cwd=build_dir,
                )
            else:
                cmd = f'source "{init_script}" "{build_dir}" > /dev/null 2>&1 && bitbake-layers add-layer "{current_layer}" 2>&1'
                result = subprocess.run(
                    cmd,
                    shell=True,
                    executable='/bin/bash',
                    capture_output=True,
                    text=True,
                    cwd=build_dir,
                )

            output = result.stdout + result.stderr

            if result.returncode == 0:
                layers_added.append(current_layer)
            elif "already in" in output.lower() or "already enabled" in output.lower():
                # Layer is already in bblayers.conf - treat as success
                layers_added.append(current_layer)
            else:
                # Parse dependency errors
                # Format: ERROR: Layer 'X' depends on layer 'Y', but this layer is not enabled
                dep_pattern = r"ERROR: Layer '[^']+' depends on layer '([^']+)', but this layer is not enabled"
                missing_deps = re.findall(dep_pattern, output)

                if missing_deps:
                    # Check if the "missing" dependency is actually the layer we're adding
                    # This happens when bitbake-layers complains about pre-existing issues
                    # e.g., "networking-layer depends on meta-python" when adding meta-python
                    current_layer_name = None
                    layer_conf = os.path.join(current_layer, "conf", "layer.conf")
                    if os.path.isfile(layer_conf):
                        try:
                            with open(layer_conf, 'r') as f:
                                content = f.read()
                            match = re.search(r'BBFILE_COLLECTIONS\s*\+?=\s*"([^"]+)"', content)
                            if match:
                                current_layer_name = match.group(1).strip()
                        except (IOError, OSError):
                            pass

                    # Filter out deps that are the current layer itself
                    real_missing_deps = [d for d in missing_deps if d != current_layer_name]

                    if not real_missing_deps:
                        # The only "missing" dep is the layer we're adding - this is a pre-existing
                        # dependency issue in bblayers.conf. bitbake-layers can't add the layer
                        # because it validates all existing layers first.
                        # Suggest manual fix.
                        layer_name = os.path.basename(current_layer)
                        return False, f"Pre-existing dependency issue prevents add. Add {layer_name} manually or fix bblayers.conf", layers_added

                    # Remove from attempted so we can retry after adding deps
                    attempted.discard(current_layer)
                    # Add current layer back to try after dependencies
                    layers_to_add.append(current_layer)

                    # Find and add missing dependencies
                    added_any_dep = False
                    for dep_name in real_missing_deps:
                        if dep_name in collection_map:
                            dep_path = collection_map[dep_name]
                            if dep_path not in layers_added and dep_path not in layers_to_add:
                                # Insert at beginning so deps are added first
                                layers_to_add.insert(0, dep_path)
                                added_any_dep = True
                        else:
                            return False, f"Missing dependency '{dep_name}' not found in available layers", layers_added

                    # If we didn't add any new deps, we're stuck
                    if not added_any_dep:
                        deps_status = [f"{d}" for d in real_missing_deps]
                        return False, f"Dependencies already queued but still failing: {', '.join(deps_status)}", layers_added
                else:
                    # Unknown error
                    error_msg = result.stderr.strip() or result.stdout.strip()
                    return False, f"Failed to add layer: {error_msg}", layers_added

        except Exception as e:
            return False, f"Error running bitbake-layers: {e}", layers_added

    if iteration >= max_iterations:
        return False, "Too many dependency iterations - possible circular dependency", layers_added

    # Build summary of what was added
    if layers_added:
        added_names = [os.path.basename(l) for l in layers_added]
        return True, f"Added: {', '.join(added_names)}", layers_added
    return True, "No layers added", layers_added


def remove_layer_from_bblayers(
    layer_path: str,
    bblayers_conf: str,
) -> Tuple[bool, str]:
    """
    Remove a layer from bblayers.conf using bitbake-layers.

    Args:
        layer_path: Path to the layer to remove
        bblayers_conf: Path to bblayers.conf

    Returns:
        Tuple of (success, message)
    """
    # Ensure absolute path for bblayers.conf to correctly compute build_dir
    bblayers_conf = os.path.abspath(bblayers_conf)
    build_dir = os.path.dirname(os.path.dirname(bblayers_conf))

    if not build_dir or not os.path.isdir(build_dir):
        return False, f"Invalid build directory: {build_dir}"

    layer_path = os.path.abspath(layer_path)
    layer_name = os.path.basename(layer_path)

    # Check if bitbake-layers is already available in PATH
    bitbake_layers_cmd = shutil.which("bitbake-layers")

    try:
        if bitbake_layers_cmd:
            result = subprocess.run(
                ["bitbake-layers", "remove-layer", layer_path],
                capture_output=True,
                text=True,
                cwd=build_dir,
            )
        else:
            # Try to find and source oe-init-build-env
            init_script = None
            search_dir = os.path.dirname(build_dir)
            for _ in range(3):
                for candidate_path in [
                    os.path.join(search_dir, "oe-init-build-env"),
                    os.path.join(search_dir, "layers", "oe-init-build-env"),
                    os.path.join(search_dir, "poky", "oe-init-build-env"),
                ]:
                    if os.path.isfile(candidate_path):
                        init_script = candidate_path
                        break
                if init_script:
                    break
                search_dir = os.path.dirname(search_dir)

            if not init_script:
                return False, "Could not find oe-init-build-env script or bitbake-layers in PATH"

            cmd = f'source "{init_script}" "{build_dir}" > /dev/null 2>&1 && bitbake-layers remove-layer "{layer_path}" 2>&1'
            result = subprocess.run(
                cmd,
                shell=True,
                executable='/bin/bash',
                capture_output=True,
                text=True,
                cwd=build_dir,
            )

        if result.returncode == 0:
            return True, f"Removed: {layer_name}"
        else:
            error_msg = result.stderr.strip() or result.stdout.strip()
            return False, f"Failed to remove layer: {error_msg}"

    except Exception as e:
        return False, f"Error running bitbake-layers: {e}"


def run_cmd(repo: str, args, dry_run: bool, *, shell: bool = False) -> None:
    if dry_run:
        cmd = args if shell else " ".join(shlex.quote(str(a)) for a in args)
        print(f"[dry-run] (cd {shlex.quote(repo)} && {cmd})")
        return
    subprocess.run(args, check=True, cwd=repo, shell=shell)
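
# --- Editor's note: an illustrative sketch, not part of the released file. ---
# With dry_run=True, run_cmd() only prints the shell-quoted command; nothing is executed.
# The repo path below is hypothetical.
def _example_run_cmd_dry() -> None:
    run_cmd("/tmp/some-repo", ["git", "status", "--short"], dry_run=True)
    # prints: [dry-run] (cd /tmp/some-repo && git status --short)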


def get_upstream_count_ls_remote(repo: str, branch: str, timeout: int = 5) -> Optional[int]:
    """Get upstream commit count using ls-remote (no local modification).

    Returns:
        None: no remote tracking, timeout, or error
        -1: remote has changes but can't count (not fetched)
        0+: exact count of commits to pull
    """
    try:
        # Get remote SHA via ls-remote (with timeout to avoid hanging)
        out = subprocess.check_output(
            ["git", "-C", repo, "ls-remote", "origin", branch],
            text=True,
            stderr=subprocess.DEVNULL,
            timeout=timeout,
        )
        if not out.strip():
            return None
        remote_sha = out.split()[0]

        # Check if we have this commit locally
        exists = subprocess.run(
            ["git", "-C", repo, "cat-file", "-e", remote_sha],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        ).returncode == 0

        if not exists:
            # Remote has commits we don't have locally - can't count without fetch
            return -1  # Signal "unknown but has changes"

        # Count commits between HEAD and remote
        out = subprocess.check_output(
            ["git", "-C", repo, "rev-list", "--count", f"HEAD..{remote_sha}"],
            text=True,
            stderr=subprocess.DEVNULL,
        )
        return int(out.strip())
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired, ValueError):
        return None
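
# --- Editor's note: an illustrative sketch, not part of the released file. ---
# The three-way return contract (None / -1 / 0+) documented above is easiest
# to read as a small helper that maps it to a human-readable status string.
def _example_upstream_status(repo: str, branch: str) -> str:
    count = get_upstream_count_ls_remote(repo, branch)
    if count is None:
        return "no remote info (error, timeout, or no tracking branch)"
    if count == -1:
        return "remote has new commits (fetch to count them)"
    return f"{count} commit(s) to pull" if count else "up-to-date"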


def fzf_prompt_action(repo: str, branch: str, default_action: str, upstream_info: Optional[str] = None) -> Optional[Tuple[str, str, Optional[str]]]:
    """Use fzf to prompt for update action."""
    display_name = repo_display_name(repo)

    # Build menu options
    default_label = {"rebase": "pull --rebase", "merge": "pull (merge)", "skip": "skip"}.get(default_action, default_action)
    menu_lines = [
        f"►► Use default: {default_label}",
        f" Pull --rebase origin/{branch}",
        f" Pull (merge) origin/{branch}",
        " Custom command...",
        "── Set default ──",
        " Set default: rebase",
        " Set default: merge",
        " Set default: skip",
        "──────────────────",
        " Skip this repo (s)",
        " Quit (q)",
    ]

    # Build header with repo name prominent at top
    header = f"{Colors.BOLD}{Colors.GREEN}→ {display_name}{Colors.RESET}\n"
    header += f" branch: {Colors.BOLD}{branch}{Colors.RESET} default: {default_action}"
    if upstream_info:
        upstream_color = ANSI_COLORS.get(get_terminal_color("upstream"), Colors.YELLOW)
        header += f" {upstream_color}{upstream_info}{Colors.RESET}"
    header += "\n Enter=select | s=skip | q=quit"

    try:
        result = subprocess.run(
            [
                "fzf",
                "--no-multi",
                "--no-sort",
                "--no-info",
                "--ansi",  # Enable ANSI color support in header
                "--height", "~18",  # Inline, fit content
                "--header", header,
                "--prompt", "Action: ",
                "--bind", "s:become(echo SKIP)",
                "--bind", "q:become(echo QUIT)",
            ] + get_fzf_color_args(),
            input="\n".join(menu_lines),
            stdout=subprocess.PIPE,
            text=True,
        )
    except FileNotFoundError:
        return None  # fzf not available

    if result.returncode != 0 or not result.stdout.strip():
        # Escape pressed
        print(f"Skipping {repo}.")
        return ("skip", branch, None)

    selected = result.stdout.strip()

    if selected == "SKIP" or "Skip this repo" in selected:
        print(f"Skipping {repo}.")
        return ("skip", branch, None)

    if selected == "QUIT" or "Quit" in selected:
        return ("quit", branch, None)

    if "Use default" in selected:
        return (default_action, branch, None)

    if "Pull --rebase" in selected:
        return ("rebase", branch, None)

    if "Pull (merge)" in selected:
        return ("merge", branch, None)

    if "Custom command" in selected:
        custom_cmd = input("Enter command: ").strip()
        if not custom_cmd:
            print(f"No command provided, skipping {repo}.")
            return ("skip", branch, None)
        return ("custom", custom_cmd, None)

    if "Set default: rebase" in selected:
        print(f"Setting default for {repo} to rebase.")
        return ("rebase", branch, "rebase")

    if "Set default: merge" in selected:
        print(f"Setting default for {repo} to merge.")
        return ("merge", branch, "merge")

    if "Set default: skip" in selected:
        print(f"Setting default for {repo} to skip.")
        return ("skip", branch, "skip")

    return ("skip", branch, None)


def prompt_action(repo: str, branch: Optional[str], default_action: str, use_fzf: bool = True) -> Optional[Tuple[str, str, Optional[str]]]:
    if not branch:
        print(f"Skipping {repo} (detached HEAD or no branch).")
        return None

    # Get upstream status
    upstream_info = ""
    upstream_count = get_upstream_count_ls_remote(repo, branch)
    if upstream_count is not None:
        if upstream_count == -1:
            upstream_info = "↓ upstream has changes"
        elif upstream_count > 0:
            upstream_info = f"↓ {upstream_count} to pull"
        elif upstream_count == 0:
            upstream_info = "up-to-date"

    # If up-to-date, default to skip (but don't change saved default)
    effective_default = default_action
    if upstream_count == 0:
        effective_default = "skip"

    # Try fzf first (unless disabled)
    if use_fzf and fzf_available():
        result = fzf_prompt_action(repo, branch, effective_default, upstream_info or None)
        if result is not None:
            return result

    # Fall back to text-based prompt
    print(f"\n╭─ {repo}")
    print(f"│ branch: {branch}")
    upstream_line = f" ({upstream_info})" if upstream_info else ""
    print(f"│ Default: {effective_default}{upstream_line}")
    print("│ Actions: [Enter] default | r [br] pull --rebase | m [br] pull | c <cmd> custom | d <skip|rebase|merge> set default | s skip | q quit")
    choice = input("╰─ choice: ").strip()

    if not choice:
        return (effective_default, branch, None)

    if choice.lower() in {"s", "n"}:
        print(f"Skipping {repo}.")
        return ("skip", branch, None)

    if choice.lower() == "q":
        return ("quit", branch, None)

    if choice.lower().startswith("d "):
        parts = choice.split(maxsplit=1)
        new_default = parts[1].strip().lower() if len(parts) > 1 else ""
        if new_default not in {"skip", "rebase", "merge"}:
            print(f"Unrecognized default '{new_default}', keeping current default.")
            return None
        print(f"Setting default for {repo} to {new_default}.")
        return (new_default, branch, new_default)

    if choice.lower().startswith("c "):
        custom_cmd = choice[2:].strip()
        if not custom_cmd:
            print(f"No custom command provided, skipping {repo}.")
            return None
        return ("custom", custom_cmd, None)

    parts = choice.split(maxsplit=1)
    action = parts[0].lower()
    target = branch
    if len(parts) > 1 and parts[1]:
        target = parts[1]

    if action == "r":
        return ("rebase", target, None)
    if action == "m":
        return ("merge", target, None)

    print(f"Unrecognized input '{choice}', skipping {repo}.")
    return None


def repo_display_name(repo: str) -> str:
    """
    Get display name for a repo. Checks git config for override first,
    then derives from origin URL, falling back to basename.

    To set a custom name: git -C <repo> config bit.display-name "OE-core"
    """
    # Check for user override in git config
    try:
        override = subprocess.check_output(
            ["git", "-C", repo, "config", "--get", "bit.display-name"],
            stderr=subprocess.DEVNULL,
            text=True,
        ).strip()
        if override:
            return override
    except subprocess.CalledProcessError:
        pass

    # Derive from origin URL
    try:
        url = subprocess.check_output(
            ["git", "-C", repo, "config", "--get", "remote.origin.url"],
            stderr=subprocess.DEVNULL,
            text=True,
        ).strip()
    except subprocess.CalledProcessError:
        url = ""

    candidate = url
    if candidate:
        candidate = candidate.rstrip("/")
        # handle scp-like git@host:org/repo.git
        if ":" in candidate and "://" not in candidate:
            candidate = candidate.split(":", 1)[1]
        candidate = candidate.split("/")[-1]
        if candidate.endswith(".git"):
            candidate = candidate[:-4]
    if not candidate:
        candidate = os.path.basename(repo.rstrip("/")) or repo
    return candidate
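
# --- Editor's note: illustrative examples, not part of the released file. ---
# URL-derived names: an origin URL such as https://example.com/mirrors/meta-virtualization.git
# or the scp-like form git@example.com:mirrors/meta-virtualization.git (both hypothetical)
# reduces to "meta-virtualization", while a per-repo override set with
#   git -C <repo> config bit.display-name "OE-core"
# takes precedence over the derived name.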


def repo_origin_url(repo: str) -> Optional[str]:
    """Get the origin URL for a repo."""
    try:
        return subprocess.check_output(
            ["git", "-C", repo, "config", "--get", "remote.origin.url"],
            stderr=subprocess.DEVNULL,
            text=True,
        ).strip()
    except subprocess.CalledProcessError:
        return None


def commit_files(repo: str, commit: str) -> List[str]:
    """Get list of files changed in a commit."""
    try:
        output = subprocess.check_output(
            ["git", "-C", repo, "show", "--name-only", "--format=", commit],
            text=True,
        )
        return [f for f in output.strip().splitlines() if f]
    except subprocess.CalledProcessError:
        return []


def commit_to_layer(repo: str, commit: str, layers: List[str]) -> Optional[str]:
    """
    Determine which layer a commit belongs to.
    Returns the layer path if commit touches exactly one layer,
    None if commit touches no layers or multiple layers.
    """
    files = commit_files(repo, commit)
    if not files:
        return None

    # Get relative paths of layers within the repo
    layer_relpaths = []
    for layer in layers:
        try:
            relpath = os.path.relpath(layer, repo)
            layer_relpaths.append((layer, relpath))
        except ValueError:
            continue

    # Find which layers this commit touches
    touched_layers = set()
    for filepath in files:
        for layer, relpath in layer_relpaths:
            if filepath.startswith(relpath + "/") or filepath == relpath:
                touched_layers.add(layer)
                break

    if len(touched_layers) == 1:
        return touched_layers.pop()
    return None


def layer_display_name(layer: str) -> str:
    """Get display name for a layer (just the directory name)."""
    return os.path.basename(layer.rstrip("/"))


def group_commits_by_layer(repo: str, commits: List[str], layers: List[str]) -> Tuple[Dict[str, List[str]], List[str], List[str]]:
    """
    Group commits by which layer they touch.
    Returns (layer_commits dict, list of commits touching multiple layers, list of commits touching no layers).
    """
    layer_commits: Dict[str, List[str]] = {}
    cross_layer_commits: List[str] = []
    no_layer_commits: List[str] = []

    for commit in commits:
        layer = commit_to_layer(repo, commit, layers)
        if layer is None:
            # Check if it touches multiple layers or no layers
            files = commit_files(repo, commit)
            layer_relpaths = []
            for l in layers:
                try:
                    relpath = os.path.relpath(l, repo)
                    layer_relpaths.append((l, relpath))
                except ValueError:
                    continue

            touched = set()
            for filepath in files:
                for l, relpath in layer_relpaths:
                    if filepath.startswith(relpath + "/") or filepath == relpath:
                        touched.add(l)
                        break

            if len(touched) > 1:
                cross_layer_commits.append(commit)
            elif len(touched) == 1:
                layer = touched.pop()
                layer_commits.setdefault(layer, []).append(commit)
            else:
                no_layer_commits.append(commit)
        else:
            layer_commits.setdefault(layer, []).append(commit)

    return layer_commits, cross_layer_commits, no_layer_commits
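
# --- Editor's note: an illustrative sketch, not part of the released file. ---
# Typical use: take the commits on top of a base ref and bucket them per layer.
# The base_ref value is whatever upstream ref the caller compares against.
def _example_group_commits(repo: str, base_ref: str) -> None:
    commits = subprocess.check_output(
        ["git", "-C", repo, "rev-list", "--reverse", f"{base_ref}..HEAD"], text=True
    ).strip().splitlines()
    layers = [lp for lp, _ in discover_layers(repo)]
    per_layer, cross, outside = group_commits_by_layer(repo, commits, layers)
    for layer, cs in per_layer.items():
        print(f"{layer_display_name(layer)}: {len(cs)} commit(s)")
    print(f"cross-layer: {len(cross)}, outside any layer: {len(outside)}")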


def create_pull_branch(repo: str, branch_name: str, base_ref: str, range_spec: str, force: bool) -> Tuple[bool, str]:
    """
    Create a branch with the selected commits for pulling.
    The branch is created from the parent of the first commit in range_spec,
    then all commits are cherry-picked onto it.
    Returns (success, message).
    """
    # Check if branch exists
    branch_exists = subprocess.run(
        ["git", "-C", repo, "rev-parse", "--verify", branch_name],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    ).returncode == 0

    if branch_exists:
        if force:
            subprocess.run(["git", "-C", repo, "branch", "-D", branch_name], check=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        else:
            return False, f"Branch '{branch_name}' already exists (use --force to overwrite)"

    # Get the commits in the range
    try:
        if range_spec == "--root":
            # All commits from root - just create branch at HEAD
            subprocess.run(
                ["git", "-C", repo, "branch", branch_name, "HEAD"],
                check=True,
            )
        else:
            # Get list of commits in range
            commits = subprocess.check_output(
                ["git", "-C", repo, "rev-list", "--reverse", range_spec],
                text=True,
            ).strip().splitlines()

            if not commits:
                return False, "No commits in range"

            # Get parent of first commit
            first_commit = commits[0]
            try:
                first_parent = subprocess.check_output(
                    ["git", "-C", repo, "rev-parse", f"{first_commit}^"],
                    text=True,
                    stderr=subprocess.DEVNULL,
                ).strip()
            except subprocess.CalledProcessError:
                # First commit has no parent (root commit), use base_ref as fallback
                first_parent = base_ref

            # Check if first_parent is in upstream (base_ref), or if there are
            # intermediate local commits between upstream and our selection
            is_upstream = subprocess.run(
                ["git", "-C", repo, "merge-base", "--is-ancestor", first_parent, base_ref],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            ).returncode == 0

            extra_count = 0
            if is_upstream:
                # Parent is upstream, we can branch from it directly
                branch_base = first_parent
            else:
                # Parent is a local commit - find commits between upstream and first selected
                try:
                    extra_commits = subprocess.check_output(
                        ["git", "-C", repo, "rev-list", "--reverse", f"{base_ref}..{first_parent}"],
                        text=True,
                    ).strip().splitlines()
                except subprocess.CalledProcessError:
                    extra_commits = []

                if extra_commits:
                    # Include the extra local commits so the branch is pullable
                    extra_count = len(extra_commits)
                    commits = extra_commits + commits
                    branch_base = base_ref
                else:
                    branch_base = first_parent

            # Create branch at the determined base
            subprocess.run(
                ["git", "-C", repo, "branch", branch_name, branch_base],
                check=True,
            )

            # Cherry-pick commits onto the branch
            # First checkout the branch
            original_branch = subprocess.check_output(
                ["git", "-C", repo, "symbolic-ref", "--short", "HEAD"],
                text=True,
            ).strip()

            subprocess.run(["git", "-C", repo, "checkout", branch_name], check=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

            cherry_pick_failed = False
            failed_commit = None
            for commit in commits:
                result = subprocess.run(
                    ["git", "-C", repo, "cherry-pick", commit],
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                )
                if result.returncode != 0:
                    cherry_pick_failed = True
                    failed_commit = commit[:12]
                    break

            if cherry_pick_failed:
                # Abort cherry-pick and clean up
                subprocess.run(["git", "-C", repo, "cherry-pick", "--abort"],
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                subprocess.run(["git", "-C", repo, "checkout", original_branch],
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                subprocess.run(["git", "-C", repo, "branch", "-D", branch_name],
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                return False, f"Cherry-pick failed on {failed_commit} (conflict with {base_ref}?)"

            # Switch back to original branch
            subprocess.run(["git", "-C", repo, "checkout", original_branch], check=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        if range_spec != "--root" and extra_count > 0:
            return True, f"Created branch '{branch_name}' (included {extra_count} additional local commit(s) before selection)"
        return True, f"Created branch '{branch_name}'"
    except subprocess.CalledProcessError as e:
        return False, f"Failed to create branch: {e}"
|
|
1365
|
+
|
|
1366
|
+
def author_ident(repo: str) -> Tuple[str, str]:
    fallback = ("nobody", "nobody@localhost")
    try:
        ident = subprocess.check_output(
            ["git", "-C", repo, "var", "GIT_AUTHOR_IDENT"],
            stderr=subprocess.DEVNULL,
            text=True,
        ).strip()
        # Format: "Name <email> timestamp timezone"
        if "<" in ident and ">" in ident:
            name = ident.split("<", 1)[0].strip()
            email = ident.split("<", 1)[1].split(">", 1)[0].strip()
            return name or fallback[0], email or fallback[1]
    except subprocess.CalledProcessError:
        pass
    return fallback

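A minimal usage sketch for author_ident(), assuming the package is installed; the repository path below is invented.

```python
# Hedged sketch; "/srv/layers/meta-example" is an invented path.
from bitbake_project.commands.common import author_ident

name, email = author_ident("/srv/layers/meta-example")
# `git var GIT_AUTHOR_IDENT` prints e.g. "Jane Doe <jane@example.com> 1700000000 +0000";
# only the name/email pair is kept, with ("nobody", "nobody@localhost") as the fallback.
print(f"{name} <{email}>")
```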
def patch_subject(patch_path: str) -> str:
    try:
        with open(patch_path, encoding="utf-8") as f:
            for line in f:
                if line.lower().startswith("subject:"):
                    subj = line.split(":", 1)[1].strip()
                    if subj.startswith("[PATCH"):
                        end = subj.find("]")
                        if end != -1:
                            subj = subj[end + 1 :].strip()
                    return subj
    except Exception:
        pass
    return os.path.basename(patch_path)

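A small sketch of how patch_subject() strips a [PATCH x/y] prefix; the patch content below is invented.

```python
# Hedged sketch using a throwaway temp file.
import tempfile

from bitbake_project.commands.common import patch_subject

with tempfile.NamedTemporaryFile("w", suffix=".patch", delete=False) as f:
    f.write("Subject: [PATCH 2/5] linux-yocto: enable CONFIG_EXAMPLE\n\nbody...\n")
    path = f.name

print(patch_subject(path))  # -> "linux-yocto: enable CONFIG_EXAMPLE"
```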
def git_version() -> str:
    try:
        out = subprocess.check_output(["git", "--version"], text=True).strip()
        # format: git version X.Y.Z
        parts = out.split()
        if len(parts) >= 3:
            return parts[2]
    except Exception:
        pass
    return "unknown"

def git_request_pull(
    repo: str,
    base_ref: str,
    push_url: str,
    local_ref: str,
    remote_branch: str,
) -> Optional[str]:
    """
    Generate a git request-pull message.
    Returns the request-pull output (without diffstat), or None on error.
    """
    try:
        # Format: git request-pull <start> <url> <local>:<remote>
        cmd = ["git", "-C", repo, "request-pull", base_ref, push_url, f"{local_ref}:{remote_branch}"]
        output = subprocess.check_output(cmd, text=True, stderr=subprocess.STDOUT)
        # Remove the diffstat portion (everything after the first file stats line)
        # The format is: URL line, then empty line, then diffstat
        lines = output.splitlines()
        result_lines = []
        for line in lines:
            # Stop before diffstat (lines with file changes like " file | N +++ ---")
            if re.match(r"^\s+\S+.*\|\s+\d+", line):
                break
            # Also stop at "N files changed" summary
            if re.match(r"^\s*\d+ files? changed", line):
                break
            result_lines.append(line)
        # Remove trailing empty lines
        while result_lines and not result_lines[-1].strip():
            result_lines.pop()
        return "\n".join(result_lines)
    except subprocess.CalledProcessError as e:
        return None

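The helper is a thin wrapper around `git request-pull <base> <url> <local>:<remote>` with the trailing diffstat trimmed; a hedged call with invented refs and URL is shown below.

```python
# Hedged sketch; repo path, base ref, and push URL are all placeholders.
from bitbake_project.commands.common import git_request_pull

msg = git_request_pull(
    repo="/srv/layers/meta-example",
    base_ref="origin/master",
    push_url="ssh://git@example.com/meta-example.git",
    local_ref="HEAD",
    remote_branch="example/feature",
)
print(msg if msg is not None else "request-pull failed (bad refs or unreachable URL?)")
```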
def push_branch_to_target(
    repo: str,
    push_url: str,
    local_ref: str,
    remote_branch: str,
    force: bool = False,
) -> Tuple[bool, str]:
    """
    Push a branch to the push target.
    Returns (success, message).
    """
    try:
        cmd = ["git", "-C", repo, "push", push_url, f"{local_ref}:{remote_branch}"]
        if force:
            cmd.insert(3, "--force")
        subprocess.run(cmd, check=True, capture_output=True, text=True)
        return True, f"Pushed {local_ref} to {push_url} {remote_branch}"
    except subprocess.CalledProcessError as e:
        return False, f"Push failed: {e.stderr.strip() or e.stdout.strip() or str(e)}"

def clean_title(subject: str) -> str:
    s = subject.strip()
    # Drop leading bracketed tokens like [PATCH 1/2], [repo], etc.
    s = re.sub(r"^(?:\[[^\]]*\]\s*)+", "", s)
    return s.strip()


def rewrite_patch_subject(patch_path: str, repo_name: str) -> None:
    """
    Normalize Subject to: [repo][PATCH NN/MM] title
    """
    try:
        with open(patch_path, encoding="utf-8") as f:
            lines = f.readlines()
    except Exception:
        return

    new_lines = []
    changed = False
    subj_re_patch = re.compile(r"Subject:\s*(?:\[[^\]]*\]\s*)*\[PATCH\s+(\d+)/(\d+)\]\s*(.*)", re.IGNORECASE)
    subj_re_repo_num = re.compile(r"Subject:\s*\[+([^\]]+?)\]+\s*\[?(\d+)/(\d+)\]?\s*(.*)", re.IGNORECASE)
    subj_re_num_repo = re.compile(r"Subject:\s*\[?(\d+)/(\d+)\]?\s*\[+([^\]]+?)\]+\s*(.*)", re.IGNORECASE)
    for line in lines:
        if not changed and line.lower().startswith("subject:"):
            m = subj_re_patch.match(line)
            repo = repo_name
            if m:
                num, den, rest = m.groups()
            else:
                m = subj_re_repo_num.match(line)
                if m:
                    repo, num, den, rest = m.groups()
                    repo = repo.strip("[]") or repo_name
                else:
                    m = subj_re_num_repo.match(line)
                    if m:
                        num, den, repo, rest = m.groups()
                        repo = repo.strip("[]") or repo_name
                    else:
                        new_lines.append(line)
                        continue

            # strip any leading bracket tokens from rest
            while rest.startswith("[") and "]" in rest:
                rest = rest.split("]", 1)[1].strip()
            new_line = f"Subject: [{repo}][PATCH {num}/{den}] {rest}\n"
            new_lines.append(new_line)
            changed = True
            continue
        new_lines.append(line)

    if changed:
        try:
            with open(patch_path, "w", encoding="utf-8") as f:
                f.writelines(new_lines)
        except Exception:
            pass

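The three regexes accept the common subject layouts and rewrite them to one normalized form. The subjects below are invented; the snippet exercises the first pattern directly.

```python
# Invented examples of the normalization performed above:
#   "Subject: [PATCH 3/7] fix build"     -> "Subject: [meta-foo][PATCH 3/7] fix build"
#   "Subject: [meta-foo] 3/7 fix build"  -> "Subject: [meta-foo][PATCH 3/7] fix build"
#   "Subject: 3/7 [meta-foo] fix build"  -> "Subject: [meta-foo][PATCH 3/7] fix build"
# ("meta-foo" comes from the subject itself or falls back to repo_name).
import re

subj_re_patch = re.compile(
    r"Subject:\s*(?:\[[^\]]*\]\s*)*\[PATCH\s+(\d+)/(\d+)\]\s*(.*)", re.IGNORECASE
)
print(subj_re_patch.match("Subject: [PATCH 3/7] fix build").groups())
# -> ('3', '7', 'fix build')
```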
def load_resume(resume_file: str) -> Optional[Tuple[int, List[str]]]:
    if not os.path.exists(resume_file):
        return None
    with open(resume_file, encoding="utf-8") as f:
        lines = [line.rstrip("\n") for line in f]
    if not lines:
        return None
    try:
        idx = int(lines[0])
    except ValueError:
        return None
    return idx, lines[1:]


def save_resume(resume_file: str, next_idx: int, repos: List[str]) -> None:
    with open(resume_file, "w", encoding="utf-8") as f:
        f.write(f"{next_idx}\n")
        for repo in repos:
            f.write(f"{repo}\n")

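The resume file is deliberately simple: the first line is the next index to process, every following line is a repo path. A round-trip sketch with a temporary file:

```python
# Hedged sketch; the repo paths are invented.
import os
import tempfile

from bitbake_project.commands.common import load_resume, save_resume

resume = os.path.join(tempfile.mkdtemp(), "resume.txt")
save_resume(resume, 2, ["/srv/layers/meta-a", "/srv/layers/meta-b"])
print(load_resume(resume))  # -> (2, ['/srv/layers/meta-a', '/srv/layers/meta-b'])
```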
def get_extra_repos(defaults: dict) -> List[str]:
    """Get list of extra repos from defaults."""
    return defaults.get("__extra_repos__", [])


def get_hidden_repos(defaults: dict) -> List[str]:
    """Get list of hidden repos from defaults."""
    return defaults.get("__hidden_repos__", [])


def add_extra_repo(defaults_file: str, defaults: dict, repo_path: str) -> None:
    """Add a repo to the extra repos list and save."""
    extra = defaults.get("__extra_repos__", [])
    if repo_path not in extra:
        extra.append(repo_path)
    defaults["__extra_repos__"] = extra
    save_defaults(defaults_file, defaults)


def add_hidden_repo(defaults_file: str, defaults: dict, repo_path: str) -> None:
    """Add a repo to the hidden repos list and save."""
    hidden = defaults.get("__hidden_repos__", [])
    if repo_path not in hidden:
        hidden.append(repo_path)
    defaults["__hidden_repos__"] = hidden
    save_defaults(defaults_file, defaults)


def remove_hidden_repo(defaults_file: str, defaults: dict, repo_path: str) -> None:
    """Remove a repo from the hidden repos list and save."""
    hidden = defaults.get("__hidden_repos__", [])
    if repo_path in hidden:
        hidden.remove(repo_path)
    defaults["__hidden_repos__"] = hidden
    save_defaults(defaults_file, defaults)


def get_push_target(defaults: dict, repo_path: str) -> Optional[Dict[str, str]]:
    """Get push target config for a repo. Returns dict with push_url, branch_prefix, or None."""
    targets = defaults.get("__push_targets__", {})
    return targets.get(repo_path)


def set_push_target(
    defaults_file: str,
    defaults: dict,
    repo_path: str,
    push_url: str,
    branch_prefix: str = "",
) -> None:
    """Set push target config for a repo."""
    targets = defaults.get("__push_targets__", {})
    targets[repo_path] = {
        "push_url": push_url,
        "branch_prefix": branch_prefix,
    }
    defaults["__push_targets__"] = targets
    save_defaults(defaults_file, defaults)


def remove_push_target(defaults_file: str, defaults: dict, repo_path: str) -> None:
    """Remove push target config for a repo."""
    targets = defaults.get("__push_targets__", {})
    if repo_path in targets:
        del targets[repo_path]
    defaults["__push_targets__"] = targets
    save_defaults(defaults_file, defaults)

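All of these helpers edit reserved double-underscore keys in the defaults dict and then persist it through save_defaults(). A sketch of the shape they maintain; the paths and URL are invented, and "defaults.json" is only an example file name.

```python
# Hedged sketch of the defaults shape after, e.g.:
#   add_extra_repo("defaults.json", defaults, "/srv/tools/helper-scripts")
#   add_hidden_repo("defaults.json", defaults, "/srv/layers/meta-unused")
#   set_push_target("defaults.json", defaults, "/srv/layers/meta-a",
#                   "ssh://git@example.com/meta-a.git", branch_prefix="contrib/")
defaults = {
    "__extra_repos__": ["/srv/tools/helper-scripts"],
    "__hidden_repos__": ["/srv/layers/meta-unused"],
    "__push_targets__": {
        "/srv/layers/meta-a": {
            "push_url": "ssh://git@example.com/meta-a.git",
            "branch_prefix": "contrib/",
        },
    },
}
```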
def resolve_bblayers_path(path_opt: Optional[str]) -> Optional[str]:
    """Find bblayers.conf path, or return None if not found and not specified."""
    if path_opt:
        return path_opt
    candidates = ["conf/bblayers.conf", "build/conf/bblayers.conf"]
    for cand in candidates:
        if os.path.exists(cand):
            return cand
    return None

@dataclass
class RepoSets:
    """Tracks different categories of repos and layers."""
    discovered: Set[str]  # Discovered repos (magenta, "(?)")
    external: Set[str]  # Non-layer git repos (cyan, "(ext)")
    hidden: Set[str]  # Hidden repos (not shown unless toggled)
    configured_layers: Set[str]  # Layers from bblayers.conf (vs discovered)


@dataclass
class LayerInfo:
    """Layer metadata from layer.conf."""
    name: str  # Collection name from BBFILE_COLLECTIONS
    path: str  # Filesystem path to the layer
    depends: List[str]  # Required dependencies from LAYERDEPENDS
    recommends: List[str]  # Optional dependencies from LAYERRECOMMENDS
    priority: int  # Layer priority from BBFILE_PRIORITY

def parse_layer_conf(layer_path: str) -> Optional[LayerInfo]:
    """
    Parse a layer's conf/layer.conf to extract dependency information.

    Returns LayerInfo with collection name, dependencies, and priority,
    or None if the layer.conf doesn't exist or can't be parsed.
    """
    conf_path = os.path.join(layer_path, "conf", "layer.conf")
    if not os.path.isfile(conf_path):
        return None

    try:
        with open(conf_path, "r", encoding="utf-8", errors="ignore") as f:
            content = f.read()
    except (OSError, IOError):
        return None

    # Handle line continuations
    content = content.replace("\\\n", " ")

    # Extract BBFILE_COLLECTIONS (the layer's collection name)
    # Format: BBFILE_COLLECTIONS += "layer-name"
    collection_match = re.search(
        r'BBFILE_COLLECTIONS\s*\+?=\s*"([^"]*)"', content
    )
    collection_name = ""
    if collection_match:
        collection_name = collection_match.group(1).strip()

    if not collection_name:
        # Try to derive from layer path as fallback
        collection_name = os.path.basename(layer_path)
        if collection_name.startswith("meta-"):
            collection_name = collection_name[5:]

    # Extract LAYERDEPENDS_<collection> (required dependencies)
    # Format: LAYERDEPENDS_<collection> = "dep1 dep2:version dep3 (>= 4)"
    # Version constraints can be "layer:version" or "(>= version)" after a layer name
    depends = []
    depends_pattern = re.compile(
        r'LAYERDEPENDS_' + re.escape(collection_name) + r'\s*\+?=\s*"([^"]*)"'
    )
    for match in depends_pattern.finditer(content):
        deps_str = match.group(1).strip()
        # Remove parenthetical version constraints like "(>= 12)" or "(< 5)"
        deps_str = re.sub(r'\([^)]*\)', '', deps_str)
        for dep in deps_str.split():
            # Strip version constraints (e.g., "core:4" -> "core")
            dep_name = dep.split(":")[0].strip()
            # Skip empty strings and things that look like version numbers
            if dep_name and dep_name not in depends and not dep_name.isdigit():
                depends.append(dep_name)

    # Extract LAYERRECOMMENDS_<collection> (optional dependencies)
    recommends = []
    recommends_pattern = re.compile(
        r'LAYERRECOMMENDS_' + re.escape(collection_name) + r'\s*\+?=\s*"([^"]*)"'
    )
    for match in recommends_pattern.finditer(content):
        rec_str = match.group(1).strip()
        # Remove parenthetical version constraints
        rec_str = re.sub(r'\([^)]*\)', '', rec_str)
        for rec in rec_str.split():
            rec_name = rec.split(":")[0].strip()
            if rec_name and rec_name not in recommends and not rec_name.isdigit():
                recommends.append(rec_name)

    # Extract BBFILE_PRIORITY_<collection>
    # Format: BBFILE_PRIORITY_<collection> = "6"
    priority = 0
    priority_pattern = re.compile(
        r'BBFILE_PRIORITY_' + re.escape(collection_name) + r'\s*=\s*"?(\d+)"?'
    )
    priority_match = priority_pattern.search(content)
    if priority_match:
        try:
            priority = int(priority_match.group(1))
        except ValueError:
            pass

    return LayerInfo(
        name=collection_name,
        path=layer_path,
        depends=depends,
        recommends=recommends,
        priority=priority,
    )

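A sketch of what parse_layer_conf() extracts from a minimal, invented conf/layer.conf:

```python
# Hedged sketch; the layer.conf content is invented but uses the variables parsed above.
import os
import tempfile

from bitbake_project.commands.common import parse_layer_conf

layer = tempfile.mkdtemp()
os.makedirs(os.path.join(layer, "conf"))
with open(os.path.join(layer, "conf", "layer.conf"), "w") as f:
    f.write(
        'BBFILE_COLLECTIONS += "example"\n'
        'BBFILE_PRIORITY_example = "6"\n'
        'LAYERDEPENDS_example = "core openembedded-layer (>= 12)"\n'
        'LAYERRECOMMENDS_example = "meta-python"\n'
    )

info = parse_layer_conf(layer)
print(info.name, info.priority)  # example 6
print(info.depends)              # ['core', 'openembedded-layer']
print(info.recommends)           # ['meta-python']
```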
def resolve_base_and_layers(
    path_opt: Optional[str],
    defaults: Optional[dict] = None,
    include_external: bool = True,
    discover_all: bool = True,
) -> Tuple[List[Tuple[str, str]], RepoSets]:
    """
    Resolve layers from bblayers.conf and discover additional layers/repos.

    Args:
        path_opt: Path to bblayers.conf (or None for auto-detect)
        defaults: Defaults dict (for extra_repos and hidden_repos)
        include_external: Whether to discover non-layer git repos
        discover_all: Whether to discover layers not in bblayers.conf

    Returns:
        Tuple of (pairs, repo_sets) where:
        - pairs: List of (layer_path, repo_path) tuples
        - repo_sets: RepoSets with discovered, external, and hidden repo sets
    """
    defaults = defaults or {}
    pairs: List[Tuple[str, str]] = []
    bblayers_layer_paths: Set[str] = set()
    known_repos: Set[str] = set()
    discovered_repos: Set[str] = set()
    external_repos: Set[str] = set()

    extra_repos_list = get_extra_repos(defaults)
    hidden_repos_set = set(get_hidden_repos(defaults))

    # Try to get layers from bblayers.conf
    bblayers = resolve_bblayers_path(path_opt)
    if bblayers:
        layers = extract_layer_paths(bblayers)
        for layer in layers:
            # Normalize path with realpath to resolve symlinks and avoid duplicates
            layer = os.path.realpath(layer)
            repo = git_toplevel(layer)
            if not repo:
                print(f"Warning: {layer} is not inside a git repo; skipping.")
                continue
            pairs.append((layer, repo))
            bblayers_layer_paths.add(layer)
            known_repos.add(repo)

    # Add extra repos from config (these are tracked, not external)
    for repo_path in extra_repos_list:
        repo_path = os.path.realpath(repo_path)
        if os.path.isdir(repo_path) and repo_path not in known_repos:
            repo = git_toplevel(repo_path)
            if repo:
                # Add as a pseudo-layer (repo path is both layer and repo)
                pairs.append((repo, repo))
                known_repos.add(repo)

    # Collect parent directories of known repos to check for peer repos/layers
    # e.g., if layers are in /path/project/layers/, also check /path/project/
    # But avoid searching from overly broad directories like home or root
    home_dir = os.path.expanduser("~")
    too_broad = {"/", "/home", "/opt", "/usr", home_dir}
    peer_dirs: Set[str] = set()
    for repo in known_repos:
        parent = os.path.dirname(repo)
        if parent and parent not in too_broad:
            peer_dirs.add(parent)

    # Discover additional layers not in bblayers.conf (only if discover_all is True)
    if discover_all:
        discovered = discover_layers(exclude_layers=bblayers_layer_paths, peer_dirs=peer_dirs)
        for layer, repo in discovered:
            # Add all discovered layers to pairs
            pairs.append((layer, repo))
            # But only mark repo as discovered once
            if repo not in known_repos:
                discovered_repos.add(repo)
                known_repos.add(repo)

    # Discover external git repos (non-layers) - only if discovery is enabled
    if include_external and discover_all:
        external = discover_git_repos(exclude_repos=known_repos, peer_dirs=peer_dirs)
        for repo in external:
            external_repos.add(repo)

    if not pairs and not external_repos:
        if bblayers:
            sys.exit("No git repositories found for the configured layers.")
        else:
            sys.exit("No layers found. Provide --bblayers, or use -a to discover layers in the current directory.")

    repo_sets = RepoSets(
        discovered=discovered_repos,
        external=external_repos,
        hidden=hidden_repos_set,
        configured_layers=bblayers_layer_paths,
    )
    return pairs, repo_sets

def collect_repos(
    path_opt: Optional[str],
    defaults: Optional[dict] = None,
    include_external: bool = False,
    discover_all: bool = False,
) -> Tuple[List[str], RepoSets]:
    """
    Collect unique repos from bblayers.conf and discovered layers.

    Args:
        path_opt: Path to bblayers.conf (or None for auto-detect)
        defaults: Defaults dict (for extra_repos and hidden_repos)
        include_external: Whether to discover non-layer git repos
        discover_all: Whether to discover layers not in bblayers.conf

    Returns:
        Tuple of (repos, repo_sets) where:
        - repos: Deduplicated list of repo paths
        - repo_sets: RepoSets with discovered, external, and hidden repo sets
    """
    pairs, repo_sets = resolve_base_and_layers(path_opt, defaults, include_external, discover_all=discover_all)
    repos = dedupe_preserve_order(repo for _, repo in pairs)
    return repos, repo_sets

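collect_repos() is the usual entry point for commands that only need the deduplicated repo list. A hedged sketch, run from a build directory where bblayers.conf can be auto-detected (otherwise resolve_base_and_layers() exits with an error message):

```python
from bitbake_project.commands.common import collect_repos

repos, repo_sets = collect_repos(None, defaults={}, include_external=True, discover_all=True)
for repo in repos:
    tag = "(?)" if repo in repo_sets.discovered else ""
    print(repo, tag)
```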
def get_repo_commit_info(repo: str) -> Tuple[Optional[str], bool, str, int, str, str]:
    branch = current_branch(repo)
    if not branch:
        return None, False, "", 0, "", "detached head"

    remote_ref = f"origin/{branch}"
    remote_exists = (
        subprocess.run(
            ["git", "-C", repo, "rev-parse", "--verify", remote_ref],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        ).returncode
        == 0
    )

    if remote_exists:
        count = int(
            subprocess.check_output(
                ["git", "-C", repo, "rev-list", "--count", f"{remote_ref}..HEAD"],
                text=True,
            ).strip()
        )
        range_spec = f"{remote_ref}..HEAD"
        desc = f"local commits vs {remote_ref}"
    else:
        count = int(subprocess.check_output(["git", "-C", repo, "rev-list", "--count", "HEAD"], text=True).strip())
        range_spec = "--root"
        desc = "commits from root (no origin/<branch>)"

    return branch, remote_exists, remote_ref, count, range_spec, desc

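A sketch of consuming the six-field tuple returned above; the repo path is invented:

```python
from bitbake_project.commands.common import get_repo_commit_info

branch, remote_exists, remote_ref, count, range_spec, desc = get_repo_commit_info("/srv/layers/meta-a")
if branch is None:
    print("detached HEAD; nothing to summarize")
else:
    # range_spec is "origin/<branch>..HEAD" when the remote branch exists, else "--root"
    print(f"{branch}: {count} {desc} ({range_spec})")
```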
def show_log_for_pick(repo: str, max_entries: int = 30) -> None:
    try:
        out = subprocess.check_output(
            [
                "git",
                "-C",
                repo,
                "log",
                "--oneline",
                "--decorate",
                f"-n{max_entries}",
            ],
            text=True,
        )
    except subprocess.CalledProcessError:
        print(f"{repo}: failed to read log for pick mode.")
        return
    print(out.rstrip())


# Shared command list for menus - add new commands here
COMMANDS = [
    ("explore", "Interactively explore commits in layer repos"),
    ("update", "Update git repos referenced by layers in bblayers.conf"),
    ("status", "Show local commit summary for layer repos"),
    ("config", "View and configure repo/layer settings"),
    ("branch", "View and switch branches across repos"),
    ("export", "Export patches from layer repos"),
    ("repos", "List layer repos"),
    ("init", "Initialize/setup OE/Yocto build environment"),
    ("init clone", "Show or clone core Yocto/OE repositories"),
    ("search", "Search OpenEmbedded Layer Index for layers"),
]

def fzf_command_menu() -> Optional[str]:
    """Show fzf menu to select a subcommand. Returns command name or None if cancelled."""
    # Commands with their subcommands (alphabetical order)
    commands = [
        ("branch", "View and switch branches across repos", []),
        ("config", "View and configure repo/layer settings", []),
        ("export", "Export patches from layer repos", [
            ("export prep", "Prepare commits for export (reorder/group)"),
        ]),
        ("explore", "Interactively explore commits in layer repos", []),
        ("help", "Browse help for all commands", []),
        ("init", "Initialize/setup OE/Yocto build environment", [
            ("init clone", "Show or clone core Yocto/OE repositories"),
        ]),
        ("repos", "List layer repos", [
            ("repos status", "Show one-liner status for each repo"),
        ]),
        ("search", "Search OpenEmbedded Layer Index for layers", []),
        ("status", "Show local commit summary for layer repos", []),
        ("update", "Update git repos referenced by layers in bblayers.conf", []),
    ]

    return fzf_expandable_menu(
        commands,
        header="Enter=run | \\=expand/collapse | q=quit",
        prompt="bit ",
        height="~80%",
    )

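The menu is driven by (name, description, subcommands) triples handed to fzf_expandable_menu() from core; a hedged sketch of the same call shape with a hypothetical "deps" entry:

```python
# "deps" and "deps graph" are hypothetical entries, shown only to illustrate the tuple shape.
from bitbake_project.core import fzf_expandable_menu

commands = [
    ("deps", "Inspect layer dependencies", [
        ("deps graph", "Render the dependency graph"),
    ]),
]
choice = fzf_expandable_menu(
    commands,
    header="Enter=run | \\=expand/collapse | q=quit",
    prompt="bit ",
    height="~80%",
)
print(choice)  # selected command name, or a falsy value when cancelled
```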
def fzf_help_browser() -> Optional[str]:
    """
    Interactive help browser with preview pane.
    Returns command name to run, or None to exit.
    """
    # Get the script path for preview commands
    script_path = os.path.abspath(sys.argv[0])

    # Commands with their subcommands (alphabetical order)
    commands = [
        ("(general)", "Overview and global options", []),
        ("branch", "View and switch branches across repos", []),
        ("config", "View and configure repo/layer settings", []),
        ("export", "Export patches from layer repos", [
            ("export prep", "Prepare commits for export (reorder/group)"),
        ]),
        ("explore", "Interactively explore commits in layer repos", []),
        ("help", "Browse help for all commands", []),
        ("init", "Initialize/setup OE/Yocto build environment", [
            ("init clone", "Show or clone core Yocto/OE repositories"),
        ]),
        ("repos", "List layer repos", [
            ("repos status", "Show one-liner status for each repo"),
        ]),
        ("search", "Search OpenEmbedded Layer Index for layers", []),
        ("status", "Show local commit summary for layer repos", []),
        ("update", "Update git repos referenced by layers in bblayers.conf", []),
    ]

    # Preview command: show help for the selected command
    preview_cmd = (
        f'cmd={{1}}; '
        f'if [ "$cmd" = "(general)" ]; then '
        f' "{script_path}" --help 2>&1; '
        f'elif [ "$cmd" = "export prep" ]; then '
        f' "{script_path}" export prep --help 2>&1; '
        f'elif [ "$cmd" = "init clone" ]; then '
        f' "{script_path}" init clone --help 2>&1; '
        f'elif [ "$cmd" = "repos status" ]; then '
        f' "{script_path}" repos status --help 2>&1; '
        f'else '
        f' "{script_path}" "$cmd" --help 2>&1; '
        f'fi'
    )

    # Options provider: parse --help output for command options
    def get_options(cmd: str) -> List[Tuple[str, str]]:
        if cmd == "(general)":
            return parse_help_options(script_path, "")
        return parse_help_options(script_path, cmd)

    selected = fzf_expandable_menu(
        commands,
        header="Enter=run | \\=expand | v=options | ?=preview | q=quit",
        prompt="Select command: ",
        height="100%",
        preview_cmd=preview_cmd,
        preview_window="right,60%,wrap",
        options_provider=get_options,
    )

    if not selected:
        return None

    # Don't try to "run" general help
    if selected == "(general)":
        return None
    # For subcommands, return the parent command
    if selected == "export prep":
        return "export"
    if selected == "init clone":
        return "init"
    if selected == "repos status":
        return "repos"

    return selected

def fzf_pick_range(repo: str, branch: str, default_range: Optional[str] = None,
                   prev_range: Optional[str] = None, max_entries: int = 100,
                   prev_was_skip: bool = False) -> Optional[str]:
    """
    Use fzf to interactively select a commit range.
    Returns a git range string like 'abc123^..def456', or:
    - None if cancelled (Escape)
    - "SKIP" if user wants to skip this repo
    - "SKIP_REST" if user wants to skip all remaining repos
    - "USE_DEFAULT" to use the default range (all local commits)
    - "USE_PREVIOUS" to use the previous range

    If prev_was_skip is True, the skip option is shown first as the default.
    """
    # Only show commits not in origin (local commits)
    remote_ref = f"origin/{branch}"
    remote_exists = subprocess.run(
        ["git", "-C", repo, "rev-parse", "--verify", remote_ref],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    ).returncode == 0

    try:
        if remote_exists:
            # Show only commits not in origin
            log_output = subprocess.check_output(
                ["git", "-C", repo, "log", "--oneline", "--decorate", f"{remote_ref}..HEAD"],
                text=True,
            )
        else:
            # No remote, show recent commits
            log_output = subprocess.check_output(
                ["git", "-C", repo, "log", "--oneline", "--decorate", f"-n{max_entries}"],
                text=True,
            )
    except subprocess.CalledProcessError:
        return None

    if not log_output.strip():
        print(f"{repo}: no local commits to export.")
        return "SKIP"

    # Count local commits
    local_count = len(log_output.strip().splitlines())

    # Build menu with options at top
    menu_lines = []

    skip_line = "►► Skip this repo (s)"
    skip_all_line = "── [S] Skip all remaining repos ──"

    # If previously skipped, show skip as first option (default)
    if prev_was_skip:
        menu_lines.append(skip_line)

    # Add "use previous" option if we have a previous range from last export
    if prev_range and prev_range != default_range:
        menu_lines.append(f"►► Use previous: {prev_range}")

    # Add "include all" option
    if remote_exists:
        menu_lines.append(f"►► Include all {local_count} local commits ({remote_ref}..HEAD)")
    else:
        menu_lines.append(f"►► Include all {local_count} commits")

    # If not previously skipped, show skip after include options
    if not prev_was_skip:
        menu_lines.append(skip_line)
    menu_lines.append(skip_all_line)

    menu_input = "\n".join(menu_lines) + "\n" + log_output

    header = f"repo: {repo} branch: {branch}\n"
    header += "Space=range | Tab=single | s=skip | S=skip all | Enter=confirm"

    # Temp file for tracking range markers (consistent with prep command)
    range_file = f"/tmp/fzf_range_{os.getpid()}"
    if os.path.exists(range_file):
        os.remove(range_file)

    # Shell script to build prompt showing range marker count
    prompt_script = (
        f'rng=; '
        f'[ -f {range_file} ] && {{ n=$(wc -l < {range_file}); [ "$n" -gt 0 ] && rng="[range:$n]"; }}; '
        f'echo "Select$rng: "'
    )

    try:
        result = subprocess.run(
            [
                "fzf",
                "--multi",
                "--no-sort",
                "--height", "~50%",  # Inline, fit content up to 50% of terminal
                "--header", header,
                "--prompt", "Select: ",
                "--color", "header:italic",
                "--bind", "tab:toggle",  # Tab for single commit toggle
                "--bind", f"space:toggle+execute-silent(echo {{}} >> {range_file})+transform-prompt({prompt_script})+down",  # Space marks range
                "--bind", "s:become(echo SKIP_THIS)",  # Shortcut for skip
                "--bind", "S:become(echo SKIP_REST)",  # Shortcut for skip all
            ] + get_fzf_color_args(),
            input=menu_input,
            stdout=subprocess.PIPE,  # Capture selection output
            # Don't capture stderr - let fzf render to terminal
            text=True,
        )
    except FileNotFoundError:
        if os.path.exists(range_file):
            os.remove(range_file)
        return None

    # Clean up range file
    if os.path.exists(range_file):
        os.remove(range_file)

    if result.returncode != 0 or not result.stdout.strip():
        # Escape pressed or no selection
        return None

    selected = result.stdout.strip().splitlines()
    if not selected:
        return None

    # Check for shortcut keys and menu options
    for sel in selected:
        if sel.strip() == "SKIP_THIS":
            return "SKIP"
        if sel.strip() == "SKIP_REST":
            return "SKIP_REST"
        if "Use previous:" in sel:
            return "USE_PREVIOUS"
        if "Include all" in sel:
            return "USE_DEFAULT"
        if "Skip this repo" in sel:
            return "SKIP"
        if "Skip all remaining" in sel:
            return "SKIP_REST"

    # Extract commit hashes from selected lines
    log_lines = log_output.strip().splitlines()
    hash_to_pos = {}
    for i, line in enumerate(log_lines):
        parts = line.split(maxsplit=1)
        if parts:
            hash_to_pos[parts[0]] = i

    commits = []
    for line in selected:
        parts = line.split(maxsplit=1)
        if parts and parts[0] in hash_to_pos:
            commits.append(parts[0])

    if not commits:
        return None

    if len(commits) == 1:
        # Single commit selected
        return f"{commits[0]}^..{commits[0]}"

    # Multiple commits - sort by position (lower = newer in git log)
    commits_sorted = sorted(commits, key=lambda c: hash_to_pos.get(c, 0))
    newest = commits_sorted[0]
    oldest = commits_sorted[-1]
    return f"{oldest}^..{newest}"

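Callers have to distinguish the sentinel strings from a real range; a hedged sketch that expands a concrete "oldest^..newest" result back into commits:

```python
# Hedged sketch; repo path and branch are placeholders, and fzf must be installed.
import subprocess

from bitbake_project.commands.common import fzf_pick_range

choice = fzf_pick_range("/srv/layers/meta-a", "master")
if choice in (None, "SKIP", "SKIP_REST", "USE_DEFAULT", "USE_PREVIOUS"):
    print(f"sentinel result: {choice}")
else:
    commits = subprocess.check_output(
        ["git", "-C", "/srv/layers/meta-a", "rev-list", "--reverse", choice],
        text=True,
    ).split()
    print(f"{len(commits)} commit(s) in range {choice}")
```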
def prepare_target_dir(target: str, force: bool) -> None:
    if not os.path.exists(target):
        os.makedirs(target, exist_ok=True)
        return

    if not os.path.isdir(target):
        sys.exit(f"Target path '{target}' exists and is not a directory.")

    existing = os.listdir(target)
    if existing and force:
        for entry in existing:
            entry_path = os.path.join(target, entry)
            if os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
            else:
                os.remove(entry_path)
    elif existing and not force:
        sys.exit(f"Target directory '{target}' is not empty; use --force to proceed.")


def prompt_export(repo: str, layer: str, info: Tuple[Optional[str], bool, str, int, str, str], default_include: bool, display_name: str = None) -> Tuple[bool, bool]:
    branch, remote_exists, remote_ref, count, _range, desc = info
    name = display_name or repo
    if count == 0:
        print(f"{name}: no local commits to export.")
        return False, False
    if not branch:
        print(f"{name}: detached HEAD; skipping.")
        return False, False

    default_hint = "Y/n" if default_include else "y/N"
    prompt = f"Include {name} ({repo}) [{desc}, {count} commits]? ({default_hint}, S=skip rest) "
    resp = input(prompt).strip()
    if not resp:
        return default_include, False
    if resp == "S":
        return False, True
    if resp.lower().startswith("y"):
        return True, False
    if resp.lower() in {"n", "s"}:
        return False, False
    return default_include, False


# ------------------------ Commands ------------------------

def copy_to_clipboard(text: str) -> bool:
    """Copy text to clipboard using available clipboard tool. Returns True on success."""
    import shutil

    # Try different clipboard commands
    clipboard_cmds = [
        ["xclip", "-selection", "clipboard"],
        ["xsel", "--clipboard", "--input"],
        ["pbcopy"],  # macOS
    ]

    for cmd in clipboard_cmds:
        if shutil.which(cmd[0]):
            try:
                subprocess.run(cmd, input=text, text=True, check=True)
                return True
            except subprocess.CalledProcessError:
                continue
    return False

def find_repo_by_identifier(repos: List[str], identifier: str, defaults: Dict[str, str]) -> Optional[str]:
    """
    Find a repo by index, display name, path, or partial match.
    Returns the repo path or None if not found.
    """
    # Try as index
    try:
        idx = int(identifier)
        if 1 <= idx <= len(repos):
            return repos[idx - 1]
        return None
    except ValueError:
        pass

    # Try matching by display name (case-insensitive)
    for repo in repos:
        if repo_display_name(repo).lower() == identifier.lower():
            return repo

    # Try as absolute/relative path
    if os.path.isdir(identifier):
        abs_path = os.path.abspath(identifier)
        if abs_path in repos:
            return abs_path

    # Try partial path match
    for repo in repos:
        if identifier in repo or repo.endswith(identifier):
            return repo

    return None

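Identifiers may be a 1-based index, a display name (via repo_display_name(), defined elsewhere in this module and assumed to derive a name from the path alone), a path, or a substring; the defaults argument is accepted but not consulted by the matching logic above. A hedged sketch with invented paths:

```python
from bitbake_project.commands.common import find_repo_by_identifier

repos = ["/srv/layers/meta-a", "/srv/layers/meta-b"]
print(find_repo_by_identifier(repos, "2", {}))       # index        -> /srv/layers/meta-b
print(find_repo_by_identifier(repos, "meta-a", {}))  # name or path -> /srv/layers/meta-a
print(find_repo_by_identifier(repos, "nope", {}))    # no match     -> None
```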
def export_single_patch(repo: str, commit: str, target_dir: str = ".") -> Optional[str]:
    """Export a single commit as a .patch file using git format-patch. Returns full path or None on error."""
    try:
        # Use git format-patch to create file with standard naming (0001-subject.patch)
        output = subprocess.check_output(
            ["git", "-C", repo, "format-patch", "-1", commit, "-o", target_dir],
            text=True,
        ).strip()
        # git format-patch outputs the created filename
        return output
    except subprocess.CalledProcessError:
        return None

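A hedged sketch for export_single_patch(); the repo path is a placeholder and the output name follows git format-patch's 0001-&lt;subject&gt;.patch convention:

```python
import os

from bitbake_project.commands.common import export_single_patch

patch = export_single_patch("/srv/layers/meta-a", "HEAD", target_dir="/tmp/patches")
print(os.path.basename(patch) if patch else "format-patch failed")
```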
def export_commits_from_explore(repo: str, commits: List[str]) -> None:
    """Export one or more commits as patch files. Prompts for directory if multiple."""
    if not commits:
        return

    # Get current directory for display
    cwd = os.getcwd()

    if len(commits) == 1:
        # Single commit - export to current directory
        print(f"\nExporting to {cwd}...")
        filepath = export_single_patch(repo, commits[0], cwd)
        if filepath:
            print(f"  {os.path.basename(filepath)}")
        else:
            print(f"  Failed to export {commits[0][:8]}")
        input("Press Enter to continue...")
        return

    # Multiple commits - prompt for target directory
    print(f"\nExporting {len(commits)} commits...")
    try:
        default_target = os.path.expanduser("~/patches")
        target_dir = input(f"Target directory [{default_target}]: ").strip()
        if not target_dir:
            target_dir = default_target
        target_dir = os.path.expanduser(target_dir)
    except (EOFError, KeyboardInterrupt):
        print("\nCancelled.")
        return

    # Create directory if needed
    os.makedirs(target_dir, exist_ok=True)

    print(f"Exporting to {target_dir}...")

    # Export each commit using git format-patch (standard naming)
    exported = []
    for i, commit in enumerate(commits, 1):
        try:
            # Use git format-patch with start-number for proper sequencing
            output = subprocess.check_output(
                ["git", "-C", repo, "format-patch", "-1", commit, "-o", target_dir,
                 f"--start-number={i}"],
                text=True,
            ).strip()
            exported.append(os.path.basename(output))
            print(f"  {os.path.basename(output)}")
        except subprocess.CalledProcessError as e:
            print(f"  Failed: {commit[:8]}")

    print(f"Exported {len(exported)} patch(es)")
    input("Press Enter to continue...")