github2gerrit-0.1.5-py3-none-any.whl → github2gerrit-0.1.7-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- github2gerrit/cli.py +511 -271
- github2gerrit/commit_normalization.py +471 -0
- github2gerrit/config.py +32 -24
- github2gerrit/core.py +1092 -507
- github2gerrit/duplicate_detection.py +333 -217
- github2gerrit/external_api.py +518 -0
- github2gerrit/gerrit_rest.py +298 -0
- github2gerrit/gerrit_urls.py +353 -0
- github2gerrit/github_api.py +17 -95
- github2gerrit/gitutils.py +225 -41
- github2gerrit/models.py +3 -0
- github2gerrit/pr_content_filter.py +476 -0
- github2gerrit/similarity.py +458 -0
- github2gerrit/ssh_agent_setup.py +351 -0
- github2gerrit/ssh_common.py +244 -0
- github2gerrit/ssh_discovery.py +24 -67
- github2gerrit/utils.py +113 -0
- github2gerrit-0.1.7.dist-info/METADATA +798 -0
- github2gerrit-0.1.7.dist-info/RECORD +24 -0
- github2gerrit-0.1.5.dist-info/METADATA +0 -555
- github2gerrit-0.1.5.dist-info/RECORD +0 -15
- {github2gerrit-0.1.5.dist-info → github2gerrit-0.1.7.dist-info}/WHEEL +0 -0
- {github2gerrit-0.1.5.dist-info → github2gerrit-0.1.7.dist-info}/entry_points.txt +0 -0
- {github2gerrit-0.1.5.dist-info → github2gerrit-0.1.7.dist-info}/licenses/LICENSE +0 -0
- {github2gerrit-0.1.5.dist-info → github2gerrit-0.1.7.dist-info}/top_level.txt +0 -0
github2gerrit/cli.py
CHANGED
@@ -8,6 +8,8 @@ import logging
 import os
 import tempfile
 from collections.abc import Callable
+from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import as_completed
 from pathlib import Path
 from typing import TYPE_CHECKING
 from typing import Any
@@ -35,21 +37,13 @@ from .github_api import iter_open_pulls
 from .gitutils import run_cmd
 from .models import GitHubContext
 from .models import Inputs
-
-
-
-
-
-
-
-def _log_exception_conditionally(
-    logger: logging.Logger, message: str, *args: Any
-) -> None:
-    """Log exception with traceback only if verbose mode is enabled."""
-    if _is_verbose_mode():
-        logger.exception(message, *args)
-    else:
-        logger.error(message, *args)
+from .ssh_common import build_git_ssh_command
+from .ssh_common import build_non_interactive_ssh_env
+from .utils import append_github_output
+from .utils import env_bool
+from .utils import env_str
+from .utils import log_exception_conditionally
+from .utils import parse_bool_env


 class ConfigurationError(Exception):
@@ -74,7 +68,7 @@ def _parse_github_target(url: str) -> tuple[str | None, str | None, int | None]:
     except Exception:
         return None, None, None

-    allow_ghe =
+    allow_ghe = env_bool("ALLOW_GHE_URLS", False)
     bad_hosts = {
         "gitlab.com",
         "www.gitlab.com",
@@ -120,13 +114,9 @@ class _ContextProto(Protocol):


 class _SingleUsageGroup(BaseGroup):
-    def format_usage(
-        self, ctx: _ContextProto, formatter: _FormatterProto
-    ) -> None:
+    def format_usage(self, ctx: _ContextProto, formatter: _FormatterProto) -> None:
         # Force a simplified usage line without COMMAND [ARGS]...
-        formatter.write_usage(
-            ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: "
-        )
+        formatter.write_usage(ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: ")


 # Error message constants to comply with TRY003
@@ -137,7 +127,7 @@ _MSG_ISSUE_ID_MULTILINE = "Issue ID must be single line"
|
|
137
127
|
app: typer.Typer = typer.Typer(
|
138
128
|
add_completion=False,
|
139
129
|
no_args_is_help=False,
|
140
|
-
cls=_SingleUsageGroup,
|
130
|
+
cls=cast(Any, _SingleUsageGroup),
|
141
131
|
)
|
142
132
|
|
143
133
|
|
@@ -154,9 +144,7 @@ def _resolve_org(default_org: str | None) -> str:
 if TYPE_CHECKING:
     F = TypeVar("F", bound=Callable[..., object])

-    def typed_app_command(
-        *args: object, **kwargs: object
-    ) -> Callable[[F], F]: ...
+    def typed_app_command(*args: object, **kwargs: object) -> Callable[[F], F]: ...
 else:
     typed_app_command = app.command

@@ -172,36 +160,38 @@ def main(
     submit_single_commits: bool = typer.Option(
         False,
         "--submit-single-commits",
+        envvar="SUBMIT_SINGLE_COMMITS",
         help="Submit one commit at a time to the Gerrit repository.",
     ),
     use_pr_as_commit: bool = typer.Option(
         False,
         "--use-pr-as-commit",
+        envvar="USE_PR_AS_COMMIT",
         help="Use PR title and body as the commit message.",
     ),
     fetch_depth: int = typer.Option(
         10,
         "--fetch-depth",
         envvar="FETCH_DEPTH",
-        help="Fetch
+        help="Fetch depth for checkout.",
     ),
     gerrit_known_hosts: str = typer.Option(
         "",
         "--gerrit-known-hosts",
         envvar="GERRIT_KNOWN_HOSTS",
-        help="Known hosts entries for Gerrit SSH.",
+        help="Known hosts entries for Gerrit SSH (single or multi-line).",
     ),
     gerrit_ssh_privkey_g2g: str = typer.Option(
         "",
         "--gerrit-ssh-privkey-g2g",
         envvar="GERRIT_SSH_PRIVKEY_G2G",
-        help="SSH private key
+        help="SSH private key content used to authenticate to Gerrit.",
     ),
     gerrit_ssh_user_g2g: str = typer.Option(
         "",
         "--gerrit-ssh-user-g2g",
         envvar="GERRIT_SSH_USER_G2G",
-        help="Gerrit SSH
+        help="Gerrit SSH username (e.g. automation bot account).",
     ),
     gerrit_ssh_user_g2g_email: str = typer.Option(
         "",
@@ -269,12 +259,32 @@ def main(
         envvar="ALLOW_DUPLICATES",
         help="Allow submitting duplicate changes without error.",
     ),
+    ci_testing: bool = typer.Option(
+        False,
+        "--ci-testing/--no-ci-testing",
+        envvar="CI_TESTING",
+        help="Enable CI testing mode (overrides .gitreview, handles unrelated repos).",
+    ),
+    duplicates: str = typer.Option(
+        "open",
+        "--duplicates",
+        envvar="DUPLICATES",
+        help=(
+            'Gerrit statuses for duplicate detection (comma-separated). E.g. "open,merged,abandoned". Default: "open".'
+        ),
+    ),
+    normalise_commit: bool = typer.Option(
+        True,
+        "--normalise-commit/--no-normalise-commit",
+        envvar="NORMALISE_COMMIT",
+        help="Normalize commit messages to conventional commit format.",
+    ),
     verbose: bool = typer.Option(
         False,
         "--verbose",
         "-v",
         envvar="G2G_VERBOSE",
-        help="
+        help="Verbose output (sets loglevel to DEBUG).",
     ),
 ) -> None:
     """
@@ -289,16 +299,33 @@ def main(
     - No arguments for CI/CD environment; reads parameters from
       environment variables
     """
+    # Override boolean parameters with properly parsed environment variables
+    # This ensures that string "false" from GitHub Actions is handled correctly
+    if os.getenv("SUBMIT_SINGLE_COMMITS"):
+        submit_single_commits = parse_bool_env(os.getenv("SUBMIT_SINGLE_COMMITS"))
+
+    if os.getenv("USE_PR_AS_COMMIT"):
+        use_pr_as_commit = parse_bool_env(os.getenv("USE_PR_AS_COMMIT"))
+
+    if os.getenv("PRESERVE_GITHUB_PRS"):
+        preserve_github_prs = parse_bool_env(os.getenv("PRESERVE_GITHUB_PRS"))
+
+    if os.getenv("DRY_RUN"):
+        dry_run = parse_bool_env(os.getenv("DRY_RUN"))
+
+    if os.getenv("ALLOW_DUPLICATES"):
+        allow_duplicates = parse_bool_env(os.getenv("ALLOW_DUPLICATES"))
+
+    if os.getenv("CI_TESTING"):
+        ci_testing = parse_bool_env(os.getenv("CI_TESTING"))
     # Set up logging level based on verbose flag
     if verbose:
         os.environ["G2G_LOG_LEVEL"] = "DEBUG"
         _reconfigure_logging()
     # Normalize CLI options into environment for unified processing.
-    #
-    if submit_single_commits
-
-    if use_pr_as_commit:
-        os.environ["USE_PR_AS_COMMIT"] = "true"
+    # Explicitly set all boolean flags to ensure consistent behavior
+    os.environ["SUBMIT_SINGLE_COMMITS"] = "true" if submit_single_commits else "false"
+    os.environ["USE_PR_AS_COMMIT"] = "true" if use_pr_as_commit else "false"
     os.environ["FETCH_DEPTH"] = str(fetch_depth)
     if gerrit_known_hosts:
         os.environ["GERRIT_KNOWN_HOSTS"] = gerrit_known_hosts
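The override block above compensates for GitHub Actions delivering boolean workflow inputs as the strings "true"/"false": read naively from the environment, a literal "false" is a non-empty string and therefore truthy. parse_bool_env lives in the new github2gerrit/utils.py, whose body is not shown in this diff; a minimal sketch of the behavior the call sites appear to assume:

# Hypothetical sketch only: the actual .utils implementation is not shown in this diff.
def parse_bool_env(value: str | None) -> bool:
    """Interpret a GitHub Actions style string input as a boolean."""
    if value is None:
        return False
    return value.strip().lower() in ("1", "true", "yes", "on")

# A literal "false" from a workflow `with:` input must parse as False,
# which a plain truthiness check on the string would get wrong.
assert parse_bool_env("false") is False
assert parse_bool_env("TRUE") is True
assert parse_bool_env(None) is False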
@@ -313,10 +340,9 @@ def main(
         os.environ["ORGANIZATION"] = resolved_org
     if reviewers_email:
         os.environ["REVIEWERS_EMAIL"] = reviewers_email
-    if preserve_github_prs
-
-    if
-        os.environ["DRY_RUN"] = "true"
+    os.environ["PRESERVE_GITHUB_PRS"] = "true" if preserve_github_prs else "false"
+    os.environ["DRY_RUN"] = "true" if dry_run else "false"
+    os.environ["NORMALISE_COMMIT"] = "true" if normalise_commit else "false"
     os.environ["ALLOW_GHE_URLS"] = "true" if allow_ghe_urls else "false"
     if gerrit_server:
         os.environ["GERRIT_SERVER"] = gerrit_server
@@ -326,8 +352,10 @@ def main(
         os.environ["GERRIT_PROJECT"] = gerrit_project
     if issue_id:
         os.environ["ISSUE_ID"] = issue_id
-    if allow_duplicates
-
+    os.environ["ALLOW_DUPLICATES"] = "true" if allow_duplicates else "false"
+    os.environ["CI_TESTING"] = "true" if ci_testing else "false"
+    if duplicates:
+        os.environ["DUPLICATES"] = duplicates
     # URL mode handling
     if target_url:
         org, repo, pr = _parse_github_target(target_url)
@@ -341,6 +369,12 @@ def main(
     else:
         os.environ["SYNC_ALL_OPEN_PRS"] = "true"
         os.environ["G2G_TARGET_URL"] = "1"
+    # Debug: Show environment at CLI startup
+    log.debug("CLI startup environment check:")
+    for key in ["DRY_RUN", "CI_TESTING", "GERRIT_SERVER", "GERRIT_PROJECT"]:
+        value = os.environ.get(key, "NOT_SET")
+        log.debug("  %s = %s", key, value)
+
     # Delegate to common processing path
     try:
         _process()
@@ -349,16 +383,13 @@ def main(
         raise
     except Exception as exc:
         log.debug("main(): _process failed: %s", exc)
-
+        raise typer.Exit(code=1) from exc


 def _setup_logging() -> logging.Logger:
     level_name = os.getenv("G2G_LOG_LEVEL", "INFO").upper()
     level = getattr(logging, level_name, logging.INFO)
-    fmt = (
-        "%(asctime)s %(levelname)-8s %(name)s "
-        "%(filename)s:%(lineno)d | %(message)s"
-    )
+    fmt = "%(asctime)s %(levelname)-8s %(name)s %(filename)s:%(lineno)d | %(message)s"
     logging.basicConfig(level=level, format=fmt)
     return logging.getLogger(APP_NAME)

@@ -375,51 +406,93 @@ def _reconfigure_logging() -> None:
 log = _setup_logging()


-def _env_str(name: str, default: str = "") -> str:
-    val = os.getenv(name)
-    return val if val is not None else default
-
-
-def _env_bool(name: str, default: bool = False) -> bool:
-    val = os.getenv(name)
-    if val is None:
-        return default
-    s = val.strip().lower()
-    return s in ("1", "true", "yes", "on")
-
-
 def _build_inputs_from_env() -> Inputs:
     return Inputs(
-        submit_single_commits=
-        use_pr_as_commit=
-        fetch_depth=int(
-        gerrit_known_hosts=
-        gerrit_ssh_privkey_g2g=
-        gerrit_ssh_user_g2g=
-        gerrit_ssh_user_g2g_email=
-        organization=
-
-        ),
-
-
-
-
-
-
-
-
+        submit_single_commits=env_bool("SUBMIT_SINGLE_COMMITS", False),
+        use_pr_as_commit=env_bool("USE_PR_AS_COMMIT", False),
+        fetch_depth=int(env_str("FETCH_DEPTH", "10") or "10"),
+        gerrit_known_hosts=env_str("GERRIT_KNOWN_HOSTS"),
+        gerrit_ssh_privkey_g2g=env_str("GERRIT_SSH_PRIVKEY_G2G"),
+        gerrit_ssh_user_g2g=env_str("GERRIT_SSH_USER_G2G"),
+        gerrit_ssh_user_g2g_email=env_str("GERRIT_SSH_USER_G2G_EMAIL"),
+        organization=env_str("ORGANIZATION", env_str("GITHUB_REPOSITORY_OWNER")),
+        reviewers_email=env_str("REVIEWERS_EMAIL", ""),
+        preserve_github_prs=env_bool("PRESERVE_GITHUB_PRS", False),
+        dry_run=env_bool("DRY_RUN", False),
+        normalise_commit=env_bool("NORMALISE_COMMIT", True),
+        gerrit_server=env_str("GERRIT_SERVER", ""),
+        gerrit_server_port=env_str("GERRIT_SERVER_PORT", "29418"),
+        gerrit_project=env_str("GERRIT_PROJECT"),
+        issue_id=env_str("ISSUE_ID", ""),
+        allow_duplicates=env_bool("ALLOW_DUPLICATES", False),
+        ci_testing=env_bool("CI_TESTING", False),
+        duplicates_filter=env_str("DUPLICATES", "open"),
     )


-def _process_bulk(data: Inputs, gh: GitHubContext) ->
+def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
     client = build_client()
     repo = get_repo_from_env(client)

     all_urls: list[str] = []
     all_nums: list[str] = []
+    all_shas: list[str] = []

     prs_list = list(iter_open_pulls(repo))
     log.info("Found %d open PRs to process", len(prs_list))
+
+    # Result tracking for summary
+    processed_count = 0
+    succeeded_count = 0
+    skipped_count = 0
+    failed_count = 0
+
+    # Use bounded parallel processing with shared clients
+    max_workers = min(4, max(1, len(prs_list)))  # Cap at 4 workers
+
+    def process_single_pr(
+        pr_data: tuple[Any, models.GitHubContext],
+    ) -> tuple[str, SubmissionResult | None, Exception | None]:
+        """Process a single PR and return (status, result, exception)."""
+        pr, per_ctx = pr_data
+        pr_number = int(getattr(pr, "number", 0) or 0)
+
+        if pr_number <= 0:
+            return "invalid", None, None
+
+        log.info("Starting processing of PR #%d", pr_number)
+        log.debug(
+            "Processing PR #%d in multi-PR mode with event_name=%s, event_action=%s",
+            pr_number,
+            gh.event_name,
+            gh.event_action,
+        )
+
+        try:
+            if data.duplicates_filter:
+                os.environ["DUPLICATES"] = data.duplicates_filter
+            check_for_duplicates(per_ctx, allow_duplicates=data.allow_duplicates)
+        except DuplicateChangeError as exc:
+            log_exception_conditionally(log, "Skipping PR #%d", pr_number)
+            log.warning(
+                "Skipping PR #%d due to duplicate detection: %s. Use --allow-duplicates to override this check.",
+                pr_number,
+                exc,
+            )
+            return "skipped", None, exc
+
+        try:
+            with tempfile.TemporaryDirectory() as temp_dir:
+                workspace = Path(temp_dir)
+                orch = Orchestrator(workspace=workspace)
+                result_multi = orch.execute(inputs=data, gh=per_ctx)
+                return "success", result_multi, None
+        except Exception as exc:
+            log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
+            return "failed", None, exc
+
+    # Prepare PR processing tasks
+    pr_tasks = []
     for pr in prs_list:
         pr_number = int(getattr(pr, "number", 0) or 0)
         if pr_number <= 0:
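The _env_str/_env_bool helpers deleted above were relocated to the new github2gerrit/utils.py as env_str/env_bool (imported near the top of this file). Their new bodies are not part of this diff; assuming they mirror the removed code, the contract is:

# Sketch mirroring the deleted helpers; the relocated .utils versions are assumed equivalent.
import os

def env_str(name: str, default: str = "") -> str:
    # Missing variable falls back to the default; empty string is returned as-is.
    val = os.getenv(name)
    return val if val is not None else default

def env_bool(name: str, default: bool = False) -> bool:
    # Missing variable falls back to the default; otherwise accept the usual truthy spellings.
    val = os.getenv(name)
    if val is None:
        return default
    return val.strip().lower() in ("1", "true", "yes", "on")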
@@ -438,75 +511,85 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> None:
             head_ref=gh.head_ref,
             pr_number=pr_number,
         )
+        pr_tasks.append((pr, per_ctx))

-
-
-
-            "event_action=%s",
-            pr_number,
-            gh.event_name,
-            gh.event_action,
-        )
+    # Process PRs in parallel
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        log.info("Processing %d PRs with %d parallel workers", len(pr_tasks), max_workers)

-
-
-
-
-
-
-            typer.echo(f"Skipping PR #{pr_number}: {exc}")
-            continue
+        # Submit all tasks
+        future_to_pr = {
+            executor.submit(process_single_pr, pr_task): pr_task[1].pr_number
+            for pr_task in pr_tasks
+            if pr_task[1].pr_number is not None
+        }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            "PR #%d change numbers: %s",
-
-            result_multi.
-
-
-
-
-
-
-
+        # Collect results as they complete
+        for future in as_completed(future_to_pr):
+            pr_number = future_to_pr[future]
+            processed_count += 1
+
+            try:
+                status, result_multi, exc = future.result()
+
+                if status == "success" and result_multi:
+                    succeeded_count += 1
+                    if result_multi.change_urls:
+                        all_urls.extend(result_multi.change_urls)
+                        for url in result_multi.change_urls:
+                            log.info("Gerrit change URL: %s", url)
+                            log.info("PR #%d created Gerrit change: %s", pr_number, url)
+                    if result_multi.change_numbers:
+                        all_nums.extend(result_multi.change_numbers)
+                        log.info("PR #%d change numbers: %s", pr_number, result_multi.change_numbers)
+                    if result_multi.commit_shas:
+                        all_shas.extend(result_multi.commit_shas)
+                elif status == "skipped":
+                    skipped_count += 1
+                elif status == "failed":
+                    failed_count += 1
+                    typer.echo(f"Failed to process PR #{pr_number}: {exc}")
+                    log.info("Continuing to next PR despite failure")
+                else:
+                    failed_count += 1

+            except Exception as exc:
+                failed_count += 1
+                log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
+                typer.echo(f"Failed to process PR #{pr_number}: {exc}")
+                log.info("Continuing to next PR despite failure")
+
+    # Aggregate results and provide summary
     if all_urls:
         os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(all_urls)
     if all_nums:
         os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(all_nums)
+    if all_shas:
+        os.environ["GERRIT_COMMIT_SHA"] = "\n".join(all_shas)

-
+    append_github_output(
         {
-            "gerrit_change_request_url":
-
-            ),
-            "gerrit_change_request_num": os.getenv(
-                "GERRIT_CHANGE_REQUEST_NUM", ""
-            ),
+            "gerrit_change_request_url": "\n".join(all_urls) if all_urls else "",
+            "gerrit_change_request_num": "\n".join(all_nums) if all_nums else "",
+            "gerrit_commit_sha": "\n".join(all_shas) if all_shas else "",
         }
     )

-
-
+    # Summary block
+    log.info("=" * 60)
+    log.info("BULK PROCESSING SUMMARY:")
+    log.info("  Total PRs processed: %d", processed_count)
+    log.info("  Succeeded: %d", succeeded_count)
+    log.info("  Skipped (duplicates): %d", skipped_count)
+    log.info("  Failed: %d", failed_count)
+    log.info("  Gerrit changes created: %d", len(all_urls))
+    log.info("=" * 60)
+
+    # Return True if no failures occurred
+    return failed_count == 0


-def _process_single(data: Inputs, gh: GitHubContext) ->
+def _process_single(data: Inputs, gh: GitHubContext) -> bool:
     # Create temporary directory for all git operations
     with tempfile.TemporaryDirectory() as temp_dir:
         workspace = Path(temp_dir)
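_process_bulk now fans work out over a capped thread pool and folds results back in as they finish, so one slow or failing PR cannot stall or abort the batch. A self-contained sketch of the same fan-out/fan-in pattern, with integer placeholders standing in for PR objects:

# Generic sketch of the bounded-parallelism pattern used above; not project code.
from concurrent.futures import ThreadPoolExecutor, as_completed

def process(item: int) -> str:
    # Placeholder for per-PR work (duplicate check, orchestrator run, ...).
    return f"done-{item}"

items = [1, 2, 3, 4, 5]
max_workers = min(4, max(1, len(items)))  # cap at 4 workers, never 0
with ThreadPoolExecutor(max_workers=max_workers) as executor:
    future_to_item = {executor.submit(process, i): i for i in items}
    for future in as_completed(future_to_item):
        item = future_to_item[future]
        try:
            print(item, future.result())
        except Exception as exc:  # one failure must not stop the batch
            print(item, "failed:", exc)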
@@ -524,48 +607,32 @@ def _process_single(data: Inputs, gh: GitHubContext) -> None:
         except Exception as exc:
             log.debug("Execution failed; continuing to write outputs: %s", exc)

-            result = SubmissionResult(
-                change_urls=[], change_numbers=[], commit_shas=[]
-            )
+            result = SubmissionResult(change_urls=[], change_numbers=[], commit_shas=[])
         if result.change_urls:
-            os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(
-                result.change_urls
-            )
+            os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(result.change_urls)
             # Output Gerrit change URL(s) to console
             for url in result.change_urls:
                 log.info("Gerrit change URL: %s", url)
         if result.change_numbers:
-            os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(
-
-            )
+            os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(result.change_numbers)
+        if result.commit_shas:
+            os.environ["GERRIT_COMMIT_SHA"] = "\n".join(result.commit_shas)

         # Also write outputs to GITHUB_OUTPUT if available
-
+        append_github_output(
             {
-                "gerrit_change_request_url":
-
-                ),
-                "gerrit_change_request_num": os.getenv(
-                    "GERRIT_CHANGE_REQUEST_NUM", ""
-                ),
-                "gerrit_commit_sha": os.getenv("GERRIT_COMMIT_SHA", ""),
+                "gerrit_change_request_url": "\n".join(result.change_urls) if result.change_urls else "",
+                "gerrit_change_request_num": "\n".join(result.change_numbers) if result.change_numbers else "",
+                "gerrit_commit_sha": "\n".join(result.commit_shas) if result.commit_shas else "",
             }
         )

-
-        log.info("Submission pipeline completed SUCCESSFULLY ✅")
-    else:
-        log.error("Submission pipeline FAILED ❌")
-        return
+        return pipeline_success


-def _prepare_local_checkout(
-    workspace: Path, gh: GitHubContext, data: Inputs
-) -> None:
+def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) -> None:
     repo_full = gh.repository.strip() if gh.repository else ""
-    server_url = gh.server_url or os.getenv(
-        "GITHUB_SERVER_URL", "https://github.com"
-    )
+    server_url = gh.server_url or os.getenv("GITHUB_SERVER_URL", "https://github.com")
     server_url = (server_url or "https://github.com").rstrip("/")
     base_ref = gh.base_ref or ""
     pr_num_str: str = str(gh.pr_number) if gh.pr_number else "0"
@@ -573,11 +640,52 @@ def _prepare_local_checkout(
     if not repo_full:
         return

-
+    # Try SSH first for private repos if available, then fall back to HTTPS/API
+    repo_ssh_url = f"git@{server_url.replace('https://', '').replace('http://', '')}:{repo_full}.git"
+    repo_https_url = f"{server_url}/{repo_full}.git"
+
     run_cmd(["git", "init"], cwd=workspace)
+
+    # Determine which URL to use and set up authentication
+    env: dict[str, str] = {}
+    repo_url = repo_https_url  # Default to HTTPS
+
+    # Check if we should try SSH for private repos
+    use_ssh = False
+    respect_user_ssh = os.getenv("G2G_RESPECT_USER_SSH", "false").lower() in ("true", "1", "yes")
+    gerrit_ssh_privkey = os.getenv("GERRIT_SSH_PRIVKEY_G2G")
+
+    log.debug(
+        "GitHub repo access decision: SSH URL available=%s, G2G_RESPECT_USER_SSH=%s, GERRIT_SSH_PRIVKEY_G2G=%s",
+        repo_ssh_url.startswith("git@"),
+        respect_user_ssh,
+        bool(gerrit_ssh_privkey),
+    )
+
+    if repo_ssh_url.startswith("git@"):
+        # For private repos, only try SSH if G2G_RESPECT_USER_SSH is explicitly enabled
+        # Don't use SSH just because GERRIT_SSH_PRIVKEY_G2G is set (that's for Gerrit, not GitHub)
+        if respect_user_ssh:
+            use_ssh = True
+            repo_url = repo_ssh_url
+            log.debug("Using SSH for GitHub repo access due to G2G_RESPECT_USER_SSH=true")
+        else:
+            log.debug("Not using SSH for GitHub repo access - G2G_RESPECT_USER_SSH not enabled")
+
+    if use_ssh:
+        env = {
+            "GIT_SSH_COMMAND": build_git_ssh_command(),
+            **build_non_interactive_ssh_env(),
+        }
+        log.debug("Using SSH URL for private repo: %s", repo_url)
+    else:
+        log.debug("Using HTTPS URL: %s", repo_url)
+
     run_cmd(["git", "remote", "add", "origin", repo_url], cwd=workspace)

-    # Fetch base branch and PR head
+    # Fetch base branch and PR head with fallback to API archive
+    fetch_success = False
+
     if base_ref:
         try:
             branch_ref = f"refs/heads/{base_ref}:refs/remotes/origin/{base_ref}"
@@ -590,35 +698,174 @@ def _prepare_local_checkout(
                 branch_ref,
             ],
             cwd=workspace,
+            env=env,
         )
     except Exception as exc:
         log.debug("Base branch fetch failed for %s: %s", base_ref, exc)

     if pr_num_str:
-
-            f"refs/pull/{pr_num_str}/head:"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            pr_ref = f"refs/pull/{pr_num_str}/head:refs/remotes/origin/pr/{pr_num_str}/head"
+            run_cmd(
+                [
+                    "git",
+                    "fetch",
+                    f"--depth={data.fetch_depth}",
+                    "origin",
+                    pr_ref,
+                ],
+                cwd=workspace,
+                env=env,
+            )
+            run_cmd(
+                [
+                    "git",
+                    "checkout",
+                    "-B",
+                    "g2g_pr_head",
+                    f"refs/remotes/origin/pr/{pr_num_str}/head",
+                ],
+                cwd=workspace,
+                env=env,
+            )
+            fetch_success = True
+        except Exception as exc:
+            log.warning("Git fetch failed, attempting API archive fallback: %s", exc)
+            # Try API archive fallback for private repos
+            try:
+                _fallback_to_api_archive(workspace, gh, data, pr_num_str)
+                fetch_success = True
+            except Exception as api_exc:
+                log.exception("API archive fallback also failed")
+                raise exc from api_exc
+
+    if not fetch_success and pr_num_str:
+        msg = f"Failed to prepare checkout for PR #{pr_num_str}"
+        raise RuntimeError(msg)
+
+
+def _fallback_to_api_archive(workspace: Path, gh: GitHubContext, data: Inputs, pr_num_str: str) -> None:
+    """Fallback to GitHub API archive download for private repos."""
+    import io
+    import json
+    import shutil
+    import zipfile
+    from urllib.request import Request
+    from urllib.request import urlopen
+
+    log.info("Attempting API archive fallback for PR #%s", pr_num_str)
+
+    # Get GitHub token for authenticated requests
+    token = os.getenv("GITHUB_TOKEN")
+    if not token:
+        msg = "GITHUB_TOKEN required for API archive fallback"
+        raise RuntimeError(msg)
+
+    # Build API URLs
+    repo_full = gh.repository
+    server_url = gh.server_url or "https://github.com"
+
+    # Construct GitHub API base URL properly
+    if "github.com" in server_url:
+        # For github.com, use api.github.com
+        api_base = "https://api.github.com"
+    elif server_url.startswith("https://"):
+        # For GitHub Enterprise, append /api/v3
+        api_base = server_url.rstrip("/") + "/api/v3"
+    else:
+        # Fallback for unexpected formats
+        api_base = "https://api.github.com"
+
+    # Get PR details to find head SHA
+    pr_api_url = f"{api_base}/repos/{repo_full}/pulls/{pr_num_str}"
+    log.debug("GitHub API PR URL: %s", pr_api_url)
+
+    headers = {
+        "Authorization": f"token {token}",
+        "Accept": "application/vnd.github.v3+json",
+        "User-Agent": "github2gerrit",
+    }
+
+    try:
+        req = Request(pr_api_url, headers=headers)  # noqa: S310
+        with urlopen(req, timeout=30) as response:  # noqa: S310
+            pr_data = json.loads(response.read().decode())
+    except Exception:
+        log.exception("Failed to fetch PR data from GitHub API")
+        log.debug("PR API URL was: %s", pr_api_url)
+        raise
+
+    head_sha = pr_data["head"]["sha"]
+
+    # Download archive
+    archive_url = f"{api_base}/repos/{repo_full}/zipball/{head_sha}"
+    log.debug("GitHub API archive URL: %s", archive_url)
+
+    try:
+        req = Request(archive_url, headers=headers)  # noqa: S310
+        with urlopen(req, timeout=120) as response:  # noqa: S310
+            archive_data = response.read()
+    except Exception:
+        log.exception("Failed to download archive from GitHub API")
+        log.debug("Archive URL was: %s", archive_url)
+        raise
+
+    # Extract archive
+    with zipfile.ZipFile(io.BytesIO(archive_data)) as zf:
+        # Find the root directory in the archive (usually repo-sha format)
+        members = zf.namelist()
+        root_dir = None
+        for member in members:
+            if "/" in member:
+                root_dir = member.split("/")[0]
+                break
+
+        if not root_dir:
+            msg = "Could not find root directory in archive"
+            raise RuntimeError(msg)
+
+        # Extract to temporary location then move contents
+        extract_path = workspace / "archive_temp"
+        zf.extractall(extract_path)
+
+    # Move contents from extracted root to workspace
+    extracted_root = extract_path / root_dir
+    for item in extracted_root.iterdir():
+        if item.name == ".git":
+            continue  # Skip .git if present
+        dest = workspace / item.name
+        if dest.exists():
+            if dest.is_dir():
+                shutil.rmtree(dest)
+            else:
+                dest.unlink()
+        item.rename(dest)
+
+    # Clean up
+    shutil.rmtree(extract_path)
+
+    # Set up git for the extracted content
+    if not (workspace / ".git").exists():
+        run_cmd(["git", "init"], cwd=workspace)
+
+    # Create a commit for the PR content
+    run_cmd(["git", "add", "."], cwd=workspace)
+    run_cmd(
+        [
+            "git",
+            "commit",
+            "-m",
+            f"PR #{pr_num_str} content from API archive",
+            "--author",
+            "GitHub API <noreply@github.com>",
+        ],
+        cwd=workspace,
+    )
+
+    # Create the expected branch
+    run_cmd(["git", "checkout", "-B", "g2g_pr_head"], cwd=workspace)
+
+    log.info("Successfully extracted PR #%s content via API archive", pr_num_str)


 def _load_effective_inputs() -> Inputs:
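Note the trade-off in _fallback_to_api_archive: a zipball is a snapshot, not history, so the workspace ends up with a single synthetic commit authored as "GitHub API <noreply@github.com>". The API base-URL rule it applies distinguishes github.com from GitHub Enterprise; a sketch of that rule as written above (ghe.example.org is an illustrative hostname, not from this codebase):

# Sketch of the base-URL derivation shown in the hunk above.
def api_base_for(server_url: str) -> str:
    if "github.com" in server_url:
        return "https://api.github.com"       # github.com uses a separate API host
    if server_url.startswith("https://"):
        return server_url.rstrip("/") + "/api/v3"  # GitHub Enterprise convention
    return "https://api.github.com"           # fallback for unexpected formats

assert api_base_for("https://github.com") == "https://api.github.com"
assert api_base_for("https://ghe.example.org") == "https://ghe.example.org/api/v3"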
@@ -626,25 +873,28 @@ def _load_effective_inputs() -> Inputs:
     data = _build_inputs_from_env()

     # Load per-org configuration and apply to environment before validation
-    org_for_cfg = (
-        data.organization
-        or os.getenv("ORGANIZATION")
-        or os.getenv("GITHUB_REPOSITORY_OWNER")
-    )
+    org_for_cfg = data.organization or os.getenv("ORGANIZATION") or os.getenv("GITHUB_REPOSITORY_OWNER")
     cfg = load_org_config(org_for_cfg)

     # Apply dynamic parameter derivation for missing Gerrit parameters
     cfg = apply_parameter_derivation(cfg, org_for_cfg, save_to_config=True)

+    # Debug: Show what configuration would be applied
+    log.debug("Configuration to apply: %s", cfg)
+    if "DRY_RUN" in cfg:
+        log.warning(
+            "Configuration contains DRY_RUN=%s, this may override environment DRY_RUN=%s",
+            cfg["DRY_RUN"],
+            os.getenv("DRY_RUN"),
+        )
+
     apply_config_to_env(cfg)

     # Refresh inputs after applying configuration to environment
     data = _build_inputs_from_env()

     # Derive reviewers from local git config if running locally and unset
-    if not os.getenv("REVIEWERS_EMAIL") and (
-        os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")
-    ):
+    if not os.getenv("REVIEWERS_EMAIL") and (os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")):
         try:
             from .gitutils import enumerate_reviewer_emails

@@ -663,11 +913,14 @@ def _load_effective_inputs() -> Inputs:
                     reviewers_email=os.environ["REVIEWERS_EMAIL"],
                     preserve_github_prs=data.preserve_github_prs,
                     dry_run=data.dry_run,
+                    normalise_commit=data.normalise_commit,
                     gerrit_server=data.gerrit_server,
                     gerrit_server_port=data.gerrit_server_port,
                     gerrit_project=data.gerrit_project,
                     issue_id=data.issue_id,
                     allow_duplicates=data.allow_duplicates,
+                    ci_testing=data.ci_testing,
+                    duplicates_filter=data.duplicates_filter,
                 )
                 log.info("Derived reviewers: %s", data.reviewers_email)
         except Exception as exc:
@@ -676,44 +929,15 @@ def _load_effective_inputs() -> Inputs:
     return data


-def _append_github_output(outputs: dict[str, str]) -> None:
-    gh_out = os.getenv("GITHUB_OUTPUT")
-    if not gh_out:
-        return
-    try:
-        with open(gh_out, "a", encoding="utf-8") as fh:
-            for key, val in outputs.items():
-                if not val:
-                    continue
-                if "\n" in val:
-                    fh.write(f"{key}<<G2G\n")
-                    fh.write(f"{val}\n")
-                    fh.write("G2G\n")
-                else:
-                    fh.write(f"{key}={val}\n")
-    except Exception as exc:
-        log.debug("Failed to write GITHUB_OUTPUT: %s", exc)
-
-
 def _augment_pr_refs_if_needed(gh: GitHubContext) -> GitHubContext:
-    if (
-        os.getenv("G2G_TARGET_URL")
-        and gh.pr_number
-        and (not gh.head_ref or not gh.base_ref)
-    ):
+    if os.getenv("G2G_TARGET_URL") and gh.pr_number and (not gh.head_ref or not gh.base_ref):
         try:
             client = build_client()
             repo = get_repo_from_env(client)
             pr_obj = get_pull(repo, int(gh.pr_number))
-            base_ref = str(
-
-            )
-            head_ref = str(
-                getattr(getattr(pr_obj, "head", object()), "ref", "") or ""
-            )
-            head_sha = str(
-                getattr(getattr(pr_obj, "head", object()), "sha", "") or ""
-            )
+            base_ref = str(getattr(getattr(pr_obj, "base", object()), "ref", "") or "")
+            head_ref = str(getattr(getattr(pr_obj, "head", object()), "ref", "") or "")
+            head_sha = str(getattr(getattr(pr_obj, "head", object()), "sha", "") or "")
             if base_ref:
                 os.environ["GITHUB_BASE_REF"] = base_ref
                 log.info("Resolved base_ref via GitHub API: %s", base_ref)
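The deleted _append_github_output (now append_github_output in github2gerrit/utils.py) emits plain key=value lines, switching to a key<<G2G heredoc when a value spans several lines, which is the delimiter syntax GitHub Actions requires for multi-line step outputs. A reader-side sketch of that format, handy for tests; read_outputs is illustrative and not part of the package:

# Illustrative parser for the GITHUB_OUTPUT format the deleted writer produces.
def read_outputs(text: str) -> dict[str, str]:
    outputs: dict[str, str] = {}
    lines = iter(text.splitlines())
    for line in lines:
        if line.endswith("<<G2G"):
            # Multi-line value: collect until the closing G2G delimiter.
            key = line[: -len("<<G2G")]
            body: list[str] = []
            for inner in lines:
                if inner == "G2G":
                    break
                body.append(inner)
            outputs[key] = "\n".join(body)
        elif "=" in line:
            key, _, val = line.partition("=")
            outputs[key] = val
    return outputs

sample = "gerrit_change_request_num<<G2G\n12345\n12346\nG2G\n"
assert read_outputs(sample)["gerrit_change_request_num"] == "12345\n12346"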
@@ -736,7 +960,7 @@ def _process() -> None:
     try:
         _validate_inputs(data)
     except ConfigurationError as exc:
-
+        log_exception_conditionally(log, "Configuration validation failed")
        typer.echo(f"Configuration validation failed: {exc}", err=True)
        raise typer.Exit(code=2) from exc

@@ -744,28 +968,40 @@ def _process() -> None:
     _log_effective_config(data, gh)

     # Test mode: short-circuit after validation
-    if
+    if env_bool("G2G_TEST_MODE", False):
         log.info("Validation complete. Ready to execute submission pipeline.")
         typer.echo("Validation complete. Ready to execute submission pipeline.")
         return

     # Bulk mode for URL/workflow_dispatch
-    sync_all =
-    if sync_all and (
-
-
-
+    sync_all = env_bool("SYNC_ALL_OPEN_PRS", False)
+    if sync_all and (gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")):
+        bulk_success = _process_bulk(data, gh)
+
+        # Log external API metrics summary
+        try:
+            from .external_api import log_api_metrics_summary
+
+            log_api_metrics_summary()
+        except Exception as exc:
+            log.debug("Failed to log API metrics summary: %s", exc)
+
+        # Final success/failure message for bulk processing
+        if bulk_success:
+            log.info("Bulk processing completed SUCCESSFULLY ✅")
+        else:
+            log.error("Bulk processing FAILED ❌")
+            raise typer.Exit(code=1)
+
         return

     if not gh.pr_number:
         log.error(
-            "PR_NUMBER is empty. This tool requires a valid pull request "
-            "context. Current event: %s",
+            "PR_NUMBER is empty. This tool requires a valid pull request context. Current event: %s",
             gh.event_name,
         )
         typer.echo(
-            "PR_NUMBER is empty. This tool requires a valid pull request "
-            f"context. Current event: {gh.event_name}",
+            f"PR_NUMBER is empty. This tool requires a valid pull request context. Current event: {gh.event_name}",
             err=True,
         )
         raise typer.Exit(code=2)
@@ -777,18 +1013,37 @@ def _process() -> None:
     gh = _augment_pr_refs_if_needed(gh)

     # Check for duplicates in single-PR mode (before workspace setup)
-    if gh.pr_number and not
+    if gh.pr_number and not env_bool("SYNC_ALL_OPEN_PRS", False):
         try:
+            if data.duplicates_filter:
+                os.environ["DUPLICATES"] = data.duplicates_filter
             check_for_duplicates(gh, allow_duplicates=data.allow_duplicates)
         except DuplicateChangeError as exc:
-
-
-
-
+            log_exception_conditionally(
+                log,
+                "Duplicate detection blocked submission for PR #%d",
+                gh.pr_number,
             )
+            log.info("Use --allow-duplicates to override this check.")
             raise typer.Exit(code=3) from exc

-    _process_single(data, gh)
+    pipeline_success = _process_single(data, gh)
+
+    # Log external API metrics summary
+    try:
+        from .external_api import log_api_metrics_summary
+
+        log_api_metrics_summary()
+    except Exception as exc:
+        log.debug("Failed to log API metrics summary: %s", exc)
+
+    # Final success/failure message after all cleanup
+    if pipeline_success:
+        log.info("Submission pipeline completed SUCCESSFULLY ✅")
+    else:
+        log.error("Submission pipeline FAILED ❌")
+        raise typer.Exit(code=1)
+
     return

@@ -804,9 +1059,7 @@ def _load_event(path: Path | None) -> dict[str, Any]:
     if not path or not path.exists():
         return {}
     try:
-        return cast(
-            dict[str, Any], json.loads(path.read_text(encoding="utf-8"))
-        )
+        return cast(dict[str, Any], json.loads(path.read_text(encoding="utf-8")))
     except Exception as exc:
         log.warning("Failed to parse GITHUB_EVENT_PATH: %s", exc)
         return {}
@@ -873,10 +1126,7 @@ def _read_github_context() -> GitHubContext:

 def _validate_inputs(data: Inputs) -> None:
     if data.use_pr_as_commit and data.submit_single_commits:
-        msg =
-            "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled at "
-            "the same time"
-        )
+        msg = "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled at the same time"
         raise ConfigurationError(msg)

     # Context-aware validation: different requirements for GH Actions vs CLI
|
@@ -890,7 +1140,6 @@ def _validate_inputs(data: Inputs) -> None:
|
|
890
1140
|
if is_github_actions:
|
891
1141
|
# In GitHub Actions: allow derivation if organization is available
|
892
1142
|
if not data.organization:
|
893
|
-
# No organization means no derivation possible
|
894
1143
|
required_fields.extend(
|
895
1144
|
[
|
896
1145
|
"gerrit_ssh_user_g2g",
|
@@ -901,9 +1150,7 @@ def _validate_inputs(data: Inputs) -> None:
         # In local CLI: require explicit values or organization + derivation
         # This prevents unexpected behavior when running locally
         missing_gerrit_params = [
-            field
-            for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"]
-            if not getattr(data, field)
+            field for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"] if not getattr(data, field)
         ]
         if missing_gerrit_params:
             if data.organization:
@@ -915,7 +1162,7 @@ def _validate_inputs(data: Inputs) -> None:
                     ", ".join(missing_gerrit_params),
                 )
                 # Allow derivation in local mode only if explicitly enabled
-                if not
+                if not env_bool("G2G_ENABLE_DERIVATION", False):
                     required_fields.extend(missing_gerrit_params)
             else:
                 required_fields.extend(missing_gerrit_params)
@@ -931,24 +1178,18 @@ def _validate_inputs(data: Inputs) -> None:
             if is_github_actions:
                 log.error(
                     "These fields can be derived automatically from "
-                    "organization '%s'",
+                    "organization '%s' if G2G_ENABLE_DERIVATION=true",
                     data.organization,
                 )
             else:
                 log.error(
-                    "These fields can be derived from organization "
-                    "'%s'",
+                    "These fields can be derived from organization '%s'",
                     data.organization,
                 )
                 log.error("Set G2G_ENABLE_DERIVATION=true to enable")
         else:
-            log.error(
-
-                "ORGANIZATION for derivation"
-            )
-            raise ConfigurationError(
-                _MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name)
-            )
+            log.error("These fields require either explicit values or an ORGANIZATION for derivation")
+            raise ConfigurationError(_MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name))

     # Validate fetch depth is a positive integer
     if data.fetch_depth <= 0:
@@ -967,9 +1208,7 @@ def _log_effective_config(data: Inputs, gh: GitHubContext) -> None:
     log.info("  SUBMIT_SINGLE_COMMITS: %s", data.submit_single_commits)
     log.info("  USE_PR_AS_COMMIT: %s", data.use_pr_as_commit)
     log.info("  FETCH_DEPTH: %s", data.fetch_depth)
-    known_hosts_status =
-        "<provided>" if data.gerrit_known_hosts else "<will auto-discover>"
-    )
+    known_hosts_status = "<provided>" if data.gerrit_known_hosts else "<will auto-discover>"
     log.info("  GERRIT_KNOWN_HOSTS: %s", known_hosts_status)
     log.info("  GERRIT_SSH_PRIVKEY_G2G: %s", safe_privkey)
     log.info("  GERRIT_SSH_USER_G2G: %s", data.gerrit_ssh_user_g2g)
|
|
978
1217
|
log.info(" REVIEWERS_EMAIL: %s", data.reviewers_email or "")
|
979
1218
|
log.info(" PRESERVE_GITHUB_PRS: %s", data.preserve_github_prs)
|
980
1219
|
log.info(" DRY_RUN: %s", data.dry_run)
|
981
|
-
log.info("
|
1220
|
+
log.info(" CI_TESTING: %s", data.ci_testing)
|
1221
|
+
log.info(" GERRIT_SERVER: %s", data.gerrit_server)
|
982
1222
|
log.info(" GERRIT_SERVER_PORT: %s", data.gerrit_server_port or "")
|
983
1223
|
log.info(" GERRIT_PROJECT: %s", data.gerrit_project or "")
|
984
1224
|
log.info("GitHub context:")
|