github2gerrit 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- github2gerrit/cli.py +196 -108
- github2gerrit/config.py +207 -5
- github2gerrit/core.py +542 -398
- github2gerrit/duplicate_detection.py +375 -193
- github2gerrit/gerrit_urls.py +256 -0
- github2gerrit/github_api.py +15 -20
- github2gerrit/gitutils.py +49 -13
- github2gerrit/models.py +1 -0
- github2gerrit/similarity.py +458 -0
- github2gerrit/ssh_discovery.py +365 -0
- {github2gerrit-0.1.4.dist-info → github2gerrit-0.1.6.dist-info}/METADATA +24 -25
- github2gerrit-0.1.6.dist-info/RECORD +17 -0
- github2gerrit-0.1.4.dist-info/RECORD +0 -14
- {github2gerrit-0.1.4.dist-info → github2gerrit-0.1.6.dist-info}/WHEEL +0 -0
- {github2gerrit-0.1.4.dist-info → github2gerrit-0.1.6.dist-info}/entry_points.txt +0 -0
- {github2gerrit-0.1.4.dist-info → github2gerrit-0.1.6.dist-info}/licenses/LICENSE +0 -0
- {github2gerrit-0.1.4.dist-info → github2gerrit-0.1.6.dist-info}/top_level.txt +0 -0
github2gerrit/cli.py
CHANGED
@@ -7,8 +7,12 @@ import json
 import logging
 import os
 import tempfile
+from collections.abc import Callable
 from pathlib import Path
+from typing import TYPE_CHECKING
 from typing import Any
+from typing import Protocol
+from typing import TypeVar
 from typing import cast
 from urllib.parse import urlparse
 
@@ -16,7 +20,9 @@ import click
 import typer
 
 from . import models
+from .config import _is_github_actions_context
 from .config import apply_config_to_env
+from .config import apply_parameter_derivation
 from .config import load_org_config
 from .core import Orchestrator
 from .core import SubmissionResult
@@ -31,6 +37,19 @@ from .models import GitHubContext
 from .models import Inputs
 
 
+def _is_verbose_mode() -> bool:
+    """Check if verbose mode is enabled via environment variable."""
+    return os.getenv("G2G_VERBOSE", "").lower() in ("true", "1", "yes")
+
+
+def _log_exception_conditionally(logger: logging.Logger, message: str, *args: Any) -> None:
+    """Log exception with traceback only if verbose mode is enabled."""
+    if _is_verbose_mode():
+        logger.exception(message, *args)
+    else:
+        logger.error(message, *args)
+
+
 class ConfigurationError(Exception):
     """Raised when configuration validation fails.
 
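The two helpers above gate full tracebacks behind the G2G_VERBOSE environment variable. A minimal, self-contained sketch of how they behave (the failing try block is illustrative only):

import logging
import os

log = logging.getLogger("github2gerrit")
logging.basicConfig(level=logging.INFO)


def _is_verbose_mode() -> bool:
    return os.getenv("G2G_VERBOSE", "").lower() in ("true", "1", "yes")


def _log_exception_conditionally(logger: logging.Logger, message: str, *args: object) -> None:
    # Full traceback only when G2G_VERBOSE is enabled; a plain error line otherwise.
    if _is_verbose_mode():
        logger.exception(message, *args)
    else:
        logger.error(message, *args)


try:
    raise RuntimeError("simulated failure")  # illustrative only
except RuntimeError:
    _log_exception_conditionally(log, "Failed to process PR #%d", 42)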
@@ -83,18 +102,36 @@ def _parse_github_target(url: str) -> tuple[str | None, str | None, int | None]:
 APP_NAME = "github2gerrit"
 
 
-
-
+if TYPE_CHECKING:
+    BaseGroup = object
+else:
+    BaseGroup = click.Group
+
+
+class _FormatterProto(Protocol):
+    def write_usage(self, prog: str, args: str, prefix: str = ...) -> None: ...
+
+
+class _ContextProto(Protocol):
+    @property
+    def command_path(self) -> str: ...
+
+
+class _SingleUsageGroup(BaseGroup):
+    def format_usage(self, ctx: _ContextProto, formatter: _FormatterProto) -> None:
         # Force a simplified usage line without COMMAND [ARGS]...
-        formatter.write_usage(
-
-        )
+        formatter.write_usage(ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: ")
+
 
+# Error message constants to comply with TRY003
+_MSG_MISSING_REQUIRED_INPUT = "Missing required input: {field_name}"
+_MSG_INVALID_FETCH_DEPTH = "FETCH_DEPTH must be a positive integer"
+_MSG_ISSUE_ID_MULTILINE = "Issue ID must be single line"
 
 app: typer.Typer = typer.Typer(
     add_completion=False,
     no_args_is_help=False,
-    cls=_SingleUsageGroup,
+    cls=cast(Any, _SingleUsageGroup),
 )
 
 
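Typer forwards cls= to the underlying Click group, which is why the diff can pass _SingleUsageGroup (cast to Any to satisfy Typer's typing) to rewrite the usage line; the Protocol shims exist only for the type checker. The mechanics are easiest to see with plain Click; a runnable sketch of the same pattern, using hypothetical names:

import click


class SingleUsageGroup(click.Group):
    # Hypothetical stand-in for the diff's _SingleUsageGroup.
    def format_usage(self, ctx: click.Context, formatter: click.HelpFormatter) -> None:
        # Replace Click's default "[OPTIONS] COMMAND [ARGS]..." usage line.
        formatter.write_usage(ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: ")


@click.group(cls=SingleUsageGroup)
def cli() -> None:
    """Demo group whose --help shows a single simplified usage line."""


if __name__ == "__main__":
    cli()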
@@ -108,7 +145,15 @@ def _resolve_org(default_org: str | None) -> str:
     return ""
 
 
-
+if TYPE_CHECKING:
+    F = TypeVar("F", bound=Callable[..., object])
+
+    def typed_app_command(*args: object, **kwargs: object) -> Callable[[F], F]: ...
+else:
+    typed_app_command = app.command
+
+
+@typed_app_command()
 def main(
     ctx: typer.Context,
     target_url: str | None = typer.Argument(
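The typed_app_command shim is a familiar trick: under TYPE_CHECKING the checker sees a decorator factory with a precise, signature-preserving type, while at runtime the name simply aliases app.command. A generic, self-contained sketch of the pattern with a hypothetical register decorator:

from collections.abc import Callable
from typing import TYPE_CHECKING, TypeVar

_registry: list[str] = []


def _register_impl(*args, **kwargs):
    # Runtime implementation: record the decorated function's name, return it unchanged.
    def wrap(func):
        _registry.append(func.__name__)
        return func

    return wrap


if TYPE_CHECKING:
    F = TypeVar("F", bound=Callable[..., object])

    # Type checkers see a decorator factory that preserves the wrapped signature.
    def register(*args: object, **kwargs: object) -> Callable[[F], F]: ...
else:
    register = _register_impl


@register()
def greet(name: str) -> str:
    return f"hello {name}"


print(greet("gerrit"), _registry)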
@@ -216,6 +261,14 @@ def main(
         envvar="ALLOW_DUPLICATES",
         help="Allow submitting duplicate changes without error.",
     ),
+    duplicates: str = typer.Option(
+        "open",
+        "--duplicates",
+        envvar="DUPLICATES",
+        help=(
+            'Gerrit statuses for duplicate detection (comma-separated). E.g. "open,merged,abandoned". Default: "open".'
+        ),
+    ),
     verbose: bool = typer.Option(
         False,
         "--verbose",
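--duplicates accepts a comma-separated list of Gerrit change statuses and is exported through the DUPLICATES environment variable (read back as duplicates_filter). The real parsing lives in duplicate_detection.py and is not shown in this diff; a hedged sketch of how such a value could be normalized:

import os

# Assumed set of Gerrit change statuses relevant to duplicate detection.
VALID_STATUSES = {"open", "merged", "abandoned"}


def parse_duplicates_filter(raw: str | None) -> set[str]:
    """Normalize a comma-separated status list such as 'open,merged'."""
    if not raw:
        return {"open"}
    statuses = {part.strip().lower() for part in raw.split(",") if part.strip()}
    unknown = statuses - VALID_STATUSES
    if unknown:
        raise ValueError(f"unknown Gerrit status(es): {', '.join(sorted(unknown))}")
    return statuses or {"open"}


print(parse_duplicates_filter(os.getenv("DUPLICATES", "open")))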
@@ -275,6 +328,8 @@ def main(
         os.environ["ISSUE_ID"] = issue_id
     if allow_duplicates:
         os.environ["ALLOW_DUPLICATES"] = "true"
+    if duplicates:
+        os.environ["DUPLICATES"] = duplicates
     # URL mode handling
     if target_url:
         org, repo, pr = _parse_github_target(target_url)
@@ -302,10 +357,7 @@ def main(
 def _setup_logging() -> logging.Logger:
     level_name = os.getenv("G2G_LOG_LEVEL", "INFO").upper()
     level = getattr(logging, level_name, logging.INFO)
-    fmt = (
-        "%(asctime)s %(levelname)-8s %(name)s "
-        "%(filename)s:%(lineno)d | %(message)s"
-    )
+    fmt = "%(asctime)s %(levelname)-8s %(name)s %(filename)s:%(lineno)d | %(message)s"
     logging.basicConfig(level=level, format=fmt)
     return logging.getLogger(APP_NAME)
 
@@ -344,9 +396,7 @@ def _build_inputs_from_env() -> Inputs:
         gerrit_ssh_privkey_g2g=_env_str("GERRIT_SSH_PRIVKEY_G2G"),
         gerrit_ssh_user_g2g=_env_str("GERRIT_SSH_USER_G2G"),
         gerrit_ssh_user_g2g_email=_env_str("GERRIT_SSH_USER_G2G_EMAIL"),
-        organization=_env_str(
-            "ORGANIZATION", _env_str("GITHUB_REPOSITORY_OWNER")
-        ),
+        organization=_env_str("ORGANIZATION", _env_str("GITHUB_REPOSITORY_OWNER")),
         reviewers_email=_env_str("REVIEWERS_EMAIL", ""),
         preserve_github_prs=_env_bool("PRESERVE_GITHUB_PRS", False),
         dry_run=_env_bool("DRY_RUN", False),
@@ -355,6 +405,7 @@ def _build_inputs_from_env() -> Inputs:
         gerrit_project=_env_str("GERRIT_PROJECT"),
         issue_id=_env_str("ISSUE_ID"),
         allow_duplicates=_env_bool("ALLOW_DUPLICATES", False),
+        duplicates_filter=_env_str("DUPLICATES", "open"),
     )
 
 
@@ -388,20 +439,23 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> None:
 
         log.info("Starting processing of PR #%d", pr_number)
         log.debug(
-            "Processing PR #%d in multi-PR mode with event_name=%s, "
-            "event_action=%s",
+            "Processing PR #%d in multi-PR mode with event_name=%s, event_action=%s",
             pr_number,
             gh.event_name,
             gh.event_action,
         )
 
         try:
-
-
-            )
+            if data.duplicates_filter:
+                os.environ["DUPLICATES"] = data.duplicates_filter
+            check_for_duplicates(per_ctx, allow_duplicates=data.allow_duplicates)
         except DuplicateChangeError as exc:
-            log
-
+            _log_exception_conditionally(log, "Skipping PR #%d", pr_number)
+            log.warning(
+                "Skipping PR #%d due to duplicate detection: %s. Use --allow-duplicates to override this check.",
+                pr_number,
+                exc,
+            )
             continue
 
         try:
@@ -426,7 +480,7 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> None:
                 result_multi.change_numbers,
             )
         except Exception as exc:
-            log
+            _log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
             typer.echo(f"Failed to process PR #{pr_number}: {exc}")
             log.info("Continuing to next PR despite failure")
             continue
@@ -438,12 +492,8 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> None:
 
     _append_github_output(
         {
-            "gerrit_change_request_url": os.getenv(
-
-            ),
-            "gerrit_change_request_num": os.getenv(
-                "GERRIT_CHANGE_REQUEST_NUM", ""
-            ),
+            "gerrit_change_request_url": os.getenv("GERRIT_CHANGE_REQUEST_URL", ""),
+            "gerrit_change_request_num": os.getenv("GERRIT_CHANGE_REQUEST_NUM", ""),
         }
     )
 
@@ -469,30 +519,20 @@ def _process_single(data: Inputs, gh: GitHubContext) -> None:
     except Exception as exc:
         log.debug("Execution failed; continuing to write outputs: %s", exc)
 
-        result = SubmissionResult(
-            change_urls=[], change_numbers=[], commit_shas=[]
-        )
+        result = SubmissionResult(change_urls=[], change_numbers=[], commit_shas=[])
     if result.change_urls:
-        os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(
-            result.change_urls
-        )
+        os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(result.change_urls)
         # Output Gerrit change URL(s) to console
         for url in result.change_urls:
            log.info("Gerrit change URL: %s", url)
     if result.change_numbers:
-        os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(
-            result.change_numbers
-        )
+        os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(result.change_numbers)
 
     # Also write outputs to GITHUB_OUTPUT if available
     _append_github_output(
         {
-            "gerrit_change_request_url": os.getenv(
-
-            ),
-            "gerrit_change_request_num": os.getenv(
-                "GERRIT_CHANGE_REQUEST_NUM", ""
-            ),
+            "gerrit_change_request_url": os.getenv("GERRIT_CHANGE_REQUEST_URL", ""),
+            "gerrit_change_request_num": os.getenv("GERRIT_CHANGE_REQUEST_NUM", ""),
             "gerrit_commit_sha": os.getenv("GERRIT_COMMIT_SHA", ""),
         }
     )
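_append_github_output itself is not shown in this diff, but the values handed to it may contain newlines (URLs and change numbers are joined with \n). GitHub Actions requires the heredoc form of the GITHUB_OUTPUT file for multi-line values, so a safe writer could look roughly like the sketch below (the delimiter choice and example values are assumptions):

import os
from pathlib import Path


def append_github_output(outputs: dict[str, str]) -> None:
    """Append key/value pairs to $GITHUB_OUTPUT, using heredoc syntax for multi-line values."""
    out_path = os.getenv("GITHUB_OUTPUT")
    if not out_path:
        return
    with Path(out_path).open("a", encoding="utf-8") as fh:
        for key, value in outputs.items():
            if "\n" in value:
                fh.write(f"{key}<<G2G_EOF\n{value}\nG2G_EOF\n")
            else:
                fh.write(f"{key}={value}\n")


# Example values are hypothetical.
append_github_output(
    {
        "gerrit_change_request_url": "https://gerrit.example.org/c/repo/+/12345",
        "gerrit_change_request_num": "12345\n12346",
    }
)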
@@ -504,13 +544,9 @@ def _process_single(data: Inputs, gh: GitHubContext) -> None:
     return
 
 
-def _prepare_local_checkout(
-    workspace: Path, gh: GitHubContext, data: Inputs
-) -> None:
+def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) -> None:
     repo_full = gh.repository.strip() if gh.repository else ""
-    server_url = gh.server_url or os.getenv(
-        "GITHUB_SERVER_URL", "https://github.com"
-    )
+    server_url = gh.server_url or os.getenv("GITHUB_SERVER_URL", "https://github.com")
     server_url = (server_url or "https://github.com").rstrip("/")
     base_ref = gh.base_ref or ""
     pr_num_str: str = str(gh.pr_number) if gh.pr_number else "0"
@@ -522,6 +558,26 @@ def _prepare_local_checkout(
     run_cmd(["git", "init"], cwd=workspace)
     run_cmd(["git", "remote", "add", "origin", repo_url], cwd=workspace)
 
+    # Non-interactive SSH/Git environment for any network operations
+    env = {
+        "GIT_SSH_COMMAND": (
+            "ssh -F /dev/null "
+            "-o IdentitiesOnly=yes "
+            "-o IdentityAgent=none "
+            "-o BatchMode=yes "
+            "-o PreferredAuthentications=publickey "
+            "-o StrictHostKeyChecking=yes "
+            "-o PasswordAuthentication=no "
+            "-o PubkeyAcceptedKeyTypes=+ssh-rsa "
+            "-o ConnectTimeout=10"
+        ),
+        "SSH_AUTH_SOCK": "",
+        "SSH_AGENT_PID": "",
+        "SSH_ASKPASS": "/usr/bin/false",
+        "DISPLAY": "",
+        "SSH_ASKPASS_REQUIRE": "never",
+    }
+
     # Fetch base branch and PR head
     if base_ref:
         try:
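The env mapping forces git's SSH transport into a strictly non-interactive mode: no agent, no askpass prompt, batch-mode public-key authentication with a short connect timeout. run_cmd's exact signature is project-specific and not shown here; a sketch of applying the same environment with plain subprocess:

import os
import subprocess

# Subset of the settings added in the diff; comments describe intent.
NON_INTERACTIVE_GIT_ENV = {
    "GIT_SSH_COMMAND": (
        "ssh -F /dev/null -o IdentitiesOnly=yes -o BatchMode=yes "
        "-o StrictHostKeyChecking=yes -o ConnectTimeout=10"
    ),
    "SSH_AUTH_SOCK": "",  # ignore any running ssh-agent
    "SSH_ASKPASS": "/usr/bin/false",  # never pop up a passphrase prompt
    "SSH_ASKPASS_REQUIRE": "never",
    "DISPLAY": "",
}


def git_fetch(workspace: str, refspec: str) -> None:
    # Overlay the non-interactive settings on the current environment and fail fast.
    env = {**os.environ, **NON_INTERACTIVE_GIT_ENV}
    subprocess.run(
        ["git", "fetch", "origin", refspec],
        cwd=workspace,
        env=env,
        check=True,
        timeout=300,
    )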
@@ -535,15 +591,13 @@ def _prepare_local_checkout(
                     branch_ref,
                 ],
                 cwd=workspace,
+                env=env,
             )
         except Exception as exc:
             log.debug("Base branch fetch failed for %s: %s", base_ref, exc)
 
     if pr_num_str:
-        pr_ref = (
-            f"refs/pull/{pr_num_str}/head:"
-            f"refs/remotes/origin/pr/{pr_num_str}/head"
-        )
+        pr_ref = f"refs/pull/{pr_num_str}/head:refs/remotes/origin/pr/{pr_num_str}/head"
         run_cmd(
             [
                 "git",
@@ -553,6 +607,7 @@ def _prepare_local_checkout(
                 pr_ref,
             ],
             cwd=workspace,
+            env=env,
         )
         run_cmd(
             [
@@ -563,6 +618,7 @@ def _prepare_local_checkout(
                 f"refs/remotes/origin/pr/{pr_num_str}/head",
             ],
             cwd=workspace,
+            env=env,
         )
 
 
@@ -571,21 +627,19 @@ def _load_effective_inputs() -> Inputs:
     data = _build_inputs_from_env()
 
     # Load per-org configuration and apply to environment before validation
-    org_for_cfg = (
-        data.organization
-        or os.getenv("ORGANIZATION")
-        or os.getenv("GITHUB_REPOSITORY_OWNER")
-    )
+    org_for_cfg = data.organization or os.getenv("ORGANIZATION") or os.getenv("GITHUB_REPOSITORY_OWNER")
     cfg = load_org_config(org_for_cfg)
+
+    # Apply dynamic parameter derivation for missing Gerrit parameters
+    cfg = apply_parameter_derivation(cfg, org_for_cfg, save_to_config=True)
+
     apply_config_to_env(cfg)
 
     # Refresh inputs after applying configuration to environment
     data = _build_inputs_from_env()
 
     # Derive reviewers from local git config if running locally and unset
-    if not os.getenv("REVIEWERS_EMAIL") and (
-        os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")
-    ):
+    if not os.getenv("REVIEWERS_EMAIL") and (os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")):
         try:
             from .gitutils import enumerate_reviewer_emails
 
@@ -609,6 +663,7 @@ def _load_effective_inputs() -> Inputs:
                 gerrit_project=data.gerrit_project,
                 issue_id=data.issue_id,
                 allow_duplicates=data.allow_duplicates,
+                duplicates_filter=data.duplicates_filter,
             )
             log.info("Derived reviewers: %s", data.reviewers_email)
         except Exception as exc:
@@ -637,24 +692,14 @@ def _append_github_output(outputs: dict[str, str]) -> None:
 
 
 def _augment_pr_refs_if_needed(gh: GitHubContext) -> GitHubContext:
-    if (
-        os.getenv("G2G_TARGET_URL")
-        and gh.pr_number
-        and (not gh.head_ref or not gh.base_ref)
-    ):
+    if os.getenv("G2G_TARGET_URL") and gh.pr_number and (not gh.head_ref or not gh.base_ref):
         try:
             client = build_client()
             repo = get_repo_from_env(client)
             pr_obj = get_pull(repo, int(gh.pr_number))
-            base_ref = str(
-
-            )
-            head_ref = str(
-                getattr(getattr(pr_obj, "head", object()), "ref", "") or ""
-            )
-            head_sha = str(
-                getattr(getattr(pr_obj, "head", object()), "sha", "") or ""
-            )
+            base_ref = str(getattr(getattr(pr_obj, "base", object()), "ref", "") or "")
+            head_ref = str(getattr(getattr(pr_obj, "head", object()), "ref", "") or "")
+            head_sha = str(getattr(getattr(pr_obj, "head", object()), "sha", "") or "")
             if base_ref:
                 os.environ["GITHUB_BASE_REF"] = base_ref
                 log.info("Resolved base_ref via GitHub API: %s", base_ref)
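The nested getattr calls with an object() sentinel tolerate pull-request objects whose base or head attribute is missing or empty, yielding "" instead of raising AttributeError. The same expressions applied to a stand-in object:

from types import SimpleNamespace

# Stand-in for a pull-request object whose head is populated but whose base is absent.
pr_obj = SimpleNamespace(head=SimpleNamespace(ref="feature/x", sha="abc123"))

base_ref = str(getattr(getattr(pr_obj, "base", object()), "ref", "") or "")
head_ref = str(getattr(getattr(pr_obj, "head", object()), "ref", "") or "")
head_sha = str(getattr(getattr(pr_obj, "head", object()), "sha", "") or "")

print(repr(base_ref), head_ref, head_sha)  # '' feature/x abc123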
@@ -677,7 +722,7 @@ def _process() -> None:
     try:
         _validate_inputs(data)
     except ConfigurationError as exc:
-        log
+        _log_exception_conditionally(log, "Configuration validation failed")
         typer.echo(f"Configuration validation failed: {exc}", err=True)
         raise typer.Exit(code=2) from exc
 
@@ -692,21 +737,17 @@ def _process() -> None:
 
     # Bulk mode for URL/workflow_dispatch
     sync_all = _env_bool("SYNC_ALL_OPEN_PRS", False)
-    if sync_all and (
-        gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")
-    ):
+    if sync_all and (gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")):
         _process_bulk(data, gh)
         return
 
     if not gh.pr_number:
         log.error(
-            "PR_NUMBER is empty. This tool requires a valid pull request "
-            "context. Current event: %s",
+            "PR_NUMBER is empty. This tool requires a valid pull request context. Current event: %s",
             gh.event_name,
         )
         typer.echo(
-            "PR_NUMBER is empty. This tool requires a valid pull request "
-            f"context. Current event: {gh.event_name}",
+            f"PR_NUMBER is empty. This tool requires a valid pull request context. Current event: {gh.event_name}",
             err=True,
         )
         raise typer.Exit(code=2)
@@ -720,13 +761,16 @@ def _process() -> None:
     # Check for duplicates in single-PR mode (before workspace setup)
     if gh.pr_number and not _env_bool("SYNC_ALL_OPEN_PRS", False):
         try:
+            if data.duplicates_filter:
+                os.environ["DUPLICATES"] = data.duplicates_filter
             check_for_duplicates(gh, allow_duplicates=data.allow_duplicates)
         except DuplicateChangeError as exc:
-
-
-
-
+            _log_exception_conditionally(
+                log,
+                "Duplicate detection blocked submission for PR #%d",
+                gh.pr_number,
             )
+            log.info("Use --allow-duplicates to override this check.")
             raise typer.Exit(code=3) from exc
 
     _process_single(data, gh)
@@ -745,9 +789,7 @@ def _load_event(path: Path | None) -> dict[str, Any]:
     if not path or not path.exists():
         return {}
     try:
-        return cast(
-            dict[str, Any], json.loads(path.read_text(encoding="utf-8"))
-        )
+        return cast(dict[str, Any], json.loads(path.read_text(encoding="utf-8")))
     except Exception as exc:
         log.warning("Failed to parse GITHUB_EVENT_PATH: %s", exc)
         return {}
@@ -814,31 +856,79 @@ def _read_github_context() -> GitHubContext:
 
 def _validate_inputs(data: Inputs) -> None:
     if data.use_pr_as_commit and data.submit_single_commits:
-        msg = (
-            "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled at "
-            "the same time"
-        )
+        msg = "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled at the same time"
         raise ConfigurationError(msg)
 
-    #
-
-
-
-
-
-
+    # Context-aware validation: different requirements for GH Actions vs CLI
+    is_github_actions = _is_github_actions_context()
+
+    # SSH private key is always required
+    required_fields = ["gerrit_ssh_privkey_g2g"]
+
+    # Gerrit parameters can be derived in GH Actions if organization available
+    # In local CLI context, we're more strict about explicit configuration
+    if is_github_actions:
+        # In GitHub Actions: allow derivation if organization is available
+        if not data.organization:
+            required_fields.extend(
+                [
+                    "gerrit_ssh_user_g2g",
+                    "gerrit_ssh_user_g2g_email",
+                ]
+            )
+    else:
+        # In local CLI: require explicit values or organization + derivation
+        # This prevents unexpected behavior when running locally
+        missing_gerrit_params = [
+            field for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"] if not getattr(data, field)
+        ]
+        if missing_gerrit_params:
+            if data.organization:
+                log.info(
+                    "Local CLI usage: Gerrit parameters can be derived from "
+                    "organization '%s'. Missing: %s. Consider setting "
+                    "G2G_ENABLE_DERIVATION=true to enable derivation.",
+                    data.organization,
+                    ", ".join(missing_gerrit_params),
+                )
+                # Allow derivation in local mode only if explicitly enabled
+                if not _env_bool("G2G_ENABLE_DERIVATION", False):
+                    required_fields.extend(missing_gerrit_params)
+            else:
+                required_fields.extend(missing_gerrit_params)
+
+    for field_name in required_fields:
         if not getattr(data, field_name):
             log.error("Missing required input: %s", field_name)
-
+            if field_name in [
+                "gerrit_ssh_user_g2g",
+                "gerrit_ssh_user_g2g_email",
+            ]:
+                if data.organization:
+                    if is_github_actions:
+                        log.error(
+                            "These fields can be derived automatically from "
+                            "organization '%s' if G2G_ENABLE_DERIVATION=true",
+                            data.organization,
+                        )
+                    else:
+                        log.error(
+                            "These fields can be derived from organization '%s'",
+                            data.organization,
+                        )
+                        log.error("Set G2G_ENABLE_DERIVATION=true to enable")
+                else:
+                    log.error("These fields require either explicit values or an ORGANIZATION for derivation")
+            raise ConfigurationError(_MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name))
 
     # Validate fetch depth is a positive integer
     if data.fetch_depth <= 0:
         log.error("Invalid FETCH_DEPTH: %s", data.fetch_depth)
-        raise ConfigurationError(
+        raise ConfigurationError(_MSG_INVALID_FETCH_DEPTH)
 
     # Validate Issue ID is a single line string if provided
     if data.issue_id and ("\n" in data.issue_id or "\r" in data.issue_id):
-        raise ConfigurationError(
+        raise ConfigurationError(_MSG_ISSUE_ID_MULTILINE)
 
 
 def _log_effective_config(data: Inputs, gh: GitHubContext) -> None:
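_is_github_actions_context comes from config.py and its body is not part of this diff; GitHub documents that the GITHUB_ACTIONS variable is set to "true" inside workflow runs, so the conventional check looks like the sketch below. The second function is a condensed restatement of the validation split added above, not the project's code:

import os


def is_github_actions_context() -> bool:
    # Conventional runner detection via the documented GITHUB_ACTIONS variable.
    return os.getenv("GITHUB_ACTIONS", "").lower() == "true"


def required_ssh_fields(organization: str, derivation_enabled: bool) -> list[str]:
    # The SSH key is always required; user/email become required only when
    # they cannot plausibly be derived (no organization, or local CLI without
    # G2G_ENABLE_DERIVATION).
    required = ["gerrit_ssh_privkey_g2g"]
    if not organization or (not is_github_actions_context() and not derivation_enabled):
        required += ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"]
    return required


print(required_ssh_fields("example-org", derivation_enabled=False))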
@@ -848,10 +938,8 @@ def _log_effective_config(data: Inputs, gh: GitHubContext) -> None:
     log.info(" SUBMIT_SINGLE_COMMITS: %s", data.submit_single_commits)
     log.info(" USE_PR_AS_COMMIT: %s", data.use_pr_as_commit)
     log.info(" FETCH_DEPTH: %s", data.fetch_depth)
-
-
-        "<provided>" if data.gerrit_known_hosts else "<missing>",
-    )
+    known_hosts_status = "<provided>" if data.gerrit_known_hosts else "<will auto-discover>"
+    log.info(" GERRIT_KNOWN_HOSTS: %s", known_hosts_status)
     log.info(" GERRIT_SSH_PRIVKEY_G2G: %s", safe_privkey)
     log.info(" GERRIT_SSH_USER_G2G: %s", data.gerrit_ssh_user_g2g)
     log.info(" GERRIT_SSH_USER_G2G_EMAIL: %s", data.gerrit_ssh_user_g2g_email)