github2gerrit 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their public registries.
github2gerrit/cli.py CHANGED
@@ -6,6 +6,7 @@ from __future__ import annotations
  import json
  import logging
  import os
+ import sys
  import tempfile
  from collections.abc import Callable
  from concurrent.futures import ThreadPoolExecutor
@@ -31,12 +32,20 @@ from .core import SubmissionResult
  from .duplicate_detection import DuplicateChangeError
  from .duplicate_detection import check_for_duplicates
  from .github_api import build_client
+ from .github_api import get_pr_title_body
  from .github_api import get_pull
  from .github_api import get_repo_from_env
  from .github_api import iter_open_pulls
  from .gitutils import run_cmd
  from .models import GitHubContext
  from .models import Inputs
+ from .rich_display import RICH_AVAILABLE
+ from .rich_display import DummyProgressTracker
+ from .rich_display import G2GProgressTracker
+ from .rich_display import display_pr_info
+ from .rich_display import safe_console_print
+ from .rich_display import safe_typer_echo
+ from .rich_logging import setup_rich_aware_logging
  from .ssh_common import build_git_ssh_command
  from .ssh_common import build_non_interactive_ssh_env
  from .utils import append_github_output
@@ -46,6 +55,104 @@ from .utils import log_exception_conditionally
  from .utils import parse_bool_env


+ def get_version(package: str) -> str:
+ """Get package version, trying importlib.metadata first, then fallback."""
+ try:
+ from importlib.metadata import version as stdlib_version
+
+ return str(stdlib_version(package))
+ except ImportError:
+ from importlib_metadata import version as backport_version
+
+ return str(backport_version(package))
+
+
+ def _exit_for_pr_state_error(pr_number: int, pr_state: str) -> None:
+ """Exit with error message for invalid PR state."""
+ error_msg = (
+ f"❌ Pull request #{pr_number} is {pr_state} and cannot be processed"
+ )
+ safe_console_print(error_msg, style="red", err=True)
+ raise typer.Exit(1)
+
+
+ def _exit_for_pr_not_found(pr_number: int, repository: str) -> None:
+ """Exit with error message for PR not found."""
+ error_msg = (
+ f"❌ Pull request #{pr_number} not found in repository {repository}"
+ )
+ safe_console_print(error_msg, style="red", err=True)
+ raise typer.Exit(1)
+
+
+ def _exit_for_pr_fetch_error(exc: Exception) -> None:
+ """Exit with error message for PR fetch failure."""
+ error_msg = f"❌ Failed to fetch PR details: {exc}"
+ safe_console_print(error_msg, style="red", err=True)
+ raise typer.Exit(1)
+
+
+ def _extract_and_display_pr_info(
+ gh: GitHubContext,
+ progress_tracker: Any = None,
+ ) -> None:
+ """Extract PR information and display it with Rich formatting."""
+ if not gh.pr_number:
+ return
+
+ try:
+ # Get GitHub token and build client
+ token = os.getenv("GITHUB_TOKEN", "")
+ if not token:
+ safe_console_print(
+ "⚠️ No GITHUB_TOKEN available - skipping PR info display",
+ style="yellow",
+ progress_tracker=progress_tracker,
+ )
+ return
+
+ client = build_client(token)
+ repo = get_repo_from_env(client)
+ pr_obj = get_pull(repo, int(gh.pr_number))
+
+ # Check PR state and exit if not processable
+ pr_state = getattr(pr_obj, "state", "unknown")
+ if pr_state != "open":
+ _exit_for_pr_state_error(gh.pr_number, pr_state)
+
+ # Extract PR information
+ title, _body = get_pr_title_body(pr_obj)
+
+ # Get additional PR details
+ pr_info = {
+ "Repository": gh.repository,
+ "PR Number": gh.pr_number,
+ "Title": title or "No title",
+ "Author": getattr(getattr(pr_obj, "user", None), "login", "Unknown")
+ or "Unknown",
+ "Base Branch": gh.base_ref or "unknown",
+ "SHA": gh.sha or "unknown",
+ "URL": f"{gh.server_url}/{gh.repository}/pull/{gh.pr_number}",
+ }
+
+ # Add file changes count if available
+ try:
+ files = list(getattr(pr_obj, "get_files", list)())
+ pr_info["Files Changed"] = len(files)
+ except Exception:
+ pr_info["Files Changed"] = "unknown"
+
+ # Display the PR information
+ display_pr_info(pr_info, "Pull Request Details", progress_tracker)
+
+ except Exception as exc:
+ log.debug("Failed to display PR info: %s", exc)
+ if "404" in str(exc) or "Not Found" in str(exc):
+ _exit_for_pr_not_found(gh.pr_number, gh.repository)
+ else:
+ _exit_for_pr_fetch_error(exc)
+
+
  class ConfigurationError(Exception):
  """Raised when configuration validation fails.

@@ -114,9 +221,13 @@ class _ContextProto(Protocol):


  class _SingleUsageGroup(BaseGroup):
- def format_usage(self, ctx: _ContextProto, formatter: _FormatterProto) -> None:
+ def format_usage(
+ self, ctx: _ContextProto, formatter: _FormatterProto
+ ) -> None:
  # Force a simplified usage line without COMMAND [ARGS]...
- formatter.write_usage(ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: ")
+ formatter.write_usage(
+ ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: "
+ )


  # Error message constants to comply with TRY003
@@ -124,10 +235,20 @@ _MSG_MISSING_REQUIRED_INPUT = "Missing required input: {field_name}"
  _MSG_INVALID_FETCH_DEPTH = "FETCH_DEPTH must be a positive integer"
  _MSG_ISSUE_ID_MULTILINE = "Issue ID must be single line"

+ # Show version information when --help is used or in GitHub Actions mode
+ if "--help" in sys.argv or _is_github_actions_context():
+ try:
+ app_version = get_version("github2gerrit")
+ print(f"🏷️ github2gerrit version {app_version}")
+ except Exception:
+ print("⚠️ github2gerrit version information not available")
+
  app: typer.Typer = typer.Typer(
  add_completion=False,
  no_args_is_help=False,
  cls=cast(Any, _SingleUsageGroup),
+ rich_markup_mode="rich",
+ help="Tool to convert GitHub pull requests into Gerrit changes",
  )

@@ -144,7 +265,9 @@ def _resolve_org(default_org: str | None) -> str:
  if TYPE_CHECKING:
  F = TypeVar("F", bound=Callable[..., object])

- def typed_app_command(*args: object, **kwargs: object) -> Callable[[F], F]: ...
+ def typed_app_command(
+ *args: object, **kwargs: object
+ ) -> Callable[[F], F]: ...
  else:
  typed_app_command = app.command

@@ -157,17 +280,46 @@ def main(
  help="GitHub repository or PR URL",
  metavar="TARGET_URL",
  ),
- submit_single_commits: bool = typer.Option(
+ allow_duplicates: bool = typer.Option(
  False,
- "--submit-single-commits",
- envvar="SUBMIT_SINGLE_COMMITS",
- help="Submit one commit at a time to the Gerrit repository.",
+ "--allow-duplicates",
+ envvar="ALLOW_DUPLICATES",
+ help="Allow submitting duplicate changes without error.",
  ),
- use_pr_as_commit: bool = typer.Option(
+ allow_ghe_urls: bool = typer.Option(
  False,
- "--use-pr-as-commit",
- envvar="USE_PR_AS_COMMIT",
- help="Use PR title and body as the commit message.",
+ "--allow-ghe-urls/--no-allow-ghe-urls",
+ envvar="ALLOW_GHE_URLS",
+ help="Allow non-github.com GitHub Enterprise URLs in direct URL mode.",
+ ),
+ allow_orphan_changes: bool = typer.Option(
+ False,
+ "--allow-orphan-changes/--no-allow-orphan-changes",
+ envvar="ALLOW_ORPHAN_CHANGES",
+ help="Keep unmatched Gerrit changes without warning.",
+ ),
+ ci_testing: bool = typer.Option(
+ False,
+ "--ci-testing/--no-ci-testing",
+ envvar="CI_TESTING",
+ help="Enable CI testing mode (overrides .gitreview, handles "
+ "unrelated repos).",
+ ),
+ dry_run: bool = typer.Option(
+ False,
+ "--dry-run",
+ envvar="DRY_RUN",
+ help="Validate settings and PR metadata; do not write to Gerrit.",
+ ),
+ duplicate_types: str = typer.Option(
+ "open",
+ "--duplicate-types",
+ envvar="DUPLICATE_TYPES",
+ help=(
+ "Gerrit change states to evaluate when determining if a change "
+ "should be considered a duplicate "
+ '(comma-separated). E.g. "open,merged,abandoned". Default: "open".'
+ ),
  ),
  fetch_depth: int = typer.Option(
  10,
@@ -181,6 +333,24 @@ def main(
  envvar="GERRIT_KNOWN_HOSTS",
  help="Known hosts entries for Gerrit SSH (single or multi-line).",
  ),
+ gerrit_project: str = typer.Option(
+ "",
+ "--gerrit-project",
+ envvar="GERRIT_PROJECT",
+ help="Gerrit project (optional; .gitreview preferred).",
+ ),
+ gerrit_server: str = typer.Option(
+ "",
+ "--gerrit-server",
+ envvar="GERRIT_SERVER",
+ help="Gerrit server hostname (optional; .gitreview preferred).",
+ ),
+ gerrit_server_port: int = typer.Option(
+ 29418,
+ "--gerrit-server-port",
+ envvar="GERRIT_SERVER_PORT",
+ help="Gerrit SSH port (default: 29418).",
+ ),
  gerrit_ssh_privkey_g2g: str = typer.Option(
  "",
  "--gerrit-ssh-privkey-g2g",
@@ -199,23 +369,35 @@ def main(
  envvar="GERRIT_SSH_USER_G2G_EMAIL",
  help="Email address for the Gerrit SSH user.",
  ),
+ issue_id: str = typer.Option(
+ "",
+ "--issue-id",
+ envvar="ISSUE_ID",
+ help="Issue ID to include in commit message (e.g., Issue-ID: ABC-123).",
+ ),
+ log_reconcile_json: bool = typer.Option(
+ True,
+ "--log-reconcile-json/--no-log-reconcile-json",
+ envvar="LOG_RECONCILE_JSON",
+ help="Emit structured JSON reconciliation summary.",
+ ),
+ normalise_commit: bool = typer.Option(
+ True,
+ "--normalise-commit/--no-normalise-commit",
+ envvar="NORMALISE_COMMIT",
+ help="Normalize commit messages to conventional commit format.",
+ ),
  organization: str | None = typer.Option(
  None,
  "--organization",
  envvar="ORGANIZATION",
  help=("Organization (defaults to GITHUB_REPOSITORY_OWNER when unset)."),
  ),
- reviewers_email: str = typer.Option(
- "",
- "--reviewers-email",
- envvar="REVIEWERS_EMAIL",
- help="Comma-separated list of reviewer emails.",
- ),
- allow_ghe_urls: bool = typer.Option(
- False,
- "--allow-ghe-urls/--no-allow-ghe-urls",
- envvar="ALLOW_GHE_URLS",
- help="Allow non-github.com GitHub Enterprise URLs in direct URL mode.",
+ persist_single_mapping_comment: bool = typer.Option(
+ True,
+ "--persist-single-mapping-comment/--no-persist-single-mapping-comment",
+ envvar="PERSIST_SINGLE_MAPPING_COMMENT",
+ help="Replace existing mapping comment instead of appending.",
  ),
  preserve_github_prs: bool = typer.Option(
  False,
@@ -223,61 +405,48 @@ def main(
  envvar="PRESERVE_GITHUB_PRS",
  help="Do not close GitHub PRs after pushing to Gerrit.",
  ),
- dry_run: bool = typer.Option(
- False,
- "--dry-run",
- envvar="DRY_RUN",
- help="Validate settings and PR metadata; do not write to Gerrit.",
+ reuse_strategy: str = typer.Option(
+ "topic+comment",
+ "--reuse-strategy",
+ envvar="REUSE_STRATEGY",
+ help="Strategy for reusing Change-IDs: topic, comment, "
+ "topic+comment, none.",
  ),
- gerrit_server: str = typer.Option(
+ reviewers_email: str = typer.Option(
  "",
- "--gerrit-server",
- envvar="GERRIT_SERVER",
- help="Gerrit server hostname (optional; .gitreview preferred).",
+ "--reviewers-email",
+ envvar="REVIEWERS_EMAIL",
+ help="Comma-separated list of reviewer emails.",
  ),
- gerrit_server_port: str = typer.Option(
- "29418",
- "--gerrit-server-port",
- envvar="GERRIT_SERVER_PORT",
- help="Gerrit SSH port (default: 29418).",
+ show_progress: bool = typer.Option(
+ True,
+ "--progress/--no-progress",
+ envvar="G2G_SHOW_PROGRESS",
+ help="Show real-time progress updates with Rich formatting.",
  ),
- gerrit_project: str = typer.Option(
- "",
- "--gerrit-project",
- envvar="GERRIT_PROJECT",
- help="Gerrit project (optional; .gitreview preferred).",
+ similarity_files: bool = typer.Option(
+ True,
+ "--similarity-files/--no-similarity-files",
+ envvar="SIMILARITY_FILES",
+ help="Require exact file signature match for reconciliation.",
  ),
- issue_id: str = typer.Option(
- "",
- "--issue-id",
- envvar="ISSUE_ID",
- help="Issue ID to include in commit message (e.g., Issue-ID: ABC-123).",
+ similarity_subject: float = typer.Option(
+ 0.7,
+ "--similarity-subject",
+ envvar="SIMILARITY_SUBJECT",
+ help="Subject token Jaccard similarity threshold (0.0-1.0).",
  ),
- allow_duplicates: bool = typer.Option(
+ submit_single_commits: bool = typer.Option(
  False,
- "--allow-duplicates",
- envvar="ALLOW_DUPLICATES",
- help="Allow submitting duplicate changes without error.",
+ "--submit-single-commits",
+ envvar="SUBMIT_SINGLE_COMMITS",
+ help="Submit one commit at a time to the Gerrit repository.",
  ),
- ci_testing: bool = typer.Option(
+ use_pr_as_commit: bool = typer.Option(
  False,
- "--ci-testing/--no-ci-testing",
- envvar="CI_TESTING",
- help="Enable CI testing mode (overrides .gitreview, handles unrelated repos).",
- ),
- duplicates: str = typer.Option(
- "open",
- "--duplicates",
- envvar="DUPLICATES",
- help=(
- 'Gerrit statuses for duplicate detection (comma-separated). E.g. "open,merged,abandoned". Default: "open".'
- ),
- ),
- normalise_commit: bool = typer.Option(
- True,
- "--normalise-commit/--no-normalise-commit",
- envvar="NORMALISE_COMMIT",
- help="Normalize commit messages to conventional commit format.",
+ "--use-pr-as-commit",
+ envvar="USE_PR_AS_COMMIT",
+ help="Use PR title and body as the commit message.",
  ),
  verbose: bool = typer.Option(
  False,
@@ -286,23 +455,35 @@ def main(
  envvar="G2G_VERBOSE",
  help="Verbose output (sets loglevel to DEBUG).",
  ),
+ version_flag: bool = typer.Option(
+ False,
+ "--version",
+ help="Show version and exit.",
+ ),
  ) -> None:
  """
  Tool to convert GitHub pull requests into Gerrit changes

- - Providing a URL to a pull request: converts that pull request
- into a Gerrit change
+ - PR URL: converts the pull request to Gerrit change
+ - Repo URL: converts all open pull requests to Gerrit changes
+ - No arguments: uses environment variables (CI/CD mode)
+ """

- - Providing a URL to a GitHub repository converts all open pull
- requests into Gerrit changes
+ # Handle version flag first
+ if version_flag:
+ try:
+ app_version = get_version("github2gerrit")
+ typer.echo(f"github2gerrit version {app_version}")
+ except Exception:
+ typer.echo("Version information not available")
+ raise typer.Exit(code=0)

- - No arguments for CI/CD environment; reads parameters from
- environment variables
- """
  # Override boolean parameters with properly parsed environment variables
  # This ensures that string "false" from GitHub Actions is handled correctly
  if os.getenv("SUBMIT_SINGLE_COMMITS"):
- submit_single_commits = parse_bool_env(os.getenv("SUBMIT_SINGLE_COMMITS"))
+ submit_single_commits = parse_bool_env(
+ os.getenv("SUBMIT_SINGLE_COMMITS")
+ )

  if os.getenv("USE_PR_AS_COMMIT"):
  use_pr_as_commit = parse_bool_env(os.getenv("USE_PR_AS_COMMIT"))
@@ -318,13 +499,50 @@ def main(

  if os.getenv("CI_TESTING"):
  ci_testing = parse_bool_env(os.getenv("CI_TESTING"))
+
+ if os.getenv("SIMILARITY_FILES"):
+ similarity_files = parse_bool_env(os.getenv("SIMILARITY_FILES"))
+
+ if os.getenv("ALLOW_ORPHAN_CHANGES"):
+ allow_orphan_changes = parse_bool_env(os.getenv("ALLOW_ORPHAN_CHANGES"))
+
+ if os.getenv("PERSIST_SINGLE_MAPPING_COMMENT"):
+ persist_single_mapping_comment = parse_bool_env(
+ os.getenv("PERSIST_SINGLE_MAPPING_COMMENT")
+ )
+
+ if os.getenv("LOG_RECONCILE_JSON"):
+ log_reconcile_json = parse_bool_env(os.getenv("LOG_RECONCILE_JSON"))
  # Set up logging level based on verbose flag
  if verbose:
  os.environ["G2G_LOG_LEVEL"] = "DEBUG"
  _reconfigure_logging()
+
+ # Initialize Rich-aware logging system
+ setup_rich_aware_logging()
+
+ # Log version to logs in GitHub Actions environment
+ if _is_github_actions_context():
+ try:
+ app_version = get_version("github2gerrit")
+ log.info("github2gerrit version %s", app_version)
+ except Exception:
+ log.warning("Version information not available")
+
+ # Show initial progress if Rich is available and progress is enabled
+ if show_progress and not RICH_AVAILABLE:
+ safe_console_print(
+ "📋 Rich formatting not available - progress will be shown as "
+ "simple text..."
+ )
+
+ # Store progress flag in environment for use by processing functions
+ os.environ["G2G_SHOW_PROGRESS"] = "true" if show_progress else "false"
  # Normalize CLI options into environment for unified processing.
  # Explicitly set all boolean flags to ensure consistent behavior
- os.environ["SUBMIT_SINGLE_COMMITS"] = "true" if submit_single_commits else "false"
+ os.environ["SUBMIT_SINGLE_COMMITS"] = (
+ "true" if submit_single_commits else "false"
+ )
  os.environ["USE_PR_AS_COMMIT"] = "true" if use_pr_as_commit else "false"
  os.environ["FETCH_DEPTH"] = str(fetch_depth)
  if gerrit_known_hosts:
@@ -340,22 +558,34 @@ def main(
  os.environ["ORGANIZATION"] = resolved_org
  if reviewers_email:
  os.environ["REVIEWERS_EMAIL"] = reviewers_email
- os.environ["PRESERVE_GITHUB_PRS"] = "true" if preserve_github_prs else "false"
+ os.environ["PRESERVE_GITHUB_PRS"] = (
+ "true" if preserve_github_prs else "false"
+ )
  os.environ["DRY_RUN"] = "true" if dry_run else "false"
  os.environ["NORMALISE_COMMIT"] = "true" if normalise_commit else "false"
  os.environ["ALLOW_GHE_URLS"] = "true" if allow_ghe_urls else "false"
  if gerrit_server:
  os.environ["GERRIT_SERVER"] = gerrit_server
  if gerrit_server_port:
- os.environ["GERRIT_SERVER_PORT"] = gerrit_server_port
+ os.environ["GERRIT_SERVER_PORT"] = str(gerrit_server_port)
  if gerrit_project:
  os.environ["GERRIT_PROJECT"] = gerrit_project
  if issue_id:
  os.environ["ISSUE_ID"] = issue_id
  os.environ["ALLOW_DUPLICATES"] = "true" if allow_duplicates else "false"
  os.environ["CI_TESTING"] = "true" if ci_testing else "false"
- if duplicates:
- os.environ["DUPLICATES"] = duplicates
+ os.environ["DUPLICATE_TYPES"] = duplicate_types
+ if reuse_strategy:
+ os.environ["REUSE_STRATEGY"] = reuse_strategy
+ os.environ["SIMILARITY_SUBJECT"] = str(similarity_subject)
+ os.environ["SIMILARITY_FILES"] = "true" if similarity_files else "false"
+ os.environ["ALLOW_ORPHAN_CHANGES"] = (
+ "true" if allow_orphan_changes else "false"
+ )
+ os.environ["PERSIST_SINGLE_MAPPING_COMMENT"] = (
+ "true" if persist_single_mapping_comment else "false"
+ )
+ os.environ["LOG_RECONCILE_JSON"] = "true" if log_reconcile_json else "false"
  # URL mode handling
  if target_url:
  org, repo, pr = _parse_github_target(target_url)
@@ -395,7 +625,10 @@ def main(
  def _setup_logging() -> logging.Logger:
  level_name = os.getenv("G2G_LOG_LEVEL", "INFO").upper()
  level = getattr(logging, level_name, logging.INFO)
- fmt = "%(asctime)s %(levelname)-8s %(name)s %(filename)s:%(lineno)d | %(message)s"
+ fmt = (
+ "%(asctime)s %(levelname)-8s %(name)s %(filename)s:%(lineno)d | "
+ "%(message)s"
+ )
  logging.basicConfig(level=level, format=fmt)
  return logging.getLogger(APP_NAME)

@@ -421,22 +654,47 @@ def _build_inputs_from_env() -> Inputs:
  gerrit_ssh_privkey_g2g=env_str("GERRIT_SSH_PRIVKEY_G2G"),
  gerrit_ssh_user_g2g=env_str("GERRIT_SSH_USER_G2G"),
  gerrit_ssh_user_g2g_email=env_str("GERRIT_SSH_USER_G2G_EMAIL"),
- organization=env_str("ORGANIZATION", env_str("GITHUB_REPOSITORY_OWNER")),
+ organization=env_str(
+ "ORGANIZATION", env_str("GITHUB_REPOSITORY_OWNER")
+ ),
  reviewers_email=env_str("REVIEWERS_EMAIL", ""),
  preserve_github_prs=env_bool("PRESERVE_GITHUB_PRS", False),
  dry_run=env_bool("DRY_RUN", False),
  normalise_commit=env_bool("NORMALISE_COMMIT", True),
  gerrit_server=env_str("GERRIT_SERVER", ""),
- gerrit_server_port=env_str("GERRIT_SERVER_PORT", "29418"),
+ gerrit_server_port=int(
+ env_str("GERRIT_SERVER_PORT", "29418") or "29418"
+ ),
  gerrit_project=env_str("GERRIT_PROJECT"),
  issue_id=env_str("ISSUE_ID", ""),
  allow_duplicates=env_bool("ALLOW_DUPLICATES", False),
  ci_testing=env_bool("CI_TESTING", False),
- duplicates_filter=env_str("DUPLICATES", "open"),
+ duplicates_filter=env_str("DUPLICATE_TYPES", "open"),
+ reuse_strategy=env_str("REUSE_STRATEGY", "topic+comment"),
+ similarity_subject=float(env_str("SIMILARITY_SUBJECT", "0.7") or "0.7"),
+ similarity_files=env_bool("SIMILARITY_FILES", True),
+ allow_orphan_changes=env_bool("ALLOW_ORPHAN_CHANGES", False),
+ persist_single_mapping_comment=env_bool(
+ "PERSIST_SINGLE_MAPPING_COMMENT", True
+ ),
+ log_reconcile_json=env_bool("LOG_RECONCILE_JSON", True),
  )


  def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
+ # Initialize progress tracker for bulk processing
+ show_progress = env_bool("G2G_SHOW_PROGRESS", True)
+ progress_tracker: Any = None
+
+ if show_progress:
+ target = gh.repository
+ progress_tracker = G2GProgressTracker(target)
+ progress_tracker.start()
+ progress_tracker.update_operation("Getting repository and PRs...")
+ else:
+ target = gh.repository
+ progress_tracker = DummyProgressTracker("GitHub to Gerrit", target)
+
  client = build_client()
  repo = get_repo_from_env(client)

@@ -447,6 +705,11 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
  prs_list = list(iter_open_pulls(repo))
  log.info("Found %d open PRs to process", len(prs_list))

+ if progress_tracker:
+ progress_tracker.update_operation(
+ f"Processing {len(prs_list)} open PRs..."
+ )
+
  # Result tracking for summary
  processed_count = 0
  succeeded_count = 0
@@ -466,22 +729,42 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
  if pr_number <= 0:
  return "invalid", None, None

- log.info("Starting processing of PR #%d", pr_number)
+ log.debug("Starting processing of PR #%d", pr_number)
  log.debug(
- "Processing PR #%d in multi-PR mode with event_name=%s, event_action=%s",
+ "Processing PR #%d in multi-PR mode with event_name=%s, "
+ "event_action=%s",
  pr_number,
  gh.event_name,
  gh.event_action,
  )

+ # Update progress tracker for this PR
+ if progress_tracker:
+ progress_tracker.update_operation(f"Processing PR #{pr_number}...")
+ progress_tracker.pr_processed()
+
  try:
  if data.duplicates_filter:
- os.environ["DUPLICATES"] = data.duplicates_filter
- check_for_duplicates(per_ctx, allow_duplicates=data.allow_duplicates)
+ os.environ["DUPLICATE_TYPES"] = data.duplicates_filter
+ # Generate expected GitHub hash for trailer-aware duplicate
+ # detection
+ from .duplicate_detection import DuplicateDetector
+
+ expected_github_hash = (
+ DuplicateDetector._generate_github_change_hash(per_ctx)
+ )
+ check_for_duplicates(
+ per_ctx,
+ allow_duplicates=data.allow_duplicates,
+ expected_github_hash=expected_github_hash,
+ )
  except DuplicateChangeError as exc:
+ if progress_tracker:
+ progress_tracker.duplicate_skipped()
  log_exception_conditionally(log, "Skipping PR #%d", pr_number)
  log.warning(
- "Skipping PR #%d due to duplicate detection: %s. Use --allow-duplicates to override this check.",
+ "Skipping PR #%d due to duplicate detection: %s. Use "
+ "--allow-duplicates to override this check.",
  pr_number,
  exc,
  )
@@ -492,9 +775,20 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
  workspace = Path(temp_dir)
  orch = Orchestrator(workspace=workspace)
  result_multi = orch.execute(inputs=data, gh=per_ctx)
+ if progress_tracker and result_multi.change_urls:
+ if any(
+ "new" in url.lower() for url in result_multi.change_urls
+ ):
+ progress_tracker.change_submitted()
+ else:
+ progress_tracker.change_updated()
  return "success", result_multi, None
  except Exception as exc:
- log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
+ if progress_tracker:
+ progress_tracker.add_error(f"PR #{pr_number} processing failed")
+ log_exception_conditionally(
+ log, "Failed to process PR #%d", pr_number
+ )
  return "failed", None, exc

  # Prepare PR processing tasks
@@ -521,7 +815,11 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:

  # Process PRs in parallel
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
- log.info("Processing %d PRs with %d parallel workers", len(pr_tasks), max_workers)
+ log.info(
+ "Processing %d PRs with %d parallel workers",
+ len(pr_tasks),
+ max_workers,
+ )

  # Submit all tasks
  future_to_pr = {
@@ -544,25 +842,43 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
  all_urls.extend(result_multi.change_urls)
  for url in result_multi.change_urls:
  log.info("Gerrit change URL: %s", url)
- log.info("PR #%d created Gerrit change: %s", pr_number, url)
+ log.info(
+ "PR #%d created Gerrit change: %s",
+ pr_number,
+ url,
+ )
  if result_multi.change_numbers:
  all_nums.extend(result_multi.change_numbers)
- log.info("PR #%d change numbers: %s", pr_number, result_multi.change_numbers)
+ log.info(
+ "PR #%d change numbers: %s",
+ pr_number,
+ result_multi.change_numbers,
+ )
  if result_multi.commit_shas:
  all_shas.extend(result_multi.commit_shas)
  elif status == "skipped":
  skipped_count += 1
  elif status == "failed":
  failed_count += 1
- typer.echo(f"Failed to process PR #{pr_number}: {exc}")
+ safe_typer_echo(
+ f"Failed to process PR #{pr_number}: {exc}",
+ progress_tracker=progress_tracker,
+ err=True,
+ )
  log.info("Continuing to next PR despite failure")
  else:
  failed_count += 1

  except Exception as exc:
  failed_count += 1
- log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
- typer.echo(f"Failed to process PR #{pr_number}: {exc}")
+ log_exception_conditionally(
+ log, "Failed to process PR #%d", pr_number
+ )
+ safe_typer_echo(
+ f"Failed to process PR #{pr_number}: {exc}",
+ progress_tracker=progress_tracker,
+ err=True,
+ )
  log.info("Continuing to next PR despite failure")

  # Aggregate results and provide summary
@@ -575,12 +891,46 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:

  append_github_output(
  {
- "gerrit_change_request_url": "\n".join(all_urls) if all_urls else "",
- "gerrit_change_request_num": "\n".join(all_nums) if all_nums else "",
+ "gerrit_change_request_url": "\n".join(all_urls)
+ if all_urls
+ else "",
+ "gerrit_change_request_num": "\n".join(all_nums)
+ if all_nums
+ else "",
  "gerrit_commit_sha": "\n".join(all_shas) if all_shas else "",
  }
  )

+ # Stop progress tracker and show final results
+ if progress_tracker:
+ if failed_count == 0:
+ progress_tracker.update_operation(
+ "Bulk processing completed successfully"
+ )
+ else:
+ progress_tracker.add_error("Some PRs failed processing")
+ # Aggregate results and provide summary
+ if progress_tracker:
+ progress_tracker.stop()
+
+ # Show summary after progress tracker is stopped
+ if show_progress and RICH_AVAILABLE:
+ summary = progress_tracker.get_summary() if progress_tracker else {}
+ safe_console_print(
+ "\n✅ Bulk processing completed!"
+ if failed_count == 0
+ else "\n⚠️ Bulk processing completed with errors!"
+ )
+ safe_console_print(
+ f"⏱️ Total time: {summary.get('elapsed_time', 'unknown')}"
+ )
+ safe_console_print(f"📊 PRs processed: {processed_count}")
+ safe_console_print(f"✅ Succeeded: {succeeded_count}")
+ safe_console_print(f"⏭️ Skipped: {skipped_count}")
+ if failed_count > 0:
+ safe_console_print(f"❌ Failed: {failed_count}")
+ safe_console_print(f"🔗 Gerrit changes created: {len(all_urls)}")
+
  # Summary block
  log.info("=" * 60)
  log.info("BULK PROCESSING SUMMARY:")
@@ -595,50 +945,101 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
  return failed_count == 0


- def _process_single(data: Inputs, gh: GitHubContext) -> bool:
+ def _process_single(
+ data: Inputs,
+ gh: GitHubContext,
+ progress_tracker: Any = None,
+ ) -> bool:
  # Create temporary directory for all git operations
  with tempfile.TemporaryDirectory() as temp_dir:
  workspace = Path(temp_dir)

  try:
+ if progress_tracker:
+ progress_tracker.update_operation("Preparing local checkout...")
  _prepare_local_checkout(workspace, gh, data)
  except Exception as exc:
  log.debug("Local checkout preparation failed: %s", exc)
+ if progress_tracker:
+ progress_tracker.add_error("Checkout preparation failed")
+
+ if progress_tracker:
+ progress_tracker.update_operation(
+ "Configuring SSH authentication..."
+ )

  orch = Orchestrator(workspace=workspace)
+
+ if progress_tracker:
+ progress_tracker.update_operation(
+ "Extracting commit information..."
+ )
+
  pipeline_success = False
  try:
+ if progress_tracker:
+ progress_tracker.update_operation("Submitting to Gerrit...")
  result = orch.execute(inputs=data, gh=gh)
  pipeline_success = True
+ if progress_tracker:
+ progress_tracker.pr_processed()
+ if progress_tracker and result.change_urls:
+ if any("new" in url.lower() for url in result.change_urls):
+ progress_tracker.change_submitted()
+ else:
+ progress_tracker.change_updated()
  except Exception as exc:
  log.debug("Execution failed; continuing to write outputs: %s", exc)
+ if progress_tracker:
+ progress_tracker.add_error("Execution failed")

- result = SubmissionResult(change_urls=[], change_numbers=[], commit_shas=[])
+ result = SubmissionResult(
+ change_urls=[], change_numbers=[], commit_shas=[]
+ )
  if result.change_urls:
- os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(result.change_urls)
+ os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(
+ result.change_urls
+ )
  # Output Gerrit change URL(s) to console
  for url in result.change_urls:
- log.info("Gerrit change URL: %s", url)
+ log.debug("Gerrit change URL: %s", url)
+ safe_console_print(
+ f"🔗 Gerrit change URL: {url}",
+ style="green",
+ progress_tracker=progress_tracker,
+ )
  if result.change_numbers:
- os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(result.change_numbers)
+ os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(
+ result.change_numbers
+ )
  if result.commit_shas:
  os.environ["GERRIT_COMMIT_SHA"] = "\n".join(result.commit_shas)

  # Also write outputs to GITHUB_OUTPUT if available
  append_github_output(
  {
- "gerrit_change_request_url": "\n".join(result.change_urls) if result.change_urls else "",
- "gerrit_change_request_num": "\n".join(result.change_numbers) if result.change_numbers else "",
- "gerrit_commit_sha": "\n".join(result.commit_shas) if result.commit_shas else "",
+ "gerrit_change_request_url": "\n".join(result.change_urls)
+ if result.change_urls
+ else "",
+ "gerrit_change_request_num": "\n".join(result.change_numbers)
+ if result.change_numbers
+ else "",
+ "gerrit_commit_sha": "\n".join(result.commit_shas)
+ if result.commit_shas
+ else "",
  }
  )

  return pipeline_success


- def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) -> None:
+ def _prepare_local_checkout(
+ workspace: Path, gh: GitHubContext, data: Inputs
+ ) -> None:
  repo_full = gh.repository.strip() if gh.repository else ""
- server_url = gh.server_url or os.getenv("GITHUB_SERVER_URL", "https://github.com")
+ server_url = gh.server_url or os.getenv(
+ "GITHUB_SERVER_URL", "https://github.com"
+ )
  server_url = (server_url or "https://github.com").rstrip("/")
  base_ref = gh.base_ref or ""
  pr_num_str: str = str(gh.pr_number) if gh.pr_number else "0"
@@ -647,7 +1048,10 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
  return

  # Try SSH first for private repos if available, then fall back to HTTPS/API
- repo_ssh_url = f"git@{server_url.replace('https://', '').replace('http://', '')}:{repo_full}.git"
+ repo_ssh_url = (
+ f"git@{server_url.replace('https://', '').replace('http://', '')}:"
+ f"{repo_full}.git"
+ )
  repo_https_url = f"{server_url}/{repo_full}.git"

  run_cmd(["git", "init"], cwd=workspace)
@@ -658,25 +1062,38 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->

  # Check if we should try SSH for private repos
  use_ssh = False
- respect_user_ssh = os.getenv("G2G_RESPECT_USER_SSH", "false").lower() in ("true", "1", "yes")
+ respect_user_ssh = os.getenv("G2G_RESPECT_USER_SSH", "false").lower() in (
+ "true",
+ "1",
+ "yes",
+ )
  gerrit_ssh_privkey = os.getenv("GERRIT_SSH_PRIVKEY_G2G")

  log.debug(
- "GitHub repo access decision: SSH URL available=%s, G2G_RESPECT_USER_SSH=%s, GERRIT_SSH_PRIVKEY_G2G=%s",
+ "GitHub repo access decision: SSH URL available=%s, "
+ "G2G_RESPECT_USER_SSH=%s, GERRIT_SSH_PRIVKEY_G2G=%s",
  repo_ssh_url.startswith("git@"),
  respect_user_ssh,
  bool(gerrit_ssh_privkey),
  )

  if repo_ssh_url.startswith("git@"):
- # For private repos, only try SSH if G2G_RESPECT_USER_SSH is explicitly enabled
- # Don't use SSH just because GERRIT_SSH_PRIVKEY_G2G is set (that's for Gerrit, not GitHub)
+ # For private repos, only try SSH if G2G_RESPECT_USER_SSH is
+ # explicitly enabled
+ # Don't use SSH just because GERRIT_SSH_PRIVKEY_G2G is set
+ # (that's for Gerrit, not GitHub)
  if respect_user_ssh:
  use_ssh = True
  repo_url = repo_ssh_url
- log.debug("Using SSH for GitHub repo access due to G2G_RESPECT_USER_SSH=true")
+ log.debug(
+ "Using SSH for GitHub repo access due to "
+ "G2G_RESPECT_USER_SSH=true"
+ )
  else:
- log.debug("Not using SSH for GitHub repo access - G2G_RESPECT_USER_SSH not enabled")
+ log.debug(
+ "Not using SSH for GitHub repo access - "
+ "G2G_RESPECT_USER_SSH not enabled"
+ )

  if use_ssh:
  env = {
@@ -711,7 +1128,10 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->

  if pr_num_str:
  try:
- pr_ref = f"refs/pull/{pr_num_str}/head:refs/remotes/origin/pr/{pr_num_str}/head"
+ pr_ref = (
+ f"refs/pull/{pr_num_str}/head:"
+ f"refs/remotes/origin/pr/{pr_num_str}/head"
+ )
  run_cmd(
  [
  "git",
@@ -736,7 +1156,9 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
  )
  fetch_success = True
  except Exception as exc:
- log.warning("Git fetch failed, attempting API archive fallback: %s", exc)
+ log.warning(
+ "Git fetch failed, attempting API archive fallback: %s", exc
+ )
  # Try API archive fallback for private repos
  try:
  _fallback_to_api_archive(workspace, gh, data, pr_num_str)
@@ -750,7 +1172,9 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
  raise RuntimeError(msg)


- def _fallback_to_api_archive(workspace: Path, gh: GitHubContext, data: Inputs, pr_num_str: str) -> None:
+ def _fallback_to_api_archive(
+ workspace: Path, gh: GitHubContext, data: Inputs, pr_num_str: str
+ ) -> None:
  """Fallback to GitHub API archive download for private repos."""
  import io
  import json
@@ -871,7 +1295,9 @@ def _fallback_to_api_archive(workspace: Path, gh: GitHubContext, data: Inputs, p
  # Create the expected branch
  run_cmd(["git", "checkout", "-B", "g2g_pr_head"], cwd=workspace)

- log.info("Successfully extracted PR #%s content via API archive", pr_num_str)
+ log.info(
+ "Successfully extracted PR #%s content via API archive", pr_num_str
+ )


  def _load_effective_inputs() -> Inputs:
@@ -879,7 +1305,11 @@ def _load_effective_inputs() -> Inputs:
  data = _build_inputs_from_env()

  # Load per-org configuration and apply to environment before validation
- org_for_cfg = data.organization or os.getenv("ORGANIZATION") or os.getenv("GITHUB_REPOSITORY_OWNER")
+ org_for_cfg = (
+ data.organization
+ or os.getenv("ORGANIZATION")
+ or os.getenv("GITHUB_REPOSITORY_OWNER")
+ )
  cfg = load_org_config(org_for_cfg)

  # Apply dynamic parameter derivation for missing Gerrit parameters
@@ -889,7 +1319,8 @@ def _load_effective_inputs() -> Inputs:
  log.debug("Configuration to apply: %s", cfg)
  if "DRY_RUN" in cfg:
  log.warning(
- "Configuration contains DRY_RUN=%s, this may override environment DRY_RUN=%s",
+ "Configuration contains DRY_RUN=%s, this may override "
+ "environment DRY_RUN=%s",
  cfg["DRY_RUN"],
  os.getenv("DRY_RUN"),
  )
@@ -900,7 +1331,9 @@ def _load_effective_inputs() -> Inputs:
  data = _build_inputs_from_env()

  # Derive reviewers from local git config if running locally and unset
- if not os.getenv("REVIEWERS_EMAIL") and (os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")):
+ if not os.getenv("REVIEWERS_EMAIL") and (
+ os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")
+ ):
  try:
  from .gitutils import enumerate_reviewer_emails

@@ -927,8 +1360,14 @@ def _load_effective_inputs() -> Inputs:
  allow_duplicates=data.allow_duplicates,
  ci_testing=data.ci_testing,
  duplicates_filter=data.duplicates_filter,
+ reuse_strategy=data.reuse_strategy,
+ similarity_subject=data.similarity_subject,
+ similarity_files=data.similarity_files,
+ allow_orphan_changes=data.allow_orphan_changes,
+ persist_single_mapping_comment=data.persist_single_mapping_comment,
+ log_reconcile_json=data.log_reconcile_json,
  )
- log.info("Derived reviewers: %s", data.reviewers_email)
+ log.debug("Derived reviewers: %s", data.reviewers_email)
  except Exception as exc:
  log.debug("Could not derive reviewers from git config: %s", exc)

@@ -936,23 +1375,33 @@ def _load_effective_inputs() -> Inputs:


  def _augment_pr_refs_if_needed(gh: GitHubContext) -> GitHubContext:
- if os.getenv("G2G_TARGET_URL") and gh.pr_number and (not gh.head_ref or not gh.base_ref):
+ if (
+ os.getenv("G2G_TARGET_URL")
+ and gh.pr_number
+ and (not gh.head_ref or not gh.base_ref)
+ ):
  try:
  client = build_client()
  repo = get_repo_from_env(client)
  pr_obj = get_pull(repo, int(gh.pr_number))
- base_ref = str(getattr(getattr(pr_obj, "base", object()), "ref", "") or "")
- head_ref = str(getattr(getattr(pr_obj, "head", object()), "ref", "") or "")
- head_sha = str(getattr(getattr(pr_obj, "head", object()), "sha", "") or "")
+ base_ref = str(
+ getattr(getattr(pr_obj, "base", object()), "ref", "") or ""
+ )
+ head_ref = str(
+ getattr(getattr(pr_obj, "head", object()), "ref", "") or ""
+ )
+ head_sha = str(
+ getattr(getattr(pr_obj, "head", object()), "sha", "") or ""
+ )
  if base_ref:
  os.environ["GITHUB_BASE_REF"] = base_ref
- log.info("Resolved base_ref via GitHub API: %s", base_ref)
+ log.debug("Resolved base_ref via GitHub API: %s", base_ref)
  if head_ref:
  os.environ["GITHUB_HEAD_REF"] = head_ref
- log.info("Resolved head_ref via GitHub API: %s", head_ref)
+ log.debug("Resolved head_ref via GitHub API: %s", head_ref)
  if head_sha:
  os.environ["GITHUB_SHA"] = head_sha
- log.info("Resolved head sha via GitHub API: %s", head_sha)
+ log.debug("Resolved head sha via GitHub API: %s", head_sha)
  return _read_github_context()
  except Exception as exc:
  log.debug("Could not resolve PR refs via GitHub API: %s", exc)
@@ -967,21 +1416,26 @@ def _process() -> None:
  _validate_inputs(data)
  except ConfigurationError as exc:
  log_exception_conditionally(log, "Configuration validation failed")
- typer.echo(f"Configuration validation failed: {exc}", err=True)
+ safe_typer_echo(f"Configuration validation failed: {exc}", err=True)
  raise typer.Exit(code=2) from exc

  gh = _read_github_context()
- _log_effective_config(data, gh)
+ _display_effective_config(data, gh)

  # Test mode: short-circuit after validation
  if env_bool("G2G_TEST_MODE", False):
- log.info("Validation complete. Ready to execute submission pipeline.")
- typer.echo("Validation complete. Ready to execute submission pipeline.")
+ log.debug("Validation complete. Ready to execute submission pipeline.")
+ safe_typer_echo(
+ "Validation complete. Ready to execute submission pipeline."
+ )
  return

  # Bulk mode for URL/workflow_dispatch
  sync_all = env_bool("SYNC_ALL_OPEN_PRS", False)
- if sync_all and (gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")):
+ if sync_all and (
+ gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")
+ ):
+ safe_console_print(f"🔍 Examining repository {gh.repository}")
  bulk_success = _process_bulk(data, gh)

  # Log external API metrics summary
@@ -1003,11 +1457,13 @@ def _process() -> None:


  if not gh.pr_number:
  log.error(
- "PR_NUMBER is empty. This tool requires a valid pull request context. Current event: %s",
+ "PR_NUMBER is empty. This tool requires a valid pull request "
+ "context. Current event: %s",
  gh.event_name,
  )
- typer.echo(
- f"PR_NUMBER is empty. This tool requires a valid pull request context. Current event: {gh.event_name}",
+ safe_typer_echo(
+ f"PR_NUMBER is empty. This tool requires a valid pull request "
+ f"context. Current event: {gh.event_name}",
  err=True,
  )
@@ -1015,25 +1471,87 @@ def _process() -> None:
  # Test mode handled earlier

  # Execute single-PR submission
+ # Initialize progress tracker
+ show_progress = env_bool("G2G_SHOW_PROGRESS", True)
+ progress_tracker: Any = None
+
+ if show_progress:
+ target = (
+ f"{gh.repository}/pull/{gh.pr_number}"
+ if gh.pr_number
+ else gh.repository
+ )
+ progress_tracker = G2GProgressTracker(target)
+ progress_tracker.start()
+ progress_tracker.update_operation("Getting source PR details...")
+ else:
+ target = (
+ f"{gh.repository}/pull/{gh.pr_number}"
+ if gh.pr_number
+ else gh.repository
+ )
+ progress_tracker = DummyProgressTracker("GitHub to Gerrit", target)
+
  # Augment PR refs via API when in URL mode and token present
  gh = _augment_pr_refs_if_needed(gh)

+ # Display PR information with Rich formatting
+ if gh.pr_number:
+ _extract_and_display_pr_info(gh, progress_tracker)
+
  # Check for duplicates in single-PR mode (before workspace setup)
  if gh.pr_number and not env_bool("SYNC_ALL_OPEN_PRS", False):
  try:
  if data.duplicates_filter:
- os.environ["DUPLICATES"] = data.duplicates_filter
- check_for_duplicates(gh, allow_duplicates=data.allow_duplicates)
+ os.environ["DUPLICATE_TYPES"] = data.duplicates_filter
+ # Generate expected GitHub hash for trailer-aware duplicate
+ # detection
+ from .duplicate_detection import DuplicateDetector
+
+ expected_github_hash = (
+ DuplicateDetector._generate_github_change_hash(gh)
+ )
+ if progress_tracker:
+ progress_tracker.update_operation("Checking for duplicates...")
+ check_for_duplicates(
+ gh,
+ allow_duplicates=data.allow_duplicates,
+ expected_github_hash=expected_github_hash,
+ )
+ if progress_tracker:
+ progress_tracker.update_operation("Duplicate check completed")
  except DuplicateChangeError as exc:
- log_exception_conditionally(
- log,
- "Duplicate detection blocked submission for PR #%d",
- gh.pr_number,
+ if progress_tracker:
+ progress_tracker.add_error("Duplicate change detected")
+ progress_tracker.stop()
+
+ # Display clear Rich console output for duplicate detection
+ if exc.urls:
+ urls_display = ", ".join(exc.urls)
+ safe_console_print(
+ f"❌ Duplicate Gerrit change blocked submission: "
+ f"{urls_display}",
+ style="red",
+ progress_tracker=progress_tracker,
+ )
+ else:
+ safe_console_print(
+ "❌ Duplicate Gerrit change blocked submission",
+ style="red",
+ progress_tracker=progress_tracker,
+ )
+
+ safe_console_print(
+ "💡 Use --allow-duplicates to override this check.",
+ style="yellow",
+ progress_tracker=progress_tracker,
  )
- log.info("Use --allow-duplicates to override this check.")
  raise typer.Exit(code=3) from exc

- pipeline_success = _process_single(data, gh)
+ if progress_tracker:
+ progress_tracker.update_operation("Processing pull request...")
+
+ pipeline_success = _process_single(data, gh, progress_tracker)

  # Log external API metrics summary
  try:
@@ -1043,11 +1561,39 @@ def _process() -> None:
  except Exception as exc:
  log.debug("Failed to log API metrics summary: %s", exc)

+ # Stop progress tracker and show final results
+ # Clean up progress tracker
+ if progress_tracker:
+ progress_tracker.stop()
+
+ # Show summary after progress tracker is stopped
+ if show_progress and RICH_AVAILABLE:
+ summary = progress_tracker.get_summary() if progress_tracker else {}
+ safe_console_print(
+ "\n✅ Operation completed!"
+ if pipeline_success
+ else "\n❌ Operation failed!",
+ style="green" if pipeline_success else "red",
+ )
+ safe_console_print(
+ f"⏱️ Total time: {summary.get('elapsed_time', 'unknown')}"
+ )
+ if summary.get("prs_processed", 0) > 0:
+ safe_console_print(f"📊 PRs processed: {summary['prs_processed']}")
+ if summary.get("changes_submitted", 0) > 0:
+ safe_console_print(
+ f"🔄 Changes submitted: {summary['changes_submitted']}"
+ )
+ if summary.get("changes_updated", 0) > 0:
+ safe_console_print(
+ f"📝 Changes updated: {summary['changes_updated']}"
+ )
+
  # Final success/failure message after all cleanup
  if pipeline_success:
- log.info("Submission pipeline completed SUCCESSFULLY ✅")
+ log.debug("Submission pipeline completed SUCCESSFULLY ✅")
  else:
- log.error("Submission pipeline FAILED ❌")
+ log.debug("Submission pipeline FAILED ❌")
  raise typer.Exit(code=1)

  return
@@ -1065,7 +1611,9 @@ def _load_event(path: Path | None) -> dict[str, Any]:
  if not path or not path.exists():
  return {}
  try:
- return cast(dict[str, Any], json.loads(path.read_text(encoding="utf-8")))
+ return cast(
+ dict[str, Any], json.loads(path.read_text(encoding="utf-8"))
+ )
  except Exception as exc:
  log.warning("Failed to parse GITHUB_EVENT_PATH: %s", exc)
  return {}
@@ -1132,7 +1680,10 @@ def _read_github_context() -> GitHubContext:

  def _validate_inputs(data: Inputs) -> None:
  if data.use_pr_as_commit and data.submit_single_commits:
- msg = "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled at the same time"
+ msg = (
+ "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled "
+ "at the same time"
+ )
  raise ConfigurationError(msg)

  # Context-aware validation: different requirements for GH Actions vs CLI
@@ -1156,19 +1707,21 @@ def _validate_inputs(data: Inputs) -> None:
  # In local CLI: require explicit values or organization + derivation
  # This prevents unexpected behavior when running locally
  missing_gerrit_params = [
- field for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"] if not getattr(data, field)
+ field
+ for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"]
+ if not getattr(data, field)
  ]
  if missing_gerrit_params:
  if data.organization:
  log.info(
- "Local CLI usage: Gerrit parameters can be derived from "
- "organization '%s'. Missing: %s. Consider setting "
- "G2G_ENABLE_DERIVATION=true to enable derivation.",
+ "Gerrit parameters can be derived from "
+ "organization '%s'. Missing: %s. Set "
+ "G2G_ENABLE_DERIVATION=false to disable derivation.",
  data.organization,
  ", ".join(missing_gerrit_params),
  )
- # Allow derivation in local mode only if explicitly enabled
- if not env_bool("G2G_ENABLE_DERIVATION", False):
+ # Derivation enabled by default, can be disabled explicitly
+ if not env_bool("G2G_ENABLE_DERIVATION", True):
  required_fields.extend(missing_gerrit_params)
  else:
  required_fields.extend(missing_gerrit_params)
@@ -1181,21 +1734,20 @@ def _validate_inputs(data: Inputs) -> None:
  "gerrit_ssh_user_g2g_email",
  ]:
  if data.organization:
- if is_github_actions:
- log.error(
- "These fields can be derived automatically from "
- "organization '%s' if G2G_ENABLE_DERIVATION=true",
- data.organization,
- )
- else:
- log.error(
- "These fields can be derived from organization '%s'",
- data.organization,
- )
- log.error("Set G2G_ENABLE_DERIVATION=true to enable")
+ log.error(
+ "These fields can be derived automatically from "
+ "organization '%s' (derivation enabled by default). "
+ "Check that G2G_ENABLE_DERIVATION is not set to false.",
+ data.organization,
+ )
  else:
- log.error("These fields require either explicit values or an ORGANIZATION for derivation")
- raise ConfigurationError(_MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name))
+ log.error(
+ "These fields require either explicit values or an "
+ "ORGANIZATION for derivation"
+ )
+ raise ConfigurationError(
+ _MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name)
+ )

  # Validate fetch depth is a positive integer
  if data.fetch_depth <= 0:
@@ -1207,35 +1759,79 @@ def _validate_inputs(data: Inputs) -> None:
  raise ConfigurationError(_MSG_ISSUE_ID_MULTILINE)


- def _log_effective_config(data: Inputs, gh: GitHubContext) -> None:
- # Avoid logging sensitive values
- safe_privkey = _mask_secret(data.gerrit_ssh_privkey_g2g)
- log.info("Effective configuration (sanitized):")
- log.info(" SUBMIT_SINGLE_COMMITS: %s", data.submit_single_commits)
- log.info(" USE_PR_AS_COMMIT: %s", data.use_pr_as_commit)
- log.info(" FETCH_DEPTH: %s", data.fetch_depth)
- known_hosts_status = "<provided>" if data.gerrit_known_hosts else "<will auto-discover>"
- log.info(" GERRIT_KNOWN_HOSTS: %s", known_hosts_status)
- log.info(" GERRIT_SSH_PRIVKEY_G2G: %s", safe_privkey)
- log.info(" GERRIT_SSH_USER_G2G: %s", data.gerrit_ssh_user_g2g)
- log.info(" GERRIT_SSH_USER_G2G_EMAIL: %s", data.gerrit_ssh_user_g2g_email)
- log.info(" ORGANIZATION: %s", data.organization)
- log.info(" REVIEWERS_EMAIL: %s", data.reviewers_email or "")
- log.info(" PRESERVE_GITHUB_PRS: %s", data.preserve_github_prs)
- log.info(" DRY_RUN: %s", data.dry_run)
- log.info(" CI_TESTING: %s", data.ci_testing)
- log.info(" GERRIT_SERVER: %s", data.gerrit_server)
- log.info(" GERRIT_SERVER_PORT: %s", data.gerrit_server_port or "")
- log.info(" GERRIT_PROJECT: %s", data.gerrit_project or "")
- log.info("GitHub context:")
- log.info(" event_name: %s", gh.event_name)
- log.info(" event_action: %s", gh.event_action)
- log.info(" repository: %s", gh.repository)
- log.info(" repository_owner: %s", gh.repository_owner)
- log.info(" pr_number: %s", gh.pr_number)
- log.info(" base_ref: %s", gh.base_ref)
- log.info(" head_ref: %s", gh.head_ref)
- log.info(" sha: %s", gh.sha)
+ def _display_effective_config(data: Inputs, gh: GitHubContext) -> None:
+ """Display effective configuration in a formatted table."""
+ from .rich_display import display_pr_info
+
+ # Avoid displaying sensitive values
+ known_hosts_status = "✅" if data.gerrit_known_hosts else "❌"
+ privkey_status = "✅" if data.gerrit_ssh_privkey_g2g else "❌"
+
+ # Build configuration data, filtering out empty/default values
+ # Order items logically: behavioral settings first, then credentials
+ config_info = {}
+
+ # Only show non-default boolean values
+ if data.submit_single_commits:
+ config_info["SUBMIT_SINGLE_COMMITS"] = str(data.submit_single_commits)
+ if data.use_pr_as_commit:
+ config_info["USE_PR_AS_COMMIT"] = str(data.use_pr_as_commit)
+
+ # Only show non-default fetch depth
+ if data.fetch_depth != 10:
+ config_info["FETCH_DEPTH"] = str(data.fetch_depth)
+
+ # SSH user and email first
+ if data.gerrit_ssh_user_g2g:
+ config_info["GERRIT_SSH_USER_G2G"] = data.gerrit_ssh_user_g2g
+ if data.gerrit_ssh_user_g2g_email:
+ config_info["GERRIT_SSH_USER_G2G_EMAIL"] = (
+ data.gerrit_ssh_user_g2g_email
+ )
+ if data.organization:
+ config_info["ORGANIZATION"] = data.organization
+ if data.reviewers_email:
+ config_info["REVIEWERS_EMAIL"] = data.reviewers_email
+
+ # Only show non-default boolean values
+ if data.preserve_github_prs:
+ config_info["PRESERVE_GITHUB_PRS"] = str(data.preserve_github_prs)
+ if data.dry_run:
+ config_info["DRY_RUN"] = str(data.dry_run)
+ if data.ci_testing:
+ config_info["CI_TESTING"] = str(data.ci_testing)
+
+ # Show Gerrit settings if they have values
+ if data.gerrit_server:
+ config_info["GERRIT_SERVER"] = data.gerrit_server
+ # Only show non-default port (29418 is default)
+ if data.gerrit_server_port and data.gerrit_server_port != 29418:
+ config_info["GERRIT_SERVER_PORT"] = str(data.gerrit_server_port)
+ if data.gerrit_project:
+ config_info["GERRIT_PROJECT"] = data.gerrit_project
+
+ # Move credentials to bottom of table
+ # Always show known hosts status
+ if data.gerrit_known_hosts or not data.gerrit_known_hosts:
+ config_info["GERRIT_KNOWN_HOSTS"] = known_hosts_status
+ config_info["GERRIT_SSH_PRIVKEY_G2G"] = privkey_status
+
+ # Display the configuration table
+ display_pr_info(config_info, "GitHub2Gerrit Configuration")
+
+ # Log GitHub context at debug level only
+ log.debug(
+ "GitHub context: event_name=%s, event_action=%s, repository=%s, "
+ "repository_owner=%s, pr_number=%s, base_ref=%s, head_ref=%s, sha=%s",
+ gh.event_name,
+ gh.event_action,
+ gh.repository,
+ gh.repository_owner,
+ gh.pr_number,
+ gh.base_ref,
+ gh.head_ref,
+ gh.sha,
+ )


  if __name__ == "__main__":
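
The hunks above rename the duplicate-detection option from --duplicates/DUPLICATES to --duplicate-types/DUPLICATE_TYPES and add a --version flag backed by the new get_version() helper. A minimal sketch of exercising the new flag with Typer's test runner follows; it assumes github2gerrit 0.1.11 is installed in the current environment and is not part of the packaged code. The PR-processing path additionally needs GITHUB_TOKEN and network access, so it is only indicated in a comment.

# Hypothetical usage sketch (not shipped with the package)
from typer.testing import CliRunner

from github2gerrit.cli import app

runner = CliRunner()

# --version prints "github2gerrit version <x.y.z>" and exits with code 0
result = runner.invoke(app, ["--version"])
print(result.exit_code, result.output.strip())

# The renamed option maps to the DUPLICATE_TYPES environment variable; combined
# with --dry-run it validates a PR without writing to Gerrit, but that path also
# requires GITHUB_TOKEN and a real PR URL, so it is shown here only as a comment:
# runner.invoke(app, ["--dry-run", "--duplicate-types", "open,merged", "<PR URL>"])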