github2gerrit 0.1.10-py3-none-any.whl → 0.1.11-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their respective public registries.
github2gerrit/cli.py (changed)
@@ -6,6 +6,7 @@ from __future__ import annotations
6
6
  import json
7
7
  import logging
8
8
  import os
9
+ import sys
9
10
  import tempfile
10
11
  from collections.abc import Callable
11
12
  from concurrent.futures import ThreadPoolExecutor
@@ -31,12 +32,20 @@ from .core import SubmissionResult
31
32
  from .duplicate_detection import DuplicateChangeError
32
33
  from .duplicate_detection import check_for_duplicates
33
34
  from .github_api import build_client
35
+ from .github_api import get_pr_title_body
34
36
  from .github_api import get_pull
35
37
  from .github_api import get_repo_from_env
36
38
  from .github_api import iter_open_pulls
37
39
  from .gitutils import run_cmd
38
40
  from .models import GitHubContext
39
41
  from .models import Inputs
42
+ from .rich_display import RICH_AVAILABLE
43
+ from .rich_display import DummyProgressTracker
44
+ from .rich_display import G2GProgressTracker
45
+ from .rich_display import display_pr_info
46
+ from .rich_display import safe_console_print
47
+ from .rich_display import safe_typer_echo
48
+ from .rich_logging import setup_rich_aware_logging
40
49
  from .ssh_common import build_git_ssh_command
41
50
  from .ssh_common import build_non_interactive_ssh_env
42
51
  from .utils import append_github_output
@@ -46,6 +55,104 @@ from .utils import log_exception_conditionally
46
55
  from .utils import parse_bool_env
47
56
 
48
57
 
58
+ def get_version(package: str) -> str:
59
+ """Get package version, trying importlib.metadata first, then fallback."""
60
+ try:
61
+ from importlib.metadata import version as stdlib_version
62
+
63
+ return str(stdlib_version(package))
64
+ except ImportError:
65
+ from importlib_metadata import version as backport_version
66
+
67
+ return str(backport_version(package))
68
+
69
+
70
+ def _exit_for_pr_state_error(pr_number: int, pr_state: str) -> None:
71
+ """Exit with error message for invalid PR state."""
72
+ error_msg = (
73
+ f"❌ Pull request #{pr_number} is {pr_state} and cannot be processed"
74
+ )
75
+ safe_console_print(error_msg, style="red", err=True)
76
+ raise typer.Exit(1)
77
+
78
+
79
+ def _exit_for_pr_not_found(pr_number: int, repository: str) -> None:
80
+ """Exit with error message for PR not found."""
81
+ error_msg = (
82
+ f"❌ Pull request #{pr_number} not found in repository {repository}"
83
+ )
84
+ safe_console_print(error_msg, style="red", err=True)
85
+ raise typer.Exit(1)
86
+
87
+
88
+ def _exit_for_pr_fetch_error(exc: Exception) -> None:
89
+ """Exit with error message for PR fetch failure."""
90
+ error_msg = f"❌ Failed to fetch PR details: {exc}"
91
+ safe_console_print(error_msg, style="red", err=True)
92
+ raise typer.Exit(1)
93
+
94
+
95
+ def _extract_and_display_pr_info(
96
+ gh: GitHubContext,
97
+ progress_tracker: Any = None,
98
+ ) -> None:
99
+ """Extract PR information and display it with Rich formatting."""
100
+ if not gh.pr_number:
101
+ return
102
+
103
+ try:
104
+ # Get GitHub token and build client
105
+ token = os.getenv("GITHUB_TOKEN", "")
106
+ if not token:
107
+ safe_console_print(
108
+ "⚠️ No GITHUB_TOKEN available - skipping PR info display",
109
+ style="yellow",
110
+ progress_tracker=progress_tracker,
111
+ )
112
+ return
113
+
114
+ client = build_client(token)
115
+ repo = get_repo_from_env(client)
116
+ pr_obj = get_pull(repo, int(gh.pr_number))
117
+
118
+ # Check PR state and exit if not processable
119
+ pr_state = getattr(pr_obj, "state", "unknown")
120
+ if pr_state != "open":
121
+ _exit_for_pr_state_error(gh.pr_number, pr_state)
122
+
123
+ # Extract PR information
124
+ title, _body = get_pr_title_body(pr_obj)
125
+
126
+ # Get additional PR details
127
+ pr_info = {
128
+ "Repository": gh.repository,
129
+ "PR Number": gh.pr_number,
130
+ "Title": title or "No title",
131
+ "Author": getattr(getattr(pr_obj, "user", None), "login", "Unknown")
132
+ or "Unknown",
133
+ "Base Branch": gh.base_ref or "unknown",
134
+ "SHA": gh.sha or "unknown",
135
+ "URL": f"{gh.server_url}/{gh.repository}/pull/{gh.pr_number}",
136
+ }
137
+
138
+ # Add file changes count if available
139
+ try:
140
+ files = list(getattr(pr_obj, "get_files", list)())
141
+ pr_info["Files Changed"] = len(files)
142
+ except Exception:
143
+ pr_info["Files Changed"] = "unknown"
144
+
145
+ # Display the PR information
146
+ display_pr_info(pr_info, "Pull Request Details", progress_tracker)
147
+
148
+ except Exception as exc:
149
+ log.debug("Failed to display PR info: %s", exc)
150
+ if "404" in str(exc) or "Not Found" in str(exc):
151
+ _exit_for_pr_not_found(gh.pr_number, gh.repository)
152
+ else:
153
+ _exit_for_pr_fetch_error(exc)
154
+
155
+
49
156
  class ConfigurationError(Exception):
50
157
  """Raised when configuration validation fails.
51
158
 
@@ -114,9 +221,13 @@ class _ContextProto(Protocol):
114
221
 
115
222
 
116
223
  class _SingleUsageGroup(BaseGroup):
117
- def format_usage(self, ctx: _ContextProto, formatter: _FormatterProto) -> None:
224
+ def format_usage(
225
+ self, ctx: _ContextProto, formatter: _FormatterProto
226
+ ) -> None:
118
227
  # Force a simplified usage line without COMMAND [ARGS]...
119
- formatter.write_usage(ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: ")
228
+ formatter.write_usage(
229
+ ctx.command_path, "[OPTIONS] TARGET_URL", prefix="Usage: "
230
+ )
120
231
 
121
232
 
122
233
  # Error message constants to comply with TRY003
@@ -124,10 +235,20 @@ _MSG_MISSING_REQUIRED_INPUT = "Missing required input: {field_name}"
124
235
  _MSG_INVALID_FETCH_DEPTH = "FETCH_DEPTH must be a positive integer"
125
236
  _MSG_ISSUE_ID_MULTILINE = "Issue ID must be single line"
126
237
 
238
+ # Show version information when --help is used or in GitHub Actions mode
239
+ if "--help" in sys.argv or _is_github_actions_context():
240
+ try:
241
+ app_version = get_version("github2gerrit")
242
+ print(f"🏷️ github2gerrit version {app_version}")
243
+ except Exception:
244
+ print("⚠️ github2gerrit version information not available")
245
+
127
246
  app: typer.Typer = typer.Typer(
128
247
  add_completion=False,
129
248
  no_args_is_help=False,
130
249
  cls=cast(Any, _SingleUsageGroup),
250
+ rich_markup_mode="rich",
251
+ help="Tool to convert GitHub pull requests into Gerrit changes",
131
252
  )
132
253
 
133
254
 
@@ -144,7 +265,9 @@ def _resolve_org(default_org: str | None) -> str:
144
265
  if TYPE_CHECKING:
145
266
  F = TypeVar("F", bound=Callable[..., object])
146
267
 
147
- def typed_app_command(*args: object, **kwargs: object) -> Callable[[F], F]: ...
268
+ def typed_app_command(
269
+ *args: object, **kwargs: object
270
+ ) -> Callable[[F], F]: ...
148
271
  else:
149
272
  typed_app_command = app.command
150
273
 
@@ -157,17 +280,46 @@ def main(
157
280
  help="GitHub repository or PR URL",
158
281
  metavar="TARGET_URL",
159
282
  ),
160
- submit_single_commits: bool = typer.Option(
283
+ allow_duplicates: bool = typer.Option(
161
284
  False,
162
- "--submit-single-commits",
163
- envvar="SUBMIT_SINGLE_COMMITS",
164
- help="Submit one commit at a time to the Gerrit repository.",
285
+ "--allow-duplicates",
286
+ envvar="ALLOW_DUPLICATES",
287
+ help="Allow submitting duplicate changes without error.",
165
288
  ),
166
- use_pr_as_commit: bool = typer.Option(
289
+ allow_ghe_urls: bool = typer.Option(
167
290
  False,
168
- "--use-pr-as-commit",
169
- envvar="USE_PR_AS_COMMIT",
170
- help="Use PR title and body as the commit message.",
291
+ "--allow-ghe-urls/--no-allow-ghe-urls",
292
+ envvar="ALLOW_GHE_URLS",
293
+ help="Allow non-github.com GitHub Enterprise URLs in direct URL mode.",
294
+ ),
295
+ allow_orphan_changes: bool = typer.Option(
296
+ False,
297
+ "--allow-orphan-changes/--no-allow-orphan-changes",
298
+ envvar="ALLOW_ORPHAN_CHANGES",
299
+ help="Keep unmatched Gerrit changes without warning.",
300
+ ),
301
+ ci_testing: bool = typer.Option(
302
+ False,
303
+ "--ci-testing/--no-ci-testing",
304
+ envvar="CI_TESTING",
305
+ help="Enable CI testing mode (overrides .gitreview, handles "
306
+ "unrelated repos).",
307
+ ),
308
+ dry_run: bool = typer.Option(
309
+ False,
310
+ "--dry-run",
311
+ envvar="DRY_RUN",
312
+ help="Validate settings and PR metadata; do not write to Gerrit.",
313
+ ),
314
+ duplicate_types: str = typer.Option(
315
+ "open",
316
+ "--duplicate-types",
317
+ envvar="DUPLICATE_TYPES",
318
+ help=(
319
+ "Gerrit change states to evaluate when determining if a change "
320
+ "should be considered a duplicate "
321
+ '(comma-separated). E.g. "open,merged,abandoned". Default: "open".'
322
+ ),
171
323
  ),
172
324
  fetch_depth: int = typer.Option(
173
325
  10,
@@ -181,6 +333,24 @@ def main(
181
333
  envvar="GERRIT_KNOWN_HOSTS",
182
334
  help="Known hosts entries for Gerrit SSH (single or multi-line).",
183
335
  ),
336
+ gerrit_project: str = typer.Option(
337
+ "",
338
+ "--gerrit-project",
339
+ envvar="GERRIT_PROJECT",
340
+ help="Gerrit project (optional; .gitreview preferred).",
341
+ ),
342
+ gerrit_server: str = typer.Option(
343
+ "",
344
+ "--gerrit-server",
345
+ envvar="GERRIT_SERVER",
346
+ help="Gerrit server hostname (optional; .gitreview preferred).",
347
+ ),
348
+ gerrit_server_port: int = typer.Option(
349
+ 29418,
350
+ "--gerrit-server-port",
351
+ envvar="GERRIT_SERVER_PORT",
352
+ help="Gerrit SSH port (default: 29418).",
353
+ ),
184
354
  gerrit_ssh_privkey_g2g: str = typer.Option(
185
355
  "",
186
356
  "--gerrit-ssh-privkey-g2g",
@@ -199,23 +369,35 @@ def main(
199
369
  envvar="GERRIT_SSH_USER_G2G_EMAIL",
200
370
  help="Email address for the Gerrit SSH user.",
201
371
  ),
372
+ issue_id: str = typer.Option(
373
+ "",
374
+ "--issue-id",
375
+ envvar="ISSUE_ID",
376
+ help="Issue ID to include in commit message (e.g., Issue-ID: ABC-123).",
377
+ ),
378
+ log_reconcile_json: bool = typer.Option(
379
+ True,
380
+ "--log-reconcile-json/--no-log-reconcile-json",
381
+ envvar="LOG_RECONCILE_JSON",
382
+ help="Emit structured JSON reconciliation summary.",
383
+ ),
384
+ normalise_commit: bool = typer.Option(
385
+ True,
386
+ "--normalise-commit/--no-normalise-commit",
387
+ envvar="NORMALISE_COMMIT",
388
+ help="Normalize commit messages to conventional commit format.",
389
+ ),
202
390
  organization: str | None = typer.Option(
203
391
  None,
204
392
  "--organization",
205
393
  envvar="ORGANIZATION",
206
394
  help=("Organization (defaults to GITHUB_REPOSITORY_OWNER when unset)."),
207
395
  ),
208
- reviewers_email: str = typer.Option(
209
- "",
210
- "--reviewers-email",
211
- envvar="REVIEWERS_EMAIL",
212
- help="Comma-separated list of reviewer emails.",
213
- ),
214
- allow_ghe_urls: bool = typer.Option(
215
- False,
216
- "--allow-ghe-urls/--no-allow-ghe-urls",
217
- envvar="ALLOW_GHE_URLS",
218
- help="Allow non-github.com GitHub Enterprise URLs in direct URL mode.",
396
+ persist_single_mapping_comment: bool = typer.Option(
397
+ True,
398
+ "--persist-single-mapping-comment/--no-persist-single-mapping-comment",
399
+ envvar="PERSIST_SINGLE_MAPPING_COMMENT",
400
+ help="Replace existing mapping comment instead of appending.",
219
401
  ),
220
402
  preserve_github_prs: bool = typer.Option(
221
403
  False,
@@ -223,62 +405,48 @@ def main(
223
405
  envvar="PRESERVE_GITHUB_PRS",
224
406
  help="Do not close GitHub PRs after pushing to Gerrit.",
225
407
  ),
226
- dry_run: bool = typer.Option(
227
- False,
228
- "--dry-run",
229
- envvar="DRY_RUN",
230
- help="Validate settings and PR metadata; do not write to Gerrit.",
408
+ reuse_strategy: str = typer.Option(
409
+ "topic+comment",
410
+ "--reuse-strategy",
411
+ envvar="REUSE_STRATEGY",
412
+ help="Strategy for reusing Change-IDs: topic, comment, "
413
+ "topic+comment, none.",
231
414
  ),
232
- gerrit_server: str = typer.Option(
415
+ reviewers_email: str = typer.Option(
233
416
  "",
234
- "--gerrit-server",
235
- envvar="GERRIT_SERVER",
236
- help="Gerrit server hostname (optional; .gitreview preferred).",
417
+ "--reviewers-email",
418
+ envvar="REVIEWERS_EMAIL",
419
+ help="Comma-separated list of reviewer emails.",
237
420
  ),
238
- gerrit_server_port: str = typer.Option(
239
- "29418",
240
- "--gerrit-server-port",
241
- envvar="GERRIT_SERVER_PORT",
242
- help="Gerrit SSH port (default: 29418).",
421
+ show_progress: bool = typer.Option(
422
+ True,
423
+ "--progress/--no-progress",
424
+ envvar="G2G_SHOW_PROGRESS",
425
+ help="Show real-time progress updates with Rich formatting.",
243
426
  ),
244
- gerrit_project: str = typer.Option(
245
- "",
246
- "--gerrit-project",
247
- envvar="GERRIT_PROJECT",
248
- help="Gerrit project (optional; .gitreview preferred).",
427
+ similarity_files: bool = typer.Option(
428
+ True,
429
+ "--similarity-files/--no-similarity-files",
430
+ envvar="SIMILARITY_FILES",
431
+ help="Require exact file signature match for reconciliation.",
249
432
  ),
250
- issue_id: str = typer.Option(
251
- "",
252
- "--issue-id",
253
- envvar="ISSUE_ID",
254
- help="Issue ID to include in commit message (e.g., Issue-ID: ABC-123).",
433
+ similarity_subject: float = typer.Option(
434
+ 0.7,
435
+ "--similarity-subject",
436
+ envvar="SIMILARITY_SUBJECT",
437
+ help="Subject token Jaccard similarity threshold (0.0-1.0).",
255
438
  ),
256
- allow_duplicates: bool = typer.Option(
439
+ submit_single_commits: bool = typer.Option(
257
440
  False,
258
- "--allow-duplicates",
259
- envvar="ALLOW_DUPLICATES",
260
- help="Allow submitting duplicate changes without error.",
441
+ "--submit-single-commits",
442
+ envvar="SUBMIT_SINGLE_COMMITS",
443
+ help="Submit one commit at a time to the Gerrit repository.",
261
444
  ),
262
- ci_testing: bool = typer.Option(
445
+ use_pr_as_commit: bool = typer.Option(
263
446
  False,
264
- "--ci-testing/--no-ci-testing",
265
- envvar="CI_TESTING",
266
- help="Enable CI testing mode (overrides .gitreview, handles unrelated repos).",
267
- ),
268
- duplicate_types: str = typer.Option(
269
- "open",
270
- "--duplicate-types",
271
- envvar="DUPLICATE_TYPES",
272
- help=(
273
- "Gerrit change states to evaluate when determining if a change should be considered a duplicate "
274
- '(comma-separated). E.g. "open,merged,abandoned". Default: "open".'
275
- ),
276
- ),
277
- normalise_commit: bool = typer.Option(
278
- True,
279
- "--normalise-commit/--no-normalise-commit",
280
- envvar="NORMALISE_COMMIT",
281
- help="Normalize commit messages to conventional commit format.",
447
+ "--use-pr-as-commit",
448
+ envvar="USE_PR_AS_COMMIT",
449
+ help="Use PR title and body as the commit message.",
282
450
  ),
283
451
  verbose: bool = typer.Option(
284
452
  False,
@@ -287,23 +455,35 @@ def main(
287
455
  envvar="G2G_VERBOSE",
288
456
  help="Verbose output (sets loglevel to DEBUG).",
289
457
  ),
458
+ version_flag: bool = typer.Option(
459
+ False,
460
+ "--version",
461
+ help="Show version and exit.",
462
+ ),
290
463
  ) -> None:
291
464
  """
292
465
  Tool to convert GitHub pull requests into Gerrit changes
293
466
 
294
- - Providing a URL to a pull request: converts that pull request
295
- into a Gerrit change
467
+ - PR URL: converts the pull request to Gerrit change
468
+ - Repo URL: converts all open pull requests to Gerrit changes
469
+ - No arguments: uses environment variables (CI/CD mode)
470
+ """
296
471
 
297
- - Providing a URL to a GitHub repository converts all open pull
298
- requests into Gerrit changes
472
+ # Handle version flag first
473
+ if version_flag:
474
+ try:
475
+ app_version = get_version("github2gerrit")
476
+ typer.echo(f"github2gerrit version {app_version}")
477
+ except Exception:
478
+ typer.echo("Version information not available")
479
+ raise typer.Exit(code=0)
299
480
 
300
- - No arguments for CI/CD environment; reads parameters from
301
- environment variables
302
- """
303
481
  # Override boolean parameters with properly parsed environment variables
304
482
  # This ensures that string "false" from GitHub Actions is handled correctly
305
483
  if os.getenv("SUBMIT_SINGLE_COMMITS"):
306
- submit_single_commits = parse_bool_env(os.getenv("SUBMIT_SINGLE_COMMITS"))
484
+ submit_single_commits = parse_bool_env(
485
+ os.getenv("SUBMIT_SINGLE_COMMITS")
486
+ )
307
487
 
308
488
  if os.getenv("USE_PR_AS_COMMIT"):
309
489
  use_pr_as_commit = parse_bool_env(os.getenv("USE_PR_AS_COMMIT"))
@@ -319,13 +499,50 @@ def main(
319
499
 
320
500
  if os.getenv("CI_TESTING"):
321
501
  ci_testing = parse_bool_env(os.getenv("CI_TESTING"))
502
+
503
+ if os.getenv("SIMILARITY_FILES"):
504
+ similarity_files = parse_bool_env(os.getenv("SIMILARITY_FILES"))
505
+
506
+ if os.getenv("ALLOW_ORPHAN_CHANGES"):
507
+ allow_orphan_changes = parse_bool_env(os.getenv("ALLOW_ORPHAN_CHANGES"))
508
+
509
+ if os.getenv("PERSIST_SINGLE_MAPPING_COMMENT"):
510
+ persist_single_mapping_comment = parse_bool_env(
511
+ os.getenv("PERSIST_SINGLE_MAPPING_COMMENT")
512
+ )
513
+
514
+ if os.getenv("LOG_RECONCILE_JSON"):
515
+ log_reconcile_json = parse_bool_env(os.getenv("LOG_RECONCILE_JSON"))
322
516
  # Set up logging level based on verbose flag
323
517
  if verbose:
324
518
  os.environ["G2G_LOG_LEVEL"] = "DEBUG"
325
519
  _reconfigure_logging()
520
+
521
+ # Initialize Rich-aware logging system
522
+ setup_rich_aware_logging()
523
+
524
+ # Log version to logs in GitHub Actions environment
525
+ if _is_github_actions_context():
526
+ try:
527
+ app_version = get_version("github2gerrit")
528
+ log.info("github2gerrit version %s", app_version)
529
+ except Exception:
530
+ log.warning("Version information not available")
531
+
532
+ # Show initial progress if Rich is available and progress is enabled
533
+ if show_progress and not RICH_AVAILABLE:
534
+ safe_console_print(
535
+ "📋 Rich formatting not available - progress will be shown as "
536
+ "simple text..."
537
+ )
538
+
539
+ # Store progress flag in environment for use by processing functions
540
+ os.environ["G2G_SHOW_PROGRESS"] = "true" if show_progress else "false"
326
541
  # Normalize CLI options into environment for unified processing.
327
542
  # Explicitly set all boolean flags to ensure consistent behavior
328
- os.environ["SUBMIT_SINGLE_COMMITS"] = "true" if submit_single_commits else "false"
543
+ os.environ["SUBMIT_SINGLE_COMMITS"] = (
544
+ "true" if submit_single_commits else "false"
545
+ )
329
546
  os.environ["USE_PR_AS_COMMIT"] = "true" if use_pr_as_commit else "false"
330
547
  os.environ["FETCH_DEPTH"] = str(fetch_depth)
331
548
  if gerrit_known_hosts:
@@ -341,22 +558,34 @@ def main(
341
558
  os.environ["ORGANIZATION"] = resolved_org
342
559
  if reviewers_email:
343
560
  os.environ["REVIEWERS_EMAIL"] = reviewers_email
344
- os.environ["PRESERVE_GITHUB_PRS"] = "true" if preserve_github_prs else "false"
561
+ os.environ["PRESERVE_GITHUB_PRS"] = (
562
+ "true" if preserve_github_prs else "false"
563
+ )
345
564
  os.environ["DRY_RUN"] = "true" if dry_run else "false"
346
565
  os.environ["NORMALISE_COMMIT"] = "true" if normalise_commit else "false"
347
566
  os.environ["ALLOW_GHE_URLS"] = "true" if allow_ghe_urls else "false"
348
567
  if gerrit_server:
349
568
  os.environ["GERRIT_SERVER"] = gerrit_server
350
569
  if gerrit_server_port:
351
- os.environ["GERRIT_SERVER_PORT"] = gerrit_server_port
570
+ os.environ["GERRIT_SERVER_PORT"] = str(gerrit_server_port)
352
571
  if gerrit_project:
353
572
  os.environ["GERRIT_PROJECT"] = gerrit_project
354
573
  if issue_id:
355
574
  os.environ["ISSUE_ID"] = issue_id
356
575
  os.environ["ALLOW_DUPLICATES"] = "true" if allow_duplicates else "false"
357
576
  os.environ["CI_TESTING"] = "true" if ci_testing else "false"
358
- if duplicate_types:
359
- os.environ["DUPLICATE_TYPES"] = duplicate_types
577
+ os.environ["DUPLICATE_TYPES"] = duplicate_types
578
+ if reuse_strategy:
579
+ os.environ["REUSE_STRATEGY"] = reuse_strategy
580
+ os.environ["SIMILARITY_SUBJECT"] = str(similarity_subject)
581
+ os.environ["SIMILARITY_FILES"] = "true" if similarity_files else "false"
582
+ os.environ["ALLOW_ORPHAN_CHANGES"] = (
583
+ "true" if allow_orphan_changes else "false"
584
+ )
585
+ os.environ["PERSIST_SINGLE_MAPPING_COMMENT"] = (
586
+ "true" if persist_single_mapping_comment else "false"
587
+ )
588
+ os.environ["LOG_RECONCILE_JSON"] = "true" if log_reconcile_json else "false"
360
589
  # URL mode handling
361
590
  if target_url:
362
591
  org, repo, pr = _parse_github_target(target_url)
@@ -396,7 +625,10 @@ def main(
396
625
  def _setup_logging() -> logging.Logger:
397
626
  level_name = os.getenv("G2G_LOG_LEVEL", "INFO").upper()
398
627
  level = getattr(logging, level_name, logging.INFO)
399
- fmt = "%(asctime)s %(levelname)-8s %(name)s %(filename)s:%(lineno)d | %(message)s"
628
+ fmt = (
629
+ "%(asctime)s %(levelname)-8s %(name)s %(filename)s:%(lineno)d | "
630
+ "%(message)s"
631
+ )
400
632
  logging.basicConfig(level=level, format=fmt)
401
633
  return logging.getLogger(APP_NAME)
402
634
 
@@ -422,22 +654,47 @@ def _build_inputs_from_env() -> Inputs:
422
654
  gerrit_ssh_privkey_g2g=env_str("GERRIT_SSH_PRIVKEY_G2G"),
423
655
  gerrit_ssh_user_g2g=env_str("GERRIT_SSH_USER_G2G"),
424
656
  gerrit_ssh_user_g2g_email=env_str("GERRIT_SSH_USER_G2G_EMAIL"),
425
- organization=env_str("ORGANIZATION", env_str("GITHUB_REPOSITORY_OWNER")),
657
+ organization=env_str(
658
+ "ORGANIZATION", env_str("GITHUB_REPOSITORY_OWNER")
659
+ ),
426
660
  reviewers_email=env_str("REVIEWERS_EMAIL", ""),
427
661
  preserve_github_prs=env_bool("PRESERVE_GITHUB_PRS", False),
428
662
  dry_run=env_bool("DRY_RUN", False),
429
663
  normalise_commit=env_bool("NORMALISE_COMMIT", True),
430
664
  gerrit_server=env_str("GERRIT_SERVER", ""),
431
- gerrit_server_port=env_str("GERRIT_SERVER_PORT", "29418"),
665
+ gerrit_server_port=int(
666
+ env_str("GERRIT_SERVER_PORT", "29418") or "29418"
667
+ ),
432
668
  gerrit_project=env_str("GERRIT_PROJECT"),
433
669
  issue_id=env_str("ISSUE_ID", ""),
434
670
  allow_duplicates=env_bool("ALLOW_DUPLICATES", False),
435
671
  ci_testing=env_bool("CI_TESTING", False),
436
672
  duplicates_filter=env_str("DUPLICATE_TYPES", "open"),
673
+ reuse_strategy=env_str("REUSE_STRATEGY", "topic+comment"),
674
+ similarity_subject=float(env_str("SIMILARITY_SUBJECT", "0.7") or "0.7"),
675
+ similarity_files=env_bool("SIMILARITY_FILES", True),
676
+ allow_orphan_changes=env_bool("ALLOW_ORPHAN_CHANGES", False),
677
+ persist_single_mapping_comment=env_bool(
678
+ "PERSIST_SINGLE_MAPPING_COMMENT", True
679
+ ),
680
+ log_reconcile_json=env_bool("LOG_RECONCILE_JSON", True),
437
681
  )
438
682
 
439
683
 
440
684
  def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
685
+ # Initialize progress tracker for bulk processing
686
+ show_progress = env_bool("G2G_SHOW_PROGRESS", True)
687
+ progress_tracker: Any = None
688
+
689
+ if show_progress:
690
+ target = gh.repository
691
+ progress_tracker = G2GProgressTracker(target)
692
+ progress_tracker.start()
693
+ progress_tracker.update_operation("Getting repository and PRs...")
694
+ else:
695
+ target = gh.repository
696
+ progress_tracker = DummyProgressTracker("GitHub to Gerrit", target)
697
+
441
698
  client = build_client()
442
699
  repo = get_repo_from_env(client)
443
700
 
@@ -448,6 +705,11 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
448
705
  prs_list = list(iter_open_pulls(repo))
449
706
  log.info("Found %d open PRs to process", len(prs_list))
450
707
 
708
+ if progress_tracker:
709
+ progress_tracker.update_operation(
710
+ f"Processing {len(prs_list)} open PRs..."
711
+ )
712
+
451
713
  # Result tracking for summary
452
714
  processed_count = 0
453
715
  succeeded_count = 0
@@ -467,22 +729,42 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
467
729
  if pr_number <= 0:
468
730
  return "invalid", None, None
469
731
 
470
- log.info("Starting processing of PR #%d", pr_number)
732
+ log.debug("Starting processing of PR #%d", pr_number)
471
733
  log.debug(
472
- "Processing PR #%d in multi-PR mode with event_name=%s, event_action=%s",
734
+ "Processing PR #%d in multi-PR mode with event_name=%s, "
735
+ "event_action=%s",
473
736
  pr_number,
474
737
  gh.event_name,
475
738
  gh.event_action,
476
739
  )
477
740
 
741
+ # Update progress tracker for this PR
742
+ if progress_tracker:
743
+ progress_tracker.update_operation(f"Processing PR #{pr_number}...")
744
+ progress_tracker.pr_processed()
745
+
478
746
  try:
479
747
  if data.duplicates_filter:
480
748
  os.environ["DUPLICATE_TYPES"] = data.duplicates_filter
481
- check_for_duplicates(per_ctx, allow_duplicates=data.allow_duplicates)
749
+ # Generate expected GitHub hash for trailer-aware duplicate
750
+ # detection
751
+ from .duplicate_detection import DuplicateDetector
752
+
753
+ expected_github_hash = (
754
+ DuplicateDetector._generate_github_change_hash(per_ctx)
755
+ )
756
+ check_for_duplicates(
757
+ per_ctx,
758
+ allow_duplicates=data.allow_duplicates,
759
+ expected_github_hash=expected_github_hash,
760
+ )
482
761
  except DuplicateChangeError as exc:
762
+ if progress_tracker:
763
+ progress_tracker.duplicate_skipped()
483
764
  log_exception_conditionally(log, "Skipping PR #%d", pr_number)
484
765
  log.warning(
485
- "Skipping PR #%d due to duplicate detection: %s. Use --allow-duplicates to override this check.",
766
+ "Skipping PR #%d due to duplicate detection: %s. Use "
767
+ "--allow-duplicates to override this check.",
486
768
  pr_number,
487
769
  exc,
488
770
  )
@@ -493,9 +775,20 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
493
775
  workspace = Path(temp_dir)
494
776
  orch = Orchestrator(workspace=workspace)
495
777
  result_multi = orch.execute(inputs=data, gh=per_ctx)
778
+ if progress_tracker and result_multi.change_urls:
779
+ if any(
780
+ "new" in url.lower() for url in result_multi.change_urls
781
+ ):
782
+ progress_tracker.change_submitted()
783
+ else:
784
+ progress_tracker.change_updated()
496
785
  return "success", result_multi, None
497
786
  except Exception as exc:
498
- log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
787
+ if progress_tracker:
788
+ progress_tracker.add_error(f"PR #{pr_number} processing failed")
789
+ log_exception_conditionally(
790
+ log, "Failed to process PR #%d", pr_number
791
+ )
499
792
  return "failed", None, exc
500
793
 
501
794
  # Prepare PR processing tasks
@@ -522,7 +815,11 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
522
815
 
523
816
  # Process PRs in parallel
524
817
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
525
- log.info("Processing %d PRs with %d parallel workers", len(pr_tasks), max_workers)
818
+ log.info(
819
+ "Processing %d PRs with %d parallel workers",
820
+ len(pr_tasks),
821
+ max_workers,
822
+ )
526
823
 
527
824
  # Submit all tasks
528
825
  future_to_pr = {
@@ -545,25 +842,43 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
545
842
  all_urls.extend(result_multi.change_urls)
546
843
  for url in result_multi.change_urls:
547
844
  log.info("Gerrit change URL: %s", url)
548
- log.info("PR #%d created Gerrit change: %s", pr_number, url)
845
+ log.info(
846
+ "PR #%d created Gerrit change: %s",
847
+ pr_number,
848
+ url,
849
+ )
549
850
  if result_multi.change_numbers:
550
851
  all_nums.extend(result_multi.change_numbers)
551
- log.info("PR #%d change numbers: %s", pr_number, result_multi.change_numbers)
852
+ log.info(
853
+ "PR #%d change numbers: %s",
854
+ pr_number,
855
+ result_multi.change_numbers,
856
+ )
552
857
  if result_multi.commit_shas:
553
858
  all_shas.extend(result_multi.commit_shas)
554
859
  elif status == "skipped":
555
860
  skipped_count += 1
556
861
  elif status == "failed":
557
862
  failed_count += 1
558
- typer.echo(f"Failed to process PR #{pr_number}: {exc}")
863
+ safe_typer_echo(
864
+ f"Failed to process PR #{pr_number}: {exc}",
865
+ progress_tracker=progress_tracker,
866
+ err=True,
867
+ )
559
868
  log.info("Continuing to next PR despite failure")
560
869
  else:
561
870
  failed_count += 1
562
871
 
563
872
  except Exception as exc:
564
873
  failed_count += 1
565
- log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
566
- typer.echo(f"Failed to process PR #{pr_number}: {exc}")
874
+ log_exception_conditionally(
875
+ log, "Failed to process PR #%d", pr_number
876
+ )
877
+ safe_typer_echo(
878
+ f"Failed to process PR #{pr_number}: {exc}",
879
+ progress_tracker=progress_tracker,
880
+ err=True,
881
+ )
567
882
  log.info("Continuing to next PR despite failure")
568
883
 
569
884
  # Aggregate results and provide summary
@@ -576,12 +891,46 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
576
891
 
577
892
  append_github_output(
578
893
  {
579
- "gerrit_change_request_url": "\n".join(all_urls) if all_urls else "",
580
- "gerrit_change_request_num": "\n".join(all_nums) if all_nums else "",
894
+ "gerrit_change_request_url": "\n".join(all_urls)
895
+ if all_urls
896
+ else "",
897
+ "gerrit_change_request_num": "\n".join(all_nums)
898
+ if all_nums
899
+ else "",
581
900
  "gerrit_commit_sha": "\n".join(all_shas) if all_shas else "",
582
901
  }
583
902
  )
584
903
 
904
+ # Stop progress tracker and show final results
905
+ if progress_tracker:
906
+ if failed_count == 0:
907
+ progress_tracker.update_operation(
908
+ "Bulk processing completed successfully"
909
+ )
910
+ else:
911
+ progress_tracker.add_error("Some PRs failed processing")
912
+ # Aggregate results and provide summary
913
+ if progress_tracker:
914
+ progress_tracker.stop()
915
+
916
+ # Show summary after progress tracker is stopped
917
+ if show_progress and RICH_AVAILABLE:
918
+ summary = progress_tracker.get_summary() if progress_tracker else {}
919
+ safe_console_print(
920
+ "\n✅ Bulk processing completed!"
921
+ if failed_count == 0
922
+ else "\n⚠️ Bulk processing completed with errors!"
923
+ )
924
+ safe_console_print(
925
+ f"⏱️ Total time: {summary.get('elapsed_time', 'unknown')}"
926
+ )
927
+ safe_console_print(f"📊 PRs processed: {processed_count}")
928
+ safe_console_print(f"✅ Succeeded: {succeeded_count}")
929
+ safe_console_print(f"⏭️ Skipped: {skipped_count}")
930
+ if failed_count > 0:
931
+ safe_console_print(f"❌ Failed: {failed_count}")
932
+ safe_console_print(f"🔗 Gerrit changes created: {len(all_urls)}")
933
+
585
934
  # Summary block
586
935
  log.info("=" * 60)
587
936
  log.info("BULK PROCESSING SUMMARY:")
@@ -596,50 +945,101 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
596
945
  return failed_count == 0
597
946
 
598
947
 
599
- def _process_single(data: Inputs, gh: GitHubContext) -> bool:
948
+ def _process_single(
949
+ data: Inputs,
950
+ gh: GitHubContext,
951
+ progress_tracker: Any = None,
952
+ ) -> bool:
600
953
  # Create temporary directory for all git operations
601
954
  with tempfile.TemporaryDirectory() as temp_dir:
602
955
  workspace = Path(temp_dir)
603
956
 
604
957
  try:
958
+ if progress_tracker:
959
+ progress_tracker.update_operation("Preparing local checkout...")
605
960
  _prepare_local_checkout(workspace, gh, data)
606
961
  except Exception as exc:
607
962
  log.debug("Local checkout preparation failed: %s", exc)
963
+ if progress_tracker:
964
+ progress_tracker.add_error("Checkout preparation failed")
965
+
966
+ if progress_tracker:
967
+ progress_tracker.update_operation(
968
+ "Configuring SSH authentication..."
969
+ )
608
970
 
609
971
  orch = Orchestrator(workspace=workspace)
972
+
973
+ if progress_tracker:
974
+ progress_tracker.update_operation(
975
+ "Extracting commit information..."
976
+ )
977
+
610
978
  pipeline_success = False
611
979
  try:
980
+ if progress_tracker:
981
+ progress_tracker.update_operation("Submitting to Gerrit...")
612
982
  result = orch.execute(inputs=data, gh=gh)
613
983
  pipeline_success = True
984
+ if progress_tracker:
985
+ progress_tracker.pr_processed()
986
+ if progress_tracker and result.change_urls:
987
+ if any("new" in url.lower() for url in result.change_urls):
988
+ progress_tracker.change_submitted()
989
+ else:
990
+ progress_tracker.change_updated()
614
991
  except Exception as exc:
615
992
  log.debug("Execution failed; continuing to write outputs: %s", exc)
993
+ if progress_tracker:
994
+ progress_tracker.add_error("Execution failed")
616
995
 
617
- result = SubmissionResult(change_urls=[], change_numbers=[], commit_shas=[])
996
+ result = SubmissionResult(
997
+ change_urls=[], change_numbers=[], commit_shas=[]
998
+ )
618
999
  if result.change_urls:
619
- os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(result.change_urls)
1000
+ os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(
1001
+ result.change_urls
1002
+ )
620
1003
  # Output Gerrit change URL(s) to console
621
1004
  for url in result.change_urls:
622
- log.info("Gerrit change URL: %s", url)
1005
+ log.debug("Gerrit change URL: %s", url)
1006
+ safe_console_print(
1007
+ f"🔗 Gerrit change URL: {url}",
1008
+ style="green",
1009
+ progress_tracker=progress_tracker,
1010
+ )
623
1011
  if result.change_numbers:
624
- os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(result.change_numbers)
1012
+ os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(
1013
+ result.change_numbers
1014
+ )
625
1015
  if result.commit_shas:
626
1016
  os.environ["GERRIT_COMMIT_SHA"] = "\n".join(result.commit_shas)
627
1017
 
628
1018
  # Also write outputs to GITHUB_OUTPUT if available
629
1019
  append_github_output(
630
1020
  {
631
- "gerrit_change_request_url": "\n".join(result.change_urls) if result.change_urls else "",
632
- "gerrit_change_request_num": "\n".join(result.change_numbers) if result.change_numbers else "",
633
- "gerrit_commit_sha": "\n".join(result.commit_shas) if result.commit_shas else "",
1021
+ "gerrit_change_request_url": "\n".join(result.change_urls)
1022
+ if result.change_urls
1023
+ else "",
1024
+ "gerrit_change_request_num": "\n".join(result.change_numbers)
1025
+ if result.change_numbers
1026
+ else "",
1027
+ "gerrit_commit_sha": "\n".join(result.commit_shas)
1028
+ if result.commit_shas
1029
+ else "",
634
1030
  }
635
1031
  )
636
1032
 
637
1033
  return pipeline_success
638
1034
 
639
1035
 
640
- def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) -> None:
1036
+ def _prepare_local_checkout(
1037
+ workspace: Path, gh: GitHubContext, data: Inputs
1038
+ ) -> None:
641
1039
  repo_full = gh.repository.strip() if gh.repository else ""
642
- server_url = gh.server_url or os.getenv("GITHUB_SERVER_URL", "https://github.com")
1040
+ server_url = gh.server_url or os.getenv(
1041
+ "GITHUB_SERVER_URL", "https://github.com"
1042
+ )
643
1043
  server_url = (server_url or "https://github.com").rstrip("/")
644
1044
  base_ref = gh.base_ref or ""
645
1045
  pr_num_str: str = str(gh.pr_number) if gh.pr_number else "0"
@@ -648,7 +1048,10 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
648
1048
  return
649
1049
 
650
1050
  # Try SSH first for private repos if available, then fall back to HTTPS/API
651
- repo_ssh_url = f"git@{server_url.replace('https://', '').replace('http://', '')}:{repo_full}.git"
1051
+ repo_ssh_url = (
1052
+ f"git@{server_url.replace('https://', '').replace('http://', '')}:"
1053
+ f"{repo_full}.git"
1054
+ )
652
1055
  repo_https_url = f"{server_url}/{repo_full}.git"
653
1056
 
654
1057
  run_cmd(["git", "init"], cwd=workspace)
@@ -659,25 +1062,38 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
659
1062
 
660
1063
  # Check if we should try SSH for private repos
661
1064
  use_ssh = False
662
- respect_user_ssh = os.getenv("G2G_RESPECT_USER_SSH", "false").lower() in ("true", "1", "yes")
1065
+ respect_user_ssh = os.getenv("G2G_RESPECT_USER_SSH", "false").lower() in (
1066
+ "true",
1067
+ "1",
1068
+ "yes",
1069
+ )
663
1070
  gerrit_ssh_privkey = os.getenv("GERRIT_SSH_PRIVKEY_G2G")
664
1071
 
665
1072
  log.debug(
666
- "GitHub repo access decision: SSH URL available=%s, G2G_RESPECT_USER_SSH=%s, GERRIT_SSH_PRIVKEY_G2G=%s",
1073
+ "GitHub repo access decision: SSH URL available=%s, "
1074
+ "G2G_RESPECT_USER_SSH=%s, GERRIT_SSH_PRIVKEY_G2G=%s",
667
1075
  repo_ssh_url.startswith("git@"),
668
1076
  respect_user_ssh,
669
1077
  bool(gerrit_ssh_privkey),
670
1078
  )
671
1079
 
672
1080
  if repo_ssh_url.startswith("git@"):
673
- # For private repos, only try SSH if G2G_RESPECT_USER_SSH is explicitly enabled
674
- # Don't use SSH just because GERRIT_SSH_PRIVKEY_G2G is set (that's for Gerrit, not GitHub)
1081
+ # For private repos, only try SSH if G2G_RESPECT_USER_SSH is
1082
+ # explicitly enabled
1083
+ # Don't use SSH just because GERRIT_SSH_PRIVKEY_G2G is set
1084
+ # (that's for Gerrit, not GitHub)
675
1085
  if respect_user_ssh:
676
1086
  use_ssh = True
677
1087
  repo_url = repo_ssh_url
678
- log.debug("Using SSH for GitHub repo access due to G2G_RESPECT_USER_SSH=true")
1088
+ log.debug(
1089
+ "Using SSH for GitHub repo access due to "
1090
+ "G2G_RESPECT_USER_SSH=true"
1091
+ )
679
1092
  else:
680
- log.debug("Not using SSH for GitHub repo access - G2G_RESPECT_USER_SSH not enabled")
1093
+ log.debug(
1094
+ "Not using SSH for GitHub repo access - "
1095
+ "G2G_RESPECT_USER_SSH not enabled"
1096
+ )
681
1097
 
682
1098
  if use_ssh:
683
1099
  env = {
@@ -712,7 +1128,10 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
712
1128
 
713
1129
  if pr_num_str:
714
1130
  try:
715
- pr_ref = f"refs/pull/{pr_num_str}/head:refs/remotes/origin/pr/{pr_num_str}/head"
1131
+ pr_ref = (
1132
+ f"refs/pull/{pr_num_str}/head:"
1133
+ f"refs/remotes/origin/pr/{pr_num_str}/head"
1134
+ )
716
1135
  run_cmd(
717
1136
  [
718
1137
  "git",
@@ -737,7 +1156,9 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
737
1156
  )
738
1157
  fetch_success = True
739
1158
  except Exception as exc:
740
- log.warning("Git fetch failed, attempting API archive fallback: %s", exc)
1159
+ log.warning(
1160
+ "Git fetch failed, attempting API archive fallback: %s", exc
1161
+ )
741
1162
  # Try API archive fallback for private repos
742
1163
  try:
743
1164
  _fallback_to_api_archive(workspace, gh, data, pr_num_str)
@@ -751,7 +1172,9 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
751
1172
  raise RuntimeError(msg)
752
1173
 
753
1174
 
754
- def _fallback_to_api_archive(workspace: Path, gh: GitHubContext, data: Inputs, pr_num_str: str) -> None:
1175
+ def _fallback_to_api_archive(
1176
+ workspace: Path, gh: GitHubContext, data: Inputs, pr_num_str: str
1177
+ ) -> None:
755
1178
  """Fallback to GitHub API archive download for private repos."""
756
1179
  import io
757
1180
  import json
@@ -872,7 +1295,9 @@ def _fallback_to_api_archive(workspace: Path, gh: GitHubContext, data: Inputs, p
872
1295
  # Create the expected branch
873
1296
  run_cmd(["git", "checkout", "-B", "g2g_pr_head"], cwd=workspace)
874
1297
 
875
- log.info("Successfully extracted PR #%s content via API archive", pr_num_str)
1298
+ log.info(
1299
+ "Successfully extracted PR #%s content via API archive", pr_num_str
1300
+ )
876
1301
 
877
1302
 
878
1303
  def _load_effective_inputs() -> Inputs:
@@ -880,7 +1305,11 @@ def _load_effective_inputs() -> Inputs:
880
1305
  data = _build_inputs_from_env()
881
1306
 
882
1307
  # Load per-org configuration and apply to environment before validation
883
- org_for_cfg = data.organization or os.getenv("ORGANIZATION") or os.getenv("GITHUB_REPOSITORY_OWNER")
1308
+ org_for_cfg = (
1309
+ data.organization
1310
+ or os.getenv("ORGANIZATION")
1311
+ or os.getenv("GITHUB_REPOSITORY_OWNER")
1312
+ )
884
1313
  cfg = load_org_config(org_for_cfg)
885
1314
 
886
1315
  # Apply dynamic parameter derivation for missing Gerrit parameters
@@ -890,7 +1319,8 @@ def _load_effective_inputs() -> Inputs:
890
1319
  log.debug("Configuration to apply: %s", cfg)
891
1320
  if "DRY_RUN" in cfg:
892
1321
  log.warning(
893
- "Configuration contains DRY_RUN=%s, this may override environment DRY_RUN=%s",
1322
+ "Configuration contains DRY_RUN=%s, this may override "
1323
+ "environment DRY_RUN=%s",
894
1324
  cfg["DRY_RUN"],
895
1325
  os.getenv("DRY_RUN"),
896
1326
  )
@@ -901,7 +1331,9 @@ def _load_effective_inputs() -> Inputs:
901
1331
  data = _build_inputs_from_env()
902
1332
 
903
1333
  # Derive reviewers from local git config if running locally and unset
904
- if not os.getenv("REVIEWERS_EMAIL") and (os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")):
1334
+ if not os.getenv("REVIEWERS_EMAIL") and (
1335
+ os.getenv("G2G_TARGET_URL") or not os.getenv("GITHUB_EVENT_NAME")
1336
+ ):
905
1337
  try:
906
1338
  from .gitutils import enumerate_reviewer_emails
907
1339
 
@@ -928,8 +1360,14 @@ def _load_effective_inputs() -> Inputs:
928
1360
  allow_duplicates=data.allow_duplicates,
929
1361
  ci_testing=data.ci_testing,
930
1362
  duplicates_filter=data.duplicates_filter,
1363
+ reuse_strategy=data.reuse_strategy,
1364
+ similarity_subject=data.similarity_subject,
1365
+ similarity_files=data.similarity_files,
1366
+ allow_orphan_changes=data.allow_orphan_changes,
1367
+ persist_single_mapping_comment=data.persist_single_mapping_comment,
1368
+ log_reconcile_json=data.log_reconcile_json,
931
1369
  )
932
- log.info("Derived reviewers: %s", data.reviewers_email)
1370
+ log.debug("Derived reviewers: %s", data.reviewers_email)
933
1371
  except Exception as exc:
934
1372
  log.debug("Could not derive reviewers from git config: %s", exc)
935
1373
 
@@ -937,23 +1375,33 @@ def _load_effective_inputs() -> Inputs:
937
1375
 
938
1376
 
939
1377
  def _augment_pr_refs_if_needed(gh: GitHubContext) -> GitHubContext:
940
- if os.getenv("G2G_TARGET_URL") and gh.pr_number and (not gh.head_ref or not gh.base_ref):
1378
+ if (
1379
+ os.getenv("G2G_TARGET_URL")
1380
+ and gh.pr_number
1381
+ and (not gh.head_ref or not gh.base_ref)
1382
+ ):
941
1383
  try:
942
1384
  client = build_client()
943
1385
  repo = get_repo_from_env(client)
944
1386
  pr_obj = get_pull(repo, int(gh.pr_number))
945
- base_ref = str(getattr(getattr(pr_obj, "base", object()), "ref", "") or "")
946
- head_ref = str(getattr(getattr(pr_obj, "head", object()), "ref", "") or "")
947
- head_sha = str(getattr(getattr(pr_obj, "head", object()), "sha", "") or "")
1387
+ base_ref = str(
1388
+ getattr(getattr(pr_obj, "base", object()), "ref", "") or ""
1389
+ )
1390
+ head_ref = str(
1391
+ getattr(getattr(pr_obj, "head", object()), "ref", "") or ""
1392
+ )
1393
+ head_sha = str(
1394
+ getattr(getattr(pr_obj, "head", object()), "sha", "") or ""
1395
+ )
948
1396
  if base_ref:
949
1397
  os.environ["GITHUB_BASE_REF"] = base_ref
950
- log.info("Resolved base_ref via GitHub API: %s", base_ref)
1398
+ log.debug("Resolved base_ref via GitHub API: %s", base_ref)
951
1399
  if head_ref:
952
1400
  os.environ["GITHUB_HEAD_REF"] = head_ref
953
- log.info("Resolved head_ref via GitHub API: %s", head_ref)
1401
+ log.debug("Resolved head_ref via GitHub API: %s", head_ref)
954
1402
  if head_sha:
955
1403
  os.environ["GITHUB_SHA"] = head_sha
956
- log.info("Resolved head sha via GitHub API: %s", head_sha)
1404
+ log.debug("Resolved head sha via GitHub API: %s", head_sha)
957
1405
  return _read_github_context()
958
1406
  except Exception as exc:
959
1407
  log.debug("Could not resolve PR refs via GitHub API: %s", exc)
@@ -968,21 +1416,26 @@ def _process() -> None:
968
1416
  _validate_inputs(data)
969
1417
  except ConfigurationError as exc:
970
1418
  log_exception_conditionally(log, "Configuration validation failed")
971
- typer.echo(f"Configuration validation failed: {exc}", err=True)
1419
+ safe_typer_echo(f"Configuration validation failed: {exc}", err=True)
972
1420
  raise typer.Exit(code=2) from exc
973
1421
 
974
1422
  gh = _read_github_context()
975
- _log_effective_config(data, gh)
1423
+ _display_effective_config(data, gh)
976
1424
 
977
1425
  # Test mode: short-circuit after validation
978
1426
  if env_bool("G2G_TEST_MODE", False):
979
- log.info("Validation complete. Ready to execute submission pipeline.")
980
- typer.echo("Validation complete. Ready to execute submission pipeline.")
1427
+ log.debug("Validation complete. Ready to execute submission pipeline.")
1428
+ safe_typer_echo(
1429
+ "Validation complete. Ready to execute submission pipeline."
1430
+ )
981
1431
  return
982
1432
 
983
1433
  # Bulk mode for URL/workflow_dispatch
984
1434
  sync_all = env_bool("SYNC_ALL_OPEN_PRS", False)
985
- if sync_all and (gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")):
1435
+ if sync_all and (
1436
+ gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")
1437
+ ):
1438
+ safe_console_print(f"🔍 Examining repository {gh.repository}")
986
1439
  bulk_success = _process_bulk(data, gh)
987
1440
 
988
1441
  # Log external API metrics summary
@@ -1004,11 +1457,13 @@ def _process() -> None:
1004
1457
 
1005
1458
  if not gh.pr_number:
1006
1459
  log.error(
1007
- "PR_NUMBER is empty. This tool requires a valid pull request context. Current event: %s",
1460
+ "PR_NUMBER is empty. This tool requires a valid pull request "
1461
+ "context. Current event: %s",
1008
1462
  gh.event_name,
1009
1463
  )
1010
- typer.echo(
1011
- f"PR_NUMBER is empty. This tool requires a valid pull request context. Current event: {gh.event_name}",
1464
+ safe_typer_echo(
1465
+ f"PR_NUMBER is empty. This tool requires a valid pull request "
1466
+ f"context. Current event: {gh.event_name}",
1012
1467
  err=True,
1013
1468
  )
1014
1469
  raise typer.Exit(code=2)
@@ -1016,25 +1471,87 @@ def _process() -> None:
1016
1471
  # Test mode handled earlier
1017
1472
 
1018
1473
  # Execute single-PR submission
1474
+ # Initialize progress tracker
1475
+ show_progress = env_bool("G2G_SHOW_PROGRESS", True)
1476
+ progress_tracker: Any = None
1477
+
1478
+ if show_progress:
1479
+ target = (
1480
+ f"{gh.repository}/pull/{gh.pr_number}"
1481
+ if gh.pr_number
1482
+ else gh.repository
1483
+ )
1484
+ progress_tracker = G2GProgressTracker(target)
1485
+ progress_tracker.start()
1486
+ progress_tracker.update_operation("Getting source PR details...")
1487
+ else:
1488
+ target = (
1489
+ f"{gh.repository}/pull/{gh.pr_number}"
1490
+ if gh.pr_number
1491
+ else gh.repository
1492
+ )
1493
+ progress_tracker = DummyProgressTracker("GitHub to Gerrit", target)
1494
+
1019
1495
  # Augment PR refs via API when in URL mode and token present
1020
1496
  gh = _augment_pr_refs_if_needed(gh)
1021
1497
 
1498
+ # Display PR information with Rich formatting
1499
+ if gh.pr_number:
1500
+ _extract_and_display_pr_info(gh, progress_tracker)
1501
+
1022
1502
  # Check for duplicates in single-PR mode (before workspace setup)
1023
1503
  if gh.pr_number and not env_bool("SYNC_ALL_OPEN_PRS", False):
1024
1504
  try:
1025
1505
  if data.duplicates_filter:
1026
1506
  os.environ["DUPLICATE_TYPES"] = data.duplicates_filter
1027
- check_for_duplicates(gh, allow_duplicates=data.allow_duplicates)
1507
+ # Generate expected GitHub hash for trailer-aware duplicate
1508
+ # detection
1509
+ from .duplicate_detection import DuplicateDetector
1510
+
1511
+ expected_github_hash = (
1512
+ DuplicateDetector._generate_github_change_hash(gh)
1513
+ )
1514
+ if progress_tracker:
1515
+ progress_tracker.update_operation("Checking for duplicates...")
1516
+ check_for_duplicates(
1517
+ gh,
1518
+ allow_duplicates=data.allow_duplicates,
1519
+ expected_github_hash=expected_github_hash,
1520
+ )
1521
+ if progress_tracker:
1522
+ progress_tracker.update_operation("Duplicate check completed")
1028
1523
  except DuplicateChangeError as exc:
1029
- log_exception_conditionally(
1030
- log,
1031
- "Duplicate detection blocked submission for PR #%d",
1032
- gh.pr_number,
1524
+ if progress_tracker:
1525
+ progress_tracker.add_error("Duplicate change detected")
1526
+ progress_tracker.stop()
1527
+
1528
+ # Display clear Rich console output for duplicate detection
1529
+ if exc.urls:
1530
+ urls_display = ", ".join(exc.urls)
1531
+ safe_console_print(
1532
+ f"❌ Duplicate Gerrit change blocked submission: "
1533
+ f"{urls_display}",
1534
+ style="red",
1535
+ progress_tracker=progress_tracker,
1536
+ )
1537
+ else:
1538
+ safe_console_print(
1539
+ "❌ Duplicate Gerrit change blocked submission",
1540
+ style="red",
1541
+ progress_tracker=progress_tracker,
1542
+ )
1543
+
1544
+ safe_console_print(
1545
+ "💡 Use --allow-duplicates to override this check.",
1546
+ style="yellow",
1547
+ progress_tracker=progress_tracker,
1033
1548
  )
1034
- log.info("Use --allow-duplicates to override this check.")
1035
1549
  raise typer.Exit(code=3) from exc
1036
1550
 
1037
- pipeline_success = _process_single(data, gh)
1551
+ if progress_tracker:
1552
+ progress_tracker.update_operation("Processing pull request...")
1553
+
1554
+ pipeline_success = _process_single(data, gh, progress_tracker)
1038
1555
 
1039
1556
  # Log external API metrics summary
1040
1557
  try:
@@ -1044,11 +1561,39 @@ def _process() -> None:
1044
1561
  except Exception as exc:
1045
1562
  log.debug("Failed to log API metrics summary: %s", exc)
1046
1563
 
1564
+ # Stop progress tracker and show final results
1565
+ # Clean up progress tracker
1566
+ if progress_tracker:
1567
+ progress_tracker.stop()
1568
+
1569
+ # Show summary after progress tracker is stopped
1570
+ if show_progress and RICH_AVAILABLE:
1571
+ summary = progress_tracker.get_summary() if progress_tracker else {}
1572
+ safe_console_print(
1573
+ "\n✅ Operation completed!"
1574
+ if pipeline_success
1575
+ else "\n❌ Operation failed!",
1576
+ style="green" if pipeline_success else "red",
1577
+ )
1578
+ safe_console_print(
1579
+ f"⏱️ Total time: {summary.get('elapsed_time', 'unknown')}"
1580
+ )
1581
+ if summary.get("prs_processed", 0) > 0:
1582
+ safe_console_print(f"📊 PRs processed: {summary['prs_processed']}")
1583
+ if summary.get("changes_submitted", 0) > 0:
1584
+ safe_console_print(
1585
+ f"🔄 Changes submitted: {summary['changes_submitted']}"
1586
+ )
1587
+ if summary.get("changes_updated", 0) > 0:
1588
+ safe_console_print(
1589
+ f"📝 Changes updated: {summary['changes_updated']}"
1590
+ )
1591
+
1047
1592
  # Final success/failure message after all cleanup
1048
1593
  if pipeline_success:
1049
- log.info("Submission pipeline completed SUCCESSFULLY ✅")
1594
+ log.debug("Submission pipeline completed SUCCESSFULLY ✅")
1050
1595
  else:
1051
- log.error("Submission pipeline FAILED ❌")
1596
+ log.debug("Submission pipeline FAILED ❌")
1052
1597
  raise typer.Exit(code=1)
1053
1598
 
1054
1599
  return
@@ -1066,7 +1611,9 @@ def _load_event(path: Path | None) -> dict[str, Any]:
1066
1611
  if not path or not path.exists():
1067
1612
  return {}
1068
1613
  try:
1069
- return cast(dict[str, Any], json.loads(path.read_text(encoding="utf-8")))
1614
+ return cast(
1615
+ dict[str, Any], json.loads(path.read_text(encoding="utf-8"))
1616
+ )
1070
1617
  except Exception as exc:
1071
1618
  log.warning("Failed to parse GITHUB_EVENT_PATH: %s", exc)
1072
1619
  return {}
@@ -1133,7 +1680,10 @@ def _read_github_context() -> GitHubContext:
1133
1680
 
1134
1681
  def _validate_inputs(data: Inputs) -> None:
1135
1682
  if data.use_pr_as_commit and data.submit_single_commits:
1136
- msg = "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled at the same time"
1683
+ msg = (
1684
+ "USE_PR_AS_COMMIT and SUBMIT_SINGLE_COMMITS cannot be enabled "
1685
+ "at the same time"
1686
+ )
1137
1687
  raise ConfigurationError(msg)
1138
1688
 
1139
1689
  # Context-aware validation: different requirements for GH Actions vs CLI
@@ -1157,19 +1707,21 @@ def _validate_inputs(data: Inputs) -> None:
1157
1707
  # In local CLI: require explicit values or organization + derivation
1158
1708
  # This prevents unexpected behavior when running locally
1159
1709
  missing_gerrit_params = [
1160
- field for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"] if not getattr(data, field)
1710
+ field
1711
+ for field in ["gerrit_ssh_user_g2g", "gerrit_ssh_user_g2g_email"]
1712
+ if not getattr(data, field)
1161
1713
  ]
1162
1714
  if missing_gerrit_params:
1163
1715
  if data.organization:
1164
1716
  log.info(
1165
- "Local CLI usage: Gerrit parameters can be derived from "
1166
- "organization '%s'. Missing: %s. Consider setting "
1167
- "G2G_ENABLE_DERIVATION=true to enable derivation.",
1717
+ "Gerrit parameters can be derived from "
1718
+ "organization '%s'. Missing: %s. Set "
1719
+ "G2G_ENABLE_DERIVATION=false to disable derivation.",
1168
1720
  data.organization,
1169
1721
  ", ".join(missing_gerrit_params),
1170
1722
  )
1171
- # Allow derivation in local mode only if explicitly enabled
1172
- if not env_bool("G2G_ENABLE_DERIVATION", False):
1723
+ # Derivation enabled by default, can be disabled explicitly
1724
+ if not env_bool("G2G_ENABLE_DERIVATION", True):
1173
1725
  required_fields.extend(missing_gerrit_params)
1174
1726
  else:
1175
1727
  required_fields.extend(missing_gerrit_params)
@@ -1182,21 +1734,20 @@ def _validate_inputs(data: Inputs) -> None:
1182
1734
  "gerrit_ssh_user_g2g_email",
1183
1735
  ]:
1184
1736
  if data.organization:
1185
- if is_github_actions:
1186
- log.error(
1187
- "These fields can be derived automatically from "
1188
- "organization '%s' if G2G_ENABLE_DERIVATION=true",
1189
- data.organization,
1190
- )
1191
- else:
1192
- log.error(
1193
- "These fields can be derived from organization '%s'",
1194
- data.organization,
1195
- )
1196
- log.error("Set G2G_ENABLE_DERIVATION=true to enable")
1737
+ log.error(
1738
+ "These fields can be derived automatically from "
1739
+ "organization '%s' (derivation enabled by default). "
1740
+ "Check that G2G_ENABLE_DERIVATION is not set to false.",
1741
+ data.organization,
1742
+ )
1197
1743
  else:
1198
- log.error("These fields require either explicit values or an ORGANIZATION for derivation")
1199
- raise ConfigurationError(_MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name))
1744
+ log.error(
1745
+ "These fields require either explicit values or an "
1746
+ "ORGANIZATION for derivation"
1747
+ )
1748
+ raise ConfigurationError(
1749
+ _MSG_MISSING_REQUIRED_INPUT.format(field_name=field_name)
1750
+ )
1200
1751
 
1201
1752
  # Validate fetch depth is a positive integer
1202
1753
  if data.fetch_depth <= 0:
@@ -1208,35 +1759,79 @@ def _validate_inputs(data: Inputs) -> None:
1208
1759
  raise ConfigurationError(_MSG_ISSUE_ID_MULTILINE)
1209
1760
 
1210
1761
 
1211
- def _log_effective_config(data: Inputs, gh: GitHubContext) -> None:
1212
- # Avoid logging sensitive values
1213
- safe_privkey = _mask_secret(data.gerrit_ssh_privkey_g2g)
1214
- log.info("Effective configuration (sanitized):")
1215
- log.info(" SUBMIT_SINGLE_COMMITS: %s", data.submit_single_commits)
1216
- log.info(" USE_PR_AS_COMMIT: %s", data.use_pr_as_commit)
1217
- log.info(" FETCH_DEPTH: %s", data.fetch_depth)
1218
- known_hosts_status = "<provided>" if data.gerrit_known_hosts else "<will auto-discover>"
1219
- log.info(" GERRIT_KNOWN_HOSTS: %s", known_hosts_status)
1220
- log.info(" GERRIT_SSH_PRIVKEY_G2G: %s", safe_privkey)
1221
- log.info(" GERRIT_SSH_USER_G2G: %s", data.gerrit_ssh_user_g2g)
1222
- log.info(" GERRIT_SSH_USER_G2G_EMAIL: %s", data.gerrit_ssh_user_g2g_email)
1223
- log.info(" ORGANIZATION: %s", data.organization)
1224
- log.info(" REVIEWERS_EMAIL: %s", data.reviewers_email or "")
1225
- log.info(" PRESERVE_GITHUB_PRS: %s", data.preserve_github_prs)
1226
- log.info(" DRY_RUN: %s", data.dry_run)
1227
- log.info(" CI_TESTING: %s", data.ci_testing)
1228
- log.info(" GERRIT_SERVER: %s", data.gerrit_server)
1229
- log.info(" GERRIT_SERVER_PORT: %s", data.gerrit_server_port or "")
1230
- log.info(" GERRIT_PROJECT: %s", data.gerrit_project or "")
1231
- log.info("GitHub context:")
1232
- log.info(" event_name: %s", gh.event_name)
1233
- log.info(" event_action: %s", gh.event_action)
1234
- log.info(" repository: %s", gh.repository)
1235
- log.info(" repository_owner: %s", gh.repository_owner)
1236
- log.info(" pr_number: %s", gh.pr_number)
1237
- log.info(" base_ref: %s", gh.base_ref)
1238
- log.info(" head_ref: %s", gh.head_ref)
1239
- log.info(" sha: %s", gh.sha)
1762
+ def _display_effective_config(data: Inputs, gh: GitHubContext) -> None:
1763
+ """Display effective configuration in a formatted table."""
1764
+ from .rich_display import display_pr_info
1765
+
1766
+ # Avoid displaying sensitive values
1767
+ known_hosts_status = "✅" if data.gerrit_known_hosts else "❌"
1768
+ privkey_status = "✅" if data.gerrit_ssh_privkey_g2g else "❌"
1769
+
1770
+ # Build configuration data, filtering out empty/default values
1771
+ # Order items logically: behavioral settings first, then credentials
1772
+ config_info = {}
1773
+
1774
+ # Only show non-default boolean values
1775
+ if data.submit_single_commits:
1776
+ config_info["SUBMIT_SINGLE_COMMITS"] = str(data.submit_single_commits)
1777
+ if data.use_pr_as_commit:
1778
+ config_info["USE_PR_AS_COMMIT"] = str(data.use_pr_as_commit)
1779
+
1780
+ # Only show non-default fetch depth
1781
+ if data.fetch_depth != 10:
1782
+ config_info["FETCH_DEPTH"] = str(data.fetch_depth)
1783
+
1784
+ # SSH user and email first
1785
+ if data.gerrit_ssh_user_g2g:
1786
+ config_info["GERRIT_SSH_USER_G2G"] = data.gerrit_ssh_user_g2g
1787
+ if data.gerrit_ssh_user_g2g_email:
1788
+ config_info["GERRIT_SSH_USER_G2G_EMAIL"] = (
1789
+ data.gerrit_ssh_user_g2g_email
1790
+ )
1791
+ if data.organization:
1792
+ config_info["ORGANIZATION"] = data.organization
1793
+ if data.reviewers_email:
1794
+ config_info["REVIEWERS_EMAIL"] = data.reviewers_email
1795
+
1796
+ # Only show non-default boolean values
1797
+ if data.preserve_github_prs:
1798
+ config_info["PRESERVE_GITHUB_PRS"] = str(data.preserve_github_prs)
1799
+ if data.dry_run:
1800
+ config_info["DRY_RUN"] = str(data.dry_run)
1801
+ if data.ci_testing:
1802
+ config_info["CI_TESTING"] = str(data.ci_testing)
1803
+
1804
+ # Show Gerrit settings if they have values
1805
+ if data.gerrit_server:
1806
+ config_info["GERRIT_SERVER"] = data.gerrit_server
1807
+ # Only show non-default port (29418 is default)
1808
+ if data.gerrit_server_port and data.gerrit_server_port != 29418:
1809
+ config_info["GERRIT_SERVER_PORT"] = str(data.gerrit_server_port)
1810
+ if data.gerrit_project:
1811
+ config_info["GERRIT_PROJECT"] = data.gerrit_project
1812
+
1813
+ # Move credentials to bottom of table
1814
+ # Always show known hosts status
1815
+ if data.gerrit_known_hosts or not data.gerrit_known_hosts:
1816
+ config_info["GERRIT_KNOWN_HOSTS"] = known_hosts_status
1817
+ config_info["GERRIT_SSH_PRIVKEY_G2G"] = privkey_status
1818
+
1819
+ # Display the configuration table
1820
+ display_pr_info(config_info, "GitHub2Gerrit Configuration")
1821
+
1822
+ # Log GitHub context at debug level only
1823
+ log.debug(
1824
+ "GitHub context: event_name=%s, event_action=%s, repository=%s, "
1825
+ "repository_owner=%s, pr_number=%s, base_ref=%s, head_ref=%s, sha=%s",
1826
+ gh.event_name,
1827
+ gh.event_action,
1828
+ gh.repository,
1829
+ gh.repository_owner,
1830
+ gh.pr_number,
1831
+ gh.base_ref,
1832
+ gh.head_ref,
1833
+ gh.sha,
1834
+ )
1240
1835
 
1241
1836
 
1242
1837
  if __name__ == "__main__":