github2gerrit 0.1.6-py3-none-any.whl → 0.1.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- github2gerrit/cli.py +457 -186
- github2gerrit/commit_normalization.py +471 -0
- github2gerrit/core.py +828 -251
- github2gerrit/duplicate_detection.py +1 -67
- github2gerrit/external_api.py +518 -0
- github2gerrit/gerrit_rest.py +298 -0
- github2gerrit/gerrit_urls.py +149 -52
- github2gerrit/github_api.py +12 -79
- github2gerrit/gitutils.py +216 -49
- github2gerrit/models.py +2 -0
- github2gerrit/pr_content_filter.py +476 -0
- github2gerrit/similarity.py +2 -2
- github2gerrit/ssh_agent_setup.py +351 -0
- github2gerrit/ssh_common.py +244 -0
- github2gerrit/ssh_discovery.py +4 -0
- github2gerrit/utils.py +113 -0
- github2gerrit-0.1.7.dist-info/METADATA +798 -0
- github2gerrit-0.1.7.dist-info/RECORD +24 -0
- github2gerrit-0.1.6.dist-info/METADATA +0 -552
- github2gerrit-0.1.6.dist-info/RECORD +0 -17
- {github2gerrit-0.1.6.dist-info → github2gerrit-0.1.7.dist-info}/WHEEL +0 -0
- {github2gerrit-0.1.6.dist-info → github2gerrit-0.1.7.dist-info}/entry_points.txt +0 -0
- {github2gerrit-0.1.6.dist-info → github2gerrit-0.1.7.dist-info}/licenses/LICENSE +0 -0
- {github2gerrit-0.1.6.dist-info → github2gerrit-0.1.7.dist-info}/top_level.txt +0 -0
github2gerrit/cli.py
CHANGED
```diff
@@ -8,6 +8,8 @@ import logging
 import os
 import tempfile
 from collections.abc import Callable
+from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import as_completed
 from pathlib import Path
 from typing import TYPE_CHECKING
 from typing import Any
@@ -35,19 +37,13 @@ from .github_api import iter_open_pulls
 from .gitutils import run_cmd
 from .models import GitHubContext
 from .models import Inputs
-
-
-
-
-
-
-
-def _log_exception_conditionally(logger: logging.Logger, message: str, *args: Any) -> None:
-    """Log exception with traceback only if verbose mode is enabled."""
-    if _is_verbose_mode():
-        logger.exception(message, *args)
-    else:
-        logger.error(message, *args)
+from .ssh_common import build_git_ssh_command
+from .ssh_common import build_non_interactive_ssh_env
+from .utils import append_github_output
+from .utils import env_bool
+from .utils import env_str
+from .utils import log_exception_conditionally
+from .utils import parse_bool_env


 class ConfigurationError(Exception):
@@ -72,7 +68,7 @@ def _parse_github_target(url: str) -> tuple[str | None, str | None, int | None]:
     except Exception:
         return None, None, None

-    allow_ghe =
+    allow_ghe = env_bool("ALLOW_GHE_URLS", False)
     bad_hosts = {
         "gitlab.com",
         "www.gitlab.com",
@@ -164,36 +160,38 @@ def main(
     submit_single_commits: bool = typer.Option(
         False,
         "--submit-single-commits",
+        envvar="SUBMIT_SINGLE_COMMITS",
         help="Submit one commit at a time to the Gerrit repository.",
     ),
     use_pr_as_commit: bool = typer.Option(
         False,
         "--use-pr-as-commit",
+        envvar="USE_PR_AS_COMMIT",
         help="Use PR title and body as the commit message.",
     ),
     fetch_depth: int = typer.Option(
         10,
         "--fetch-depth",
         envvar="FETCH_DEPTH",
-        help="Fetch
+        help="Fetch depth for checkout.",
     ),
     gerrit_known_hosts: str = typer.Option(
         "",
         "--gerrit-known-hosts",
         envvar="GERRIT_KNOWN_HOSTS",
-        help="Known hosts entries for Gerrit SSH.",
+        help="Known hosts entries for Gerrit SSH (single or multi-line).",
     ),
     gerrit_ssh_privkey_g2g: str = typer.Option(
         "",
         "--gerrit-ssh-privkey-g2g",
         envvar="GERRIT_SSH_PRIVKEY_G2G",
-        help="SSH private key
+        help="SSH private key content used to authenticate to Gerrit.",
     ),
     gerrit_ssh_user_g2g: str = typer.Option(
         "",
         "--gerrit-ssh-user-g2g",
         envvar="GERRIT_SSH_USER_G2G",
-        help="Gerrit SSH
+        help="Gerrit SSH username (e.g. automation bot account).",
     ),
     gerrit_ssh_user_g2g_email: str = typer.Option(
         "",
@@ -261,6 +259,12 @@ def main(
         envvar="ALLOW_DUPLICATES",
         help="Allow submitting duplicate changes without error.",
     ),
+    ci_testing: bool = typer.Option(
+        False,
+        "--ci-testing/--no-ci-testing",
+        envvar="CI_TESTING",
+        help="Enable CI testing mode (overrides .gitreview, handles unrelated repos).",
+    ),
     duplicates: str = typer.Option(
         "open",
         "--duplicates",
@@ -269,12 +273,18 @@ def main(
            'Gerrit statuses for duplicate detection (comma-separated). E.g. "open,merged,abandoned". Default: "open".'
        ),
    ),
+    normalise_commit: bool = typer.Option(
+        True,
+        "--normalise-commit/--no-normalise-commit",
+        envvar="NORMALISE_COMMIT",
+        help="Normalize commit messages to conventional commit format.",
+    ),
     verbose: bool = typer.Option(
         False,
         "--verbose",
         "-v",
         envvar="G2G_VERBOSE",
-        help="
+        help="Verbose output (sets loglevel to DEBUG).",
     ),
 ) -> None:
     """
```
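All of the new and retouched options above follow one pattern: an explicit `--flag/--no-flag` pair bound to an environment variable via `envvar=`, so the same switch works from the command line and from a GitHub Actions workflow. A minimal standalone sketch of that pattern; the `demo` command is illustrative, not part of the package:

```python
# Minimal sketch of the typer flag-pair + envvar pattern used above.
# The option strings mirror the diff; the app and command are illustrative.
import typer

app = typer.Typer()


@app.command()
def demo(
    ci_testing: bool = typer.Option(
        False,
        "--ci-testing/--no-ci-testing",  # explicit on/off flag pair
        envvar="CI_TESTING",             # typer also reads this variable
        help="Enable CI testing mode.",
    ),
) -> None:
    typer.echo(f"ci_testing={ci_testing}")


if __name__ == "__main__":
    app()
```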
```diff
@@ -289,16 +299,33 @@ def main(
     - No arguments for CI/CD environment; reads parameters from
       environment variables
     """
+    # Override boolean parameters with properly parsed environment variables
+    # This ensures that string "false" from GitHub Actions is handled correctly
+    if os.getenv("SUBMIT_SINGLE_COMMITS"):
+        submit_single_commits = parse_bool_env(os.getenv("SUBMIT_SINGLE_COMMITS"))
+
+    if os.getenv("USE_PR_AS_COMMIT"):
+        use_pr_as_commit = parse_bool_env(os.getenv("USE_PR_AS_COMMIT"))
+
+    if os.getenv("PRESERVE_GITHUB_PRS"):
+        preserve_github_prs = parse_bool_env(os.getenv("PRESERVE_GITHUB_PRS"))
+
+    if os.getenv("DRY_RUN"):
+        dry_run = parse_bool_env(os.getenv("DRY_RUN"))
+
+    if os.getenv("ALLOW_DUPLICATES"):
+        allow_duplicates = parse_bool_env(os.getenv("ALLOW_DUPLICATES"))
+
+    if os.getenv("CI_TESTING"):
+        ci_testing = parse_bool_env(os.getenv("CI_TESTING"))
     # Set up logging level based on verbose flag
     if verbose:
         os.environ["G2G_LOG_LEVEL"] = "DEBUG"
         _reconfigure_logging()
     # Normalize CLI options into environment for unified processing.
-    #
-    if submit_single_commits
-
-    if use_pr_as_commit:
-        os.environ["USE_PR_AS_COMMIT"] = "true"
+    # Explicitly set all boolean flags to ensure consistent behavior
+    os.environ["SUBMIT_SINGLE_COMMITS"] = "true" if submit_single_commits else "false"
+    os.environ["USE_PR_AS_COMMIT"] = "true" if use_pr_as_commit else "false"
     os.environ["FETCH_DEPTH"] = str(fetch_depth)
     if gerrit_known_hosts:
         os.environ["GERRIT_KNOWN_HOSTS"] = gerrit_known_hosts
```
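The override block exists because GitHub Actions delivers workflow inputs as strings, so a literal `"false"` would be truthy if treated as a plain Python value. `parse_bool_env` ships in the new `github2gerrit/utils.py` and its body is not shown in this hunk; a plausible sketch, assuming it mirrors the removed `_env_bool` helper further down in this diff:

```python
# Hedged sketch of parse_bool_env; the real helper lives in
# github2gerrit/utils.py (new in 0.1.7) and is not shown in this hunk.
# Semantics assumed to match the removed _env_bool helper below.
def parse_bool_env(value: str | None) -> bool:
    """Interpret common truthy strings; everything else is False."""
    if value is None:
        return False
    return value.strip().lower() in ("1", "true", "yes", "on")


# Why the override matters: a naive truthiness check on the raw string
# would treat "false" as True.
assert bool("false") is True            # the bug being avoided
assert parse_bool_env("false") is False
assert parse_bool_env("True") is True
```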
```diff
@@ -313,10 +340,9 @@ def main(
         os.environ["ORGANIZATION"] = resolved_org
     if reviewers_email:
         os.environ["REVIEWERS_EMAIL"] = reviewers_email
-    if preserve_github_prs
-
-    if
-        os.environ["DRY_RUN"] = "true"
+    os.environ["PRESERVE_GITHUB_PRS"] = "true" if preserve_github_prs else "false"
+    os.environ["DRY_RUN"] = "true" if dry_run else "false"
+    os.environ["NORMALISE_COMMIT"] = "true" if normalise_commit else "false"
     os.environ["ALLOW_GHE_URLS"] = "true" if allow_ghe_urls else "false"
     if gerrit_server:
         os.environ["GERRIT_SERVER"] = gerrit_server
@@ -326,8 +352,8 @@ def main(
         os.environ["GERRIT_PROJECT"] = gerrit_project
     if issue_id:
         os.environ["ISSUE_ID"] = issue_id
-    if allow_duplicates
-
+    os.environ["ALLOW_DUPLICATES"] = "true" if allow_duplicates else "false"
+    os.environ["CI_TESTING"] = "true" if ci_testing else "false"
     if duplicates:
         os.environ["DUPLICATES"] = duplicates
     # URL mode handling
@@ -343,6 +369,12 @@ def main(
     else:
         os.environ["SYNC_ALL_OPEN_PRS"] = "true"
         os.environ["G2G_TARGET_URL"] = "1"
+    # Debug: Show environment at CLI startup
+    log.debug("CLI startup environment check:")
+    for key in ["DRY_RUN", "CI_TESTING", "GERRIT_SERVER", "GERRIT_PROJECT"]:
+        value = os.environ.get(key, "NOT_SET")
+        log.debug("  %s = %s", key, value)
+
     # Delegate to common processing path
     try:
         _process()
@@ -351,7 +383,7 @@ def main(
         raise
     except Exception as exc:
         log.debug("main(): _process failed: %s", exc)
-
+        raise typer.Exit(code=1) from exc


 def _setup_logging() -> logging.Logger:
@@ -374,68 +406,59 @@ def _reconfigure_logging() -> None:
 log = _setup_logging()


-def _env_str(name: str, default: str = "") -> str:
-    val = os.getenv(name)
-    return val if val is not None else default
-
-
-def _env_bool(name: str, default: bool = False) -> bool:
-    val = os.getenv(name)
-    if val is None:
-        return default
-    s = val.strip().lower()
-    return s in ("1", "true", "yes", "on")
-
-
 def _build_inputs_from_env() -> Inputs:
     return Inputs(
-        submit_single_commits=
-        use_pr_as_commit=
-        fetch_depth=int(
-        gerrit_known_hosts=
-        gerrit_ssh_privkey_g2g=
-        gerrit_ssh_user_g2g=
-        gerrit_ssh_user_g2g_email=
-        organization=
-        reviewers_email=
-        preserve_github_prs=
-        dry_run=
-
-
-
-
-
-
+        submit_single_commits=env_bool("SUBMIT_SINGLE_COMMITS", False),
+        use_pr_as_commit=env_bool("USE_PR_AS_COMMIT", False),
+        fetch_depth=int(env_str("FETCH_DEPTH", "10") or "10"),
+        gerrit_known_hosts=env_str("GERRIT_KNOWN_HOSTS"),
+        gerrit_ssh_privkey_g2g=env_str("GERRIT_SSH_PRIVKEY_G2G"),
+        gerrit_ssh_user_g2g=env_str("GERRIT_SSH_USER_G2G"),
+        gerrit_ssh_user_g2g_email=env_str("GERRIT_SSH_USER_G2G_EMAIL"),
+        organization=env_str("ORGANIZATION", env_str("GITHUB_REPOSITORY_OWNER")),
+        reviewers_email=env_str("REVIEWERS_EMAIL", ""),
+        preserve_github_prs=env_bool("PRESERVE_GITHUB_PRS", False),
+        dry_run=env_bool("DRY_RUN", False),
+        normalise_commit=env_bool("NORMALISE_COMMIT", True),
+        gerrit_server=env_str("GERRIT_SERVER", ""),
+        gerrit_server_port=env_str("GERRIT_SERVER_PORT", "29418"),
+        gerrit_project=env_str("GERRIT_PROJECT"),
+        issue_id=env_str("ISSUE_ID", ""),
+        allow_duplicates=env_bool("ALLOW_DUPLICATES", False),
+        ci_testing=env_bool("CI_TESTING", False),
+        duplicates_filter=env_str("DUPLICATES", "open"),
     )


-def _process_bulk(data: Inputs, gh: GitHubContext) -> None:
+def _process_bulk(data: Inputs, gh: GitHubContext) -> bool:
     client = build_client()
     repo = get_repo_from_env(client)

     all_urls: list[str] = []
     all_nums: list[str] = []
+    all_shas: list[str] = []

     prs_list = list(iter_open_pulls(repo))
     log.info("Found %d open PRs to process", len(prs_list))
-
+
+    # Result tracking for summary
+    processed_count = 0
+    succeeded_count = 0
+    skipped_count = 0
+    failed_count = 0
+
+    # Use bounded parallel processing with shared clients
+    max_workers = min(4, max(1, len(prs_list)))  # Cap at 4 workers
+
+    def process_single_pr(
+        pr_data: tuple[Any, models.GitHubContext],
+    ) -> tuple[str, SubmissionResult | None, Exception | None]:
+        """Process a single PR and return (status, result, exception)."""
+        pr, per_ctx = pr_data
         pr_number = int(getattr(pr, "number", 0) or 0)
-        if pr_number <= 0:
-            continue

-
-
-            event_action=gh.event_action,
-            event_path=gh.event_path,
-            repository=gh.repository,
-            repository_owner=gh.repository_owner,
-            server_url=gh.server_url,
-            run_id=gh.run_id,
-            sha=gh.sha,
-            base_ref=gh.base_ref,
-            head_ref=gh.head_ref,
-            pr_number=pr_number,
-        )
+        if pr_number <= 0:
+            return "invalid", None, None

         log.info("Starting processing of PR #%d", pr_number)
         log.debug(
```
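`_build_inputs_from_env` now goes through `env_bool`/`env_str` from `.utils` (presumably the relocated `_env_bool`/`_env_str` helpers deleted above) and passes two new keyword arguments. `models.py` gains exactly two lines in this release, which lines up with the two new fields. A sketch of the `Inputs` shape implied by the constructor call; field order, the types, and the frozen-dataclass choice are assumptions:

```python
# Sketch of the Inputs model implied by the constructor call above; the
# actual definition in github2gerrit/models.py is not shown in this diff.
from dataclasses import dataclass


@dataclass(frozen=True)
class Inputs:
    submit_single_commits: bool
    use_pr_as_commit: bool
    fetch_depth: int
    gerrit_known_hosts: str
    gerrit_ssh_privkey_g2g: str
    gerrit_ssh_user_g2g: str
    gerrit_ssh_user_g2g_email: str
    organization: str
    reviewers_email: str
    preserve_github_prs: bool
    dry_run: bool
    normalise_commit: bool  # new in 0.1.7
    gerrit_server: str
    gerrit_server_port: str
    gerrit_project: str
    issue_id: str
    allow_duplicates: bool
    ci_testing: bool        # new in 0.1.7
    duplicates_filter: str
```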
```diff
@@ -450,58 +473,123 @@ def _process_bulk(data: Inputs, gh: GitHubContext) -> None:
                 os.environ["DUPLICATES"] = data.duplicates_filter
             check_for_duplicates(per_ctx, allow_duplicates=data.allow_duplicates)
         except DuplicateChangeError as exc:
-
+            log_exception_conditionally(log, "Skipping PR #%d", pr_number)
             log.warning(
                 "Skipping PR #%d due to duplicate detection: %s. Use --allow-duplicates to override this check.",
                 pr_number,
                 exc,
             )
-
+            return "skipped", None, exc

         try:
             with tempfile.TemporaryDirectory() as temp_dir:
                 workspace = Path(temp_dir)
                 orch = Orchestrator(workspace=workspace)
                 result_multi = orch.execute(inputs=data, gh=per_ctx)
-
-                all_urls.extend(result_multi.change_urls)
-                for url in result_multi.change_urls:
-                    log.info("Gerrit change URL: %s", url)
-                    log.info(
-                        "PR #%d created Gerrit change: %s",
-                        pr_number,
-                        url,
-                    )
-                if result_multi.change_numbers:
-                    all_nums.extend(result_multi.change_numbers)
-                    log.info(
-                        "PR #%d change numbers: %s",
-                        pr_number,
-                        result_multi.change_numbers,
-                    )
+                return "success", result_multi, None
         except Exception as exc:
-
-
-
+            log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
+            return "failed", None, exc
+
+    # Prepare PR processing tasks
+    pr_tasks = []
+    for pr in prs_list:
+        pr_number = int(getattr(pr, "number", 0) or 0)
+        if pr_number <= 0:
             continue

+        per_ctx = models.GitHubContext(
+            event_name=gh.event_name,
+            event_action=gh.event_action,
+            event_path=gh.event_path,
+            repository=gh.repository,
+            repository_owner=gh.repository_owner,
+            server_url=gh.server_url,
+            run_id=gh.run_id,
+            sha=gh.sha,
+            base_ref=gh.base_ref,
+            head_ref=gh.head_ref,
+            pr_number=pr_number,
+        )
+        pr_tasks.append((pr, per_ctx))
+
+    # Process PRs in parallel
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        log.info("Processing %d PRs with %d parallel workers", len(pr_tasks), max_workers)
+
+        # Submit all tasks
+        future_to_pr = {
+            executor.submit(process_single_pr, pr_task): pr_task[1].pr_number
+            for pr_task in pr_tasks
+            if pr_task[1].pr_number is not None
+        }
+
+        # Collect results as they complete
+        for future in as_completed(future_to_pr):
+            pr_number = future_to_pr[future]
+            processed_count += 1
+
+            try:
+                status, result_multi, exc = future.result()
+
+                if status == "success" and result_multi:
+                    succeeded_count += 1
+                    if result_multi.change_urls:
+                        all_urls.extend(result_multi.change_urls)
+                        for url in result_multi.change_urls:
+                            log.info("Gerrit change URL: %s", url)
+                            log.info("PR #%d created Gerrit change: %s", pr_number, url)
+                    if result_multi.change_numbers:
+                        all_nums.extend(result_multi.change_numbers)
+                        log.info("PR #%d change numbers: %s", pr_number, result_multi.change_numbers)
+                    if result_multi.commit_shas:
+                        all_shas.extend(result_multi.commit_shas)
+                elif status == "skipped":
+                    skipped_count += 1
+                elif status == "failed":
+                    failed_count += 1
+                    typer.echo(f"Failed to process PR #{pr_number}: {exc}")
+                    log.info("Continuing to next PR despite failure")
+                else:
+                    failed_count += 1
+
+            except Exception as exc:
+                failed_count += 1
+                log_exception_conditionally(log, "Failed to process PR #%d", pr_number)
+                typer.echo(f"Failed to process PR #{pr_number}: {exc}")
+                log.info("Continuing to next PR despite failure")
+
+    # Aggregate results and provide summary
     if all_urls:
         os.environ["GERRIT_CHANGE_REQUEST_URL"] = "\n".join(all_urls)
     if all_nums:
         os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(all_nums)
+    if all_shas:
+        os.environ["GERRIT_COMMIT_SHA"] = "\n".join(all_shas)

-
+    append_github_output(
         {
-            "gerrit_change_request_url":
-            "gerrit_change_request_num":
+            "gerrit_change_request_url": "\n".join(all_urls) if all_urls else "",
+            "gerrit_change_request_num": "\n".join(all_nums) if all_nums else "",
+            "gerrit_commit_sha": "\n".join(all_shas) if all_shas else "",
         }
     )

-
-
+    # Summary block
+    log.info("=" * 60)
+    log.info("BULK PROCESSING SUMMARY:")
+    log.info("  Total PRs processed: %d", processed_count)
+    log.info("  Succeeded: %d", succeeded_count)
+    log.info("  Skipped (duplicates): %d", skipped_count)
+    log.info("  Failed: %d", failed_count)
+    log.info("  Gerrit changes created: %d", len(all_urls))
+    log.info("=" * 60)

+    # Return True if no failures occurred
+    return failed_count == 0

-def _process_single(data: Inputs, gh: GitHubContext) -> None:
+
+def _process_single(data: Inputs, gh: GitHubContext) -> bool:
     # Create temporary directory for all git operations
     with tempfile.TemporaryDirectory() as temp_dir:
         workspace = Path(temp_dir)
```
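The rewritten `_process_bulk` is a standard bounded fan-out: cap the worker pool, submit one task per PR, map futures back to their PR numbers, and tally outcomes with `as_completed` instead of aborting on the first failure. A self-contained sketch of the same pattern, with illustrative names only:

```python
# Bounded fan-out with per-item failure isolation, mirroring _process_bulk.
from concurrent.futures import ThreadPoolExecutor, as_completed


def process(item: int) -> str:
    if item % 3 == 0:
        raise ValueError(f"item {item} failed")
    return f"item {item} ok"


items = list(range(1, 8))
max_workers = min(4, max(1, len(items)))  # cap at 4, never 0

succeeded = failed = 0
with ThreadPoolExecutor(max_workers=max_workers) as executor:
    # Map each future back to the item it was submitted for.
    future_to_item = {executor.submit(process, i): i for i in items}
    for future in as_completed(future_to_item):
        item = future_to_item[future]
        try:
            print(future.result())
            succeeded += 1
        except Exception as exc:
            failed += 1
            print(f"item {item} failed: {exc}; continuing")

# Like _process_bulk, overall success hinges on failed == 0.
print(f"succeeded={succeeded} failed={failed}")
```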
```diff
@@ -527,21 +615,19 @@ def _process_single(data: Inputs, gh: GitHubContext) -> None:
             log.info("Gerrit change URL: %s", url)
         if result.change_numbers:
             os.environ["GERRIT_CHANGE_REQUEST_NUM"] = "\n".join(result.change_numbers)
+        if result.commit_shas:
+            os.environ["GERRIT_COMMIT_SHA"] = "\n".join(result.commit_shas)

         # Also write outputs to GITHUB_OUTPUT if available
-
+        append_github_output(
             {
-                "gerrit_change_request_url":
-                "gerrit_change_request_num":
-                "gerrit_commit_sha":
+                "gerrit_change_request_url": "\n".join(result.change_urls) if result.change_urls else "",
+                "gerrit_change_request_num": "\n".join(result.change_numbers) if result.change_numbers else "",
+                "gerrit_commit_sha": "\n".join(result.commit_shas) if result.commit_shas else "",
             }
         )

-
-        log.info("Submission pipeline completed SUCCESSFULLY ✅")
-    else:
-        log.error("Submission pipeline FAILED ❌")
-        return
+    return pipeline_success


 def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) -> None:
@@ -554,31 +640,52 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
     if not repo_full:
         return

-
+    # Try SSH first for private repos if available, then fall back to HTTPS/API
+    repo_ssh_url = f"git@{server_url.replace('https://', '').replace('http://', '')}:{repo_full}.git"
+    repo_https_url = f"{server_url}/{repo_full}.git"
+
     run_cmd(["git", "init"], cwd=workspace)
+
+    # Determine which URL to use and set up authentication
+    env: dict[str, str] = {}
+    repo_url = repo_https_url  # Default to HTTPS
+
+    # Check if we should try SSH for private repos
+    use_ssh = False
+    respect_user_ssh = os.getenv("G2G_RESPECT_USER_SSH", "false").lower() in ("true", "1", "yes")
+    gerrit_ssh_privkey = os.getenv("GERRIT_SSH_PRIVKEY_G2G")
+
+    log.debug(
+        "GitHub repo access decision: SSH URL available=%s, G2G_RESPECT_USER_SSH=%s, GERRIT_SSH_PRIVKEY_G2G=%s",
+        repo_ssh_url.startswith("git@"),
+        respect_user_ssh,
+        bool(gerrit_ssh_privkey),
+    )
+
+    if repo_ssh_url.startswith("git@"):
+        # For private repos, only try SSH if G2G_RESPECT_USER_SSH is explicitly enabled
+        # Don't use SSH just because GERRIT_SSH_PRIVKEY_G2G is set (that's for Gerrit, not GitHub)
+        if respect_user_ssh:
+            use_ssh = True
+            repo_url = repo_ssh_url
+            log.debug("Using SSH for GitHub repo access due to G2G_RESPECT_USER_SSH=true")
+        else:
+            log.debug("Not using SSH for GitHub repo access - G2G_RESPECT_USER_SSH not enabled")
+
+    if use_ssh:
+        env = {
+            "GIT_SSH_COMMAND": build_git_ssh_command(),
+            **build_non_interactive_ssh_env(),
+        }
+        log.debug("Using SSH URL for private repo: %s", repo_url)
+    else:
+        log.debug("Using HTTPS URL: %s", repo_url)
+
     run_cmd(["git", "remote", "add", "origin", repo_url], cwd=workspace)

-    #
-
-        "GIT_SSH_COMMAND": (
-            "ssh -F /dev/null "
-            "-o IdentitiesOnly=yes "
-            "-o IdentityAgent=none "
-            "-o BatchMode=yes "
-            "-o PreferredAuthentications=publickey "
-            "-o StrictHostKeyChecking=yes "
-            "-o PasswordAuthentication=no "
-            "-o PubkeyAcceptedKeyTypes=+ssh-rsa "
-            "-o ConnectTimeout=10"
-        ),
-        "SSH_AUTH_SOCK": "",
-        "SSH_AGENT_PID": "",
-        "SSH_ASKPASS": "/usr/bin/false",
-        "DISPLAY": "",
-        "SSH_ASKPASS_REQUIRE": "never",
-    }
+    # Fetch base branch and PR head with fallback to API archive
+    fetch_success = False

-    # Fetch base branch and PR head
     if base_ref:
         try:
             branch_ref = f"refs/heads/{base_ref}:refs/remotes/origin/{base_ref}"
```
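The inline `GIT_SSH_COMMAND` string and the agent/askpass blanking that this hunk deletes are replaced by two helpers from the new `github2gerrit/ssh_common.py` (+244 lines, not shown in this diff). A hedged sketch of what those helpers plausibly return, reconstructed from the removed literals above:

```python
# Hedged sketch of the ssh_common helpers called above; the real module is
# not shown here. The option set is taken from the inline literals this
# hunk removes.
def build_git_ssh_command() -> str:
    """Non-interactive ssh invocation for git, isolated from user config."""
    return (
        "ssh -F /dev/null "
        "-o IdentitiesOnly=yes "
        "-o IdentityAgent=none "
        "-o BatchMode=yes "
        "-o PreferredAuthentications=publickey "
        "-o StrictHostKeyChecking=yes "
        "-o PasswordAuthentication=no "
        "-o PubkeyAcceptedKeyTypes=+ssh-rsa "
        "-o ConnectTimeout=10"
    )


def build_non_interactive_ssh_env() -> dict[str, str]:
    """Blank out agent/askpass variables so ssh can never prompt."""
    return {
        "SSH_AUTH_SOCK": "",
        "SSH_AGENT_PID": "",
        "SSH_ASKPASS": "/usr/bin/false",
        "DISPLAY": "",
        "SSH_ASKPASS_REQUIRE": "never",
    }
```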
```diff
@@ -597,29 +704,168 @@ def _prepare_local_checkout(workspace: Path, gh: GitHubContext, data: Inputs) ->
             log.debug("Base branch fetch failed for %s: %s", base_ref, exc)

     if pr_num_str:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            pr_ref = f"refs/pull/{pr_num_str}/head:refs/remotes/origin/pr/{pr_num_str}/head"
+            run_cmd(
+                [
+                    "git",
+                    "fetch",
+                    f"--depth={data.fetch_depth}",
+                    "origin",
+                    pr_ref,
+                ],
+                cwd=workspace,
+                env=env,
+            )
+            run_cmd(
+                [
+                    "git",
+                    "checkout",
+                    "-B",
+                    "g2g_pr_head",
+                    f"refs/remotes/origin/pr/{pr_num_str}/head",
+                ],
+                cwd=workspace,
+                env=env,
+            )
+            fetch_success = True
+        except Exception as exc:
+            log.warning("Git fetch failed, attempting API archive fallback: %s", exc)
+            # Try API archive fallback for private repos
+            try:
+                _fallback_to_api_archive(workspace, gh, data, pr_num_str)
+                fetch_success = True
+            except Exception as api_exc:
+                log.exception("API archive fallback also failed")
+                raise exc from api_exc
+
+    if not fetch_success and pr_num_str:
+        msg = f"Failed to prepare checkout for PR #{pr_num_str}"
+        raise RuntimeError(msg)
+
+
+def _fallback_to_api_archive(workspace: Path, gh: GitHubContext, data: Inputs, pr_num_str: str) -> None:
+    """Fallback to GitHub API archive download for private repos."""
+    import io
+    import json
+    import shutil
+    import zipfile
+    from urllib.request import Request
+    from urllib.request import urlopen
+
+    log.info("Attempting API archive fallback for PR #%s", pr_num_str)
+
+    # Get GitHub token for authenticated requests
+    token = os.getenv("GITHUB_TOKEN")
+    if not token:
+        msg = "GITHUB_TOKEN required for API archive fallback"
+        raise RuntimeError(msg)
+
+    # Build API URLs
+    repo_full = gh.repository
+    server_url = gh.server_url or "https://github.com"
+
+    # Construct GitHub API base URL properly
+    if "github.com" in server_url:
+        # For github.com, use api.github.com
+        api_base = "https://api.github.com"
+    elif server_url.startswith("https://"):
+        # For GitHub Enterprise, append /api/v3
+        api_base = server_url.rstrip("/") + "/api/v3"
+    else:
+        # Fallback for unexpected formats
+        api_base = "https://api.github.com"
+
+    # Get PR details to find head SHA
+    pr_api_url = f"{api_base}/repos/{repo_full}/pulls/{pr_num_str}"
+    log.debug("GitHub API PR URL: %s", pr_api_url)
+
+    headers = {
+        "Authorization": f"token {token}",
+        "Accept": "application/vnd.github.v3+json",
+        "User-Agent": "github2gerrit",
+    }
+
+    try:
+        req = Request(pr_api_url, headers=headers)  # noqa: S310
+        with urlopen(req, timeout=30) as response:  # noqa: S310
+            pr_data = json.loads(response.read().decode())
+    except Exception:
+        log.exception("Failed to fetch PR data from GitHub API")
+        log.debug("PR API URL was: %s", pr_api_url)
+        raise
+
+    head_sha = pr_data["head"]["sha"]
+
+    # Download archive
+    archive_url = f"{api_base}/repos/{repo_full}/zipball/{head_sha}"
+    log.debug("GitHub API archive URL: %s", archive_url)
+
+    try:
+        req = Request(archive_url, headers=headers)  # noqa: S310
+        with urlopen(req, timeout=120) as response:  # noqa: S310
+            archive_data = response.read()
+    except Exception:
+        log.exception("Failed to download archive from GitHub API")
+        log.debug("Archive URL was: %s", archive_url)
+        raise
+
+    # Extract archive
+    with zipfile.ZipFile(io.BytesIO(archive_data)) as zf:
+        # Find the root directory in the archive (usually repo-sha format)
+        members = zf.namelist()
+        root_dir = None
+        for member in members:
+            if "/" in member:
+                root_dir = member.split("/")[0]
+                break
+
+        if not root_dir:
+            msg = "Could not find root directory in archive"
+            raise RuntimeError(msg)
+
+        # Extract to temporary location then move contents
+        extract_path = workspace / "archive_temp"
+        zf.extractall(extract_path)
+
+        # Move contents from extracted root to workspace
+        extracted_root = extract_path / root_dir
+        for item in extracted_root.iterdir():
+            if item.name == ".git":
+                continue  # Skip .git if present
+            dest = workspace / item.name
+            if dest.exists():
+                if dest.is_dir():
+                    shutil.rmtree(dest)
+                else:
+                    dest.unlink()
+            item.rename(dest)
+
+    # Clean up
+    shutil.rmtree(extract_path)
+
+    # Set up git for the extracted content
+    if not (workspace / ".git").exists():
+        run_cmd(["git", "init"], cwd=workspace)
+
+    # Create a commit for the PR content
+    run_cmd(["git", "add", "."], cwd=workspace)
+    run_cmd(
+        [
+            "git",
+            "commit",
+            "-m",
+            f"PR #{pr_num_str} content from API archive",
+            "--author",
+            "GitHub API <noreply@github.com>",
+        ],
+        cwd=workspace,
+    )
+
+    # Create the expected branch
+    run_cmd(["git", "checkout", "-B", "g2g_pr_head"], cwd=workspace)
+
+    log.info("Successfully extracted PR #%s content via API archive", pr_num_str)


 def _load_effective_inputs() -> Inputs:
@@ -633,6 +879,15 @@ def _load_effective_inputs() -> Inputs:
     # Apply dynamic parameter derivation for missing Gerrit parameters
     cfg = apply_parameter_derivation(cfg, org_for_cfg, save_to_config=True)

+    # Debug: Show what configuration would be applied
+    log.debug("Configuration to apply: %s", cfg)
+    if "DRY_RUN" in cfg:
+        log.warning(
+            "Configuration contains DRY_RUN=%s, this may override environment DRY_RUN=%s",
+            cfg["DRY_RUN"],
+            os.getenv("DRY_RUN"),
+        )
+
     apply_config_to_env(cfg)

     # Refresh inputs after applying configuration to environment
@@ -658,11 +913,13 @@ def _load_effective_inputs() -> Inputs:
         reviewers_email=os.environ["REVIEWERS_EMAIL"],
         preserve_github_prs=data.preserve_github_prs,
         dry_run=data.dry_run,
+        normalise_commit=data.normalise_commit,
         gerrit_server=data.gerrit_server,
         gerrit_server_port=data.gerrit_server_port,
         gerrit_project=data.gerrit_project,
         issue_id=data.issue_id,
         allow_duplicates=data.allow_duplicates,
+        ci_testing=data.ci_testing,
         duplicates_filter=data.duplicates_filter,
     )
     log.info("Derived reviewers: %s", data.reviewers_email)
@@ -672,25 +929,6 @@ def _load_effective_inputs() -> Inputs:
     return data


-def _append_github_output(outputs: dict[str, str]) -> None:
-    gh_out = os.getenv("GITHUB_OUTPUT")
-    if not gh_out:
-        return
-    try:
-        with open(gh_out, "a", encoding="utf-8") as fh:
-            for key, val in outputs.items():
-                if not val:
-                    continue
-                if "\n" in val:
-                    fh.write(f"{key}<<G2G\n")
-                    fh.write(f"{val}\n")
-                    fh.write("G2G\n")
-                else:
-                    fh.write(f"{key}={val}\n")
-    except Exception as exc:
-        log.debug("Failed to write GITHUB_OUTPUT: %s", exc)
-
-
 def _augment_pr_refs_if_needed(gh: GitHubContext) -> GitHubContext:
     if os.getenv("G2G_TARGET_URL") and gh.pr_number and (not gh.head_ref or not gh.base_ref):
         try:
```
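The deleted `_append_github_output` above documents the output format that its `utils.py` replacement presumably keeps: plain `key=value` lines for single-line values, and the standard GitHub Actions heredoc syntax for multi-line ones. A runnable sketch of that serialization; the `G2G` delimiter follows the removed helper, the Gerrit URLs are illustrative, and a temp file stands in for the real `GITHUB_OUTPUT`:

```python
# GITHUB_OUTPUT serialization as done by the removed helper above.
import os
import tempfile

outputs = {
    "gerrit_change_request_num": "12345",
    "gerrit_change_request_url": (
        "https://gerrit.example.org/c/project/+/12345\n"
        "https://gerrit.example.org/c/project/+/12346"
    ),
}

tmp = tempfile.NamedTemporaryFile("w", delete=False, encoding="utf-8")
tmp.close()
os.environ["GITHUB_OUTPUT"] = tmp.name  # Actions sets this for real runs

with open(tmp.name, "a", encoding="utf-8") as fh:
    for key, val in outputs.items():
        if "\n" in val:
            # Multiline output: key<<DELIM ... DELIM (GitHub Actions syntax)
            fh.write(f"{key}<<G2G\n{val}\nG2G\n")
        else:
            fh.write(f"{key}={val}\n")

print(open(tmp.name, encoding="utf-8").read())
```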
```diff
@@ -722,7 +960,7 @@ def _process() -> None:
     try:
         _validate_inputs(data)
     except ConfigurationError as exc:
-
+        log_exception_conditionally(log, "Configuration validation failed")
         typer.echo(f"Configuration validation failed: {exc}", err=True)
         raise typer.Exit(code=2) from exc

@@ -730,15 +968,31 @@ def _process() -> None:
     _log_effective_config(data, gh)

     # Test mode: short-circuit after validation
-    if
+    if env_bool("G2G_TEST_MODE", False):
         log.info("Validation complete. Ready to execute submission pipeline.")
         typer.echo("Validation complete. Ready to execute submission pipeline.")
         return

     # Bulk mode for URL/workflow_dispatch
-    sync_all =
+    sync_all = env_bool("SYNC_ALL_OPEN_PRS", False)
     if sync_all and (gh.event_name == "workflow_dispatch" or os.getenv("G2G_TARGET_URL")):
-        _process_bulk(data, gh)
+        bulk_success = _process_bulk(data, gh)
+
+        # Log external API metrics summary
+        try:
+            from .external_api import log_api_metrics_summary
+
+            log_api_metrics_summary()
+        except Exception as exc:
+            log.debug("Failed to log API metrics summary: %s", exc)
+
+        # Final success/failure message for bulk processing
+        if bulk_success:
+            log.info("Bulk processing completed SUCCESSFULLY ✅")
+        else:
+            log.error("Bulk processing FAILED ❌")
+            raise typer.Exit(code=1)
+
         return

     if not gh.pr_number:
@@ -759,13 +1013,13 @@ def _process() -> None:
     gh = _augment_pr_refs_if_needed(gh)

     # Check for duplicates in single-PR mode (before workspace setup)
-    if gh.pr_number and not
+    if gh.pr_number and not env_bool("SYNC_ALL_OPEN_PRS", False):
         try:
             if data.duplicates_filter:
                 os.environ["DUPLICATES"] = data.duplicates_filter
             check_for_duplicates(gh, allow_duplicates=data.allow_duplicates)
         except DuplicateChangeError as exc:
-
+            log_exception_conditionally(
                 log,
                 "Duplicate detection blocked submission for PR #%d",
                 gh.pr_number,
@@ -773,7 +1027,23 @@ def _process() -> None:
     log.info("Use --allow-duplicates to override this check.")
     raise typer.Exit(code=3) from exc

-    _process_single(data, gh)
+    pipeline_success = _process_single(data, gh)
+
+    # Log external API metrics summary
+    try:
+        from .external_api import log_api_metrics_summary
+
+        log_api_metrics_summary()
+    except Exception as exc:
+        log.debug("Failed to log API metrics summary: %s", exc)
+
+    # Final success/failure message after all cleanup
+    if pipeline_success:
+        log.info("Submission pipeline completed SUCCESSFULLY ✅")
+    else:
+        log.error("Submission pipeline FAILED ❌")
+        raise typer.Exit(code=1)
+
     return

```
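After this change the CLI distinguishes three non-zero exit codes: 1 for a failed submission pipeline, 2 for configuration validation errors, and 3 when duplicate detection blocks a PR. A sketch of how a calling script might branch on them; the console-script name and PR URL here are illustrative:

```python
# Branching on the distinct exit codes seen in this diff (1/2/3).
import subprocess

proc = subprocess.run(
    ["github2gerrit", "https://github.com/example/repo/pull/1"],
    check=False,
)
if proc.returncode == 0:
    print("submitted")
elif proc.returncode == 2:
    print("fix configuration inputs")
elif proc.returncode == 3:
    print("duplicate change; rerun with --allow-duplicates")
else:
    print("pipeline failed; check logs")
```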
```diff
@@ -892,7 +1162,7 @@ def _validate_inputs(data: Inputs) -> None:
            ", ".join(missing_gerrit_params),
        )
        # Allow derivation in local mode only if explicitly enabled
-        if not
+        if not env_bool("G2G_ENABLE_DERIVATION", False):
            required_fields.extend(missing_gerrit_params)
        else:
            required_fields.extend(missing_gerrit_params)
@@ -947,7 +1217,8 @@ def _log_effective_config(data: Inputs, gh: GitHubContext) -> None:
     log.info("  REVIEWERS_EMAIL: %s", data.reviewers_email or "")
     log.info("  PRESERVE_GITHUB_PRS: %s", data.preserve_github_prs)
     log.info("  DRY_RUN: %s", data.dry_run)
-    log.info("
+    log.info("  CI_TESTING: %s", data.ci_testing)
+    log.info("  GERRIT_SERVER: %s", data.gerrit_server)
     log.info("  GERRIT_SERVER_PORT: %s", data.gerrit_server_port or "")
     log.info("  GERRIT_PROJECT: %s", data.gerrit_project or "")
     log.info("GitHub context:")
```