airbyte-internal-ops 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff compares two publicly available versions of a package released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/RECORD +12 -10
- airbyte_ops_mcp/cli/cloud.py +89 -3
- airbyte_ops_mcp/github_actions.py +197 -0
- airbyte_ops_mcp/live_tests/cdk_secrets.py +90 -0
- airbyte_ops_mcp/live_tests/ci_output.py +55 -5
- airbyte_ops_mcp/live_tests/connector_runner.py +3 -0
- airbyte_ops_mcp/mcp/github.py +2 -21
- airbyte_ops_mcp/mcp/live_tests.py +44 -84
- airbyte_ops_mcp/mcp/prerelease.py +9 -31
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/WHEEL +0 -0
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/entry_points.txt +0 -0
{airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/RECORD
RENAMED

@@ -2,6 +2,7 @@ airbyte_ops_mcp/__init__.py,sha256=HhzURuYr29_UIdMrnWYaZB8ENr_kFkBdm4uqeiIW3Vw,7
 airbyte_ops_mcp/_annotations.py,sha256=MO-SBDnbykxxHDESG7d8rviZZ4WlZgJKv0a8eBqcEzQ,1757
 airbyte_ops_mcp/constants.py,sha256=col6-5BUWuIYhbtKmlvSRR8URBoSNExoz94cn4_kujI,2333
 airbyte_ops_mcp/gcp_auth.py,sha256=5k-k145ZoYhHLjyDES8nrA8f8BBihRI0ykrdD1IcfOs,3599
+airbyte_ops_mcp/github_actions.py,sha256=51rHxqTR-1yHPKfZZLKldz8f-4jZbMd71ICF_LQWvCs,5995
 airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/airbyte_ci/README.md,sha256=qEYx4geDR8AEDjrcA303h7Nol-CMDLojxUyiGzQprM8,236
@@ -351,7 +352,7 @@ airbyte_ops_mcp/cli/__init__.py,sha256=XpL7FyVfgabfBF2JR7u7NwJ2krlYqjd_OwLcWf-Xc
 airbyte_ops_mcp/cli/_base.py,sha256=I8tWnyQf0ks4r3J8N8h-5GZxyn37T-55KsbuHnxYlcg,415
 airbyte_ops_mcp/cli/_shared.py,sha256=jg-xMyGzTCGPqKd8VTfE_3kGPIyO_3Kx5sQbG4rPc0Y,1311
 airbyte_ops_mcp/cli/app.py,sha256=SEdBpqFUG2O8zGV5ifwptxrLGFph_dLr66-MX9d69gQ,789
-airbyte_ops_mcp/cli/cloud.py,sha256=…
+airbyte_ops_mcp/cli/cloud.py,sha256=idkqBKUlWx9toNGiZy8tVq6MOpQoi4ZWfIRVpdsIdiQ,42494
 airbyte_ops_mcp/cli/gh.py,sha256=91b1AxFXvHQCFyXhrrym-756ZjnMCqvxFdmwCtma1zI,2046
 airbyte_ops_mcp/cli/registry.py,sha256=-yiLJWSslV_qGi6ImXZYfXOJSE4oJBO7yICkyA_RiUo,5792
 airbyte_ops_mcp/cli/repo.py,sha256=G1hoQpH0XYhUH3FFOsia9xabGB0LP9o3XcwBuqvFVo0,16331
@@ -365,11 +366,12 @@ airbyte_ops_mcp/connection_config_retriever/audit_logging.py,sha256=GjT4dVa0TtvG
 airbyte_ops_mcp/connection_config_retriever/retrieval.py,sha256=s6yeCyrboWkUd6KdaheEo87x-rLtQNTL8XeR8O9z2HI,12160
 airbyte_ops_mcp/connection_config_retriever/secrets_resolution.py,sha256=12g0lZzhCzAPl4Iv4eMW6d76mvXjIBGspOnNhywzks4,3644
 airbyte_ops_mcp/live_tests/__init__.py,sha256=qJac67dt6DQCqif39HqeiG3Tr9xrxfP-ala8HsLZKis,1020
-airbyte_ops_mcp/live_tests/…
+airbyte_ops_mcp/live_tests/cdk_secrets.py,sha256=TJ0Vbk5jfTvYElREh4fQFHWof0_bIxZfJqT33dDhtrE,3198
+airbyte_ops_mcp/live_tests/ci_output.py,sha256=rrvCVKKShc1iVPMuQJDBqSbsiAHIDpX8SA9j0Uwl_Cg,12718
 airbyte_ops_mcp/live_tests/config.py,sha256=dwWeY0tatdbwl9BqbhZ7EljoZDCtKmGO5fvOAIxeXmA,5873
 airbyte_ops_mcp/live_tests/connection_fetcher.py,sha256=5wIiA0VvCFNEc-fr6Po18gZMX3E5fyPOGf2SuVOqv5U,12799
 airbyte_ops_mcp/live_tests/connection_secret_retriever.py,sha256=DtZYB4Y8CXfUXTFhmzrqzjuEFoICzz5Md3Ol_y9HCq0,4861
-airbyte_ops_mcp/live_tests/connector_runner.py,sha256=…
+airbyte_ops_mcp/live_tests/connector_runner.py,sha256=BLy2RY-KLCK9jNmPz5EsPCk55fJ9WlOOaxr_Xw-GOjY,9914
 airbyte_ops_mcp/live_tests/evaluation_modes.py,sha256=lAL6pEDmy_XCC7_m4_NXjt_f6Z8CXeAhMkc0FU8bm_M,1364
 airbyte_ops_mcp/live_tests/http_metrics.py,sha256=oTD7f2MnQOvx4plOxHop2bInQ0-whvuToSsrC7TIM-M,12469
 airbyte_ops_mcp/live_tests/models.py,sha256=brtAT9oO1TwjFcP91dFcu0XcUNqQb-jf7di1zkoVEuo,8782
@@ -389,11 +391,11 @@ airbyte_ops_mcp/mcp/_mcp_utils.py,sha256=nhztHcoc-_ASPpJfoDBjxjjqEvQM6_QIrhp7F2U
 airbyte_ops_mcp/mcp/cloud_connector_versions.py,sha256=XxaS6WBP0sJPRwT7TTPhVH2PzhPqVWMNU5fVdWdxLLk,10361
 airbyte_ops_mcp/mcp/connector_analysis.py,sha256=OC4KrOSkMkKPkOisWnSv96BDDE5TQYHq-Jxa2vtjJpo,298
 airbyte_ops_mcp/mcp/connector_qa.py,sha256=aImpqdnqBPDrz10BS0owsV4kuIU2XdalzgbaGZsbOL0,258
-airbyte_ops_mcp/mcp/github.py,sha256=…
+airbyte_ops_mcp/mcp/github.py,sha256=Wum5V99A9vTsjK0YVoE1UOVu75F-M9chg0AnUGkiiT4,7215
 airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
-airbyte_ops_mcp/mcp/live_tests.py,sha256=…
+airbyte_ops_mcp/mcp/live_tests.py,sha256=WnWUeGb_fxf6oBjp1ye51Y2fP-Ld-CDbFnTO-_dnV-Q,17134
 airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
-airbyte_ops_mcp/mcp/prerelease.py,sha256=…
+airbyte_ops_mcp/mcp/prerelease.py,sha256=LHLaSd8q0l7boAsVqTXOjFGDxAGsPZdtL3kj5_IOTEE,8852
 airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=RkBVISfkbwML3grWONxYsULRnFEYdqDZVBZIyo6W8xE,14311
 airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
 airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
@@ -407,7 +409,7 @@ airbyte_ops_mcp/prod_db_access/sql.py,sha256=tWQAwMk8DzG8HpLIYglljlReI2oeYulQPsV
 airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
 airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
 airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
-airbyte_internal_ops-0.1.…
-airbyte_internal_ops-0.1.…
-airbyte_internal_ops-0.1.…
-airbyte_internal_ops-0.1.…
+airbyte_internal_ops-0.1.11.dist-info/METADATA,sha256=AgQjwFgwAvefxtJxe234AHdr61u1n_FOBy61CU4wYq4,5283
+airbyte_internal_ops-0.1.11.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+airbyte_internal_ops-0.1.11.dist-info/entry_points.txt,sha256=eUgJ9xIy9PlR-CgRbqRMsh1NVp6jz08v9bul9vCYlU4,111
+airbyte_internal_ops-0.1.11.dist-info/RECORD,,
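For readers unfamiliar with the RECORD format: each row is `path,sha256=<digest>,<size>`, and the digest is the unpadded urlsafe base64 of the file's SHA-256. A quick sketch reproduces the empty-file entries above (`py.typed` and `_legacy/__init__.py`, both size 0):

```python
import base64
import hashlib

# RECORD hashes are urlsafe-base64(sha256(file_bytes)) with '=' padding stripped,
# followed by the file size in bytes.
data = b""  # an empty file, e.g. py.typed
digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
print(f"sha256={digest},0")  # -> sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
```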
airbyte_ops_mcp/cli/cloud.py
CHANGED
@@ -18,10 +18,13 @@ import shutil
 import signal
 import socket
 import subprocess
+import tempfile
 import time
 from pathlib import Path
 from typing import Annotated, Literal
 
+import requests
+import yaml
 from airbyte_cdk.models.connector_metadata import MetadataFile
 from airbyte_cdk.utils.connector_paths import find_connector_root_from_name
 from airbyte_cdk.utils.docker import build_connector_image, verify_docker_installation
@@ -42,6 +45,7 @@ from airbyte_ops_mcp.constants import (
     DEFAULT_CLOUD_SQL_PROXY_PORT,
     ENV_GCP_PROD_DB_ACCESS_CREDENTIALS,
 )
+from airbyte_ops_mcp.live_tests.cdk_secrets import get_first_config_from_secrets
 from airbyte_ops_mcp.live_tests.ci_output import (
     generate_regression_report,
     get_report_summary,
@@ -507,6 +511,49 @@ def _build_connector_image_from_source(
     return built_image
 
 
+def _fetch_control_image_from_metadata(connector_name: str) -> str | None:
+    """Fetch the current released connector image from metadata.yaml on the master branch.
+
+    This fetches the connector's metadata.yaml from the airbyte monorepo's master branch
+    and extracts the dockerRepository and dockerImageTag to construct the control image.
+
+    Args:
+        connector_name: The connector name (e.g., 'source-github').
+
+    Returns:
+        The full connector image with tag (e.g., 'airbyte/source-github:1.0.0'),
+        or None if the metadata could not be fetched or parsed.
+    """
+    metadata_url = (
+        f"https://raw.githubusercontent.com/airbytehq/airbyte/master/"
+        f"airbyte-integrations/connectors/{connector_name}/metadata.yaml"
+    )
+    response = requests.get(metadata_url, timeout=30)
+    if not response.ok:
+        print_error(
+            f"Failed to fetch metadata for {connector_name}: "
+            f"HTTP {response.status_code} from {metadata_url}"
+        )
+        return None
+
+    metadata = yaml.safe_load(response.text)
+    if not isinstance(metadata, dict):
+        print_error(f"Invalid metadata format for {connector_name}: expected dict")
+        return None
+
+    data = metadata.get("data", {})
+    docker_repository = data.get("dockerRepository")
+    docker_image_tag = data.get("dockerImageTag")
+
+    if not docker_repository or not docker_image_tag:
+        print_error(
+            f"Could not find dockerRepository/dockerImageTag in metadata for {connector_name}"
+        )
+        return None
+
+    return f"{docker_repository}:{docker_image_tag}"
+
+
 @connector_app.command(name="live-test")
 def live_test(
     connector_image: Annotated[
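The new helper needs only two fields from the fetched document. A minimal sketch of the parse, with hypothetical metadata values in place of a real metadata.yaml:

```python
import yaml

# Hypothetical metadata.yaml payload; real files carry many more fields.
raw = """
data:
  dockerRepository: airbyte/source-github
  dockerImageTag: 1.0.0
"""
metadata = yaml.safe_load(raw)
data = metadata.get("data", {})
control_image = f"{data['dockerRepository']}:{data['dockerImageTag']}"
assert control_image == "airbyte/source-github:1.0.0"
```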
@@ -906,10 +953,48 @@ def regression_test(
         if not resolved_control_image and connection_data.connector_image:
             resolved_control_image = connection_data.connector_image
             print_success(f"Auto-detected control image: {resolved_control_image}")
+    elif config_path:
+        config_file = Path(config_path)
+        catalog_file = Path(catalog_path) if catalog_path else None
+    elif connector_name:
+        # Fallback: fetch integration test secrets from GSM using PyAirbyte API
+        print_success(
+            f"No connection_id or config_path provided. "
+            f"Attempting to fetch integration test config from GSM for {connector_name}..."
+        )
+        gsm_config = get_first_config_from_secrets(connector_name)
+        if gsm_config:
+            # Write config to a temp file (not in output_path to avoid artifact upload)
+            gsm_config_dir = Path(
+                tempfile.mkdtemp(prefix=f"gsm-config-{connector_name}-")
+            )
+            gsm_config_dir.chmod(0o700)
+            gsm_config_file = gsm_config_dir / "config.json"
+            gsm_config_file.write_text(json.dumps(gsm_config, indent=2))
+            gsm_config_file.chmod(0o600)
+            config_file = gsm_config_file
+            catalog_file = None
+            print_success(
+                f"Fetched integration test config from GSM for {connector_name}"
+            )
+        else:
+            print_error(
+                f"Failed to fetch integration test config from GSM for {connector_name}."
+            )
+            config_file = None
+            catalog_file = None
     else:
-        config_file = …
+        config_file = None
         catalog_file = Path(catalog_path) if catalog_path else None
 
+    # Auto-detect control_image from metadata.yaml if connector_name is provided
+    if not resolved_control_image and connector_name:
+        resolved_control_image = _fetch_control_image_from_metadata(connector_name)
+        if resolved_control_image:
+            print_success(
+                f"Auto-detected control image from metadata.yaml: {resolved_control_image}"
+            )
+
     # Validate that we have both images
     if not resolved_target_image:
         write_github_output("success", False)
@@ -923,8 +1008,9 @@ def regression_test(
         write_github_output("success", False)
         write_github_output("error", "No control image specified")
         exit_with_error(
-            "You must provide one of the following: a control_image…
-            "for a connection that has an associated connector image…
+            "You must provide one of the following: a control_image, a connection_id "
+            "for a connection that has an associated connector image, or a connector_name "
+            "to auto-detect the control image from the airbyte repo's metadata.yaml."
         )
 
     # Pull images if they weren't just built locally
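The GSM fallback writes the fetched config to a locked-down temp directory rather than `output_path`, so the secret never lands in uploaded CI artifacts. A standalone sketch of the same pattern, with a placeholder config and a hypothetical prefix:

```python
import json
import tempfile
from pathlib import Path

config = {"api_key": "placeholder"}  # stand-in for the GSM-fetched config
config_dir = Path(tempfile.mkdtemp(prefix="gsm-config-source-example-"))
config_dir.chmod(0o700)  # owner-only directory keeps other local users out
config_file = config_dir / "config.json"
config_file.write_text(json.dumps(config, indent=2))
config_file.chmod(0o600)  # owner-only file for the secret payload
```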
airbyte_ops_mcp/github_actions.py
ADDED

@@ -0,0 +1,197 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""GitHub Actions API utilities.
+
+This module provides core utilities for interacting with GitHub Actions workflows,
+including workflow dispatch, run discovery, and authentication. These utilities
+are used by MCP tools but are not MCP-specific.
+"""
+
+from __future__ import annotations
+
+import os
+import time
+from dataclasses import dataclass
+from datetime import datetime, timedelta
+
+import requests
+
+GITHUB_API_BASE = "https://api.github.com"
+
+
+def resolve_github_token(preferred_env_vars: list[str] | None = None) -> str:
+    """Resolve GitHub token from environment variables.
+
+    Checks environment variables in order of preference, returning the first
+    non-empty value found.
+
+    Args:
+        preferred_env_vars: List of environment variable names to check in order.
+            Defaults to ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"].
+
+    Returns:
+        GitHub token string.
+
+    Raises:
+        ValueError: If no GitHub token is found in any of the specified env vars.
+    """
+    if preferred_env_vars is None:
+        preferred_env_vars = ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"]
+
+    for env_var in preferred_env_vars:
+        token = os.getenv(env_var)
+        if token:
+            return token
+
+    env_var_list = ", ".join(preferred_env_vars)
+    raise ValueError(
+        f"No GitHub token found. Set one of: {env_var_list} environment variable."
+    )
+
+
+@dataclass
+class WorkflowDispatchResult:
+    """Result of triggering a workflow dispatch."""
+
+    workflow_url: str
+    """URL to the workflow file (e.g., .../actions/workflows/my-workflow.yml)"""
+
+    run_id: int | None = None
+    """GitHub Actions run ID, if discovered"""
+
+    run_url: str | None = None
+    """Direct URL to the workflow run, if discovered"""
+
+
+def find_workflow_run(
+    owner: str,
+    repo: str,
+    workflow_file: str,
+    ref: str,
+    token: str,
+    created_after: datetime,
+    max_wait_seconds: float = 5.0,
+) -> tuple[int, str] | None:
+    """Find a workflow run that was created after a given time.
+
+    This is used to find the run that was just triggered via workflow_dispatch.
+    Polls for up to max_wait_seconds to handle GitHub API eventual consistency.
+
+    Args:
+        owner: Repository owner
+        repo: Repository name
+        workflow_file: Workflow file name
+        ref: Git ref the workflow was triggered on
+        token: GitHub API token
+        created_after: Only consider runs created after this time
+        max_wait_seconds: Maximum time to wait for run to appear (default 5 seconds)
+
+    Returns:
+        Tuple of (run_id, run_url) if found, None otherwise.
+    """
+    url = (
+        f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}/runs"
+    )
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+    params = {
+        "branch": ref,
+        "event": "workflow_dispatch",
+        "per_page": 5,
+    }
+
+    # Add a small buffer to handle timestamp precision differences between
+    # local time and GitHub's created_at (which has second resolution)
+    search_after = created_after - timedelta(seconds=2)
+
+    deadline = time.monotonic() + max_wait_seconds
+    attempt = 0
+
+    while time.monotonic() < deadline:
+        if attempt > 0:
+            time.sleep(1.0)
+        attempt += 1
+
+        response = requests.get(url, headers=headers, params=params, timeout=30)
+        if not response.ok:
+            continue
+
+        data = response.json()
+        runs = data.get("workflow_runs", [])
+
+        for run in runs:
+            run_created_at = datetime.fromisoformat(
+                run["created_at"].replace("Z", "+00:00")
+            )
+            if run_created_at >= search_after:
+                return run["id"], run["html_url"]
+
+    return None
+
+
+def trigger_workflow_dispatch(
+    owner: str,
+    repo: str,
+    workflow_file: str,
+    ref: str,
+    inputs: dict,
+    token: str,
+    find_run: bool = True,
+    max_wait_seconds: float = 5.0,
+) -> WorkflowDispatchResult:
+    """Trigger a GitHub Actions workflow via workflow_dispatch.
+
+    Args:
+        owner: Repository owner (e.g., "airbytehq")
+        repo: Repository name (e.g., "airbyte-ops-mcp")
+        workflow_file: Workflow file name (e.g., "connector-live-test.yml")
+        ref: Git ref to run the workflow on (branch name)
+        inputs: Workflow inputs dictionary
+        token: GitHub API token
+        find_run: Whether to attempt to find the run after dispatch (default True)
+        max_wait_seconds: Maximum time to wait for run discovery (default 5 seconds)
+
+    Returns:
+        WorkflowDispatchResult with workflow URL and optionally run ID/URL.
+
+    Raises:
+        requests.HTTPError: If API request fails.
+    """
+    dispatch_time = datetime.now(tz=datetime.now().astimezone().tzinfo)
+
+    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches"
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+    payload = {
+        "ref": ref,
+        "inputs": inputs,
+    }
+
+    response = requests.post(url, headers=headers, json=payload, timeout=30)
+    response.raise_for_status()
+
+    workflow_url = (
+        f"https://github.com/{owner}/{repo}/actions/workflows/{workflow_file}"
+    )
+
+    if not find_run:
+        return WorkflowDispatchResult(workflow_url=workflow_url)
+
+    # Best-effort lookup of the run that was just triggered
+    run_info = find_workflow_run(
+        owner, repo, workflow_file, ref, token, dispatch_time, max_wait_seconds
+    )
+    if run_info:
+        run_id, run_url = run_info
+        return WorkflowDispatchResult(
+            workflow_url=workflow_url,
+            run_id=run_id,
+            run_url=run_url,
+        )
+
+    return WorkflowDispatchResult(workflow_url=workflow_url)
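Taken together, a typical call sequence against the new module looks like this. A sketch with illustrative repo, workflow, and input values; it assumes a token is present in the environment:

```python
from airbyte_ops_mcp.github_actions import resolve_github_token, trigger_workflow_dispatch

token = resolve_github_token()  # checks GITHUB_CI_WORKFLOW_TRIGGER_PAT, then GITHUB_TOKEN
result = trigger_workflow_dispatch(
    owner="airbytehq",
    repo="airbyte-ops-mcp",
    workflow_file="connector-live-test.yml",
    ref="main",
    inputs={"connection_id": "00000000-0000-0000-0000-000000000000"},  # illustrative
    token=token,
)
# run_url is best-effort: None if the run was not discovered within max_wait_seconds.
print(result.run_url or result.workflow_url)
```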
airbyte_ops_mcp/live_tests/cdk_secrets.py
ADDED

@@ -0,0 +1,90 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""Fetch connector test secrets using the PyAirbyte secrets API.
+
+This module uses the PyAirbyte GoogleGSMSecretManager to retrieve
+integration test secrets from Google Secret Manager for connectors.
+
+Usage:
+    from airbyte_ops_mcp.live_tests.cdk_secrets import get_first_config_from_secrets
+
+    # Fetch the first config for a connector
+    config = get_first_config_from_secrets("source-github")
+    if config:
+        # Use the config dict
+        ...
+
+Note: Requires GCP credentials with access to the integration testing project.
+The credentials can be provided via:
+- GOOGLE_APPLICATION_CREDENTIALS environment variable
+- GCP_GSM_CREDENTIALS environment variable (JSON string)
+- Application Default Credentials
+"""
+
+from __future__ import annotations
+
+import logging
+import os
+
+from airbyte.secrets import GoogleGSMSecretManager
+
+logger = logging.getLogger(__name__)
+
+# Default GCP project for integration test secrets
+DEFAULT_GSM_PROJECT = "dataline-integration-testing"
+
+
+def get_first_config_from_secrets(
+    connector_name: str,
+    project: str = DEFAULT_GSM_PROJECT,
+) -> dict | None:
+    """Fetch the first integration test config for a connector from GSM.
+
+    This function uses the PyAirbyte GoogleGSMSecretManager to fetch secrets
+    labeled with the connector name and returns the first one as a parsed dict.
+
+    Args:
+        connector_name: The connector name (e.g., 'source-github').
+        project: The GCP project ID containing the secrets.
+
+    Returns:
+        The parsed config dict, or None if no secrets are found or fetching fails.
+    """
+    # Get credentials from environment
+    credentials_json: str | None = None
+    credentials_path: str | None = None
+
+    if "GCP_GSM_CREDENTIALS" in os.environ:
+        credentials_json = os.environ["GCP_GSM_CREDENTIALS"]
+    elif "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
+        credentials_path = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
+
+    # If no explicit credentials, GoogleGSMSecretManager will try ADC
+    try:
+        gsm = GoogleGSMSecretManager(
+            project=project,
+            credentials_json=credentials_json,
+            credentials_path=credentials_path,
+        )
+    except Exception as e:
+        logger.warning(f"Failed to initialize GSM client: {e}")
+        return None
+
+    logger.info(f"Fetching integration test config for {connector_name} from GSM")
+
+    try:
+        # fetch_connector_secret returns the first secret matching the connector label
+        secret_handle = gsm.fetch_connector_secret(connector_name)
+        # parse_json() calls get_value() internally and parses the result
+        config = secret_handle.parse_json()
+        logger.info(f"Successfully fetched config for {connector_name}")
+        return dict(config) if config else None
+
+    except StopIteration:
+        logger.warning(f"No secrets found for connector {connector_name}")
+        return None
+    except Exception as e:
+        # Log the exception type but not the message (may contain sensitive info)
+        logger.warning(
+            f"Failed to fetch config for {connector_name}: {type(e).__name__}"
+        )
+        return None
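Usage mirrors the module docstring. A sketch that assumes GCP credentials with access to the `dataline-integration-testing` project are available in the environment:

```python
from airbyte_ops_mcp.live_tests.cdk_secrets import get_first_config_from_secrets

config = get_first_config_from_secrets("source-github")
if config is None:
    print("No integration test secret found (or credentials missing).")
else:
    print(f"Fetched config with {len(config)} top-level keys.")
```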
airbyte_ops_mcp/live_tests/ci_output.py
CHANGED

@@ -142,6 +142,35 @@ def _format_delta(delta: int) -> str:
     return "0"
 
 
+def _get_github_run_url() -> str | None:
+    """Get the URL to the current GitHub Actions workflow run.
+
+    Returns:
+        The workflow run URL, or None if not running in GitHub Actions.
+    """
+    server_url = os.getenv("GITHUB_SERVER_URL")
+    repository = os.getenv("GITHUB_REPOSITORY")
+    run_id = os.getenv("GITHUB_RUN_ID")
+
+    if not all([server_url, repository, run_id]):
+        return None
+
+    return f"{server_url}/{repository}/actions/runs/{run_id}"
+
+
+def _get_github_artifacts_url() -> str | None:
+    """Get the URL to the artifacts section of the current workflow run.
+
+    Returns:
+        The artifacts section URL, or None if not running in GitHub Actions.
+    """
+    run_url = _get_github_run_url()
+    if not run_url:
+        return None
+
+    return f"{run_url}#artifacts"
+
+
 def generate_regression_report(
     target_image: str,
     control_image: str,
@@ -190,6 +219,9 @@ def generate_regression_report(
         target_image.rsplit(":", 1)[0] if ":" in target_image else target_image
     )
 
+    run_url = _get_github_run_url()
+    artifacts_url = _get_github_artifacts_url()
+
     lines: list[str] = [
         "# Regression Test Report",
         "",
@@ -200,12 +232,23 @@ def generate_regression_report(
         f"- **Control Version:** `{control_version}`",
         f"- **Target Version:** `{target_version}`",
         f"- **Command:** `{command.upper()}`",
-        f"- **Artifacts:** `{artifact_name}`",
-        "",
-        "## Summary",
-        "",
     ]
 
+    if run_url:
+        lines.append(f"- **Workflow Run:** [View Execution]({run_url})")
+    if artifacts_url:
+        lines.append(f"- **Artifacts:** [Download `{artifact_name}`]({artifacts_url})")
+    else:
+        lines.append(f"- **Artifacts:** `{artifact_name}`")
+
+    lines.extend(
+        [
+            "",
+            "## Summary",
+            "",
+        ]
+    )
+
     if regression_detected:
         if target_result["success"] and not control_result["success"]:
             lines.append("**Result:** Target succeeded, control failed (improvement)")
@@ -341,9 +384,16 @@ def get_report_summary(report_path: Path) -> str:
         f"regression-test-artifacts-{run_id}" if run_id else "regression-test-artifacts"
     )
 
+    artifacts_url = _get_github_artifacts_url()
+    artifact_link = (
+        f"[`{artifact_name}`]({artifacts_url})"
+        if artifacts_url
+        else f"`{artifact_name}`"
+    )
+
     return f"""## Regression Test Report
 
-Full report available in the **Regression Test Report** check or in artifact…
+Full report available in the **Regression Test Report** check or in artifact {artifact_link}.
 
See the Checks tab for the complete report with message counts and execution details.
"""
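The new report links rely entirely on variables GitHub Actions sets for every run. A sketch of how they combine, with hypothetical values standing in for the runtime environment:

```python
import os

# GitHub Actions sets these automatically; the values here are hypothetical.
os.environ["GITHUB_SERVER_URL"] = "https://github.com"
os.environ["GITHUB_REPOSITORY"] = "airbytehq/airbyte-ops-mcp"
os.environ["GITHUB_RUN_ID"] = "1234567890"

run_url = (
    f"{os.environ['GITHUB_SERVER_URL']}/{os.environ['GITHUB_REPOSITORY']}"
    f"/actions/runs/{os.environ['GITHUB_RUN_ID']}"
)
artifacts_url = f"{run_url}#artifacts"  # anchors to the run's artifacts section
assert run_url == "https://github.com/airbytehq/airbyte-ops-mcp/actions/runs/1234567890"
```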
airbyte_ops_mcp/live_tests/connector_runner.py
CHANGED

@@ -168,6 +168,9 @@ class ConnectorRunner:
 
         with tempfile.TemporaryDirectory() as temp_dir:
             temp_path = Path(temp_dir)
+            # Make temp directory world-readable so non-root container users can access it
+            # Many connector images run as non-root users (e.g., 'airbyte' user)
+            temp_path.chmod(0o755)
             self._prepare_data_directory(temp_path)
 
             docker_cmd = self._build_docker_command(temp_path)
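This fix matters because `tempfile` creates directories with mode 0o700, which a non-root user inside a bind-mounted container cannot traverse. A sketch of the before/after:

```python
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as temp_dir:
    temp_path = Path(temp_dir)
    print(oct(temp_path.stat().st_mode & 0o777))  # 0o700: owner-only by default
    temp_path.chmod(0o755)  # now world-readable/traversable for non-root container users
```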
airbyte_ops_mcp/mcp/github.py
CHANGED
@@ -7,7 +7,6 @@ Docker image availability, and other related operations.
 
 from __future__ import annotations
 
-import os
 import re
 from typing import Annotated
 
@@ -15,9 +14,9 @@ import requests
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
+from airbyte_ops_mcp.github_actions import GITHUB_API_BASE, resolve_github_token
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
-GITHUB_API_BASE = "https://api.github.com"
 DOCKERHUB_API_BASE = "https://hub.docker.com/v2"
 
 
@@ -37,24 +36,6 @@ class WorkflowRunStatus(BaseModel):
     jobs_url: str
 
 
-def _get_github_token() -> str:
-    """Get GitHub token from environment.
-
-    Returns:
-        GitHub token string.
-
-    Raises:
-        ValueError: If GITHUB_TOKEN environment variable is not set.
-    """
-    token = os.getenv("GITHUB_TOKEN")
-    if not token:
-        raise ValueError(
-            "GITHUB_TOKEN environment variable is required. "
-            "Please set it to a GitHub personal access token."
-        )
-    return token
-
-
 def _parse_workflow_url(url: str) -> tuple[str, str, int]:
     """Parse a GitHub Actions workflow run URL into components.
 
@@ -162,7 +143,7 @@ def check_workflow_status(
     )
 
     # Guard: Check for required token
-    token = …
+    token = resolve_github_token()
 
     # Get workflow run details
     run_data = _get_workflow_run(owner, repo, run_id, token)
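For context, `_parse_workflow_url` (unchanged here) splits a run URL into `(owner, repo, run_id)`. A sketch of the equivalent parse; the regex is illustrative, not the module's exact pattern:

```python
import re

url = "https://github.com/airbytehq/airbyte-ops-mcp/actions/runs/1234567890"
match = re.match(r"https://github\.com/([^/]+)/([^/]+)/actions/runs/(\d+)", url)
assert match is not None
owner, repo, run_id = match.group(1), match.group(2), int(match.group(3))
```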
airbyte_ops_mcp/mcp/live_tests.py
CHANGED

@@ -9,7 +9,6 @@ in GitHub Actions and results can be polled via workflow status.
 from __future__ import annotations
 
 import logging
-import os
 import uuid
 from datetime import datetime
 from enum import Enum
@@ -21,6 +20,11 @@ from airbyte.cloud.auth import resolve_cloud_client_id, resolve_cloud_client_sec
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
+from airbyte_ops_mcp.github_actions import (
+    GITHUB_API_BASE,
+    resolve_github_token,
+    trigger_workflow_dispatch,
+)
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
 logger = logging.getLogger(__name__)
@@ -29,7 +33,6 @@ logger = logging.getLogger(__name__)
 # GitHub Workflow Configuration
 # =============================================================================
 
-GITHUB_API_BASE = "https://api.github.com"
 LIVE_TEST_REPO_OWNER = "airbytehq"
 LIVE_TEST_REPO_NAME = "airbyte-ops-mcp"
 LIVE_TEST_DEFAULT_BRANCH = "main"
@@ -37,76 +40,6 @@ LIVE_TEST_WORKFLOW_FILE = "connector-live-test.yml"
 REGRESSION_TEST_WORKFLOW_FILE = "connector-regression-test.yml"
 
 
-# =============================================================================
-# GitHub API Helper Functions
-# =============================================================================
-
-
-def _get_github_token() -> str:
-    """Get GitHub token from environment.
-
-    Checks for tokens in order of specificity:
-    1. GITHUB_CI_WORKFLOW_TRIGGER_PAT (general workflow triggering)
-    2. GITHUB_TOKEN (fallback)
-
-    Returns:
-        GitHub token string.
-
-    Raises:
-        ValueError: If no GitHub token environment variable is set.
-    """
-    token = os.getenv("GITHUB_CI_WORKFLOW_TRIGGER_PAT") or os.getenv("GITHUB_TOKEN")
-    if not token:
-        raise ValueError(
-            "No GitHub token found. Set GITHUB_CI_WORKFLOW_TRIGGER_PAT or GITHUB_TOKEN "
-            "environment variable with 'actions:write' permission."
-        )
-    return token
-
-
-def _trigger_workflow_dispatch(
-    owner: str,
-    repo: str,
-    workflow_file: str,
-    ref: str,
-    inputs: dict[str, Any],
-    token: str,
-) -> str:
-    """Trigger a GitHub Actions workflow via workflow_dispatch.
-
-    Args:
-        owner: Repository owner (e.g., "airbytehq")
-        repo: Repository name (e.g., "airbyte-ops-mcp")
-        workflow_file: Workflow file name (e.g., "connector-live-test.yml")
-        ref: Git ref to run the workflow on (branch name)
-        inputs: Workflow inputs dictionary
-        token: GitHub API token
-
-    Returns:
-        URL to view workflow runs.
-
-    Raises:
-        requests.HTTPError: If API request fails.
-    """
-    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches"
-    headers = {
-        "Authorization": f"Bearer {token}",
-        "Accept": "application/vnd.github+json",
-        "X-GitHub-Api-Version": "2022-11-28",
-    }
-    payload = {
-        "ref": ref,
-        "inputs": inputs,
-    }
-
-    response = requests.post(url, headers=headers, json=payload, timeout=30)
-    response.raise_for_status()
-
-    # workflow_dispatch returns 204 No Content on success
-    # Return URL to view workflow runs
-    return f"https://github.com/{owner}/{repo}/actions/workflows/{workflow_file}"
-
-
 def _get_workflow_run_status(
     owner: str,
     repo: str,
@@ -293,12 +226,22 @@ class LiveConnectionTestResult(BaseModel):
 class RunLiveConnectionTestsResponse(BaseModel):
     """Response from starting a live connection test via GitHub Actions workflow."""
 
-    run_id: str = Field(…
+    run_id: str = Field(
+        description="Unique identifier for the test run (internal tracking ID)"
+    )
     status: TestRunStatus = Field(description="Initial status of the test run")
     message: str = Field(description="Human-readable status message")
     workflow_url: str | None = Field(
         default=None,
-        description="URL to view the GitHub Actions workflow…
+        description="URL to view the GitHub Actions workflow file",
+    )
+    github_run_id: int | None = Field(
+        default=None,
+        description="GitHub Actions workflow run ID (use with check_workflow_status)",
+    )
+    github_run_url: str | None = Field(
+        default=None,
+        description="Direct URL to the GitHub Actions workflow run",
     )
 
 
@@ -348,9 +291,16 @@ def run_live_connection_tests(
     ] = None,
     connector_name: Annotated[
         str | None,
-        "Connector name to build…
-        "(e.g., 'source-pokeapi'). If provided, builds the…
-        "…
+        "Connector name to build the connector image from source "
+        "(e.g., 'source-pokeapi'). If provided, builds the image locally with tag 'dev'. "
+        "For live tests, this builds the test image. For regression tests, this builds "
+        "the target image while control is auto-detected from the connection.",
+    ] = None,
+    airbyte_ref: Annotated[
+        str | None,
+        "Git ref or PR number to checkout from the airbyte monorepo "
+        "(e.g., 'master', '70847', 'refs/pull/70847/head'). "
+        "Only used when connector_name is provided. Defaults to 'master' if not specified.",
     ] = None,
 ) -> RunLiveConnectionTestsResponse:
     """Start a live connection test run via GitHub Actions workflow.
@@ -377,7 +327,7 @@ def run_live_connection_tests(
 
     # Get GitHub token
     try:
-        token = …
+        token = resolve_github_token()
     except ValueError as e:
         return RunLiveConnectionTestsResponse(
             run_id=run_id,
@@ -422,9 +372,11 @@ def run_live_connection_tests(
     }
     if connector_image:
         workflow_inputs["connector_image"] = connector_image
+    if connector_name:
+        workflow_inputs["connector_name"] = connector_name
 
     try:
-        …
+        dispatch_result = trigger_workflow_dispatch(
             owner=LIVE_TEST_REPO_OWNER,
             repo=LIVE_TEST_REPO_NAME,
             workflow_file=LIVE_TEST_WORKFLOW_FILE,
@@ -441,12 +393,15 @@ def run_live_connection_tests(
             workflow_url=None,
         )
 
+    view_url = dispatch_result.run_url or dispatch_result.workflow_url
     return RunLiveConnectionTestsResponse(
         run_id=run_id,
        status=TestRunStatus.QUEUED,
         message=f"Live-test workflow triggered for connection {connection_id}. "
-        f"View progress at: {…
-        workflow_url=workflow_url,
+        f"View progress at: {view_url}",
+        workflow_url=dispatch_result.workflow_url,
+        github_run_id=dispatch_result.run_id,
+        github_run_url=dispatch_result.run_url,
     )
 
     # Regression test workflow (skip_regression_tests=False)
@@ -472,9 +427,11 @@ def run_live_connection_tests(
         workflow_inputs["control_image"] = control_image
     if connector_name:
         workflow_inputs["connector_name"] = connector_name
+    if airbyte_ref:
+        workflow_inputs["airbyte_ref"] = airbyte_ref
 
     try:
-        …
+        dispatch_result = trigger_workflow_dispatch(
             owner=LIVE_TEST_REPO_OWNER,
             repo=LIVE_TEST_REPO_NAME,
             workflow_file=REGRESSION_TEST_WORKFLOW_FILE,
@@ -491,12 +448,15 @@ def run_live_connection_tests(
             workflow_url=None,
         )
 
+    view_url = dispatch_result.run_url or dispatch_result.workflow_url
     return RunLiveConnectionTestsResponse(
         run_id=run_id,
         status=TestRunStatus.QUEUED,
         message=f"Regression-test workflow triggered for connection {connection_id}. "
-        f"View progress at: {…
-        workflow_url=workflow_url,
+        f"View progress at: {view_url}",
+        workflow_url=dispatch_result.workflow_url,
+        github_run_id=dispatch_result.run_id,
+        github_run_url=dispatch_result.run_url,
     )
 
 
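The response now surfaces both the discovered run and the workflow-file URL, preferring the former when the run lookup succeeded. A sketch with illustrative values:

```python
from airbyte_ops_mcp.github_actions import WorkflowDispatchResult

dispatch_result = WorkflowDispatchResult(
    workflow_url="https://github.com/airbytehq/airbyte-ops-mcp/actions/workflows/connector-live-test.yml",
    run_id=1234567890,  # illustrative; None when the run was not discovered
    run_url="https://github.com/airbytehq/airbyte-ops-mcp/actions/runs/1234567890",
)
# Same fallback the tool uses when building its status message:
view_url = dispatch_result.run_url or dispatch_result.workflow_url
```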
airbyte_ops_mcp/mcp/prerelease.py
CHANGED

@@ -8,7 +8,6 @@ workflow in the airbytehq/airbyte repository via GitHub's workflow dispatch API.
 from __future__ import annotations
 
 import base64
-import os
 from typing import Annotated, Literal
 
 import requests
@@ -16,15 +15,22 @@ import yaml
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
+from airbyte_ops_mcp.github_actions import GITHUB_API_BASE, resolve_github_token
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
-GITHUB_API_BASE = "https://api.github.com"
 DEFAULT_REPO_OWNER = "airbytehq"
 DEFAULT_REPO_NAME = "airbyte"
 DEFAULT_BRANCH = "master"
 PRERELEASE_WORKFLOW_FILE = "publish-connectors-prerelease-command.yml"
 CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
 
+# Token env vars for prerelease publishing (in order of preference)
+PRERELEASE_TOKEN_ENV_VARS = [
+    "GITHUB_CONNECTOR_PUBLISHING_PAT",
+    "GITHUB_CI_WORKFLOW_TRIGGER_PAT",
+    "GITHUB_TOKEN",
+]
+
 
 class PRHeadInfo(BaseModel):
     """Information about a PR's head commit."""
@@ -46,34 +52,6 @@ class PrereleaseWorkflowResult(BaseModel):
     docker_image_tag: str | None = None
 
 
-def _get_github_token() -> str:
-    """Get GitHub token from environment.
-
-    Checks for tokens in order of specificity:
-    1. GITHUB_CONNECTOR_PUBLISHING_PAT (most specific)
-    2. GITHUB_CI_WORKFLOW_TRIGGER_PAT (general workflow triggering)
-    3. GITHUB_TOKEN (fallback)
-
-    Returns:
-        GitHub token string.
-
-    Raises:
-        ValueError: If no GitHub token environment variable is set.
-    """
-    token = (
-        os.getenv("GITHUB_CONNECTOR_PUBLISHING_PAT")
-        or os.getenv("GITHUB_CI_WORKFLOW_TRIGGER_PAT")
-        or os.getenv("GITHUB_TOKEN")
-    )
-    if not token:
-        raise ValueError(
-            "No GitHub token found. Set GITHUB_CONNECTOR_PUBLISHING_PAT, "
-            "GITHUB_CI_WORKFLOW_TRIGGER_PAT, or GITHUB_TOKEN environment variable "
-            "with 'actions:write' permission."
-        )
-    return token
-
-
 def _get_pr_head_info(
     owner: str,
     repo: str,
@@ -248,7 +226,7 @@ def publish_connector_to_airbyte_registry(
     )
 
     # Guard: Check for required token
-    token = …
+    token = resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)
 
     # Get the PR's head ref and SHA
     head_info = _get_pr_head_info(
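The prerelease tool keeps its stricter preference order but now delegates to the shared resolver. A sketch; it assumes at least one of these variables is set:

```python
from airbyte_ops_mcp.github_actions import resolve_github_token

PRERELEASE_TOKEN_ENV_VARS = [
    "GITHUB_CONNECTOR_PUBLISHING_PAT",  # most specific
    "GITHUB_CI_WORKFLOW_TRIGGER_PAT",   # general workflow triggering
    "GITHUB_TOKEN",                     # fallback
]
token = resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)
```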
{airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/WHEEL
RENAMED

File without changes

{airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.1.11.dist-info}/entry_points.txt
RENAMED

File without changes