airbyte-internal-ops 0.4.0__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/RECORD +12 -12
- airbyte_ops_mcp/cli/gh.py +105 -2
- airbyte_ops_mcp/cli/registry.py +2 -2
- airbyte_ops_mcp/github_actions.py +6 -100
- airbyte_ops_mcp/github_api.py +163 -5
- airbyte_ops_mcp/mcp/{github.py → github_actions.py} +117 -5
- airbyte_ops_mcp/mcp/prerelease.py +6 -53
- airbyte_ops_mcp/mcp/regression_tests.py +4 -6
- airbyte_ops_mcp/mcp/server.py +2 -2
- {airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/WHEEL +0 -0
- {airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/entry_points.txt +0 -0
{airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/RECORD
RENAMED

@@ -2,8 +2,8 @@ airbyte_ops_mcp/__init__.py,sha256=tuzdlMkfnWBnsri5KGHM2M_xuNnzFk2u_aR79mmN7Yg,7
 airbyte_ops_mcp/_annotations.py,sha256=MO-SBDnbykxxHDESG7d8rviZZ4WlZgJKv0a8eBqcEzQ,1757
 airbyte_ops_mcp/constants.py,sha256=khcv9W3WkApIyPygEGgE2noBIqLomjoOMLxFBU1ArjA,5308
 airbyte_ops_mcp/gcp_auth.py,sha256=i0cm1_xX4fj_31iKlfARpNvTaSr85iGTSw9KMf4f4MU,7206
-airbyte_ops_mcp/github_actions.py,sha256=
-airbyte_ops_mcp/github_api.py,sha256=
+airbyte_ops_mcp/github_actions.py,sha256=FSi_tjS9TbwRVp8dwlDZhFOi7lJXEZQLhPm2KpcjNlY,7022
+airbyte_ops_mcp/github_api.py,sha256=ezpMR1vjqQ-1f5yOLBVbxW70OPtUferl1uA0u_gUVo8,12733
 airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/airbyte_ci/README.md,sha256=qEYx4geDR8AEDjrcA303h7Nol-CMDLojxUyiGzQprM8,236
@@ -354,8 +354,8 @@ airbyte_ops_mcp/cli/_base.py,sha256=I8tWnyQf0ks4r3J8N8h-5GZxyn37T-55KsbuHnxYlcg,
 airbyte_ops_mcp/cli/_shared.py,sha256=jg-xMyGzTCGPqKd8VTfE_3kGPIyO_3Kx5sQbG4rPc0Y,1311
 airbyte_ops_mcp/cli/app.py,sha256=SEdBpqFUG2O8zGV5ifwptxrLGFph_dLr66-MX9d69gQ,789
 airbyte_ops_mcp/cli/cloud.py,sha256=OmeJPW8ME82PLJSqzoU_tz_3iqsTA-MY4QBO-ad8gfo,44141
-airbyte_ops_mcp/cli/gh.py,sha256=
-airbyte_ops_mcp/cli/registry.py,sha256=
+airbyte_ops_mcp/cli/gh.py,sha256=koJPu0MDB6AW7mJq2z4dZV65ofvsZTkqoeitGF8KJR8,5364
+airbyte_ops_mcp/cli/registry.py,sha256=L4nDKhlegr31gSE-GUvDFSq10KgDz5kJuZXgLIxYIyg,9785
 airbyte_ops_mcp/cli/repo.py,sha256=G1hoQpH0XYhUH3FFOsia9xabGB0LP9o3XcwBuqvFVo0,16331
 airbyte_ops_mcp/cloud_admin/__init__.py,sha256=cqE96Q10Kp6elhH9DAi6TVsIwSUy3sooDLLrxTaktGk,816
 airbyte_ops_mcp/cloud_admin/api_client.py,sha256=ysTztSbLX0SZSK3qneHTSKVODRzVmLbHBC3ND0j_LTc,38020
@@ -376,15 +376,15 @@ airbyte_ops_mcp/mcp/cloud_connector_versions.py,sha256=5qUYRZapYBprmmc5J3lKQzeQ3
 airbyte_ops_mcp/mcp/connector_analysis.py,sha256=OC4KrOSkMkKPkOisWnSv96BDDE5TQYHq-Jxa2vtjJpo,298
 airbyte_ops_mcp/mcp/connector_qa.py,sha256=aImpqdnqBPDrz10BS0owsV4kuIU2XdalzgbaGZsbOL0,258
 airbyte_ops_mcp/mcp/gcp_logs.py,sha256=IPtq4098_LN1Cgeba4jATO1iYFFFpL2-aRO0pGcOdzs,2689
-airbyte_ops_mcp/mcp/
+airbyte_ops_mcp/mcp/github_actions.py,sha256=_mAVTl6UX3F7S_HeV1-M5R4jMNzNQGI3ADs3sBzden8,11760
 airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
 airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
-airbyte_ops_mcp/mcp/prerelease.py,sha256=
+airbyte_ops_mcp/mcp/prerelease.py,sha256=fEZwqtyFQC9nKBF6MJf0WcHoiEoCiouFbBG2bqBtuRY,10701
 airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=VsiBBnVbOjc8lBb2Xr1lmcH3wu7QHQfjd4lORarEE1s,42700
 airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
 airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
-airbyte_ops_mcp/mcp/regression_tests.py,sha256=
-airbyte_ops_mcp/mcp/server.py,sha256=
+airbyte_ops_mcp/mcp/regression_tests.py,sha256=dmM22ODwUTbVisKiRcJunzEgMKrZOkpsbkUm0_hFWYk,16752
+airbyte_ops_mcp/mcp/server.py,sha256=dMOFXPFeHBIqicOWs8UsPfzgsWnzsWDsZJ79E_OYjT0,5341
 airbyte_ops_mcp/mcp/server_info.py,sha256=Yi4B1auW64QZGBDas5mro_vwTjvrP785TFNSBP7GhRg,2361
 airbyte_ops_mcp/prod_db_access/__init__.py,sha256=5pxouMPY1beyWlB0UwPnbaLTKTHqU6X82rbbgKY2vYU,1069
 airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=VUqEWZtharJUR-Cri_pMwtGh1C4Neu4s195mbEXlm-w,9190
@@ -414,7 +414,7 @@ airbyte_ops_mcp/regression_tests/regression/comparators.py,sha256=MJkLZEKHivgrG0
 airbyte_ops_mcp/regression_tests/validation/__init__.py,sha256=MBEwGOoNuqT4_oCahtoK62OKWIjUCfWa7vZTxNj_0Ek,1532
 airbyte_ops_mcp/regression_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdPgwu4d0hA0ZEjtsNh5gapvGydRv3_qk,12553
 airbyte_ops_mcp/regression_tests/validation/record_validators.py,sha256=RjauAhKWNwxMBTu0eNS2hMFNQVs5CLbQU51kp6FOVDk,7432
-airbyte_internal_ops-0.4.
-airbyte_internal_ops-0.4.
-airbyte_internal_ops-0.4.
-airbyte_internal_ops-0.4.
+airbyte_internal_ops-0.4.1.dist-info/METADATA,sha256=mjk54F-EL71ItP6D4BxFg8_eZVZlbfe8bW1wTgno10g,5679
+airbyte_internal_ops-0.4.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+airbyte_internal_ops-0.4.1.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+airbyte_internal_ops-0.4.1.dist-info/RECORD,,

airbyte_ops_mcp/cli/gh.py
CHANGED
@@ -3,17 +3,23 @@
 
 Commands:
     airbyte-ops gh workflow status - Check GitHub Actions workflow status
+    airbyte-ops gh workflow trigger - Trigger a GitHub Actions CI workflow
 """
 
 from __future__ import annotations
 
+import json
+import time
 from typing import Annotated
 
 from cyclopts import App, Parameter
 
 from airbyte_ops_mcp.cli._base import app
 from airbyte_ops_mcp.cli._shared import exit_with_error, print_json
-from airbyte_ops_mcp.mcp.
+from airbyte_ops_mcp.mcp.github_actions import (
+    check_ci_workflow_status,
+    trigger_ci_workflow,
+)
 
 # Create the gh sub-app
 gh_app = App(name="gh", help="GitHub operations.")
@@ -62,10 +68,107 @@ def workflow_status(
             "Must provide either --url OR all of (--owner, --repo, --run-id)."
         )
 
-    result =
+    result = check_ci_workflow_status(
         workflow_url=url,
         owner=owner,
         repo=repo,
         run_id=run_id,
     )
     print_json(result.model_dump())
+
+
+@workflow_app.command(name="trigger")
+def workflow_trigger(
+    owner: Annotated[
+        str,
+        Parameter(help="Repository owner (e.g., 'airbytehq')."),
+    ],
+    repo: Annotated[
+        str,
+        Parameter(help="Repository name (e.g., 'airbyte')."),
+    ],
+    workflow_file: Annotated[
+        str,
+        Parameter(help="Workflow file name (e.g., 'connector-regression-test.yml')."),
+    ],
+    workflow_definition_ref: Annotated[
+        str | None,
+        Parameter(
+            help="Branch name or PR number for the workflow definition to use. "
+            "If a PR number is provided, it resolves to the PR's head branch name. "
+            "Defaults to 'main' if not specified."
+        ),
+    ] = None,
+    inputs: Annotated[
+        str | None,
+        Parameter(
+            help='Workflow inputs as a JSON string (e.g., \'{"key": "value"}\').'
+        ),
+    ] = None,
+    wait: Annotated[
+        bool,
+        Parameter(help="Wait for the workflow to complete before returning."),
+    ] = False,
+    wait_seconds: Annotated[
+        int,
+        Parameter(
+            help="Maximum seconds to wait for workflow completion (default: 600)."
+        ),
+    ] = 600,
+) -> None:
+    """Trigger a GitHub Actions CI workflow via workflow_dispatch.
+
+    This command triggers a workflow in any GitHub repository that has workflow_dispatch
+    enabled. It resolves PR numbers to branch names automatically.
+    """
+    # Parse inputs JSON if provided
+    parsed_inputs: dict[str, str] | None = None
+    if inputs:
+        try:
+            parsed_inputs = json.loads(inputs)
+        except json.JSONDecodeError as e:
+            exit_with_error(f"Invalid JSON for --inputs: {e}")
+
+    # Trigger the workflow
+    result = trigger_ci_workflow(
+        owner=owner,
+        repo=repo,
+        workflow_file=workflow_file,
+        workflow_definition_ref=workflow_definition_ref,
+        inputs=parsed_inputs,
+    )
+
+    print_json(result.model_dump())
+
+    # If wait is enabled and we have a run_id, poll for completion
+    if wait and result.run_id:
+        print(f"\nWaiting for workflow to complete (timeout: {wait_seconds}s)...")
+        start_time = time.time()
+        poll_interval = 10  # seconds
+
+        while time.time() - start_time < wait_seconds:
+            status_result = check_ci_workflow_status(
+                owner=owner,
+                repo=repo,
+                run_id=result.run_id,
+            )
+
+            if status_result.status == "completed":
+                print(
+                    f"\nWorkflow completed with conclusion: {status_result.conclusion}"
+                )
+                print_json(status_result.model_dump())
+                return
+
+            elapsed = int(time.time() - start_time)
+            print(f"  Status: {status_result.status} (elapsed: {elapsed}s)")
+            time.sleep(poll_interval)
+
+        print(f"\nTimeout reached after {wait_seconds}s. Workflow still running.")
+        # Print final status
+        final_status = check_ci_workflow_status(
+            owner=owner,
+            repo=repo,
+            run_id=result.run_id,
+        )
+        print_json(final_status.model_dump())
airbyte_ops_mcp/cli/registry.py
CHANGED
@@ -28,11 +28,11 @@ from airbyte_ops_mcp.cli._shared import (
     print_json,
     print_success,
 )
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.github_api import (
     get_file_contents_at_ref,
     resolve_github_token,
 )
-from airbyte_ops_mcp.mcp.
+from airbyte_ops_mcp.mcp.github_actions import get_docker_image_info
 from airbyte_ops_mcp.mcp.prerelease import (
     compute_prerelease_docker_image_tag,
     publish_connector_to_airbyte_registry,

airbyte_ops_mcp/github_actions.py
CHANGED

@@ -1,72 +1,23 @@
 # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 """GitHub Actions API utilities.
 
-This module provides
-including workflow dispatch, run discovery, and
+This module provides utilities for interacting with GitHub Actions workflows,
+including workflow dispatch, run discovery, and job status. These utilities
 are used by MCP tools but are not MCP-specific.
+
+For general GitHub API utilities (authentication, PR info, file contents),
+see the github_api module.
 """
 
 from __future__ import annotations
 
-import os
-import shutil
-import subprocess
 import time
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 
 import requests
 
-GITHUB_API_BASE
-
-
-def resolve_github_token(preferred_env_vars: list[str] | None = None) -> str:
-    """Resolve GitHub token from environment variables or gh CLI.
-
-    Checks environment variables in order of preference, returning the first
-    non-empty value found. If no environment variables are set, attempts to
-    get a token from the gh CLI tool using 'gh auth token'.
-
-    Args:
-        preferred_env_vars: List of environment variable names to check in order.
-            Defaults to ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"].
-
-    Returns:
-        GitHub token string.
-
-    Raises:
-        ValueError: If no GitHub token is found in env vars or gh CLI.
-    """
-    if preferred_env_vars is None:
-        preferred_env_vars = ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"]
-
-    # Check environment variables first
-    for env_var in preferred_env_vars:
-        token = os.getenv(env_var)
-        if token:
-            return token
-
-    # Fall back to gh CLI if available
-    gh_path = shutil.which("gh")
-    if gh_path:
-        try:
-            result = subprocess.run(
-                [gh_path, "auth", "token"],
-                capture_output=True,
-                text=True,
-                timeout=5,
-                check=False,
-            )
-            if result.returncode == 0 and result.stdout.strip():
-                return result.stdout.strip()
-        except (subprocess.TimeoutExpired, subprocess.SubprocessError):
-            pass
-
-    env_var_list = ", ".join(preferred_env_vars)
-    raise ValueError(
-        f"No GitHub token found. Set one of: {env_var_list} environment variable, "
-        "or authenticate with 'gh auth login'."
-    )
+from airbyte_ops_mcp.github_api import GITHUB_API_BASE, resolve_github_token
 
 
 @dataclass
@@ -106,51 +57,6 @@ class WorkflowJobInfo:
     """ISO 8601 timestamp when the job completed"""
 
 
-def get_file_contents_at_ref(
-    owner: str,
-    repo: str,
-    path: str,
-    ref: str,
-    token: str | None = None,
-) -> str | None:
-    """Fetch file contents from GitHub at a specific ref.
-
-    Uses the GitHub Contents API to retrieve file contents at a specific
-    commit SHA, branch, or tag. This allows reading files without having
-    the repository checked out locally.
-
-    Args:
-        owner: Repository owner (e.g., "airbytehq")
-        repo: Repository name (e.g., "airbyte")
-        path: Path to the file within the repository
-        ref: Git ref (commit SHA, branch name, or tag)
-        token: GitHub API token (optional for public repos, but recommended
-            to avoid rate limiting)
-
-    Returns:
-        File contents as a string, or None if the file doesn't exist.
-
-    Raises:
-        requests.HTTPError: If API request fails (except 404).
-    """
-    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/contents/{path}"
-    headers = {
-        "Accept": "application/vnd.github.raw+json",
-        "X-GitHub-Api-Version": "2022-11-28",
-    }
-    if token:
-        headers["Authorization"] = f"Bearer {token}"
-
-    params = {"ref": ref}
-
-    response = requests.get(url, headers=headers, params=params, timeout=30)
-    if response.status_code == 404:
-        return None
-    response.raise_for_status()
-
-    return response.text
-
-
 def get_workflow_jobs(
     owner: str,
     repo: str,

airbyte_ops_mcp/github_api.py
CHANGED
@@ -1,20 +1,178 @@
 # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
-"""GitHub API utilities
+"""GitHub API utilities.
 
-This module provides utilities for interacting with GitHub's REST API
-
-used by MCP tools
+This module provides core utilities for interacting with GitHub's REST API,
+including authentication, user/comment operations, PR information retrieval,
+and file content fetching. These utilities are used by MCP tools and other
+modules but are not MCP-specific.
 """
 
 from __future__ import annotations
 
+import os
 import re
+import shutil
+import subprocess
 from dataclasses import dataclass
 from urllib.parse import urlparse
 
 import requests
 
-
+GITHUB_API_BASE = "https://api.github.com"
+
+
+def resolve_github_token(preferred_env_vars: list[str] | None = None) -> str:
+    """Resolve GitHub token from environment variables or gh CLI.
+
+    Checks environment variables in order of preference, returning the first
+    non-empty value found. If no environment variables are set, attempts to
+    get a token from the gh CLI tool using 'gh auth token'.
+
+    Args:
+        preferred_env_vars: List of environment variable names to check in order.
+            Defaults to ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"].
+
+    Returns:
+        GitHub token string.
+
+    Raises:
+        ValueError: If no GitHub token is found in env vars or gh CLI.
+    """
+    if preferred_env_vars is None:
+        preferred_env_vars = ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"]
+
+    # Check environment variables first
+    for env_var in preferred_env_vars:
+        token = os.getenv(env_var)
+        if token:
+            return token
+
+    # Fall back to gh CLI if available
+    gh_path = shutil.which("gh")
+    if gh_path:
+        try:
+            result = subprocess.run(
+                [gh_path, "auth", "token"],
+                capture_output=True,
+                text=True,
+                timeout=5,
+                check=False,
+            )
+            if result.returncode == 0 and result.stdout.strip():
+                return result.stdout.strip()
+        except (subprocess.TimeoutExpired, subprocess.SubprocessError):
+            pass
+
+    env_var_list = ", ".join(preferred_env_vars)
+    raise ValueError(
+        f"No GitHub token found. Set one of: {env_var_list} environment variable, "
+        "or authenticate with 'gh auth login'."
+    )
+
+
+@dataclass
+class PRHeadInfo:
+    """Information about a PR's head commit."""
+
+    ref: str
+    """Branch name of the PR's head"""
+
+    sha: str
+    """Full commit SHA of the PR's head"""
+
+    short_sha: str
+    """First 7 characters of the commit SHA"""
+
+
+def get_pr_head_ref(
+    owner: str,
+    repo: str,
+    pr_number: int,
+    token: str,
+) -> PRHeadInfo:
+    """Get the head ref (branch name) and SHA for a PR.
+
+    This is useful for resolving a PR number to the actual branch name,
+    which is required for workflow_dispatch API calls (which don't accept
+    refs/pull/{pr}/head format).
+
+    Args:
+        owner: Repository owner (e.g., "airbytehq")
+        repo: Repository name (e.g., "airbyte")
+        pr_number: Pull request number
+        token: GitHub API token
+
+    Returns:
+        PRHeadInfo with ref (branch name), sha, and short_sha.
+
+    Raises:
+        ValueError: If PR not found.
+        requests.HTTPError: If API request fails.
+    """
+    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/pulls/{pr_number}"
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+
+    response = requests.get(url, headers=headers, timeout=30)
+    if response.status_code == 404:
+        raise ValueError(f"PR {owner}/{repo}#{pr_number} not found")
+    response.raise_for_status()
+
+    pr_data = response.json()
+    sha = pr_data["head"]["sha"]
+    return PRHeadInfo(
+        ref=pr_data["head"]["ref"],
+        sha=sha,
+        short_sha=sha[:7],
+    )
+
+
+def get_file_contents_at_ref(
+    owner: str,
+    repo: str,
+    path: str,
+    ref: str,
+    token: str | None = None,
+) -> str | None:
+    """Fetch file contents from GitHub at a specific ref.
+
+    Uses the GitHub Contents API to retrieve file contents at a specific
+    commit SHA, branch, or tag. This allows reading files without having
+    the repository checked out locally.
+
+    Args:
+        owner: Repository owner (e.g., "airbytehq")
+        repo: Repository name (e.g., "airbyte")
+        path: Path to the file within the repository
+        ref: Git ref (commit SHA, branch name, or tag)
+        token: GitHub API token (optional for public repos, but recommended
+            to avoid rate limiting)
+
+    Returns:
+        File contents as a string, or None if the file doesn't exist.
+
+    Raises:
+        requests.HTTPError: If API request fails (except 404).
+    """
+    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/contents/{path}"
+    headers = {
+        "Accept": "application/vnd.github.raw+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+    if token:
+        headers["Authorization"] = f"Bearer {token}"
+
+    params = {"ref": ref}
+
+    response = requests.get(url, headers=headers, params=params, timeout=30)
+    if response.status_code == 404:
+        return None
+    response.raise_for_status()
+
+    return response.text
 
 
 class GitHubCommentParseError(Exception):

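Not part of the diff: the helpers relocated into github_api.py are designed to compose, resolving a token, resolving a PR number to its head ref, then reading a file at that ref. A minimal sketch follows; the PR number and file path are illustrative, not taken from this package.

# Minimal composition sketch; PR number and path are illustrative.
from airbyte_ops_mcp.github_api import (
    get_file_contents_at_ref,
    get_pr_head_ref,
    resolve_github_token,
)

token = resolve_github_token()  # env vars first, then `gh auth token`
head = get_pr_head_ref("airbytehq", "airbyte", 12345, token)  # hypothetical PR number
contents = get_file_contents_at_ref(
    owner="airbytehq",
    repo="airbyte",
    path="airbyte-integrations/connectors/source-faker/metadata.yaml",  # illustrative path
    ref=head.sha,
    token=token,
)
if contents is None:
    print("file not present at that ref")
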
airbyte_ops_mcp/mcp/{github.py → github_actions.py}
RENAMED

@@ -15,12 +15,22 @@ from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
 from airbyte_ops_mcp.github_actions import (
-    GITHUB_API_BASE,
     get_workflow_jobs,
+    trigger_workflow_dispatch,
+)
+from airbyte_ops_mcp.github_api import (
+    GITHUB_API_BASE,
+    get_pr_head_ref,
     resolve_github_token,
 )
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
+# Token env vars for workflow triggering (in order of preference)
+WORKFLOW_TRIGGER_TOKEN_ENV_VARS = [
+    "GITHUB_CI_WORKFLOW_TRIGGER_PAT",
+    "GITHUB_TOKEN",
+]
+
 DOCKERHUB_API_BASE = "https://hub.docker.com/v2"
 
 
@@ -36,7 +46,7 @@ class JobInfo(BaseModel):
 
 
 class WorkflowRunStatus(BaseModel):
-    """Response model for
+    """Response model for check_ci_workflow_status MCP tool."""
 
     run_id: int
     status: str
@@ -116,7 +126,7 @@ def _get_workflow_run(
     idempotent=True,
     open_world=True,
 )
-def
+def check_ci_workflow_status(
     workflow_url: Annotated[
         str | None,
         Field(
@@ -196,6 +206,108 @@ def check_workflow_status(
     )
 
 
+class TriggerCIWorkflowResult(BaseModel):
+    """Response model for trigger_ci_workflow MCP tool."""
+
+    success: bool
+    message: str
+    workflow_url: str
+    run_id: int | None = None
+    run_url: str | None = None
+
+
+@mcp_tool(
+    read_only=False,
+    idempotent=False,
+    open_world=True,
+)
+def trigger_ci_workflow(
+    owner: Annotated[
+        str,
+        Field(description="Repository owner (e.g., 'airbytehq')"),
+    ],
+    repo: Annotated[
+        str,
+        Field(description="Repository name (e.g., 'airbyte')"),
+    ],
+    workflow_file: Annotated[
+        str,
+        Field(description="Workflow file name (e.g., 'connector-regression-test.yml')"),
+    ],
+    workflow_definition_ref: Annotated[
+        str | None,
+        Field(
+            description="Branch name or PR number for the workflow definition to use. "
+            "If a PR number (integer string) is provided, it resolves to the PR's head branch name. "
+            "If a branch name is provided, it is used directly. "
+            "Defaults to the repository's default branch if not specified."
+        ),
+    ] = None,
+    inputs: Annotated[
+        dict[str, str] | None,
+        Field(
+            description="Workflow inputs as a dictionary of string key-value pairs. "
+            "These are passed to the workflow_dispatch event."
+        ),
+    ] = None,
+) -> TriggerCIWorkflowResult:
+    """Trigger a GitHub Actions CI workflow via workflow_dispatch.
+
+    This tool triggers a workflow in any GitHub repository that has workflow_dispatch
+    enabled. It resolves PR numbers to branch names automatically since GitHub's
+    workflow_dispatch API only accepts branch names, not refs/pull/{pr}/head format.
+
+    Requires GITHUB_CI_WORKFLOW_TRIGGER_PAT or GITHUB_TOKEN environment variable
+    with 'actions:write' permission.
+    """
+    # Guard: Check for required token
+    token = resolve_github_token(WORKFLOW_TRIGGER_TOKEN_ENV_VARS)
+
+    # Resolve workflow definition ref
+    # If a PR number is provided (integer string), resolve to the PR's head branch name
+    # Otherwise use the provided branch name or default to repo's default branch
+    if workflow_definition_ref:
+        if workflow_definition_ref.isdigit():
+            # Resolve PR number to branch name via GitHub API
+            pr_head_info = get_pr_head_ref(
+                owner,
+                repo,
+                int(workflow_definition_ref),
+                token,
+            )
+            resolved_ref = pr_head_info.ref
+        else:
+            resolved_ref = workflow_definition_ref
+    else:
+        # Default to main (most common default branch)
+        resolved_ref = "main"
+
+    # Trigger the workflow
+    result = trigger_workflow_dispatch(
+        owner=owner,
+        repo=repo,
+        workflow_file=workflow_file,
+        ref=resolved_ref,
+        inputs=inputs or {},
+        token=token,
+        find_run=True,
+    )
+
+    # Build response message
+    if result.run_id:
+        message = f"Successfully triggered workflow {workflow_file} on {owner}/{repo} (ref: {resolved_ref}). Run ID: {result.run_id}"
+    else:
+        message = f"Successfully triggered workflow {workflow_file} on {owner}/{repo} (ref: {resolved_ref}). Run ID not yet available."
+
+    return TriggerCIWorkflowResult(
+        success=True,
+        message=message,
+        workflow_url=result.workflow_url,
+        run_id=result.run_id,
+        run_url=result.run_url,
+    )
+
+
 class DockerImageInfo(BaseModel):
     """Response model for get_docker_image_info MCP tool."""
 
@@ -282,8 +394,8 @@ def get_docker_image_info(
     )
 
 
-def
-    """Register GitHub tools with the FastMCP app.
+def register_github_actions_tools(app: FastMCP) -> None:
+    """Register GitHub Actions tools with the FastMCP app.
 
     Args:
         app: FastMCP application instance

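Not part of the diff: register_github_actions_tools follows the same registration pattern as the other MCP modules in this package. A hypothetical standalone wiring, outside of server.py, might look like the sketch below; the server name is illustrative.

# Minimal sketch of registering only the GitHub Actions tools on a FastMCP app.
from fastmcp import FastMCP

from airbyte_ops_mcp.mcp.github_actions import register_github_actions_tools

app = FastMCP("github-actions-only")  # illustrative server name
register_github_actions_tools(app)
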
airbyte_ops_mcp/mcp/prerelease.py
CHANGED

@@ -18,7 +18,11 @@ import yaml
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.github_api import (
+    GITHUB_API_BASE,
+    get_pr_head_ref,
+    resolve_github_token,
+)
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
 
@@ -87,14 +91,6 @@ def compute_prerelease_docker_image_tag(base_version: str, sha: str) -> str:
     return f"{base_version}-{PRERELEASE_TAG_PREFIX}.{short_sha}"
 
 
-class PRHeadInfo(BaseModel):
-    """Information about a PR's head commit."""
-
-    ref: str
-    sha: str
-    short_sha: str
-
-
 class PrereleaseWorkflowResult(BaseModel):
     """Response model for publish_connector_to_airbyte_registry MCP tool."""
 
@@ -107,47 +103,6 @@ class PrereleaseWorkflowResult(BaseModel):
     docker_image_tag: str | None = None
 
 
-def _get_pr_head_info(
-    owner: str,
-    repo: str,
-    pr_number: int,
-    token: str,
-) -> PRHeadInfo:
-    """Get the head ref and SHA for a PR.
-
-    Args:
-        owner: Repository owner (e.g., "airbytehq")
-        repo: Repository name (e.g., "airbyte")
-        pr_number: Pull request number
-        token: GitHub API token
-
-    Returns:
-        PRHeadInfo with ref, sha, and short_sha.
-
-    Raises:
-        ValueError: If PR not found or API error.
-    """
-    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/pulls/{pr_number}"
-    headers = {
-        "Authorization": f"Bearer {token}",
-        "Accept": "application/vnd.github+json",
-        "X-GitHub-Api-Version": "2022-11-28",
-    }
-
-    response = requests.get(url, headers=headers, timeout=30)
-    if response.status_code == 404:
-        raise ValueError(f"PR {owner}/{repo}#{pr_number} not found")
-    response.raise_for_status()
-
-    pr_data = response.json()
-    sha = pr_data["head"]["sha"]
-    return PRHeadInfo(
-        ref=pr_data["head"]["ref"],
-        sha=sha,
-        short_sha=sha[:7],
-    )
-
-
 def _get_connector_metadata(
     owner: str,
     repo: str,
@@ -305,9 +260,7 @@ def publish_connector_to_airbyte_registry(
 
     # Get the PR's head SHA for computing the docker image tag
     # Note: We no longer pass gitref to the workflow - it derives the ref from PR number
-    head_info =
-        DEFAULT_REPO_OWNER, target_repo_name, pr_number, token
-    )
+    head_info = get_pr_head_ref(DEFAULT_REPO_OWNER, target_repo_name, pr_number, token)
 
     # Prepare workflow inputs
     # The workflow uses refs/pull/{pr}/head directly - no gitref needed

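Not part of the diff: with the local _get_pr_head_info removed, prerelease.py relies on github_api.get_pr_head_ref for PR head lookup. A small sketch of deriving a prerelease tag from a PR's head SHA follows; the PR number and base version are illustrative, and the resulting string follows the f"{base_version}-{PRERELEASE_TAG_PREFIX}.{short_sha}" pattern shown above.

# Minimal sketch; PR number and base version are illustrative.
from airbyte_ops_mcp.github_api import get_pr_head_ref, resolve_github_token
from airbyte_ops_mcp.mcp.prerelease import compute_prerelease_docker_image_tag

token = resolve_github_token()
head = get_pr_head_ref("airbytehq", "airbyte", 12345, token)  # hypothetical PR number
tag = compute_prerelease_docker_image_tag("1.2.3", head.sha)  # uses the 7-char short SHA
print(tag)
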
airbyte_ops_mcp/mcp/regression_tests.py
CHANGED

@@ -31,11 +31,8 @@ from airbyte.exceptions import (
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
-from airbyte_ops_mcp.github_actions import
-
-    resolve_github_token,
-    trigger_workflow_dispatch,
-)
+from airbyte_ops_mcp.github_actions import trigger_workflow_dispatch
+from airbyte_ops_mcp.github_api import GITHUB_API_BASE, resolve_github_token
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 from airbyte_ops_mcp.mcp.prerelease import ConnectorRepo
 
@@ -285,7 +282,7 @@ class RunRegressionTestsResponse(BaseModel):
     )
     github_run_id: int | None = Field(
         default=None,
-        description="GitHub Actions workflow run ID (use with
+        description="GitHub Actions workflow run ID (use with check_ci_workflow_status)",
     )
     github_run_url: str | None = Field(
         default=None,
@@ -421,6 +418,7 @@ def run_regression_tests(
         workflow_inputs["override_control_image"] = override_control_image
 
     mode_description = "single-version" if skip_compare else "comparison"
+
     try:
         dispatch_result = trigger_workflow_dispatch(
             owner=REGRESSION_TEST_REPO_OWNER,

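Not part of the diff: since github_run_id is now documented as usable with check_ci_workflow_status, a caller can poll a dispatched regression run as in the sketch below. The owner and repo values are placeholders for the regression-test repository, whose actual names are defined by constants in this module.

# Minimal sketch for polling a regression run by its github_run_id; owner/repo are illustrative.
from airbyte_ops_mcp.mcp.github_actions import check_ci_workflow_status

def poll_regression_run(run_id: int) -> str:
    """Return the current status string for a regression-test workflow run."""
    status = check_ci_workflow_status(
        owner="airbytehq",  # illustrative; substitute the regression-test repo owner
        repo="airbyte",     # illustrative; substitute the regression-test repo name
        run_id=run_id,
    )
    return status.status
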
airbyte_ops_mcp/mcp/server.py
CHANGED
@@ -25,7 +25,7 @@ from airbyte_ops_mcp.mcp.cloud_connector_versions import (
     register_cloud_connector_version_tools,
 )
 from airbyte_ops_mcp.mcp.gcp_logs import register_gcp_logs_tools
-from airbyte_ops_mcp.mcp.
+from airbyte_ops_mcp.mcp.github_actions import register_github_actions_tools
 from airbyte_ops_mcp.mcp.github_repo_ops import register_github_repo_ops_tools
 from airbyte_ops_mcp.mcp.prerelease import register_prerelease_tools
 from airbyte_ops_mcp.mcp.prod_db_queries import register_prod_db_query_tools
@@ -59,7 +59,7 @@ def register_server_assets(app: FastMCP) -> None:
     """
     register_server_info_resources(app)
     register_github_repo_ops_tools(app)
-
+    register_github_actions_tools(app)
     register_prerelease_tools(app)
     register_cloud_connector_version_tools(app)
     register_prod_db_query_tools(app)

{airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/WHEEL
RENAMED

File without changes

{airbyte_internal_ops-0.4.0.dist-info → airbyte_internal_ops-0.4.1.dist-info}/entry_points.txt
RENAMED

File without changes