airbyte-internal-ops 0.1.10__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/RECORD +21 -18
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/entry_points.txt +1 -0
- airbyte_ops_mcp/cli/cloud.py +151 -3
- airbyte_ops_mcp/cloud_admin/auth.py +32 -0
- airbyte_ops_mcp/constants.py +18 -0
- airbyte_ops_mcp/github_actions.py +218 -0
- airbyte_ops_mcp/live_tests/cdk_secrets.py +90 -0
- airbyte_ops_mcp/live_tests/ci_output.py +55 -5
- airbyte_ops_mcp/live_tests/connector_runner.py +3 -0
- airbyte_ops_mcp/mcp/_http_headers.py +198 -0
- airbyte_ops_mcp/mcp/cloud_connector_versions.py +118 -22
- airbyte_ops_mcp/mcp/github.py +2 -21
- airbyte_ops_mcp/mcp/live_tests.py +46 -84
- airbyte_ops_mcp/mcp/prerelease.py +9 -31
- airbyte_ops_mcp/mcp/prod_db_queries.py +67 -24
- airbyte_ops_mcp/mcp/server.py +81 -8
- airbyte_ops_mcp/prod_db_access/db_engine.py +8 -0
- airbyte_ops_mcp/prod_db_access/queries.py +27 -15
- airbyte_ops_mcp/prod_db_access/sql.py +17 -16
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/WHEEL +0 -0
airbyte_ops_mcp/mcp/live_tests.py
CHANGED

@@ -9,7 +9,6 @@ in GitHub Actions and results can be polled via workflow status.
 from __future__ import annotations
 
 import logging
-import os
 import uuid
 from datetime import datetime
 from enum import Enum
@@ -21,6 +20,11 @@ from airbyte.cloud.auth import resolve_cloud_client_id, resolve_cloud_client_sec
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
+from airbyte_ops_mcp.github_actions import (
+    GITHUB_API_BASE,
+    resolve_github_token,
+    trigger_workflow_dispatch,
+)
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
 logger = logging.getLogger(__name__)
@@ -29,7 +33,6 @@ logger = logging.getLogger(__name__)
 # GitHub Workflow Configuration
 # =============================================================================
 
-GITHUB_API_BASE = "https://api.github.com"
 LIVE_TEST_REPO_OWNER = "airbytehq"
 LIVE_TEST_REPO_NAME = "airbyte-ops-mcp"
 LIVE_TEST_DEFAULT_BRANCH = "main"
@@ -37,76 +40,6 @@ LIVE_TEST_WORKFLOW_FILE = "connector-live-test.yml"
 REGRESSION_TEST_WORKFLOW_FILE = "connector-regression-test.yml"
 
 
-# =============================================================================
-# GitHub API Helper Functions
-# =============================================================================
-
-
-def _get_github_token() -> str:
-    """Get GitHub token from environment.
-
-    Checks for tokens in order of specificity:
-    1. GITHUB_CI_WORKFLOW_TRIGGER_PAT (general workflow triggering)
-    2. GITHUB_TOKEN (fallback)
-
-    Returns:
-        GitHub token string.
-
-    Raises:
-        ValueError: If no GitHub token environment variable is set.
-    """
-    token = os.getenv("GITHUB_CI_WORKFLOW_TRIGGER_PAT") or os.getenv("GITHUB_TOKEN")
-    if not token:
-        raise ValueError(
-            "No GitHub token found. Set GITHUB_CI_WORKFLOW_TRIGGER_PAT or GITHUB_TOKEN "
-            "environment variable with 'actions:write' permission."
-        )
-    return token
-
-
-def _trigger_workflow_dispatch(
-    owner: str,
-    repo: str,
-    workflow_file: str,
-    ref: str,
-    inputs: dict[str, Any],
-    token: str,
-) -> str:
-    """Trigger a GitHub Actions workflow via workflow_dispatch.
-
-    Args:
-        owner: Repository owner (e.g., "airbytehq")
-        repo: Repository name (e.g., "airbyte-ops-mcp")
-        workflow_file: Workflow file name (e.g., "connector-live-test.yml")
-        ref: Git ref to run the workflow on (branch name)
-        inputs: Workflow inputs dictionary
-        token: GitHub API token
-
-    Returns:
-        URL to view workflow runs.
-
-    Raises:
-        requests.HTTPError: If API request fails.
-    """
-    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches"
-    headers = {
-        "Authorization": f"Bearer {token}",
-        "Accept": "application/vnd.github+json",
-        "X-GitHub-Api-Version": "2022-11-28",
-    }
-    payload = {
-        "ref": ref,
-        "inputs": inputs,
-    }
-
-    response = requests.post(url, headers=headers, json=payload, timeout=30)
-    response.raise_for_status()
-
-    # workflow_dispatch returns 204 No Content on success
-    # Return URL to view workflow runs
-    return f"https://github.com/{owner}/{repo}/actions/workflows/{workflow_file}"
-
-
 def _get_workflow_run_status(
     owner: str,
     repo: str,
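The two helpers removed above move into the new shared module `airbyte_ops_mcp/github_actions.py` (+218 lines in this release), whose body is not shown in this diff. Based on the names imported at the top of this file and the `dispatch_result.run_id` / `run_url` / `workflow_url` attributes used further down, a minimal sketch of what the shared module plausibly provides is below; the `WorkflowDispatchResult` class name, the default env-var list, and the recent-runs lookup are assumptions, not the actual implementation.

```python
# Hypothetical sketch of airbyte_ops_mcp/github_actions.py -- not shown in this diff.
from __future__ import annotations

import os
from dataclasses import dataclass
from typing import Any

import requests

GITHUB_API_BASE = "https://api.github.com"
DEFAULT_TOKEN_ENV_VARS = ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"]  # assumed default


def resolve_github_token(env_vars: list[str] | None = None) -> str:
    """Return the first token found, scanning env vars from most to least specific."""
    candidates = env_vars or DEFAULT_TOKEN_ENV_VARS
    for var in candidates:
        token = os.getenv(var)
        if token:
            return token
    raise ValueError(f"No GitHub token found. Set one of: {', '.join(candidates)}")


@dataclass
class WorkflowDispatchResult:  # class name assumed; only the attributes appear in the diff
    workflow_url: str
    run_id: int | None = None
    run_url: str | None = None


def trigger_workflow_dispatch(
    owner: str,
    repo: str,
    workflow_file: str,
    ref: str,
    inputs: dict[str, Any],
    token: str,
) -> WorkflowDispatchResult:
    """Dispatch a workflow run, then try to recover the run it created.

    The dispatches endpoint returns 204 No Content, so the concrete run ID
    must be looked up separately, e.g. from the workflow's most recent runs.
    """
    headers = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    base = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}"
    resp = requests.post(
        f"{base}/dispatches",
        headers=headers,
        json={"ref": ref, "inputs": inputs},
        timeout=30,
    )
    resp.raise_for_status()

    result = WorkflowDispatchResult(
        workflow_url=f"https://github.com/{owner}/{repo}/actions/workflows/{workflow_file}",
    )
    # Best effort: the newest run on this ref is usually the one just dispatched.
    runs_resp = requests.get(
        f"{base}/runs", headers=headers, params={"branch": ref, "per_page": 1}, timeout=30
    )
    if runs_resp.ok and runs_resp.json().get("workflow_runs"):
        run = runs_resp.json()["workflow_runs"][0]
        result.run_id = run["id"]
        result.run_url = run["html_url"]
    return result
```

Whatever the actual internals, centralizing these helpers also removes the duplicated `_get_github_token` in prerelease.py further below.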
@@ -293,12 +226,22 @@ class LiveConnectionTestResult(BaseModel):
 class RunLiveConnectionTestsResponse(BaseModel):
     """Response from starting a live connection test via GitHub Actions workflow."""
 
-    run_id: str = Field(
+    run_id: str = Field(
+        description="Unique identifier for the test run (internal tracking ID)"
+    )
     status: TestRunStatus = Field(description="Initial status of the test run")
     message: str = Field(description="Human-readable status message")
     workflow_url: str | None = Field(
         default=None,
-        description="URL to view the GitHub Actions workflow
+        description="URL to view the GitHub Actions workflow file",
+    )
+    github_run_id: int | None = Field(
+        default=None,
+        description="GitHub Actions workflow run ID (use with check_workflow_status)",
+    )
+    github_run_url: str | None = Field(
+        default=None,
+        description="Direct URL to the GitHub Actions workflow run",
     )
 
 
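The field description for `github_run_id` points callers at `check_workflow_status`; that tool's signature is not part of this diff, so the sketch below is illustrative only (it assumes the tool accepts the GitHub run ID):

```python
# Illustrative polling flow; check_workflow_status's real signature is not in this diff.
response = run_live_connection_tests(...)  # returns RunLiveConnectionTestsResponse
if response.github_run_id is not None:
    status = check_workflow_status(run_id=response.github_run_id)  # hypothetical call
else:
    # Run ID could not be resolved at dispatch time; fall back to the workflow page.
    print(f"Watch progress at {response.workflow_url}")
```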
@@ -348,9 +291,16 @@ def run_live_connection_tests(
     ] = None,
     connector_name: Annotated[
         str | None,
-        "Connector name to build
-        "(e.g., 'source-pokeapi'). If provided, builds the
-        "
+        "Connector name to build the connector image from source "
+        "(e.g., 'source-pokeapi'). If provided, builds the image locally with tag 'dev'. "
+        "For live tests, this builds the test image. For regression tests, this builds "
+        "the target image while control is auto-detected from the connection.",
+    ] = None,
+    pr: Annotated[
+        int | None,
+        "PR number from the airbyte monorepo to checkout and build from "
+        "(e.g., 70847). Only used when connector_name is provided. "
+        "If not specified, builds from the default branch (master).",
     ] = None,
 ) -> RunLiveConnectionTestsResponse:
     """Start a live connection test run via GitHub Actions workflow.
@@ -377,7 +327,7 @@ def run_live_connection_tests(
 
     # Get GitHub token
     try:
-        token = _get_github_token()
+        token = resolve_github_token()
     except ValueError as e:
         return RunLiveConnectionTestsResponse(
             run_id=run_id,
@@ -422,9 +372,13 @@ def run_live_connection_tests(
     }
     if connector_image:
         workflow_inputs["connector_image"] = connector_image
+    if connector_name:
+        workflow_inputs["connector_name"] = connector_name
+    if pr:
+        workflow_inputs["pr"] = str(pr)
 
     try:
-        workflow_url = _trigger_workflow_dispatch(
+        dispatch_result = trigger_workflow_dispatch(
             owner=LIVE_TEST_REPO_OWNER,
             repo=LIVE_TEST_REPO_NAME,
             workflow_file=LIVE_TEST_WORKFLOW_FILE,
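With the two new branches above, a PR-based run sends both keys to the workflow; all `workflow_dispatch` input values are strings, hence the `str(pr)` cast. Roughly (the base keys of `workflow_inputs` are not visible in this hunk, so `connection_id` is assumed):

```python
# Approximate payload for connector_name="source-pokeapi", pr=70847.
workflow_inputs = {
    "connection_id": "<connection-uuid>",  # assumed base key, not visible in this hunk
    "connector_name": "source-pokeapi",
    "pr": "70847",  # stringified: workflow_dispatch inputs are strings
}
```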
@@ -441,12 +395,15 @@ def run_live_connection_tests(
             workflow_url=None,
         )
 
+    view_url = dispatch_result.run_url or dispatch_result.workflow_url
     return RunLiveConnectionTestsResponse(
         run_id=run_id,
         status=TestRunStatus.QUEUED,
         message=f"Live-test workflow triggered for connection {connection_id}. "
-        f"View progress at: {workflow_url}",
-        workflow_url=workflow_url,
+        f"View progress at: {view_url}",
+        workflow_url=dispatch_result.workflow_url,
+        github_run_id=dispatch_result.run_id,
+        github_run_url=dispatch_result.run_url,
     )
 
     # Regression test workflow (skip_regression_tests=False)
@@ -472,9 +429,11 @@ def run_live_connection_tests(
         workflow_inputs["control_image"] = control_image
     if connector_name:
         workflow_inputs["connector_name"] = connector_name
+    if pr:
+        workflow_inputs["pr"] = str(pr)
 
     try:
-        workflow_url = _trigger_workflow_dispatch(
+        dispatch_result = trigger_workflow_dispatch(
             owner=LIVE_TEST_REPO_OWNER,
             repo=LIVE_TEST_REPO_NAME,
             workflow_file=REGRESSION_TEST_WORKFLOW_FILE,
@@ -491,12 +450,15 @@ def run_live_connection_tests(
             workflow_url=None,
         )
 
+    view_url = dispatch_result.run_url or dispatch_result.workflow_url
     return RunLiveConnectionTestsResponse(
         run_id=run_id,
         status=TestRunStatus.QUEUED,
         message=f"Regression-test workflow triggered for connection {connection_id}. "
-        f"View progress at: {workflow_url}",
-        workflow_url=workflow_url,
+        f"View progress at: {view_url}",
+        workflow_url=dispatch_result.workflow_url,
+        github_run_id=dispatch_result.run_id,
+        github_run_url=dispatch_result.run_url,
     )
 
 
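Taken together, a caller exercising the new parameters might invoke the tool like this (the `connection_id` value is a placeholder and the tool's other parameters are elided):

```python
# Illustrative call combining the new connector_name and pr parameters.
response = run_live_connection_tests(
    connection_id="00000000-0000-0000-0000-000000000000",  # placeholder
    connector_name="source-pokeapi",  # build this connector image from source
    pr=70847,  # check out and build from this airbyte monorepo PR
)
print(response.message)         # includes the best available progress URL
print(response.github_run_url)  # direct run link when the run ID was resolved
```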
airbyte_ops_mcp/mcp/prerelease.py
CHANGED

@@ -8,7 +8,6 @@ workflow in the airbytehq/airbyte repository via GitHub's workflow dispatch API.
 from __future__ import annotations
 
 import base64
-import os
 from typing import Annotated, Literal
 
 import requests
@@ -16,15 +15,22 @@ import yaml
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
+from airbyte_ops_mcp.github_actions import GITHUB_API_BASE, resolve_github_token
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
-GITHUB_API_BASE = "https://api.github.com"
 DEFAULT_REPO_OWNER = "airbytehq"
 DEFAULT_REPO_NAME = "airbyte"
 DEFAULT_BRANCH = "master"
 PRERELEASE_WORKFLOW_FILE = "publish-connectors-prerelease-command.yml"
 CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
 
+# Token env vars for prerelease publishing (in order of preference)
+PRERELEASE_TOKEN_ENV_VARS = [
+    "GITHUB_CONNECTOR_PUBLISHING_PAT",
+    "GITHUB_CI_WORKFLOW_TRIGGER_PAT",
+    "GITHUB_TOKEN",
+]
+
 
 class PRHeadInfo(BaseModel):
     """Information about a PR's head commit."""
@@ -46,34 +52,6 @@ class PrereleaseWorkflowResult(BaseModel):
     docker_image_tag: str | None = None
 
 
-def _get_github_token() -> str:
-    """Get GitHub token from environment.
-
-    Checks for tokens in order of specificity:
-    1. GITHUB_CONNECTOR_PUBLISHING_PAT (most specific)
-    2. GITHUB_CI_WORKFLOW_TRIGGER_PAT (general workflow triggering)
-    3. GITHUB_TOKEN (fallback)
-
-    Returns:
-        GitHub token string.
-
-    Raises:
-        ValueError: If no GitHub token environment variable is set.
-    """
-    token = (
-        os.getenv("GITHUB_CONNECTOR_PUBLISHING_PAT")
-        or os.getenv("GITHUB_CI_WORKFLOW_TRIGGER_PAT")
-        or os.getenv("GITHUB_TOKEN")
-    )
-    if not token:
-        raise ValueError(
-            "No GitHub token found. Set GITHUB_CONNECTOR_PUBLISHING_PAT, "
-            "GITHUB_CI_WORKFLOW_TRIGGER_PAT, or GITHUB_TOKEN environment variable "
-            "with 'actions:write' permission."
-        )
-    return token
-
-
 def _get_pr_head_info(
     owner: str,
     repo: str,
@@ -248,7 +226,7 @@ def publish_connector_to_airbyte_registry(
     )
 
     # Guard: Check for required token
-    token = _get_github_token()
+    token = resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)
 
     # Get the PR's head ref and SHA
     head_info = _get_pr_head_info(
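The deleted helper's precedence order survives as the `PRERELEASE_TOKEN_ENV_VARS` list, consumed by the shared resolver. A quick behavioral sketch (assumes `resolve_github_token` scans the list in order and raises `ValueError` when nothing is set, as the deleted helper did):

```python
import os

# Most specific token wins: with both set, the publishing PAT is chosen.
os.environ["GITHUB_TOKEN"] = "ghp_fallback"
os.environ["GITHUB_CONNECTOR_PUBLISHING_PAT"] = "ghp_publishing"
assert resolve_github_token(PRERELEASE_TOKEN_ENV_VARS) == "ghp_publishing"

# With no variables set, the resolver should explain what to configure.
for var in PRERELEASE_TOKEN_ENV_VARS:
    os.environ.pop(var, None)
try:
    resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)
except ValueError as err:
    print(err)
```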
airbyte_ops_mcp/mcp/prod_db_queries.py
CHANGED

@@ -20,7 +20,7 @@ from airbyte_ops_mcp.prod_db_access.queries import (
     query_connections_by_connector,
     query_connector_versions,
     query_dataplanes_list,
-    query_failed_sync_attempts_for_version,
+    query_failed_sync_attempts_for_connector,
     query_new_connector_releases,
     query_sync_results_for_version,
     query_workspace_info,
@@ -249,12 +249,41 @@ def query_prod_connector_version_sync_results(
 @mcp_tool(
     read_only=True,
     idempotent=True,
+    open_world=True,
 )
-def query_prod_failed_sync_attempts_for_version(
-
-        str,
-        Field(
-
+def query_prod_failed_sync_attempts_for_connector(
+    source_definition_id: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Source connector definition ID (UUID) to search for. "
+                "Exactly one of this or source_canonical_name is required. "
+                "Example: 'afa734e4-3571-11ec-991a-1e0031268139' for YouTube Analytics."
+            ),
+            default=None,
+        ),
+    ] = None,
+    source_canonical_name: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Canonical source connector name to search for. "
+                "Exactly one of this or source_definition_id is required. "
+                "Examples: 'source-youtube-analytics', 'YouTube Analytics'."
+            ),
+            default=None,
+        ),
+    ] = None,
+    organization_id: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Optional organization ID (UUID) to filter results. "
+                "If provided, only failed attempts from this organization will be returned."
+            ),
+            default=None,
+        ),
+    ] = None,
     days: Annotated[
         int,
         Field(description="Number of days to look back (default: 7)", default=7),
@@ -264,29 +293,43 @@ def query_prod_failed_sync_attempts_for_version(
         Field(description="Maximum number of results (default: 100)", default=100),
     ] = 100,
 ) -> list[dict[str, Any]]:
-    """List failed sync attempts
+    """List failed sync attempts for ALL actors using a source connector type.
 
-
-
+    This tool finds all actors with the given connector definition and returns their
+    failed sync attempts, regardless of whether they have explicit version pins.
 
-
-
-
-    - failed_attempt_number: Which attempt this was (0-indexed)
-    - failure_summary: JSON containing failure details including failureType and messages
+    This is useful for investigating connector issues across all users. Use this when
+    you want to find failures for a connector type regardless of which version users
+    are on.
 
-    Note:
-
+    Note: This tool only supports SOURCE connectors. For destination connectors,
+    a separate tool would be needed.
 
-
-
-
-    organization_id, dataplane_group_id, dataplane_name, failed_attempt_id,
-    failed_attempt_number, failed_attempt_status, failed_attempt_created_at,
-    failed_attempt_ended_at, failure_summary, processing_task_queue
+    Key fields in results:
+    - failure_summary: JSON containing failure details including failureType and messages
+    - pin_origin_type, pin_origin, pinned_version_id: Version pin context (NULL if not pinned)
     """
-
-
+    # Validate that exactly one of the two parameters is provided
+    if (source_definition_id is None) == (source_canonical_name is None):
+        raise PyAirbyteInputError(
+            message=(
+                "Exactly one of source_definition_id or source_canonical_name "
+                "must be provided, but not both."
+            ),
+        )
+
+    # Resolve canonical name to definition ID if needed
+    resolved_definition_id: str
+    if source_canonical_name:
+        resolved_definition_id = _resolve_canonical_name_to_definition_id(
+            canonical_name=source_canonical_name,
+        )
+    else:
+        resolved_definition_id = source_definition_id  # type: ignore[assignment]
+
+    return query_failed_sync_attempts_for_connector(
+        connector_definition_id=resolved_definition_id,
+        organization_id=organization_id,
         days=days,
         limit=limit,
     )
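The exactly-one-of guard relies on `(a is None) == (b is None)` being true precisely when both or neither argument is supplied:

```python
# Truth table for the guard in query_prod_failed_sync_attempts_for_connector.
cases = [
    (None, None),                           # neither -> rejected
    ("afa734e4-...", "YouTube Analytics"),  # both    -> rejected
    ("afa734e4-...", None),                 # one     -> accepted
    (None, "source-youtube-analytics"),     # one     -> accepted
]
for definition_id, canonical_name in cases:
    invalid = (definition_id is None) == (canonical_name is None)
    print(definition_id, canonical_name, "rejected" if invalid else "accepted")
```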
airbyte_ops_mcp/mcp/server.py
CHANGED

@@ -2,9 +2,18 @@
 """Airbyte Admin MCP server implementation.
 
 This module provides the main MCP server for Airbyte admin operations.
+
+The server can run in two modes:
+- **stdio mode** (default): For direct MCP client connections via stdin/stdout
+- **HTTP mode**: For HTTP-based MCP connections, useful for containerized deployments
+
+Environment Variables:
+    MCP_HTTP_HOST: Host to bind HTTP server to (default: 127.0.0.1)
+    MCP_HTTP_PORT: Port for HTTP server (default: 8082)
 """
 
 import asyncio
+import os
 import sys
 from pathlib import Path
@@ -23,6 +32,10 @@ from airbyte_ops_mcp.mcp.prod_db_queries import register_prod_db_query_tools
 from airbyte_ops_mcp.mcp.prompts import register_prompts
 from airbyte_ops_mcp.mcp.server_info import register_server_info_resources
 
+# Default HTTP server configuration
+DEFAULT_HTTP_HOST = "127.0.0.1"
+DEFAULT_HTTP_PORT = 8082
+
 app: FastMCP = FastMCP(MCP_SERVER_NAME)
 
 
@@ -56,27 +69,87 @@ def register_server_assets(app: FastMCP) -> None:
 register_server_assets(app)
 
 
-def main() -> None:
-    """
-    # Load environment variables from .env file in current working directory
+def _load_env() -> None:
+    """Load environment variables from .env file if present."""
     env_file = Path.cwd() / ".env"
     if env_file.exists():
         load_dotenv(env_file)
         print(f"Loaded environment from: {env_file}", flush=True, file=sys.stderr)
 
+
+def main() -> None:
+    """Main entry point for the Airbyte Admin MCP server (stdio mode).
+
+    This is the default entry point that runs the server in stdio mode,
+    suitable for direct MCP client connections.
+    """
+    _load_env()
+
     print("=" * 60, flush=True, file=sys.stderr)
-    print("Starting Airbyte Admin MCP server.", file=sys.stderr)
+    print("Starting Airbyte Admin MCP server (stdio mode).", file=sys.stderr)
     try:
         asyncio.run(app.run_stdio_async(show_banner=False))
     except KeyboardInterrupt:
         print("Airbyte Admin MCP server interrupted by user.", file=sys.stderr)
-    except Exception as ex:
-        print(f"Error running Airbyte Admin MCP server: {ex}", file=sys.stderr)
-        sys.exit(1)
 
     print("Airbyte Admin MCP server stopped.", file=sys.stderr)
     print("=" * 60, flush=True, file=sys.stderr)
-
+
+
+def _parse_port(port_str: str | None, default: int) -> int:
+    """Parse and validate a port number from string.
+
+    Args:
+        port_str: Port string from environment variable, or None if not set
+        default: Default port to use if port_str is None
+
+    Returns:
+        Validated port number
+
+    Raises:
+        ValueError: If port_str is not a valid integer or out of range
+    """
+    if port_str is None:
+        return default
+
+    port_str = port_str.strip()
+    if not port_str.isdecimal():
+        raise ValueError(f"MCP_HTTP_PORT must be a valid integer, got: {port_str!r}")
+
+    port = int(port_str)
+    if not 1 <= port <= 65535:
+        raise ValueError(f"MCP_HTTP_PORT must be between 1 and 65535, got: {port}")
+
+    return port
+
+
+def main_http() -> None:
+    """HTTP entry point for the Airbyte Admin MCP server.
+
+    This entry point runs the server in HTTP mode, suitable for containerized
+    deployments where the server needs to be accessible over HTTP.
+
+    Environment Variables:
+        MCP_HTTP_HOST: Host to bind to (default: 127.0.0.1)
+        MCP_HTTP_PORT: Port to listen on (default: 8082)
+    """
+    _load_env()
+
+    host = os.getenv("MCP_HTTP_HOST", DEFAULT_HTTP_HOST)
+    port = _parse_port(os.getenv("MCP_HTTP_PORT"), DEFAULT_HTTP_PORT)
+
+    print("=" * 60, flush=True, file=sys.stderr)
+    print(
+        f"Starting Airbyte Admin MCP server (HTTP mode) on {host}:{port}",
+        file=sys.stderr,
+    )
+    try:
+        app.run(transport="http", host=host, port=port)
+    except KeyboardInterrupt:
+        print("Airbyte Admin MCP server interrupted by user.", file=sys.stderr)
+
+    print("Airbyte Admin MCP server stopped.", file=sys.stderr)
+    print("=" * 60, flush=True, file=sys.stderr)
 
 
 if __name__ == "__main__":
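`_parse_port` uses `str.isdecimal()` after stripping, so signs, decimals, and non-numeric strings are rejected before `int()` runs, and the range check catches port 0. Expected behavior of the helper defined above:

```python
# Expected outcomes for the _parse_port helper shown in this hunk.
_parse_port(None, 8082)      # -> 8082 (MCP_HTTP_PORT unset, default used)
_parse_port(" 9000 ", 8082)  # -> 9000 (surrounding whitespace stripped)
_parse_port("0", 8082)       # ValueError: must be between 1 and 65535
_parse_port("-1", 8082)      # ValueError: "-1" is not a decimal string
_parse_port("8o82", 8082)    # ValueError: not a valid integer
```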
airbyte_ops_mcp/prod_db_access/db_engine.py
CHANGED

@@ -52,6 +52,7 @@ def _is_tailscale_connected() -> bool:
     Detection methods:
     1. Check for tailscale0 network interface (Linux)
     2. Run 'tailscale status --json' and check backend state (cross-platform)
+    3. Check macOS-specific Tailscale.app location if tailscale not in PATH
     """
     # Method 1: Check for tailscale0 interface (Linux)
     try:
@@ -63,6 +64,13 @@ def _is_tailscale_connected() -> bool:
 
     # Method 2: Check tailscale CLI status
     tailscale_path = shutil.which("tailscale")
+
+    # Method 3: On macOS, check the standard Tailscale.app location if not in PATH
+    if not tailscale_path and os.path.exists(
+        "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
+    ):
+        tailscale_path = "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
+
     if tailscale_path:
         try:
             result = subprocess.run(
airbyte_ops_mcp/prod_db_access/queries.py
CHANGED

@@ -24,7 +24,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_CONNECTIONS_BY_CONNECTOR_AND_ORG,
     SELECT_CONNECTOR_VERSIONS,
     SELECT_DATAPLANES_LIST,
-    SELECT_FAILED_SYNC_ATTEMPTS_FOR_VERSION,
+    SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
     SELECT_NEW_CONNECTOR_RELEASES,
     SELECT_ORG_WORKSPACES,
     SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
@@ -225,43 +225,55 @@ def query_sync_results_for_version(
     )
 
 
-def query_failed_sync_attempts_for_version(
-
+def query_failed_sync_attempts_for_connector(
+    connector_definition_id: str,
+    organization_id: str | None = None,
     days: int = 7,
     limit: int = 100,
     *,
     gsm_client: secretmanager.SecretManagerServiceClient | None = None,
 ) -> list[dict[str, Any]]:
-    """Query failed sync
+    """Query failed sync attempts for ALL actors using a connector definition.
 
-
-    jobs and attempts tables to optimize join performance.
+    Finds all actors with the given actor_definition_id and returns their failed
+    sync attempts, regardless of whether they have explicit version pins.
 
-
+    This is useful for investigating connector issues across all users.
+
+    Note: This query only supports SOURCE connectors (joins via connection.source_id).
+    For destination connectors, a separate query would be needed.
 
     Args:
-
+        connector_definition_id: Connector definition UUID to filter by
+        organization_id: Optional organization UUID to filter results by (post-query filter)
         days: Number of days to look back (default: 7)
         limit: Maximum number of results (default: 100)
         gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
 
     Returns:
-        List of failed sync
+        List of failed sync attempt records with failure_summary and workspace info
     """
     cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
-
-
+
+    results = _run_sql_query(
+        SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
         parameters={
-            "
+            "connector_definition_id": connector_definition_id,
             "cutoff_date": cutoff_date,
             "limit": limit,
         },
-        query_name="
+        query_name="SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR",
         gsm_client=gsm_client,
     )
 
+    # Post-query filter by organization_id if provided
+    if organization_id is not None:
+        results = [
+            r for r in results if str(r.get("organization_id")) == organization_id
+        ]
+
+    return results
+
 
 def query_dataplanes_list(
     *,