airbyte-internal-ops 0.3.1__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- {airbyte_internal_ops-0.3.1.dist-info → airbyte_internal_ops-0.4.1.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.3.1.dist-info → airbyte_internal_ops-0.4.1.dist-info}/RECORD +16 -16
- airbyte_ops_mcp/cli/gh.py +105 -2
- airbyte_ops_mcp/cli/registry.py +2 -2
- airbyte_ops_mcp/github_actions.py +6 -100
- airbyte_ops_mcp/github_api.py +163 -5
- airbyte_ops_mcp/mcp/{github.py → github_actions.py} +117 -5
- airbyte_ops_mcp/mcp/prerelease.py +53 -63
- airbyte_ops_mcp/mcp/prod_db_queries.py +298 -1
- airbyte_ops_mcp/mcp/regression_tests.py +20 -9
- airbyte_ops_mcp/mcp/server.py +2 -2
- airbyte_ops_mcp/prod_db_access/queries.py +68 -0
- airbyte_ops_mcp/prod_db_access/sql.py +108 -0
- airbyte_ops_mcp/regression_tests/connector_runner.py +8 -4
- {airbyte_internal_ops-0.3.1.dist-info → airbyte_internal_ops-0.4.1.dist-info}/WHEEL +0 -0
- {airbyte_internal_ops-0.3.1.dist-info → airbyte_internal_ops-0.4.1.dist-info}/entry_points.txt +0 -0
airbyte_ops_mcp/mcp/{github.py → github_actions.py}

@@ -15,12 +15,22 @@ from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
 from airbyte_ops_mcp.github_actions import (
-    GITHUB_API_BASE,
     get_workflow_jobs,
+    trigger_workflow_dispatch,
+)
+from airbyte_ops_mcp.github_api import (
+    GITHUB_API_BASE,
+    get_pr_head_ref,
     resolve_github_token,
 )
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
+# Token env vars for workflow triggering (in order of preference)
+WORKFLOW_TRIGGER_TOKEN_ENV_VARS = [
+    "GITHUB_CI_WORKFLOW_TRIGGER_PAT",
+    "GITHUB_TOKEN",
+]
+
 DOCKERHUB_API_BASE = "https://hub.docker.com/v2"
 
 
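The new WORKFLOW_TRIGGER_TOKEN_ENV_VARS list feeds resolve_github_token, which now lives in airbyte_ops_mcp.github_api. As a rough illustration of the preference-order contract, here is a minimal sketch; this is an assumption about the helper's behavior, not the package's actual implementation:

    import os

    def resolve_github_token_sketch(env_var_names: list[str]) -> str:
        """Return the first non-empty token among the given env vars (sketch)."""
        for name in env_var_names:
            token = os.environ.get(name)
            if token:
                return token  # earlier names in the list win
        raise ValueError(f"No GitHub token found; set one of: {', '.join(env_var_names)}")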
@@ -36,7 +46,7 @@ class JobInfo(BaseModel):
 
 
 class WorkflowRunStatus(BaseModel):
-    """Response model for check_workflow_status MCP tool."""
+    """Response model for check_ci_workflow_status MCP tool."""
 
     run_id: int
     status: str
@@ -116,7 +126,7 @@ def _get_workflow_run(
     idempotent=True,
     open_world=True,
 )
-def check_workflow_status(
+def check_ci_workflow_status(
     workflow_url: Annotated[
         str | None,
         Field(
@@ -196,6 +206,108 @@ def check_workflow_status(
     )
 
 
+class TriggerCIWorkflowResult(BaseModel):
+    """Response model for trigger_ci_workflow MCP tool."""
+
+    success: bool
+    message: str
+    workflow_url: str
+    run_id: int | None = None
+    run_url: str | None = None
+
+
+@mcp_tool(
+    read_only=False,
+    idempotent=False,
+    open_world=True,
+)
+def trigger_ci_workflow(
+    owner: Annotated[
+        str,
+        Field(description="Repository owner (e.g., 'airbytehq')"),
+    ],
+    repo: Annotated[
+        str,
+        Field(description="Repository name (e.g., 'airbyte')"),
+    ],
+    workflow_file: Annotated[
+        str,
+        Field(description="Workflow file name (e.g., 'connector-regression-test.yml')"),
+    ],
+    workflow_definition_ref: Annotated[
+        str | None,
+        Field(
+            description="Branch name or PR number for the workflow definition to use. "
+            "If a PR number (integer string) is provided, it resolves to the PR's head branch name. "
+            "If a branch name is provided, it is used directly. "
+            "Defaults to the repository's default branch if not specified."
+        ),
+    ] = None,
+    inputs: Annotated[
+        dict[str, str] | None,
+        Field(
+            description="Workflow inputs as a dictionary of string key-value pairs. "
+            "These are passed to the workflow_dispatch event."
+        ),
+    ] = None,
+) -> TriggerCIWorkflowResult:
+    """Trigger a GitHub Actions CI workflow via workflow_dispatch.
+
+    This tool triggers a workflow in any GitHub repository that has workflow_dispatch
+    enabled. It resolves PR numbers to branch names automatically since GitHub's
+    workflow_dispatch API only accepts branch names, not refs/pull/{pr}/head format.
+
+    Requires GITHUB_CI_WORKFLOW_TRIGGER_PAT or GITHUB_TOKEN environment variable
+    with 'actions:write' permission.
+    """
+    # Guard: Check for required token
+    token = resolve_github_token(WORKFLOW_TRIGGER_TOKEN_ENV_VARS)
+
+    # Resolve workflow definition ref
+    # If a PR number is provided (integer string), resolve to the PR's head branch name
+    # Otherwise use the provided branch name or default to repo's default branch
+    if workflow_definition_ref:
+        if workflow_definition_ref.isdigit():
+            # Resolve PR number to branch name via GitHub API
+            pr_head_info = get_pr_head_ref(
+                owner,
+                repo,
+                int(workflow_definition_ref),
+                token,
+            )
+            resolved_ref = pr_head_info.ref
+        else:
+            resolved_ref = workflow_definition_ref
+    else:
+        # Default to main (most common default branch)
+        resolved_ref = "main"
+
+    # Trigger the workflow
+    result = trigger_workflow_dispatch(
+        owner=owner,
+        repo=repo,
+        workflow_file=workflow_file,
+        ref=resolved_ref,
+        inputs=inputs or {},
+        token=token,
+        find_run=True,
+    )
+
+    # Build response message
+    if result.run_id:
+        message = f"Successfully triggered workflow {workflow_file} on {owner}/{repo} (ref: {resolved_ref}). Run ID: {result.run_id}"
+    else:
+        message = f"Successfully triggered workflow {workflow_file} on {owner}/{repo} (ref: {resolved_ref}). Run ID not yet available."
+
+    return TriggerCIWorkflowResult(
+        success=True,
+        message=message,
+        workflow_url=result.workflow_url,
+        run_id=result.run_id,
+        run_url=result.run_url,
+    )
+
+
 class DockerImageInfo(BaseModel):
     """Response model for get_docker_image_info MCP tool."""
 
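For example, an agent might trigger a regression-test run against a PR's copy of the workflow. The PR number and the 'connector_name' input below are illustrative assumptions, not documented inputs of the real workflow:

    result = trigger_ci_workflow(
        owner="airbytehq",
        repo="airbyte",
        workflow_file="connector-regression-test.yml",
        workflow_definition_ref="12345",  # a made-up PR number; resolved to the PR's head branch
        inputs={"connector_name": "source-faker"},  # hypothetical workflow input
    )
    print(result.message)
    if result.run_url:
        print(f"Watch the run at {result.run_url}")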
@@ -282,8 +394,8 @@ def get_docker_image_info(
     )
 
 
-def register_github_tools(app: FastMCP) -> None:
-    """Register GitHub tools with the FastMCP app.
+def register_github_actions_tools(app: FastMCP) -> None:
+    """Register GitHub Actions tools with the FastMCP app.
 
     Args:
         app: FastMCP application instance
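The rename means the server wiring changes too (presumably the ±2-line edit to airbyte_ops_mcp/mcp/server.py in the summary above). A minimal sketch of what the call site might look like; the server name string is an assumption:

    from fastmcp import FastMCP

    from airbyte_ops_mcp.mcp.github_actions import register_github_actions_tools

    app = FastMCP("airbyte-internal-ops")  # server name assumed for illustration
    register_github_actions_tools(app)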
airbyte_ops_mcp/mcp/prerelease.py

@@ -2,12 +2,15 @@
 """MCP tools for triggering connector pre-release workflows.
 
 This module provides MCP tools for triggering the publish-connectors-prerelease
-workflow in the airbytehq/airbyte repository via GitHub's workflow dispatch API.
+workflow in the airbytehq/airbyte repository (for OSS connectors) or the
+publish_enterprise_connectors workflow in airbytehq/airbyte-enterprise
+(for enterprise connectors) via GitHub's workflow dispatch API.
 """
 
 from __future__ import annotations
 
 import base64
+from enum import StrEnum
 from typing import Annotated, Literal
 
 import requests
@@ -15,15 +18,32 @@ import yaml
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
-from airbyte_ops_mcp.github_actions import GITHUB_API_BASE, resolve_github_token
+from airbyte_ops_mcp.github_api import (
+    GITHUB_API_BASE,
+    get_pr_head_ref,
+    resolve_github_token,
+)
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
 
+
+class ConnectorRepo(StrEnum):
+    """Repository where connector code is located."""
+
+    AIRBYTE = "airbyte"
+    AIRBYTE_ENTERPRISE = "airbyte-enterprise"
+
+
 DEFAULT_REPO_OWNER = "airbytehq"
-DEFAULT_REPO_NAME = "airbyte"
+DEFAULT_REPO_NAME = ConnectorRepo.AIRBYTE
 DEFAULT_BRANCH = "master"
 PRERELEASE_WORKFLOW_FILE = "publish-connectors-prerelease-command.yml"
 CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
 
+# Enterprise repository constants
+ENTERPRISE_REPO_NAME = ConnectorRepo.AIRBYTE_ENTERPRISE
+ENTERPRISE_DEFAULT_BRANCH = "main"
+ENTERPRISE_PRERELEASE_WORKFLOW_FILE = "publish_enterprise_connectors.yml"
+
 # Token env vars for prerelease publishing (in order of preference)
 PRERELEASE_TOKEN_ENV_VARS = [
     "GITHUB_CONNECTOR_PUBLISHING_PAT",
@@ -71,14 +91,6 @@ def compute_prerelease_docker_image_tag(base_version: str, sha: str) -> str:
     return f"{base_version}-{PRERELEASE_TAG_PREFIX}.{short_sha}"
 
 
-class PRHeadInfo(BaseModel):
-    """Information about a PR's head commit."""
-
-    ref: str
-    sha: str
-    short_sha: str
-
-
 class PrereleaseWorkflowResult(BaseModel):
     """Response model for publish_connector_to_airbyte_registry MCP tool."""
 
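The tag helper shown in context above produces the "{version}-preview.{7-char-git-sha}" tags described later in this file. A worked example, assuming PRERELEASE_TAG_PREFIX == "preview" and short_sha being the first seven SHA characters (both consistent with the surrounding code, but assumptions here):

    tag = compute_prerelease_docker_image_tag(
        base_version="1.2.3",
        sha="0123456789abcdef0123456789abcdef01234567",  # made-up 40-char commit SHA
    )
    assert tag == "1.2.3-preview.0123456"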
@@ -91,47 +103,6 @@ class PrereleaseWorkflowResult(BaseModel):
     docker_image_tag: str | None = None
 
 
-def _get_pr_head_info(
-    owner: str,
-    repo: str,
-    pr_number: int,
-    token: str,
-) -> PRHeadInfo:
-    """Get the head ref and SHA for a PR.
-
-    Args:
-        owner: Repository owner (e.g., "airbytehq")
-        repo: Repository name (e.g., "airbyte")
-        pr_number: Pull request number
-        token: GitHub API token
-
-    Returns:
-        PRHeadInfo with ref, sha, and short_sha.
-
-    Raises:
-        ValueError: If PR not found or API error.
-    """
-    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/pulls/{pr_number}"
-    headers = {
-        "Authorization": f"Bearer {token}",
-        "Accept": "application/vnd.github+json",
-        "X-GitHub-Api-Version": "2022-11-28",
-    }
-
-    response = requests.get(url, headers=headers, timeout=30)
-    if response.status_code == 404:
-        raise ValueError(f"PR {owner}/{repo}#{pr_number} not found")
-    response.raise_for_status()
-
-    pr_data = response.json()
-    sha = pr_data["head"]["sha"]
-    return PRHeadInfo(
-        ref=pr_data["head"]["ref"],
-        sha=sha,
-        short_sha=sha[:7],
-    )
-
-
 def _get_connector_metadata(
     owner: str,
     repo: str,
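The removed helper (and the PRHeadInfo model deleted just above it) presumably moved into airbyte_ops_mcp/github_api.py, the +163-line file in the summary, as get_pr_head_ref. Assuming the contract shown in the removed code carries over unchanged, usage would look like this (PR number 123 is made up):

    head = get_pr_head_ref("airbytehq", "airbyte", 123, token)
    print(head.ref)        # the PR's head branch name
    print(head.short_sha)  # first 7 characters of the head commit SHA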
@@ -238,6 +209,14 @@ def publish_connector_to_airbyte_registry(
         int,
         Field(description="The pull request number containing the connector changes"),
     ],
+    repo: Annotated[
+        ConnectorRepo,
+        Field(
+            default=ConnectorRepo.AIRBYTE,
+            description="Repository where the connector PR is located. "
+            "Use 'airbyte' for OSS connectors (default) or 'airbyte-enterprise' for enterprise connectors.",
+        ),
+    ],
     prerelease: Annotated[
         Literal[True],
         Field(
@@ -249,8 +228,10 @@ def publish_connector_to_airbyte_registry(
     """Publish a connector to the Airbyte registry.
 
     Currently only supports pre-release publishing. This tool triggers the
-    publish-connectors-prerelease workflow in the airbytehq/airbyte repository
-    which publishes a pre-release version of the specified connector from the PR branch.
+    publish-connectors-prerelease workflow in the airbytehq/airbyte repository
+    (for OSS connectors) or the publish_enterprise_connectors workflow in
+    airbytehq/airbyte-enterprise (for enterprise connectors), which publishes
+    a pre-release version of the specified connector from the PR branch.
 
     Pre-release versions are tagged with the format: {version}-preview.{7-char-git-sha}
     These versions are available for version pinning via the scoped_configuration API.
@@ -267,17 +248,25 @@ def publish_connector_to_airbyte_registry(
     # Guard: Check for required token
     token = resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)
 
+    # Determine repo-specific settings
+    is_enterprise = repo == ConnectorRepo.AIRBYTE_ENTERPRISE
+    target_repo_name = ENTERPRISE_REPO_NAME if is_enterprise else DEFAULT_REPO_NAME
+    target_branch = ENTERPRISE_DEFAULT_BRANCH if is_enterprise else DEFAULT_BRANCH
+    target_workflow = (
+        ENTERPRISE_PRERELEASE_WORKFLOW_FILE
+        if is_enterprise
+        else PRERELEASE_WORKFLOW_FILE
+    )
+
     # Get the PR's head SHA for computing the docker image tag
     # Note: We no longer pass gitref to the workflow - it derives the ref from PR number
-    head_info = _get_pr_head_info(
-        DEFAULT_REPO_OWNER, DEFAULT_REPO_NAME, pr_number, token
-    )
+    head_info = get_pr_head_ref(DEFAULT_REPO_OWNER, target_repo_name, pr_number, token)
 
     # Prepare workflow inputs
     # The workflow uses refs/pull/{pr}/head directly - no gitref needed
     # Note: The workflow auto-detects modified connectors from the PR
     workflow_inputs = {
-        "repo": f"{DEFAULT_REPO_OWNER}/{DEFAULT_REPO_NAME}",
+        "repo": f"{DEFAULT_REPO_OWNER}/{target_repo_name}",
         "pr": str(pr_number),
     }
 
@@ -285,9 +274,9 @@ def publish_connector_to_airbyte_registry(
     # The workflow will checkout the PR branch via inputs.gitref
     workflow_url = _trigger_workflow_dispatch(
         owner=DEFAULT_REPO_OWNER,
-        repo=DEFAULT_REPO_NAME,
-        workflow_file=PRERELEASE_WORKFLOW_FILE,
-        ref=DEFAULT_BRANCH,
+        repo=target_repo_name,
+        workflow_file=target_workflow,
+        ref=target_branch,
         inputs=workflow_inputs,
         token=token,
     )
@@ -297,7 +286,7 @@ def publish_connector_to_airbyte_registry(
     docker_image_tag: str | None = None
     metadata = _get_connector_metadata(
         DEFAULT_REPO_OWNER,
-        DEFAULT_REPO_NAME,
+        target_repo_name,
         connector_name,
         head_info.sha,
         token,
@@ -311,9 +300,10 @@ def publish_connector_to_airbyte_registry(
         base_version, head_info.sha
     )
 
+    repo_info = f" from {repo}" if is_enterprise else ""
     return PrereleaseWorkflowResult(
         success=True,
-        message=f"Successfully triggered pre-release workflow for {connector_name} from PR #{pr_number}",
+        message=f"Successfully triggered pre-release workflow for {connector_name}{repo_info} from PR #{pr_number}",
         workflow_url=workflow_url,
         connector_name=connector_name,
         pr_number=pr_number,
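With the new repo parameter, publishing an enterprise connector pre-release might look like this. The connector name, its parameter position, and the PR number are illustrative assumptions (the connector_name parameter is implied by the function body but not shown in this diff):

    result = publish_connector_to_airbyte_registry(
        connector_name="source-example",  # assumed parameter; hypothetical connector
        pr_number=678,                    # made-up PR number
        repo=ConnectorRepo.AIRBYTE_ENTERPRISE,
        prerelease=True,
    )
    print(result.message)           # "... for source-example from airbyte-enterprise from PR #678"
    print(result.docker_image_tag)  # e.g. "1.2.3-preview.0123456" when metadata resolves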
airbyte_ops_mcp/mcp/prod_db_queries.py

@@ -7,7 +7,7 @@ airbyte_ops_mcp.prod_db_access.queries for use by AI agents.
 
 from __future__ import annotations
 
-from datetime import datetime
+from datetime import datetime, timezone
 from enum import StrEnum
 from typing import Annotated, Any
 
@@ -24,9 +24,11 @@ from airbyte_ops_mcp.prod_db_access.queries import (
     query_connections_by_destination_connector,
     query_connector_versions,
     query_dataplanes_list,
+    query_destination_connection_stats,
     query_failed_sync_attempts_for_connector,
     query_new_connector_releases,
     query_recent_syncs_for_connector,
+    query_source_connection_stats,
     query_syncs_for_version_pinned_connector,
     query_workspace_info,
     query_workspaces_by_email_domain,
@@ -90,6 +92,94 @@ class WorkspacesByEmailDomainResult(BaseModel):
     )
 
 
+class LatestAttemptBreakdown(BaseModel):
+    """Breakdown of connections by latest attempt status."""
+
+    succeeded: int = Field(
+        default=0, description="Connections where latest attempt succeeded"
+    )
+    failed: int = Field(
+        default=0, description="Connections where latest attempt failed"
+    )
+    cancelled: int = Field(
+        default=0, description="Connections where latest attempt was cancelled"
+    )
+    running: int = Field(
+        default=0, description="Connections where latest attempt is still running"
+    )
+    unknown: int = Field(
+        default=0,
+        description="Connections with no recent attempts in the lookback window",
+    )
+
+
+class VersionPinStats(BaseModel):
+    """Stats for connections pinned to a specific version."""
+
+    pinned_version_id: str | None = Field(
+        description="The connector version UUID (None for unpinned connections)"
+    )
+    docker_image_tag: str | None = Field(
+        default=None, description="The docker image tag for this version"
+    )
+    total_connections: int = Field(description="Total number of connections")
+    enabled_connections: int = Field(
+        description="Number of enabled (active status) connections"
+    )
+    active_connections: int = Field(
+        description="Number of connections with recent sync activity"
+    )
+    latest_attempt: LatestAttemptBreakdown = Field(
+        description="Breakdown by latest attempt status"
+    )
+
+
+class ConnectorConnectionStats(BaseModel):
+    """Aggregate connection stats for a connector."""
+
+    connector_definition_id: str = Field(description="The connector definition UUID")
+    connector_type: str = Field(description="'source' or 'destination'")
+    canonical_name: str | None = Field(
+        default=None, description="The canonical connector name if resolved"
+    )
+    total_connections: int = Field(
+        description="Total number of non-deprecated connections"
+    )
+    enabled_connections: int = Field(
+        description="Number of enabled (active status) connections"
+    )
+    active_connections: int = Field(
+        description="Number of connections with recent sync activity"
+    )
+    pinned_connections: int = Field(
+        description="Number of connections with explicit version pins"
+    )
+    unpinned_connections: int = Field(
+        description="Number of connections on default version"
+    )
+    latest_attempt: LatestAttemptBreakdown = Field(
+        description="Overall breakdown by latest attempt status"
+    )
+    by_version: list[VersionPinStats] = Field(
+        description="Stats broken down by pinned version"
+    )
+
+
+class ConnectorConnectionStatsResponse(BaseModel):
+    """Response containing connection stats for multiple connectors."""
+
+    sources: list[ConnectorConnectionStats] = Field(
+        default_factory=list, description="Stats for source connectors"
+    )
+    destinations: list[ConnectorConnectionStats] = Field(
+        default_factory=list, description="Stats for destination connectors"
+    )
+    active_within_days: int = Field(
+        description="Lookback window used for 'active' connections"
+    )
+    generated_at: datetime = Field(description="When this response was generated")
+
+
 # Cloud registry URL for resolving canonical names
 CLOUD_REGISTRY_URL = (
     "https://connectors.airbyte.com/files/registries/v0/cloud_registry.json"
@@ -851,6 +941,213 @@ def query_prod_workspaces_by_email_domain(
     )
 
 
+def _build_connector_stats(
+    connector_definition_id: str,
+    connector_type: str,
+    canonical_name: str | None,
+    rows: list[dict[str, Any]],
+    version_tags: dict[str, str | None],
+) -> ConnectorConnectionStats:
+    """Build ConnectorConnectionStats from query result rows."""
+    # Aggregate totals across all version groups
+    total_connections = 0
+    enabled_connections = 0
+    active_connections = 0
+    pinned_connections = 0
+    unpinned_connections = 0
+    total_succeeded = 0
+    total_failed = 0
+    total_cancelled = 0
+    total_running = 0
+    total_unknown = 0
+
+    by_version: list[VersionPinStats] = []
+
+    for row in rows:
+        version_id = row.get("pinned_version_id")
+        row_total = int(row.get("total_connections", 0))
+        row_enabled = int(row.get("enabled_connections", 0))
+        row_active = int(row.get("active_connections", 0))
+        row_pinned = int(row.get("pinned_connections", 0))
+        row_unpinned = int(row.get("unpinned_connections", 0))
+        row_succeeded = int(row.get("succeeded_connections", 0))
+        row_failed = int(row.get("failed_connections", 0))
+        row_cancelled = int(row.get("cancelled_connections", 0))
+        row_running = int(row.get("running_connections", 0))
+        row_unknown = int(row.get("unknown_connections", 0))
+
+        total_connections += row_total
+        enabled_connections += row_enabled
+        active_connections += row_active
+        pinned_connections += row_pinned
+        unpinned_connections += row_unpinned
+        total_succeeded += row_succeeded
+        total_failed += row_failed
+        total_cancelled += row_cancelled
+        total_running += row_running
+        total_unknown += row_unknown
+
+        by_version.append(
+            VersionPinStats(
+                pinned_version_id=str(version_id) if version_id else None,
+                docker_image_tag=version_tags.get(str(version_id))
+                if version_id
+                else None,
+                total_connections=row_total,
+                enabled_connections=row_enabled,
+                active_connections=row_active,
+                latest_attempt=LatestAttemptBreakdown(
+                    succeeded=row_succeeded,
+                    failed=row_failed,
+                    cancelled=row_cancelled,
+                    running=row_running,
+                    unknown=row_unknown,
+                ),
+            )
+        )
+
+    return ConnectorConnectionStats(
+        connector_definition_id=connector_definition_id,
+        connector_type=connector_type,
+        canonical_name=canonical_name,
+        total_connections=total_connections,
+        enabled_connections=enabled_connections,
+        active_connections=active_connections,
+        pinned_connections=pinned_connections,
+        unpinned_connections=unpinned_connections,
+        latest_attempt=LatestAttemptBreakdown(
+            succeeded=total_succeeded,
+            failed=total_failed,
+            cancelled=total_cancelled,
+            running=total_running,
+            unknown=total_unknown,
+        ),
+        by_version=by_version,
+    )
+
+
+@mcp_tool(
+    read_only=True,
+    idempotent=True,
+    open_world=True,
+)
+def query_prod_connector_connection_stats(
+    source_definition_ids: Annotated[
+        list[str] | None,
+        Field(
+            description=(
+                "List of source connector definition IDs (UUIDs) to get stats for. "
+                "Example: ['afa734e4-3571-11ec-991a-1e0031268139']"
+            ),
+            default=None,
+        ),
+    ] = None,
+    destination_definition_ids: Annotated[
+        list[str] | None,
+        Field(
+            description=(
+                "List of destination connector definition IDs (UUIDs) to get stats for. "
+                "Example: ['94bd199c-2ff0-4aa2-b98e-17f0acb72610']"
+            ),
+            default=None,
+        ),
+    ] = None,
+    active_within_days: Annotated[
+        int,
+        Field(
+            description=(
+                "Number of days to look back for 'active' connections (default: 7). "
+                "Connections with sync activity within this window are counted as active."
+            ),
+            default=7,
+        ),
+    ] = 7,
+) -> ConnectorConnectionStatsResponse:
+    """Get aggregate connection stats for multiple connectors.
+
+    Returns counts of connections grouped by pinned version for each connector,
+    including:
+    - Total, enabled, and active connection counts
+    - Pinned vs unpinned breakdown
+    - Latest attempt status breakdown (succeeded, failed, cancelled, running, unknown)
+
+    This tool is designed for release monitoring workflows. It allows you to:
+    1. Query recently released connectors to identify which ones to monitor
+    2. Get aggregate stats showing how many connections are using each version
+    3. See health metrics (pass/fail) broken down by version
+
+    The 'active_within_days' parameter controls the lookback window for:
+    - Counting 'active' connections (those with recent sync activity)
+    - Determining 'latest attempt status' (most recent attempt within the window)
+
+    Connections with no sync activity in the lookback window will have
+    'unknown' status in the latest_attempt breakdown.
+    """
+    # Initialize empty lists if None
+    source_ids = source_definition_ids or []
+    destination_ids = destination_definition_ids or []
+
+    if not source_ids and not destination_ids:
+        raise PyAirbyteInputError(
+            message=(
+                "At least one of source_definition_ids or destination_definition_ids "
+                "must be provided."
+            ),
+        )
+
+    sources: list[ConnectorConnectionStats] = []
+    destinations: list[ConnectorConnectionStats] = []
+
+    # Process source connectors
+    for source_def_id in source_ids:
+        # Get version info for tag lookup
+        versions = query_connector_versions(source_def_id)
+        version_tags = {
+            str(v["version_id"]): v.get("docker_image_tag") for v in versions
+        }
+
+        # Get aggregate stats
+        rows = query_source_connection_stats(source_def_id, days=active_within_days)
+
+        sources.append(
+            _build_connector_stats(
+                connector_definition_id=source_def_id,
+                connector_type="source",
+                canonical_name=None,
+                rows=rows,
+                version_tags=version_tags,
+            )
+        )
+
+    # Process destination connectors
+    for dest_def_id in destination_ids:
+        # Get version info for tag lookup
+        versions = query_connector_versions(dest_def_id)
+        version_tags = {
+            str(v["version_id"]): v.get("docker_image_tag") for v in versions
+        }
+
+        # Get aggregate stats
+        rows = query_destination_connection_stats(dest_def_id, days=active_within_days)
+
+        destinations.append(
+            _build_connector_stats(
+                connector_definition_id=dest_def_id,
+                connector_type="destination",
+                canonical_name=None,
+                rows=rows,
+                version_tags=version_tags,
+            )
+        )
+
+    return ConnectorConnectionStatsResponse(
+        sources=sources,
+        destinations=destinations,
+        active_within_days=active_within_days,
+        generated_at=datetime.now(timezone.utc),
+    )
+
+
 def register_prod_db_query_tools(app: FastMCP) -> None:
     """Register prod DB query tools with the FastMCP app."""
     register_mcp_tools(app, domain=__name__)