airbyte-internal-ops 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

airbyte_internal_ops-*.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airbyte-internal-ops
-Version: 0.3.1
+Version: 0.4.0
 Summary: MCP and API interfaces that let the agents do the admin work
 Author-email: Aaron Steers <aj@airbyte.io>
 Keywords: admin,airbyte,api,mcp

airbyte_internal_ops-*.dist-info/RECORD
@@ -379,18 +379,18 @@ airbyte_ops_mcp/mcp/gcp_logs.py,sha256=IPtq4098_LN1Cgeba4jATO1iYFFFpL2-aRO0pGcOd
 airbyte_ops_mcp/mcp/github.py,sha256=h3M3VJrq09y_F9ueQVCq3bUbVBNFuTNKprHtGU_ttio,8045
 airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
 airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
-airbyte_ops_mcp/mcp/prerelease.py,sha256=nc6VU03ADVHWM3OjGKxbS5XqY4VoyRyrZNU_fyAtaOI,10465
-airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=DPzyHCT3yxj2kjkucefoVpsR71vscuJQ8tGgLs_lhv0,32068
+airbyte_ops_mcp/mcp/prerelease.py,sha256=GI4p1rGDCLZ6QbEG57oD_M3_buIHwq9B0In6fbj7Ptk,11883
+airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=VsiBBnVbOjc8lBb2Xr1lmcH3wu7QHQfjd4lORarEE1s,42700
 airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
 airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
-airbyte_ops_mcp/mcp/regression_tests.py,sha256=S1h-5S5gcZA4WEtIZyAQ836hd04tjSRRqMiYMx0S93g,16079
+airbyte_ops_mcp/mcp/regression_tests.py,sha256=VpXS36Ox2qPxtxnDhVoNfr83UfppWx8rMgCoDiKWzWg,16727
 airbyte_ops_mcp/mcp/server.py,sha256=lKAXxt4u4bz7dsKvAYFFHziMbun2pOnxYmrMtRxsZvM,5317
 airbyte_ops_mcp/mcp/server_info.py,sha256=Yi4B1auW64QZGBDas5mro_vwTjvrP785TFNSBP7GhRg,2361
 airbyte_ops_mcp/prod_db_access/__init__.py,sha256=5pxouMPY1beyWlB0UwPnbaLTKTHqU6X82rbbgKY2vYU,1069
 airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=VUqEWZtharJUR-Cri_pMwtGh1C4Neu4s195mbEXlm-w,9190
 airbyte_ops_mcp/prod_db_access/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airbyte_ops_mcp/prod_db_access/queries.py,sha256=TNxTY5Hf3ImHBX0_e_20-VbF3yzYm2mX3ykWzQXgpno,17754
-airbyte_ops_mcp/prod_db_access/sql.py,sha256=xB7SJGnBSlY-ZB7ku_9QfvNIEldGEmCn-jJcAdes_LY,30407
+airbyte_ops_mcp/prod_db_access/queries.py,sha256=pyW5GxDZ5ibwXawxyI_IR7VFcmoX7pZyZ2jdADqhJRY,20276
+airbyte_ops_mcp/prod_db_access/sql.py,sha256=lzFOYfkb-rFTaZ6vrAK9G8Ym4KTUdhPMBzK44NSRzcg,35362
 airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
 airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
 airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
@@ -400,7 +400,7 @@ airbyte_ops_mcp/regression_tests/ci_output.py,sha256=rrvCVKKShc1iVPMuQJDBqSbsiAH
 airbyte_ops_mcp/regression_tests/config.py,sha256=dwWeY0tatdbwl9BqbhZ7EljoZDCtKmGO5fvOAIxeXmA,5873
 airbyte_ops_mcp/regression_tests/connection_fetcher.py,sha256=5wIiA0VvCFNEc-fr6Po18gZMX3E5fyPOGf2SuVOqv5U,12799
 airbyte_ops_mcp/regression_tests/connection_secret_retriever.py,sha256=FhWNVWq7sON4nwUmVJv8BgXBOqg1YV4b5WuWyCzZ0LU,4695
-airbyte_ops_mcp/regression_tests/connector_runner.py,sha256=bappfBSq8dn3IyVAMS_XuzYEwWus23hkDCHLa2RFysI,9920
+airbyte_ops_mcp/regression_tests/connector_runner.py,sha256=OZzUa2aLh0sHaEARsDePOA-e3qEX4cvh3Jhnvi8S1rY,10130
 airbyte_ops_mcp/regression_tests/evaluation_modes.py,sha256=lAL6pEDmy_XCC7_m4_NXjt_f6Z8CXeAhMkc0FU8bm_M,1364
 airbyte_ops_mcp/regression_tests/http_metrics.py,sha256=oTD7f2MnQOvx4plOxHop2bInQ0-whvuToSsrC7TIM-M,12469
 airbyte_ops_mcp/regression_tests/models.py,sha256=brtAT9oO1TwjFcP91dFcu0XcUNqQb-jf7di1zkoVEuo,8782
@@ -414,7 +414,7 @@ airbyte_ops_mcp/regression_tests/regression/comparators.py,sha256=MJkLZEKHivgrG0
 airbyte_ops_mcp/regression_tests/validation/__init__.py,sha256=MBEwGOoNuqT4_oCahtoK62OKWIjUCfWa7vZTxNj_0Ek,1532
 airbyte_ops_mcp/regression_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdPgwu4d0hA0ZEjtsNh5gapvGydRv3_qk,12553
 airbyte_ops_mcp/regression_tests/validation/record_validators.py,sha256=RjauAhKWNwxMBTu0eNS2hMFNQVs5CLbQU51kp6FOVDk,7432
-airbyte_internal_ops-0.3.1.dist-info/METADATA,sha256=kx1iQ0YE42LjpsFpjJD7SECaYMHEjo36VjvSVf3BwHk,5679
-airbyte_internal_ops-0.3.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-airbyte_internal_ops-0.3.1.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
-airbyte_internal_ops-0.3.1.dist-info/RECORD,,
+airbyte_internal_ops-0.4.0.dist-info/METADATA,sha256=K9rJIUSobD2QWdHccHpKZooawgipH8ZozDqTl0FrG-8,5679
+airbyte_internal_ops-0.4.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+airbyte_internal_ops-0.4.0.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+airbyte_internal_ops-0.4.0.dist-info/RECORD,,

airbyte_ops_mcp/mcp/prerelease.py
@@ -2,12 +2,15 @@
 """MCP tools for triggering connector pre-release workflows.

 This module provides MCP tools for triggering the publish-connectors-prerelease
-workflow in the airbytehq/airbyte repository via GitHub's workflow dispatch API.
+workflow in the airbytehq/airbyte repository (for OSS connectors) or the
+publish_enterprise_connectors workflow in airbytehq/airbyte-enterprise
+(for enterprise connectors) via GitHub's workflow dispatch API.
 """

 from __future__ import annotations

 import base64
+from enum import StrEnum
 from typing import Annotated, Literal

 import requests
@@ -18,12 +21,25 @@ from pydantic import BaseModel, Field
 from airbyte_ops_mcp.github_actions import GITHUB_API_BASE, resolve_github_token
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools

+
+class ConnectorRepo(StrEnum):
+    """Repository where connector code is located."""
+
+    AIRBYTE = "airbyte"
+    AIRBYTE_ENTERPRISE = "airbyte-enterprise"
+
+
 DEFAULT_REPO_OWNER = "airbytehq"
-DEFAULT_REPO_NAME = "airbyte"
+DEFAULT_REPO_NAME = ConnectorRepo.AIRBYTE
 DEFAULT_BRANCH = "master"
 PRERELEASE_WORKFLOW_FILE = "publish-connectors-prerelease-command.yml"
 CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"

+# Enterprise repository constants
+ENTERPRISE_REPO_NAME = ConnectorRepo.AIRBYTE_ENTERPRISE
+ENTERPRISE_DEFAULT_BRANCH = "main"
+ENTERPRISE_PRERELEASE_WORKFLOW_FILE = "publish_enterprise_connectors.yml"
+
 # Token env vars for prerelease publishing (in order of preference)
 PRERELEASE_TOKEN_ENV_VARS = [
     "GITHUB_CONNECTOR_PUBLISHING_PAT",
@@ -238,6 +254,14 @@ def publish_connector_to_airbyte_registry(
         int,
         Field(description="The pull request number containing the connector changes"),
     ],
+    repo: Annotated[
+        ConnectorRepo,
+        Field(
+            default=ConnectorRepo.AIRBYTE,
+            description="Repository where the connector PR is located. "
+            "Use 'airbyte' for OSS connectors (default) or 'airbyte-enterprise' for enterprise connectors.",
+        ),
+    ],
     prerelease: Annotated[
         Literal[True],
         Field(
@@ -249,8 +273,10 @@ def publish_connector_to_airbyte_registry(
     """Publish a connector to the Airbyte registry.

     Currently only supports pre-release publishing. This tool triggers the
-    publish-connectors-prerelease workflow in the airbytehq/airbyte repository,
-    which publishes a pre-release version of the specified connector from the PR branch.
+    publish-connectors-prerelease workflow in the airbytehq/airbyte repository
+    (for OSS connectors) or the publish_enterprise_connectors workflow in
+    airbytehq/airbyte-enterprise (for enterprise connectors), which publishes
+    a pre-release version of the specified connector from the PR branch.

     Pre-release versions are tagged with the format: {version}-preview.{7-char-git-sha}
     These versions are available for version pinning via the scoped_configuration API.
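
The tag format described in that docstring is simple to reproduce. A hypothetical helper (the module's actual implementation is not shown in this diff, so the name and signature here are illustrative):

    def compute_prerelease_tag(base_version: str, head_sha: str) -> str:
        """Illustrative only: ('1.2.3', 'abc1234def...') -> '1.2.3-preview.abc1234'."""
        return f"{base_version}-preview.{head_sha[:7]}"
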
@@ -267,17 +293,27 @@ def publish_connector_to_airbyte_registry(
     # Guard: Check for required token
     token = resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)

+    # Determine repo-specific settings
+    is_enterprise = repo == ConnectorRepo.AIRBYTE_ENTERPRISE
+    target_repo_name = ENTERPRISE_REPO_NAME if is_enterprise else DEFAULT_REPO_NAME
+    target_branch = ENTERPRISE_DEFAULT_BRANCH if is_enterprise else DEFAULT_BRANCH
+    target_workflow = (
+        ENTERPRISE_PRERELEASE_WORKFLOW_FILE
+        if is_enterprise
+        else PRERELEASE_WORKFLOW_FILE
+    )
+
     # Get the PR's head SHA for computing the docker image tag
     # Note: We no longer pass gitref to the workflow - it derives the ref from PR number
     head_info = _get_pr_head_info(
-        DEFAULT_REPO_OWNER, DEFAULT_REPO_NAME, pr_number, token
+        DEFAULT_REPO_OWNER, target_repo_name, pr_number, token
     )

     # Prepare workflow inputs
     # The workflow uses refs/pull/{pr}/head directly - no gitref needed
     # Note: The workflow auto-detects modified connectors from the PR
     workflow_inputs = {
-        "repo": f"{DEFAULT_REPO_OWNER}/{DEFAULT_REPO_NAME}",
+        "repo": f"{DEFAULT_REPO_OWNER}/{target_repo_name}",
         "pr": str(pr_number),
     }

@@ -285,9 +321,9 @@ def publish_connector_to_airbyte_registry(
     # The workflow will checkout the PR branch via inputs.gitref
     workflow_url = _trigger_workflow_dispatch(
         owner=DEFAULT_REPO_OWNER,
-        repo=DEFAULT_REPO_NAME,
-        workflow_file=PRERELEASE_WORKFLOW_FILE,
-        ref=DEFAULT_BRANCH,
+        repo=target_repo_name,
+        workflow_file=target_workflow,
+        ref=target_branch,
         inputs=workflow_inputs,
         token=token,
     )
@@ -297,7 +333,7 @@ def publish_connector_to_airbyte_registry(
     docker_image_tag: str | None = None
     metadata = _get_connector_metadata(
         DEFAULT_REPO_OWNER,
-        DEFAULT_REPO_NAME,
+        target_repo_name,
         connector_name,
         head_info.sha,
         token,
@@ -311,9 +347,10 @@ def publish_connector_to_airbyte_registry(
         base_version, head_info.sha
     )

+    repo_info = f" from {repo}" if is_enterprise else ""
     return PrereleaseWorkflowResult(
         success=True,
-        message=f"Successfully triggered pre-release workflow for {connector_name} from PR #{pr_number}",
+        message=f"Successfully triggered pre-release workflow for {connector_name}{repo_info} from PR #{pr_number}",
         workflow_url=workflow_url,
         connector_name=connector_name,
         pr_number=pr_number,
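
_trigger_workflow_dispatch itself is not part of this diff, but GitHub's workflow-dispatch endpoint is documented, so the helper plausibly reduces to something like the sketch below. The function name and the returned URL shape are assumptions; the endpoint, headers, and payload are GitHub's documented REST API:

    import requests

    def trigger_workflow_dispatch(
        owner: str,
        repo: str,
        workflow_file: str,
        ref: str,
        inputs: dict[str, str],
        token: str,
    ) -> str:
        """Illustrative sketch: POST to GitHub's workflow-dispatch endpoint."""
        url = f"https://api.github.com/repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches"
        resp = requests.post(
            url,
            headers={
                "Authorization": f"Bearer {token}",
                "Accept": "application/vnd.github+json",
            },
            json={"ref": ref, "inputs": inputs},  # GitHub returns 204 No Content on success
            timeout=30,
        )
        resp.raise_for_status()
        # The dispatch API returns no run ID, so link to the workflow's runs page instead.
        return f"https://github.com/{owner}/{repo}/actions/workflows/{workflow_file}"
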
airbyte_ops_mcp/mcp/prod_db_queries.py
@@ -7,7 +7,7 @@ airbyte_ops_mcp.prod_db_access.queries for use by AI agents.

 from __future__ import annotations

-from datetime import datetime
+from datetime import datetime, timezone
 from enum import StrEnum
 from typing import Annotated, Any

@@ -24,9 +24,11 @@ from airbyte_ops_mcp.prod_db_access.queries import (
     query_connections_by_destination_connector,
     query_connector_versions,
     query_dataplanes_list,
+    query_destination_connection_stats,
     query_failed_sync_attempts_for_connector,
     query_new_connector_releases,
     query_recent_syncs_for_connector,
+    query_source_connection_stats,
     query_syncs_for_version_pinned_connector,
     query_workspace_info,
     query_workspaces_by_email_domain,
@@ -90,6 +92,94 @@ class WorkspacesByEmailDomainResult(BaseModel):
     )


+class LatestAttemptBreakdown(BaseModel):
+    """Breakdown of connections by latest attempt status."""
+
+    succeeded: int = Field(
+        default=0, description="Connections where latest attempt succeeded"
+    )
+    failed: int = Field(
+        default=0, description="Connections where latest attempt failed"
+    )
+    cancelled: int = Field(
+        default=0, description="Connections where latest attempt was cancelled"
+    )
+    running: int = Field(
+        default=0, description="Connections where latest attempt is still running"
+    )
+    unknown: int = Field(
+        default=0,
+        description="Connections with no recent attempts in the lookback window",
+    )
+
+
+class VersionPinStats(BaseModel):
+    """Stats for connections pinned to a specific version."""
+
+    pinned_version_id: str | None = Field(
+        description="The connector version UUID (None for unpinned connections)"
+    )
+    docker_image_tag: str | None = Field(
+        default=None, description="The docker image tag for this version"
+    )
+    total_connections: int = Field(description="Total number of connections")
+    enabled_connections: int = Field(
+        description="Number of enabled (active status) connections"
+    )
+    active_connections: int = Field(
+        description="Number of connections with recent sync activity"
+    )
+    latest_attempt: LatestAttemptBreakdown = Field(
+        description="Breakdown by latest attempt status"
+    )
+
+
+class ConnectorConnectionStats(BaseModel):
+    """Aggregate connection stats for a connector."""
+
+    connector_definition_id: str = Field(description="The connector definition UUID")
+    connector_type: str = Field(description="'source' or 'destination'")
+    canonical_name: str | None = Field(
+        default=None, description="The canonical connector name if resolved"
+    )
+    total_connections: int = Field(
+        description="Total number of non-deprecated connections"
+    )
+    enabled_connections: int = Field(
+        description="Number of enabled (active status) connections"
+    )
+    active_connections: int = Field(
+        description="Number of connections with recent sync activity"
+    )
+    pinned_connections: int = Field(
+        description="Number of connections with explicit version pins"
+    )
+    unpinned_connections: int = Field(
+        description="Number of connections on default version"
+    )
+    latest_attempt: LatestAttemptBreakdown = Field(
+        description="Overall breakdown by latest attempt status"
+    )
+    by_version: list[VersionPinStats] = Field(
+        description="Stats broken down by pinned version"
+    )
+
+
+class ConnectorConnectionStatsResponse(BaseModel):
+    """Response containing connection stats for multiple connectors."""
+
+    sources: list[ConnectorConnectionStats] = Field(
+        default_factory=list, description="Stats for source connectors"
+    )
+    destinations: list[ConnectorConnectionStats] = Field(
+        default_factory=list, description="Stats for destination connectors"
+    )
+    active_within_days: int = Field(
+        description="Lookback window used for 'active' connections"
+    )
+    generated_at: datetime = Field(description="When this response was generated")
+
+
 # Cloud registry URL for resolving canonical names
 CLOUD_REGISTRY_URL = (
     "https://connectors.airbyte.com/files/registries/v0/cloud_registry.json"
@@ -851,6 +941,213 @@ def query_prod_workspaces_by_email_domain(
     )


+def _build_connector_stats(
+    connector_definition_id: str,
+    connector_type: str,
+    canonical_name: str | None,
+    rows: list[dict[str, Any]],
+    version_tags: dict[str, str | None],
+) -> ConnectorConnectionStats:
+    """Build ConnectorConnectionStats from query result rows."""
+    # Aggregate totals across all version groups
+    total_connections = 0
+    enabled_connections = 0
+    active_connections = 0
+    pinned_connections = 0
+    unpinned_connections = 0
+    total_succeeded = 0
+    total_failed = 0
+    total_cancelled = 0
+    total_running = 0
+    total_unknown = 0
+
+    by_version: list[VersionPinStats] = []
+
+    for row in rows:
+        version_id = row.get("pinned_version_id")
+        row_total = int(row.get("total_connections", 0))
+        row_enabled = int(row.get("enabled_connections", 0))
+        row_active = int(row.get("active_connections", 0))
+        row_pinned = int(row.get("pinned_connections", 0))
+        row_unpinned = int(row.get("unpinned_connections", 0))
+        row_succeeded = int(row.get("succeeded_connections", 0))
+        row_failed = int(row.get("failed_connections", 0))
+        row_cancelled = int(row.get("cancelled_connections", 0))
+        row_running = int(row.get("running_connections", 0))
+        row_unknown = int(row.get("unknown_connections", 0))
+
+        total_connections += row_total
+        enabled_connections += row_enabled
+        active_connections += row_active
+        pinned_connections += row_pinned
+        unpinned_connections += row_unpinned
+        total_succeeded += row_succeeded
+        total_failed += row_failed
+        total_cancelled += row_cancelled
+        total_running += row_running
+        total_unknown += row_unknown
+
+        by_version.append(
+            VersionPinStats(
+                pinned_version_id=str(version_id) if version_id else None,
+                docker_image_tag=version_tags.get(str(version_id))
+                if version_id
+                else None,
+                total_connections=row_total,
+                enabled_connections=row_enabled,
+                active_connections=row_active,
+                latest_attempt=LatestAttemptBreakdown(
+                    succeeded=row_succeeded,
+                    failed=row_failed,
+                    cancelled=row_cancelled,
+                    running=row_running,
+                    unknown=row_unknown,
+                ),
+            )
+        )
+
+    return ConnectorConnectionStats(
+        connector_definition_id=connector_definition_id,
+        connector_type=connector_type,
+        canonical_name=canonical_name,
+        total_connections=total_connections,
+        enabled_connections=enabled_connections,
+        active_connections=active_connections,
+        pinned_connections=pinned_connections,
+        unpinned_connections=unpinned_connections,
+        latest_attempt=LatestAttemptBreakdown(
+            succeeded=total_succeeded,
+            failed=total_failed,
+            cancelled=total_cancelled,
+            running=total_running,
+            unknown=total_unknown,
+        ),
+        by_version=by_version,
+    )
+
+
+@mcp_tool(
+    read_only=True,
+    idempotent=True,
+    open_world=True,
+)
+def query_prod_connector_connection_stats(
+    source_definition_ids: Annotated[
+        list[str] | None,
+        Field(
+            description=(
+                "List of source connector definition IDs (UUIDs) to get stats for. "
+                "Example: ['afa734e4-3571-11ec-991a-1e0031268139']"
+            ),
+            default=None,
+        ),
+    ] = None,
+    destination_definition_ids: Annotated[
+        list[str] | None,
+        Field(
+            description=(
+                "List of destination connector definition IDs (UUIDs) to get stats for. "
+                "Example: ['94bd199c-2ff0-4aa2-b98e-17f0acb72610']"
+            ),
+            default=None,
+        ),
+    ] = None,
+    active_within_days: Annotated[
+        int,
+        Field(
+            description=(
+                "Number of days to look back for 'active' connections (default: 7). "
+                "Connections with sync activity within this window are counted as active."
+            ),
+            default=7,
+        ),
+    ] = 7,
+) -> ConnectorConnectionStatsResponse:
+    """Get aggregate connection stats for multiple connectors.
+
+    Returns counts of connections grouped by pinned version for each connector,
+    including:
+    - Total, enabled, and active connection counts
+    - Pinned vs unpinned breakdown
+    - Latest attempt status breakdown (succeeded, failed, cancelled, running, unknown)
+
+    This tool is designed for release monitoring workflows. It allows you to:
+    1. Query recently released connectors to identify which ones to monitor
+    2. Get aggregate stats showing how many connections are using each version
+    3. See health metrics (pass/fail) broken down by version
+
+    The 'active_within_days' parameter controls the lookback window for:
+    - Counting 'active' connections (those with recent sync activity)
+    - Determining 'latest attempt status' (most recent attempt within the window)
+
+    Connections with no sync activity in the lookback window will have
+    'unknown' status in the latest_attempt breakdown.
+    """
+    # Initialize empty lists if None
+    source_ids = source_definition_ids or []
+    destination_ids = destination_definition_ids or []
+
+    if not source_ids and not destination_ids:
+        raise PyAirbyteInputError(
+            message=(
+                "At least one of source_definition_ids or destination_definition_ids "
+                "must be provided."
+            ),
+        )
+
+    sources: list[ConnectorConnectionStats] = []
+    destinations: list[ConnectorConnectionStats] = []
+
+    # Process source connectors
+    for source_def_id in source_ids:
+        # Get version info for tag lookup
+        versions = query_connector_versions(source_def_id)
+        version_tags = {
+            str(v["version_id"]): v.get("docker_image_tag") for v in versions
+        }
+
+        # Get aggregate stats
+        rows = query_source_connection_stats(source_def_id, days=active_within_days)
+
+        sources.append(
+            _build_connector_stats(
+                connector_definition_id=source_def_id,
+                connector_type="source",
+                canonical_name=None,
+                rows=rows,
+                version_tags=version_tags,
+            )
+        )
+
+    # Process destination connectors
+    for dest_def_id in destination_ids:
+        # Get version info for tag lookup
+        versions = query_connector_versions(dest_def_id)
+        version_tags = {
+            str(v["version_id"]): v.get("docker_image_tag") for v in versions
+        }
+
+        # Get aggregate stats
+        rows = query_destination_connection_stats(dest_def_id, days=active_within_days)
+
+        destinations.append(
+            _build_connector_stats(
+                connector_definition_id=dest_def_id,
+                connector_type="destination",
+                canonical_name=None,
+                rows=rows,
+                version_tags=version_tags,
+            )
+        )
+
+    return ConnectorConnectionStatsResponse(
+        sources=sources,
+        destinations=destinations,
+        active_within_days=active_within_days,
+        generated_at=datetime.now(timezone.utc),
+    )
+
+
 def register_prod_db_query_tools(app: FastMCP) -> None:
     """Register prod DB query tools with the FastMCP app."""
     register_mcp_tools(app, domain=__name__)
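
Assuming the mcp_tool decorator returns the wrapped function unchanged (an assumption) and prod DB credentials are available, the new tool can also be exercised as a plain function. A hedged usage sketch, using the example definition ID from the parameter description:

    response = query_prod_connector_connection_stats(
        source_definition_ids=["afa734e4-3571-11ec-991a-1e0031268139"],
        active_within_days=14,  # widen the lookback window beyond the 7-day default
    )
    for stats in response.sources:
        print(stats.connector_definition_id, stats.total_connections)
        for version in stats.by_version:
            # per-version health: failed latest attempts for each docker image tag
            print("  ", version.docker_image_tag, version.latest_attempt.failed)
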
airbyte_ops_mcp/mcp/regression_tests.py
@@ -37,6 +37,7 @@ from airbyte_ops_mcp.github_actions import (
     trigger_workflow_dispatch,
 )
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
+from airbyte_ops_mcp.mcp.prerelease import ConnectorRepo

 logger = logging.getLogger(__name__)

@@ -309,7 +310,13 @@ def run_regression_tests(
     ],
     pr: Annotated[
         int,
-        "PR number from the airbyte monorepo to checkout and build from (e.g., 70847). Required.",
+        "PR number to checkout and build from (e.g., 70847). Required. "
+        "The PR must be from the repository specified by the 'repo' parameter.",
+    ],
+    repo: Annotated[
+        ConnectorRepo,
+        "Repository where the connector PR is located. "
+        "Use 'airbyte' for OSS connectors (default) or 'airbyte-enterprise' for enterprise connectors.",
     ],
     connection_id: Annotated[
         str | None,
@@ -347,6 +354,10 @@ def run_regression_tests(
     This tool triggers the regression test workflow which builds the connector
     from the specified PR and runs tests against it.

+    Supports both OSS connectors (from airbytehq/airbyte) and enterprise connectors
+    (from airbytehq/airbyte-enterprise). Use the 'repo' parameter to specify which
+    repository contains the connector PR.
+
     - skip_compare=False (default): Comparison mode - compares the PR version
       against the baseline (control) version.
     - skip_compare=True: Single-version mode - runs tests without comparison.
@@ -390,10 +401,11 @@ def run_regression_tests(
             workflow_url=None,
         )

-    # Build workflow inputs - connector_name and pr are required
+    # Build workflow inputs - connector_name, pr, and repo are required
     workflow_inputs: dict[str, str] = {
         "connector_name": connector_name,
         "pr": str(pr),
+        "repo": repo,
     }

     # Add optional inputs
@@ -431,12 +443,13 @@ def run_regression_tests(

     view_url = dispatch_result.run_url or dispatch_result.workflow_url
     connection_info = f" for connection {connection_id}" if connection_id else ""
+    repo_info = f" from {repo}" if repo != ConnectorRepo.AIRBYTE else ""
     return RunRegressionTestsResponse(
         run_id=run_id,
         status=TestRunStatus.QUEUED,
         message=(
             f"{mode_description.capitalize()} regression test workflow triggered "
-            f"for {connector_name} (PR #{pr}){connection_info}. View progress at: {view_url}"
+            f"for {connector_name} (PR #{pr}{repo_info}){connection_info}. View progress at: {view_url}"
         ),
         workflow_url=dispatch_result.workflow_url,
         github_run_id=dispatch_result.run_id,

airbyte_ops_mcp/prod_db_access/queries.py
@@ -26,6 +26,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG,
     SELECT_CONNECTOR_VERSIONS,
     SELECT_DATAPLANES_LIST,
+    SELECT_DESTINATION_CONNECTION_STATS,
     SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
     SELECT_NEW_CONNECTOR_RELEASES,
     SELECT_ORG_WORKSPACES,
@@ -35,6 +36,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR,
     SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR,
     SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR,
+    SELECT_SOURCE_CONNECTION_STATS,
     SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
     SELECT_SYNC_RESULTS_FOR_VERSION,
     SELECT_WORKSPACE_INFO,
@@ -497,3 +499,69 @@ def query_workspaces_by_email_domain(
         query_name="SELECT_WORKSPACES_BY_EMAIL_DOMAIN",
         gsm_client=gsm_client,
     )
+
+
+def query_source_connection_stats(
+    connector_definition_id: str,
+    days: int = 7,
+    *,
+    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+) -> list[dict[str, Any]]:
+    """Query aggregate connection stats for a SOURCE connector.
+
+    Returns counts of connections grouped by pinned version, including:
+    - Total, enabled, and active connection counts
+    - Pinned vs unpinned breakdown
+    - Latest attempt status breakdown (succeeded, failed, cancelled, running, unknown)
+
+    Args:
+        connector_definition_id: Source connector definition UUID
+        days: Number of days to look back for "active" connections (default: 7)
+        gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+    Returns:
+        List of dicts with aggregate counts grouped by pinned_version_id
+    """
+    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
+    return _run_sql_query(
+        SELECT_SOURCE_CONNECTION_STATS,
+        parameters={
+            "connector_definition_id": connector_definition_id,
+            "cutoff_date": cutoff_date,
+        },
+        query_name="SELECT_SOURCE_CONNECTION_STATS",
+        gsm_client=gsm_client,
+    )
+
+
+def query_destination_connection_stats(
+    connector_definition_id: str,
+    days: int = 7,
+    *,
+    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+) -> list[dict[str, Any]]:
+    """Query aggregate connection stats for a DESTINATION connector.
+
+    Returns counts of connections grouped by pinned version, including:
+    - Total, enabled, and active connection counts
+    - Pinned vs unpinned breakdown
+    - Latest attempt status breakdown (succeeded, failed, cancelled, running, unknown)
+
+    Args:
+        connector_definition_id: Destination connector definition UUID
+        days: Number of days to look back for "active" connections (default: 7)
+        gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+    Returns:
+        List of dicts with aggregate counts grouped by pinned_version_id
+    """
+    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
+    return _run_sql_query(
+        SELECT_DESTINATION_CONNECTION_STATS,
+        parameters={
+            "connector_definition_id": connector_definition_id,
+            "cutoff_date": cutoff_date,
+        },
+        query_name="SELECT_DESTINATION_CONNECTION_STATS",
+        gsm_client=gsm_client,
+    )
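
Both helpers compute the cutoff as a timezone-aware UTC datetime before binding it as a SQL parameter, which keeps the comparison against timestamped columns unambiguous. A quick illustration of the difference from a naive timestamp:

    from datetime import datetime, timedelta, timezone

    days = 7
    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
    print(cutoff_date.isoformat())          # e.g. 2024-01-01T12:00:00.000000+00:00
    assert cutoff_date.tzinfo is not None   # aware, unlike naive datetime.now()
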
airbyte_ops_mcp/prod_db_access/sql.py
@@ -826,3 +826,111 @@ SELECT_WORKSPACES_BY_EMAIL_DOMAIN = sqlalchemy.text(
     LIMIT :limit
     """
 )
+
+# =============================================================================
+# Connector Connection Stats Queries (Aggregate Counts)
+# =============================================================================
+
+# Count connections by SOURCE connector with latest attempt status breakdown
+# Groups by pinned version and provides counts of succeeded/failed/other attempts
+# Uses a CTE to get the latest attempt per connection, then aggregates
+SELECT_SOURCE_CONNECTION_STATS = sqlalchemy.text(
+    """
+    WITH latest_attempts AS (
+        SELECT DISTINCT ON (connection.id)
+            connection.id AS connection_id,
+            connection.status AS connection_status,
+            scoped_configuration.value AS pinned_version_id,
+            attempts.status::text AS latest_attempt_status
+        FROM connection
+        JOIN actor
+            ON connection.source_id = actor.id
+            AND actor.actor_definition_id = :connector_definition_id
+            AND actor.tombstone = false
+        JOIN workspace
+            ON actor.workspace_id = workspace.id
+            AND workspace.tombstone = false
+        LEFT JOIN scoped_configuration
+            ON scoped_configuration.scope_id = actor.id
+            AND scoped_configuration.key = 'connector_version'
+            AND scoped_configuration.scope_type = 'actor'
+        LEFT JOIN jobs
+            ON jobs.scope = connection.id::text
+            AND jobs.config_type = 'sync'
+            AND jobs.updated_at >= :cutoff_date
+        LEFT JOIN attempts
+            ON attempts.job_id = jobs.id
+        WHERE
+            connection.status != 'deprecated'
+        ORDER BY
+            connection.id,
+            attempts.ended_at DESC NULLS LAST
+    )
+    SELECT
+        pinned_version_id,
+        COUNT(*) AS total_connections,
+        COUNT(*) FILTER (WHERE connection_status = 'active') AS enabled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NOT NULL) AS active_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NOT NULL) AS pinned_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NULL) AS unpinned_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'succeeded') AS succeeded_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'failed') AS failed_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'cancelled') AS cancelled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'running') AS running_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NULL) AS unknown_connections
+    FROM latest_attempts
+    GROUP BY pinned_version_id
+    ORDER BY total_connections DESC
+    """
+)
+
+# Count connections by DESTINATION connector with latest attempt status breakdown
+SELECT_DESTINATION_CONNECTION_STATS = sqlalchemy.text(
+    """
+    WITH latest_attempts AS (
+        SELECT DISTINCT ON (connection.id)
+            connection.id AS connection_id,
+            connection.status AS connection_status,
+            scoped_configuration.value AS pinned_version_id,
+            attempts.status::text AS latest_attempt_status
+        FROM connection
+        JOIN actor
+            ON connection.destination_id = actor.id
+            AND actor.actor_definition_id = :connector_definition_id
+            AND actor.tombstone = false
+        JOIN workspace
+            ON actor.workspace_id = workspace.id
+            AND workspace.tombstone = false
+        LEFT JOIN scoped_configuration
+            ON scoped_configuration.scope_id = actor.id
+            AND scoped_configuration.key = 'connector_version'
+            AND scoped_configuration.scope_type = 'actor'
+        LEFT JOIN jobs
+            ON jobs.scope = connection.id::text
+            AND jobs.config_type = 'sync'
+            AND jobs.updated_at >= :cutoff_date
+        LEFT JOIN attempts
+            ON attempts.job_id = jobs.id
+        WHERE
+            connection.status != 'deprecated'
+        ORDER BY
+            connection.id,
+            attempts.ended_at DESC NULLS LAST
+    )
+    SELECT
+        pinned_version_id,
+        COUNT(*) AS total_connections,
+        COUNT(*) FILTER (WHERE connection_status = 'active') AS enabled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NOT NULL) AS active_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NOT NULL) AS pinned_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NULL) AS unpinned_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'succeeded') AS succeeded_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'failed') AS failed_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'cancelled') AS cancelled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'running') AS running_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NULL) AS unknown_connections
+    FROM latest_attempts
+    GROUP BY pinned_version_id
+    ORDER BY total_connections DESC
+    """
+)
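
The CTE leans on two PostgreSQL features: DISTINCT ON keeps only the first row per connection.id under the ORDER BY (i.e., the latest attempt, with NULLS LAST pushing attempt-less connections into the 'unknown' bucket), and FILTER produces the per-status conditional counts in one pass. For readers less familiar with the idiom, the same two-step logic in plain Python over invented rows:

    from collections import Counter

    # (connection_id, ended_at, status); latest_attempts keeps the newest row per connection
    attempts = [
        ("c1", 2, "succeeded"), ("c1", 1, "failed"),
        ("c2", 5, "failed"),
        ("c3", None, None),  # no attempt in the lookback window -> 'unknown'
    ]
    latest: dict[str, str | None] = {}
    for conn_id, ended_at, status in sorted(
        attempts,
        # sort by id, then ended_at descending with NULLs last (mirrors DESC NULLS LAST)
        key=lambda r: (r[0], r[1] is None, -(r[1] if r[1] is not None else 0)),
    ):
        latest.setdefault(conn_id, status)  # first row per id == DISTINCT ON

    # the FILTER aggregates reduce to conditional counts over the deduplicated rows
    counts = Counter(status or "unknown" for status in latest.values())
    assert counts == {"succeeded": 1, "failed": 1, "unknown": 1}
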
airbyte_ops_mcp/regression_tests/connector_runner.py
@@ -105,16 +105,19 @@ class ConnectorRunner:
         if self.config is not None:
             config_path = temp_dir / self.CONFIG_FILE
             config_path.write_text(json.dumps(self.config))
+            config_path.chmod(0o666)
             self.logger.debug(f"Wrote config to {config_path}")

         if self.configured_catalog is not None:
             catalog_path = temp_dir / self.CATALOG_FILE
             catalog_path.write_text(self.configured_catalog.json())
+            catalog_path.chmod(0o666)
             self.logger.debug(f"Wrote catalog to {catalog_path}")

         if self.state is not None:
             state_path = temp_dir / self.STATE_FILE
             state_path.write_text(json.dumps(self.state))
+            state_path.chmod(0o666)
             self.logger.debug(f"Wrote state to {state_path}")

     def _build_docker_command(self, temp_dir: Path) -> list[str]:
@@ -135,7 +138,7 @@ class ConnectorRunner:
             "--name",
             container_name,
             "-v",
-            f"{temp_dir}:{self.DATA_DIR}:ro",
+            f"{temp_dir}:{self.DATA_DIR}",
         ]

         if self.proxy_url:
@@ -168,9 +171,10 @@ class ConnectorRunner:

         with tempfile.TemporaryDirectory() as temp_dir:
             temp_path = Path(temp_dir)
-            # Make temp directory world-readable so non-root container users can access it
-            # Many connector images run as non-root users (e.g., 'airbyte' user)
-            temp_path.chmod(0o755)
+            # Make temp directory world-writable so non-root container users can read/write
+            # Many connector images run as non-root users (e.g., 'airbyte' user) with
+            # different UIDs than the host user, so they need write access for config migration
+            temp_path.chmod(0o777)
             self._prepare_data_directory(temp_path)

             docker_cmd = self._build_docker_command(temp_path)
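
These three changes work together: the bind mount drops its ':ro' flag, the temp directory goes from 0o755 (world read/traverse) to 0o777 (world write), and each input file gets 0o666, so a container process running under an arbitrary non-root UID can both read the inputs and write results back (e.g., a migrated config). A self-contained sketch of the pattern, with a placeholder image name and mount point:

    import subprocess
    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        data_dir = Path(tmp)
        data_dir.chmod(0o777)            # any container UID may create/modify files here
        config = data_dir / "config.json"
        config.write_text("{}")
        config.chmod(0o666)              # readable and writable across UIDs

        subprocess.run(
            [
                "docker", "run", "--rm",
                "-v", f"{data_dir}:/data",   # read-write mount (no ':ro' suffix)
                "some/connector-image:dev",  # placeholder image
                "spec",
            ],
            check=False,  # illustration only; the image may not exist locally
        )
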