airbyte-internal-ops 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- airbyte_internal_ops-0.3.0.dist-info/METADATA
+++ airbyte_internal_ops-0.3.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: airbyte-internal-ops
- Version: 0.3.0
+ Version: 0.3.1
  Summary: MCP and API interfaces that let the agents do the admin work
  Author-email: Aaron Steers <aj@airbyte.io>
  Keywords: admin,airbyte,api,mcp
--- airbyte_internal_ops-0.3.0.dist-info/RECORD
+++ airbyte_internal_ops-0.3.1.dist-info/RECORD
@@ -1,7 +1,7 @@
  airbyte_ops_mcp/__init__.py,sha256=tuzdlMkfnWBnsri5KGHM2M_xuNnzFk2u_aR79mmN7Yg,772
  airbyte_ops_mcp/_annotations.py,sha256=MO-SBDnbykxxHDESG7d8rviZZ4WlZgJKv0a8eBqcEzQ,1757
  airbyte_ops_mcp/constants.py,sha256=khcv9W3WkApIyPygEGgE2noBIqLomjoOMLxFBU1ArjA,5308
- airbyte_ops_mcp/gcp_auth.py,sha256=5k-k145ZoYhHLjyDES8nrA8f8BBihRI0ykrdD1IcfOs,3599
+ airbyte_ops_mcp/gcp_auth.py,sha256=i0cm1_xX4fj_31iKlfARpNvTaSr85iGTSw9KMf4f4MU,7206
  airbyte_ops_mcp/github_actions.py,sha256=wKnuIVmF4u1gMYNdSoryD_PUmvMz5SaHgOvbU0dsolA,9957
  airbyte_ops_mcp/github_api.py,sha256=uupbYKAkm7yLHK_1cDXYKl1bOYhUygZhG5IHspS7duE,8104
  airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -363,11 +363,11 @@ airbyte_ops_mcp/cloud_admin/auth.py,sha256=qE2Aqe0qbZB755KscL65s54Jz78-F-X5a8fXK
  airbyte_ops_mcp/cloud_admin/connection_config.py,sha256=9opGQer-cGMJANmm-LFLMwvMCNu3nzxa2n2XHkZj9Fw,4899
  airbyte_ops_mcp/cloud_admin/models.py,sha256=YZ3FbEW-tZa50khKTTl4Bzvy_LsGyyQd6qcpXo62jls,2670
  airbyte_ops_mcp/connection_config_retriever/__init__.py,sha256=Xoi-YvARrNPhECdpwEDDkdwEpnvj8zuUlwULpf4iRrU,800
- airbyte_ops_mcp/connection_config_retriever/audit_logging.py,sha256=GjT4dVa0TtvGDmiBz9qwzcYCnSf9hTo7UM6l7ubUNE8,2846
+ airbyte_ops_mcp/connection_config_retriever/audit_logging.py,sha256=QdOG9984NXeMaKeJnFUZ4oCOmqi37PBRG2NRBBjrZQQ,2753
  airbyte_ops_mcp/connection_config_retriever/retrieval.py,sha256=s6yeCyrboWkUd6KdaheEo87x-rLtQNTL8XeR8O9z2HI,12160
  airbyte_ops_mcp/connection_config_retriever/secrets_resolution.py,sha256=12g0lZzhCzAPl4Iv4eMW6d76mvXjIBGspOnNhywzks4,3644
  airbyte_ops_mcp/gcp_logs/__init__.py,sha256=IqkxclXJnD1U4L2at7aC9GYqPXnuLdYLgmkm3ZiIu6s,409
- airbyte_ops_mcp/gcp_logs/error_lookup.py,sha256=wtC2pXwUuJQcVyonIcduDyGxk8kjJ8Dj-Vyq9AdnYh4,12763
+ airbyte_ops_mcp/gcp_logs/error_lookup.py,sha256=Ufl1FtNQJKP_yWndVT1Xku1mT-gxW_0atmNMCYMXvOo,12757
  airbyte_ops_mcp/mcp/__init__.py,sha256=QqkNkxzdXlg-W03urBAQ3zmtOKFPf35rXgO9ceUjpng,334
  airbyte_ops_mcp/mcp/_guidance.py,sha256=48tQSnDnxqXtyGJxxgjz0ZiI814o_7Fj7f6R8jpQ7so,2375
  airbyte_ops_mcp/mcp/_http_headers.py,sha256=9TAH2RYhFR3z2JugW4Q3WrrqJIdaCzAbyA1GhtQ_EMM,7278
@@ -380,7 +380,7 @@ airbyte_ops_mcp/mcp/github.py,sha256=h3M3VJrq09y_F9ueQVCq3bUbVBNFuTNKprHtGU_ttio
  airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
  airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
  airbyte_ops_mcp/mcp/prerelease.py,sha256=nc6VU03ADVHWM3OjGKxbS5XqY4VoyRyrZNU_fyAtaOI,10465
- airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=FfGoq3aEj6ZUT4ysBIs1w7LzzwBeRXTaRvPGEx62RzI,25474
+ airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=DPzyHCT3yxj2kjkucefoVpsR71vscuJQ8tGgLs_lhv0,32068
  airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
  airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
  airbyte_ops_mcp/mcp/regression_tests.py,sha256=S1h-5S5gcZA4WEtIZyAQ836hd04tjSRRqMiYMx0S93g,16079
@@ -389,8 +389,8 @@ airbyte_ops_mcp/mcp/server_info.py,sha256=Yi4B1auW64QZGBDas5mro_vwTjvrP785TFNSBP
  airbyte_ops_mcp/prod_db_access/__init__.py,sha256=5pxouMPY1beyWlB0UwPnbaLTKTHqU6X82rbbgKY2vYU,1069
  airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=VUqEWZtharJUR-Cri_pMwtGh1C4Neu4s195mbEXlm-w,9190
  airbyte_ops_mcp/prod_db_access/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- airbyte_ops_mcp/prod_db_access/queries.py,sha256=BBPAQEfcing4G0Q9PEmI8C_9kN26sZc65ZGXd9WuFSw,14257
- airbyte_ops_mcp/prod_db_access/sql.py,sha256=hTbPY4r_rrtJ34B5eVpwyuBMLotyuP--UTv0vl3ZwBw,19432
+ airbyte_ops_mcp/prod_db_access/queries.py,sha256=TNxTY5Hf3ImHBX0_e_20-VbF3yzYm2mX3ykWzQXgpno,17754
+ airbyte_ops_mcp/prod_db_access/sql.py,sha256=xB7SJGnBSlY-ZB7ku_9QfvNIEldGEmCn-jJcAdes_LY,30407
  airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
  airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
  airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
@@ -399,7 +399,7 @@ airbyte_ops_mcp/regression_tests/cdk_secrets.py,sha256=iRjqqBS96KZoswfgT7ju-pE_p
  airbyte_ops_mcp/regression_tests/ci_output.py,sha256=rrvCVKKShc1iVPMuQJDBqSbsiAHIDpX8SA9j0Uwl_Cg,12718
  airbyte_ops_mcp/regression_tests/config.py,sha256=dwWeY0tatdbwl9BqbhZ7EljoZDCtKmGO5fvOAIxeXmA,5873
  airbyte_ops_mcp/regression_tests/connection_fetcher.py,sha256=5wIiA0VvCFNEc-fr6Po18gZMX3E5fyPOGf2SuVOqv5U,12799
- airbyte_ops_mcp/regression_tests/connection_secret_retriever.py,sha256=_qd_nBLx6Xc6yVQHht716sFELX8SgIE5q3R3R708tfw,4879
+ airbyte_ops_mcp/regression_tests/connection_secret_retriever.py,sha256=FhWNVWq7sON4nwUmVJv8BgXBOqg1YV4b5WuWyCzZ0LU,4695
  airbyte_ops_mcp/regression_tests/connector_runner.py,sha256=bappfBSq8dn3IyVAMS_XuzYEwWus23hkDCHLa2RFysI,9920
  airbyte_ops_mcp/regression_tests/evaluation_modes.py,sha256=lAL6pEDmy_XCC7_m4_NXjt_f6Z8CXeAhMkc0FU8bm_M,1364
  airbyte_ops_mcp/regression_tests/http_metrics.py,sha256=oTD7f2MnQOvx4plOxHop2bInQ0-whvuToSsrC7TIM-M,12469
@@ -414,7 +414,7 @@ airbyte_ops_mcp/regression_tests/regression/comparators.py,sha256=MJkLZEKHivgrG0
  airbyte_ops_mcp/regression_tests/validation/__init__.py,sha256=MBEwGOoNuqT4_oCahtoK62OKWIjUCfWa7vZTxNj_0Ek,1532
  airbyte_ops_mcp/regression_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdPgwu4d0hA0ZEjtsNh5gapvGydRv3_qk,12553
  airbyte_ops_mcp/regression_tests/validation/record_validators.py,sha256=RjauAhKWNwxMBTu0eNS2hMFNQVs5CLbQU51kp6FOVDk,7432
- airbyte_internal_ops-0.3.0.dist-info/METADATA,sha256=Gx40HXaZtFle9mxFDJQNYMGccjrZ3d0xirHsaWcg04s,5679
- airbyte_internal_ops-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- airbyte_internal_ops-0.3.0.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
- airbyte_internal_ops-0.3.0.dist-info/RECORD,,
+ airbyte_internal_ops-0.3.1.dist-info/METADATA,sha256=kx1iQ0YE42LjpsFpjJD7SECaYMHEjo36VjvSVf3BwHk,5679
+ airbyte_internal_ops-0.3.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ airbyte_internal_ops-0.3.1.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+ airbyte_internal_ops-0.3.1.dist-info/RECORD,,
--- airbyte_ops_mcp/connection_config_retriever/audit_logging.py
+++ airbyte_ops_mcp/connection_config_retriever/audit_logging.py
@@ -11,9 +11,8 @@ import logging
  import subprocess
  from typing import TYPE_CHECKING, Any, Callable

- from google.cloud import logging as gcloud_logging
-
  from airbyte_ops_mcp.constants import GCP_PROJECT_NAME
+ from airbyte_ops_mcp.gcp_auth import get_logging_client

  if TYPE_CHECKING:
      from airbyte_ops_mcp.connection_config_retriever.retrieval import (
@@ -23,21 +22,18 @@ if TYPE_CHECKING:
  LOGGER = logging.getLogger(__name__)

  # Lazy-initialized to avoid import-time GCP calls
- _logging_client: gcloud_logging.Client | None = None
  _airbyte_gcloud_logger: Any = None


  def _get_logger() -> Any:
      """Get the GCP Cloud Logger, initializing lazily on first use."""
-     global _logging_client, _airbyte_gcloud_logger
+     global _airbyte_gcloud_logger

      if _airbyte_gcloud_logger is not None:
          return _airbyte_gcloud_logger

-     _logging_client = gcloud_logging.Client(project=GCP_PROJECT_NAME)
-     _airbyte_gcloud_logger = _logging_client.logger(
-         "airbyte-cloud-connection-retriever"
-     )
+     logging_client = get_logging_client(GCP_PROJECT_NAME)
+     _airbyte_gcloud_logger = logging_client.logger("airbyte-cloud-connection-retriever")
      return _airbyte_gcloud_logger

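Note: the hunk above is plain lazy initialization behind a module-level cache, now delegating client construction to the new get_logging_client helper. A minimal, self-contained sketch of the same pattern (names are illustrative, not part of the package):

    from typing import Any, Callable

    _cached_logger: Any = None

    def get_lazy(factory: Callable[[], Any]) -> Any:
        """Build the expensive resource on first use, then reuse it."""
        global _cached_logger
        if _cached_logger is None:
            _cached_logger = factory()  # e.g. a GCP logger; constructed only once
        return _cached_logger
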
--- airbyte_ops_mcp/gcp_auth.py
+++ airbyte_ops_mcp/gcp_auth.py
@@ -6,94 +6,187 @@ the airbyte-ops-mcp codebase. It supports both standard Application Default
  Credentials (ADC) and the GCP_PROD_DB_ACCESS_CREDENTIALS environment variable
  used internally at Airbyte.

+ The preferred approach is to pass credentials directly to GCP client constructors
+ rather than relying on file-based ADC discovery. This module provides helpers
+ that construct credentials from JSON content in environment variables.
+
  Usage:
-     from airbyte_ops_mcp.gcp_auth import get_secret_manager_client
+     from airbyte_ops_mcp.gcp_auth import get_gcp_credentials, get_secret_manager_client
+
+     # Get credentials object to pass to any GCP client
+     credentials = get_gcp_credentials()
+     client = logging.Client(project="my-project", credentials=credentials)

-     # Get a properly authenticated Secret Manager client
+     # Or use the convenience helper for Secret Manager
      client = get_secret_manager_client()
  """

  from __future__ import annotations

+ import json
  import logging
  import os
- import tempfile
- from pathlib import Path
+ import sys
+ import threading

+ import google.auth
+ from google.cloud import logging as gcp_logging
  from google.cloud import secretmanager
+ from google.oauth2 import service_account

  from airbyte_ops_mcp.constants import ENV_GCP_PROD_DB_ACCESS_CREDENTIALS

  logger = logging.getLogger(__name__)

- # Environment variable name (internal to GCP libraries)
- ENV_GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"

- # Module-level cache for the credentials file path
- _credentials_file_path: str | None = None
+ def _get_identity_from_service_account_info(info: dict) -> str | None:
+     """Extract service account identity from parsed JSON info.
+
+     Only accesses the 'client_email' key to avoid any risk of leaking
+     other credential material.

+     Args:
+         info: Parsed service account JSON as a dict.

- def ensure_adc_credentials() -> str | None:
-     """Ensure GCP Application Default Credentials are available.
+     Returns:
+         The client_email if present and a string, otherwise None.
+     """
+     client_email = info.get("client_email")
+     if isinstance(client_email, str):
+         return client_email
+     return None

-     If GOOGLE_APPLICATION_CREDENTIALS is not set but GCP_PROD_DB_ACCESS_CREDENTIALS is,
-     write the JSON credentials to a temp file and set GOOGLE_APPLICATION_CREDENTIALS
-     to point to that file. This provides a fallback for internal employees who use
-     GCP_PROD_DB_ACCESS_CREDENTIALS as their standard credential source.

-     Note: GOOGLE_APPLICATION_CREDENTIALS must be a file path, not JSON content.
-     The GCP_PROD_DB_ACCESS_CREDENTIALS env var contains the JSON content directly,
-     so we write it to a temp file first.
+ def _get_identity_from_credentials(
+     credentials: google.auth.credentials.Credentials,
+ ) -> str | None:
+     """Extract identity from a credentials object using safe attribute access.

-     This function is idempotent and safe to call multiple times.
+     Only accesses known-safe attributes that don't trigger network calls
+     or token refresh.
+
+     Args:
+         credentials: A GCP credentials object.

      Returns:
-         The path to the credentials file if one was created, or None if
-         GOOGLE_APPLICATION_CREDENTIALS was already set.
+         The service account email if available, otherwise None.
      """
-     global _credentials_file_path
+     # Try service_account_email first (most common for service accounts)
+     identity = getattr(credentials, "service_account_email", None)
+     if isinstance(identity, str):
+         return identity
+
+     # Try signer_email as fallback (sometimes present on impersonated creds)
+     identity = getattr(credentials, "signer_email", None)
+     if isinstance(identity, str):
+         return identity
+
+     return None
+

-     # If GOOGLE_APPLICATION_CREDENTIALS is already set, nothing to do
-     if ENV_GOOGLE_APPLICATION_CREDENTIALS in os.environ:
-         return None
+ # Default scopes for GCP services used by this module
+ DEFAULT_GCP_SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]

-     # Check if we have the fallback credentials
-     gsm_creds = os.getenv(ENV_GCP_PROD_DB_ACCESS_CREDENTIALS)
-     if not gsm_creds:
-         return None
+ # Module-level cache for credentials (thread-safe)
+ _cached_credentials: google.auth.credentials.Credentials | None = None
+ _credentials_lock = threading.Lock()

-     # Reuse the same file path if we've already written credentials and file still exists
-     if _credentials_file_path is not None and Path(_credentials_file_path).exists():
-         os.environ[ENV_GOOGLE_APPLICATION_CREDENTIALS] = _credentials_file_path
-         return _credentials_file_path

-     # Write credentials to a temp file
-     # Use a unique filename based on PID to avoid collisions between processes
-     creds_file = Path(tempfile.gettempdir()) / f"gcp_prod_db_creds_{os.getpid()}.json"
-     creds_file.write_text(gsm_creds)
+ def get_gcp_credentials() -> google.auth.credentials.Credentials:
+     """Get GCP credentials, preferring direct JSON parsing over file-based ADC.

-     # Set restrictive permissions (owner read/write only)
-     creds_file.chmod(0o600)
+     This function resolves credentials in the following order:
+     1. GCP_PROD_DB_ACCESS_CREDENTIALS env var (JSON content) - parsed directly
+     2. Standard ADC discovery (workload identity, gcloud auth, GOOGLE_APPLICATION_CREDENTIALS)

-     _credentials_file_path = str(creds_file)
-     os.environ[ENV_GOOGLE_APPLICATION_CREDENTIALS] = _credentials_file_path
+     The credentials are cached after first resolution for efficiency.
+     Uses the cloud-platform scope which provides access to all GCP services.

-     logger.debug(
-         f"Wrote {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS} to {creds_file} and set "
-         f"{ENV_GOOGLE_APPLICATION_CREDENTIALS}"
-     )
+     Returns:
+         A Credentials object that can be passed to any GCP client constructor.

-     return _credentials_file_path
+     Raises:
+         google.auth.exceptions.DefaultCredentialsError: If no credentials can be found.
+     """
+     global _cached_credentials
+
+     # Return cached credentials if available (fast path without lock)
+     if _cached_credentials is not None:
+         return _cached_credentials
+
+     # Acquire lock for thread-safe credential initialization
+     with _credentials_lock:
+         # Double-check after acquiring lock (another thread may have initialized)
+         if _cached_credentials is not None:
+             return _cached_credentials
+
+         # Try GCP_PROD_DB_ACCESS_CREDENTIALS first (JSON content in env var)
+         creds_json = os.getenv(ENV_GCP_PROD_DB_ACCESS_CREDENTIALS)
+         if creds_json:
+             try:
+                 creds_dict = json.loads(creds_json)
+                 credentials = service_account.Credentials.from_service_account_info(
+                     creds_dict,
+                     scopes=DEFAULT_GCP_SCOPES,
+                 )
+                 # Extract identity safely (only after successful credential creation)
+                 identity = _get_identity_from_service_account_info(creds_dict)
+                 identity_str = f" (identity: {identity})" if identity else ""
+                 print(
+                     f"GCP credentials loaded from {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS}{identity_str}",
+                     file=sys.stderr,
+                 )
+                 logger.debug(
+                     f"Loaded GCP credentials from {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS} env var"
+                 )
+                 _cached_credentials = credentials
+                 return credentials
+             except (json.JSONDecodeError, ValueError) as e:
+                 # Log only exception type to avoid any risk of leaking credential content
+                 logger.warning(
+                     f"Failed to parse {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS}: "
+                     f"{type(e).__name__}. Falling back to ADC discovery."
+                 )
+
+         # Fall back to standard ADC discovery
+         credentials, project = google.auth.default(scopes=DEFAULT_GCP_SCOPES)
+         # Extract identity safely from ADC credentials
+         identity = _get_identity_from_credentials(credentials)
+         identity_str = f" (identity: {identity})" if identity else ""
+         project_str = f" (project: {project})" if project else ""
+         print(
+             f"GCP credentials loaded via ADC{project_str}{identity_str}",
+             file=sys.stderr,
+         )
+         logger.debug(f"Loaded GCP credentials via ADC discovery (project: {project})")
+         _cached_credentials = credentials
+         return credentials


  def get_secret_manager_client() -> secretmanager.SecretManagerServiceClient:
      """Get a Secret Manager client with proper credential handling.

-     This function ensures GCP credentials are available (supporting the
-     GCP_PROD_DB_ACCESS_CREDENTIALS fallback) before creating the client.
+     This function uses get_gcp_credentials() to resolve credentials and passes
+     them directly to the client constructor.

      Returns:
          A configured SecretManagerServiceClient instance.
      """
-     ensure_adc_credentials()
-     return secretmanager.SecretManagerServiceClient()
+     credentials = get_gcp_credentials()
+     return secretmanager.SecretManagerServiceClient(credentials=credentials)
+
+
+ def get_logging_client(project: str) -> gcp_logging.Client:
+     """Get a Cloud Logging client with proper credential handling.
+
+     This function uses get_gcp_credentials() to resolve credentials and passes
+     them directly to the client constructor.
+
+     Args:
+         project: The GCP project ID to use for logging operations.
+
+     Returns:
+         A configured Cloud Logging Client instance.
+     """
+     credentials = get_gcp_credentials()
+     return gcp_logging.Client(project=project, credentials=credentials)
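Note: get_gcp_credentials above uses double-checked locking: a lock-free fast path, then a re-check under the lock so concurrent callers build credentials exactly once. A minimal sketch of just that pattern (names are illustrative):

    import threading
    from typing import Any, Callable

    _value: Any = None
    _lock = threading.Lock()

    def get_once(factory: Callable[[], Any]) -> Any:
        """Initialize a shared value exactly once, even under concurrency."""
        global _value
        if _value is not None:  # fast path: skip the lock once initialized
            return _value
        with _lock:
            if _value is None:  # re-check: another thread may have won the race
                _value = factory()
            return _value
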
--- airbyte_ops_mcp/gcp_logs/error_lookup.py
+++ airbyte_ops_mcp/gcp_logs/error_lookup.py
@@ -24,10 +24,12 @@ from datetime import UTC, datetime, timedelta
  from enum import StrEnum
  from typing import Any

- from google.cloud import logging
+ from google.cloud import logging as gcp_logging
  from google.cloud.logging_v2 import entries
  from pydantic import BaseModel, Field

+ from airbyte_ops_mcp.gcp_auth import get_logging_client
+
  # Default GCP project for Airbyte Cloud
  DEFAULT_GCP_PROJECT = "prod-ab-cloud-proj"

@@ -291,14 +293,13 @@ def fetch_error_logs(
      specified error ID, then fetches related log entries (multi-line stack traces)
      from the same timestamp and resource.
      """
-     client_options = {"quota_project_id": project}
-     client = logging.Client(project=project, client_options=client_options)
+     client = get_logging_client(project)

      filter_str = _build_filter(error_id, lookback_days, min_severity_filter)

      entries_iterator = client.list_entries(
          filter_=filter_str,
-         order_by=logging.DESCENDING,
+         order_by=gcp_logging.DESCENDING,
      )

      initial_matches = list(entries_iterator)
@@ -356,7 +357,7 @@ def fetch_error_logs(

      related_entries = client.list_entries(
          filter_=related_filter,
-         order_by=logging.ASCENDING,
+         order_by=gcp_logging.ASCENDING,
      )

      for entry in related_entries:
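Note: after this change, callers obtain the Cloud Logging client through get_logging_client and take the ordering constants from the aliased gcp_logging import. A hedged usage sketch (the filter string is illustrative; the project ID is the DEFAULT_GCP_PROJECT defined above):

    from google.cloud import logging as gcp_logging

    from airbyte_ops_mcp.gcp_auth import get_logging_client

    client = get_logging_client("prod-ab-cloud-proj")
    entries = client.list_entries(
        filter_='severity>=ERROR AND timestamp>="2024-01-01T00:00:00Z"',
        order_by=gcp_logging.DESCENDING,  # newest entries first
    )
    for entry in entries:
        print(entry.timestamp, entry.payload)
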
--- airbyte_ops_mcp/mcp/prod_db_queries.py
+++ airbyte_ops_mcp/mcp/prod_db_queries.py
@@ -8,6 +8,7 @@ airbyte_ops_mcp.prod_db_access.queries for use by AI agents.
  from __future__ import annotations

  from datetime import datetime
+ from enum import StrEnum
  from typing import Annotated, Any

  import requests
@@ -25,11 +26,21 @@ from airbyte_ops_mcp.prod_db_access.queries import (
      query_dataplanes_list,
      query_failed_sync_attempts_for_connector,
      query_new_connector_releases,
-     query_sync_results_for_version,
+     query_recent_syncs_for_connector,
+     query_syncs_for_version_pinned_connector,
      query_workspace_info,
      query_workspaces_by_email_domain,
  )

+
+ class StatusFilter(StrEnum):
+     """Filter for job status in sync queries."""
+
+     ALL = "all"
+     SUCCEEDED = "succeeded"
+     FAILED = "failed"
+
+
  # Cloud UI base URL for building connection URLs
  CLOUD_UI_BASE_URL = "https://cloud.airbyte.com"

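Note: StatusFilter is a StrEnum, so each member is also a plain str; tool callers can pass either the enum member or its string value. A quick self-contained illustration:

    from enum import StrEnum

    class StatusFilter(StrEnum):
        ALL = "all"
        SUCCEEDED = "succeeded"
        FAILED = "failed"

    assert StatusFilter.FAILED == "failed"  # StrEnum members compare equal to str
    assert StatusFilter("succeeded") is StatusFilter.SUCCEEDED  # round-trips from str
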
@@ -293,7 +304,7 @@ def query_prod_actors_by_connector_version(
      read_only=True,
      idempotent=True,
  )
- def query_prod_connector_version_sync_results(
+ def query_prod_recent_syncs_for_version_pinned_connector(
      connector_version_id: Annotated[
          str,
          Field(description="Connector version UUID to find sync results for"),
@@ -314,11 +325,16 @@ def query_prod_connector_version_sync_results(
          ),
      ] = False,
  ) -> list[dict[str, Any]]:
-     """List sync job results for actors pinned to a specific connector version.
+     """List sync job results for actors PINNED to a specific connector version.

-     Returns sync job results for connections using actors that are pinned
-     to the specified version. Useful for monitoring rollout health and
-     identifying issues with specific connector versions.
+     IMPORTANT: This tool ONLY returns results for actors that have been explicitly
+     pinned to the specified version via scoped_configuration. Most connections run
+     unpinned and will NOT appear in these results.
+
+     Use this tool when you want to monitor rollout health for actors that have been
+     explicitly pinned to a pre-release or specific version. For finding healthy
+     connections across ALL actors using a connector type (regardless of pinning),
+     use query_prod_recent_syncs_for_connector instead.

      The actor_id field is the actor ID (superset of source_id/destination_id).

@@ -327,7 +343,7 @@ def query_prod_connector_version_sync_results(
      pin_origin_type, pin_origin, workspace_id, workspace_name, organization_id,
      dataplane_group_id, dataplane_name
      """
-     return query_sync_results_for_version(
+     return query_syncs_for_version_pinned_connector(
          connector_version_id,
          days=days,
          limit=limit,
@@ -335,6 +351,163 @@ def query_prod_connector_version_sync_results(
  )


+ @mcp_tool(
+     read_only=True,
+     idempotent=True,
+     open_world=True,
+ )
+ def query_prod_recent_syncs_for_connector(
+     source_definition_id: Annotated[
+         str | None,
+         Field(
+             description=(
+                 "Source connector definition ID (UUID) to search for. "
+                 "Provide this OR source_canonical_name OR destination_definition_id "
+                 "OR destination_canonical_name (exactly one required). "
+                 "Example: 'afa734e4-3571-11ec-991a-1e0031268139' for YouTube Analytics."
+             ),
+             default=None,
+         ),
+     ],
+     source_canonical_name: Annotated[
+         str | None,
+         Field(
+             description=(
+                 "Canonical source connector name to search for. "
+                 "Provide this OR source_definition_id OR destination_definition_id "
+                 "OR destination_canonical_name (exactly one required). "
+                 "Examples: 'source-youtube-analytics', 'YouTube Analytics'."
+             ),
+             default=None,
+         ),
+     ],
+     destination_definition_id: Annotated[
+         str | None,
+         Field(
+             description=(
+                 "Destination connector definition ID (UUID) to search for. "
+                 "Provide this OR destination_canonical_name OR source_definition_id "
+                 "OR source_canonical_name (exactly one required). "
+                 "Example: '94bd199c-2ff0-4aa2-b98e-17f0acb72610' for DuckDB."
+             ),
+             default=None,
+         ),
+     ],
+     destination_canonical_name: Annotated[
+         str | None,
+         Field(
+             description=(
+                 "Canonical destination connector name to search for. "
+                 "Provide this OR destination_definition_id OR source_definition_id "
+                 "OR source_canonical_name (exactly one required). "
+                 "Examples: 'destination-duckdb', 'DuckDB'."
+             ),
+             default=None,
+         ),
+     ],
+     status_filter: Annotated[
+         StatusFilter,
+         Field(
+             description=(
+                 "Filter by job status: 'all' (default), 'succeeded', or 'failed'. "
+                 "Use 'succeeded' to find healthy connections with recent successful syncs. "
+                 "Use 'failed' to find connections with recent failures."
+             ),
+             default=StatusFilter.ALL,
+         ),
+     ],
+     organization_id: Annotated[
+         str | OrganizationAliasEnum | None,
+         Field(
+             description=(
+                 "Optional organization ID (UUID) or alias to filter results. "
+                 "If provided, only syncs from this organization will be returned. "
+                 "Accepts '@airbyte-internal' as an alias for the Airbyte internal org."
+             ),
+             default=None,
+         ),
+     ],
+     lookback_days: Annotated[
+         int,
+         Field(description="Number of days to look back (default: 7)", default=7),
+     ],
+     limit: Annotated[
+         int,
+         Field(description="Maximum number of results (default: 100)", default=100),
+     ],
+ ) -> list[dict[str, Any]]:
+     """List recent sync jobs for ALL actors using a connector type.
+
+     This tool finds all actors with the given connector definition and returns their
+     recent sync jobs, regardless of whether they have explicit version pins. It filters
+     out deleted actors, deleted workspaces, and deprecated connections.
+
+     Use this tool to:
+     - Find healthy connections with recent successful syncs (status_filter='succeeded')
+     - Investigate connector issues across all users (status_filter='failed')
+     - Get an overview of all recent sync activity (status_filter='all')
+
+     Supports both SOURCE and DESTINATION connectors. Provide exactly one of:
+     source_definition_id, source_canonical_name, destination_definition_id,
+     or destination_canonical_name.
+
+     Key fields in results:
+     - job_status: 'succeeded', 'failed', 'cancelled', etc.
+     - connection_id, connection_name: The connection that ran the sync
+     - actor_id, actor_name: The source or destination actor
+     - pin_origin_type, pin_origin, pinned_version_id: Version pin context (NULL if not pinned)
+     """
+     # Validate that exactly one connector parameter is provided
+     provided_params = [
+         source_definition_id,
+         source_canonical_name,
+         destination_definition_id,
+         destination_canonical_name,
+     ]
+     num_provided = sum(p is not None for p in provided_params)
+     if num_provided != 1:
+         raise PyAirbyteInputError(
+             message=(
+                 "Exactly one of source_definition_id, source_canonical_name, "
+                 "destination_definition_id, or destination_canonical_name must be provided."
+             ),
+         )
+
+     # Determine if this is a destination connector
+     is_destination = (
+         destination_definition_id is not None or destination_canonical_name is not None
+     )
+
+     # Resolve canonical name to definition ID if needed
+     resolved_definition_id: str
+     if source_canonical_name:
+         resolved_definition_id = _resolve_canonical_name_to_definition_id(
+             canonical_name=source_canonical_name,
+         )
+     elif destination_canonical_name:
+         resolved_definition_id = _resolve_canonical_name_to_definition_id(
+             canonical_name=destination_canonical_name,
+         )
+     elif source_definition_id:
+         resolved_definition_id = source_definition_id
+     else:
+         # We've validated exactly one param is provided, so this must be set
+         assert destination_definition_id is not None
+         resolved_definition_id = destination_definition_id
+
+     # Resolve organization ID alias
+     resolved_organization_id = OrganizationAliasEnum.resolve(organization_id)
+
+     return query_recent_syncs_for_connector(
+         connector_definition_id=resolved_definition_id,
+         is_destination=is_destination,
+         status_filter=status_filter,
+         organization_id=resolved_organization_id,
+         days=lookback_days,
+         limit=limit,
+     )
+
+
  @mcp_tool(
      read_only=True,
      idempotent=True,
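Note: the argument validation in the new tool above is an exactly-one-of check over the four connector parameters; it generalizes to a small helper like this (hypothetical name, not in the package):

    def exactly_one(*values: object) -> bool:
        """True when exactly one of the given values is not None."""
        return sum(v is not None for v in values) == 1

    assert exactly_one("source-duckdb", None, None, None)
    assert not exactly_one(None, None, None, None)  # zero provided
    assert not exactly_one("a", None, "b", None)    # two provided
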
--- airbyte_ops_mcp/prod_db_access/queries.py
+++ airbyte_ops_mcp/prod_db_access/queries.py
@@ -29,6 +29,12 @@ from airbyte_ops_mcp.prod_db_access.sql import (
      SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
      SELECT_NEW_CONNECTOR_RELEASES,
      SELECT_ORG_WORKSPACES,
+     SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR,
+     SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR,
+     SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR,
+     SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR,
+     SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR,
+     SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR,
      SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
      SELECT_SYNC_RESULTS_FOR_VERSION,
      SELECT_WORKSPACE_INFO,
@@ -227,7 +233,7 @@ def query_actors_pinned_to_version(
  )


- def query_sync_results_for_version(
+ def query_syncs_for_version_pinned_connector(
      connector_version_id: str,
      days: int = 7,
      limit: int = 100,
@@ -320,6 +326,81 @@ def query_failed_sync_attempts_for_connector(
      return results


+ def query_recent_syncs_for_connector(
+     connector_definition_id: str,
+     is_destination: bool = False,
+     status_filter: str = "all",
+     organization_id: str | None = None,
+     days: int = 7,
+     limit: int = 100,
+     *,
+     gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+ ) -> list[dict[str, Any]]:
+     """Query recent sync jobs for ALL actors using a connector definition.
+
+     Finds all actors with the given actor_definition_id and returns their sync jobs,
+     regardless of whether they have explicit version pins. Filters out deleted actors,
+     deleted workspaces, and deprecated connections.
+
+     This is useful for finding healthy connections with recent successful syncs,
+     or for investigating connector issues across all users.
+
+     Args:
+         connector_definition_id: Connector definition UUID to filter by
+         is_destination: If True, query destination connectors; if False, query sources
+         status_filter: Filter by job status - "all", "succeeded", or "failed"
+         organization_id: Optional organization UUID to filter results by (post-query filter)
+         days: Number of days to look back (default: 7)
+         limit: Maximum number of results (default: 100)
+         gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+     Returns:
+         List of sync job records with workspace info and optional pin context
+     """
+     cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
+
+     # Select the appropriate query based on connector type and status filter
+     if is_destination:
+         if status_filter == "succeeded":
+             query = SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR
+             query_name = "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR"
+         elif status_filter == "failed":
+             query = SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR
+             query_name = "SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR"
+         else:
+             query = SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR
+             query_name = "SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR"
+     else:
+         if status_filter == "succeeded":
+             query = SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR
+             query_name = "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR"
+         elif status_filter == "failed":
+             query = SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR
+             query_name = "SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR"
+         else:
+             query = SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR
+             query_name = "SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR"
+
+     results = _run_sql_query(
+         query,
+         parameters={
+             "connector_definition_id": connector_definition_id,
+             "cutoff_date": cutoff_date,
+             "limit": limit,
+         },
+         query_name=query_name,
+         gsm_client=gsm_client,
+     )
+
+     # Post-query filter by organization_id if provided
+     if organization_id is not None:
+         results = [
+             r for r in results if str(r.get("organization_id")) == organization_id
+         ]
+
+     return results
+
+
  def query_dataplanes_list(
      *,
      gsm_client: secretmanager.SecretManagerServiceClient | None = None,
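Note: the nested if/else in query_recent_syncs_for_connector is a 2x3 dispatch on (is_destination, status_filter). An equivalent lookup-table sketch, assuming status_filter is always one of the three known values (the strings stand in for the imported SQL constants):

    QUERY_BY_KIND: dict[tuple[bool, str], str] = {
        (False, "all"): "SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR",
        (False, "succeeded"): "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR",
        (False, "failed"): "SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR",
        (True, "all"): "SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR",
        (True, "succeeded"): "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR",
        (True, "failed"): "SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR",
    }

    query_name = QUERY_BY_KIND[(True, "failed")]  # -> the failed-destinations query
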
--- airbyte_ops_mcp/prod_db_access/sql.py
+++ airbyte_ops_mcp/prod_db_access/sql.py
@@ -360,6 +360,305 @@ SELECT_SUCCESSFUL_SYNCS_FOR_VERSION = sqlalchemy.text(
      """
  )

+ # Get recent sync results for ALL actors using a SOURCE connector definition.
+ # Finds all actors with the given actor_definition_id and returns their sync attempts,
+ # regardless of whether they have explicit version pins.
+ # Query starts from jobs table to leverage indexed columns.
+ # The LEFT JOIN to scoped_configuration provides pin context when available (pin_origin_type,
+ # pin_origin, pinned_version_id will be NULL for unpinned actors).
+ # Status filtering ('all', 'succeeded', 'failed') is handled at the application layer by
+ # selecting among different SQL query constants; this query returns all statuses.
+ SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at AS job_started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         actor.tombstone AS actor_tombstone,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         scoped_configuration.value AS pinned_version_id
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+         AND connection.status != 'deprecated'
+     JOIN actor
+         ON connection.source_id = actor.id
+         AND actor.actor_definition_id = :connector_definition_id
+         AND actor.tombstone = false
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+         AND workspace.tombstone = false
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     LEFT JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # Same as above but filtered to only successful syncs
+ SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at AS job_started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         actor.tombstone AS actor_tombstone,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         scoped_configuration.value AS pinned_version_id
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+         AND connection.status != 'deprecated'
+     JOIN actor
+         ON connection.source_id = actor.id
+         AND actor.actor_definition_id = :connector_definition_id
+         AND actor.tombstone = false
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+         AND workspace.tombstone = false
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     LEFT JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.status = 'succeeded'
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # Same as above but filtered to only failed syncs
+ SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at AS job_started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         actor.tombstone AS actor_tombstone,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         scoped_configuration.value AS pinned_version_id
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+         AND connection.status != 'deprecated'
+     JOIN actor
+         ON connection.source_id = actor.id
+         AND actor.actor_definition_id = :connector_definition_id
+         AND actor.tombstone = false
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+         AND workspace.tombstone = false
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     LEFT JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.status = 'failed'
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # Get recent sync results for ALL actors using a DESTINATION connector definition.
+ SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at AS job_started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         actor.tombstone AS actor_tombstone,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         scoped_configuration.value AS pinned_version_id
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+         AND connection.status != 'deprecated'
+     JOIN actor
+         ON connection.destination_id = actor.id
+         AND actor.actor_definition_id = :connector_definition_id
+         AND actor.tombstone = false
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+         AND workspace.tombstone = false
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     LEFT JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # Same as above but filtered to only successful syncs
+ SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at AS job_started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         actor.tombstone AS actor_tombstone,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         scoped_configuration.value AS pinned_version_id
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+         AND connection.status != 'deprecated'
+     JOIN actor
+         ON connection.destination_id = actor.id
+         AND actor.actor_definition_id = :connector_definition_id
+         AND actor.tombstone = false
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+         AND workspace.tombstone = false
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     LEFT JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.status = 'succeeded'
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # Same as above but filtered to only failed syncs
+ SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at AS job_started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         actor.tombstone AS actor_tombstone,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         scoped_configuration.value AS pinned_version_id
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+         AND connection.status != 'deprecated'
+     JOIN actor
+         ON connection.destination_id = actor.id
+         AND actor.actor_definition_id = :connector_definition_id
+         AND actor.tombstone = false
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+         AND workspace.tombstone = false
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     LEFT JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.status = 'failed'
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
  # Get failed attempt results for ALL actors using a connector definition.
  # Finds all actors with the given actor_definition_id and returns their failed sync attempts,
  # regardless of whether they have explicit version pins.
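Note: these constants are sqlalchemy.text() clauses with named bind parameters (:connector_definition_id, :cutoff_date, :limit). A hedged sketch of how such a clause executes directly; the engine URL and values are illustrative, and the real package routes execution through _run_sql_query instead:

    from datetime import datetime, timedelta, timezone

    import sqlalchemy

    engine = sqlalchemy.create_engine("postgresql+psycopg2://user:pass@host/db")
    cutoff = datetime.now(timezone.utc) - timedelta(days=7)
    with engine.connect() as conn:
        rows = conn.execute(
            SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR,  # defined above
            {
                "connector_definition_id": "afa734e4-3571-11ec-991a-1e0031268139",
                "cutoff_date": cutoff,
                "limit": 100,
            },
        ).mappings().all()  # each row as a dict-like mapping
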
--- airbyte_ops_mcp/regression_tests/connection_secret_retriever.py
+++ airbyte_ops_mcp/regression_tests/connection_secret_retriever.py
@@ -39,7 +39,6 @@ from airbyte_ops_mcp.connection_config_retriever import (
      ConnectionObject,
      retrieve_objects,
  )
- from airbyte_ops_mcp.gcp_auth import ensure_adc_credentials

  if TYPE_CHECKING:
      from airbyte_ops_mcp.regression_tests.connection_fetcher import ConnectionData
@@ -85,9 +84,6 @@ def retrieve_unmasked_config(
      Returns:
          The unmasked source config dict, or None if retrieval fails.
      """
-     # Ensure GCP credentials are available (supports GCP_PROD_DB_ACCESS_CREDENTIALS fallback)
-     ensure_adc_credentials()
-

      # Only request the source config - that's all we need for secrets
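Note: with ensure_adc_credentials removed, callers no longer pre-seed ADC state before building clients; credential resolution now happens (and is cached) inside gcp_auth itself. A hedged usage sketch:

    from airbyte_ops_mcp.gcp_auth import get_secret_manager_client

    # Credentials are resolved once inside gcp_auth (env-var JSON first,
    # then standard ADC discovery) and reused for subsequent clients.
    client = get_secret_manager_client()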