airbyte-internal-ops 0.1.11__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.1.dist-info}/METADATA +2 -2
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.1.dist-info}/RECORD +41 -40
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.1.dist-info}/entry_points.txt +1 -0
- airbyte_ops_mcp/__init__.py +2 -2
- airbyte_ops_mcp/cli/cloud.py +264 -301
- airbyte_ops_mcp/cloud_admin/api_client.py +51 -26
- airbyte_ops_mcp/cloud_admin/auth.py +32 -0
- airbyte_ops_mcp/cloud_admin/connection_config.py +2 -2
- airbyte_ops_mcp/constants.py +18 -0
- airbyte_ops_mcp/github_actions.py +94 -5
- airbyte_ops_mcp/mcp/_http_headers.py +254 -0
- airbyte_ops_mcp/mcp/_mcp_utils.py +2 -2
- airbyte_ops_mcp/mcp/cloud_connector_versions.py +162 -52
- airbyte_ops_mcp/mcp/github.py +34 -1
- airbyte_ops_mcp/mcp/prod_db_queries.py +67 -24
- airbyte_ops_mcp/mcp/{live_tests.py → regression_tests.py} +165 -152
- airbyte_ops_mcp/mcp/server.py +84 -11
- airbyte_ops_mcp/prod_db_access/db_engine.py +15 -11
- airbyte_ops_mcp/prod_db_access/queries.py +27 -15
- airbyte_ops_mcp/prod_db_access/sql.py +17 -16
- airbyte_ops_mcp/{live_tests → regression_tests}/__init__.py +3 -3
- airbyte_ops_mcp/{live_tests → regression_tests}/cdk_secrets.py +1 -1
- airbyte_ops_mcp/{live_tests → regression_tests}/connection_secret_retriever.py +3 -3
- airbyte_ops_mcp/{live_tests → regression_tests}/connector_runner.py +1 -1
- airbyte_ops_mcp/{live_tests → regression_tests}/message_cache/__init__.py +3 -1
- airbyte_ops_mcp/{live_tests → regression_tests}/regression/__init__.py +1 -1
- airbyte_ops_mcp/{live_tests → regression_tests}/schema_generation.py +3 -1
- airbyte_ops_mcp/{live_tests → regression_tests}/validation/__init__.py +2 -2
- airbyte_ops_mcp/{live_tests → regression_tests}/validation/record_validators.py +4 -2
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.1.dist-info}/WHEEL +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/ci_output.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/commons/__init__.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/config.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/connection_fetcher.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/evaluation_modes.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/http_metrics.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/message_cache/duckdb_cache.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/models.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/obfuscation.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/regression/comparators.py +0 -0
- /airbyte_ops_mcp/{live_tests → regression_tests}/validation/catalog_validators.py +0 -0

@@ -52,6 +52,7 @@ def _is_tailscale_connected() -> bool:
     Detection methods:
     1. Check for tailscale0 network interface (Linux)
     2. Run 'tailscale status --json' and check backend state (cross-platform)
+    3. Check macOS-specific Tailscale.app location if tailscale not in PATH
     """
     # Method 1: Check for tailscale0 interface (Linux)
     try:

@@ -63,6 +64,13 @@ def _is_tailscale_connected() -> bool:
 
     # Method 2: Check tailscale CLI status
     tailscale_path = shutil.which("tailscale")
+
+    # Method 3: On macOS, check the standard Tailscale.app location if not in PATH
+    if not tailscale_path and os.path.exists(
+        "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
+    ):
+        tailscale_path = "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
+
     if tailscale_path:
         try:
             result = subprocess.run(

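The new fallback makes the Tailscale check usable on macOS machines where the tailscale binary is not on PATH. A minimal standalone sketch of how the three detection methods described in the docstring can fit together; the helper name, the /sys/class/net/tailscale0 check for method 1, and the BackendState comparison are assumptions for illustration, not a copy of the packaged implementation:

    import json
    import os
    import shutil
    import subprocess

    def _is_tailscale_connected_sketch() -> bool:
        # Method 1: the tailscale0 interface exists on Linux hosts (assumed check)
        if os.path.exists("/sys/class/net/tailscale0"):
            return True

        # Method 2/3: locate the CLI, falling back to the macOS app bundle
        tailscale_path = shutil.which("tailscale")
        if not tailscale_path and os.path.exists(
            "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
        ):
            tailscale_path = "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
        if not tailscale_path:
            return False

        # 'tailscale status --json' reports a backend state such as "Running" when connected
        result = subprocess.run(
            [tailscale_path, "status", "--json"],
            capture_output=True, text=True, check=False,
        )
        if result.returncode != 0:
            return False
        return json.loads(result.stdout).get("BackendState") == "Running"
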
@@ -158,24 +166,20 @@ def _get_connector() -> Connector:
 
 
 def _get_secret_value(
-    gsm_client: secretmanager.SecretManagerServiceClient,
+    gsm_client: secretmanager.SecretManagerServiceClient,
+    secret_id: str,
 ) -> str:
-    """Get the value of the
+    """Get the value of the latest version of a secret.
 
     Args:
         gsm_client: GCP Secret Manager client
-        secret_id: The
+        secret_id: The full resource ID of the secret
+            (e.g., "projects/123/secrets/my-secret")
 
     Returns:
-        The value of the
+        The value of the latest version of the secret
     """
-    response = gsm_client.
-        request={"parent": secret_id, "filter": "state:ENABLED"}
-    )
-    if len(response.versions) == 0:
-        raise ValueError(f"No enabled version of secret {secret_id} found")
-    enabled_version = response.versions[0]
-    response = gsm_client.access_secret_version(name=enabled_version.name)
+    response = gsm_client.access_secret_version(name=f"{secret_id}/versions/latest")
     return response.payload.data.decode("UTF-8")
 
 

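The rewritten helper drops the list-and-filter logic in favor of Secret Manager's built-in "latest" version alias. A minimal standalone sketch of the same call pattern against the google-cloud-secret-manager client; the function name and the example resource ID are placeholders:

    from google.cloud import secretmanager

    def get_latest_secret_value(secret_id: str) -> str:
        """Access the latest version of a secret by its full resource ID."""
        client = secretmanager.SecretManagerServiceClient()
        # e.g. secret_id = "projects/123/secrets/my-secret"
        response = client.access_secret_version(name=f"{secret_id}/versions/latest")
        return response.payload.data.decode("UTF-8")
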
@@ -24,7 +24,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_CONNECTIONS_BY_CONNECTOR_AND_ORG,
     SELECT_CONNECTOR_VERSIONS,
     SELECT_DATAPLANES_LIST,
-
+    SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
     SELECT_NEW_CONNECTOR_RELEASES,
     SELECT_ORG_WORKSPACES,
     SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,

@@ -225,43 +225,55 @@ def query_sync_results_for_version(
     )
 
 
-def
-
+def query_failed_sync_attempts_for_connector(
+    connector_definition_id: str,
+    organization_id: str | None = None,
     days: int = 7,
     limit: int = 100,
     *,
     gsm_client: secretmanager.SecretManagerServiceClient | None = None,
 ) -> list[dict[str, Any]]:
-    """Query failed sync
+    """Query failed sync attempts for ALL actors using a connector definition.
 
-
-
-    jobs and attempts tables to optimize join performance.
+    Finds all actors with the given actor_definition_id and returns their failed
+    sync attempts, regardless of whether they have explicit version pins.
 
-
-
+    This is useful for investigating connector issues across all users.
+
+    Note: This query only supports SOURCE connectors (joins via connection.source_id).
+    For destination connectors, a separate query would be needed.
 
     Args:
-
+        connector_definition_id: Connector definition UUID to filter by
+        organization_id: Optional organization UUID to filter results by (post-query filter)
         days: Number of days to look back (default: 7)
         limit: Maximum number of results (default: 100)
         gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
 
     Returns:
-        List of failed sync
+        List of failed sync attempt records with failure_summary and workspace info
     """
     cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
-
-
+
+    results = _run_sql_query(
+        SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
         parameters={
-            "
+            "connector_definition_id": connector_definition_id,
            "cutoff_date": cutoff_date,
            "limit": limit,
        },
-        query_name="
+        query_name="SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR",
        gsm_client=gsm_client,
    )
 
+    # Post-query filter by organization_id if provided
+    if organization_id is not None:
+        results = [
+            r for r in results if str(r.get("organization_id")) == organization_id
+        ]
+
+    return results
+
 
 def query_dataplanes_list(
     *,

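Based on the signature above, and assuming the function is exposed from airbyte_ops_mcp.prod_db_access.queries as the file list suggests, a caller would invoke the new query roughly like this; the UUID and the printed field names (which appear in the SELECT list further below) are for illustration only:

    from airbyte_ops_mcp.prod_db_access.queries import (
        query_failed_sync_attempts_for_connector,
    )

    # Placeholder connector definition UUID for illustration
    failed_attempts = query_failed_sync_attempts_for_connector(
        connector_definition_id="00000000-0000-0000-0000-000000000000",
        organization_id=None,  # or an org UUID to apply the post-query filter
        days=7,
        limit=100,
    )
    for row in failed_attempts:
        print(row["workspace_name"], row["connection_name"], row["failed_attempt_id"])
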
@@ -305,32 +305,33 @@ SELECT_SUCCESSFUL_SYNCS_FOR_VERSION = sqlalchemy.text(
     """
 )
 
-# Get failed attempt results for actors
-#
-#
-# from
-#
-#
-#
-
+# Get failed attempt results for ALL actors using a connector definition.
+# Finds all actors with the given actor_definition_id and returns their failed sync attempts,
+# regardless of whether they have explicit version pins.
+# Query starts from attempts table to leverage indexed columns (ended_at, status).
+# Note: This query only supports SOURCE connectors (joins via connection.source_id).
+# The LEFT JOIN to scoped_configuration provides pin context when available (pin_origin_type,
+# pin_origin, pinned_version_id will be NULL for unpinned actors).
+SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR = sqlalchemy.text(
     """
     SELECT
         jobs.id AS job_id,
         jobs.scope AS connection_id,
-        jobs.status AS
+        jobs.status AS latest_job_status,
         jobs.started_at AS job_started_at,
         jobs.updated_at AS job_updated_at,
         connection.name AS connection_name,
         actor.id AS actor_id,
         actor.name AS actor_name,
         actor.actor_definition_id,
-        scoped_configuration.origin_type AS pin_origin_type,
-        scoped_configuration.origin AS pin_origin,
         workspace.id AS workspace_id,
         workspace.name AS workspace_name,
         workspace.organization_id,
         workspace.dataplane_group_id,
         dataplane_group.name AS dataplane_name,
+        scoped_configuration.origin_type AS pin_origin_type,
+        scoped_configuration.origin AS pin_origin,
+        scoped_configuration.value AS pinned_version_id,
         attempts.id AS failed_attempt_id,
         attempts.attempt_number AS failed_attempt_number,
         attempts.status AS failed_attempt_status,

@@ -347,15 +348,15 @@ SELECT_FAILED_SYNC_ATTEMPTS_FOR_VERSION = sqlalchemy.text(
         ON jobs.scope = connection.id::text
     JOIN actor
         ON connection.source_id = actor.id
-
-        ON scoped_configuration.scope_id = actor.id
-        AND scoped_configuration.key = 'connector_version'
-        AND scoped_configuration.scope_type = 'actor'
-        AND scoped_configuration.value = :actor_definition_version_id
+        AND actor.actor_definition_id = :connector_definition_id
     JOIN workspace
         ON actor.workspace_id = workspace.id
     LEFT JOIN dataplane_group
         ON workspace.dataplane_group_id = dataplane_group.id
+    LEFT JOIN scoped_configuration
+        ON scoped_configuration.scope_id = actor.id
+        AND scoped_configuration.key = 'connector_version'
+        AND scoped_configuration.scope_type = 'actor'
     WHERE
         attempts.ended_at >= :cutoff_date
         AND attempts.status = 'failed'

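Because scoped_configuration is now joined with a LEFT JOIN, rows for unpinned actors come back with NULL pin columns, so a caller can split pinned from unpinned failures. Continuing the usage sketch above, with field names taken from the SELECT list:

    pinned = [r for r in failed_attempts if r["pinned_version_id"] is not None]
    unpinned = [r for r in failed_attempts if r["pinned_version_id"] is None]
    print(f"{len(pinned)} failed attempts from pinned actors, {len(unpinned)} from unpinned")
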
@@ -5,17 +5,17 @@ This module provides tools for testing Airbyte connectors against live data
 without using Dagger. It uses Docker SDK directly for container orchestration.
 """
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.connection_fetcher import (
     ConnectionData,
     fetch_connection_data,
 )
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.connection_secret_retriever import (
     enrich_config_with_secrets,
     is_secret_retriever_enabled,
     retrieve_unmasked_config,
     should_use_secret_retriever,
 )
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.models import (
     Command,
     ConnectorUnderTest,
     ExecutionResult,

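These import updates follow the package rename from airbyte_ops_mcp.live_tests to airbyte_ops_mcp.regression_tests shown in the file list above; any external caller pinned to the 0.1.x paths needs the same one-line change, for example:

    # Before (0.1.11)
    # from airbyte_ops_mcp.live_tests.connection_fetcher import fetch_connection_data

    # After (0.2.1)
    from airbyte_ops_mcp.regression_tests.connection_fetcher import fetch_connection_data
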
@@ -5,7 +5,7 @@ This module uses the PyAirbyte GoogleGSMSecretManager to retrieve
 integration test secrets from Google Secret Manager for connectors.
 
 Usage:
-    from airbyte_ops_mcp.
+    from airbyte_ops_mcp.regression_tests.cdk_secrets import get_first_config_from_secrets
 
     # Fetch the first config for a connector
     config = get_first_config_from_secrets("source-github")

@@ -11,8 +11,8 @@ The secret retriever requires:
 - Cloud SQL Proxy running to internal Postgres (or CI environment)
 
 Usage:
-    from airbyte_ops_mcp.
-    from airbyte_ops_mcp.
+    from airbyte_ops_mcp.regression_tests.connection_fetcher import fetch_connection_data
+    from airbyte_ops_mcp.regression_tests.connection_secret_retriever import (
         enrich_config_with_secrets,
         should_use_secret_retriever,
     )

@@ -42,7 +42,7 @@ from airbyte_ops_mcp.connection_config_retriever import (
 from airbyte_ops_mcp.gcp_auth import ensure_adc_credentials
 
 if TYPE_CHECKING:
-    from airbyte_ops_mcp.
+    from airbyte_ops_mcp.regression_tests.connection_fetcher import ConnectionData
 
 logger = logging.getLogger(__name__)
 

@@ -8,7 +8,9 @@ Based on airbyte-ci implementation:
 https://github.com/airbytehq/airbyte/tree/master/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends
 """
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.message_cache.duckdb_cache import (
+    DuckDbMessageCache,
+)
 
 __all__ = [
     "DuckDbMessageCache",

@@ -8,7 +8,7 @@ Based on airbyte-ci implementation:
 https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/live-tests/src/live_tests/regression_tests/test_read.py
 """
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.regression.comparators import (
     ComparisonResult,
     RecordDiff,
     StreamComparisonResult,

@@ -19,7 +19,9 @@ from airbyte_protocol.models import AirbyteMessage
 from airbyte_protocol.models import Type as AirbyteMessageType
 from genson import SchemaBuilder
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.obfuscation import (
+    convert_obfuscated_record_to_typed,
+)
 
 logger = logging.getLogger(__name__)
 

@@ -8,7 +8,7 @@ Based on airbyte-ci validation tests:
 https://github.com/airbytehq/airbyte/tree/master/airbyte-ci/connectors/live-tests/src/live_tests/validation_tests
 """
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.validation.catalog_validators import (
     ValidationResult,
     validate_additional_properties_is_true,
     validate_catalog,

@@ -20,7 +20,7 @@ from airbyte_ops_mcp.live_tests.validation.catalog_validators import (
     validate_schemas_are_valid_json_schema,
     validate_streams_have_sync_modes,
 )
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.validation.record_validators import (
     validate_primary_keys_in_records,
     validate_records_conform_to_schema,
     validate_state_messages_emitted,

@@ -15,10 +15,12 @@ import jsonschema
 from airbyte_protocol.models import AirbyteMessage, AirbyteStateType
 from airbyte_protocol.models import Type as AirbyteMessageType
 
-from airbyte_ops_mcp.
+from airbyte_ops_mcp.regression_tests.validation.catalog_validators import (
+    ValidationResult,
+)
 
 if TYPE_CHECKING:
-    from airbyte_ops_mcp.
+    from airbyte_ops_mcp.regression_tests.models import ExecutionResult
 
 
 def validate_records_conform_to_schema(
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|