airbyte-internal-ops 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/METADATA +70 -1
- {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/RECORD +30 -31
- airbyte_ops_mcp/__init__.py +30 -2
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/pipeline.py +2 -8
- airbyte_ops_mcp/airbyte_repo/list_connectors.py +176 -4
- airbyte_ops_mcp/airbyte_repo/utils.py +5 -3
- airbyte_ops_mcp/cli/cloud.py +35 -36
- airbyte_ops_mcp/cli/registry.py +90 -1
- airbyte_ops_mcp/cli/repo.py +15 -0
- airbyte_ops_mcp/connection_config_retriever/__init__.py +26 -0
- airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/audit_logging.py +5 -6
- airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/retrieval.py +8 -22
- airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/secrets_resolution.py +8 -42
- airbyte_ops_mcp/constants.py +35 -0
- airbyte_ops_mcp/live_tests/connection_secret_retriever.py +1 -1
- airbyte_ops_mcp/mcp/github_repo_ops.py +10 -0
- airbyte_ops_mcp/mcp/live_tests.py +21 -6
- airbyte_ops_mcp/mcp/prod_db_queries.py +357 -0
- airbyte_ops_mcp/mcp/server.py +2 -0
- airbyte_ops_mcp/mcp/server_info.py +2 -2
- airbyte_ops_mcp/prod_db_access/__init__.py +34 -0
- airbyte_ops_mcp/prod_db_access/db_engine.py +127 -0
- airbyte_ops_mcp/prod_db_access/py.typed +0 -0
- airbyte_ops_mcp/prod_db_access/queries.py +272 -0
- airbyte_ops_mcp/prod_db_access/sql.py +353 -0
- airbyte_ops_mcp/registry/__init__.py +34 -0
- airbyte_ops_mcp/registry/models.py +63 -0
- airbyte_ops_mcp/registry/publish.py +368 -0
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/__init__.py +0 -3
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/commands.py +0 -242
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/context.py +0 -175
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/pipeline.py +0 -1056
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/__init__.py +0 -3
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/commands.py +0 -127
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/steps/python_registry.py +0 -238
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/models/contexts/python_registry_publish.py +0 -119
- airbyte_ops_mcp/live_tests/_connection_retriever/__init__.py +0 -35
- airbyte_ops_mcp/live_tests/_connection_retriever/consts.py +0 -33
- airbyte_ops_mcp/live_tests/_connection_retriever/db_access.py +0 -82
- {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/WHEEL +0 -0
- {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/entry_points.txt +0 -0
airbyte_ops_mcp/cli/cloud.py
CHANGED
@@ -23,7 +23,12 @@ from airbyte_protocol.models import ConfiguredAirbyteCatalog
 from cyclopts import App, Parameter
 
 from airbyte_ops_mcp.cli._base import app
-from airbyte_ops_mcp.cli._shared import
+from airbyte_ops_mcp.cli._shared import (
+    exit_with_error,
+    print_error,
+    print_json,
+    print_success,
+)
 from airbyte_ops_mcp.cloud_admin.connection_config import fetch_connection_config
 from airbyte_ops_mcp.live_tests.ci_output import (
     generate_regression_report,

@@ -391,12 +396,11 @@ def live_test(
     # If connector_name is provided, build the image from source
     if connector_name:
         if connector_image:
-            print_error("Cannot specify both connector_image and connector_name")
             write_github_output("success", False)
             write_github_output(
                 "error", "Cannot specify both connector_image and connector_name"
             )
-
+            exit_with_error("Cannot specify both connector_image and connector_name")
 
         repo_root_path = Path(repo_root) if repo_root else None
         built_image = _build_connector_image_from_source(

@@ -407,19 +411,18 @@ def live_test(
         if not built_image:
             write_github_output("success", False)
             write_github_output("error", f"Failed to build image for {connector_name}")
-
+            exit_with_error(f"Failed to build image for {connector_name}")
         resolved_connector_image = built_image
 
     if connection_id:
         if config_path or catalog_path:
-            print_error(
-                "Cannot specify both connection_id and config_path/catalog_path"
-            )
             write_github_output("success", False)
             write_github_output(
                 "error", "Cannot specify both connection_id and file paths"
             )
-
+            exit_with_error(
+                "Cannot specify both connection_id and config_path/catalog_path"
+            )
 
         print_success(f"Fetching config/catalog from connection: {connection_id}")
         connection_data = fetch_connection_data(connection_id)

@@ -439,25 +442,23 @@ def live_test(
     catalog_file = Path(catalog_path) if catalog_path else None
 
     if not resolved_connector_image:
-
+        write_github_output("success", False)
+        write_github_output("error", "Missing connector image")
+        exit_with_error(
             "You must provide one of the following: a connector_image, a connector_name, "
             "or a connection_id for a connection that has an associated connector image. "
             "If using connection_id, ensure the connection has a connector image configured."
         )
-        write_github_output("success", False)
-        write_github_output("error", "Missing connector image")
-        return
 
     # If connector_name was provided, we just built the image locally and it is already
     # available in Docker, so we skip the image availability check/pull. Only try to pull
     # if we didn't just build it (i.e., using a pre-built image from registry).
     if not connector_name and not ensure_image_available(resolved_connector_image):
-        print_error(f"Failed to pull connector image: {resolved_connector_image}")
         write_github_output("success", False)
         write_github_output(
             "error", f"Failed to pull image: {resolved_connector_image}"
         )
-
+        exit_with_error(f"Failed to pull connector image: {resolved_connector_image}")
 
     result = _run_connector_command(
         connector_image=resolved_connector_image,

@@ -494,7 +495,7 @@ def live_test(
     if result["success"]:
         print_success(f"Live test passed for {resolved_connector_image}")
     else:
-
+        exit_with_error(f"Live test failed for {resolved_connector_image}")
 
 
 def _run_with_optional_http_metrics(

@@ -673,12 +674,11 @@ def regression_test(
     # If connector_name is provided, build the target image from source
     if connector_name:
         if target_image:
-            print_error("Cannot specify both target_image and connector_name")
             write_github_output("success", False)
             write_github_output(
                 "error", "Cannot specify both target_image and connector_name"
             )
-
+            exit_with_error("Cannot specify both target_image and connector_name")
 
         repo_root_path = Path(repo_root) if repo_root else None
         built_image = _build_connector_image_from_source(

@@ -689,19 +689,18 @@ def regression_test(
        if not built_image:
            write_github_output("success", False)
            write_github_output("error", f"Failed to build image for {connector_name}")
-
+            exit_with_error(f"Failed to build image for {connector_name}")
        resolved_target_image = built_image
 
    if connection_id:
        if config_path or catalog_path:
-            print_error(
-                "Cannot specify both connection_id and config_path/catalog_path"
-            )
            write_github_output("success", False)
            write_github_output(
                "error", "Cannot specify both connection_id and file paths"
            )
-
+            exit_with_error(
+                "Cannot specify both connection_id and config_path/catalog_path"
+            )
 
        print_success(f"Fetching config/catalog from connection: {connection_id}")
        connection_data = fetch_connection_data(connection_id)

@@ -723,36 +722,36 @@ def regression_test(
 
     # Validate that we have both images
     if not resolved_target_image:
-
+        write_github_output("success", False)
+        write_github_output("error", "No target image specified")
+        exit_with_error(
             "You must provide one of the following: a target_image or a connector_name "
             "to build the target image from source."
         )
-        write_github_output("success", False)
-        write_github_output("error", "No target image specified")
-        return
 
     if not resolved_control_image:
-
+        write_github_output("success", False)
+        write_github_output("error", "No control image specified")
+        exit_with_error(
             "You must provide one of the following: a control_image or a connection_id "
             "for a connection that has an associated connector image."
         )
-        write_github_output("success", False)
-        write_github_output("error", "No control image specified")
-        return
 
     # Pull images if they weren't just built locally
     # If connector_name was provided, we just built the target image locally
     if not connector_name and not ensure_image_available(resolved_target_image):
-        print_error(f"Failed to pull target connector image: {resolved_target_image}")
         write_github_output("success", False)
         write_github_output("error", f"Failed to pull image: {resolved_target_image}")
-
+        exit_with_error(
+            f"Failed to pull target connector image: {resolved_target_image}"
+        )
 
     if not ensure_image_available(resolved_control_image):
-        print_error(f"Failed to pull control connector image: {resolved_control_image}")
         write_github_output("success", False)
         write_github_output("error", f"Failed to pull image: {resolved_control_image}")
-
+        exit_with_error(
+            f"Failed to pull control connector image: {resolved_control_image}"
+        )
 
     target_output = output_path / "target"
     control_output = output_path / "control"

@@ -819,7 +818,7 @@ def regression_test(
     write_github_summary(summary)
 
     if regression_detected:
-
+        exit_with_error(
             f"Regression detected between {resolved_target_image} and {resolved_control_image}"
         )
     elif both_succeeded:

@@ -827,7 +826,7 @@ def regression_test(
             f"Regression test passed for {resolved_target_image} vs {resolved_control_image}"
         )
     else:
-
+        exit_with_error(
             f"Both versions failed for {resolved_target_image} vs {resolved_control_image}"
        )
 
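The recurring change in cloud.py swaps the old `print_error(...)` plus bare `return` pattern for a single `exit_with_error(...)` call placed after the `write_github_output` calls, so failures now end the CLI with a non-zero exit code. The helper itself lives in `airbyte_ops_mcp/cli/_shared.py` and is not shown in this diff; the sketch below is a hypothetical illustration only, with the signature assumed from its call sites (including the `code=1` keyword seen in registry.py below).

```python
# Hypothetical sketch of the exit_with_error helper imported from
# airbyte_ops_mcp.cli._shared; the real implementation is not part of this diff.
import sys
from typing import NoReturn


def exit_with_error(message: str, code: int = 1) -> NoReturn:
    # Print the error and terminate with a non-zero exit code, replacing the
    # older print_error(...) + return pattern used throughout cloud.py.
    print(f"Error: {message}", file=sys.stderr)
    sys.exit(code)
```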
airbyte_ops_mcp/cli/registry.py
CHANGED
@@ -1,21 +1,36 @@
 # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 """CLI commands for connector registry operations.
 
+This module provides CLI wrappers for registry operations. The core logic
+lives in the `airbyte_ops_mcp.registry` capability module.
+
 Commands:
     airbyte-ops registry connector publish-prerelease - Publish connector prerelease
+    airbyte-ops registry connector publish - Publish connector (apply/rollback version override)
     airbyte-ops registry image inspect - Inspect Docker image on DockerHub
 """
 
 from __future__ import annotations
 
+from pathlib import Path
 from typing import Annotated
 
 from cyclopts import App, Parameter
 
 from airbyte_ops_mcp.cli._base import app
-from airbyte_ops_mcp.cli._shared import
+from airbyte_ops_mcp.cli._shared import (
+    exit_with_error,
+    print_error,
+    print_json,
+    print_success,
+)
 from airbyte_ops_mcp.mcp.github import get_docker_image_info
 from airbyte_ops_mcp.mcp.prerelease import publish_connector_to_airbyte_registry
+from airbyte_ops_mcp.registry import (
+    ConnectorPublishResult,
+    PublishAction,
+    publish_connector,
+)
 
 # Create the registry sub-app
 registry_app = App(

@@ -65,6 +80,80 @@ def publish_prerelease(
     print_json(result.model_dump())
 
 
+@connector_app.command(name="publish")
+def publish(
+    name: Annotated[
+        str,
+        Parameter(help="Connector technical name (e.g., source-github)."),
+    ],
+    repo_path: Annotated[
+        Path,
+        Parameter(help="Path to the Airbyte monorepo. Defaults to current directory."),
+    ] = Path.cwd(),
+    apply_override: Annotated[
+        bool,
+        Parameter(
+            help="Apply a version override (promote RC to stable).",
+            negative="",  # Disable --no-apply-override
+        ),
+    ] = False,
+    rollback_override: Annotated[
+        bool,
+        Parameter(
+            help="Rollback a version override.",
+            negative="",  # Disable --no-rollback-override
+        ),
+    ] = False,
+    dry_run: Annotated[
+        bool,
+        Parameter(help="Show what would be published without making changes."),
+    ] = False,
+    prod: Annotated[
+        bool,
+        Parameter(
+            help="Target the production GCS bucket. Without this flag, operations target the dev bucket for safe testing.",
+            negative="",  # Disable --no-prod
+        ),
+    ] = False,
+) -> None:
+    """Publish a connector to the Airbyte registry.
+
+    This command handles connector publishing operations including applying
+    version overrides (promoting RC to stable) or rolling back version overrides.
+
+    By default, operations target the dev bucket (dev-airbyte-cloud-connector-metadata-service-2)
+    for safe testing. Use --prod to target the production bucket.
+    """
+    if apply_override and rollback_override:
+        exit_with_error("Cannot use both --apply-override and --rollback-override")
+
+    if not apply_override and not rollback_override:
+        exit_with_error("Must specify either --apply-override or --rollback-override")
+
+    # Map CLI flags to PublishAction
+    action: PublishAction = (
+        "apply-version-override" if apply_override else "rollback-version-override"
+    )
+
+    # Delegate to the capability module
+    if not repo_path.exists():
+        exit_with_error(f"Repository path not found: {repo_path}")
+
+    result: ConnectorPublishResult = publish_connector(
+        repo_path=repo_path,
+        connector_name=name,
+        action=action,
+        dry_run=dry_run,
+        use_prod=prod,
+    )
+
+    # Output result as JSON
+    print_json(result.model_dump())
+
+    if result.status == "failure":
+        exit_with_error(result.message or "Operation failed", code=1)
+
+
 @image_app.command(name="inspect")
 def inspect_image(
     image: Annotated[
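Based on the new `publish` command above, an equivalent programmatic dry run against the dev bucket might look like the following sketch; the parameter names are taken from the command body, the repository path is a placeholder, and the result model's fields beyond `status` and `message` are not shown in this diff.

```python
# Illustrative call into the new registry capability module, mirroring what the
# CLI command does; the repo path below is a placeholder.
from pathlib import Path

from airbyte_ops_mcp.registry import publish_connector

result = publish_connector(
    repo_path=Path("~/airbyte").expanduser(),  # path to the Airbyte monorepo (placeholder)
    connector_name="source-github",
    action="apply-version-override",           # or "rollback-version-override"
    dry_run=True,                               # show what would happen, change nothing
    use_prod=False,                             # default: dev bucket for safe testing
)
print(result.status, result.message)
```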
airbyte_ops_mcp/cli/repo.py
CHANGED
@@ -27,6 +27,7 @@ from airbyte_ops_mcp.airbyte_repo.list_connectors import (
     CONNECTOR_PATH_PREFIX,
     METADATA_FILE_NAME,
     _detect_connector_language,
+    get_connectors_with_local_cdk,
 )
 from airbyte_ops_mcp.cli._base import app
 from airbyte_ops_mcp.cli._shared import exit_with_error, print_json

@@ -160,6 +161,15 @@ def list_connectors(
         bool,
         Parameter(help="Include only modified connectors (requires PR context)."),
     ] = False,
+    local_cdk: Annotated[
+        bool,
+        Parameter(
+            help=(
+                "Include connectors using local CDK reference. "
+                "When combined with --modified-only, adds local-CDK connectors to the modified set."
+            )
+        ),
+    ] = False,
     language: Annotated[
         list[str] | None,
         Parameter(help="Languages to include (python, java, low-code, manifest-only)."),

@@ -286,6 +296,11 @@ def list_connectors(
     connectors = list(result.connectors)
     repo_path_obj = Path(repo_path)
 
+    # Add connectors with local CDK reference if --local-cdk flag is set
+    if local_cdk:
+        local_cdk_connectors = get_connectors_with_local_cdk(repo_path)
+        connectors = sorted(set(connectors) | local_cdk_connectors)
+
     # Apply connector type filter
     if connector_type_filter:
         connectors = [
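The `--local-cdk` merge added above is a plain set union followed by a sort, so connectors already selected by other filters are not duplicated. A toy illustration (connector names invented):

```python
# Toy illustration of the union/sort merge performed when --local-cdk is set;
# the connector names here are invented.
connectors = ["source-github", "source-stripe"]            # e.g. the modified set
local_cdk_connectors = {"source-stripe", "source-faker"}   # connectors pinned to a local CDK

merged = sorted(set(connectors) | local_cdk_connectors)
assert merged == ["source-faker", "source-github", "source-stripe"]
```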
airbyte_ops_mcp/connection_config_retriever/__init__.py
ADDED
@@ -0,0 +1,26 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""Connection config retriever module.
+
+This module provides functionality to retrieve unmasked connection configuration
+from Airbyte Cloud's internal database, including secret resolution from GCP
+Secret Manager and audit logging to GCP Cloud Logging.
+
+Refactored from: live_tests/_connection_retriever
+Original source: airbyte-platform-internal/tools/connection-retriever
+"""
+
+from airbyte_ops_mcp.connection_config_retriever.retrieval import (
+    ConnectionNotFoundError,
+    RetrievalMetadata,
+    TestingCandidate,
+    retrieve_objects,
+)
+from airbyte_ops_mcp.constants import ConnectionObject
+
+__all__ = [
+    "ConnectionNotFoundError",
+    "ConnectionObject",
+    "RetrievalMetadata",
+    "TestingCandidate",
+    "retrieve_objects",
+]
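A hedged sketch of how the re-exported retrieval API might be used: the keyword names follow the Args section that the retrieval.py diff below removes (connection_objects, retrieval_reason, connection_id), the connection ID is a placeholder, and GCP credentials with access to the prod DB and Secret Manager are assumed.

```python
# Illustrative use of the re-exported retrieval API; the connection ID is a
# placeholder and the call is assumed to require GCP credentials.
from airbyte_ops_mcp.connection_config_retriever import (
    ConnectionObject,
    retrieve_objects,
)

candidates = retrieve_objects(
    connection_objects=[ConnectionObject.SOURCE_CONFIG, ConnectionObject.CONFIGURED_CATALOG],
    retrieval_reason="debugging a live-test failure",  # recorded by audit logging
    connection_id="00000000-0000-0000-0000-000000000000",
)
```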
airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/audit_logging.py
RENAMED
@@ -1,7 +1,8 @@
 # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
-"""Audit logging for
+"""Audit logging for connection config retrieval.
 
-
+Refactored from: live_tests/_connection_retriever/audit_logging.py
+Original source: airbyte-platform-internal/tools/connection-retriever/src/connection_retriever/audit_logging.py
 """
 
 from __future__ import annotations

@@ -12,12 +13,10 @@ from typing import TYPE_CHECKING, Any, Callable
 
 from google.cloud import logging as gcloud_logging
 
-from airbyte_ops_mcp.
-    GCP_PROJECT_NAME,
-)
+from airbyte_ops_mcp.constants import GCP_PROJECT_NAME
 
 if TYPE_CHECKING:
-    from airbyte_ops_mcp.
+    from airbyte_ops_mcp.connection_config_retriever.retrieval import (
         RetrievalMetadata,
     )
 
airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/retrieval.py
RENAMED
@@ -1,7 +1,8 @@
 # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
-"""Core retrieval logic for
+"""Core retrieval logic for connection config retrieval.
 
-
+Refactored from: live_tests/_connection_retriever/retrieval.py
+Original source: airbyte-platform-internal/tools/connection-retriever/src/connection_retriever/retrieval.py
 
 This is a minimal subset focused on retrieving unmasked source config.
 For testing candidate discovery, see issue #91.

@@ -18,19 +19,12 @@ import requests
 import sqlalchemy
 from google.cloud import secretmanager
 
-from airbyte_ops_mcp.
-
-)
-from airbyte_ops_mcp.live_tests._connection_retriever.consts import (
-    CLOUD_REGISTRY_URL,
-    ConnectionObject,
-)
-from airbyte_ops_mcp.live_tests._connection_retriever.db_access import (
-    get_pool,
-)
-from airbyte_ops_mcp.live_tests._connection_retriever.secrets_resolution import (
+from airbyte_ops_mcp.connection_config_retriever.audit_logging import audit
+from airbyte_ops_mcp.connection_config_retriever.secrets_resolution import (
     get_resolved_config,
 )
+from airbyte_ops_mcp.constants import CLOUD_REGISTRY_URL, ConnectionObject
+from airbyte_ops_mcp.prod_db_access.db_engine import get_pool
 
 LOGGER = logging.getLogger(__name__)
 

@@ -59,7 +53,7 @@ SELECT_ON_CONNECTION_NOT_EU = sqlalchemy.text(
 SELECT_ON_CONNECTION_DATAPLANE_GROUP_IS_EU = sqlalchemy.text(
     """
     SELECT
-        CASE WHEN dataplane_group.name = 'EU' THEN TRUE ELSE FALSE END
+        CASE WHEN dataplane_group.name = 'EU' THEN TRUE ELSE FALSE END AS is_eu
     FROM
         connection
     JOIN

@@ -309,14 +303,6 @@ def retrieve_objects(
 
     This is a simplified version that only supports retrieval by connection_id.
     For testing candidate discovery by docker image, see issue #91.
-
-    Args:
-        connection_objects: List of ConnectionObject types to retrieve
-        retrieval_reason: Reason for retrieval (for audit logging)
-        connection_id: The connection ID to retrieve objects for
-
-    Returns:
-        List containing a single TestingCandidate with the requested objects
     """
     connection_candidates = [TestingCandidate(connection_id=connection_id)]
 
airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/secrets_resolution.py
RENAMED
@@ -1,7 +1,8 @@
 # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
-"""Secret resolution for
+"""Secret resolution for connection config retrieval.
 
-
+Refactored from: live_tests/_connection_retriever/secrets_resolution.py
+Original source: airbyte-platform-internal/tools/connection-retriever/src/connection_retriever/secrets_resolution.py
 """
 
 from __future__ import annotations

@@ -11,23 +12,13 @@ from typing import Any
 import dpath
 from google.cloud import secretmanager
 
-from airbyte_ops_mcp.
-    GCP_PROJECT_NAME,
-)
+from airbyte_ops_mcp.constants import GCP_PROJECT_NAME
 
 
 def get_secret_value(
     secret_manager_client: secretmanager.SecretManagerServiceClient, secret_id: str
 ) -> str:
-    """Get the value of the enabled version of a secret.
-
-    Args:
-        secret_manager_client: The secret manager client
-        secret_id: The id of the secret
-
-    Returns:
-        The value of the enabled version of the secret
-    """
+    """Get the value of the enabled version of a secret."""
     response = secret_manager_client.list_secret_versions(
         request={"parent": secret_id, "filter": "state:ENABLED"}
     )

@@ -39,14 +30,7 @@ def get_secret_value(
 
 
 def is_secret(value: Any) -> bool:
-    """Determine if a value is a secret.
-
-    Args:
-        value: The value to check
-
-    Returns:
-        True if the value is a secret, False otherwise
-    """
+    """Determine if a value is a secret."""
     return isinstance(value, dict) and value.get("_secret") is not None
 
 

@@ -54,15 +38,7 @@ def resolve_secrets_in_config(
     secret_manager_client: secretmanager.SecretManagerServiceClient,
     connector_config: dict,
 ) -> dict:
-    """Recursively resolve secrets in the connector_config.
-
-    Args:
-        secret_manager_client: The secret manager client
-        connector_config: The connector_config to resolve secrets in
-
-    Returns:
-        The connector_config with secrets resolved
-    """
+    """Recursively resolve secrets in the connector_config."""
     for key in connector_config:
         if is_secret(connector_config[key]):
             secret_id = f"projects/{GCP_PROJECT_NAME}/secrets/{connector_config[key]['_secret']}"

@@ -91,17 +67,7 @@ def get_resolved_config(
     actor_oauth_parameter: dict,
     spec: dict,
 ) -> dict:
-    """Get the resolved configuration, resolving secrets and merging OAuth params.
-
-    Args:
-        secret_manager_client: The secret manager client
-        actor_configuration: The actor configuration
-        actor_oauth_parameter: The actor oauth parameter
-        spec: The connector spec
-
-    Returns:
-        The resolved configuration
-    """
+    """Get the resolved configuration, resolving secrets and merging OAuth params."""
     resolved_configuration = resolve_secrets_in_config(
         secret_manager_client, actor_configuration
     )
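For context on the `_secret` convention checked by `is_secret` above: a masked config stores a pointer of the form {"_secret": "<secret name>"}, and `resolve_secrets_in_config` swaps it for the payload of the enabled secret version under projects/{GCP_PROJECT_NAME}/secrets/<secret name>. A toy before/after (names and values invented):

```python
# Toy before/after of secret resolution; names and values are invented.
masked_config = {
    "api_key": {"_secret": "airbyte_workspace_123_secret_456"},  # pointer, not the value
    "start_date": "2024-01-01",
}
# After resolve_secrets_in_config(secret_manager_client, masked_config),
# the pointer is replaced by the enabled secret version's payload:
resolved_config = {
    "api_key": "<resolved secret value>",
    "start_date": "2024-01-01",
}
```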
airbyte_ops_mcp/constants.py
CHANGED
@@ -3,6 +3,8 @@
 
 from __future__ import annotations
 
+from enum import Enum
+
 MCP_SERVER_NAME = "airbyte-internal-ops"
 """The name of the MCP server."""
 

@@ -13,3 +15,36 @@ ENV_AIRBYTE_INTERNAL_ADMIN_USER = "AIRBYTE_INTERNAL_ADMIN_USER"
 # Expected values for internal admin authentication
 EXPECTED_ADMIN_FLAG_VALUE = "airbyte.io"
 EXPECTED_ADMIN_EMAIL_DOMAIN = "@airbyte.io"
+
+# =============================================================================
+# GCP and Prod DB Constants (from connection-retriever)
+# =============================================================================
+
+GCP_PROJECT_NAME = "prod-ab-cloud-proj"
+"""The GCP project name for Airbyte Cloud production."""
+
+CLOUD_REGISTRY_URL = (
+    "https://connectors.airbyte.com/files/registries/v0/cloud_registry.json"
+)
+"""URL for the Airbyte Cloud connector registry."""
+
+CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS_SECRET_ID = (
+    "projects/587336813068/secrets/CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS"
+)
+"""GCP Secret Manager ID for Prod DB connection details."""
+
+
+class ConnectionObject(Enum):
+    """Types of connection objects that can be retrieved."""
+
+    CONNECTION = "connection"
+    SOURCE_ID = "source-id"
+    DESTINATION_ID = "destination-id"
+    DESTINATION_CONFIG = "destination-config"
+    SOURCE_CONFIG = "source-config"
+    CATALOG = "catalog"
+    CONFIGURED_CATALOG = "configured-catalog"
+    STATE = "state"
+    WORKSPACE_ID = "workspace-id"
+    DESTINATION_DOCKER_IMAGE = "destination-docker-image"
+    SOURCE_DOCKER_IMAGE = "source-docker-image"
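The new CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS_SECRET_ID constant is a full Secret Manager resource name, so a caller with GCP access can read it by appending a version suffix. A minimal sketch (assumes application-default credentials with access to the secret; not part of the package itself):

```python
# Minimal sketch of reading the Prod DB connection-details secret referenced by
# the new constant; assumes GCP application-default credentials are available.
from google.cloud import secretmanager

from airbyte_ops_mcp.constants import (
    CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS_SECRET_ID,
)

client = secretmanager.SecretManagerServiceClient()
response = client.access_secret_version(
    name=f"{CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS_SECRET_ID}/versions/latest"
)
connection_details = response.payload.data.decode("utf-8")
```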
airbyte_ops_mcp/mcp/github_repo_ops.py
CHANGED
@@ -59,6 +59,14 @@ def list_connectors_in_repo(
         set[str] | None,
         "Set of languages to exclude (mutually exclusive with language_filter)",
     ] = None,
+    connector_type: Annotated[
+        Literal["source", "destination"] | None,
+        "Filter by connector type: 'source' or 'destination', None=all",
+    ] = None,
+    connector_subtype: Annotated[
+        Literal["api", "database", "file", "custom"] | None,
+        "Filter by connector subtype: 'api', 'database', 'file', 'custom', None=all",
+    ] = None,
     pr_num_or_url: Annotated[
         str | None,
         "PR number (e.g., '123'), GitHub URL, or None to auto-detect from GITHUB_REF environment variable",

@@ -81,6 +89,8 @@ def list_connectors_in_repo(
         modified=modified,
         language_filter=language_filter,
         language_exclude=language_exclude,
+        connector_type=connector_type,
+        connector_subtype=connector_subtype,
         base_ref=base_ref,
         head_ref=head_ref,
     )
airbyte_ops_mcp/mcp/live_tests.py
CHANGED
@@ -17,6 +17,7 @@ from typing import Annotated, Any
 
 import requests
 from airbyte.cloud import CloudWorkspace
+from airbyte.cloud.auth import resolve_cloud_client_id, resolve_cloud_client_secret
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 

@@ -388,15 +389,29 @@ def run_live_connection_tests(
 
     # Validate workspace membership if workspace_id is provided
     if workspace_id:
-
-
-
-
-
+        client_id = resolve_cloud_client_id()
+        client_secret = resolve_cloud_client_secret()
+        if not client_id or not client_secret:
+            return RunLiveConnectionTestsResponse(
+                run_id=run_id,
+                status=TestRunStatus.FAILED,
+                message=(
+                    "Missing Airbyte Cloud credentials. "
+                    "Set AIRBYTE_CLOUD_CLIENT_ID and AIRBYTE_CLOUD_CLIENT_SECRET env vars."
+                ),
+                workflow_url=None,
+            )
+        workspace = CloudWorkspace(
+            workspace_id=workspace_id,
+            client_id=client_id,
+            client_secret=client_secret,
+        )
+        connection = workspace.get_connection(connection_id)
+        if connection is None:
             return RunLiveConnectionTestsResponse(
                 run_id=run_id,
                 status=TestRunStatus.FAILED,
-                message=f"Connection {connection_id}
+                message=f"Connection {connection_id} not found in workspace {workspace_id}",
                 workflow_url=None,
             )
 
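The new guard above resolves Cloud credentials through PyAirbyte's helpers before constructing a `CloudWorkspace`. Outside the MCP tool, the same flow might look like this sketch; the workspace and connection IDs are placeholders, and the env var names come from the error message in the diff.

```python
# Sketch of the credential-resolution flow used above; IDs are placeholders.
from airbyte.cloud import CloudWorkspace
from airbyte.cloud.auth import resolve_cloud_client_id, resolve_cloud_client_secret

client_id = resolve_cloud_client_id()
client_secret = resolve_cloud_client_secret()
if not client_id or not client_secret:
    raise RuntimeError(
        "Missing Airbyte Cloud credentials. "
        "Set AIRBYTE_CLOUD_CLIENT_ID and AIRBYTE_CLOUD_CLIENT_SECRET env vars."
    )

workspace = CloudWorkspace(
    workspace_id="11111111-1111-1111-1111-111111111111",
    client_id=client_id,
    client_secret=client_secret,
)
connection = workspace.get_connection("22222222-2222-2222-2222-222222222222")
```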