airbyte-internal-ops 0.1.10__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/RECORD +21 -18
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/entry_points.txt +1 -0
- airbyte_ops_mcp/cli/cloud.py +151 -3
- airbyte_ops_mcp/cloud_admin/auth.py +32 -0
- airbyte_ops_mcp/constants.py +18 -0
- airbyte_ops_mcp/github_actions.py +218 -0
- airbyte_ops_mcp/live_tests/cdk_secrets.py +90 -0
- airbyte_ops_mcp/live_tests/ci_output.py +55 -5
- airbyte_ops_mcp/live_tests/connector_runner.py +3 -0
- airbyte_ops_mcp/mcp/_http_headers.py +198 -0
- airbyte_ops_mcp/mcp/cloud_connector_versions.py +118 -22
- airbyte_ops_mcp/mcp/github.py +2 -21
- airbyte_ops_mcp/mcp/live_tests.py +46 -84
- airbyte_ops_mcp/mcp/prerelease.py +9 -31
- airbyte_ops_mcp/mcp/prod_db_queries.py +67 -24
- airbyte_ops_mcp/mcp/server.py +81 -8
- airbyte_ops_mcp/prod_db_access/db_engine.py +8 -0
- airbyte_ops_mcp/prod_db_access/queries.py +27 -15
- airbyte_ops_mcp/prod_db_access/sql.py +17 -16
- {airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/WHEEL +0 -0
{airbyte_internal_ops-0.1.10.dist-info → airbyte_internal_ops-0.2.0.dist-info}/RECORD CHANGED

@@ -1,7 +1,8 @@
 airbyte_ops_mcp/__init__.py,sha256=HhzURuYr29_UIdMrnWYaZB8ENr_kFkBdm4uqeiIW3Vw,760
 airbyte_ops_mcp/_annotations.py,sha256=MO-SBDnbykxxHDESG7d8rviZZ4WlZgJKv0a8eBqcEzQ,1757
-airbyte_ops_mcp/constants.py,sha256=
+airbyte_ops_mcp/constants.py,sha256=THmvIjU3pb7kpNjn7TpRWD86gtDLmtlQwYuFnaQp_rg,3095
 airbyte_ops_mcp/gcp_auth.py,sha256=5k-k145ZoYhHLjyDES8nrA8f8BBihRI0ykrdD1IcfOs,3599
+airbyte_ops_mcp/github_actions.py,sha256=KwpQ0BrmCa6wiGRmSFGcFN-yIdlzLXN8kUxpi1ME3Tc,6740
 airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/airbyte_ci/README.md,sha256=qEYx4geDR8AEDjrcA303h7Nol-CMDLojxUyiGzQprM8,236
@@ -351,13 +352,13 @@ airbyte_ops_mcp/cli/__init__.py,sha256=XpL7FyVfgabfBF2JR7u7NwJ2krlYqjd_OwLcWf-Xc
 airbyte_ops_mcp/cli/_base.py,sha256=I8tWnyQf0ks4r3J8N8h-5GZxyn37T-55KsbuHnxYlcg,415
 airbyte_ops_mcp/cli/_shared.py,sha256=jg-xMyGzTCGPqKd8VTfE_3kGPIyO_3Kx5sQbG4rPc0Y,1311
 airbyte_ops_mcp/cli/app.py,sha256=SEdBpqFUG2O8zGV5ifwptxrLGFph_dLr66-MX9d69gQ,789
-airbyte_ops_mcp/cli/cloud.py,sha256=
+airbyte_ops_mcp/cli/cloud.py,sha256=Vv1nAXGPQlpKvDbKJ2cg86yGpkRlOZtHy0cWI_-dYJA,45116
 airbyte_ops_mcp/cli/gh.py,sha256=91b1AxFXvHQCFyXhrrym-756ZjnMCqvxFdmwCtma1zI,2046
 airbyte_ops_mcp/cli/registry.py,sha256=-yiLJWSslV_qGi6ImXZYfXOJSE4oJBO7yICkyA_RiUo,5792
 airbyte_ops_mcp/cli/repo.py,sha256=G1hoQpH0XYhUH3FFOsia9xabGB0LP9o3XcwBuqvFVo0,16331
 airbyte_ops_mcp/cloud_admin/__init__.py,sha256=cqE96Q10Kp6elhH9DAi6TVsIwSUy3sooDLLrxTaktGk,816
 airbyte_ops_mcp/cloud_admin/api_client.py,sha256=6PovHDwOzo4fxSyk6viwvnXjCRIiC4uPZo0pGMx0Bdk,17359
-airbyte_ops_mcp/cloud_admin/auth.py,sha256=
+airbyte_ops_mcp/cloud_admin/auth.py,sha256=qE2Aqe0qbZB755KscL65s54Jz78-F-X5a8fXKsrYEOQ,3749
 airbyte_ops_mcp/cloud_admin/connection_config.py,sha256=UtbIwuB7CA3WJr9oYRwlKDsjciqd_9ewWdml2f8DuXw,4887
 airbyte_ops_mcp/cloud_admin/models.py,sha256=YZ3FbEW-tZa50khKTTl4Bzvy_LsGyyQd6qcpXo62jls,2670
 airbyte_ops_mcp/connection_config_retriever/__init__.py,sha256=Xoi-YvARrNPhECdpwEDDkdwEpnvj8zuUlwULpf4iRrU,800
@@ -365,11 +366,12 @@ airbyte_ops_mcp/connection_config_retriever/audit_logging.py,sha256=GjT4dVa0TtvG
 airbyte_ops_mcp/connection_config_retriever/retrieval.py,sha256=s6yeCyrboWkUd6KdaheEo87x-rLtQNTL8XeR8O9z2HI,12160
 airbyte_ops_mcp/connection_config_retriever/secrets_resolution.py,sha256=12g0lZzhCzAPl4Iv4eMW6d76mvXjIBGspOnNhywzks4,3644
 airbyte_ops_mcp/live_tests/__init__.py,sha256=qJac67dt6DQCqif39HqeiG3Tr9xrxfP-ala8HsLZKis,1020
-airbyte_ops_mcp/live_tests/
+airbyte_ops_mcp/live_tests/cdk_secrets.py,sha256=TJ0Vbk5jfTvYElREh4fQFHWof0_bIxZfJqT33dDhtrE,3198
+airbyte_ops_mcp/live_tests/ci_output.py,sha256=rrvCVKKShc1iVPMuQJDBqSbsiAHIDpX8SA9j0Uwl_Cg,12718
 airbyte_ops_mcp/live_tests/config.py,sha256=dwWeY0tatdbwl9BqbhZ7EljoZDCtKmGO5fvOAIxeXmA,5873
 airbyte_ops_mcp/live_tests/connection_fetcher.py,sha256=5wIiA0VvCFNEc-fr6Po18gZMX3E5fyPOGf2SuVOqv5U,12799
 airbyte_ops_mcp/live_tests/connection_secret_retriever.py,sha256=DtZYB4Y8CXfUXTFhmzrqzjuEFoICzz5Md3Ol_y9HCq0,4861
-airbyte_ops_mcp/live_tests/connector_runner.py,sha256=
+airbyte_ops_mcp/live_tests/connector_runner.py,sha256=BLy2RY-KLCK9jNmPz5EsPCk55fJ9WlOOaxr_Xw-GOjY,9914
 airbyte_ops_mcp/live_tests/evaluation_modes.py,sha256=lAL6pEDmy_XCC7_m4_NXjt_f6Z8CXeAhMkc0FU8bm_M,1364
 airbyte_ops_mcp/live_tests/http_metrics.py,sha256=oTD7f2MnQOvx4plOxHop2bInQ0-whvuToSsrC7TIM-M,12469
 airbyte_ops_mcp/live_tests/models.py,sha256=brtAT9oO1TwjFcP91dFcu0XcUNqQb-jf7di1zkoVEuo,8782
@@ -385,29 +387,30 @@ airbyte_ops_mcp/live_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdP
 airbyte_ops_mcp/live_tests/validation/record_validators.py,sha256=-7Ir2LWGCrtadK2JLuBgppSyk0RFJX6Nsy0lrabtwrs,7411
 airbyte_ops_mcp/mcp/__init__.py,sha256=QqkNkxzdXlg-W03urBAQ3zmtOKFPf35rXgO9ceUjpng,334
 airbyte_ops_mcp/mcp/_guidance.py,sha256=48tQSnDnxqXtyGJxxgjz0ZiI814o_7Fj7f6R8jpQ7so,2375
+airbyte_ops_mcp/mcp/_http_headers.py,sha256=NfrbxYROOqisZFLjCNDvv7wFsFHDBzwr6l0U6xs209M,5563
 airbyte_ops_mcp/mcp/_mcp_utils.py,sha256=nhztHcoc-_ASPpJfoDBjxjjqEvQM6_QIrhp7F2UCrQk,11494
-airbyte_ops_mcp/mcp/cloud_connector_versions.py,sha256=
+airbyte_ops_mcp/mcp/cloud_connector_versions.py,sha256=Iz0SirqNAJigdyei-Qqi059OFxixt0VvXdC5CVBXZHc,14331
 airbyte_ops_mcp/mcp/connector_analysis.py,sha256=OC4KrOSkMkKPkOisWnSv96BDDE5TQYHq-Jxa2vtjJpo,298
 airbyte_ops_mcp/mcp/connector_qa.py,sha256=aImpqdnqBPDrz10BS0owsV4kuIU2XdalzgbaGZsbOL0,258
-airbyte_ops_mcp/mcp/github.py,sha256=
+airbyte_ops_mcp/mcp/github.py,sha256=Wum5V99A9vTsjK0YVoE1UOVu75F-M9chg0AnUGkiiT4,7215
 airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
-airbyte_ops_mcp/mcp/live_tests.py,sha256=
+airbyte_ops_mcp/mcp/live_tests.py,sha256=8Nh0jZ9Un_jzAGJf88POgRVxJsomh8TVPyGhDKltx3Y,17158
 airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
-airbyte_ops_mcp/mcp/prerelease.py,sha256=
-airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=
+airbyte_ops_mcp/mcp/prerelease.py,sha256=LHLaSd8q0l7boAsVqTXOjFGDxAGsPZdtL3kj5_IOTEE,8852
+airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=_eNMFM1CBQ4OM_daf2iq-L7lvlytqbI_6v48m5vJdSQ,15632
 airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
 airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
-airbyte_ops_mcp/mcp/server.py,sha256
+airbyte_ops_mcp/mcp/server.py,sha256=-nMufnrpE55urarz0FTi7tG_WGgdqpCk9KnxbK-78xs,5184
 airbyte_ops_mcp/mcp/server_info.py,sha256=Yi4B1auW64QZGBDas5mro_vwTjvrP785TFNSBP7GhRg,2361
 airbyte_ops_mcp/prod_db_access/__init__.py,sha256=5pxouMPY1beyWlB0UwPnbaLTKTHqU6X82rbbgKY2vYU,1069
-airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=
+airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=sG_yXRsP_KAEndJmiaooPk-BS-AHEdS-M2Cas0CrXzc,9384
 airbyte_ops_mcp/prod_db_access/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airbyte_ops_mcp/prod_db_access/queries.py,sha256=
-airbyte_ops_mcp/prod_db_access/sql.py,sha256=
+airbyte_ops_mcp/prod_db_access/queries.py,sha256=txeqRPbovgqbk7lu8ttiZXgA77abFzzeO3hql2o8c44,11228
+airbyte_ops_mcp/prod_db_access/sql.py,sha256=P6UbIHafg3ibs901DPlJxLilxsc-RrCPvnyzSwP-fMw,16300
 airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
 airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
 airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
-airbyte_internal_ops-0.
-airbyte_internal_ops-0.
-airbyte_internal_ops-0.
-airbyte_internal_ops-0.
+airbyte_internal_ops-0.2.0.dist-info/METADATA,sha256=rakGRwvZx1XV9JszUAAJo9nx_ayyP7NJvC7P3mzK9Tk,5282
+airbyte_internal_ops-0.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+airbyte_internal_ops-0.2.0.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+airbyte_internal_ops-0.2.0.dist-info/RECORD,,
airbyte_ops_mcp/cli/cloud.py CHANGED

@@ -18,10 +18,13 @@ import shutil
 import signal
 import socket
 import subprocess
+import tempfile
 import time
 from pathlib import Path
 from typing import Annotated, Literal

+import requests
+import yaml
 from airbyte_cdk.models.connector_metadata import MetadataFile
 from airbyte_cdk.utils.connector_paths import find_connector_root_from_name
 from airbyte_cdk.utils.docker import build_connector_image, verify_docker_installation
@@ -42,6 +45,7 @@ from airbyte_ops_mcp.constants import (
     DEFAULT_CLOUD_SQL_PROXY_PORT,
     ENV_GCP_PROD_DB_ACCESS_CREDENTIALS,
 )
+from airbyte_ops_mcp.live_tests.cdk_secrets import get_first_config_from_secrets
 from airbyte_ops_mcp.live_tests.ci_output import (
     generate_regression_report,
     get_report_summary,
@@ -55,6 +59,10 @@ from airbyte_ops_mcp.live_tests.connection_fetcher import (
     fetch_connection_data,
     save_connection_data_to_files,
 )
+from airbyte_ops_mcp.live_tests.connection_secret_retriever import (
+    enrich_config_with_secrets,
+    should_use_secret_retriever,
+)
 from airbyte_ops_mcp.live_tests.connector_runner import (
     ConnectorRunner,
     ensure_image_available,
@@ -314,6 +322,20 @@ def set_version_override(
         str,
         Parameter(help="Explanation for the override (min 10 characters)."),
     ],
+    issue_url: Annotated[
+        str,
+        Parameter(help="GitHub issue URL providing context for this operation."),
+    ],
+    approval_comment_url: Annotated[
+        str,
+        Parameter(help="GitHub comment URL where admin authorized this deployment."),
+    ],
+    ai_agent_session_url: Annotated[
+        str | None,
+        Parameter(
+            help="URL to AI agent session driving this operation (for auditability)."
+        ),
+    ] = None,
     reason_url: Annotated[
         str | None,
         Parameter(help="Optional URL with more context (e.g., issue link)."),
@@ -324,6 +346,7 @@ def set_version_override(
     Requires admin authentication via AIRBYTE_INTERNAL_ADMIN_FLAG and
     AIRBYTE_INTERNAL_ADMIN_USER environment variables.
     """
+    admin_user_email = os.environ.get("AIRBYTE_INTERNAL_ADMIN_USER")
     result = set_cloud_connector_version_override(
         workspace_id=workspace_id,
         actor_id=connector_id,
@@ -332,6 +355,10 @@ def set_version_override(
         unset=False,
         override_reason=reason,
         override_reason_reference_url=reason_url,
+        admin_user_email=admin_user_email,
+        issue_url=issue_url,
+        approval_comment_url=approval_comment_url,
+        ai_agent_session_url=ai_agent_session_url,
     )
     if result.success:
         print_success(result.message)
@@ -354,12 +381,27 @@ def clear_version_override(
         Literal["source", "destination"],
         Parameter(help="The type of connector."),
     ],
+    issue_url: Annotated[
+        str,
+        Parameter(help="GitHub issue URL providing context for this operation."),
+    ],
+    approval_comment_url: Annotated[
+        str,
+        Parameter(help="GitHub comment URL where admin authorized this deployment."),
+    ],
+    ai_agent_session_url: Annotated[
+        str | None,
+        Parameter(
+            help="URL to AI agent session driving this operation (for auditability)."
+        ),
+    ] = None,
 ) -> None:
     """Clear a version override from a deployed connector.

     Requires admin authentication via AIRBYTE_INTERNAL_ADMIN_FLAG and
     AIRBYTE_INTERNAL_ADMIN_USER environment variables.
     """
+    admin_user_email = os.environ.get("AIRBYTE_INTERNAL_ADMIN_USER")
     result = set_cloud_connector_version_override(
         workspace_id=workspace_id,
         actor_id=connector_id,
@@ -368,6 +410,10 @@ def clear_version_override(
         unset=True,
         override_reason=None,
         override_reason_reference_url=None,
+        admin_user_email=admin_user_email,
+        issue_url=issue_url,
+        approval_comment_url=approval_comment_url,
+        ai_agent_session_url=ai_agent_session_url,
     )
     if result.success:
         print_success(result.message)
@@ -507,6 +553,49 @@ def _build_connector_image_from_source(
     return built_image


+def _fetch_control_image_from_metadata(connector_name: str) -> str | None:
+    """Fetch the current released connector image from metadata.yaml on the master branch.
+
+    This fetches the connector's metadata.yaml from the airbyte monorepo's master branch
+    and extracts the dockerRepository and dockerImageTag to construct the control image.
+
+    Args:
+        connector_name: The connector name (e.g., 'source-github').
+
+    Returns:
+        The full connector image with tag (e.g., 'airbyte/source-github:1.0.0'),
+        or None if the metadata could not be fetched or parsed.
+    """
+    metadata_url = (
+        f"https://raw.githubusercontent.com/airbytehq/airbyte/master/"
+        f"airbyte-integrations/connectors/{connector_name}/metadata.yaml"
+    )
+    response = requests.get(metadata_url, timeout=30)
+    if not response.ok:
+        print_error(
+            f"Failed to fetch metadata for {connector_name}: "
+            f"HTTP {response.status_code} from {metadata_url}"
+        )
+        return None
+
+    metadata = yaml.safe_load(response.text)
+    if not isinstance(metadata, dict):
+        print_error(f"Invalid metadata format for {connector_name}: expected dict")
+        return None
+
+    data = metadata.get("data", {})
+    docker_repository = data.get("dockerRepository")
+    docker_image_tag = data.get("dockerImageTag")
+
+    if not docker_repository or not docker_image_tag:
+        print_error(
+            f"Could not find dockerRepository/dockerImageTag in metadata for {connector_name}"
+        )
+        return None
+
+    return f"{docker_repository}:{docker_image_tag}"
+
+
 @connector_app.command(name="live-test")
 def live_test(
     connector_image: Annotated[
@@ -894,6 +983,26 @@ def regression_test(

         print_success(f"Fetching config/catalog from connection: {connection_id}")
         connection_data = fetch_connection_data(connection_id)
+
+        # Check if we should retrieve unmasked secrets
+        if should_use_secret_retriever():
+            print_success(
+                "USE_CONNECTION_SECRET_RETRIEVER enabled - enriching config with unmasked secrets..."
+            )
+            try:
+                connection_data = enrich_config_with_secrets(
+                    connection_data,
+                    retrieval_reason="Regression test with USE_CONNECTION_SECRET_RETRIEVER=true",
+                )
+                print_success("Successfully retrieved unmasked secrets from database")
+            except Exception as e:
+                print_error(f"Failed to retrieve unmasked secrets: {e}")
+                print_error(
+                    "Proceeding with masked config from public API - tests may fail due to masked credentials. "
+                    "If you expected unmasked secrets, verify that the USE_CONNECTION_SECRET_RETRIEVER flag is enabled "
+                    f"and that the {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS} environment variable is set with valid database credentials."
+                )
+
         config_file, catalog_file = save_connection_data_to_files(
             connection_data, output_path / "connection_data"
         )
@@ -906,10 +1015,48 @@ def regression_test(
         if not resolved_control_image and connection_data.connector_image:
             resolved_control_image = connection_data.connector_image
             print_success(f"Auto-detected control image: {resolved_control_image}")
+    elif config_path:
+        config_file = Path(config_path)
+        catalog_file = Path(catalog_path) if catalog_path else None
+    elif connector_name:
+        # Fallback: fetch integration test secrets from GSM using PyAirbyte API
+        print_success(
+            f"No connection_id or config_path provided. "
+            f"Attempting to fetch integration test config from GSM for {connector_name}..."
+        )
+        gsm_config = get_first_config_from_secrets(connector_name)
+        if gsm_config:
+            # Write config to a temp file (not in output_path to avoid artifact upload)
+            gsm_config_dir = Path(
+                tempfile.mkdtemp(prefix=f"gsm-config-{connector_name}-")
+            )
+            gsm_config_dir.chmod(0o700)
+            gsm_config_file = gsm_config_dir / "config.json"
+            gsm_config_file.write_text(json.dumps(gsm_config, indent=2))
+            gsm_config_file.chmod(0o600)
+            config_file = gsm_config_file
+            catalog_file = None
+            print_success(
+                f"Fetched integration test config from GSM for {connector_name}"
+            )
+        else:
+            print_error(
+                f"Failed to fetch integration test config from GSM for {connector_name}."
+            )
+            config_file = None
+            catalog_file = None
     else:
-        config_file =
+        config_file = None
         catalog_file = Path(catalog_path) if catalog_path else None

+    # Auto-detect control_image from metadata.yaml if connector_name is provided
+    if not resolved_control_image and connector_name:
+        resolved_control_image = _fetch_control_image_from_metadata(connector_name)
+        if resolved_control_image:
+            print_success(
+                f"Auto-detected control image from metadata.yaml: {resolved_control_image}"
+            )
+
     # Validate that we have both images
     if not resolved_target_image:
         write_github_output("success", False)
@@ -923,8 +1070,9 @@ def regression_test(
         write_github_output("success", False)
         write_github_output("error", "No control image specified")
         exit_with_error(
-            "You must provide one of the following: a control_image
-            "for a connection that has an associated connector image
+            "You must provide one of the following: a control_image, a connection_id "
+            "for a connection that has an associated connector image, or a connector_name "
+            "to auto-detect the control image from the airbyte repo's metadata.yaml."
         )

     # Pull images if they weren't just built locally
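
The _fetch_control_image_from_metadata helper added above is what lets regression_test fall back to a connector_name alone. A short usage sketch (hypothetical session; it assumes the package is importable and that the connector exists on the airbyte monorepo's master branch):

    from airbyte_ops_mcp.cli.cloud import _fetch_control_image_from_metadata

    # Resolves to something like "airbyte/source-github:<dockerImageTag>",
    # where the tag is whatever metadata.yaml on master carries at call time.
    image = _fetch_control_image_from_metadata("source-github")
    if image is None:
        raise SystemExit("metadata.yaml lookup failed; see printed error above")
    print(image)

Note that the helper returns None rather than raising, which is why regression_test can still reach its explicit "No control image specified" error path.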
airbyte_ops_mcp/cloud_admin/auth.py CHANGED

@@ -41,6 +41,20 @@ def check_internal_admin_flag() -> bool:
     return bool(admin_user and EXPECTED_ADMIN_EMAIL_DOMAIN in admin_user)


+def check_internal_admin_flag_only() -> bool:
+    """Check if internal admin flag is set (without requiring user email env var).
+
+    This is a lighter check that only validates AIRBYTE_INTERNAL_ADMIN_FLAG,
+    allowing the admin user email to be provided as a parameter instead of
+    an environment variable.
+
+    Returns:
+        True if AIRBYTE_INTERNAL_ADMIN_FLAG is set correctly, False otherwise
+    """
+    admin_flag = os.environ.get(ENV_AIRBYTE_INTERNAL_ADMIN_FLAG)
+    return admin_flag == EXPECTED_ADMIN_FLAG_VALUE
+
+
 def require_internal_admin() -> None:
     """Require internal admin access for the current operation.

@@ -59,6 +73,24 @@ def require_internal_admin() -> None:
     )


+def require_internal_admin_flag_only() -> None:
+    """Require internal admin flag for the current operation.
+
+    This is a lighter check that only validates AIRBYTE_INTERNAL_ADMIN_FLAG,
+    allowing the admin user email to be provided as a parameter instead of
+    an environment variable.
+
+    Raises:
+        CloudAuthError: If AIRBYTE_INTERNAL_ADMIN_FLAG is not properly configured
+    """
+    if not check_internal_admin_flag_only():
+        raise CloudAuthError(
+            "This operation requires internal admin access. "
+            f"Set {ENV_AIRBYTE_INTERNAL_ADMIN_FLAG}={EXPECTED_ADMIN_FLAG_VALUE} "
+            "environment variable."
+        )
+
+
 def get_admin_user_email() -> str:
     """Get the admin user email from environment.
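
To make the intent of the two auth variants concrete, here is a minimal sketch of how callers might choose between them. The two entry points below are hypothetical; only the imported functions and the env var names come from this diff:

    from airbyte_ops_mcp.cloud_admin.auth import (
        require_internal_admin,
        require_internal_admin_flag_only,
    )

    def http_tool_handler(admin_user_email: str) -> None:
        # Hypothetical MCP/HTTP path: the admin email arrives as a parameter
        # (e.g., from a request header), so only AIRBYTE_INTERNAL_ADMIN_FLAG
        # is validated from the environment.
        require_internal_admin_flag_only()

    def cli_command() -> None:
        # CLI path: both AIRBYTE_INTERNAL_ADMIN_FLAG and
        # AIRBYTE_INTERNAL_ADMIN_USER must be set in the environment.
        require_internal_admin()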
airbyte_ops_mcp/constants.py CHANGED

@@ -20,6 +20,24 @@ ENV_GCP_PROD_DB_ACCESS_CREDENTIALS = "GCP_PROD_DB_ACCESS_CREDENTIALS"
 EXPECTED_ADMIN_FLAG_VALUE = "airbyte.io"
 EXPECTED_ADMIN_EMAIL_DOMAIN = "@airbyte.io"

+# =============================================================================
+# HTTP Header Names for Airbyte Cloud Authentication
+# =============================================================================
+# These headers follow the PyAirbyte convention for passing credentials
+# via HTTP when running as an MCP HTTP server.
+
+HEADER_AIRBYTE_CLOUD_CLIENT_ID = "X-Airbyte-Cloud-Client-Id"
+"""HTTP header for OAuth client ID."""
+
+HEADER_AIRBYTE_CLOUD_CLIENT_SECRET = "X-Airbyte-Cloud-Client-Secret"
+"""HTTP header for OAuth client secret."""
+
+HEADER_AIRBYTE_CLOUD_WORKSPACE_ID = "X-Airbyte-Cloud-Workspace-Id"
+"""HTTP header for default workspace ID."""
+
+HEADER_AIRBYTE_CLOUD_API_URL = "X-Airbyte-Cloud-Api-Url"
+"""HTTP header for API root URL override."""
+
 # =============================================================================
 # GCP and Prod DB Constants (from connection-retriever)
 # =============================================================================
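
Since the new _http_headers.py module is not expanded in this diff, here is a hedged sketch of how these constants could be consumed when the MCP server runs over HTTP. The credentials_from_headers helper is a hypothetical stand-in, not the module's actual API:

    from airbyte_ops_mcp.constants import (
        HEADER_AIRBYTE_CLOUD_API_URL,
        HEADER_AIRBYTE_CLOUD_CLIENT_ID,
        HEADER_AIRBYTE_CLOUD_CLIENT_SECRET,
        HEADER_AIRBYTE_CLOUD_WORKSPACE_ID,
    )

    def credentials_from_headers(headers: dict[str, str]) -> dict[str, str | None]:
        # HTTP header names are case-insensitive, so normalize before lookup.
        lowered = {k.lower(): v for k, v in headers.items()}
        return {
            "client_id": lowered.get(HEADER_AIRBYTE_CLOUD_CLIENT_ID.lower()),
            "client_secret": lowered.get(HEADER_AIRBYTE_CLOUD_CLIENT_SECRET.lower()),
            "workspace_id": lowered.get(HEADER_AIRBYTE_CLOUD_WORKSPACE_ID.lower()),
            "api_url": lowered.get(HEADER_AIRBYTE_CLOUD_API_URL.lower()),
        }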
airbyte_ops_mcp/github_actions.py ADDED

@@ -0,0 +1,218 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""GitHub Actions API utilities.
+
+This module provides core utilities for interacting with GitHub Actions workflows,
+including workflow dispatch, run discovery, and authentication. These utilities
+are used by MCP tools but are not MCP-specific.
+"""
+
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+import time
+from dataclasses import dataclass
+from datetime import datetime, timedelta
+
+import requests
+
+GITHUB_API_BASE = "https://api.github.com"
+
+
+def resolve_github_token(preferred_env_vars: list[str] | None = None) -> str:
+    """Resolve GitHub token from environment variables or gh CLI.
+
+    Checks environment variables in order of preference, returning the first
+    non-empty value found. If no environment variables are set, attempts to
+    get a token from the gh CLI tool using 'gh auth token'.
+
+    Args:
+        preferred_env_vars: List of environment variable names to check in order.
+            Defaults to ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"].
+
+    Returns:
+        GitHub token string.
+
+    Raises:
+        ValueError: If no GitHub token is found in env vars or gh CLI.
+    """
+    if preferred_env_vars is None:
+        preferred_env_vars = ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"]
+
+    # Check environment variables first
+    for env_var in preferred_env_vars:
+        token = os.getenv(env_var)
+        if token:
+            return token
+
+    # Fall back to gh CLI if available
+    gh_path = shutil.which("gh")
+    if gh_path:
+        try:
+            result = subprocess.run(
+                [gh_path, "auth", "token"],
+                capture_output=True,
+                text=True,
+                timeout=5,
+                check=False,
+            )
+            if result.returncode == 0 and result.stdout.strip():
+                return result.stdout.strip()
+        except (subprocess.TimeoutExpired, subprocess.SubprocessError):
+            pass
+
+    env_var_list = ", ".join(preferred_env_vars)
+    raise ValueError(
+        f"No GitHub token found. Set one of: {env_var_list} environment variable, "
+        "or authenticate with 'gh auth login'."
+    )
+
+
+@dataclass
+class WorkflowDispatchResult:
+    """Result of triggering a workflow dispatch."""
+
+    workflow_url: str
+    """URL to the workflow file (e.g., .../actions/workflows/my-workflow.yml)"""
+
+    run_id: int | None = None
+    """GitHub Actions run ID, if discovered"""
+
+    run_url: str | None = None
+    """Direct URL to the workflow run, if discovered"""
+
+
+def find_workflow_run(
+    owner: str,
+    repo: str,
+    workflow_file: str,
+    ref: str,
+    token: str,
+    created_after: datetime,
+    max_wait_seconds: float = 5.0,
+) -> tuple[int, str] | None:
+    """Find a workflow run that was created after a given time.
+
+    This is used to find the run that was just triggered via workflow_dispatch.
+    Polls for up to max_wait_seconds to handle GitHub API eventual consistency.
+
+    Args:
+        owner: Repository owner
+        repo: Repository name
+        workflow_file: Workflow file name
+        ref: Git ref the workflow was triggered on
+        token: GitHub API token
+        created_after: Only consider runs created after this time
+        max_wait_seconds: Maximum time to wait for run to appear (default 5 seconds)
+
+    Returns:
+        Tuple of (run_id, run_url) if found, None otherwise.
+    """
+    url = (
+        f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}/runs"
+    )
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+    params = {
+        "branch": ref,
+        "event": "workflow_dispatch",
+        "per_page": 5,
+    }
+
+    # Add a small buffer to handle timestamp precision differences between
+    # local time and GitHub's created_at (which has second resolution)
+    search_after = created_after - timedelta(seconds=2)
+
+    deadline = time.monotonic() + max_wait_seconds
+    attempt = 0
+
+    while time.monotonic() < deadline:
+        if attempt > 0:
+            time.sleep(1.0)
+        attempt += 1
+
+        response = requests.get(url, headers=headers, params=params, timeout=30)
+        if not response.ok:
+            continue
+
+        data = response.json()
+        runs = data.get("workflow_runs", [])
+
+        for run in runs:
+            run_created_at = datetime.fromisoformat(
+                run["created_at"].replace("Z", "+00:00")
+            )
+            if run_created_at >= search_after:
+                return run["id"], run["html_url"]
+
+    return None
+
+
+def trigger_workflow_dispatch(
+    owner: str,
+    repo: str,
+    workflow_file: str,
+    ref: str,
+    inputs: dict,
+    token: str,
+    find_run: bool = True,
+    max_wait_seconds: float = 5.0,
+) -> WorkflowDispatchResult:
+    """Trigger a GitHub Actions workflow via workflow_dispatch.
+
+    Args:
+        owner: Repository owner (e.g., "airbytehq")
+        repo: Repository name (e.g., "airbyte-ops-mcp")
+        workflow_file: Workflow file name (e.g., "connector-live-test.yml")
+        ref: Git ref to run the workflow on (branch name)
+        inputs: Workflow inputs dictionary
+        token: GitHub API token
+        find_run: Whether to attempt to find the run after dispatch (default True)
+        max_wait_seconds: Maximum time to wait for run discovery (default 5 seconds)
+
+    Returns:
+        WorkflowDispatchResult with workflow URL and optionally run ID/URL.
+
+    Raises:
+        requests.HTTPError: If API request fails.
+    """
+    dispatch_time = datetime.now(tz=datetime.now().astimezone().tzinfo)
+
+    url = f"{GITHUB_API_BASE}/repos/{owner}/{repo}/actions/workflows/{workflow_file}/dispatches"
+    headers = {
+        "Authorization": f"Bearer {token}",
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+    payload = {
+        "ref": ref,
+        "inputs": inputs,
+    }
+
+    response = requests.post(url, headers=headers, json=payload, timeout=30)
+    response.raise_for_status()
+
+    workflow_url = (
+        f"https://github.com/{owner}/{repo}/actions/workflows/{workflow_file}"
+    )
+
+    if not find_run:
+        return WorkflowDispatchResult(workflow_url=workflow_url)
+
+    # Best-effort lookup of the run that was just triggered
+    run_info = find_workflow_run(
+        owner, repo, workflow_file, ref, token, dispatch_time, max_wait_seconds
+    )
+    if run_info:
+        run_id, run_url = run_info
+        return WorkflowDispatchResult(
+            workflow_url=workflow_url,
+            run_id=run_id,
+            run_url=run_url,
+        )
+
+    return WorkflowDispatchResult(workflow_url=workflow_url)