airbyte-internal-ops 0.1.11__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.0.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.0.dist-info}/RECORD +16 -15
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.0.dist-info}/entry_points.txt +1 -0
- airbyte_ops_mcp/cli/cloud.py +62 -0
- airbyte_ops_mcp/cloud_admin/auth.py +32 -0
- airbyte_ops_mcp/constants.py +18 -0
- airbyte_ops_mcp/github_actions.py +25 -4
- airbyte_ops_mcp/mcp/_http_headers.py +198 -0
- airbyte_ops_mcp/mcp/cloud_connector_versions.py +118 -22
- airbyte_ops_mcp/mcp/live_tests.py +9 -7
- airbyte_ops_mcp/mcp/prod_db_queries.py +67 -24
- airbyte_ops_mcp/mcp/server.py +81 -8
- airbyte_ops_mcp/prod_db_access/db_engine.py +8 -0
- airbyte_ops_mcp/prod_db_access/queries.py +27 -15
- airbyte_ops_mcp/prod_db_access/sql.py +17 -16
- {airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.0.dist-info}/WHEEL +0 -0

{airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.0.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
 airbyte_ops_mcp/__init__.py,sha256=HhzURuYr29_UIdMrnWYaZB8ENr_kFkBdm4uqeiIW3Vw,760
 airbyte_ops_mcp/_annotations.py,sha256=MO-SBDnbykxxHDESG7d8rviZZ4WlZgJKv0a8eBqcEzQ,1757
-airbyte_ops_mcp/constants.py,sha256=
+airbyte_ops_mcp/constants.py,sha256=THmvIjU3pb7kpNjn7TpRWD86gtDLmtlQwYuFnaQp_rg,3095
 airbyte_ops_mcp/gcp_auth.py,sha256=5k-k145ZoYhHLjyDES8nrA8f8BBihRI0ykrdD1IcfOs,3599
-airbyte_ops_mcp/github_actions.py,sha256=
+airbyte_ops_mcp/github_actions.py,sha256=KwpQ0BrmCa6wiGRmSFGcFN-yIdlzLXN8kUxpi1ME3Tc,6740
 airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_ops_mcp/_legacy/airbyte_ci/README.md,sha256=qEYx4geDR8AEDjrcA303h7Nol-CMDLojxUyiGzQprM8,236
@@ -352,13 +352,13 @@ airbyte_ops_mcp/cli/__init__.py,sha256=XpL7FyVfgabfBF2JR7u7NwJ2krlYqjd_OwLcWf-Xc
 airbyte_ops_mcp/cli/_base.py,sha256=I8tWnyQf0ks4r3J8N8h-5GZxyn37T-55KsbuHnxYlcg,415
 airbyte_ops_mcp/cli/_shared.py,sha256=jg-xMyGzTCGPqKd8VTfE_3kGPIyO_3Kx5sQbG4rPc0Y,1311
 airbyte_ops_mcp/cli/app.py,sha256=SEdBpqFUG2O8zGV5ifwptxrLGFph_dLr66-MX9d69gQ,789
-airbyte_ops_mcp/cli/cloud.py,sha256=
+airbyte_ops_mcp/cli/cloud.py,sha256=Vv1nAXGPQlpKvDbKJ2cg86yGpkRlOZtHy0cWI_-dYJA,45116
 airbyte_ops_mcp/cli/gh.py,sha256=91b1AxFXvHQCFyXhrrym-756ZjnMCqvxFdmwCtma1zI,2046
 airbyte_ops_mcp/cli/registry.py,sha256=-yiLJWSslV_qGi6ImXZYfXOJSE4oJBO7yICkyA_RiUo,5792
 airbyte_ops_mcp/cli/repo.py,sha256=G1hoQpH0XYhUH3FFOsia9xabGB0LP9o3XcwBuqvFVo0,16331
 airbyte_ops_mcp/cloud_admin/__init__.py,sha256=cqE96Q10Kp6elhH9DAi6TVsIwSUy3sooDLLrxTaktGk,816
 airbyte_ops_mcp/cloud_admin/api_client.py,sha256=6PovHDwOzo4fxSyk6viwvnXjCRIiC4uPZo0pGMx0Bdk,17359
-airbyte_ops_mcp/cloud_admin/auth.py,sha256=
+airbyte_ops_mcp/cloud_admin/auth.py,sha256=qE2Aqe0qbZB755KscL65s54Jz78-F-X5a8fXKsrYEOQ,3749
 airbyte_ops_mcp/cloud_admin/connection_config.py,sha256=UtbIwuB7CA3WJr9oYRwlKDsjciqd_9ewWdml2f8DuXw,4887
 airbyte_ops_mcp/cloud_admin/models.py,sha256=YZ3FbEW-tZa50khKTTl4Bzvy_LsGyyQd6qcpXo62jls,2670
 airbyte_ops_mcp/connection_config_retriever/__init__.py,sha256=Xoi-YvARrNPhECdpwEDDkdwEpnvj8zuUlwULpf4iRrU,800
@@ -387,29 +387,30 @@ airbyte_ops_mcp/live_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdP
 airbyte_ops_mcp/live_tests/validation/record_validators.py,sha256=-7Ir2LWGCrtadK2JLuBgppSyk0RFJX6Nsy0lrabtwrs,7411
 airbyte_ops_mcp/mcp/__init__.py,sha256=QqkNkxzdXlg-W03urBAQ3zmtOKFPf35rXgO9ceUjpng,334
 airbyte_ops_mcp/mcp/_guidance.py,sha256=48tQSnDnxqXtyGJxxgjz0ZiI814o_7Fj7f6R8jpQ7so,2375
+airbyte_ops_mcp/mcp/_http_headers.py,sha256=NfrbxYROOqisZFLjCNDvv7wFsFHDBzwr6l0U6xs209M,5563
 airbyte_ops_mcp/mcp/_mcp_utils.py,sha256=nhztHcoc-_ASPpJfoDBjxjjqEvQM6_QIrhp7F2UCrQk,11494
-airbyte_ops_mcp/mcp/cloud_connector_versions.py,sha256=
+airbyte_ops_mcp/mcp/cloud_connector_versions.py,sha256=Iz0SirqNAJigdyei-Qqi059OFxixt0VvXdC5CVBXZHc,14331
 airbyte_ops_mcp/mcp/connector_analysis.py,sha256=OC4KrOSkMkKPkOisWnSv96BDDE5TQYHq-Jxa2vtjJpo,298
 airbyte_ops_mcp/mcp/connector_qa.py,sha256=aImpqdnqBPDrz10BS0owsV4kuIU2XdalzgbaGZsbOL0,258
 airbyte_ops_mcp/mcp/github.py,sha256=Wum5V99A9vTsjK0YVoE1UOVu75F-M9chg0AnUGkiiT4,7215
 airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
-airbyte_ops_mcp/mcp/live_tests.py,sha256=
+airbyte_ops_mcp/mcp/live_tests.py,sha256=8Nh0jZ9Un_jzAGJf88POgRVxJsomh8TVPyGhDKltx3Y,17158
 airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
 airbyte_ops_mcp/mcp/prerelease.py,sha256=LHLaSd8q0l7boAsVqTXOjFGDxAGsPZdtL3kj5_IOTEE,8852
-airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=
+airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=_eNMFM1CBQ4OM_daf2iq-L7lvlytqbI_6v48m5vJdSQ,15632
 airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
 airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
-airbyte_ops_mcp/mcp/server.py,sha256
+airbyte_ops_mcp/mcp/server.py,sha256=-nMufnrpE55urarz0FTi7tG_WGgdqpCk9KnxbK-78xs,5184
 airbyte_ops_mcp/mcp/server_info.py,sha256=Yi4B1auW64QZGBDas5mro_vwTjvrP785TFNSBP7GhRg,2361
 airbyte_ops_mcp/prod_db_access/__init__.py,sha256=5pxouMPY1beyWlB0UwPnbaLTKTHqU6X82rbbgKY2vYU,1069
-airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=
+airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=sG_yXRsP_KAEndJmiaooPk-BS-AHEdS-M2Cas0CrXzc,9384
 airbyte_ops_mcp/prod_db_access/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airbyte_ops_mcp/prod_db_access/queries.py,sha256=
-airbyte_ops_mcp/prod_db_access/sql.py,sha256=
+airbyte_ops_mcp/prod_db_access/queries.py,sha256=txeqRPbovgqbk7lu8ttiZXgA77abFzzeO3hql2o8c44,11228
+airbyte_ops_mcp/prod_db_access/sql.py,sha256=P6UbIHafg3ibs901DPlJxLilxsc-RrCPvnyzSwP-fMw,16300
 airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
 airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
 airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
-airbyte_internal_ops-0.1.11.dist-info/METADATA,sha256=
-airbyte_internal_ops-0.1.11.dist-info/WHEEL,sha256=
-airbyte_internal_ops-0.1.11.dist-info/entry_points.txt,sha256=
-airbyte_internal_ops-0.1.11.dist-info/RECORD,,
+airbyte_internal_ops-0.2.0.dist-info/METADATA,sha256=rakGRwvZx1XV9JszUAAJo9nx_ayyP7NJvC7P3mzK9Tk,5282
+airbyte_internal_ops-0.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+airbyte_internal_ops-0.2.0.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+airbyte_internal_ops-0.2.0.dist-info/RECORD,,
airbyte_ops_mcp/cli/cloud.py
CHANGED

@@ -59,6 +59,10 @@ from airbyte_ops_mcp.live_tests.connection_fetcher import (
     fetch_connection_data,
     save_connection_data_to_files,
 )
+from airbyte_ops_mcp.live_tests.connection_secret_retriever import (
+    enrich_config_with_secrets,
+    should_use_secret_retriever,
+)
 from airbyte_ops_mcp.live_tests.connector_runner import (
     ConnectorRunner,
     ensure_image_available,
@@ -318,6 +322,20 @@ def set_version_override(
         str,
         Parameter(help="Explanation for the override (min 10 characters)."),
     ],
+    issue_url: Annotated[
+        str,
+        Parameter(help="GitHub issue URL providing context for this operation."),
+    ],
+    approval_comment_url: Annotated[
+        str,
+        Parameter(help="GitHub comment URL where admin authorized this deployment."),
+    ],
+    ai_agent_session_url: Annotated[
+        str | None,
+        Parameter(
+            help="URL to AI agent session driving this operation (for auditability)."
+        ),
+    ] = None,
     reason_url: Annotated[
         str | None,
         Parameter(help="Optional URL with more context (e.g., issue link)."),
@@ -328,6 +346,7 @@ def set_version_override(
     Requires admin authentication via AIRBYTE_INTERNAL_ADMIN_FLAG and
     AIRBYTE_INTERNAL_ADMIN_USER environment variables.
     """
+    admin_user_email = os.environ.get("AIRBYTE_INTERNAL_ADMIN_USER")
     result = set_cloud_connector_version_override(
         workspace_id=workspace_id,
         actor_id=connector_id,
@@ -336,6 +355,10 @@ def set_version_override(
         unset=False,
         override_reason=reason,
         override_reason_reference_url=reason_url,
+        admin_user_email=admin_user_email,
+        issue_url=issue_url,
+        approval_comment_url=approval_comment_url,
+        ai_agent_session_url=ai_agent_session_url,
     )
     if result.success:
         print_success(result.message)
@@ -358,12 +381,27 @@ def clear_version_override(
         Literal["source", "destination"],
         Parameter(help="The type of connector."),
     ],
+    issue_url: Annotated[
+        str,
+        Parameter(help="GitHub issue URL providing context for this operation."),
+    ],
+    approval_comment_url: Annotated[
+        str,
+        Parameter(help="GitHub comment URL where admin authorized this deployment."),
+    ],
+    ai_agent_session_url: Annotated[
+        str | None,
+        Parameter(
+            help="URL to AI agent session driving this operation (for auditability)."
+        ),
+    ] = None,
 ) -> None:
     """Clear a version override from a deployed connector.

     Requires admin authentication via AIRBYTE_INTERNAL_ADMIN_FLAG and
     AIRBYTE_INTERNAL_ADMIN_USER environment variables.
     """
+    admin_user_email = os.environ.get("AIRBYTE_INTERNAL_ADMIN_USER")
     result = set_cloud_connector_version_override(
         workspace_id=workspace_id,
         actor_id=connector_id,
@@ -372,6 +410,10 @@ def clear_version_override(
         unset=True,
         override_reason=None,
         override_reason_reference_url=None,
+        admin_user_email=admin_user_email,
+        issue_url=issue_url,
+        approval_comment_url=approval_comment_url,
+        ai_agent_session_url=ai_agent_session_url,
     )
     if result.success:
         print_success(result.message)
@@ -941,6 +983,26 @@ def regression_test(

     print_success(f"Fetching config/catalog from connection: {connection_id}")
     connection_data = fetch_connection_data(connection_id)
+
+    # Check if we should retrieve unmasked secrets
+    if should_use_secret_retriever():
+        print_success(
+            "USE_CONNECTION_SECRET_RETRIEVER enabled - enriching config with unmasked secrets..."
+        )
+        try:
+            connection_data = enrich_config_with_secrets(
+                connection_data,
+                retrieval_reason="Regression test with USE_CONNECTION_SECRET_RETRIEVER=true",
+            )
+            print_success("Successfully retrieved unmasked secrets from database")
+        except Exception as e:
+            print_error(f"Failed to retrieve unmasked secrets: {e}")
+            print_error(
+                "Proceeding with masked config from public API - tests may fail due to masked credentials. "
+                "If you expected unmasked secrets, verify that the USE_CONNECTION_SECRET_RETRIEVER flag is enabled "
+                f"and that the {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS} environment variable is set with valid database credentials."
+            )
+
     config_file, catalog_file = save_connection_data_to_files(
         connection_data, output_path / "connection_data"
     )
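
The two override commands now thread a set of audit fields through to the underlying helper. A minimal sketch of the resulting call, assuming the 0.2.0 wheel is installed; the UUIDs and URLs below are placeholders, and the `actor_type` keyword is inferred from the surrounding hunks:

```python
import os

from airbyte_ops_mcp.mcp.cloud_connector_versions import (
    set_cloud_connector_version_override,
)

result = set_cloud_connector_version_override(
    workspace_id="00000000-0000-0000-0000-000000000000",  # placeholder UUID
    actor_id="00000000-0000-0000-0000-000000000001",  # placeholder UUID
    actor_type="source",
    version="1.2.3",
    unset=False,
    override_reason="Customer escalation: pin while a fix ships",
    override_reason_reference_url=None,
    # New audit fields: the CLI reads the admin email from the environment
    # and requires the two GitHub URLs as explicit parameters.
    admin_user_email=os.environ.get("AIRBYTE_INTERNAL_ADMIN_USER"),
    issue_url="https://github.com/airbytehq/airbyte/issues/1",  # placeholder
    approval_comment_url="https://github.com/airbytehq/airbyte/issues/1#issuecomment-1",  # placeholder
    ai_agent_session_url=None,
)
print(result.message)
```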

airbyte_ops_mcp/cloud_admin/auth.py
CHANGED

@@ -41,6 +41,20 @@ def check_internal_admin_flag() -> bool:
     return bool(admin_user and EXPECTED_ADMIN_EMAIL_DOMAIN in admin_user)


+def check_internal_admin_flag_only() -> bool:
+    """Check if internal admin flag is set (without requiring user email env var).
+
+    This is a lighter check that only validates AIRBYTE_INTERNAL_ADMIN_FLAG,
+    allowing the admin user email to be provided as a parameter instead of
+    an environment variable.
+
+    Returns:
+        True if AIRBYTE_INTERNAL_ADMIN_FLAG is set correctly, False otherwise
+    """
+    admin_flag = os.environ.get(ENV_AIRBYTE_INTERNAL_ADMIN_FLAG)
+    return admin_flag == EXPECTED_ADMIN_FLAG_VALUE
+
+
 def require_internal_admin() -> None:
     """Require internal admin access for the current operation.

@@ -59,6 +73,24 @@ def require_internal_admin() -> None:
     )


+def require_internal_admin_flag_only() -> None:
+    """Require internal admin flag for the current operation.
+
+    This is a lighter check that only validates AIRBYTE_INTERNAL_ADMIN_FLAG,
+    allowing the admin user email to be provided as a parameter instead of
+    an environment variable.
+
+    Raises:
+        CloudAuthError: If AIRBYTE_INTERNAL_ADMIN_FLAG is not properly configured
+    """
+    if not check_internal_admin_flag_only():
+        raise CloudAuthError(
+            "This operation requires internal admin access. "
+            f"Set {ENV_AIRBYTE_INTERNAL_ADMIN_FLAG}={EXPECTED_ADMIN_FLAG_VALUE} "
+            "environment variable."
+        )
+
+
 def get_admin_user_email() -> str:
     """Get the admin user email from environment.

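
The flag-only variants split the old single gate in two: the env-var flag is still checked up front, while the admin email can arrive later as a tool parameter. A small sketch under that assumption:

```python
import os

# Assumption: these values match ENV_AIRBYTE_INTERNAL_ADMIN_FLAG /
# EXPECTED_ADMIN_FLAG_VALUE as defined in airbyte_ops_mcp/constants.py.
os.environ["AIRBYTE_INTERNAL_ADMIN_FLAG"] = "airbyte.io"

from airbyte_ops_mcp.cloud_admin.auth import (
    check_internal_admin_flag_only,
    require_internal_admin_flag_only,
)

assert check_internal_admin_flag_only()  # no AIRBYTE_INTERNAL_ADMIN_USER needed
require_internal_admin_flag_only()  # raises CloudAuthError if the flag is unset or wrong
```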
airbyte_ops_mcp/constants.py
CHANGED

@@ -20,6 +20,24 @@ ENV_GCP_PROD_DB_ACCESS_CREDENTIALS = "GCP_PROD_DB_ACCESS_CREDENTIALS"
 EXPECTED_ADMIN_FLAG_VALUE = "airbyte.io"
 EXPECTED_ADMIN_EMAIL_DOMAIN = "@airbyte.io"

+# =============================================================================
+# HTTP Header Names for Airbyte Cloud Authentication
+# =============================================================================
+# These headers follow the PyAirbyte convention for passing credentials
+# via HTTP when running as an MCP HTTP server.
+
+HEADER_AIRBYTE_CLOUD_CLIENT_ID = "X-Airbyte-Cloud-Client-Id"
+"""HTTP header for OAuth client ID."""
+
+HEADER_AIRBYTE_CLOUD_CLIENT_SECRET = "X-Airbyte-Cloud-Client-Secret"
+"""HTTP header for OAuth client secret."""
+
+HEADER_AIRBYTE_CLOUD_WORKSPACE_ID = "X-Airbyte-Cloud-Workspace-Id"
+"""HTTP header for default workspace ID."""
+
+HEADER_AIRBYTE_CLOUD_API_URL = "X-Airbyte-Cloud-Api-Url"
+"""HTTP header for API root URL override."""
+
 # =============================================================================
 # GCP and Prod DB Constants (from connection-retriever)
 # =============================================================================

airbyte_ops_mcp/github_actions.py
CHANGED

@@ -9,6 +9,8 @@ are used by MCP tools but are not MCP-specific.
 from __future__ import annotations

 import os
+import shutil
+import subprocess
 import time
 from dataclasses import dataclass
 from datetime import datetime, timedelta
@@ -19,10 +21,11 @@ GITHUB_API_BASE = "https://api.github.com"


 def resolve_github_token(preferred_env_vars: list[str] | None = None) -> str:
-    """Resolve GitHub token from environment variables.
+    """Resolve GitHub token from environment variables or gh CLI.

     Checks environment variables in order of preference, returning the first
-    non-empty value found.
+    non-empty value found. If no environment variables are set, attempts to
+    get a token from the gh CLI tool using 'gh auth token'.

     Args:
         preferred_env_vars: List of environment variable names to check in order.
@@ -32,19 +35,37 @@ def resolve_github_token(preferred_env_vars: list[str] | None = None) -> str:
         GitHub token string.

     Raises:
-        ValueError: If no GitHub token is found in
+        ValueError: If no GitHub token is found in env vars or gh CLI.
     """
     if preferred_env_vars is None:
         preferred_env_vars = ["GITHUB_CI_WORKFLOW_TRIGGER_PAT", "GITHUB_TOKEN"]

+    # Check environment variables first
     for env_var in preferred_env_vars:
         token = os.getenv(env_var)
         if token:
             return token

+    # Fall back to gh CLI if available
+    gh_path = shutil.which("gh")
+    if gh_path:
+        try:
+            result = subprocess.run(
+                [gh_path, "auth", "token"],
+                capture_output=True,
+                text=True,
+                timeout=5,
+                check=False,
+            )
+            if result.returncode == 0 and result.stdout.strip():
+                return result.stdout.strip()
+        except (subprocess.TimeoutExpired, subprocess.SubprocessError):
+            pass
+
     env_var_list = ", ".join(preferred_env_vars)
     raise ValueError(
-        f"No GitHub token found. Set one of: {env_var_list} environment variable
+        f"No GitHub token found. Set one of: {env_var_list} environment variable, "
+        "or authenticate with 'gh auth login'."
     )

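
With the fallback in place, local use no longer needs a token env var if the gh CLI is already authenticated; a usage sketch:

```python
from airbyte_ops_mcp.github_actions import resolve_github_token

try:
    # Checks GITHUB_CI_WORKFLOW_TRIGGER_PAT, then GITHUB_TOKEN,
    # then falls back to `gh auth token` if the gh CLI is on PATH.
    token = resolve_github_token()
except ValueError as err:
    print(f"Not authenticated: {err}")
```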

airbyte_ops_mcp/mcp/_http_headers.py
ADDED

@@ -0,0 +1,198 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""HTTP header extraction for Airbyte Cloud credentials.
+
+This module provides internal helper functions for extracting Airbyte Cloud
+authentication credentials from HTTP headers when running as an MCP HTTP server.
+This enables per-request credential passing from upstream services like coral-agents.
+
+The resolution order for credentials is:
+1. HTTP headers (when running as MCP HTTP server)
+2. Environment variables (fallback)
+
+Note: This module is prefixed with "_" to indicate it is internal helper logic
+for the MCP module and should not be imported directly by external code.
+"""
+
+from __future__ import annotations
+
+from airbyte.cloud.auth import (
+    resolve_cloud_api_url,
+    resolve_cloud_client_id,
+    resolve_cloud_client_secret,
+    resolve_cloud_workspace_id,
+)
+from airbyte.secrets.base import SecretString
+from fastmcp.server.dependencies import get_http_headers
+
+from airbyte_ops_mcp.constants import (
+    HEADER_AIRBYTE_CLOUD_API_URL,
+    HEADER_AIRBYTE_CLOUD_CLIENT_ID,
+    HEADER_AIRBYTE_CLOUD_CLIENT_SECRET,
+    HEADER_AIRBYTE_CLOUD_WORKSPACE_ID,
+)
+
+
+def _get_header_value(headers: dict[str, str], header_name: str) -> str | None:
+    """Get a header value from a headers dict, case-insensitively.
+
+    Args:
+        headers: Dictionary of HTTP headers.
+        header_name: The header name to look for (case-insensitive).
+
+    Returns:
+        The header value if found, None otherwise.
+    """
+    header_name_lower = header_name.lower()
+    for key, value in headers.items():
+        if key.lower() == header_name_lower:
+            return value
+    return None
+
+
+def get_client_id_from_headers() -> SecretString | None:
+    """Extract client ID from HTTP headers.
+
+    Returns:
+        The client ID as a SecretString, or None if not found or not in HTTP context.
+    """
+    headers = get_http_headers()
+    if not headers:
+        return None
+
+    value = _get_header_value(headers, HEADER_AIRBYTE_CLOUD_CLIENT_ID)
+    if value:
+        return SecretString(value)
+    return None
+
+
+def get_client_secret_from_headers() -> SecretString | None:
+    """Extract client secret from HTTP headers.
+
+    Returns:
+        The client secret as a SecretString, or None if not found or not in HTTP context.
+    """
+    headers = get_http_headers()
+    if not headers:
+        return None
+
+    value = _get_header_value(headers, HEADER_AIRBYTE_CLOUD_CLIENT_SECRET)
+    if value:
+        return SecretString(value)
+    return None
+
+
+def get_workspace_id_from_headers() -> str | None:
+    """Extract workspace ID from HTTP headers.
+
+    Returns:
+        The workspace ID, or None if not found or not in HTTP context.
+    """
+    headers = get_http_headers()
+    if not headers:
+        return None
+
+    return _get_header_value(headers, HEADER_AIRBYTE_CLOUD_WORKSPACE_ID)
+
+
+def get_api_url_from_headers() -> str | None:
+    """Extract API URL from HTTP headers.
+
+    Returns:
+        The API URL, or None if not found or not in HTTP context.
+    """
+    headers = get_http_headers()
+    if not headers:
+        return None
+
+    return _get_header_value(headers, HEADER_AIRBYTE_CLOUD_API_URL)
+
+
+def resolve_client_id() -> SecretString:
+    """Resolve client ID from HTTP headers or environment variables.
+
+    Resolution order:
+    1. HTTP header X-Airbyte-Cloud-Client-Id
+    2. Environment variable AIRBYTE_CLOUD_CLIENT_ID (via PyAirbyte)
+
+    Returns:
+        The resolved client ID as a SecretString.
+
+    Raises:
+        PyAirbyteSecretNotFoundError: If no client ID can be resolved.
+    """
+    header_value = get_client_id_from_headers()
+    if header_value:
+        return header_value
+
+    return resolve_cloud_client_id()
+
+
+def resolve_client_secret() -> SecretString:
+    """Resolve client secret from HTTP headers or environment variables.
+
+    Resolution order:
+    1. HTTP header X-Airbyte-Cloud-Client-Secret
+    2. Environment variable AIRBYTE_CLOUD_CLIENT_SECRET (via PyAirbyte)
+
+    Returns:
+        The resolved client secret as a SecretString.
+
+    Raises:
+        PyAirbyteSecretNotFoundError: If no client secret can be resolved.
+    """
+    header_value = get_client_secret_from_headers()
+    if header_value:
+        return header_value
+
+    return resolve_cloud_client_secret()
+
+
+def resolve_workspace_id(workspace_id: str | None = None) -> str:
+    """Resolve workspace ID from multiple sources.
+
+    Resolution order:
+    1. Explicit workspace_id parameter (if provided)
+    2. HTTP header X-Airbyte-Cloud-Workspace-Id
+    3. Environment variable AIRBYTE_CLOUD_WORKSPACE_ID (via PyAirbyte)
+
+    Args:
+        workspace_id: Optional explicit workspace ID.
+
+    Returns:
+        The resolved workspace ID.
+
+    Raises:
+        PyAirbyteSecretNotFoundError: If no workspace ID can be resolved.
+    """
+    if workspace_id is not None:
+        return workspace_id
+
+    header_value = get_workspace_id_from_headers()
+    if header_value:
+        return header_value
+
+    return resolve_cloud_workspace_id()
+
+
+def resolve_api_url(api_url: str | None = None) -> str:
+    """Resolve API URL from multiple sources.
+
+    Resolution order:
+    1. Explicit api_url parameter (if provided)
+    2. HTTP header X-Airbyte-Cloud-Api-Url
+    3. Environment variable / default (via PyAirbyte)
+
+    Args:
+        api_url: Optional explicit API URL.
+
+    Returns:
+        The resolved API URL.
+    """
+    if api_url is not None:
+        return api_url
+
+    header_value = get_api_url_from_headers()
+    if header_value:
+        return header_value
+
+    return resolve_cloud_api_url()
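
A usage sketch of the new resolution chain (the module is "_"-prefixed and intended for the MCP layer itself): outside an HTTP request, fastmcp's `get_http_headers()` yields no headers, so every resolver falls through to the PyAirbyte environment-variable path:

```python
from airbyte_ops_mcp.mcp._http_headers import (
    resolve_api_url,
    resolve_workspace_id,
)

# In a plain script (no HTTP request context) these resolve from
# AIRBYTE_CLOUD_WORKSPACE_ID and the PyAirbyte default API root.
workspace_id = resolve_workspace_id()
api_url = resolve_api_url()

# An explicit argument short-circuits both headers and env vars:
assert resolve_workspace_id("my-workspace-id") == "my-workspace-id"
```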

airbyte_ops_mcp/mcp/cloud_connector_versions.py
CHANGED

@@ -15,10 +15,6 @@ from typing import Annotated, Literal

 from airbyte import constants
 from airbyte.cloud import CloudWorkspace
-from airbyte.cloud.auth import (
-    resolve_cloud_client_id,
-    resolve_cloud_client_secret,
-)
 from airbyte.exceptions import PyAirbyteInputError
 from fastmcp import FastMCP
 from pydantic import Field
@@ -26,19 +22,26 @@ from pydantic import Field
 from airbyte_ops_mcp.cloud_admin import api_client
 from airbyte_ops_mcp.cloud_admin.auth import (
     CloudAuthError,
-
-    require_internal_admin,
+    require_internal_admin_flag_only,
 )
 from airbyte_ops_mcp.cloud_admin.models import (
     ConnectorVersionInfo,
     VersionOverrideOperationResult,
 )
+from airbyte_ops_mcp.mcp._http_headers import (
+    resolve_client_id,
+    resolve_client_secret,
+)
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools


 def _get_workspace(workspace_id: str) -> CloudWorkspace:
     """Get a CloudWorkspace instance for the specified workspace.

+    Credentials are resolved in priority order:
+    1. HTTP headers (X-Airbyte-Cloud-Client-Id, X-Airbyte-Cloud-Client-Secret)
+    2. Environment variables (AIRBYTE_CLOUD_CLIENT_ID, AIRBYTE_CLOUD_CLIENT_SECRET)
+
     Args:
         workspace_id: The Airbyte Cloud workspace ID (required).
@@ -46,19 +49,21 @@ def _get_workspace(workspace_id: str) -> CloudWorkspace:
         CloudWorkspace instance configured for the specified workspace.

     Raises:
-        CloudAuthError: If
+        CloudAuthError: If credentials cannot be resolved from headers or env vars.
     """
     try:
         return CloudWorkspace(
             workspace_id=workspace_id,
-            client_id=
-            client_secret=
+            client_id=resolve_client_id(),
+            client_secret=resolve_client_secret(),
             api_root=constants.CLOUD_API_ROOT,  # Used for workspace operations
         )
     except Exception as e:
         raise CloudAuthError(
-            f"Failed to initialize CloudWorkspace. Ensure
-            f"
+            f"Failed to initialize CloudWorkspace. Ensure credentials are provided "
+            f"via HTTP headers (X-Airbyte-Cloud-Client-Id, X-Airbyte-Cloud-Client-Secret) "
+            f"or environment variables (AIRBYTE_CLOUD_CLIENT_ID, AIRBYTE_CLOUD_CLIENT_SECRET). "
+            f"Error: {e}"
         ) from e


@@ -85,8 +90,9 @@ def get_cloud_connector_version(
     Returns version details including the current version string and whether
     an override (pin) is applied.

-
-
+    Authentication credentials are resolved in priority order:
+    1. HTTP headers: X-Airbyte-Cloud-Client-Id, X-Airbyte-Cloud-Client-Secret
+    2. Environment variables: AIRBYTE_CLOUD_CLIENT_ID, AIRBYTE_CLOUD_CLIENT_SECRET
     """
     try:
         workspace = _get_workspace(workspace_id)
@@ -163,11 +169,47 @@ def set_cloud_connector_version_override(
             default=None,
         ),
     ],
+    admin_user_email: Annotated[
+        str | None,
+        Field(
+            description="Email of the admin user authorizing this operation. "
+            "Must be an @airbyte.io email address. Required for authorization.",
+            default=None,
+        ),
+    ],
+    issue_url: Annotated[
+        str | None,
+        Field(
+            description="URL to the GitHub issue providing context for this operation. "
+            "Must be a valid GitHub URL (https://github.com/...). Required for authorization.",
+            default=None,
+        ),
+    ],
+    approval_comment_url: Annotated[
+        str | None,
+        Field(
+            description="URL to a GitHub comment where the admin has explicitly "
+            "requested or authorized this deployment. Must be a valid GitHub comment URL. "
+            "Required for authorization.",
+            default=None,
+        ),
+    ],
+    ai_agent_session_url: Annotated[
+        str | None,
+        Field(
+            description="URL to the AI agent session driving this operation, if applicable. "
+            "Provides additional auditability for AI-driven operations.",
+            default=None,
+        ),
+    ],
 ) -> VersionOverrideOperationResult:
     """Set or clear a version override for a deployed connector.

-    **Admin-only operation** - Requires
-
+    **Admin-only operation** - Requires:
+    - AIRBYTE_INTERNAL_ADMIN_FLAG=airbyte.io environment variable
+    - admin_user_email parameter (must be @airbyte.io email)
+    - issue_url parameter (GitHub issue URL for context)
+    - approval_comment_url parameter (GitHub comment URL with approval)

     You must specify EXACTLY ONE of `version` OR `unset=True`, but not both.
     When setting a version, `override_reason` is required.
@@ -177,13 +219,13 @@ def set_cloud_connector_version_override(
     - Production versions: Require strong justification mentioning customer/support/investigation
     - Release candidates (-rc): Any admin can pin/unpin RC versions

-
-
+    Authentication credentials are resolved in priority order:
+    1. HTTP headers: X-Airbyte-Cloud-Client-Id, X-Airbyte-Cloud-Client-Secret
+    2. Environment variables: AIRBYTE_CLOUD_CLIENT_ID, AIRBYTE_CLOUD_CLIENT_SECRET
     """
-    # Validate admin access
+    # Validate admin access (check env var flag)
     try:
-
-        user_email = get_admin_user_email()
+        require_internal_admin_flag_only()
     except CloudAuthError as e:
         return VersionOverrideOperationResult(
             success=False,
@@ -192,6 +234,60 @@ def set_cloud_connector_version_override(
             connector_id=actor_id,
             connector_type=actor_type,
         )
+
+    # Validate new authorization parameters
+    validation_errors: list[str] = []
+
+    if not admin_user_email:
+        validation_errors.append("admin_user_email is required for authorization")
+    elif "@airbyte.io" not in admin_user_email:
+        validation_errors.append(
+            f"admin_user_email must be an @airbyte.io email address, got: {admin_user_email}"
+        )
+
+    if not issue_url:
+        validation_errors.append(
+            "issue_url is required for authorization (GitHub issue URL)"
+        )
+    elif not issue_url.startswith("https://github.com/"):
+        validation_errors.append(
+            f"issue_url must be a valid GitHub URL (https://github.com/...), got: {issue_url}"
+        )
+
+    if not approval_comment_url:
+        validation_errors.append(
+            "approval_comment_url is required for authorization (GitHub comment URL)"
+        )
+    elif not approval_comment_url.startswith("https://github.com/"):
+        validation_errors.append(
+            f"approval_comment_url must be a valid GitHub URL, got: {approval_comment_url}"
+        )
+    elif (
+        "#issuecomment-" not in approval_comment_url
+        and "#discussion_r" not in approval_comment_url
+    ):
+        validation_errors.append(
+            "approval_comment_url must be a GitHub comment URL "
+            "(containing #issuecomment- or #discussion_r)"
+        )
+
+    if validation_errors:
+        return VersionOverrideOperationResult(
+            success=False,
+            message="Authorization validation failed: " + "; ".join(validation_errors),
+            connector_id=actor_id,
+            connector_type=actor_type,
+        )
+
+    # Build enhanced override reason with audit fields (only for 'set' operations)
+    enhanced_override_reason = override_reason
+    if not unset and override_reason:
+        audit_parts = [override_reason]
+        audit_parts.append(f"Issue: {issue_url}")
+        audit_parts.append(f"Approval: {approval_comment_url}")
+        if ai_agent_session_url:
+            audit_parts.append(f"AI Session: {ai_agent_session_url}")
+        enhanced_override_reason = " | ".join(audit_parts)
+
     # Get workspace and current version info
     try:
         workspace = _get_workspace(workspace_id)
@@ -233,9 +329,9 @@ def set_cloud_connector_version_override(
         workspace_id=workspace_id,
         version=version,
         unset=unset,
-        override_reason=
+        override_reason=enhanced_override_reason,
         override_reason_reference_url=override_reason_reference_url,
-        user_email=
+        user_email=admin_user_email,
     )

     # Get updated version info after the operation

airbyte_ops_mcp/mcp/live_tests.py
CHANGED

@@ -296,11 +296,11 @@ def run_live_connection_tests(
         "For live tests, this builds the test image. For regression tests, this builds "
         "the target image while control is auto-detected from the connection.",
     ] = None,
-
-
-        "
-        "(e.g.,
-        "
+    pr: Annotated[
+        int | None,
+        "PR number from the airbyte monorepo to checkout and build from "
+        "(e.g., 70847). Only used when connector_name is provided. "
+        "If not specified, builds from the default branch (master).",
     ] = None,
 ) -> RunLiveConnectionTestsResponse:
     """Start a live connection test run via GitHub Actions workflow.
@@ -374,6 +374,8 @@ def run_live_connection_tests(
         workflow_inputs["connector_image"] = connector_image
     if connector_name:
         workflow_inputs["connector_name"] = connector_name
+    if pr:
+        workflow_inputs["pr"] = str(pr)

     try:
         dispatch_result = trigger_workflow_dispatch(
@@ -427,8 +429,8 @@ def run_live_connection_tests(
         workflow_inputs["control_image"] = control_image
     if connector_name:
         workflow_inputs["connector_name"] = connector_name
-    if
-        workflow_inputs["
+    if pr:
+        workflow_inputs["pr"] = str(pr)

     try:
         dispatch_result = trigger_workflow_dispatch(

airbyte_ops_mcp/mcp/prod_db_queries.py
CHANGED

@@ -20,7 +20,7 @@ from airbyte_ops_mcp.prod_db_access.queries import (
     query_connections_by_connector,
     query_connector_versions,
     query_dataplanes_list,
-
+    query_failed_sync_attempts_for_connector,
     query_new_connector_releases,
     query_sync_results_for_version,
     query_workspace_info,
@@ -249,12 +249,41 @@ def query_prod_connector_version_sync_results(
 @mcp_tool(
     read_only=True,
     idempotent=True,
+    open_world=True,
 )
-def
-
-    str,
-    Field(
-
+def query_prod_failed_sync_attempts_for_connector(
+    source_definition_id: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Source connector definition ID (UUID) to search for. "
+                "Exactly one of this or source_canonical_name is required. "
+                "Example: 'afa734e4-3571-11ec-991a-1e0031268139' for YouTube Analytics."
+            ),
+            default=None,
+        ),
+    ] = None,
+    source_canonical_name: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Canonical source connector name to search for. "
+                "Exactly one of this or source_definition_id is required. "
+                "Examples: 'source-youtube-analytics', 'YouTube Analytics'."
+            ),
+            default=None,
+        ),
+    ] = None,
+    organization_id: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Optional organization ID (UUID) to filter results. "
+                "If provided, only failed attempts from this organization will be returned."
+            ),
+            default=None,
+        ),
+    ] = None,
     days: Annotated[
         int,
         Field(description="Number of days to look back (default: 7)", default=7),
@@ -264,29 +293,43 @@ def query_prod_failed_sync_attempts_for_version(
         Field(description="Maximum number of results (default: 100)", default=100),
     ] = 100,
 ) -> list[dict[str, Any]]:
-    """List failed sync attempts
+    """List failed sync attempts for ALL actors using a source connector type.

-
-
+    This tool finds all actors with the given connector definition and returns their
+    failed sync attempts, regardless of whether they have explicit version pins.

-
-
-
-    - failed_attempt_number: Which attempt this was (0-indexed)
-    - failure_summary: JSON containing failure details including failureType and messages
+    This is useful for investigating connector issues across all users. Use this when
+    you want to find failures for a connector type regardless of which version users
+    are on.

-    Note:
-
+    Note: This tool only supports SOURCE connectors. For destination connectors,
+    a separate tool would be needed.

-
-
-
-    organization_id, dataplane_group_id, dataplane_name, failed_attempt_id,
-    failed_attempt_number, failed_attempt_status, failed_attempt_created_at,
-    failed_attempt_ended_at, failure_summary, processing_task_queue
+    Key fields in results:
+    - failure_summary: JSON containing failure details including failureType and messages
+    - pin_origin_type, pin_origin, pinned_version_id: Version pin context (NULL if not pinned)
     """
-
-
+    # Validate that exactly one of the two parameters is provided
+    if (source_definition_id is None) == (source_canonical_name is None):
+        raise PyAirbyteInputError(
+            message=(
+                "Exactly one of source_definition_id or source_canonical_name "
+                "must be provided, but not both."
+            ),
+        )
+
+    # Resolve canonical name to definition ID if needed
+    resolved_definition_id: str
+    if source_canonical_name:
+        resolved_definition_id = _resolve_canonical_name_to_definition_id(
+            canonical_name=source_canonical_name,
+        )
+    else:
+        resolved_definition_id = source_definition_id  # type: ignore[assignment]
+
+    return query_failed_sync_attempts_for_connector(
+        connector_definition_id=resolved_definition_id,
+        organization_id=organization_id,
         days=days,
         limit=limit,
     )
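
Exactly one of the two source identifiers must be supplied; a call sketch, assuming prod DB credentials are configured:

```python
from airbyte_ops_mcp.mcp.prod_db_queries import (
    query_prod_failed_sync_attempts_for_connector,
)

# Either source_canonical_name or source_definition_id, never both:
rows = query_prod_failed_sync_attempts_for_connector(
    source_canonical_name="source-youtube-analytics",
    days=7,
    limit=100,
)
for row in rows:
    print(row["failed_attempt_id"], row["failure_summary"])
```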
airbyte_ops_mcp/mcp/server.py
CHANGED

@@ -2,9 +2,18 @@
 """Airbyte Admin MCP server implementation.

 This module provides the main MCP server for Airbyte admin operations.
+
+The server can run in two modes:
+- **stdio mode** (default): For direct MCP client connections via stdin/stdout
+- **HTTP mode**: For HTTP-based MCP connections, useful for containerized deployments
+
+Environment Variables:
+    MCP_HTTP_HOST: Host to bind HTTP server to (default: 127.0.0.1)
+    MCP_HTTP_PORT: Port for HTTP server (default: 8082)
 """

 import asyncio
+import os
 import sys
 from pathlib import Path
@@ -23,6 +32,10 @@ from airbyte_ops_mcp.mcp.prod_db_queries import register_prod_db_query_tools
 from airbyte_ops_mcp.mcp.prompts import register_prompts
 from airbyte_ops_mcp.mcp.server_info import register_server_info_resources

+# Default HTTP server configuration
+DEFAULT_HTTP_HOST = "127.0.0.1"
+DEFAULT_HTTP_PORT = 8082
+
 app: FastMCP = FastMCP(MCP_SERVER_NAME)

@@ -56,27 +69,87 @@ def register_server_assets(app: FastMCP) -> None:
 register_server_assets(app)


-def
-    """
-    # Load environment variables from .env file in current working directory
+def _load_env() -> None:
+    """Load environment variables from .env file if present."""
     env_file = Path.cwd() / ".env"
     if env_file.exists():
         load_dotenv(env_file)
         print(f"Loaded environment from: {env_file}", flush=True, file=sys.stderr)

+
+def main() -> None:
+    """Main entry point for the Airbyte Admin MCP server (stdio mode).
+
+    This is the default entry point that runs the server in stdio mode,
+    suitable for direct MCP client connections.
+    """
+    _load_env()
+
     print("=" * 60, flush=True, file=sys.stderr)
-    print("Starting Airbyte Admin MCP server.", file=sys.stderr)
+    print("Starting Airbyte Admin MCP server (stdio mode).", file=sys.stderr)
     try:
         asyncio.run(app.run_stdio_async(show_banner=False))
     except KeyboardInterrupt:
         print("Airbyte Admin MCP server interrupted by user.", file=sys.stderr)
-    except Exception as ex:
-        print(f"Error running Airbyte Admin MCP server: {ex}", file=sys.stderr)
-        sys.exit(1)

     print("Airbyte Admin MCP server stopped.", file=sys.stderr)
     print("=" * 60, flush=True, file=sys.stderr)
-
+
+
+def _parse_port(port_str: str | None, default: int) -> int:
+    """Parse and validate a port number from string.
+
+    Args:
+        port_str: Port string from environment variable, or None if not set
+        default: Default port to use if port_str is None
+
+    Returns:
+        Validated port number
+
+    Raises:
+        ValueError: If port_str is not a valid integer or out of range
+    """
+    if port_str is None:
+        return default
+
+    port_str = port_str.strip()
+    if not port_str.isdecimal():
+        raise ValueError(f"MCP_HTTP_PORT must be a valid integer, got: {port_str!r}")
+
+    port = int(port_str)
+    if not 1 <= port <= 65535:
+        raise ValueError(f"MCP_HTTP_PORT must be between 1 and 65535, got: {port}")
+
+    return port
+
+
+def main_http() -> None:
+    """HTTP entry point for the Airbyte Admin MCP server.
+
+    This entry point runs the server in HTTP mode, suitable for containerized
+    deployments where the server needs to be accessible over HTTP.
+
+    Environment Variables:
+        MCP_HTTP_HOST: Host to bind to (default: 127.0.0.1)
+        MCP_HTTP_PORT: Port to listen on (default: 8082)
+    """
+    _load_env()
+
+    host = os.getenv("MCP_HTTP_HOST", DEFAULT_HTTP_HOST)
+    port = _parse_port(os.getenv("MCP_HTTP_PORT"), DEFAULT_HTTP_PORT)
+
+    print("=" * 60, flush=True, file=sys.stderr)
+    print(
+        f"Starting Airbyte Admin MCP server (HTTP mode) on {host}:{port}",
+        file=sys.stderr,
+    )
+    try:
+        app.run(transport="http", host=host, port=port)
+    except KeyboardInterrupt:
+        print("Airbyte Admin MCP server interrupted by user.", file=sys.stderr)
+
+    print("Airbyte Admin MCP server stopped.", file=sys.stderr)
+    print("=" * 60, flush=True, file=sys.stderr)


 if __name__ == "__main__":
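
The new `main_http` entry point mirrors `main` but serves over HTTP; the +1 line in entry_points.txt likely registers it as a console script. A launch sketch using the documented env vars (binding to 0.0.0.0 is an illustrative container-deployment choice, not the default):

```python
import os

os.environ.setdefault("MCP_HTTP_HOST", "0.0.0.0")  # illustrative; default is 127.0.0.1
os.environ.setdefault("MCP_HTTP_PORT", "8082")

from airbyte_ops_mcp.mcp.server import main_http

main_http()  # blocks, serving FastMCP over HTTP on MCP_HTTP_HOST:MCP_HTTP_PORT
```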

airbyte_ops_mcp/prod_db_access/db_engine.py
CHANGED

@@ -52,6 +52,7 @@ def _is_tailscale_connected() -> bool:
     Detection methods:
     1. Check for tailscale0 network interface (Linux)
     2. Run 'tailscale status --json' and check backend state (cross-platform)
+    3. Check macOS-specific Tailscale.app location if tailscale not in PATH
     """
     # Method 1: Check for tailscale0 interface (Linux)
     try:
@@ -63,6 +64,13 @@ def _is_tailscale_connected() -> bool:

     # Method 2: Check tailscale CLI status
     tailscale_path = shutil.which("tailscale")
+
+    # Method 3: On macOS, check the standard Tailscale.app location if not in PATH
+    if not tailscale_path and os.path.exists(
+        "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
+    ):
+        tailscale_path = "/Applications/Tailscale.app/Contents/MacOS/Tailscale"
+
     if tailscale_path:
         try:
             result = subprocess.run(

airbyte_ops_mcp/prod_db_access/queries.py
CHANGED

@@ -24,7 +24,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_CONNECTIONS_BY_CONNECTOR_AND_ORG,
     SELECT_CONNECTOR_VERSIONS,
     SELECT_DATAPLANES_LIST,
-
+    SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
     SELECT_NEW_CONNECTOR_RELEASES,
     SELECT_ORG_WORKSPACES,
     SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
@@ -225,43 +225,55 @@ def query_sync_results_for_version(
     )


-def
-
+def query_failed_sync_attempts_for_connector(
+    connector_definition_id: str,
+    organization_id: str | None = None,
     days: int = 7,
     limit: int = 100,
     *,
     gsm_client: secretmanager.SecretManagerServiceClient | None = None,
 ) -> list[dict[str, Any]]:
-    """Query failed sync
+    """Query failed sync attempts for ALL actors using a connector definition.

-
-
-    jobs and attempts tables to optimize join performance.
+    Finds all actors with the given actor_definition_id and returns their failed
+    sync attempts, regardless of whether they have explicit version pins.

-
-
+    This is useful for investigating connector issues across all users.
+
+    Note: This query only supports SOURCE connectors (joins via connection.source_id).
+    For destination connectors, a separate query would be needed.

     Args:
-
+        connector_definition_id: Connector definition UUID to filter by
+        organization_id: Optional organization UUID to filter results by (post-query filter)
         days: Number of days to look back (default: 7)
         limit: Maximum number of results (default: 100)
         gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.

     Returns:
-        List of failed sync
+        List of failed sync attempt records with failure_summary and workspace info
     """
     cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
-
-
+
+    results = _run_sql_query(
+        SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
         parameters={
-            "
+            "connector_definition_id": connector_definition_id,
             "cutoff_date": cutoff_date,
             "limit": limit,
         },
-        query_name="
+        query_name="SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR",
         gsm_client=gsm_client,
     )
+
+    # Post-query filter by organization_id if provided
+    if organization_id is not None:
+        results = [
+            r for r in results if str(r.get("organization_id")) == organization_id
+        ]
+
+    return results


 def query_dataplanes_list(
     *,

airbyte_ops_mcp/prod_db_access/sql.py
CHANGED

@@ -305,32 +305,33 @@ SELECT_SUCCESSFUL_SYNCS_FOR_VERSION = sqlalchemy.text(
     """
 )

-# Get failed attempt results for actors
-#
-#
-# from
-#
-#
-#
-
+# Get failed attempt results for ALL actors using a connector definition.
+# Finds all actors with the given actor_definition_id and returns their failed sync attempts,
+# regardless of whether they have explicit version pins.
+# Query starts from attempts table to leverage indexed columns (ended_at, status).
+# Note: This query only supports SOURCE connectors (joins via connection.source_id).
+# The LEFT JOIN to scoped_configuration provides pin context when available (pin_origin_type,
+# pin_origin, pinned_version_id will be NULL for unpinned actors).
+SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR = sqlalchemy.text(
     """
     SELECT
         jobs.id AS job_id,
         jobs.scope AS connection_id,
-        jobs.status AS
+        jobs.status AS latest_job_status,
         jobs.started_at AS job_started_at,
         jobs.updated_at AS job_updated_at,
         connection.name AS connection_name,
         actor.id AS actor_id,
         actor.name AS actor_name,
         actor.actor_definition_id,
-        scoped_configuration.origin_type AS pin_origin_type,
-        scoped_configuration.origin AS pin_origin,
         workspace.id AS workspace_id,
         workspace.name AS workspace_name,
         workspace.organization_id,
         workspace.dataplane_group_id,
         dataplane_group.name AS dataplane_name,
+        scoped_configuration.origin_type AS pin_origin_type,
+        scoped_configuration.origin AS pin_origin,
+        scoped_configuration.value AS pinned_version_id,
         attempts.id AS failed_attempt_id,
         attempts.attempt_number AS failed_attempt_number,
         attempts.status AS failed_attempt_status,
@@ -347,15 +348,15 @@ SELECT_FAILED_SYNC_ATTEMPTS_FOR_VERSION = sqlalchemy.text(
         ON jobs.scope = connection.id::text
     JOIN actor
         ON connection.source_id = actor.id
-
-        ON scoped_configuration.scope_id = actor.id
-        AND scoped_configuration.key = 'connector_version'
-        AND scoped_configuration.scope_type = 'actor'
-        AND scoped_configuration.value = :actor_definition_version_id
+        AND actor.actor_definition_id = :connector_definition_id
     JOIN workspace
         ON actor.workspace_id = workspace.id
     LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
+    LEFT JOIN scoped_configuration
+        ON scoped_configuration.scope_id = actor.id
+        AND scoped_configuration.key = 'connector_version'
+        AND scoped_configuration.scope_type = 'actor'
     WHERE
         attempts.ended_at >= :cutoff_date
         AND attempts.status = 'failed'
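
The statement binds three named parameters; a sketch of the expected bind dict (executing it requires the prod DB engine from db_engine.py, elided here):

```python
from datetime import datetime, timedelta, timezone

params = {
    "connector_definition_id": "afa734e4-3571-11ec-991a-1e0031268139",  # example ID from the docstring
    "cutoff_date": datetime.now(timezone.utc) - timedelta(days=7),
    "limit": 100,
}
# with engine.connect() as conn:  # engine construction elided
#     rows = conn.execute(SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR, params).mappings().all()
```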

{airbyte_internal_ops-0.1.11.dist-info → airbyte_internal_ops-0.2.0.dist-info}/WHEEL
File without changes
|