airbyte-internal-ops 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/METADATA +70 -1
  2. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/RECORD +30 -31
  3. airbyte_ops_mcp/__init__.py +30 -2
  4. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/pipeline.py +2 -8
  5. airbyte_ops_mcp/airbyte_repo/list_connectors.py +176 -4
  6. airbyte_ops_mcp/airbyte_repo/utils.py +5 -3
  7. airbyte_ops_mcp/cli/cloud.py +35 -36
  8. airbyte_ops_mcp/cli/registry.py +90 -1
  9. airbyte_ops_mcp/cli/repo.py +15 -0
  10. airbyte_ops_mcp/connection_config_retriever/__init__.py +26 -0
  11. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/audit_logging.py +5 -6
  12. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/retrieval.py +8 -22
  13. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/secrets_resolution.py +8 -42
  14. airbyte_ops_mcp/constants.py +35 -0
  15. airbyte_ops_mcp/live_tests/connection_secret_retriever.py +1 -1
  16. airbyte_ops_mcp/mcp/github_repo_ops.py +10 -0
  17. airbyte_ops_mcp/mcp/live_tests.py +21 -6
  18. airbyte_ops_mcp/mcp/prod_db_queries.py +357 -0
  19. airbyte_ops_mcp/mcp/server.py +2 -0
  20. airbyte_ops_mcp/mcp/server_info.py +2 -2
  21. airbyte_ops_mcp/prod_db_access/__init__.py +34 -0
  22. airbyte_ops_mcp/prod_db_access/db_engine.py +127 -0
  23. airbyte_ops_mcp/prod_db_access/py.typed +0 -0
  24. airbyte_ops_mcp/prod_db_access/queries.py +272 -0
  25. airbyte_ops_mcp/prod_db_access/sql.py +353 -0
  26. airbyte_ops_mcp/registry/__init__.py +34 -0
  27. airbyte_ops_mcp/registry/models.py +63 -0
  28. airbyte_ops_mcp/registry/publish.py +368 -0
  29. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/__init__.py +0 -3
  30. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/commands.py +0 -242
  31. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/context.py +0 -175
  32. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/pipeline.py +0 -1056
  33. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/__init__.py +0 -3
  34. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/commands.py +0 -127
  35. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/steps/python_registry.py +0 -238
  36. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/models/contexts/python_registry_publish.py +0 -119
  37. airbyte_ops_mcp/live_tests/_connection_retriever/__init__.py +0 -35
  38. airbyte_ops_mcp/live_tests/_connection_retriever/consts.py +0 -33
  39. airbyte_ops_mcp/live_tests/_connection_retriever/db_access.py +0 -82
  40. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/WHEEL +0 -0
  41. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,272 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """Query execution functions for Airbyte Cloud Prod DB Replica.
3
+
4
+ This module provides functions that execute SQL queries against the Prod DB Replica
5
+ and return structured results. Each function wraps a SQL template from sql.py.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import logging
11
+ from collections.abc import Mapping
12
+ from datetime import datetime, timedelta, timezone
13
+ from time import perf_counter
14
+ from typing import Any
15
+
16
+ import sqlalchemy
17
+ from google.cloud import secretmanager
18
+
19
+ from airbyte_ops_mcp.prod_db_access.db_engine import get_pool
20
+ from airbyte_ops_mcp.prod_db_access.sql import (
21
+ SELECT_ACTORS_PINNED_TO_VERSION,
22
+ SELECT_CONNECTIONS_BY_CONNECTOR,
23
+ SELECT_CONNECTOR_VERSIONS,
24
+ SELECT_DATAPLANES_LIST,
25
+ SELECT_NEW_CONNECTOR_RELEASES,
26
+ SELECT_ORG_WORKSPACES,
27
+ SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
28
+ SELECT_SYNC_RESULTS_FOR_VERSION,
29
+ SELECT_WORKSPACE_INFO,
30
+ )
31
+
32
+ logger = logging.getLogger(__name__)
33
+
34
+
35
def _run_sql_query(
    statement: sqlalchemy.sql.elements.TextClause,
    parameters: Mapping[str, Any] | None = None,
    *,
    query_name: str | None = None,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """Execute a SQL text statement against the Prod DB Replica.

    Rows are materialized eagerly as plain dicts, and the elapsed wall time
    is logged at INFO level.

    Args:
        statement: SQLAlchemy text clause to execute.
        parameters: Bind parameters for the statement, if any.
        query_name: Optional label for the log line; a generic "SQL query"
            label is used when omitted.
        gsm_client: GCP Secret Manager client used to resolve DB credentials.
            If None, a new client will be instantiated.

    Returns:
        List of row dicts from the query result.
    """
    client = gsm_client
    if client is None:
        client = secretmanager.SecretManagerServiceClient()
    pool = get_pool(client)

    start = perf_counter()
    with pool.connect() as conn:
        cursor_result = conn.execute(statement, parameters or {})
        # Row._mapping gives a dict-like view of each result row.
        rows = [dict(record._mapping) for record in cursor_result]
    elapsed = perf_counter() - start

    label = query_name or "SQL query"
    logger.info("Prod DB query %s returned %d rows in %.3f s", label, len(rows), elapsed)

    return rows
67
+
68
+
69
def query_connections_by_connector(
    connector_definition_id: str,
    organization_id: str | None = None,
    limit: int = 1000,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """Find connections whose source uses a given connector definition.

    Args:
        connector_definition_id: Connector definition UUID to filter by.
        organization_id: Optional organization UUID to restrict the search to.
        limit: Maximum number of results (default: 1000).
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        List of connection records with workspace and dataplane info.
    """
    bind_params: dict[str, Any] = {
        "connector_definition_id": connector_definition_id,
        "organization_id": organization_id,
        "limit": limit,
    }
    return _run_sql_query(
        SELECT_CONNECTIONS_BY_CONNECTOR,
        parameters=bind_params,
        query_name="SELECT_CONNECTIONS_BY_CONNECTOR",
        gsm_client=gsm_client,
    )
97
+
98
+
99
def query_connector_versions(
    connector_definition_id: str,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """List every recorded version of a connector definition.

    Args:
        connector_definition_id: Connector definition UUID.
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        Version records ordered by last_published DESC.
    """
    return _run_sql_query(
        SELECT_CONNECTOR_VERSIONS,
        query_name="SELECT_CONNECTOR_VERSIONS",
        parameters={"actor_definition_id": connector_definition_id},
        gsm_client=gsm_client,
    )
119
+
120
+
121
def query_new_connector_releases(
    days: int = 7,
    limit: int = 100,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """List connector versions published within the last ``days`` days.

    Args:
        days: Number of days to look back (default: 7).
        limit: Maximum number of results (default: 100).
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        Recently published connector versions, newest first.
    """
    # Cutoff is computed in UTC to match the DB's timestamps.
    since = datetime.now(timezone.utc) - timedelta(days=days)
    return _run_sql_query(
        SELECT_NEW_CONNECTOR_RELEASES,
        parameters={"cutoff_date": since, "limit": limit},
        query_name="SELECT_NEW_CONNECTOR_RELEASES",
        gsm_client=gsm_client,
    )
144
+
145
+
146
def query_actors_pinned_to_version(
    connector_version_id: str,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """List actors (sources/destinations) pinned to a given connector version.

    Args:
        connector_version_id: Connector version UUID to search for.
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        Actors pinned to the specified version, newest pin first.
    """
    bind_params = {"actor_definition_version_id": connector_version_id}
    return _run_sql_query(
        SELECT_ACTORS_PINNED_TO_VERSION,
        parameters=bind_params,
        query_name="SELECT_ACTORS_PINNED_TO_VERSION",
        gsm_client=gsm_client,
    )
166
+
167
+
168
def query_sync_results_for_version(
    connector_version_id: str,
    days: int = 7,
    limit: int = 100,
    successful_only: bool = False,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """Fetch sync job results for actors pinned to a connector version.

    Args:
        connector_version_id: Connector version UUID to filter by.
        days: Number of days to look back (default: 7).
        limit: Maximum number of results (default: 100).
        successful_only: If True, only return successful syncs (default: False).
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        List of sync job results, most recently updated first.
    """
    since = datetime.now(timezone.utc) - timedelta(days=days)

    # Pick the template (and its matching log label) based on the filter.
    if successful_only:
        statement = SELECT_SUCCESSFUL_SYNCS_FOR_VERSION
        label = "SELECT_SUCCESSFUL_SYNCS_FOR_VERSION"
    else:
        statement = SELECT_SYNC_RESULTS_FOR_VERSION
        label = "SELECT_SYNC_RESULTS_FOR_VERSION"

    return _run_sql_query(
        statement,
        parameters={
            "actor_definition_version_id": connector_version_id,
            "cutoff_date": since,
            "limit": limit,
        },
        query_name=label,
        gsm_client=gsm_client,
    )
209
+
210
+
211
def query_dataplanes_list(
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """List all dataplane groups together with their workspace counts.

    Args:
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        Dataplane groups ordered by workspace count DESC.
    """
    return _run_sql_query(
        SELECT_DATAPLANES_LIST,
        gsm_client=gsm_client,
        query_name="SELECT_DATAPLANES_LIST",
    )
228
+
229
+
230
def query_workspace_info(
    workspace_id: str,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> dict[str, Any] | None:
    """Look up a single workspace, including its dataplane group.

    Args:
        workspace_id: Workspace UUID.
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        Workspace info dict, or None if no such workspace exists.
    """
    matches = _run_sql_query(
        SELECT_WORKSPACE_INFO,
        parameters={"workspace_id": workspace_id},
        query_name="SELECT_WORKSPACE_INFO",
        gsm_client=gsm_client,
    )
    # At most one row is expected; an empty result means "not found".
    return next(iter(matches), None)
251
+
252
+
253
def query_org_workspaces(
    organization_id: str,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """List all workspaces in an organization with dataplane info.

    Args:
        organization_id: Organization UUID.
        gsm_client: GCP Secret Manager client. If None, a new client will be
            instantiated.

    Returns:
        Workspaces belonging to the organization, sorted by name.
    """
    bind_params = {"organization_id": organization_id}
    return _run_sql_query(
        SELECT_ORG_WORKSPACES,
        parameters=bind_params,
        query_name="SELECT_ORG_WORKSPACES",
        gsm_client=gsm_client,
    )
@@ -0,0 +1,353 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """SQL query templates and schema documentation for Airbyte Cloud Prod DB Replica.
3
+
4
+ Prod DB Replica Schema Reference
5
+ ================================
6
+
7
+ Database: prod-configapi
8
+ Instance: prod-ab-cloud-proj:us-west3:prod-pgsql-replica
9
+
10
+ connection
11
+ ----------
12
+ id, namespace_definition, namespace_format, prefix, source_id, destination_id, name,
13
+ catalog, status, schedule, manual, resource_requirements, created_at, updated_at,
14
+ source_catalog_id, schedule_type, schedule_data, non_breaking_change_preference,
15
+ breaking_change, field_selection_data, destination_catalog_id, status_reason
16
+
17
+ actor
18
+ -----
19
+ id, workspace_id, actor_definition_id, name, configuration, actor_type, tombstone,
20
+ created_at, updated_at, resource_requirements
21
+
22
+ workspace
23
+ ---------
24
+ id, customer_id, name, slug, email, initial_setup_complete, anonymous_data_collection,
25
+ send_newsletter, send_security_updates, display_setup_wizard, tombstone, notifications,
26
+ first_sync_complete, feedback_complete, created_at, updated_at, webhook_operation_configs,
27
+ notification_settings, organization_id, dataplane_group_id
28
+
29
+ dataplane_group
30
+ ---------------
31
+ id, organization_id, name, enabled, created_at, updated_at, tombstone
32
+
33
+ Note: Main dataplane groups are:
34
+ - 645a183f-b12b-4c6e-8ad3-99e165603450 = US (default, ~133K workspaces)
35
+ - 153996d3-208e-4887-b8b1-e5fe48104450 = US-Central (~12K workspaces)
36
+ - b9e48d61-f082-4a14-a8d0-799a907938cb = EU (~3K workspaces)
37
+
38
+ actor_definition_version
39
+ ------------------------
40
+ id, actor_definition_id, created_at, updated_at, documentation_url, docker_repository,
41
+ docker_image_tag, spec, protocol_version, release_date, normalization_repository,
42
+ normalization_tag, supports_dbt, normalization_integration_type, allowed_hosts,
43
+ suggested_streams, release_stage, support_state, support_level, supports_refreshes,
44
+ cdk_version, last_published, internal_support_level, language, supports_file_transfer,
45
+ supports_data_activation, connector_ipc_options
46
+
47
+ scoped_configuration
48
+ --------------------
49
+ id, key, resource_type, resource_id, scope_type, scope_id, value, description,
50
+ reference_url, origin_type, origin, expires_at, created_at, updated_at
51
+
52
+ Note: Version overrides are stored with key='connector_version', resource_type='actor_definition',
53
+ scope_type='actor', and value=actor_definition_version.id (UUID).
54
+
55
+ jobs
56
+ ----
57
+ id, config_type, scope (connection_id), config, status, started_at, created_at,
58
+ updated_at, metadata, is_scheduled
59
+
60
+ Note: status values: 'succeeded', 'failed', 'cancelled', 'running', 'incomplete'
61
+ config_type values: 'sync', 'reset_connection', 'refresh'
62
+
63
+ attempts
64
+ --------
65
+ id, job_id, attempt_number, log_path, output, status, created_at, updated_at,
66
+ ended_at, failure_summary, processing_task_queue, attempt_sync_config
67
+ """
68
+
69
+ from __future__ import annotations
70
+
71
+ import sqlalchemy
72
+
73
+ # =============================================================================
74
+ # Connection Queries
75
+ # =============================================================================
76
+
77
# Query connections by connector type, optionally filtered by organization.
# The :organization_id bind is nullable — passing NULL disables that filter
# (see the "(:organization_id IS NULL OR ...)" predicate below).
SELECT_CONNECTIONS_BY_CONNECTOR = sqlalchemy.text(
    """
    SELECT
        connection.id AS connection_id,
        connection.name AS connection_name,
        connection.source_id,
        workspace.id AS workspace_id,
        workspace.name AS workspace_name,
        workspace.organization_id,
        workspace.dataplane_group_id,
        dataplane_group.name AS dataplane_name,
        source_actor.actor_definition_id AS source_definition_id,
        source_actor.name AS source_name
    FROM connection
    JOIN actor AS source_actor
        ON connection.source_id = source_actor.id
    JOIN workspace
        ON source_actor.workspace_id = workspace.id
    LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
    WHERE
        source_actor.actor_definition_id = :connector_definition_id
        AND (:organization_id IS NULL OR workspace.organization_id = :organization_id)
    LIMIT :limit
    """
)
104
+
105
+ # =============================================================================
106
+ # Connector Version Queries
107
+ # =============================================================================
108
+
109
# All actor_definition_version rows for one connector definition,
# most recently published first (created_at breaks ties / NULLs).
SELECT_CONNECTOR_VERSIONS = sqlalchemy.text(
    """
    SELECT
        actor_definition_version.id AS version_id,
        actor_definition_version.docker_image_tag,
        actor_definition_version.docker_repository,
        actor_definition_version.release_stage,
        actor_definition_version.support_level,
        actor_definition_version.cdk_version,
        actor_definition_version.language,
        actor_definition_version.last_published,
        actor_definition_version.release_date
    FROM actor_definition_version
    WHERE
        actor_definition_version.actor_definition_id = :actor_definition_id
    ORDER BY
        actor_definition_version.last_published DESC NULLS LAST,
        actor_definition_version.created_at DESC
    """
)
129
+
130
# List new connector releases within the last N days.
# Uses last_published (timestamp) rather than release_date (date only, often NULL).
# Note: No index on last_published, but table is small (~39K rows).
SELECT_NEW_CONNECTOR_RELEASES = sqlalchemy.text(
    """
    SELECT
        actor_definition_version.id AS version_id,
        actor_definition_version.actor_definition_id,
        actor_definition_version.docker_repository,
        actor_definition_version.docker_image_tag,
        actor_definition_version.last_published,
        actor_definition_version.release_date,
        actor_definition_version.release_stage,
        actor_definition_version.support_level,
        actor_definition_version.cdk_version,
        actor_definition_version.language,
        actor_definition_version.created_at
    FROM actor_definition_version
    WHERE
        actor_definition_version.last_published >= :cutoff_date
    ORDER BY
        actor_definition_version.last_published DESC
    LIMIT :limit
    """
)
155
+
156
# Actors pinned to a specific connector version via scoped_configuration.
# Pins are rows with key='connector_version', scope_type='actor', and
# value = the actor_definition_version UUID (see module docstring).
SELECT_ACTORS_PINNED_TO_VERSION = sqlalchemy.text(
    """
    SELECT
        scoped_configuration.scope_id AS actor_id,
        scoped_configuration.resource_id AS actor_definition_id,
        scoped_configuration.origin_type,
        scoped_configuration.origin,
        scoped_configuration.description,
        scoped_configuration.created_at,
        scoped_configuration.expires_at,
        actor.name AS actor_name,
        actor.workspace_id,
        workspace.name AS workspace_name,
        workspace.organization_id,
        workspace.dataplane_group_id,
        dataplane_group.name AS dataplane_name
    FROM scoped_configuration
    JOIN actor
        ON scoped_configuration.scope_id = actor.id
    JOIN workspace
        ON actor.workspace_id = workspace.id
    LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
    WHERE
        scoped_configuration.key = 'connector_version'
        AND scoped_configuration.scope_type = 'actor'
        AND scoped_configuration.value = :actor_definition_version_id
    ORDER BY
        scoped_configuration.created_at DESC
    """
)
187
+
188
+ # =============================================================================
189
+ # Sync Results Queries
190
+ # =============================================================================
191
+
192
# Get sync results for actors pinned to a specific connector definition VERSION ID.
# This joins through scoped_configuration to find actors with version overrides.
# Note: jobs.scope holds the connection id as text, hence the ::text cast.
SELECT_SYNC_RESULTS_FOR_VERSION = sqlalchemy.text(
    """
    SELECT
        jobs.id AS job_id,
        jobs.scope AS connection_id,
        jobs.status AS job_status,
        jobs.started_at,
        jobs.updated_at AS job_updated_at,
        connection.name AS connection_name,
        actor.id AS actor_id,
        actor.name AS actor_name,
        actor.actor_definition_id,
        scoped_configuration.origin_type AS pin_origin_type,
        scoped_configuration.origin AS pin_origin,
        workspace.id AS workspace_id,
        workspace.name AS workspace_name,
        workspace.organization_id,
        workspace.dataplane_group_id,
        dataplane_group.name AS dataplane_name
    FROM jobs
    JOIN connection
        ON jobs.scope = connection.id::text
    JOIN actor
        ON connection.source_id = actor.id
    JOIN scoped_configuration
        ON scoped_configuration.scope_id = actor.id
        AND scoped_configuration.key = 'connector_version'
        AND scoped_configuration.scope_type = 'actor'
    JOIN workspace
        ON actor.workspace_id = workspace.id
    LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
    WHERE
        jobs.config_type = 'sync'
        AND scoped_configuration.value = :actor_definition_version_id
        AND jobs.updated_at >= :cutoff_date
    ORDER BY
        jobs.updated_at DESC
    LIMIT :limit
    """
)
235
+
236
# Get successful sync results for actors pinned to a specific connector definition VERSION ID.
# Same shape as SELECT_SYNC_RESULTS_FOR_VERSION plus a jobs.status = 'succeeded'
# filter (and therefore no job_status column in the projection).
SELECT_SUCCESSFUL_SYNCS_FOR_VERSION = sqlalchemy.text(
    """
    SELECT
        jobs.id AS job_id,
        jobs.scope AS connection_id,
        jobs.started_at,
        jobs.updated_at AS job_updated_at,
        connection.name AS connection_name,
        actor.id AS actor_id,
        actor.name AS actor_name,
        actor.actor_definition_id,
        scoped_configuration.origin_type AS pin_origin_type,
        scoped_configuration.origin AS pin_origin,
        workspace.id AS workspace_id,
        workspace.name AS workspace_name,
        workspace.organization_id,
        workspace.dataplane_group_id,
        dataplane_group.name AS dataplane_name
    FROM jobs
    JOIN connection
        ON jobs.scope = connection.id::text
    JOIN actor
        ON connection.source_id = actor.id
    JOIN scoped_configuration
        ON scoped_configuration.scope_id = actor.id
        AND scoped_configuration.key = 'connector_version'
        AND scoped_configuration.scope_type = 'actor'
    JOIN workspace
        ON actor.workspace_id = workspace.id
    LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
    WHERE
        jobs.config_type = 'sync'
        AND jobs.status = 'succeeded'
        AND scoped_configuration.value = :actor_definition_version_id
        AND jobs.updated_at >= :cutoff_date
    ORDER BY
        jobs.updated_at DESC
    LIMIT :limit
    """
)
278
+
279
+ # =============================================================================
280
+ # Dataplane and Workspace Queries
281
+ # =============================================================================
282
+
283
# List all dataplane groups with workspace counts.
# Tombstoned groups and tombstoned workspaces are excluded from the counts.
SELECT_DATAPLANES_LIST = sqlalchemy.text(
    """
    SELECT
        dataplane_group.id AS dataplane_group_id,
        dataplane_group.name AS dataplane_name,
        dataplane_group.organization_id,
        dataplane_group.enabled,
        dataplane_group.tombstone,
        dataplane_group.created_at,
        COUNT(workspace.id) AS workspace_count
    FROM dataplane_group
    LEFT JOIN workspace
        ON workspace.dataplane_group_id = dataplane_group.id
        AND workspace.tombstone = false
    WHERE
        dataplane_group.tombstone = false
    GROUP BY
        dataplane_group.id,
        dataplane_group.name,
        dataplane_group.organization_id,
        dataplane_group.enabled,
        dataplane_group.tombstone,
        dataplane_group.created_at
    ORDER BY
        workspace_count DESC
    """
)
311
+
312
# Get workspace info including dataplane group for EU filtering.
# Single-row lookup by primary key; tombstoned workspaces are still returned.
SELECT_WORKSPACE_INFO = sqlalchemy.text(
    """
    SELECT
        workspace.id AS workspace_id,
        workspace.name AS workspace_name,
        workspace.slug,
        workspace.organization_id,
        workspace.dataplane_group_id,
        dataplane_group.name AS dataplane_name,
        workspace.created_at,
        workspace.tombstone
    FROM workspace
    LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
    WHERE
        workspace.id = :workspace_id
    """
)
331
+
332
# Get all workspaces in an organization with dataplane info.
# Tombstoned workspaces are excluded; results are sorted by workspace name.
SELECT_ORG_WORKSPACES = sqlalchemy.text(
    """
    SELECT
        workspace.id AS workspace_id,
        workspace.name AS workspace_name,
        workspace.slug,
        workspace.organization_id,
        workspace.dataplane_group_id,
        dataplane_group.name AS dataplane_name,
        workspace.created_at,
        workspace.tombstone
    FROM workspace
    LEFT JOIN dataplane_group
        ON workspace.dataplane_group_id = dataplane_group.id
    WHERE
        workspace.organization_id = :organization_id
        AND workspace.tombstone = false
    ORDER BY
        workspace.name
    """
)
@@ -0,0 +1,34 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """Registry operations for Airbyte connectors.
3
+
4
+ This package provides functionality for publishing connectors to the Airbyte
5
+ registry, including promoting and rolling back release candidates.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from airbyte_ops_mcp.registry.models import (
11
+ ConnectorMetadata,
12
+ ConnectorPublishResult,
13
+ PublishAction,
14
+ )
15
+ from airbyte_ops_mcp.registry.publish import (
16
+ CONNECTOR_PATH_PREFIX,
17
+ METADATA_FILE_NAME,
18
+ get_connector_metadata,
19
+ is_release_candidate,
20
+ publish_connector,
21
+ strip_rc_suffix,
22
+ )
23
+
24
# Public API of the registry package, kept in sorted (ASCII) order.
__all__ = [
    "CONNECTOR_PATH_PREFIX",
    "METADATA_FILE_NAME",
    "ConnectorMetadata",
    "ConnectorPublishResult",
    "PublishAction",
    "get_connector_metadata",
    "is_release_candidate",
    "publish_connector",
    "strip_rc_suffix",
]