airbyte-internal-ops 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {airbyte_internal_ops-0.1.5.dist-info → airbyte_internal_ops-0.1.7.dist-info}/METADATA +70 -1
  2. {airbyte_internal_ops-0.1.5.dist-info → airbyte_internal_ops-0.1.7.dist-info}/RECORD +25 -26
  3. airbyte_ops_mcp/__init__.py +30 -2
  4. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/pipeline.py +2 -8
  5. airbyte_ops_mcp/airbyte_repo/list_connectors.py +132 -0
  6. airbyte_ops_mcp/cli/registry.py +90 -1
  7. airbyte_ops_mcp/connection_config_retriever/__init__.py +26 -0
  8. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/audit_logging.py +5 -6
  9. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/retrieval.py +8 -22
  10. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/secrets_resolution.py +8 -42
  11. airbyte_ops_mcp/constants.py +35 -0
  12. airbyte_ops_mcp/live_tests/connection_secret_retriever.py +17 -6
  13. airbyte_ops_mcp/mcp/github_repo_ops.py +10 -0
  14. airbyte_ops_mcp/mcp/prod_db_queries.py +357 -0
  15. airbyte_ops_mcp/mcp/server.py +2 -0
  16. airbyte_ops_mcp/prod_db_access/__init__.py +34 -0
  17. airbyte_ops_mcp/prod_db_access/db_engine.py +126 -0
  18. airbyte_ops_mcp/prod_db_access/py.typed +0 -0
  19. airbyte_ops_mcp/prod_db_access/queries.py +272 -0
  20. airbyte_ops_mcp/prod_db_access/sql.py +353 -0
  21. airbyte_ops_mcp/registry/__init__.py +34 -0
  22. airbyte_ops_mcp/registry/models.py +63 -0
  23. airbyte_ops_mcp/registry/publish.py +368 -0
  24. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/__init__.py +0 -3
  25. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/commands.py +0 -242
  26. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/context.py +0 -175
  27. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/pipeline.py +0 -1056
  28. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/__init__.py +0 -3
  29. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/commands.py +0 -127
  30. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/steps/python_registry.py +0 -238
  31. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/models/contexts/python_registry_publish.py +0 -119
  32. airbyte_ops_mcp/live_tests/_connection_retriever/__init__.py +0 -35
  33. airbyte_ops_mcp/live_tests/_connection_retriever/consts.py +0 -33
  34. airbyte_ops_mcp/live_tests/_connection_retriever/db_access.py +0 -82
  35. {airbyte_internal_ops-0.1.5.dist-info → airbyte_internal_ops-0.1.7.dist-info}/WHEEL +0 -0
  36. {airbyte_internal_ops-0.1.5.dist-info → airbyte_internal_ops-0.1.7.dist-info}/entry_points.txt +0 -0
airbyte_ops_mcp/prod_db_access/sql.py
@@ -0,0 +1,353 @@
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+ """SQL query templates and schema documentation for Airbyte Cloud Prod DB Replica.
+
+ Prod DB Replica Schema Reference
+ ================================
+
+ Database: prod-configapi
+ Instance: prod-ab-cloud-proj:us-west3:prod-pgsql-replica
+
+ connection
+ ----------
+ id, namespace_definition, namespace_format, prefix, source_id, destination_id, name,
+ catalog, status, schedule, manual, resource_requirements, created_at, updated_at,
+ source_catalog_id, schedule_type, schedule_data, non_breaking_change_preference,
+ breaking_change, field_selection_data, destination_catalog_id, status_reason
+
+ actor
+ -----
+ id, workspace_id, actor_definition_id, name, configuration, actor_type, tombstone,
+ created_at, updated_at, resource_requirements
+
+ workspace
+ ---------
+ id, customer_id, name, slug, email, initial_setup_complete, anonymous_data_collection,
+ send_newsletter, send_security_updates, display_setup_wizard, tombstone, notifications,
+ first_sync_complete, feedback_complete, created_at, updated_at, webhook_operation_configs,
+ notification_settings, organization_id, dataplane_group_id
+
+ dataplane_group
+ ---------------
+ id, organization_id, name, enabled, created_at, updated_at, tombstone
+
+ Note: Main dataplane groups are:
+ - 645a183f-b12b-4c6e-8ad3-99e165603450 = US (default, ~133K workspaces)
+ - 153996d3-208e-4887-b8b1-e5fe48104450 = US-Central (~12K workspaces)
+ - b9e48d61-f082-4a14-a8d0-799a907938cb = EU (~3K workspaces)
+
+ actor_definition_version
+ ------------------------
+ id, actor_definition_id, created_at, updated_at, documentation_url, docker_repository,
+ docker_image_tag, spec, protocol_version, release_date, normalization_repository,
+ normalization_tag, supports_dbt, normalization_integration_type, allowed_hosts,
+ suggested_streams, release_stage, support_state, support_level, supports_refreshes,
+ cdk_version, last_published, internal_support_level, language, supports_file_transfer,
+ supports_data_activation, connector_ipc_options
+
+ scoped_configuration
+ --------------------
+ id, key, resource_type, resource_id, scope_type, scope_id, value, description,
+ reference_url, origin_type, origin, expires_at, created_at, updated_at
+
+ Note: Version overrides are stored with key='connector_version', resource_type='actor_definition',
+ scope_type='actor', and value=actor_definition_version.id (UUID).
+
+ jobs
+ ----
+ id, config_type, scope (connection_id), config, status, started_at, created_at,
+ updated_at, metadata, is_scheduled
+
+ Note: status values: 'succeeded', 'failed', 'cancelled', 'running', 'incomplete'
+ config_type values: 'sync', 'reset_connection', 'refresh'
+
+ attempts
+ --------
+ id, job_id, attempt_number, log_path, output, status, created_at, updated_at,
+ ended_at, failure_summary, processing_task_queue, attempt_sync_config
+ """
+
+ from __future__ import annotations
+
+ import sqlalchemy
+
+ # =============================================================================
+ # Connection Queries
+ # =============================================================================
+
+ # Query connections by connector type, optionally filtered by organization
+ SELECT_CONNECTIONS_BY_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         connection.id AS connection_id,
+         connection.name AS connection_name,
+         connection.source_id,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         source_actor.actor_definition_id AS source_definition_id,
+         source_actor.name AS source_name
+     FROM connection
+     JOIN actor AS source_actor
+         ON connection.source_id = source_actor.id
+     JOIN workspace
+         ON source_actor.workspace_id = workspace.id
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         source_actor.actor_definition_id = :connector_definition_id
+         AND (:organization_id IS NULL OR workspace.organization_id = :organization_id)
+     LIMIT :limit
+     """
+ )
+
+ # =============================================================================
+ # Connector Version Queries
+ # =============================================================================
+
+ SELECT_CONNECTOR_VERSIONS = sqlalchemy.text(
+     """
+     SELECT
+         actor_definition_version.id AS version_id,
+         actor_definition_version.docker_image_tag,
+         actor_definition_version.docker_repository,
+         actor_definition_version.release_stage,
+         actor_definition_version.support_level,
+         actor_definition_version.cdk_version,
+         actor_definition_version.language,
+         actor_definition_version.last_published,
+         actor_definition_version.release_date
+     FROM actor_definition_version
+     WHERE
+         actor_definition_version.actor_definition_id = :actor_definition_id
+     ORDER BY
+         actor_definition_version.last_published DESC NULLS LAST,
+         actor_definition_version.created_at DESC
+     """
+ )
+
+ # List new connector releases within the last N days
+ # Uses last_published (timestamp) rather than release_date (date only, often NULL)
+ # Note: No index on last_published, but table is small (~39K rows)
+ SELECT_NEW_CONNECTOR_RELEASES = sqlalchemy.text(
+     """
+     SELECT
+         actor_definition_version.id AS version_id,
+         actor_definition_version.actor_definition_id,
+         actor_definition_version.docker_repository,
+         actor_definition_version.docker_image_tag,
+         actor_definition_version.last_published,
+         actor_definition_version.release_date,
+         actor_definition_version.release_stage,
+         actor_definition_version.support_level,
+         actor_definition_version.cdk_version,
+         actor_definition_version.language,
+         actor_definition_version.created_at
+     FROM actor_definition_version
+     WHERE
+         actor_definition_version.last_published >= :cutoff_date
+     ORDER BY
+         actor_definition_version.last_published DESC
+     LIMIT :limit
+     """
+ )
+
+ SELECT_ACTORS_PINNED_TO_VERSION = sqlalchemy.text(
+     """
+     SELECT
+         scoped_configuration.scope_id AS actor_id,
+         scoped_configuration.resource_id AS actor_definition_id,
+         scoped_configuration.origin_type,
+         scoped_configuration.origin,
+         scoped_configuration.description,
+         scoped_configuration.created_at,
+         scoped_configuration.expires_at,
+         actor.name AS actor_name,
+         actor.workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name
+     FROM scoped_configuration
+     JOIN actor
+         ON scoped_configuration.scope_id = actor.id
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+         AND scoped_configuration.value = :actor_definition_version_id
+     ORDER BY
+         scoped_configuration.created_at DESC
+     """
+ )
+
+ # =============================================================================
+ # Sync Results Queries
+ # =============================================================================
+
+ # Get sync results for actors pinned to a specific connector definition VERSION ID
+ # This joins through scoped_configuration to find actors with version overrides
+ SELECT_SYNC_RESULTS_FOR_VERSION = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.status AS job_status,
+         jobs.started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+     JOIN actor
+         ON connection.source_id = actor.id
+     JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         jobs.config_type = 'sync'
+         AND scoped_configuration.value = :actor_definition_version_id
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # Get successful sync results for actors pinned to a specific connector definition VERSION ID
+ SELECT_SUCCESSFUL_SYNCS_FOR_VERSION = sqlalchemy.text(
+     """
+     SELECT
+         jobs.id AS job_id,
+         jobs.scope AS connection_id,
+         jobs.started_at,
+         jobs.updated_at AS job_updated_at,
+         connection.name AS connection_name,
+         actor.id AS actor_id,
+         actor.name AS actor_name,
+         actor.actor_definition_id,
+         scoped_configuration.origin_type AS pin_origin_type,
+         scoped_configuration.origin AS pin_origin,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name
+     FROM jobs
+     JOIN connection
+         ON jobs.scope = connection.id::text
+     JOIN actor
+         ON connection.source_id = actor.id
+     JOIN scoped_configuration
+         ON scoped_configuration.scope_id = actor.id
+         AND scoped_configuration.key = 'connector_version'
+         AND scoped_configuration.scope_type = 'actor'
+     JOIN workspace
+         ON actor.workspace_id = workspace.id
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         jobs.config_type = 'sync'
+         AND jobs.status = 'succeeded'
+         AND scoped_configuration.value = :actor_definition_version_id
+         AND jobs.updated_at >= :cutoff_date
+     ORDER BY
+         jobs.updated_at DESC
+     LIMIT :limit
+     """
+ )
+
+ # =============================================================================
+ # Dataplane and Workspace Queries
+ # =============================================================================
+
+ # List all dataplane groups with workspace counts
+ SELECT_DATAPLANES_LIST = sqlalchemy.text(
+     """
+     SELECT
+         dataplane_group.id AS dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         dataplane_group.organization_id,
+         dataplane_group.enabled,
+         dataplane_group.tombstone,
+         dataplane_group.created_at,
+         COUNT(workspace.id) AS workspace_count
+     FROM dataplane_group
+     LEFT JOIN workspace
+         ON workspace.dataplane_group_id = dataplane_group.id
+         AND workspace.tombstone = false
+     WHERE
+         dataplane_group.tombstone = false
+     GROUP BY
+         dataplane_group.id,
+         dataplane_group.name,
+         dataplane_group.organization_id,
+         dataplane_group.enabled,
+         dataplane_group.tombstone,
+         dataplane_group.created_at
+     ORDER BY
+         workspace_count DESC
+     """
+ )
+
+ # Get workspace info including dataplane group for EU filtering
+ SELECT_WORKSPACE_INFO = sqlalchemy.text(
+     """
+     SELECT
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.slug,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         workspace.created_at,
+         workspace.tombstone
+     FROM workspace
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         workspace.id = :workspace_id
+     """
+ )
+
+ # Get all workspaces in an organization with dataplane info
+ SELECT_ORG_WORKSPACES = sqlalchemy.text(
+     """
+     SELECT
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.slug,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         workspace.created_at,
+         workspace.tombstone
+     FROM workspace
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         workspace.organization_id = :organization_id
+         AND workspace.tombstone = false
+     ORDER BY
+         workspace.name
+     """
+ )
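
The module above only defines sqlalchemy.text() templates with named bind parameters; executing them requires an engine and parameter values supplied by the caller (the package's prod_db_access/db_engine.py, not shown in this diff, presumably provides that engine). The following is a minimal usage sketch under those assumptions; the connection URL and UUIDs are placeholders, not values from the package.

# Illustrative sketch only -- not part of the released package.
import sqlalchemy

from airbyte_ops_mcp.prod_db_access.sql import (
    SELECT_ACTORS_PINNED_TO_VERSION,
    SELECT_CONNECTIONS_BY_CONNECTOR,
)

# Placeholder DSN; a real read-only engine would come from the package's db_engine helpers.
engine = sqlalchemy.create_engine(
    "postgresql+psycopg2://readonly:password@replica-host:5432/prod-configapi"
)

with engine.connect() as conn:
    # Connections using a given connector definition; organization_id=None skips the org filter.
    connections = conn.execute(
        SELECT_CONNECTIONS_BY_CONNECTOR,
        {
            "connector_definition_id": "00000000-0000-0000-0000-000000000000",  # placeholder UUID
            "organization_id": None,
            "limit": 100,
        },
    ).mappings().all()

    # Actors pinned to a specific actor_definition_version.id via scoped_configuration.
    pinned_actors = conn.execute(
        SELECT_ACTORS_PINNED_TO_VERSION,
        {"actor_definition_version_id": "00000000-0000-0000-0000-000000000000"},  # placeholder UUID
    ).mappings().all()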
airbyte_ops_mcp/registry/__init__.py
@@ -0,0 +1,34 @@
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+ """Registry operations for Airbyte connectors.
+
+ This package provides functionality for publishing connectors to the Airbyte
+ registry, including promoting and rolling back release candidates.
+ """
+
+ from __future__ import annotations
+
+ from airbyte_ops_mcp.registry.models import (
+     ConnectorMetadata,
+     ConnectorPublishResult,
+     PublishAction,
+ )
+ from airbyte_ops_mcp.registry.publish import (
+     CONNECTOR_PATH_PREFIX,
+     METADATA_FILE_NAME,
+     get_connector_metadata,
+     is_release_candidate,
+     publish_connector,
+     strip_rc_suffix,
+ )
+
+ __all__ = [
+     "CONNECTOR_PATH_PREFIX",
+     "METADATA_FILE_NAME",
+     "ConnectorMetadata",
+     "ConnectorPublishResult",
+     "PublishAction",
+     "get_connector_metadata",
+     "is_release_candidate",
+     "publish_connector",
+     "strip_rc_suffix",
+ ]
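
The exports above suggest a version-string workflow around release candidates. The sketch below is hypothetical: the call signatures of is_release_candidate and strip_rc_suffix are assumptions inferred from the names (version string in, bool or stripped string out) and are not documented in this diff.

# Hypothetical usage -- signatures are assumed, not confirmed by this diff.
from airbyte_ops_mcp.registry import is_release_candidate, strip_rc_suffix

tag = "1.2.3-rc.1"  # made-up release-candidate tag
if is_release_candidate(tag):            # assumed: takes a version string, returns bool
    final_tag = strip_rc_suffix(tag)     # assumed: returns the version without the RC suffix
    print(f"Promoting {tag} -> {final_tag}")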
airbyte_ops_mcp/registry/models.py
@@ -0,0 +1,63 @@
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+ """Pydantic models for registry connector publish operations.
+
+ This module defines the data models used for connector publish operations
+ including applying and rolling back version overrides.
+ """
+
+ from __future__ import annotations
+
+ from typing import Literal
+
+ from pydantic import BaseModel, Field
+
+
+ class ConnectorMetadata(BaseModel):
+     """Connector metadata from metadata.yaml.
+
+     This model represents the essential metadata about a connector
+     read from its metadata.yaml file in the Airbyte monorepo.
+     """
+
+     name: str = Field(description="The connector technical name")
+     docker_repository: str = Field(description="The Docker repository")
+     docker_image_tag: str = Field(description="The Docker image tag/version")
+     support_level: str | None = Field(
+         default=None, description="The support level (certified, community, etc.)"
+     )
+     definition_id: str | None = Field(
+         default=None, description="The connector definition ID"
+     )
+
+
+ class ConnectorPublishResult(BaseModel):
+     """Result of a connector publish operation.
+
+     This model provides detailed information about the outcome of a
+     connector publish operation (apply or rollback version override).
+     """
+
+     connector: str = Field(description="The connector technical name")
+     version: str = Field(description="The connector version")
+     action: Literal["apply-version-override", "rollback-version-override"] = Field(
+         description="The action performed"
+     )
+     status: Literal["success", "failure", "dry-run"] = Field(
+         description="The status of the operation"
+     )
+     docker_image: str | None = Field(
+         default=None, description="The Docker image name if applicable"
+     )
+     registry_updated: bool = Field(
+         default=False, description="Whether the registry was updated"
+     )
+     message: str | None = Field(default=None, description="Additional status message")
+
+     def __str__(self) -> str:
+         """Return a string representation of the publish result."""
+         status_prefix = "dry-run" if self.status == "dry-run" else self.status
+         return f"[{status_prefix}] {self.connector}:{self.version} - {self.action}"
+
+
+ # Type alias for publish actions
+ PublishAction = Literal["apply-version-override", "rollback-version-override"]
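
Since ConnectorPublishResult declares all of its fields above, a caller can construct and render one directly. A small sketch follows; the field values are made up for illustration and are not taken from any real publish run.

# Illustrative values only -- not from an actual publish operation.
from airbyte_ops_mcp.registry.models import ConnectorPublishResult

result = ConnectorPublishResult(
    connector="source-example",                   # hypothetical connector name
    version="1.0.0",                              # hypothetical version
    action="apply-version-override",
    status="dry-run",
    docker_image="airbyte/source-example:1.0.0",  # hypothetical image
    registry_updated=False,
    message="Dry run: no changes written.",
)
print(result)  # [dry-run] source-example:1.0.0 - apply-version-override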