airbyte-internal-ops 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/METADATA +70 -1
  2. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/RECORD +30 -31
  3. airbyte_ops_mcp/__init__.py +30 -2
  4. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/pipeline.py +2 -8
  5. airbyte_ops_mcp/airbyte_repo/list_connectors.py +176 -4
  6. airbyte_ops_mcp/airbyte_repo/utils.py +5 -3
  7. airbyte_ops_mcp/cli/cloud.py +35 -36
  8. airbyte_ops_mcp/cli/registry.py +90 -1
  9. airbyte_ops_mcp/cli/repo.py +15 -0
  10. airbyte_ops_mcp/connection_config_retriever/__init__.py +26 -0
  11. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/audit_logging.py +5 -6
  12. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/retrieval.py +8 -22
  13. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/secrets_resolution.py +8 -42
  14. airbyte_ops_mcp/constants.py +35 -0
  15. airbyte_ops_mcp/live_tests/connection_secret_retriever.py +1 -1
  16. airbyte_ops_mcp/mcp/github_repo_ops.py +10 -0
  17. airbyte_ops_mcp/mcp/live_tests.py +21 -6
  18. airbyte_ops_mcp/mcp/prod_db_queries.py +357 -0
  19. airbyte_ops_mcp/mcp/server.py +2 -0
  20. airbyte_ops_mcp/mcp/server_info.py +2 -2
  21. airbyte_ops_mcp/prod_db_access/__init__.py +34 -0
  22. airbyte_ops_mcp/prod_db_access/db_engine.py +127 -0
  23. airbyte_ops_mcp/prod_db_access/py.typed +0 -0
  24. airbyte_ops_mcp/prod_db_access/queries.py +272 -0
  25. airbyte_ops_mcp/prod_db_access/sql.py +353 -0
  26. airbyte_ops_mcp/registry/__init__.py +34 -0
  27. airbyte_ops_mcp/registry/models.py +63 -0
  28. airbyte_ops_mcp/registry/publish.py +368 -0
  29. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/__init__.py +0 -3
  30. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/commands.py +0 -242
  31. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/context.py +0 -175
  32. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/pipeline.py +0 -1056
  33. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/__init__.py +0 -3
  34. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/commands.py +0 -127
  35. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/steps/python_registry.py +0 -238
  36. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/models/contexts/python_registry_publish.py +0 -119
  37. airbyte_ops_mcp/live_tests/_connection_retriever/__init__.py +0 -35
  38. airbyte_ops_mcp/live_tests/_connection_retriever/consts.py +0 -33
  39. airbyte_ops_mcp/live_tests/_connection_retriever/db_access.py +0 -82
  40. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/WHEEL +0 -0
  41. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,63 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """Pydantic models for registry connector publish operations.
3
+
4
+ This module defines the data models used for connector publish operations
5
+ including applying and rolling back version overrides.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from typing import Literal
11
+
12
+ from pydantic import BaseModel, Field
13
+
14
+
15
class ConnectorMetadata(BaseModel):
    """Essential connector metadata parsed from metadata.yaml.

    Holds the subset of a connector's metadata.yaml (in the Airbyte
    monorepo) needed by registry publish operations.
    """

    # Technical name, e.g. "source-github".
    name: str = Field(description="The connector technical name")
    # Docker coordinates as declared under data.dockerRepository / data.dockerImageTag.
    docker_repository: str = Field(description="The Docker repository")
    docker_image_tag: str = Field(description="The Docker image tag/version")
    # Optional fields; absent from some metadata files.
    support_level: str | None = Field(default=None, description="The support level (certified, community, etc.)")
    definition_id: str | None = Field(default=None, description="The connector definition ID")
31
+
32
+
33
class ConnectorPublishResult(BaseModel):
    """Result of a connector publish operation.

    This model provides detailed information about the outcome of a
    connector publish operation (apply or rollback version override).
    """

    connector: str = Field(description="The connector technical name")
    version: str = Field(description="The connector version")
    action: Literal["apply-version-override", "rollback-version-override"] = Field(
        description="The action performed"
    )
    status: Literal["success", "failure", "dry-run"] = Field(
        description="The status of the operation"
    )
    docker_image: str | None = Field(
        default=None, description="The Docker image name if applicable"
    )
    registry_updated: bool = Field(
        default=False, description="Whether the registry was updated"
    )
    message: str | None = Field(default=None, description="Additional status message")

    def __str__(self) -> str:
        """Return a one-line summary: ``[<status>] <connector>:<version> - <action>``."""
        # The previous implementation computed a "status_prefix" via
        # `"dry-run" if self.status == "dry-run" else self.status`, which is
        # always identical to self.status — the dead conditional is removed.
        return f"[{self.status}] {self.connector}:{self.version} - {self.action}"
60
+
61
+
62
# Type alias for the publish actions accepted by the registry publish flow:
# promote an RC to stable ("apply-version-override") or undo one
# ("rollback-version-override"). Mirrors ConnectorPublishResult.action.
PublishAction = Literal["apply-version-override", "rollback-version-override"]
@@ -0,0 +1,368 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """Core logic for registry connector publish operations.
3
+
4
+ This module provides the core functionality for publishing connectors
5
+ to the Airbyte registry, including applying and rolling back version overrides.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import json
11
+ import os
12
+ from pathlib import Path
13
+
14
+ import yaml
15
+ from google.cloud import storage
16
+ from google.oauth2 import service_account
17
+
18
+ from airbyte_ops_mcp.registry.models import (
19
+ ConnectorMetadata,
20
+ ConnectorPublishResult,
21
+ PublishAction,
22
+ )
23
+
24
# Location of connector sources (and their metadata.yaml) inside the monorepo.
CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
METADATA_FILE_NAME = "metadata.yaml"
# GCS layout: <METADATA_FOLDER>/<docker_repo>/{<version>|latest|release_candidate}/metadata.yaml
METADATA_FOLDER = "metadata"
LATEST_GCS_FOLDER_NAME = "latest"
RELEASE_CANDIDATE_GCS_FOLDER_NAME = "release_candidate"

# Bucket names for metadata service
PROD_METADATA_SERVICE_BUCKET_NAME = "prod-airbyte-cloud-connector-metadata-service"
DEV_METADATA_SERVICE_BUCKET_NAME = "dev-airbyte-cloud-connector-metadata-service-2"

# Default to dev bucket for safety - use --prod flag to target production
DEFAULT_METADATA_SERVICE_BUCKET_NAME = DEV_METADATA_SERVICE_BUCKET_NAME
36
+
37
+
38
def _get_gcs_client() -> storage.Client:
    """Build a GCS storage client from the GCS_CREDENTIALS env var.

    Returns:
        A google.cloud.storage Client authenticated with the service account
        key (JSON string) held in the GCS_CREDENTIALS environment variable.

    Raises:
        ValueError: If GCS_CREDENTIALS is unset/empty, or does not contain
            valid JSON.
    """
    gcs_creds = os.environ.get("GCS_CREDENTIALS")
    if not gcs_creds:
        raise ValueError(
            "GCS_CREDENTIALS environment variable is required for registry operations"
        )
    try:
        service_account_info = json.loads(gcs_creds)
    except json.JSONDecodeError as err:
        # Give an actionable error instead of a bare JSON traceback.
        # JSONDecodeError subclasses ValueError, so existing callers that
        # caught ValueError keep working.
        raise ValueError(
            "GCS_CREDENTIALS must contain a valid JSON service account key"
        ) from err
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info
    )
    return storage.Client(credentials=credentials)
50
+
51
+
52
def _get_bucket_name(use_prod: bool = False) -> str:
    """Resolve which metadata-service bucket to target.

    Resolution order: a non-empty METADATA_SERVICE_BUCKET_NAME environment
    variable always wins; otherwise the prod or dev bucket is chosen from
    ``use_prod``.

    Args:
        use_prod: If True, use the production bucket. Otherwise use dev bucket.
            Can be overridden by METADATA_SERVICE_BUCKET_NAME env var.

    Returns:
        The bucket name to use for GCS operations.
    """
    override = os.environ.get("METADATA_SERVICE_BUCKET_NAME")
    fallback = (
        PROD_METADATA_SERVICE_BUCKET_NAME
        if use_prod
        else DEV_METADATA_SERVICE_BUCKET_NAME
    )
    # `or` keeps the original semantics: an unset OR empty env var falls back.
    return override or fallback
71
+
72
+
73
def get_connector_metadata(repo_path: Path, connector_name: str) -> ConnectorMetadata:
    """Read connector metadata from metadata.yaml.

    Args:
        repo_path: Path to the Airbyte monorepo.
        connector_name: The connector technical name (e.g., 'source-github').

    Returns:
        ConnectorMetadata object with the connector's metadata.

    Raises:
        FileNotFoundError: If the connector directory or metadata file doesn't exist.
        ValueError: If the metadata file does not parse to a YAML mapping.
    """
    connector_dir = repo_path / CONNECTOR_PATH_PREFIX / connector_name
    if not connector_dir.exists():
        raise FileNotFoundError(f"Connector directory not found: {connector_dir}")

    metadata_file = connector_dir / METADATA_FILE_NAME
    if not metadata_file.exists():
        raise FileNotFoundError(f"Metadata file not found: {metadata_file}")

    with open(metadata_file) as f:
        metadata = yaml.safe_load(f)

    # yaml.safe_load returns None for an empty file — guard so we fail with a
    # clear error instead of an AttributeError on .get() below.
    if metadata is None:
        metadata = {}
    if not isinstance(metadata, dict):
        raise ValueError(
            f"Malformed metadata file (expected a mapping): {metadata_file}"
        )

    # A present-but-null "data:" section also yields None; normalize to {}.
    data = metadata.get("data") or {}
    return ConnectorMetadata(
        name=connector_name,
        docker_repository=data.get("dockerRepository", f"airbyte/{connector_name}"),
        docker_image_tag=data.get("dockerImageTag", "unknown"),
        support_level=data.get("supportLevel"),
        definition_id=data.get("definitionId"),
    )
105
+
106
+
107
def is_release_candidate(version: str) -> bool:
    """Check if a version string is a release candidate.

    A release candidate version has the documented form ``X.Y.Z-rc.N`` where
    N is an integer build number.

    Args:
        version: The version string to check.

    Returns:
        True if the version ends with a well-formed ``-rc.N`` suffix,
        False otherwise.
    """
    import re  # local import keeps this module's top-level deps unchanged

    # Stricter than the previous plain substring test: malformed suffixes
    # such as "1.2.3-rc." (no build number) are rejected rather than being
    # treated as promotable release candidates.
    return re.search(r"-rc\.\d+$", version) is not None
117
+
118
+
119
def strip_rc_suffix(version: str) -> str:
    """Strip the release candidate suffix from a version string.

    Args:
        version: The version string (e.g., '1.2.3-rc.1').

    Returns:
        The base version without RC suffix (e.g., '1.2.3').
        Returns the original version if no RC suffix is present.
    """
    # str.partition returns the whole string as the head when the separator
    # is absent, so no explicit membership check is needed; like split(), it
    # cuts at the first occurrence of "-rc.".
    base, _sep, _rc = version.partition("-rc.")
    return base
132
+
133
+
134
def publish_connector(
    repo_path: Path,
    connector_name: str,
    action: PublishAction,
    dry_run: bool = False,
    use_prod: bool = False,
) -> ConnectorPublishResult:
    """Publish a connector to the Airbyte registry.

    Loads the connector's metadata from the monorepo, then dispatches to the
    apply or rollback implementation.

    Args:
        repo_path: Path to the Airbyte monorepo.
        connector_name: The connector technical name (e.g., 'source-github').
        action: The publish action to perform ('apply-version-override' or 'rollback-version-override').
        dry_run: If True, show what would be published without making changes.
        use_prod: If True, target the production GCS bucket. Otherwise use dev bucket (default).

    Returns:
        ConnectorPublishResult with the operation outcome.

    Raises:
        FileNotFoundError: If the connector or metadata file doesn't exist.
    """
    metadata = get_connector_metadata(repo_path, connector_name)
    # PublishAction is a two-value Literal, so anything that isn't "apply"
    # is the rollback action.
    handler = (
        _apply_version_override
        if action == "apply-version-override"
        else _rollback_version_override
    )
    return handler(metadata, dry_run, use_prod)
165
+
166
+
167
def _apply_version_override(
    metadata: ConnectorMetadata, dry_run: bool, use_prod: bool = False
) -> ConnectorPublishResult:
    """Apply a version override to promote an RC to stable.

    This copies the release candidate metadata to the 'latest' path in GCS,
    then deletes the release candidate metadata.

    Requires GCS_CREDENTIALS environment variable to be set.
    """
    version = metadata.docker_image_tag
    docker_repo = metadata.docker_repository

    def _failure(message: str) -> ConnectorPublishResult:
        # Every failure outcome shares the same shape; only the message varies.
        return ConnectorPublishResult(
            connector=metadata.name,
            version=version,
            action="apply-version-override",
            status="failure",
            docker_image=f"{docker_repo}:{version}",
            registry_updated=False,
            message=message,
        )

    if not is_release_candidate(version):
        return _failure(
            f"Version '{version}' is not a release candidate. "
            "Expected format: X.Y.Z-rc.N"
        )

    target_version = strip_rc_suffix(version)

    if dry_run:
        return ConnectorPublishResult(
            connector=metadata.name,
            version=target_version,
            action="apply-version-override",
            status="dry-run",
            docker_image=f"{docker_repo}:{target_version}",
            registry_updated=False,
            message=f"Would apply version override for {metadata.name}: {version} -> {target_version}",
        )

    bucket = _get_gcs_client().bucket(_get_bucket_name(use_prod))

    gcp_connector_dir = f"{METADATA_FOLDER}/{docker_repo}"
    version_path = f"{gcp_connector_dir}/{version}/{METADATA_FILE_NAME}"
    rc_path = (
        f"{gcp_connector_dir}/{RELEASE_CANDIDATE_GCS_FOLDER_NAME}/{METADATA_FILE_NAME}"
    )
    latest_path = f"{gcp_connector_dir}/{LATEST_GCS_FOLDER_NAME}/{METADATA_FILE_NAME}"

    version_blob = bucket.blob(version_path)
    rc_blob = bucket.blob(rc_path)
    latest_blob = bucket.blob(latest_path)

    if not version_blob.exists():
        return _failure(f"Version metadata file not found: {version_path}")
    if not rc_blob.exists():
        return _failure(f"Release candidate metadata file not found: {rc_path}")

    # reload() refreshes blob properties so md5_hash is populated before comparing.
    version_blob.reload()
    rc_blob.reload()
    if rc_blob.md5_hash != version_blob.md5_hash:
        return _failure(
            f"RC metadata hash does not match version metadata hash. "
            f"Unsafe to promote. RC: {rc_path}, Version: {version_path}"
        )

    # Promote: copy the RC metadata over "latest", then remove the RC marker.
    bucket.copy_blob(rc_blob, bucket, latest_blob.name)
    rc_blob.delete()

    return ConnectorPublishResult(
        connector=metadata.name,
        version=target_version,
        action="apply-version-override",
        status="success",
        docker_image=f"{docker_repo}:{target_version}",
        registry_updated=True,
        message=f"Applied version override for {metadata.name}: {version} -> {target_version}. "
        f"Copied RC to latest and deleted RC metadata.",
    )
269
+
270
+
271
def _rollback_version_override(
    metadata: ConnectorMetadata, dry_run: bool, use_prod: bool = False
) -> ConnectorPublishResult:
    """Rollback a version override by deleting the RC metadata from GCS.

    This deletes both the release candidate metadata and the versioned metadata
    after verifying their hashes match.

    Requires GCS_CREDENTIALS environment variable to be set.
    """
    version = metadata.docker_image_tag
    docker_repo = metadata.docker_repository

    def _failure(message: str) -> ConnectorPublishResult:
        # Shared shape for every failure outcome; only the message varies.
        return ConnectorPublishResult(
            connector=metadata.name,
            version=version,
            action="rollback-version-override",
            status="failure",
            docker_image=f"{docker_repo}:{version}",
            registry_updated=False,
            message=message,
        )

    if not is_release_candidate(version):
        return _failure(
            f"Version '{version}' is not a release candidate. "
            "Expected format: X.Y.Z-rc.N"
        )

    if dry_run:
        return ConnectorPublishResult(
            connector=metadata.name,
            version=version,
            action="rollback-version-override",
            status="dry-run",
            docker_image=f"{docker_repo}:{version}",
            registry_updated=False,
            message=f"Would rollback version override for {metadata.name} (current: {version})",
        )

    bucket = _get_gcs_client().bucket(_get_bucket_name(use_prod))

    gcp_connector_dir = f"{METADATA_FOLDER}/{docker_repo}"
    version_path = f"{gcp_connector_dir}/{version}/{METADATA_FILE_NAME}"
    rc_path = (
        f"{gcp_connector_dir}/{RELEASE_CANDIDATE_GCS_FOLDER_NAME}/{METADATA_FILE_NAME}"
    )

    version_blob = bucket.blob(version_path)
    rc_blob = bucket.blob(rc_path)

    if not version_blob.exists():
        return _failure(f"Version metadata file not found: {version_path}")
    if not rc_blob.exists():
        return _failure(f"Release candidate metadata file not found: {rc_path}")

    # reload() populates md5_hash on both blobs before the safety comparison.
    version_blob.reload()
    rc_blob.reload()
    if rc_blob.md5_hash != version_blob.md5_hash:
        return _failure(
            f"RC metadata hash does not match version metadata hash. "
            f"Unsafe to delete. RC: {rc_path}, Version: {version_path}"
        )

    # Roll back: remove both the RC marker and the versioned metadata.
    rc_blob.delete()
    version_blob.delete()

    return ConnectorPublishResult(
        connector=metadata.name,
        version=version,
        action="rollback-version-override",
        status="success",
        docker_image=f"{docker_repo}:{version}",
        registry_updated=True,
        message=f"Rolled back version override for {metadata.name}. "
        f"Deleted RC and version metadata from GCS.",
    )
@@ -1,3 +0,0 @@
1
- #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
3
- #
@@ -1,242 +0,0 @@
1
- #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
3
- #
4
- from typing import Callable, Dict, Iterable, List
5
-
6
- import asyncclick as click
7
- from consts import (
8
- DEFAULT_PYTHON_PACKAGE_REGISTRY_CHECK_URL,
9
- DEFAULT_PYTHON_PACKAGE_REGISTRY_URL,
10
- ContextState,
11
- )
12
- from pipelines import main_logger
13
- from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
14
- from pipelines.airbyte_ci.connectors.publish.context import (
15
- PublishConnectorContext,
16
- RolloutMode,
17
- )
18
- from pipelines.airbyte_ci.connectors.publish.pipeline import (
19
- reorder_contexts,
20
- run_connector_promote_pipeline,
21
- run_connector_publish_pipeline,
22
- run_connector_rollback_pipeline,
23
- )
24
- from pipelines.cli.click_decorators import click_ci_requirements_option
25
- from pipelines.cli.confirm_prompt import confirm
26
- from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
27
- from pipelines.cli.secrets import wrap_gcp_credentials_in_secret, wrap_in_secret
28
- from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
29
- from pipelines.helpers.utils import fail_if_missing_docker_hub_creds
30
- from pipelines.models.secrets import Secret
31
-
32
# Maps each rollout mode to the pipeline coroutine that implements it; used by
# the publish command to select which pipeline run_connectors_pipelines executes.
ROLLOUT_MODE_TO_PIPELINE_FUNCTION: Dict[RolloutMode, Callable] = {
    RolloutMode.PUBLISH: run_connector_publish_pipeline,
    RolloutMode.PROMOTE: run_connector_promote_pipeline,
    RolloutMode.ROLLBACK: run_connector_rollback_pipeline,
}
37
-
38
-
39
# Third-party connectors can't be published with this pipeline, skip them.
# This is not the same as partner connectors. Partner connectors use our tech stack and can
# be published just fine. Third-party connectors are in their own subdirectory.
def filter_out_third_party_connectors(
    selected_connectors_with_modified_files: Iterable[ConnectorWithModifiedFiles],
) -> List[ConnectorWithModifiedFiles]:
    """
    Return the list of connectors filtering out the connectors stored in connectors/third-party directory.

    Args:
        selected_connectors_with_modified_files: Candidate connectors selected for publishing.

    Returns:
        The connectors whose ``is_third_party`` flag is False; skipped ones are logged.
    """
    filtered_connectors = []
    for connector in selected_connectors_with_modified_files:
        if connector.is_third_party:
            # Log the skip so pipeline output explains why a selected connector is absent.
            main_logger.info(
                f"Skipping third party connector {connector.technical_name} from the list of connectors"
            )
        else:
            filtered_connectors.append(connector)
    return filtered_connectors
57
-
58
-
59
@click.command(
    cls=DaggerPipelineCommand, help="Publish all images for the selected connectors."
)
@click_ci_requirements_option()
@click.option(
    "--pre-release/--main-release",
    help="Use this flag if you want to publish pre-release images.",
    default=True,
    type=bool,
)
@click.option(
    "--spec-cache-gcs-credentials",
    help="The service account key to upload files to the GCS bucket hosting spec cache.",
    type=click.STRING,
    required=True,
    envvar="SPEC_CACHE_GCS_CREDENTIALS",
    callback=wrap_gcp_credentials_in_secret,
)
@click.option(
    "--spec-cache-bucket-name",
    help="The name of the GCS bucket where specs will be cached.",
    type=click.STRING,
    required=True,
    envvar="SPEC_CACHE_BUCKET_NAME",
)
@click.option(
    "--metadata-service-gcs-credentials",
    help="The service account key to upload files to the GCS bucket hosting the metadata files.",
    type=click.STRING,
    required=True,
    envvar="METADATA_SERVICE_GCS_CREDENTIALS",
    callback=wrap_gcp_credentials_in_secret,
)
@click.option(
    "--metadata-service-bucket-name",
    help="The name of the GCS bucket where metadata files will be uploaded.",
    type=click.STRING,
    required=True,
    envvar="METADATA_SERVICE_BUCKET_NAME",
)
@click.option(
    "--slack-webhook",
    help="The Slack webhook URL to send notifications to.",
    type=click.STRING,
    envvar="SLACK_WEBHOOK",
)
@click.option(
    "--python-registry-token",
    help="Access token for python registry",
    type=click.STRING,
    envvar="PYTHON_REGISTRY_TOKEN",
    callback=wrap_in_secret,
)
@click.option(
    "--python-registry-url",
    help="Which python registry url to publish to. If not set, the default pypi is used. For test pypi, use https://test.pypi.org/legacy/",
    type=click.STRING,
    default=DEFAULT_PYTHON_PACKAGE_REGISTRY_URL,
    envvar="PYTHON_REGISTRY_URL",
)
@click.option(
    "--python-registry-check-url",
    help="Which url to check whether a certain version is published already. If not set, the default pypi is used. For test pypi, use https://test.pypi.org/pypi/",
    type=click.STRING,
    default=DEFAULT_PYTHON_PACKAGE_REGISTRY_CHECK_URL,
    envvar="PYTHON_REGISTRY_CHECK_URL",
)
@click.option(
    "--promote-release-candidate",
    help="Promote a release candidate to a main release.",
    type=click.BOOL,
    default=False,
    is_flag=True,
)
@click.option(
    "--rollback-release-candidate",
    help="Rollback a release candidate to a previous version.",
    type=click.BOOL,
    default=False,
    is_flag=True,
)
@click.pass_context
async def publish(
    ctx: click.Context,
    pre_release: bool,
    spec_cache_gcs_credentials: Secret,
    spec_cache_bucket_name: str,
    metadata_service_bucket_name: str,
    metadata_service_gcs_credentials: Secret,
    slack_webhook: str,
    python_registry_token: Secret,
    python_registry_url: str,
    python_registry_check_url: str,
    promote_release_candidate: bool,
    rollback_release_candidate: bool,
) -> bool:
    """Run the publish, promote, or rollback pipeline for the selected connectors.

    Returns True when every connector pipeline finished successfully, or when
    there was nothing left to publish after filtering out third-party connectors.
    """
    # The two RC flags are mutually exclusive; neither flag means a regular publish.
    if promote_release_candidate and rollback_release_candidate:
        raise click.UsageError(
            "You can't promote and rollback a release candidate at the same time."
        )
    elif promote_release_candidate:
        rollout_mode = RolloutMode.PROMOTE
    elif rollback_release_candidate:
        rollout_mode = RolloutMode.ROLLBACK
    else:
        rollout_mode = RolloutMode.PUBLISH

    ctx.obj["selected_connectors_with_modified_files"] = (
        filter_out_third_party_connectors(
            ctx.obj["selected_connectors_with_modified_files"]
        )
    )
    # An empty selection after filtering is treated as success, not failure.
    if not ctx.obj["selected_connectors_with_modified_files"]:
        return True

    if ctx.obj["is_local"]:
        confirm(
            "Publishing from a local environment is not recommended and requires to be logged in Airbyte's DockerHub registry, do you want to continue?",
            abort=True,
        )

    fail_if_missing_docker_hub_creds(ctx)

    # Build one publish context per selected connector; reorder_contexts decides
    # the execution order.
    publish_connector_contexts = reorder_contexts(
        [
            PublishConnectorContext(
                connector=connector,
                pre_release=pre_release,
                spec_cache_gcs_credentials=spec_cache_gcs_credentials,
                spec_cache_bucket_name=spec_cache_bucket_name,
                metadata_service_gcs_credentials=metadata_service_gcs_credentials,
                metadata_bucket_name=metadata_service_bucket_name,
                docker_hub_username=Secret(
                    "docker_hub_username", ctx.obj["secret_stores"]["in_memory"]
                ),
                docker_hub_password=Secret(
                    "docker_hub_password", ctx.obj["secret_stores"]["in_memory"]
                ),
                slack_webhook=slack_webhook,
                ci_report_bucket=ctx.obj["ci_report_bucket_name"],
                report_output_prefix=ctx.obj["report_output_prefix"],
                is_local=ctx.obj["is_local"],
                git_branch=ctx.obj["git_branch"],
                git_revision=ctx.obj["git_revision"],
                diffed_branch=ctx.obj["diffed_branch"],
                git_repo_url=ctx.obj["git_repo_url"],
                gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
                dagger_logs_url=ctx.obj.get("dagger_logs_url"),
                pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
                ci_context=ctx.obj.get("ci_context"),
                ci_gcp_credentials=ctx.obj["ci_gcp_credentials"],
                pull_request=ctx.obj.get("pull_request"),
                s3_build_cache_access_key_id=ctx.obj.get(
                    "s3_build_cache_access_key_id"
                ),
                s3_build_cache_secret_key=ctx.obj.get("s3_build_cache_secret_key"),
                use_local_cdk=ctx.obj.get("use_local_cdk"),
                use_cdk_ref=ctx.obj.get("use_cdk_ref"),
                python_registry_token=python_registry_token,
                python_registry_url=python_registry_url,
                python_registry_check_url=python_registry_check_url,
                rollout_mode=rollout_mode,
                ci_github_access_token=ctx.obj.get("ci_github_access_token"),
            )
            for connector in ctx.obj["selected_connectors_with_modified_files"]
        ]
    )
    main_logger.warn(
        "Concurrency is forced to 1. For stability reasons we disable parallel publish pipelines."
    )
    ctx.obj["concurrency"] = 1

    ran_publish_connector_contexts = await run_connectors_pipelines(
        publish_connector_contexts,
        ROLLOUT_MODE_TO_PIPELINE_FUNCTION[rollout_mode],
        f"{rollout_mode.value} connectors",
        ctx.obj["concurrency"],
        ctx.obj["dagger_logs_path"],
        ctx.obj["execute_timeout"],
    )
    # Overall success requires every per-connector pipeline to have succeeded.
    return all(
        context.state is ContextState.SUCCESSFUL
        for context in ran_publish_connector_contexts
    )