airbyte-internal-ops 0.1.4 → 0.1.6 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
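To reproduce a comparison like this locally, the sketch below downloads both wheels and diffs their unpacked contents. The package name and versions come from this page; the use of `pip download` and the scratch directory layout are illustrative assumptions, not part of the package itself.

```python
# Minimal sketch: fetch two wheel versions from the registry and compare them.
import subprocess
import zipfile
import filecmp
from pathlib import Path

PACKAGE = "airbyte-internal-ops"  # distribution name as shown on this page
OLD, NEW = "0.1.4", "0.1.6"


def fetch_and_extract(version: str, workdir: Path) -> Path:
    """Download the wheel for `version` into `workdir` and unpack it."""
    dest = workdir / version
    dest.mkdir(parents=True, exist_ok=True)
    subprocess.run(
        ["pip", "download", f"{PACKAGE}=={version}",
         "--no-deps", "--only-binary", ":all:", "-d", str(dest)],
        check=True,
    )
    wheel = next(dest.glob("*.whl"))
    extracted = dest / "unpacked"
    with zipfile.ZipFile(wheel) as zf:
        zf.extractall(extracted)
    return extracted


if __name__ == "__main__":
    workdir = Path("wheel_diff")
    old_dir = fetch_and_extract(OLD, workdir)
    new_dir = fetch_and_extract(NEW, workdir)
    # Report files added, removed, or changed between the two unpacked wheels.
    filecmp.dircmp(old_dir, new_dir).report_full_closure()
```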
Files changed (41)
  1. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/METADATA +70 -1
  2. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/RECORD +30 -31
  3. airbyte_ops_mcp/__init__.py +30 -2
  4. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/pipeline.py +2 -8
  5. airbyte_ops_mcp/airbyte_repo/list_connectors.py +176 -4
  6. airbyte_ops_mcp/airbyte_repo/utils.py +5 -3
  7. airbyte_ops_mcp/cli/cloud.py +35 -36
  8. airbyte_ops_mcp/cli/registry.py +90 -1
  9. airbyte_ops_mcp/cli/repo.py +15 -0
  10. airbyte_ops_mcp/connection_config_retriever/__init__.py +26 -0
  11. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/audit_logging.py +5 -6
  12. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/retrieval.py +8 -22
  13. airbyte_ops_mcp/{live_tests/_connection_retriever → connection_config_retriever}/secrets_resolution.py +8 -42
  14. airbyte_ops_mcp/constants.py +35 -0
  15. airbyte_ops_mcp/live_tests/connection_secret_retriever.py +1 -1
  16. airbyte_ops_mcp/mcp/github_repo_ops.py +10 -0
  17. airbyte_ops_mcp/mcp/live_tests.py +21 -6
  18. airbyte_ops_mcp/mcp/prod_db_queries.py +357 -0
  19. airbyte_ops_mcp/mcp/server.py +2 -0
  20. airbyte_ops_mcp/mcp/server_info.py +2 -2
  21. airbyte_ops_mcp/prod_db_access/__init__.py +34 -0
  22. airbyte_ops_mcp/prod_db_access/db_engine.py +127 -0
  23. airbyte_ops_mcp/prod_db_access/py.typed +0 -0
  24. airbyte_ops_mcp/prod_db_access/queries.py +272 -0
  25. airbyte_ops_mcp/prod_db_access/sql.py +353 -0
  26. airbyte_ops_mcp/registry/__init__.py +34 -0
  27. airbyte_ops_mcp/registry/models.py +63 -0
  28. airbyte_ops_mcp/registry/publish.py +368 -0
  29. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/__init__.py +0 -3
  30. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/commands.py +0 -242
  31. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/context.py +0 -175
  32. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/publish/pipeline.py +0 -1056
  33. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/__init__.py +0 -3
  34. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/poetry/publish/commands.py +0 -127
  35. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/steps/python_registry.py +0 -238
  36. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/models/contexts/python_registry_publish.py +0 -119
  37. airbyte_ops_mcp/live_tests/_connection_retriever/__init__.py +0 -35
  38. airbyte_ops_mcp/live_tests/_connection_retriever/consts.py +0 -33
  39. airbyte_ops_mcp/live_tests/_connection_retriever/db_access.py +0 -82
  40. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/WHEEL +0 -0
  41. {airbyte_internal_ops-0.1.4.dist-info → airbyte_internal_ops-0.1.6.dist-info}/entry_points.txt +0 -0
@@ -1,1056 +0,0 @@
- #
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
- #
- from __future__ import annotations
-
- import json
- import os
- import uuid
- from datetime import datetime
- from pathlib import Path
- from typing import Dict, Iterable, List, Tuple
-
- import anyio
- import semver
- import yaml
- from airbyte_protocol.models.airbyte_protocol import (
-     ConnectorSpecification,  # type: ignore
- )
- from auto_merge.consts import AUTO_MERGE_BYPASS_CI_CHECKS_LABEL  # type: ignore
- from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage  # type: ignore
- from consts import LOCAL_BUILD_PLATFORM
- from dagger import (
-     Container,
-     Directory,
-     ExecError,
-     File,
-     ImageLayerCompression,
-     Platform,
-     QueryError,
- )
- from pipelines import consts
- from pipelines.airbyte_ci.connectors.build_image import steps
- from pipelines.airbyte_ci.connectors.publish.context import (
-     PublishConnectorContext,
-     RolloutMode,
- )
- from pipelines.airbyte_ci.connectors.reports import ConnectorReport
- from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload
- from pipelines.airbyte_ci.steps.bump_version import SetConnectorVersion
- from pipelines.airbyte_ci.steps.changelog import AddChangelogEntry
- from pipelines.airbyte_ci.steps.pull_request import CreateOrUpdatePullRequest
- from pipelines.airbyte_ci.steps.python_registry import (
-     PublishToPythonRegistry,
-     PythonRegistryPublishContext,
- )
- from pipelines.dagger.actions.remote_storage import upload_to_gcs
- from pipelines.dagger.actions.system import docker
- from pipelines.helpers.connectors.dagger_fs import dagger_read_file, dagger_write_file
- from pipelines.helpers.pip import is_package_published
- from pipelines.models.steps import Step, StepModifyingFiles, StepResult, StepStatus
- from pydantic import BaseModel, ValidationError
-
-
- class InvalidSpecOutputError(Exception):
-     pass
-
-
- class CheckConnectorImageDoesNotExist(Step):
-     context: PublishConnectorContext
-     title = "Check if the connector docker image does not exist on the registry."
-
-     async def _run(self) -> StepResult:
-         docker_repository, docker_tag = self.context.docker_image.split(":")
-         crane_ls = (
-             docker.with_crane(
-                 self.context,
-             )
-             .with_env_variable("CACHEBUSTER", str(uuid.uuid4()))
-             .with_exec(["ls", docker_repository], use_entrypoint=True)
-         )
-         try:
-             crane_ls_stdout = await crane_ls.stdout()
-         except ExecError as e:
-             if "NAME_UNKNOWN" in e.stderr:
-                 return StepResult(
-                     step=self,
-                     status=StepStatus.SUCCESS,
-                     stdout=f"The docker repository {docker_repository} does not exist.",
-                 )
-             else:
-                 return StepResult(
-                     step=self,
-                     status=StepStatus.FAILURE,
-                     stderr=e.stderr,
-                     stdout=e.stdout,
-                 )
-         else:  # The docker repo exists and ls was successful
-             existing_tags = crane_ls_stdout.split("\n")
-             docker_tag_already_exists = docker_tag in existing_tags
-             if docker_tag_already_exists:
-                 return StepResult(
-                     step=self,
-                     status=StepStatus.SKIPPED,
-                     stderr=f"{self.context.docker_image} already exists.",
-                 )
-             return StepResult(
-                 step=self,
-                 status=StepStatus.SUCCESS,
-                 stdout=f"No manifest found for {self.context.docker_image}.",
-             )
-
-
- class CheckPythonRegistryPackageDoesNotExist(Step):
-     context: PythonRegistryPublishContext
-     title = "Check if the connector is published on python registry"
-
-     async def _run(self) -> StepResult:
-         is_published = is_package_published(
-             self.context.package_metadata.name,
-             self.context.package_metadata.version,
-             self.context.registry_check_url,
-         )
-         if is_published:
-             return StepResult(
-                 step=self,
-                 status=StepStatus.SKIPPED,
-                 stderr=f"{self.context.package_metadata.name} already exists in version {self.context.package_metadata.version}.",
-             )
-         else:
-             return StepResult(
-                 step=self,
-                 status=StepStatus.SUCCESS,
-                 stdout=f"{self.context.package_metadata.name} does not exist in version {self.context.package_metadata.version}.",
-             )
-
-
- class ConnectorDependenciesMetadata(BaseModel):
-     connector_technical_name: str
-     connector_repository: str
-     connector_version: str
-     connector_definition_id: str
-     dependencies: List[Dict[str, str]]
-     generation_time: datetime = datetime.utcnow()
-
-
- class UploadDependenciesToMetadataService(Step):
-     context: PublishConnectorContext
-     title = "Upload connector dependencies list to GCS."
-     key_prefix = "connector_dependencies"
-
-     async def _run(
-         self, built_containers_per_platform: Dict[Platform, Container]
-     ) -> StepResult:
-         assert self.context.connector.language in [
-             ConnectorLanguage.PYTHON,
-             ConnectorLanguage.LOW_CODE,
-         ], "This step can only run for Python connectors."
-         built_container = built_containers_per_platform[LOCAL_BUILD_PLATFORM]
-         pip_freeze_output = await built_container.with_exec(["pip", "freeze"]).stdout()
-         dependencies = [
-             {"package_name": line.split("==")[0], "version": line.split("==")[1]}
-             for line in pip_freeze_output.splitlines()
-             if "==" in line
-         ]
-         connector_technical_name = self.context.connector.technical_name
-         connector_version = self.context.metadata["dockerImageTag"]
-         dependencies_metadata = ConnectorDependenciesMetadata(
-             connector_technical_name=connector_technical_name,
-             connector_repository=self.context.metadata["dockerRepository"],
-             connector_version=connector_version,
-             connector_definition_id=self.context.metadata["definitionId"],
-             dependencies=dependencies,
-         ).json()
-         file = (
-             (await self.context.get_connector_dir())
-             .with_new_file("dependencies.json", contents=dependencies_metadata)
-             .file("dependencies.json")
-         )
-         key = f"{self.key_prefix}/{connector_technical_name}/{connector_version}/dependencies.json"
-         exit_code, stdout, stderr = await upload_to_gcs(
-             self.context.dagger_client,
-             file,
-             key,
-             self.context.metadata_bucket_name,
-             self.context.metadata_service_gcs_credentials,
-             flags=['--cache-control="no-cache"'],
-         )
-         if exit_code != 0:
-             return StepResult(
-                 step=self, status=StepStatus.FAILURE, stdout=stdout, stderr=stderr
-             )
-         return StepResult(
-             step=self,
-             status=StepStatus.SUCCESS,
-             stdout="Uploaded connector dependencies to metadata service bucket.",
-         )
-
-
- class PushConnectorImageToRegistry(Step):
-     context: PublishConnectorContext
-     title = "Push connector image to registry"
-
-     @property
-     def latest_docker_image_name(self) -> str:
-         return f"{self.context.docker_repository}:latest"
-
-     @property
-     def should_push_latest_tag(self) -> bool:
-         """
-         We don't want to push the latest tag for release candidates or pre-releases.
-
-         Returns:
-             bool: True if the latest tag should be pushed, False otherwise.
-         """
-         is_release_candidate = "-rc" in self.context.connector.version
-         is_pre_release = self.context.pre_release
-         return not (is_release_candidate or is_pre_release)
-
-     async def _run(
-         self, built_containers_per_platform: List[Container], attempts: int = 3
-     ) -> StepResult:
-         try:
-             image_ref = await built_containers_per_platform[0].publish(
-                 f"docker.io/{self.context.docker_image}",
-                 platform_variants=built_containers_per_platform[1:],
-                 forced_compression=ImageLayerCompression.Gzip,
-             )
-             if self.should_push_latest_tag:
-                 image_ref = await built_containers_per_platform[0].publish(
-                     f"docker.io/{self.latest_docker_image_name}",
-                     platform_variants=built_containers_per_platform[1:],
-                     forced_compression=ImageLayerCompression.Gzip,
-                 )
-             return StepResult(
-                 step=self, status=StepStatus.SUCCESS, stdout=f"Published {image_ref}"
-             )
-         except QueryError as e:
-             if attempts > 0:
-                 self.context.logger.error(str(e))
-                 self.context.logger.warn(
-                     f"Failed to publish {self.context.docker_image}. Retrying. {attempts} attempts left."
-                 )
-                 await anyio.sleep(5)
-                 return await self._run(built_containers_per_platform, attempts - 1)
-             return StepResult(step=self, status=StepStatus.FAILURE, stderr=str(e))
-
-
- class PushVersionImageAsLatest(Step):
-     context: PublishConnectorContext
-     title = "Push existing version image as latest"
-
-     @property
-     def latest_docker_image_name(self) -> str:
-         return f"{self.context.docker_repository}:latest"
-
-     async def _run(self, attempts: int = 3) -> StepResult:
-         per_platform_containers = [
-             self.context.dagger_client.container(platform=platform).from_(
-                 f"docker.io/{self.context.docker_image}"
-             )
-             for platform in consts.BUILD_PLATFORMS
-         ]
-
-         try:
-             image_ref = await per_platform_containers[0].publish(
-                 f"docker.io/{self.latest_docker_image_name}",
-                 platform_variants=per_platform_containers[1:],
-                 forced_compression=ImageLayerCompression.Gzip,
-             )
-             return StepResult(
-                 step=self, status=StepStatus.SUCCESS, stdout=f"Published {image_ref}"
-             )
-         except QueryError as e:
-             if attempts > 0:
-                 self.context.logger.error(str(e))
-                 self.context.logger.warn(
-                     f"Failed to publish {self.context.docker_image}. Retrying. {attempts} attempts left."
-                 )
-                 await anyio.sleep(5)
-                 return await self._run(attempts - 1)
-             return StepResult(step=self, status=StepStatus.FAILURE, stderr=str(e))
-
-
- class PullConnectorImageFromRegistry(Step):
-     context: PublishConnectorContext
-     title = "Pull connector image from registry"
-
-     async def check_if_image_only_has_gzip_layers(self) -> bool:
-         """Check if the image only has gzip layers.
-         Docker version > 21 can create images that has some layers compressed with zstd.
-         These layers are not supported by previous docker versions.
-         We want to make sure that the image we are about to release is compatible with all docker versions.
-         We use crane to inspect the manifest of the image and check if it only has gzip layers.
-         """
-         has_only_gzip_layers = True
-         for platform in consts.BUILD_PLATFORMS:
-             inspect = docker.with_crane(self.context).with_exec(
-                 [
-                     "manifest",
-                     "--platform",
-                     f"{platform!s}",
-                     f"docker.io/{self.context.docker_image}",
-                 ],
-                 use_entrypoint=True,
-             )
-             try:
-                 inspect_stdout = await inspect.stdout()
-             except ExecError as e:
-                 raise Exception(
-                     f"Failed to inspect {self.context.docker_image}: {e.stderr}"
-                 ) from e
-             try:
-                 for layer in json.loads(inspect_stdout)["layers"]:
-                     if not layer["mediaType"].endswith("gzip"):
-                         has_only_gzip_layers = False
-                         break
-             except (KeyError, json.JSONDecodeError) as e:
-                 raise Exception(
-                     f"Failed to parse manifest for {self.context.docker_image}: {inspect_stdout}"
-                 ) from e
-         return has_only_gzip_layers
-
-     async def _run(self, attempt: int = 3) -> StepResult:
-         try:
-             try:
-                 await (
-                     self.context.dagger_client.container()
-                     .from_(f"docker.io/{self.context.docker_image}")
-                     .with_exec(["spec"], use_entrypoint=True)
-                 )
-             except ExecError:
-                 if attempt > 0:
-                     await anyio.sleep(10)
-                     return await self._run(attempt - 1)
-                 else:
-                     return StepResult(
-                         step=self,
-                         status=StepStatus.FAILURE,
-                         stderr=f"Failed to pull {self.context.docker_image}",
-                     )
-             if not await self.check_if_image_only_has_gzip_layers():
-                 return StepResult(
-                     step=self,
-                     status=StepStatus.FAILURE,
-                     stderr=f"Image {self.context.docker_image} does not only have gzip compressed layers. Please rebuild the connector with Docker < 21.",
-                 )
-             else:
-                 return StepResult(
-                     step=self,
-                     status=StepStatus.SUCCESS,
-                     stdout=f"Pulled {self.context.docker_image} and validated it has gzip only compressed layers and we can run spec on it.",
-                 )
-         except QueryError as e:
-             if attempt > 0:
-                 await anyio.sleep(10)
-                 return await self._run(attempt - 1)
-             return StepResult(step=self, status=StepStatus.FAILURE, stderr=str(e))
-
-
- class UploadSpecToCache(Step):
-     context: PublishConnectorContext
-     title = "Upload connector spec to spec cache bucket"
-     default_spec_file_name = "spec.json"
-     cloud_spec_file_name = "spec.cloud.json"
-
-     @property
-     def spec_key_prefix(self) -> str:
-         return "specs/" + self.context.docker_image.replace(":", "/")
-
-     @property
-     def cloud_spec_key(self) -> str:
-         return f"{self.spec_key_prefix}/{self.cloud_spec_file_name}"
-
-     @property
-     def oss_spec_key(self) -> str:
-         return f"{self.spec_key_prefix}/{self.default_spec_file_name}"
-
-     def _parse_spec_output(self, spec_output: str) -> str:
-         parsed_spec_message = None
-         for line in spec_output.split("\n"):
-             try:
-                 parsed_json = json.loads(line)
-                 if parsed_json["type"] == "SPEC":
-                     parsed_spec_message = parsed_json
-                     break
-             except (json.JSONDecodeError, KeyError):
-                 continue
-         if parsed_spec_message:
-             parsed_spec = parsed_spec_message["spec"]
-             try:
-                 ConnectorSpecification.parse_obj(parsed_spec)
-                 return json.dumps(parsed_spec)
-             except (ValidationError, ValueError) as e:
-                 raise InvalidSpecOutputError(
-                     f"The SPEC message did not pass schema validation: {e!s}."
-                 )
-         raise InvalidSpecOutputError("No spec found in the output of the SPEC command.")
-
-     async def _get_connector_spec(
-         self, connector: Container, deployment_mode: str
-     ) -> str:
-         """
-         Get the connector spec by running the `spec` command in the connector container.
-
-         Args:
-             connector (Container): The connector container.
-             deployment_mode (str): The deployment mode to run the spec command in. Valid values are "OSS" and "CLOUD".
-         """
-         spec_output = (
-             await connector.with_env_variable("DEPLOYMENT_MODE", deployment_mode)
-             .with_exec(["spec"], use_entrypoint=True)
-             .stdout()
-         )
-         return self._parse_spec_output(spec_output)
-
-     async def _get_spec_as_file(
-         self, spec: str, name: str = "spec_to_cache.json"
-     ) -> File:
-         return (
-             (await self.context.get_connector_dir())
-             .with_new_file(name, contents=spec)
-             .file(name)
-         )
-
-     async def _run(self, built_connector: Container) -> StepResult:
-         try:
-             oss_spec: str = await self._get_connector_spec(built_connector, "OSS")
-             cloud_spec: str = await self._get_connector_spec(built_connector, "CLOUD")
-         except InvalidSpecOutputError as e:
-             return StepResult(step=self, status=StepStatus.FAILURE, stderr=str(e))
-
-         specs_to_uploads: List[Tuple[str, File]] = [
-             (self.oss_spec_key, await self._get_spec_as_file(oss_spec))
-         ]
-
-         if oss_spec != cloud_spec:
-             specs_to_uploads.append(
-                 (
-                     self.cloud_spec_key,
-                     await self._get_spec_as_file(
-                         cloud_spec, "cloud_spec_to_cache.json"
-                     ),
-                 )
-             )
-
-         for key, file in specs_to_uploads:
-             exit_code, stdout, stderr = await upload_to_gcs(
-                 self.context.dagger_client,
-                 file,
-                 key,
-                 self.context.spec_cache_bucket_name,
-                 self.context.spec_cache_gcs_credentials,
-                 flags=['--cache-control="no-cache"'],
-             )
-             if exit_code != 0:
-                 return StepResult(
-                     step=self, status=StepStatus.FAILURE, stdout=stdout, stderr=stderr
-                 )
-         return StepResult(
-             step=self,
-             status=StepStatus.SUCCESS,
-             stdout="Uploaded connector spec to spec cache bucket.",
-         )
-
-
- class UploadSbom(Step):
-     context: PublishConnectorContext
-     title = "Upload SBOM to metadata service bucket"
-     SBOM_KEY_PREFIX = "sbom"
-     SYFT_DOCKER_IMAGE = "anchore/syft:v1.6.0"
-     SBOM_FORMAT = "spdx-json"
-     IN_CONTAINER_SBOM_PATH = "sbom.json"
-     SBOM_EXTENSION = "spdx.json"
-
-     def get_syft_container(self) -> Container:
-         home_dir = os.path.expanduser("~")
-         config_path = os.path.join(home_dir, ".docker", "config.json")
-         config_file = self.dagger_client.host().file(config_path)
-         return (
-             self.dagger_client.container()
-             .from_(self.SYFT_DOCKER_IMAGE)
-             .with_mounted_file("/config/config.json", config_file)
-             .with_env_variable("DOCKER_CONFIG", "/config")
-             # Syft requires access to the docker daemon. We share the host's docker socket with the Syft container.
-             .with_unix_socket(
-                 "/var/run/docker.sock",
-                 self.dagger_client.host().unix_socket("/var/run/docker.sock"),
-             )
-         )
-
-     async def _run(self) -> StepResult:
-         try:
-             syft_container = self.get_syft_container()
-             sbom_file = await syft_container.with_exec(
-                 [
-                     self.context.docker_image,
-                     "-o",
-                     f"{self.SBOM_FORMAT}={self.IN_CONTAINER_SBOM_PATH}",
-                 ],
-                 use_entrypoint=True,
-             ).file(self.IN_CONTAINER_SBOM_PATH)
-         except ExecError as e:
-             return StepResult(
-                 step=self, status=StepStatus.FAILURE, stderr=str(e), exc_info=e
-             )
-
-         # This will lead to a key like: sbom/airbyte/source-faker/0.1.0.json
-         key = f"{self.SBOM_KEY_PREFIX}/{self.context.docker_image.replace(':', '/')}.{self.SBOM_EXTENSION}"
-         exit_code, stdout, stderr = await upload_to_gcs(
-             self.context.dagger_client,
-             sbom_file,
-             key,
-             self.context.metadata_bucket_name,
-             self.context.metadata_service_gcs_credentials,
-             flags=['--cache-control="no-cache"', "--content-type=application/json"],
-         )
-         if exit_code != 0:
-             return StepResult(
-                 step=self, status=StepStatus.FAILURE, stdout=stdout, stderr=stderr
-             )
-         return StepResult(
-             step=self,
-             status=StepStatus.SUCCESS,
-             stdout="Uploaded SBOM to metadata service bucket.",
-         )
-
-
- class SetPromotedVersion(SetConnectorVersion):
-     context: PublishConnectorContext
-     title = "Promote release candidate"
-
-     @property
-     def current_semver_version(self) -> semver.Version:
-         return semver.Version.parse(self.context.connector.version)
-
-     @property
-     def promoted_semver_version(self) -> semver.Version:
-         return self.current_semver_version.replace(prerelease=None)
-
-     @property
-     def promoted_version(self) -> str:
-         return str(self.promoted_semver_version)
-
-     @property
-     def current_version_is_rc(self) -> bool:
-         return bool(
-             self.current_semver_version.prerelease
-             and "rc" in self.current_semver_version.prerelease
-         )
-
-     def __init__(
-         self, context: PublishConnectorContext, connector_directory: Directory
-     ) -> None:
-         self.context = context
-         super().__init__(context, connector_directory, self.promoted_version)
-
-     async def _run(self) -> StepResult:
-         if not self.current_version_is_rc:
-             return StepResult(
-                 step=self,
-                 status=StepStatus.SKIPPED,
-                 stdout="The connector version has no rc suffix.",
-             )
-         return await super()._run()
-
-
- class DisableProgressiveRollout(StepModifyingFiles):
-     context: PublishConnectorContext
-     title = "Disable progressive rollout in metadata file"
-
-     async def _run(self) -> StepResult:
-         raw_metadata = await dagger_read_file(
-             await self.context.get_connector_dir(include=[METADATA_FILE_NAME]),
-             METADATA_FILE_NAME,
-         )
-         current_metadata = yaml.safe_load(raw_metadata)
-         enable_progressive_rollout = (
-             current_metadata.get("data", {})
-             .get("releases", {})
-             .get("rolloutConfiguration", {})
-             .get("enableProgressiveRollout", False)
-         )
-         if not enable_progressive_rollout:
-             return StepResult(
-                 step=self,
-                 status=StepStatus.SKIPPED,
-                 stdout="Progressive rollout is already disabled.",
-             )
-         # We do an in-place replacement instead of serializing back to yaml to preserve comments and formatting.
-         new_raw_metadata = raw_metadata.replace(
-             "enableProgressiveRollout: true", "enableProgressiveRollout: false"
-         )
-         self.modified_directory = dagger_write_file(
-             self.modified_directory, METADATA_FILE_NAME, new_raw_metadata
-         )
-         self.modified_files.append(METADATA_FILE_NAME)
-         return StepResult(
-             step=self,
-             status=StepStatus.SUCCESS,
-             stdout="Set enableProgressiveRollout to false in connector metadata.",
-             output=self.modified_directory,
-         )
-
-
- # Helpers
- def create_connector_report(
-     results: List[StepResult], context: PublishConnectorContext
- ) -> ConnectorReport:
-     """Generate a connector report from results and assign it to the context.
-
-     Args:
-         results (List[StepResult]): List of step results.
-         context (PublishConnectorContext): The connector context to assign the report to.
-
-     Returns:
-         ConnectorReport: The connector report.
-     """
-     report = ConnectorReport(context, results, name="PUBLISH RESULTS")
-     context.report = report
-     return report
-
-
- # Pipeline
- async def run_connector_publish_pipeline(
-     context: PublishConnectorContext, semaphore: anyio.Semaphore
- ) -> ConnectorReport:
-     """Run a publish pipeline for a single connector.
-
-     1. Validate the metadata file.
-     2. Check if the connector image already exists.
-     3. Build the connector, with platform variants.
-     4. Push the connector to DockerHub, with platform variants.
-     5. Upload its spec to the spec cache bucket.
-     6. Upload its metadata file to the metadata service bucket.
-
-     Returns:
-         ConnectorReport: The reports holding publish results.
-     """
-
-     assert context.rollout_mode == RolloutMode.PUBLISH, (
-         "This pipeline can only run in publish mode."
-     )
-
-     metadata_upload_step = MetadataUpload(
-         context=context,
-         metadata_service_gcs_credentials=context.metadata_service_gcs_credentials,
-         docker_hub_username=context.docker_hub_username,
-         docker_hub_password=context.docker_hub_password,
-         metadata_bucket_name=context.metadata_bucket_name,
-         pre_release=context.pre_release,
-         pre_release_tag=context.docker_image_tag,
-     )
-
-     upload_spec_to_cache_step = UploadSpecToCache(context)
-
-     upload_sbom_step = UploadSbom(context)
-
-     async with semaphore:
-         async with context:
-             results = []
-
-             # Check if the connector image is already published to the registry.
-             check_connector_image_results = await CheckConnectorImageDoesNotExist(
-                 context
-             ).run()
-             results.append(check_connector_image_results)
-
-             (
-                 python_registry_steps,
-                 terminate_early,
-             ) = await _run_python_registry_publish_pipeline(context)
-             results.extend(python_registry_steps)
-
-             if terminate_early:
-                 return create_connector_report(results, context)
-
-             # If the connector image already exists, we don't need to build it, but we still need to upload the metadata file.
-             # We also need to upload the spec to the spec cache bucket.
-             # For pre-releases, rebuild all the time.
-             if (
-                 check_connector_image_results.status is StepStatus.SKIPPED
-                 and not context.pre_release
-             ):
-                 context.logger.info(
-                     "The connector version is already published. Let's upload metadata.yaml and spec to GCS even if no version bump happened."
-                 )
-                 already_published_connector = context.dagger_client.container().from_(
-                     context.docker_image
-                 )
-                 upload_to_spec_cache_results = await upload_spec_to_cache_step.run(
-                     already_published_connector
-                 )
-                 results.append(upload_to_spec_cache_results)
-                 if upload_to_spec_cache_results.status is not StepStatus.SUCCESS:
-                     return create_connector_report(results, context)
-
-                 upload_sbom_results = await upload_sbom_step.run()
-                 results.append(upload_sbom_results)
-                 if upload_sbom_results.status is not StepStatus.SUCCESS:
-                     return create_connector_report(results, context)
-
-                 metadata_upload_results = await metadata_upload_step.run()
-                 results.append(metadata_upload_results)
-
-             # Exit early if the connector image already exists
-             if (
-                 check_connector_image_results.status is not StepStatus.SUCCESS
-                 and not context.pre_release
-             ):
-                 return create_connector_report(results, context)
-
-             build_connector_results = await steps.run_connector_build(context)
-             results.append(build_connector_results)
-
-             # Exit early if the connector image failed to build
-             if build_connector_results.status is not StepStatus.SUCCESS:
-                 return create_connector_report(results, context)
-
-             if context.connector.language in [
-                 ConnectorLanguage.PYTHON,
-                 ConnectorLanguage.LOW_CODE,
-             ]:
-                 upload_dependencies_step = await UploadDependenciesToMetadataService(
-                     context
-                 ).run(build_connector_results.output)
-                 results.append(upload_dependencies_step)
-
-             built_connector_platform_variants = list(
-                 build_connector_results.output.values()
-             )
-             push_connector_image_results = await PushConnectorImageToRegistry(
-                 context
-             ).run(built_connector_platform_variants)
-             results.append(push_connector_image_results)
-
-             # Exit early if the connector image failed to push
-             if push_connector_image_results.status is not StepStatus.SUCCESS:
-                 return create_connector_report(results, context)
-
-             # Make sure the image published is healthy by pulling it and running SPEC on it.
-             # See https://github.com/airbytehq/airbyte/issues/26085
-             pull_connector_image_results = await PullConnectorImageFromRegistry(
-                 context
-             ).run()
-             results.append(pull_connector_image_results)
-
-             # Exit early if the connector image failed to pull
-             if pull_connector_image_results.status is not StepStatus.SUCCESS:
-                 return create_connector_report(results, context)
-
-             upload_to_spec_cache_results = await upload_spec_to_cache_step.run(
-                 built_connector_platform_variants[0]
-             )
-             results.append(upload_to_spec_cache_results)
-             if upload_to_spec_cache_results.status is not StepStatus.SUCCESS:
-                 return create_connector_report(results, context)
-
-             upload_sbom_results = await upload_sbom_step.run()
-             results.append(upload_sbom_results)
-             if upload_sbom_results.status is not StepStatus.SUCCESS:
-                 return create_connector_report(results, context)
-
-             metadata_upload_results = await metadata_upload_step.run()
-             results.append(metadata_upload_results)
-             connector_report = create_connector_report(results, context)
-         return connector_report
-
-
- async def _run_python_registry_publish_pipeline(
-     context: PublishConnectorContext,
- ) -> Tuple[List[StepResult], bool]:
-     """
-     Run the python registry publish pipeline for a single connector.
-     Return the results of the steps and a boolean indicating whether there was an error and the pipeline should be stopped.
-     """
-     results: List[StepResult] = []
-     # Try to convert the context to a PythonRegistryPublishContext. If it returns None, it means we don't need to publish to a python registry.
-     python_registry_context = (
-         await PythonRegistryPublishContext.from_publish_connector_context(context)
-     )
-     if not python_registry_context:
-         return results, False
-
-     if not context.python_registry_token or not context.python_registry_url:
-         # If the python registry token or url are not set, we can't publish to the python registry - stop the pipeline.
-         return [
-             StepResult(
-                 step=PublishToPythonRegistry(python_registry_context),
-                 status=StepStatus.FAILURE,
-                 stderr="Pypi publishing is enabled, but python registry token or url are not set.",
-             )
-         ], True
-
-     check_python_registry_package_exists_results = (
-         await CheckPythonRegistryPackageDoesNotExist(python_registry_context).run()
-     )
-     results.append(check_python_registry_package_exists_results)
-     if check_python_registry_package_exists_results.status is StepStatus.SKIPPED:
-         context.logger.info(
-             "The connector version is already published on python registry."
-         )
-     elif check_python_registry_package_exists_results.status is StepStatus.SUCCESS:
-         context.logger.info(
-             "The connector version is not published on python registry. Let's build and publish it."
-         )
-         publish_to_python_registry_results = await PublishToPythonRegistry(
-             python_registry_context
-         ).run()
-         results.append(publish_to_python_registry_results)
-         if publish_to_python_registry_results.status is StepStatus.FAILURE:
-             return results, True
-     elif check_python_registry_package_exists_results.status is StepStatus.FAILURE:
-         return results, True
-
-     return results, False
-
-
- def get_rollback_pr_creation_arguments(
-     modified_files: Iterable[Path],
-     context: PublishConnectorContext,
-     step_results: Iterable[StepResult],
-     release_candidate_version: str,
- ) -> Tuple[Tuple, Dict]:
-     return (
-         (modified_files,),
-         {
-             "branch_id": f"{context.connector.technical_name}/rollback-{release_candidate_version}",
-             "commit_message": "[auto-publish] "  # << We can skip Vercel builds if this is in the commit message
-             + "; ".join(
-                 step_result.step.title
-                 for step_result in step_results
-                 if step_result.success
-             ),
-             "pr_title": f"🐙 {context.connector.technical_name}: Stop progressive rollout for {release_candidate_version}",
-             "pr_body": f"The release candidate version {release_candidate_version} has been deemed unstable. This PR stops its progressive rollout. This PR will be automatically merged as part of the `auto-merge` workflow. This workflow runs every 2 hours.",
-         },
-     )
-
-
- async def run_connector_rollback_pipeline(
-     context: PublishConnectorContext, semaphore: anyio.Semaphore
- ) -> ConnectorReport:
-     """Run a rollback pipeline for a single connector.
-
-     1. Disable progressive rollout in metadata file.
-     2. Open a PR with the updated metadata, set the auto-merge label.
-
-     Returns:
-         ConnectorReport: The reports holding promote results.
-     """
-
-     results = []
-     current_version = context.connector.version
-     all_modified_files = set()
-     async with semaphore, context:
-         assert context.rollout_mode == RolloutMode.ROLLBACK, (
-             "This pipeline can only run in rollback mode."
-         )
-         original_connector_directory = await context.get_connector_dir()
-
-         # Disable progressive rollout in metadata file
-         reset_release_candidate = DisableProgressiveRollout(
-             context, original_connector_directory
-         )
-         reset_release_candidate_results = await reset_release_candidate.run()
-         results.append(reset_release_candidate_results)
-         if reset_release_candidate_results.success:
-             all_modified_files.update(
-                 await reset_release_candidate.export_modified_files(
-                     context.connector.code_directory
-                 )
-             )
-
-         if not all([result.success for result in results]):
-             context.logger.error("The metadata update failed. Skipping PR creation.")
-             connector_report = create_connector_report(results, context)
-             return connector_report
-
-         # Open PR when all previous steps are successful
-         initial_pr_creation = CreateOrUpdatePullRequest(
-             context,
-             # We will merge even if the CI checks fail, due to this label:
-             labels=[AUTO_MERGE_BYPASS_CI_CHECKS_LABEL, "rollback-rc"],
-             # Let GitHub auto-merge this if all checks pass before the next run
-             # of our auto-merge workflow:
-             github_auto_merge=True,
-             # Don't skip CI, as it prevents the PR from auto-merging naturally:
-             skip_ci=False,
-         )
-         pr_creation_args, pr_creation_kwargs = get_rollback_pr_creation_arguments(
-             all_modified_files, context, results, current_version
-         )
-         initial_pr_creation_result = await initial_pr_creation.run(
-             *pr_creation_args, **pr_creation_kwargs
-         )
-         results.append(initial_pr_creation_result)
-
-         connector_report = create_connector_report(results, context)
-     return connector_report
-
-
- def get_promotion_pr_creation_arguments(
-     modified_files: Iterable[Path],
-     context: PublishConnectorContext,
-     step_results: Iterable[StepResult],
-     release_candidate_version: str,
-     promoted_version: str,
- ) -> Tuple[Tuple, Dict]:
-     return (
-         (modified_files,),
-         {
-             "branch_id": f"{context.connector.technical_name}/{promoted_version}",
-             "commit_message": "[auto-publish] "  # << We can skip Vercel builds if this is in the commit message
-             + "; ".join(
-                 step_result.step.title
-                 for step_result in step_results
-                 if step_result.success
-             ),
-             "pr_title": f"🐙 {context.connector.technical_name}: release {promoted_version}",
-             "pr_body": f"The release candidate version {release_candidate_version} has been deemed stable and is now ready to be promoted to an official release ({promoted_version}). This PR will be automatically merged as part of the `auto-merge` workflow. This workflow runs every 2 hours.",
-         },
-     )
-
-
- async def run_connector_promote_pipeline(
-     context: PublishConnectorContext, semaphore: anyio.Semaphore
- ) -> ConnectorReport:
-     """Run a promote pipeline for a single connector.
-
-     1. Update connector metadata to:
-         * Remove the RC suffix from the version.
-         * Disable progressive rollout.
-     2. Open a PR with the updated metadata.
-     3. Add a changelog entry to the documentation.
-     4. Update the PR with the updated changelog, set the auto-merge label.
-
-     Returns:
-         ConnectorReport: The reports holding promote results.
-     """
-
-     results = []
-     current_version = context.connector.version
-     all_modified_files = set()
-     async with semaphore:
-         async with context:
-             assert context.rollout_mode == RolloutMode.PROMOTE, (
-                 "This pipeline can only run in promote mode."
-             )
-             original_connector_directory = await context.get_connector_dir()
-             # Remove RC suffix
-             set_promoted_version = SetPromotedVersion(
-                 context, original_connector_directory
-             )
-             set_promoted_version_results = await set_promoted_version.run()
-             results.append(set_promoted_version_results)
-             if set_promoted_version_results.success:
-                 all_modified_files.update(
-                     await set_promoted_version.export_modified_files(
-                         context.connector.code_directory
-                     )
-                 )
-
-             # Disable progressive rollout in metadata file
-             reset_release_candidate = DisableProgressiveRollout(
-                 context, set_promoted_version_results.output
-             )
-             reset_release_candidate_results = await reset_release_candidate.run()
-             results.append(reset_release_candidate_results)
-             if reset_release_candidate_results.success:
-                 all_modified_files.update(
-                     await reset_release_candidate.export_modified_files(
-                         context.connector.code_directory
-                     )
-                 )
-
-             if not all([result.success for result in results]):
-                 context.logger.error(
-                     "The metadata update failed. Skipping PR creation."
-                 )
-                 connector_report = create_connector_report(results, context)
-                 return connector_report
-
-             # Open PR when all previous steps are successful
-             promoted_version = set_promoted_version.promoted_version
-             initial_pr_creation = CreateOrUpdatePullRequest(context, skip_ci=False)
-             pr_creation_args, pr_creation_kwargs = get_promotion_pr_creation_arguments(
-                 all_modified_files, context, results, current_version, promoted_version
-             )
-             initial_pr_creation_result = await initial_pr_creation.run(
-                 *pr_creation_args, **pr_creation_kwargs
-             )
-             results.append(initial_pr_creation_result)
-             # Update changelog and update PR
-             if initial_pr_creation_result.success:
-                 created_pr = initial_pr_creation_result.output
-                 documentation_directory = await context.get_repo_dir(
-                     include=[
-                         str(context.connector.local_connector_documentation_directory)
-                     ]
-                 ).directory(
-                     str(context.connector.local_connector_documentation_directory)
-                 )
-                 add_changelog_entry = AddChangelogEntry(
-                     context,
-                     documentation_directory,
-                     promoted_version,
-                     f"Promoting release candidate {current_version} to a main version.",
-                     created_pr.number,
-                 )
-                 add_changelog_entry_result = await add_changelog_entry.run()
-                 results.append(add_changelog_entry_result)
-                 if add_changelog_entry_result.success:
-                     all_modified_files.update(
-                         await add_changelog_entry.export_modified_files(
-                             context.connector.local_connector_documentation_directory
-                         )
-                     )
-                 post_changelog_pr_update = CreateOrUpdatePullRequest(
-                     context,
-                     skip_ci=False,  # Don't skip CI, as it prevents the PR from auto-merging naturally.
-                     # We will merge even if the CI checks fail, due to the "bypass-ci-checks" label:
-                     labels=[AUTO_MERGE_BYPASS_CI_CHECKS_LABEL, "promoted-rc"],
-                     github_auto_merge=True,  # Let GitHub auto-merge this if/when all required checks have passed.
-                 )
-                 pr_creation_args, pr_creation_kwargs = (
-                     get_promotion_pr_creation_arguments(
-                         all_modified_files,
-                         context,
-                         results,
-                         current_version,
-                         promoted_version,
-                     )
-                 )
-                 post_changelog_pr_update_result = await post_changelog_pr_update.run(
-                     *pr_creation_args, **pr_creation_kwargs
-                 )
-                 results.append(post_changelog_pr_update_result)
-
-             connector_report = create_connector_report(results, context)
-         return connector_report
-
-
- def reorder_contexts(
-     contexts: List[PublishConnectorContext],
- ) -> List[PublishConnectorContext]:
-     """Reorder contexts so that the ones that are for strict-encrypt/secure connectors come first.
-     The metadata upload on publish checks if the the connectors referenced in the metadata file are already published to DockerHub.
-     Non strict-encrypt variant reference the strict-encrypt variant in their metadata file for cloud.
-     So if we publish the non strict-encrypt variant first, the metadata upload will fail if the strict-encrypt variant is not published yet.
-     As strict-encrypt variant are often modified in the same PR as the non strict-encrypt variant, we want to publish them first.
-     """
-
-     def is_secure_variant(context: PublishConnectorContext) -> bool:
-         SECURE_VARIANT_KEYS = ["secure", "strict-encrypt"]
-         return any(
-             key in context.connector.technical_name for key in SECURE_VARIANT_KEYS
-         )
-
-     return sorted(
-         contexts,
-         key=lambda context: (
-             is_secure_variant(context),
-             context.connector.technical_name,
-         ),
-         reverse=True,
-     )