airbyte-internal-ops 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,6 +8,8 @@ Commands:
8
8
  airbyte-ops registry connector compute-prerelease-tag - Compute prerelease version tag
9
9
  airbyte-ops registry connector publish-prerelease - Publish connector prerelease
10
10
  airbyte-ops registry connector publish - Publish connector (apply/rollback version override)
11
+ airbyte-ops registry enterprise-stubs sync --bucket prod|dev - Sync connector_stubs.json to GCS
12
+ airbyte-ops registry enterprise-stubs check --bucket prod|dev - Compare local file with GCS
11
13
  airbyte-ops registry image inspect - Inspect Docker image on DockerHub
12
14
  """
13
15
 
@@ -16,13 +18,14 @@ from __future__ import annotations
16
18
  import contextlib
17
19
  import sys
18
20
  from pathlib import Path
19
- from typing import Annotated
21
+ from typing import Annotated, Literal
20
22
 
21
23
  import yaml
22
24
  from cyclopts import App, Parameter
23
25
 
24
26
  from airbyte_ops_mcp.cli._base import app
25
27
  from airbyte_ops_mcp.cli._shared import (
28
+ error_console,
26
29
  exit_with_error,
27
30
  print_error,
28
31
  print_json,
@@ -42,6 +45,15 @@ from airbyte_ops_mcp.registry import (
42
45
  PublishAction,
43
46
  publish_connector,
44
47
  )
48
+ from airbyte_ops_mcp.registry._gcs_util import get_bucket_name
49
+ from airbyte_ops_mcp.registry.connector_stubs import (
50
+ CONNECTOR_STUBS_FILE,
51
+ CONNECTOR_STUBS_PATH,
52
+ ConnectorStub,
53
+ load_local_stubs,
54
+ read_connector_stubs,
55
+ write_connector_stubs,
56
+ )
45
57
 
46
58
  # Create the registry sub-app
47
59
  registry_app = App(
@@ -57,11 +69,39 @@ registry_app.command(connector_app)
57
69
  image_app = App(name="image", help="Docker image operations.")
58
70
  registry_app.command(image_app)
59
71
 
72
+ # Create the enterprise-stubs sub-app under registry (for whole-file GCS operations)
73
+ enterprise_stubs_app = App(
74
+ name="enterprise-stubs",
75
+ help="Enterprise connector stubs GCS operations (whole-file sync).",
76
+ )
77
+ registry_app.command(enterprise_stubs_app)
78
+
60
79
 
61
80
  AIRBYTE_REPO_OWNER = "airbytehq"
81
+ AIRBYTE_ENTERPRISE_REPO_NAME = "airbyte-enterprise"
62
82
  AIRBYTE_REPO_NAME = "airbyte"
63
83
  CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
64
84
 
85
+ # Type alias for bucket argument
86
+ BucketArg = Literal["dev", "prod"]
87
+
88
+
89
def _validate_bucket_arg(bucket: str) -> BucketArg:
    """Validate and return the bucket argument.

    Args:
        bucket: The bucket argument from CLI.

    Returns:
        The validated bucket value.

    Raises:
        SystemExit: If the bucket value is invalid.
    """
    # Membership test against a set of the two accepted environments.
    if bucket not in {"prod", "dev"}:
        exit_with_error(f"Invalid bucket '{bucket}'. Must be 'prod' or 'dev'.")
    return bucket  # type: ignore[return-value]
104
+
65
105
 
66
106
  def _get_connector_version_from_github(
67
107
  connector_name: str,
@@ -295,3 +335,177 @@ def inspect_image(
295
335
  else:
296
336
  print_error(f"Image {result.full_name} not found.")
297
337
  print_json(result.model_dump())
338
+
339
+
340
@enterprise_stubs_app.command(name="check")
def enterprise_stubs_check(
    bucket: Annotated[
        BucketArg,
        Parameter(
            help="Target GCS bucket: 'prod' or 'dev'.",
        ),
    ],
    repo_root: Annotated[
        Path,
        Parameter(
            help="Path to the airbyte-enterprise repository root. Defaults to current directory."
        ),
    ] = Path.cwd(),  # NOTE(review): default is captured once at import time — confirm intended
) -> None:
    """Compare local connector_stubs.json with the version in GCS.

    This command reads the entire local connector_stubs.json file and compares it
    with the version currently published in GCS.

    Exit codes:
        0: Local file matches GCS (check passed)
        1: Differences found (check failed)

    Output:
        STDOUT: JSON representation of the comparison result
        STDERR: Informational messages and comparison details

    Example:
        airbyte-ops registry enterprise-stubs check --bucket prod --repo-root /path/to/airbyte-enterprise
        airbyte-ops registry enterprise-stubs check --bucket dev
    """
    bucket = _validate_bucket_arg(bucket)

    # Load local stubs. A missing file and malformed content are both user
    # errors and exit the same way, so one merged clause replaces the two
    # previously duplicated except blocks.
    try:
        local_stubs = load_local_stubs(repo_root)
    except (FileNotFoundError, ValueError) as e:
        exit_with_error(str(e))

    # Load published stubs from GCS
    bucket_name = get_bucket_name(bucket)
    published_stubs = read_connector_stubs(bucket_name)

    error_console.print(
        f"Comparing local {CONNECTOR_STUBS_FILE} with {bucket_name}/{CONNECTOR_STUBS_PATH}"
    )

    # Build lookup dicts by stub ID. Stubs without an 'id' cannot be matched
    # across the two sources, so they are excluded from the diff (they still
    # count toward local_count/published_count below).
    local_by_id = {stub["id"]: stub for stub in local_stubs if stub.get("id")}
    published_by_id = {stub["id"]: stub for stub in published_stubs if stub.get("id")}

    all_ids = set(local_by_id.keys()) | set(published_by_id.keys())
    differences: list[dict[str, str]] = []

    for stub_id in sorted(all_ids):
        local_stub = local_by_id.get(stub_id)
        published_stub = published_by_id.get(stub_id)

        if local_stub is None:
            differences.append({"id": stub_id, "status": "only_in_gcs"})
        elif published_stub is None:
            differences.append({"id": stub_id, "status": "only_in_local"})
        elif local_stub != published_stub:
            differences.append({"id": stub_id, "status": "modified"})

    result = {
        "local_count": len(local_stubs),
        "published_count": len(published_stubs),
        "in_sync": len(differences) == 0,
        "differences": differences,
    }

    if differences:
        error_console.print(
            f"[yellow]Warning:[/yellow] {len(differences)} difference(s) found:"
        )
        for diff in differences:
            error_console.print(f"  {diff['id']}: {diff['status']}")
        print_json(result)
        sys.exit(1)

    error_console.print(
        f"[green]Local file is in sync with GCS ({len(local_stubs)} stubs)[/green]"
    )
    print_json(result)
428
+
429
+
430
@enterprise_stubs_app.command(name="sync")
def enterprise_stubs_sync(
    bucket: Annotated[
        BucketArg,
        Parameter(
            help="Target GCS bucket: 'prod' or 'dev'.",
        ),
    ],
    repo_root: Annotated[
        Path,
        Parameter(
            help="Path to the airbyte-enterprise repository root. Defaults to current directory."
        ),
    ] = Path.cwd(),  # NOTE(review): default is captured once at import time — confirm intended
    dry_run: Annotated[
        bool,
        Parameter(help="Show what would be uploaded without making changes."),
    ] = False,
) -> None:
    """Sync local connector_stubs.json to GCS.

    This command uploads the entire local connector_stubs.json file to GCS,
    replacing the existing file. Use this after merging changes to master
    in the airbyte-enterprise repository.

    Exit codes:
        0: Sync successful (or dry-run completed)
        1: Error (file not found, validation failed, etc.)

    Output:
        STDOUT: JSON representation of the sync result
        STDERR: Informational messages and status updates

    Example:
        airbyte-ops registry enterprise-stubs sync --bucket prod --repo-root /path/to/airbyte-enterprise
        airbyte-ops registry enterprise-stubs sync --bucket dev
        airbyte-ops registry enterprise-stubs sync --bucket dev --dry-run
    """
    bucket = _validate_bucket_arg(bucket)

    # Load local stubs. Missing file and malformed content are both user
    # errors and exit the same way (merged duplicate except clauses).
    try:
        local_stubs = load_local_stubs(repo_root)
    except (FileNotFoundError, ValueError) as e:
        exit_with_error(str(e))

    # Validate every stub against the schema BEFORE touching GCS, so a bad
    # entry never results in a partial/garbage upload. The docstring promises
    # exit code 1 for validation failures: pydantic v2's ValidationError
    # subclasses ValueError, so catching (TypeError, ValueError) turns a raw
    # traceback into a clean error exit (TypeError covers non-dict entries).
    try:
        for stub in local_stubs:
            ConnectorStub(**stub)
    except (TypeError, ValueError) as e:
        exit_with_error(f"Stub validation failed: {e}")

    bucket_name = get_bucket_name(bucket)

    if dry_run:
        error_console.print(
            f"[DRY RUN] Would upload {len(local_stubs)} stubs to "
            f"{bucket_name}/{CONNECTOR_STUBS_PATH}"
        )
        result = {
            "dry_run": True,
            "stub_count": len(local_stubs),
            "bucket": bucket_name,
            "path": CONNECTOR_STUBS_PATH,
        }
        print_json(result)
        return

    # Write to GCS (replaces entire file)
    write_connector_stubs(bucket_name, local_stubs)

    error_console.print(
        f"[green]Synced {len(local_stubs)} stubs to {bucket_name}/{CONNECTOR_STUBS_PATH}[/green]"
    )
    result = {
        "dry_run": False,
        "stub_count": len(local_stubs),
        "bucket": bucket_name,
        "path": CONNECTOR_STUBS_PATH,
        "stub_ids": [stub.get("id") for stub in local_stubs],
    }
    print_json(result)
@@ -9,6 +9,8 @@ modules but are not MCP-specific.
9
9
 
10
10
  from __future__ import annotations
11
11
 
12
+ import datetime
13
+ import functools
12
14
  import os
13
15
  import re
14
16
  import shutil
@@ -17,6 +19,8 @@ from dataclasses import dataclass
17
19
  from urllib.parse import urlparse
18
20
 
19
21
  import requests
22
+ from github import Auth, Github, GithubException
23
+ from github.Repository import Repository
20
24
 
21
25
  GITHUB_API_BASE = "https://api.github.com"
22
26
 
@@ -130,6 +134,58 @@ def get_pr_head_ref(
130
134
  )
131
135
 
132
136
 
137
@functools.lru_cache(maxsize=32)
def _get_github_repo(owner: str, repo: str, token: str) -> Repository:
    """Return a cached GitHub repository handle for ``owner/repo``.

    The LRU cache avoids rebuilding clients and repo objects when several
    PRs are fetched from the same repository with the same token. The repo
    is created with lazy=True, so no API request is issued here — only when
    the returned object is actually used.
    """
    client = Github(auth=Auth.Token(token))
    return client.get_repo(f"{owner}/{repo}", lazy=True)
148
+
149
+
150
def get_pr_merge_date(
    owner: str,
    repo: str,
    pr_number: int,
    token: str | None = None,
) -> datetime.date | None:
    """Get the merge date for a PR.

    Args:
        owner: Repository owner (e.g., "airbytehq")
        repo: Repository name (e.g., "airbyte")
        pr_number: Pull request number
        token: GitHub API token. If None, will be resolved from environment.

    Returns:
        The date the PR was merged, or None if not merged.

    Raises:
        GitHubAPIError: If the API request fails.
    """
    resolved_token = resolve_github_token() if token is None else token

    try:
        pull = _get_github_repo(owner, repo, resolved_token).get_pull(pr_number)
    except GithubException as e:
        # Distinguish "no such PR" from any other API failure.
        if e.status == 404:
            raise GitHubAPIError(f"PR {owner}/{repo}#{pr_number} not found") from e
        raise GitHubAPIError(
            f"Failed to fetch PR {owner}/{repo}#{pr_number}: {e.status} {e.data}"
        ) from e

    merged_at = pull.merged_at
    return None if merged_at is None else merged_at.date()
187
+
188
+
133
189
  def get_file_contents_at_ref(
134
190
  owner: str,
135
191
  repo: str,
@@ -0,0 +1,100 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """Shared GCS utilities for registry operations.
3
+
4
+ This module provides common GCS helper functions used across registry
5
+ operations, including connector stubs and metadata management.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from typing import Literal
11
+
12
+ from google.cloud import storage
13
+
14
+ from airbyte_ops_mcp.gcp_auth import get_gcp_credentials
15
+
16
+ # GCS bucket names for the metadata service
17
+ PROD_METADATA_SERVICE_BUCKET_NAME = "prod-airbyte-cloud-connector-metadata-service"
18
+ DEV_METADATA_SERVICE_BUCKET_NAME = "dev-airbyte-cloud-connector-metadata-service-2"
19
+
20
+ # Type alias for environment ID
21
+ EnvId = Literal["dev", "prod"]
22
+
23
+
24
def get_gcs_client() -> storage.Client:
    """Get a GCS storage client using centralized credentials.

    Credentials come from the shared gcp_auth module, which supports both
    the GCP_PROD_DB_ACCESS_CREDENTIALS env var and standard ADC discovery.

    Returns:
        A configured GCS storage client.
    """
    return storage.Client(credentials=get_gcp_credentials())
36
+
37
+
38
def get_bucket_name(env: EnvId) -> str:
    """Get the metadata service bucket name.

    Args:
        env: The environment ID ('dev' or 'prod').

    Returns:
        The bucket name to use for GCS operations.
    """
    # Anything other than 'prod' falls back to the dev bucket.
    return (
        PROD_METADATA_SERVICE_BUCKET_NAME
        if env == "prod"
        else DEV_METADATA_SERVICE_BUCKET_NAME
    )
50
+
51
+
52
def get_gcs_file_text(
    bucket_name: str,
    path: str,
    client: storage.Client | None = None,
) -> str | None:
    """Read a text file from GCS.

    Args:
        bucket_name: The GCS bucket name.
        path: The path to the file within the bucket.
        client: Optional GCS client. If not provided, creates one using get_gcs_client().

    Returns:
        The file contents as a string, or None if the file doesn't exist.
    """
    if client is None:
        client = get_gcs_client()

    bucket = client.bucket(bucket_name)
    blob = bucket.blob(path)

    # NOTE: exists()+download is not atomic; a concurrent delete between the
    # two calls would still raise from the download.
    if not blob.exists():
        return None

    # download_as_text() decodes as UTF-8 by default and replaces the
    # deprecated download_as_string().decode("utf-8") pattern.
    return blob.download_as_text()
77
+
78
+
79
def upload_gcs_file_text(
    bucket_name: str,
    path: str,
    content: str,
    content_type: str = "text/plain",
    client: storage.Client | None = None,
) -> None:
    """Upload a text file to GCS.

    Args:
        bucket_name: The GCS bucket name.
        path: The path to the file within the bucket.
        content: The text content to upload.
        content_type: The MIME type of the content. Defaults to "text/plain".
        client: Optional GCS client. If not provided, creates one using get_gcs_client().
    """
    gcs = client if client is not None else get_gcs_client()
    target_blob = gcs.bucket(bucket_name).blob(path)
    target_blob.upload_from_string(content, content_type=content_type)
@@ -0,0 +1,186 @@
1
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
2
+ """Core logic for connector stub operations.
3
+
4
+ This module provides the core functionality for managing enterprise connector
5
+ stubs in the Airbyte Cloud catalog. The stubs are stored in GCS and appear
6
+ in the catalog as placeholder entries that direct users to a sales funnel.
7
+
8
+ The stubs are stored at:
9
+ gs://prod-airbyte-cloud-connector-metadata-service/resources/connector_stubs/v1/connector_stubs.json
10
+ """
11
+
12
+ from __future__ import annotations
13
+
14
+ import json
15
+ from pathlib import Path
16
+
17
+ from pydantic import BaseModel, Field
18
+
19
+ from airbyte_ops_mcp.registry._gcs_util import (
20
+ get_bucket_name,
21
+ get_gcs_file_text,
22
+ upload_gcs_file_text,
23
+ )
24
+
25
+ # Path to connector stubs file in GCS
26
+ CONNECTOR_STUBS_PATH = "resources/connector_stubs/v1/connector_stubs.json"
27
+
28
+ # Local file name for connector stubs in airbyte-enterprise repo
29
+ CONNECTOR_STUBS_FILE = "connector_stubs.json"
30
+
31
+
32
+ class ConnectorStub(BaseModel):
33
+ """A connector stub entry for the enterprise connector catalog.
34
+
35
+ Stubs are placeholder entries that appear in the Airbyte Cloud catalog
36
+ for enterprise connectors. When users click on them, they are directed
37
+ to a sales funnel rather than being able to configure the connector directly.
38
+ """
39
+
40
+ model_config = {"populate_by_name": True}
41
+
42
+ id: str = Field(
43
+ description="Unique identifier for the stub (e.g., 'source-oracle-enterprise')"
44
+ )
45
+ name: str = Field(description="Display name of the connector (e.g., 'Oracle')")
46
+ url: str = Field(
47
+ description="URL to the connector's documentation page (must be publicly accessible)"
48
+ )
49
+ icon: str = Field(
50
+ description="URL to the connector's icon (typically stored in the same GCS bucket)"
51
+ )
52
+ definition_id: str | None = Field(
53
+ default=None,
54
+ description="UUID of the connector definition (if it exists in the registry)",
55
+ alias="definitionId",
56
+ )
57
+ label: str | None = Field(
58
+ default=None, description="Label for the connector (typically 'enterprise')"
59
+ )
60
+ type: str | None = Field(
61
+ default=None,
62
+ description="Type of connector (e.g., 'enterprise_source', 'enterprise_destination')",
63
+ )
64
+ codename: str | None = Field(
65
+ default=None, description="Internal codename for the connector (optional)"
66
+ )
67
+
68
+
69
+ def read_connector_stubs(bucket_name: str) -> list[dict]:
70
+ """Read connector stubs from GCS.
71
+
72
+ Args:
73
+ bucket_name: The GCS bucket name.
74
+
75
+ Returns:
76
+ List of connector stub dictionaries.
77
+
78
+ Raises:
79
+ ValueError: If the file exists but contains invalid data.
80
+ """
81
+ content = get_gcs_file_text(bucket_name, CONNECTOR_STUBS_PATH)
82
+
83
+ if content is None:
84
+ return []
85
+
86
+ stubs = json.loads(content)
87
+
88
+ if not isinstance(stubs, list):
89
+ raise ValueError(
90
+ f"Expected connector_stubs.json to contain a list, got {type(stubs).__name__}"
91
+ )
92
+
93
+ return stubs
94
+
95
+
96
+ def write_connector_stubs(bucket_name: str, stubs: list[dict]) -> None:
97
+ """Write connector stubs to GCS.
98
+
99
+ Args:
100
+ bucket_name: The GCS bucket name.
101
+ stubs: List of connector stub dictionaries to write.
102
+ """
103
+ content = json.dumps(stubs, indent=2)
104
+ upload_gcs_file_text(
105
+ bucket_name, CONNECTOR_STUBS_PATH, content, content_type="application/json"
106
+ )
107
+
108
+
109
+ def find_stub_by_connector(stubs: list[dict], connector: str) -> dict | None:
110
+ """Find a stub by connector name or ID.
111
+
112
+ Matches by:
113
+ - Exact ID match (e.g., 'source-oracle-enterprise')
114
+ - ID with '-enterprise' suffix (e.g., 'source-oracle' matches 'source-oracle-enterprise')
115
+ - Name match (case-insensitive, spaces converted to hyphens)
116
+
117
+ Args:
118
+ stubs: List of connector stub dictionaries.
119
+ connector: Connector name or stub ID to find.
120
+
121
+ Returns:
122
+ The matching stub dictionary, or None if not found.
123
+ """
124
+ for stub in stubs:
125
+ stub_id = stub.get("id", "")
126
+ # Match by exact ID or by connector name pattern
127
+ if stub_id == connector or stub_id == f"{connector}-enterprise":
128
+ return stub
129
+ # Also check if the connector name matches the stub name
130
+ if stub.get("name", "").lower().replace(" ", "-") == connector.lower():
131
+ return stub
132
+ return None
133
+
134
+
135
+ def load_local_stubs(repo_root: Path) -> list[dict]:
136
+ """Load connector stubs from local repository.
137
+
138
+ Args:
139
+ repo_root: Path to the airbyte-enterprise repository root.
140
+
141
+ Returns:
142
+ List of connector stub dictionaries.
143
+
144
+ Raises:
145
+ FileNotFoundError: If the connector stubs file doesn't exist.
146
+ ValueError: If the file contains invalid data.
147
+ """
148
+ stubs_file = repo_root / CONNECTOR_STUBS_FILE
149
+ if not stubs_file.exists():
150
+ raise FileNotFoundError(f"Connector stubs file not found: {stubs_file}")
151
+
152
+ content = stubs_file.read_text()
153
+ stubs = json.loads(content)
154
+
155
+ if not isinstance(stubs, list):
156
+ raise ValueError(
157
+ f"Expected {CONNECTOR_STUBS_FILE} to contain a list, got {type(stubs).__name__}"
158
+ )
159
+
160
+ return stubs
161
+
162
+
163
+ def save_local_stubs(repo_root: Path, stubs: list[dict]) -> None:
164
+ """Save connector stubs to local repository.
165
+
166
+ Args:
167
+ repo_root: Path to the airbyte-enterprise repository root.
168
+ stubs: List of connector stub dictionaries to save.
169
+ """
170
+ stubs_file = repo_root / CONNECTOR_STUBS_FILE
171
+ content = json.dumps(stubs, indent=2) + "\n"
172
+ stubs_file.write_text(content)
173
+
174
+
175
+ # Re-export get_bucket_name for convenience
176
+ __all__ = [
177
+ "CONNECTOR_STUBS_FILE",
178
+ "CONNECTOR_STUBS_PATH",
179
+ "ConnectorStub",
180
+ "find_stub_by_connector",
181
+ "get_bucket_name",
182
+ "load_local_stubs",
183
+ "read_connector_stubs",
184
+ "save_local_stubs",
185
+ "write_connector_stubs",
186
+ ]
@@ -15,6 +15,10 @@ import yaml
15
15
  from google.cloud import storage
16
16
  from google.oauth2 import service_account
17
17
 
18
+ from airbyte_ops_mcp.registry._gcs_util import (
19
+ DEV_METADATA_SERVICE_BUCKET_NAME,
20
+ PROD_METADATA_SERVICE_BUCKET_NAME,
21
+ )
18
22
  from airbyte_ops_mcp.registry.models import (
19
23
  ConnectorMetadata,
20
24
  ConnectorPublishResult,
@@ -27,10 +31,6 @@ METADATA_FOLDER = "metadata"
27
31
  LATEST_GCS_FOLDER_NAME = "latest"
28
32
  RELEASE_CANDIDATE_GCS_FOLDER_NAME = "release_candidate"
29
33
 
30
- # Bucket names for metadata service
31
- PROD_METADATA_SERVICE_BUCKET_NAME = "prod-airbyte-cloud-connector-metadata-service"
32
- DEV_METADATA_SERVICE_BUCKET_NAME = "dev-airbyte-cloud-connector-metadata-service-2"
33
-
34
34
  # Default to dev bucket for safety - use --prod flag to target production
35
35
  DEFAULT_METADATA_SERVICE_BUCKET_NAME = DEV_METADATA_SERVICE_BUCKET_NAME
36
36