airbyte-internal-ops 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (31)
  1. {airbyte_internal_ops-0.1.3.dist-info → airbyte_internal_ops-0.1.4.dist-info}/METADATA +8 -5
  2. {airbyte_internal_ops-0.1.3.dist-info → airbyte_internal_ops-0.1.4.dist-info}/RECORD +31 -11
  3. airbyte_ops_mcp/_legacy/airbyte_ci/connector_pipelines/airbyte_ci/connectors/test/steps/common.py +1 -1
  4. airbyte_ops_mcp/cli/cloud.py +309 -38
  5. airbyte_ops_mcp/cloud_admin/connection_config.py +131 -0
  6. airbyte_ops_mcp/live_tests/__init__.py +16 -0
  7. airbyte_ops_mcp/live_tests/_connection_retriever/__init__.py +35 -0
  8. airbyte_ops_mcp/live_tests/_connection_retriever/audit_logging.py +88 -0
  9. airbyte_ops_mcp/live_tests/_connection_retriever/consts.py +33 -0
  10. airbyte_ops_mcp/live_tests/_connection_retriever/db_access.py +82 -0
  11. airbyte_ops_mcp/live_tests/_connection_retriever/retrieval.py +391 -0
  12. airbyte_ops_mcp/live_tests/_connection_retriever/secrets_resolution.py +130 -0
  13. airbyte_ops_mcp/live_tests/config.py +190 -0
  14. airbyte_ops_mcp/live_tests/connection_fetcher.py +159 -2
  15. airbyte_ops_mcp/live_tests/connection_secret_retriever.py +173 -0
  16. airbyte_ops_mcp/live_tests/evaluation_modes.py +45 -0
  17. airbyte_ops_mcp/live_tests/http_metrics.py +81 -0
  18. airbyte_ops_mcp/live_tests/message_cache/__init__.py +15 -0
  19. airbyte_ops_mcp/live_tests/message_cache/duckdb_cache.py +415 -0
  20. airbyte_ops_mcp/live_tests/obfuscation.py +126 -0
  21. airbyte_ops_mcp/live_tests/regression/__init__.py +29 -0
  22. airbyte_ops_mcp/live_tests/regression/comparators.py +466 -0
  23. airbyte_ops_mcp/live_tests/schema_generation.py +154 -0
  24. airbyte_ops_mcp/live_tests/validation/__init__.py +43 -0
  25. airbyte_ops_mcp/live_tests/validation/catalog_validators.py +389 -0
  26. airbyte_ops_mcp/live_tests/validation/record_validators.py +227 -0
  27. airbyte_ops_mcp/mcp/_mcp_utils.py +3 -0
  28. airbyte_ops_mcp/mcp/live_tests.py +500 -0
  29. airbyte_ops_mcp/mcp/server.py +3 -0
  30. {airbyte_internal_ops-0.1.3.dist-info → airbyte_internal_ops-0.1.4.dist-info}/WHEEL +0 -0
  31. {airbyte_internal_ops-0.1.3.dist-info → airbyte_internal_ops-0.1.4.dist-info}/entry_points.txt +0 -0
airbyte_ops_mcp/live_tests/connection_fetcher.py
@@ -9,12 +9,13 @@ from __future__ import annotations
 
 import json
 import os
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from pathlib import Path
 from typing import Any
 
 import requests
 from airbyte import constants
+from airbyte.cloud import CloudWorkspace
 from airbyte.exceptions import PyAirbyteInputError
 
 
@@ -29,6 +30,16 @@ class ConnectionData:
     config: dict[str, Any]
     catalog: dict[str, Any]
     stream_names: list[str]
+    docker_repository: str | None = None
+    docker_image_tag: str | None = None
+    state: list[dict[str, Any]] | None = field(default=None)
+
+    @property
+    def connector_image(self) -> str | None:
+        """Get the full connector image name with tag."""
+        if self.docker_repository and self.docker_image_tag:
+            return f"{self.docker_repository}:{self.docker_image_tag}"
+        return None
 
 
 def _get_access_token(
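The new connector_image property joins the repository and tag into a pullable image name, returning None when either half is missing. A minimal sketch of how a caller might use it; all field values below are made up for illustration:

    from airbyte_ops_mcp.live_tests.connection_fetcher import ConnectionData

    # Hypothetical values, for illustration only.
    data = ConnectionData(
        connection_id="conn-123",
        source_id="src-456",
        source_name="My Postgres Source",
        source_definition_id="def-789",
        config={},
        catalog={"streams": []},
        stream_names=["users"],
        docker_repository="airbyte/source-postgres",
        docker_image_tag="3.6.0",
    )
    assert data.connector_image == "airbyte/source-postgres:3.6.0"

    # Without docker info (e.g., when the version lookup failed), it is None.
    data.docker_image_tag = None
    assert data.connector_image is None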
@@ -121,6 +132,28 @@ def fetch_connection_data(
     )
 
     source_data = source_response.json()
+    source_definition_id = source_data.get("definitionId", "")
+
+    # Try to get docker repository and image tag from source definition version
+    docker_repository = None
+    docker_image_tag = None
+    if source_definition_id:
+        try:
+            # Use the Config API to get version info for the source
+            config_api_root = constants.CLOUD_CONFIG_API_ROOT
+            version_response = requests.post(
+                f"{config_api_root}/actor_definition_versions/get_for_source",
+                json={"sourceId": source_id},
+                headers=headers,
+                timeout=30,
+            )
+            if version_response.status_code == 200:
+                version_data = version_response.json()
+                docker_repository = version_data.get("dockerRepository")
+                docker_image_tag = version_data.get("dockerImageTag")
+        except Exception:
+            # Non-fatal: we can still proceed without docker info
+            pass
 
     # Build configured catalog from connection streams
     streams_config = conn_data.get("configurations", {}).get("streams", [])
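For context, the version lookup added above amounts to a single Config API call. A standalone sketch follows; the endpoint path, request body, and response keys are taken from the diff, while the API root, token, and source ID are placeholders (in the real code the root comes from constants.CLOUD_CONFIG_API_ROOT and the token from _get_access_token):

    import requests

    headers = {"Authorization": "Bearer <access-token>"}  # placeholder token
    resp = requests.post(
        "<config-api-root>/actor_definition_versions/get_for_source",
        json={"sourceId": "<source-id>"},
        headers=headers,
        timeout=30,
    )
    if resp.status_code == 200:
        body = resp.json()
        # Expected keys per the diff: dockerRepository, dockerImageTag
        print(body.get("dockerRepository"), body.get("dockerImageTag"))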
@@ -133,10 +166,12 @@
         connection_id=connection_id,
         source_id=source_id,
         source_name=source_data.get("name", ""),
-        source_definition_id=source_data.get("definitionId", ""),
+        source_definition_id=source_definition_id,
         config=source_data.get("configuration", {}),
         catalog=catalog,
         stream_names=stream_names,
+        docker_repository=docker_repository,
+        docker_image_tag=docker_image_tag,
     )
 
 
@@ -214,3 +249,125 @@ def save_connection_data_to_files(
     catalog_path.write_text(json.dumps(connection_data.catalog, indent=2))
 
     return config_path, catalog_path
+
+
+def fetch_connection_artifacts(
+    connection_id: str,
+    workspace_id: str | None = None,
+    client_id: str | None = None,
+    client_secret: str | None = None,
+) -> tuple[dict[str, Any] | None, list[dict[str, Any]] | None]:
+    """Fetch catalog and state artifacts using PyAirbyte's CloudConnection.
+
+    This uses the Config API endpoints via PyAirbyte to get the actual
+    configured catalog (with full schemas) and state artifacts without
+    requiring direct database access.
+
+    Args:
+        connection_id: The connection ID to fetch artifacts for.
+        workspace_id: Airbyte Cloud workspace ID (defaults to env var).
+        client_id: Airbyte Cloud client ID (defaults to env var).
+        client_secret: Airbyte Cloud client secret (defaults to env var).
+
+    Returns:
+        Tuple of (catalog, state) where:
+        - catalog: The configured catalog dict with full schemas, or None
+        - state: List of state dicts for each stream, or None if no state
+    """
+    workspace_id = workspace_id or os.getenv("AIRBYTE_CLOUD_WORKSPACE_ID")
+    client_id = client_id or os.getenv("AIRBYTE_CLOUD_CLIENT_ID")
+    client_secret = client_secret or os.getenv("AIRBYTE_CLOUD_CLIENT_SECRET")
+
+    if not workspace_id:
+        raise PyAirbyteInputError(
+            message="Missing Airbyte Cloud workspace ID",
+            context={"hint": "Set AIRBYTE_CLOUD_WORKSPACE_ID env var"},
+        )
+
+    workspace = CloudWorkspace(
+        workspace_id=workspace_id,
+        client_id=client_id,
+        client_secret=client_secret,
+    )
+    connection = workspace.get_connection(connection_id)
+
+    catalog = connection.get_catalog_artifact()
+    state = connection.get_state_artifacts()
+
+    return catalog, state
+
+
+def enrich_connection_data_with_artifacts(
+    connection_data: ConnectionData,
+    workspace_id: str | None = None,
+    client_id: str | None = None,
+    client_secret: str | None = None,
+) -> ConnectionData:
+    """Enrich ConnectionData with full catalog and state from PyAirbyte.
+
+    This replaces the minimal catalog (with empty schemas) with the actual
+    configured catalog from the Config API, and adds state artifacts.
+
+    Args:
+        connection_data: The connection data to enrich.
+        workspace_id: Airbyte Cloud workspace ID (defaults to env var).
+        client_id: Airbyte Cloud client ID (defaults to env var).
+        client_secret: Airbyte Cloud client secret (defaults to env var).
+
+    Returns:
+        ConnectionData with enriched catalog and state.
+    """
+    catalog, state = fetch_connection_artifacts(
+        connection_id=connection_data.connection_id,
+        workspace_id=workspace_id,
+        client_id=client_id,
+        client_secret=client_secret,
+    )
+
+    if catalog is not None:
+        # Convert syncCatalog format to ConfiguredAirbyteCatalog format
+        connection_data.catalog = _convert_sync_catalog_to_configured(catalog)
+
+    connection_data.state = state
+    return connection_data
+
+
+def _convert_sync_catalog_to_configured(sync_catalog: dict[str, Any]) -> dict[str, Any]:
+    """Convert syncCatalog format to ConfiguredAirbyteCatalog format.
+
+    The Config API returns syncCatalog in a slightly different format than
+    the Airbyte protocol's ConfiguredAirbyteCatalog. This function converts
+    between the two formats.
+    """
+    configured_streams = []
+
+    for stream_config in sync_catalog.get("streams", []):
+        stream_info = stream_config.get("stream", {})
+        config_info = stream_config.get("config", {})
+
+        configured_stream = {
+            "stream": {
+                "name": stream_info.get("name", ""),
+                "json_schema": stream_info.get("jsonSchema", {}),
+                "supported_sync_modes": stream_info.get("supportedSyncModes", []),
+                "source_defined_cursor": stream_info.get("sourceDefinedCursor", False),
+                "default_cursor_field": stream_info.get("defaultCursorField", []),
+                "source_defined_primary_key": stream_info.get(
+                    "sourceDefinedPrimaryKey", []
+                ),
+            },
+            "sync_mode": config_info.get("syncMode", "full_refresh"),
+            "destination_sync_mode": config_info.get("destinationSyncMode", "append"),
+        }
+
+        cursor_field = config_info.get("cursorField")
+        if cursor_field:
+            configured_stream["cursor_field"] = cursor_field
+
+        primary_key = config_info.get("primaryKey")
+        if primary_key:
+            configured_stream["primary_key"] = primary_key
+
+        configured_streams.append(configured_stream)
+
+    return {"streams": configured_streams}
airbyte_ops_mcp/live_tests/connection_secret_retriever.py
@@ -0,0 +1,173 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""Retrieve unmasked connection secrets via vendored connection-retriever.
+
+This module provides a focused utility for enriching connection config with
+unmasked secrets from the vendored connection-retriever code. It is designed
+to work alongside the existing connection_fetcher module, which handles all
+other connection data via the public Cloud API.
+
+The secret retriever requires:
+- GCP credentials with appropriate permissions
+- Cloud SQL Proxy running to internal Postgres (or CI environment)
+
+Usage:
+    from airbyte_ops_mcp.live_tests.connection_fetcher import fetch_connection_data
+    from airbyte_ops_mcp.live_tests.connection_secret_retriever import (
+        enrich_config_with_secrets,
+        should_use_secret_retriever,
+    )
+
+    # Fetch connection data via public API (config will have masked secrets)
+    connection_data = fetch_connection_data(connection_id)
+
+    # Enrich with unmasked secrets if enabled
+    if should_use_secret_retriever():
+        connection_data = enrich_config_with_secrets(
+            connection_data,
+            retrieval_reason="MCP live test",
+        )
+"""
+
+from __future__ import annotations
+
+import logging
+import os
+from dataclasses import replace
+from typing import TYPE_CHECKING
+
+from airbyte_ops_mcp.live_tests._connection_retriever import (
+    ConnectionObject,
+    retrieve_objects,
+)
+
+if TYPE_CHECKING:
+    from airbyte_ops_mcp.live_tests.connection_fetcher import ConnectionData
+
+logger = logging.getLogger(__name__)
+
+# Environment variable to enable secret retrieval
+ENV_USE_SECRET_RETRIEVER = "USE_CONNECTION_SECRET_RETRIEVER"
+
+# GCP credential environment variables
+ENV_GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS"
+ENV_GCP_PROD_DB_ACCESS_CREDENTIALS = "GCP_PROD_DB_ACCESS_CREDENTIALS"
+
+
+def _ensure_gcp_credentials_env() -> None:
+    """Ensure GCP credentials are available via standard env var.
+
+    If GOOGLE_APPLICATION_CREDENTIALS is not set but GCP_PROD_DB_ACCESS_CREDENTIALS is,
+    copy the value to GOOGLE_APPLICATION_CREDENTIALS. This provides a fallback
+    for internal employees who use GCP_PROD_DB_ACCESS_CREDENTIALS as their standard
+    credential path for prod database access.
+
+    This function is idempotent and safe to call multiple times.
+    """
+    if ENV_GOOGLE_APPLICATION_CREDENTIALS not in os.environ:
+        gsm_creds = os.getenv(ENV_GCP_PROD_DB_ACCESS_CREDENTIALS)
+        if gsm_creds:
+            os.environ[ENV_GOOGLE_APPLICATION_CREDENTIALS] = gsm_creds
+            logger.debug(
+                f"Using {ENV_GCP_PROD_DB_ACCESS_CREDENTIALS} as fallback for "
+                f"{ENV_GOOGLE_APPLICATION_CREDENTIALS}"
+            )
+
+
+def is_secret_retriever_enabled() -> bool:
+    """Check if secret retrieval is enabled via environment variable.
+
+    Returns:
+        True if USE_CONNECTION_SECRET_RETRIEVER is set to a truthy value.
+    """
+    value = os.getenv(ENV_USE_SECRET_RETRIEVER, "").lower()
+    return value in ("true", "1", "yes")
+
+
+def should_use_secret_retriever() -> bool:
+    """Check if secret retrieval should be used.
+
+    Returns:
+        True if USE_CONNECTION_SECRET_RETRIEVER env var is set to a truthy value.
+    """
+    return is_secret_retriever_enabled()
+
+
+def retrieve_unmasked_config(
+    connection_id: str,
+    retrieval_reason: str = "MCP live tests",
+) -> dict | None:
+    """Retrieve unmasked source config from vendored connection-retriever.
+
+    This function directly queries the internal Postgres database to get
+    the source configuration with unmasked secrets.
+
+    Args:
+        connection_id: The Airbyte Cloud connection ID.
+        retrieval_reason: Reason for retrieval (for audit logging).
+
+    Returns:
+        The unmasked source config dict, or None if retrieval fails.
+    """
+    # Ensure GCP credentials are available (supports GCP_PROD_DB_ACCESS_CREDENTIALS fallback)
+    _ensure_gcp_credentials_env()
+
+    # Only request the source config - that's all we need for secrets
+    requested_objects = [ConnectionObject.SOURCE_CONFIG]
+
+    candidates = retrieve_objects(
+        connection_objects=requested_objects,
+        retrieval_reason=retrieval_reason,
+        connection_id=connection_id,
+    )
+
+    if not candidates:
+        logger.warning(
+            f"No connection data found for connection ID {connection_id} "
+            "via connection-retriever"
+        )
+        return None
+
+    candidate = candidates[0]
+    if candidate.source_config:
+        return dict(candidate.source_config)
+
+    return None
+
+
+def enrich_config_with_secrets(
+    connection_data: ConnectionData,
+    retrieval_reason: str = "MCP live tests",
+) -> ConnectionData:
+    """Enrich connection data with unmasked secrets from internal retriever.
+
+    This function takes a ConnectionData object (typically from the public
+    Cloud API with masked secrets) and replaces the config with unmasked
+    secrets from the internal connection-retriever.
+
+    Args:
+        connection_data: The connection data to enrich.
+        retrieval_reason: Reason for retrieval (for audit logging).
+
+    Returns:
+        A new ConnectionData with unmasked config, or the original if
+        retrieval fails or is not available.
+    """
+    unmasked_config = retrieve_unmasked_config(
+        connection_id=connection_data.connection_id,
+        retrieval_reason=retrieval_reason,
+    )
+
+    if unmasked_config is None:
+        logger.info(
+            f"Could not retrieve unmasked config for {connection_data.connection_id}, "
+            "using masked config from Cloud API"
+        )
+        return connection_data
+
+    logger.info(
+        f"Successfully enriched config with unmasked secrets for "
+        f"{connection_data.connection_id}"
+    )
+
+    # Return a new ConnectionData with the unmasked config
+    return replace(connection_data, config=unmasked_config)
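End to end, the module is opt-in: nothing touches the internal database unless the flag is set. A sketch mirroring the module docstring's usage, with a made-up connection ID and retrieval reason:

    import os

    from airbyte_ops_mcp.live_tests.connection_fetcher import fetch_connection_data
    from airbyte_ops_mcp.live_tests.connection_secret_retriever import (
        enrich_config_with_secrets,
        should_use_secret_retriever,
    )

    # Opt in; is_secret_retriever_enabled() accepts "true", "1", or "yes".
    os.environ["USE_CONNECTION_SECRET_RETRIEVER"] = "true"

    data = fetch_connection_data("<connection-id>")
    if should_use_secret_retriever():
        # Returns a new ConnectionData; falls back to the masked config on failure.
        data = enrich_config_with_secrets(data, retrieval_reason="debugging a failed sync")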
airbyte_ops_mcp/live_tests/evaluation_modes.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""Test evaluation modes for live tests.
+
+This module provides evaluation modes that control how test failures are handled.
+
+Based on airbyte-ci implementation:
+https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/live-tests/src/live_tests/commons/evaluation_modes.py
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class TestEvaluationMode(Enum):
+    """Test evaluation modes.
+
+    Tests may be run in "diagnostic" mode or "strict" mode.
+
+    When run in "diagnostic" mode, validation failures won't fail the overall
+    test run, but errors will still be surfaced in the test report.
+
+    In "strict" mode, tests pass/fail as usual.
+
+    Diagnostic mode is useful for tests that don't affect the overall
+    functionality of the connector but test an ideal state.
+    """
+
+    DIAGNOSTIC = "diagnostic"
+    STRICT = "strict"
+
+    @classmethod
+    def from_string(
+        cls,
+        value: str,
+    ) -> TestEvaluationMode:
+        """Parse evaluation mode from string."""
+        value_lower = value.lower()
+        if value_lower == "diagnostic":
+            return cls.DIAGNOSTIC
+        if value_lower == "strict":
+            return cls.STRICT
+        raise ValueError(
+            f"Unknown evaluation mode: {value}. Must be 'diagnostic' or 'strict'."
+        )
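Parsing is case-insensitive and rejects anything outside the two modes; a quick sketch:

    from airbyte_ops_mcp.live_tests.evaluation_modes import TestEvaluationMode

    assert TestEvaluationMode.from_string("STRICT") is TestEvaluationMode.STRICT
    assert TestEvaluationMode.from_string("Diagnostic") is TestEvaluationMode.DIAGNOSTIC

    try:
        TestEvaluationMode.from_string("lenient")
    except ValueError as exc:
        print(exc)  # Unknown evaluation mode: lenient. Must be 'diagnostic' or 'strict'.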
airbyte_ops_mcp/live_tests/http_metrics.py
@@ -21,11 +21,13 @@ from typing import Iterator
 try:
     from mitmproxy import http as mitmproxy_http
     from mitmproxy import io as mitmproxy_io
+    from mitmproxy.addons.savehar import SaveHar
 
     MITMPROXY_AVAILABLE = True
 except ImportError:
     mitmproxy_http = None  # type: ignore[assignment]
     mitmproxy_io = None  # type: ignore[assignment]
+    SaveHar = None  # type: ignore[assignment, misc]
     MITMPROXY_AVAILABLE = False
 
 logger = logging.getLogger(__name__)
@@ -317,3 +319,82 @@ def compute_http_metrics_comparison(
         },
         "difference": target_metrics.flow_count - control_metrics.flow_count,
     }
+
+
+def get_http_flows_from_mitm_dump(
+    mitm_dump_path: Path,
+) -> list[mitmproxy_http.HTTPFlow]:  # type: ignore[name-defined]
+    """Get HTTP flows from a mitmproxy dump file.
+
+    Based on airbyte-ci implementation:
+    https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py#L129-L139
+
+    Args:
+        mitm_dump_path: Path to the mitmproxy dump file.
+
+    Returns:
+        List of HTTP flows from the dump file.
+    """
+    if not MITMPROXY_AVAILABLE:
+        logger.warning("mitmproxy Python package not installed")
+        return []
+
+    if not mitm_dump_path.exists():
+        logger.warning(f"Mitmproxy dump file not found: {mitm_dump_path}")
+        return []
+
+    with open(mitm_dump_path, "rb") as dump_file:
+        return [
+            f
+            for f in mitmproxy_io.FlowReader(dump_file).stream()
+            if isinstance(f, mitmproxy_http.HTTPFlow)
+        ]
+
+
+def mitm_http_stream_to_har(
+    mitm_http_stream_path: Path,
+    har_file_path: Path,
+) -> Path:
+    """Convert a mitmproxy HTTP stream file to a HAR file.
+
+    Based on airbyte-ci implementation:
+    https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/live-tests/src/live_tests/commons/utils.py#L142-L154
+
+    HAR (HTTP Archive) is a standard JSON format for recording HTTP transactions.
+    This allows HTTP traffic captured by mitmproxy to be viewed in browser dev tools
+    or other HAR viewers.
+
+    Args:
+        mitm_http_stream_path: Path to the mitmproxy HTTP stream file (.mitm).
+        har_file_path: Path where the HAR file will be saved.
+
+    Returns:
+        Path to the generated HAR file.
+
+    Raises:
+        RuntimeError: If mitmproxy is not available.
+    """
+    if not MITMPROXY_AVAILABLE or SaveHar is None:
+        raise RuntimeError(
+            "mitmproxy Python package not installed; cannot convert to HAR"
+        )
+
+    flows = get_http_flows_from_mitm_dump(mitm_http_stream_path)
+    if not flows:
+        logger.warning(f"No HTTP flows found in {mitm_http_stream_path}")
+        return har_file_path
+
+    har_file_path.parent.mkdir(parents=True, exist_ok=True)
+    try:
+        SaveHar().export_har(flows, str(har_file_path))
+    except Exception as e:
+        logger.error(f"Failed to export HAR file to {har_file_path}: {e}")
+        raise
+
+    if har_file_path.exists() and har_file_path.stat().st_size > 0:
+        logger.info(f"Generated HAR file at {har_file_path}")
+    else:
+        logger.error(f"Failed to generate valid HAR file at {har_file_path}")
+        raise RuntimeError(f"Failed to generate valid HAR file at {har_file_path}")
+
+    return har_file_path
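A usage sketch for converting a captured stream to HAR, with made-up paths (requires the mitmproxy package to be installed):

    from pathlib import Path

    from airbyte_ops_mcp.live_tests.http_metrics import (
        get_http_flows_from_mitm_dump,
        mitm_http_stream_to_har,
    )

    dump = Path("artifacts/http_traffic.mitm")
    print(len(get_http_flows_from_mitm_dump(dump)), "HTTP flows captured")

    # Produces a HAR file viewable in browser dev tools or any HAR viewer.
    har = mitm_http_stream_to_har(dump, Path("artifacts/http_traffic.har"))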
airbyte_ops_mcp/live_tests/message_cache/__init__.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""Message cache for storing Airbyte messages from connector executions.
+
+This module provides a DuckDB-based message cache for persisting and querying
+Airbyte messages produced during connector test runs.
+
+Based on airbyte-ci implementation:
+https://github.com/airbytehq/airbyte/tree/master/airbyte-ci/connectors/live-tests/src/live_tests/commons/backends
+"""
+
+from airbyte_ops_mcp.live_tests.message_cache.duckdb_cache import DuckDbMessageCache
+
+__all__ = [
+    "DuckDbMessageCache",
+]