airbyte-cdk 6.55.0__py3-none-any.whl → 6.55.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -101,7 +101,7 @@ def connector_cli_group() -> None:
  pass


- @connector_cli_group.command()
+ @connector_cli_group.command("test")
  @click.argument(
  "connector",
  required=False,
@@ -114,10 +114,18 @@ def connector_cli_group() -> None:
  default=False,
  help="Only collect tests, do not run them.",
  )
- def test(
+ @click.option(
+ "--pytest-arg",
+ "pytest_args", # ← map --pytest-arg into pytest_args
+ type=str,
+ multiple=True,
+ help="Additional argument(s) to pass to pytest. Can be specified multiple times.",
+ )
+ def connector_test(
  connector: str | Path | None = None,
  *,
  collect_only: bool = False,
+ pytest_args: list[str] | None = None,
  ) -> None:
  """Run connector tests.

@@ -130,19 +138,36 @@ def test(
  directory. If the current working directory is not a connector directory (e.g. starting
  with 'source-') and no connector name or path is provided, the process will fail.
  """
+ click.echo("Connector test command executed.")
+ connector_name, connector_directory = resolve_connector_name_and_directory(connector)
+
+ pytest_args = pytest_args or []
+ if collect_only:
+ pytest_args.append("--collect-only")
+
+ run_connector_tests(
+ connector_name=connector_name,
+ connector_directory=connector_directory,
+ extra_pytest_args=pytest_args,
+ )
+
+
+ def run_connector_tests(
+ connector_name: str,
+ connector_directory: Path,
+ extra_pytest_args: list[str],
+ ) -> None:
  if pytest is None:
  raise ImportError(
  "pytest is not installed. Please install pytest to run the connector tests."
  )
- click.echo("Connector test command executed.")
- connector_name, connector_directory = resolve_connector_name_and_directory(connector)

  connector_test_suite = create_connector_test_suite(
  connector_name=connector_name if not connector_directory else None,
  connector_directory=connector_directory,
  )

- pytest_args: list[str] = []
+ pytest_args: list[str] = ["-p", "airbyte_cdk.test.standard_tests.pytest_hooks"]
  if connector_directory:
  pytest_args.append(f"--rootdir={connector_directory}")
  os.chdir(str(connector_directory))
@@ -158,8 +183,8 @@ def test(
  test_file_path.parent.mkdir(parents=True, exist_ok=True)
  test_file_path.write_text(file_text)

- if collect_only:
- pytest_args.append("--collect-only")
+ if extra_pytest_args:
+ pytest_args.extend(extra_pytest_args)

  pytest_args.append(str(test_file_path))

@@ -170,7 +195,6 @@ def test(

  click.echo(f"Running tests from connector directory: {connector_directory}...")
  click.echo(f"Test file: {test_file_path}")
- click.echo(f"Collect only: {collect_only}")
  click.echo(f"Pytest args: {pytest_args}")
  click.echo("Invoking Pytest...")
  exit_code = pytest.main(
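The hunks above extract the pytest invocation into a reusable `run_connector_tests()` helper and add a repeatable `--pytest-arg` option whose values are forwarded to pytest. A minimal sketch of calling the new helper directly, assuming a locally checked-out connector (the directory path is hypothetical); on the CLI the same effect is presumably achieved by passing `--pytest-arg=--collect-only` to the connector `test` command:

    from pathlib import Path

    # The image CLI hunk below imports the helper from this module path.
    from airbyte_cdk.cli.airbyte_cdk._connector import run_connector_tests

    # Hypothetical connector checkout; replace with a real connector directory.
    connector_dir = Path("airbyte-integrations/connectors/source-pokeapi")

    run_connector_tests(
        connector_name="source-pokeapi",
        connector_directory=connector_dir,
        # Extra args are appended to the pytest invocation, mirroring --pytest-arg.
        extra_pytest_args=["--collect-only"],
    )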
@@ -10,6 +10,7 @@ from pathlib import Path

  import rich_click as click

+ from airbyte_cdk.cli.airbyte_cdk._connector import run_connector_tests
  from airbyte_cdk.models.connector_metadata import MetadataFile
  from airbyte_cdk.utils.connector_paths import resolve_connector_name_and_directory
  from airbyte_cdk.utils.docker import (
@@ -88,6 +89,81 @@ def build(
  sys.exit(1)


+ @image_cli_group.command("test")
+ @click.argument(
+ "connector",
+ required=False,
+ type=str,
+ metavar="[CONNECTOR]",
+ )
+ @click.option(
+ "--image",
+ help="Image to test, instead of building a new one.",
+ )
+ def image_test( # "image test" command
+ connector: str | None = None,
+ *,
+ image: str | None = None,
+ ) -> None:
+ """Test a connector Docker image.
+
+ [CONNECTOR] can be a connector name (e.g. 'source-pokeapi'), a path to a connector directory, or omitted to use the current working directory.
+ If a string containing '/' is provided, it is treated as a path. Otherwise, it is treated as a connector name.
+
+ If an image is provided, it will be used for testing instead of building a new one.
+
+ Note: You should run `airbyte-cdk secrets fetch` before running this command to ensure
+ that the secrets are available for the connector tests.
+ """
+ if not verify_docker_installation():
+ click.echo(
+ "Docker is not installed or not running. Please install Docker and try again.", err=True
+ )
+ sys.exit(1)
+
+ connector_name, connector_directory = resolve_connector_name_and_directory(connector)
+
+ # Select only tests with the 'image_tests' mark
+ pytest_args = ["-m", "image_tests"]
+ if not image:
+ metadata_file_path: Path = connector_directory / "metadata.yaml"
+ try:
+ metadata = MetadataFile.from_file(metadata_file_path)
+ except (FileNotFoundError, ValueError) as e:
+ click.echo(
+ f"Error loading metadata file '{metadata_file_path}': {e!s}",
+ err=True,
+ )
+ sys.exit(1)
+
+ tag = "dev-latest"
+ image = f"{metadata.data.dockerRepository}:{tag}"
+ click.echo(f"Building Image for Connector: {image}")
+ try:
+ image = build_connector_image(
+ connector_directory=connector_directory,
+ connector_name=connector_name,
+ metadata=metadata,
+ tag=tag,
+ no_verify=True,
+ )
+ except ConnectorImageBuildError as e:
+ click.echo(
+ f"Error building connector image: {e!s}",
+ err=True,
+ )
+ sys.exit(1)
+
+ pytest_args.extend(["--connector-image", image])
+
+ click.echo(f"Testing Connector Image: {image}")
+ run_connector_tests(
+ connector_name=connector_name,
+ connector_directory=connector_directory,
+ extra_pytest_args=pytest_args,
+ )
+
+
  __all__ = [
  "image_cli_group",
  ]
@@ -99,12 +99,12 @@ def secrets_cli_group() -> None:
  help="Print GitHub CI mask for secrets.",
  type=bool,
  is_flag=True,
- default=False,
+ default=None,
  )
  def fetch(
  connector: str | Path | None = None,
  gcp_project_id: str = GCP_PROJECT_ID,
- print_ci_secrets_masks: bool = False,
+ print_ci_secrets_masks: bool | None = None,
  ) -> None:
  """Fetch secrets for a connector from Google Secret Manager.

@@ -181,22 +181,23 @@ def fetch(
  if secret_count == 0:
  raise exceptions[0]

- if not print_ci_secrets_masks:
- return
-
- if not os.environ.get("CI", None):
+ if print_ci_secrets_masks and "CI" not in os.environ:
  click.echo(
  "The `--print-ci-secrets-masks` option is only available in CI environments. "
  "The `CI` env var is either not set or not set to a truthy value. "
  "Skipping printing secret masks.",
  err=True,
  )
- return
-
- # Else print the CI mask
- _print_ci_secrets_masks(
- secrets_dir=secrets_dir,
- )
+ print_ci_secrets_masks = False
+ elif print_ci_secrets_masks is None:
+ # If not explicitly set, we check if we are in a CI environment
+ # and set to True if so.
+ print_ci_secrets_masks = os.environ.get("CI", "") != ""
+
+ if print_ci_secrets_masks:
+ _print_ci_secrets_masks(
+ secrets_dir=secrets_dir,
+ )


  @secrets_cli_group.command("list")
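The secrets change above makes `--print-ci-secrets-masks` tri-state: left unset (`None`), mask printing is enabled automatically when the `CI` env var is set; explicitly requested outside CI, it warns and is downgraded to `False`; masks are printed only when the resolved value is truthy. A standalone restatement of that resolution logic for clarity (the function name `resolve_print_masks` is illustrative, not part of the CDK):

    import os


    def resolve_print_masks(print_ci_secrets_masks: bool | None) -> bool:
        # Illustrative restatement of the new default handling shown above.
        if print_ci_secrets_masks and "CI" not in os.environ:
            # Explicitly requested outside CI: warn (omitted here) and skip.
            return False
        if print_ci_secrets_masks is None:
            # Not explicitly set: enable only when a CI environment is detected.
            return os.environ.get("CI", "") != ""
        return print_ci_secrets_masks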
@@ -4,9 +4,11 @@ from typing import Any, Dict
  from serpyco_rs import CustomType, Serializer

  from .airbyte_protocol import ( # type: ignore[attr-defined] # all classes are imported to airbyte_protocol via *
+ AirbyteCatalog,
  AirbyteMessage,
  AirbyteStateBlob,
  AirbyteStateMessage,
+ AirbyteStream,
  AirbyteStreamState,
  ConfiguredAirbyteCatalog,
  ConfiguredAirbyteStream,
@@ -30,6 +32,8 @@ def custom_type_resolver(t: type) -> CustomType[AirbyteStateBlob, Dict[str, Any]
  return AirbyteStateBlobType() if t is AirbyteStateBlob else None


+ AirbyteCatalogSerializer = Serializer(AirbyteCatalog, omit_none=True)
+ AirbyteStreamSerializer = Serializer(AirbyteStream, omit_none=True)
  AirbyteStreamStateSerializer = Serializer(
  AirbyteStreamState, omit_none=True, custom_type_resolver=custom_type_resolver
  )
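The new `AirbyteCatalogSerializer` and `AirbyteStreamSerializer` follow the same serpyco-rs pattern as the existing serializers (`dump()`/`load()` with `omit_none=True`). A minimal sketch, assuming the new serializers are re-exported from `airbyte_cdk.models` alongside the existing ones:

    # Assumes the new serializers are exposed via airbyte_cdk.models like the
    # existing AirbyteStreamStateSerializer; adjust the import if they are not.
    from airbyte_cdk.models import AirbyteCatalog, AirbyteCatalogSerializer, AirbyteStream, SyncMode

    catalog = AirbyteCatalog(
        streams=[
            AirbyteStream(
                name="users",
                json_schema={"type": "object", "properties": {"id": {"type": "integer"}}},
                supported_sync_modes=[SyncMode.full_refresh],
            )
        ]
    )

    # omit_none=True drops unset optional fields from the dumped dict.
    catalog_dict = AirbyteCatalogSerializer.dump(catalog)
    round_tripped = AirbyteCatalogSerializer.load(catalog_dict)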
@@ -34,6 +34,15 @@ class ConnectorBuildOptions(BaseModel):
  )


+ class SuggestedStreams(BaseModel):
+ """Suggested streams from metadata.yaml."""
+
+ streams: list[str] = Field(
+ default=[],
+ description="List of suggested streams for the connector",
+ )
+
+
  class ConnectorMetadata(BaseModel):
  """Connector metadata from metadata.yaml."""

@@ -47,6 +56,11 @@ class ConnectorMetadata(BaseModel):
  description="List of tags for the connector",
  )

+ suggestedStreams: SuggestedStreams | None = Field(
+ default=None,
+ description="Suggested streams for the connector",
+ )
+
  @property
  def language(self) -> ConnectorLanguage:
  """Get the connector language."""
@@ -19,9 +19,11 @@ import logging
  import re
  import tempfile
  import traceback
+ from collections import deque
+ from collections.abc import Generator, Mapping
  from io import StringIO
  from pathlib import Path
- from typing import Any, List, Mapping, Optional, Union
+ from typing import Any, List, Literal, Optional, Union, final, overload

  import orjson
  from pydantic import ValidationError as V2ValidationError
@@ -36,6 +38,7 @@ from airbyte_cdk.models import (
  AirbyteMessageSerializer,
  AirbyteStateMessage,
  AirbyteStateMessageSerializer,
+ AirbyteStreamState,
  AirbyteStreamStatus,
  ConfiguredAirbyteCatalog,
  ConfiguredAirbyteCatalogSerializer,
@@ -48,13 +51,41 @@ from airbyte_cdk.test.models.scenario import ExpectedOutcome


  class EntrypointOutput:
- def __init__(self, messages: List[str], uncaught_exception: Optional[BaseException] = None):
- try:
- self._messages = [self._parse_message(message) for message in messages]
- except V2ValidationError as exception:
- raise ValueError("All messages are expected to be AirbyteMessage") from exception
+ """A class to encapsulate the output of an Airbyte connector's execution.
+
+ This class can be initialized with a list of messages or a file containing messages.
+ It provides methods to access different types of messages produced during the execution
+ of an Airbyte connector, including both successful messages and error messages.
+
+ When working with records and state messages, it provides both a list and an iterator
+ implementation. Lists are easier to work with, but generators are better suited to handle
+ large volumes of messages without overflowing the available memory.
+ """
+
+ def __init__(
+ self,
+ messages: list[str] | None = None,
+ uncaught_exception: Optional[BaseException] = None,
+ *,
+ message_file: Path | None = None,
+ ) -> None:
+ if messages is None and message_file is None:
+ raise ValueError("Either messages or message_file must be provided")
+ if messages is not None and message_file is not None:
+ raise ValueError("Only one of messages or message_file can be provided")
+
+ self._messages: list[AirbyteMessage] | None = None
+ self._message_file: Path | None = message_file
+ if messages:
+ try:
+ self._messages = [self._parse_message(message) for message in messages]
+ except V2ValidationError as exception:
+ raise ValueError("All messages are expected to be AirbyteMessage") from exception

  if uncaught_exception:
+ if self._messages is None:
+ self._messages = []
+
  self._messages.append(
  assemble_uncaught_exception(
  type(uncaught_exception), uncaught_exception
@@ -72,39 +103,76 @@ class EntrypointOutput:
  )

  @property
- def records_and_state_messages(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.RECORD, Type.STATE])
+ def records_and_state_messages(
+ self,
+ ) -> list[AirbyteMessage]:
+ return self.get_message_by_types(
+ message_types=[Type.RECORD, Type.STATE],
+ safe_iterator=False,
+ )
+
+ def records_and_state_messages_iterator(
+ self,
+ ) -> Generator[AirbyteMessage, None, None]:
+ """Returns a generator that yields record and state messages one by one.
+
+ Use this instead of `records_and_state_messages` when the volume of messages could be large
+ enough to overload available memory.
+ """
+ return self.get_message_by_types(
+ message_types=[Type.RECORD, Type.STATE],
+ safe_iterator=True,
+ )

  @property
  def records(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.RECORD])
+ return self.get_message_by_types([Type.RECORD])
+
+ @property
+ def records_iterator(self) -> Generator[AirbyteMessage, None, None]:
+ """Returns a generator that yields record messages one by one.
+
+ Use this instead of `records` when the volume of records could be large
+ enough to overload available memory.
+ """
+ return self.get_message_by_types([Type.RECORD], safe_iterator=True)

  @property
  def state_messages(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.STATE])
+ return self.get_message_by_types([Type.STATE])

  @property
  def spec_messages(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.SPEC])
+ return self.get_message_by_types([Type.SPEC])

  @property
  def connection_status_messages(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.CONNECTION_STATUS])
+ return self.get_message_by_types([Type.CONNECTION_STATUS])

  @property
- def most_recent_state(self) -> Any:
- state_messages = self._get_message_by_types([Type.STATE])
- if not state_messages:
- raise ValueError("Can't provide most recent state as there are no state messages")
- return state_messages[-1].state.stream # type: ignore[union-attr] # state has `stream`
+ def most_recent_state(self) -> AirbyteStreamState | None:
+ state_message_iterator = self.get_message_by_types(
+ [Type.STATE],
+ safe_iterator=True,
+ )
+ # Use a deque with maxlen=1 to efficiently get the last state message
+ double_ended_queue = deque(state_message_iterator, maxlen=1)
+ try:
+ final_state_message: AirbyteMessage = double_ended_queue.pop()
+ except IndexError:
+ raise ValueError(
+ "Can't provide most recent state as there are no state messages."
+ ) from None
+
+ return final_state_message.state.stream # type: ignore[union-attr] # state has `stream`

  @property
  def logs(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.LOG])
+ return self.get_message_by_types([Type.LOG])

  @property
  def trace_messages(self) -> List[AirbyteMessage]:
- return self._get_message_by_types([Type.TRACE])
+ return self.get_message_by_types([Type.TRACE])

  @property
  def analytics_messages(self) -> List[AirbyteMessage]:
@@ -116,7 +184,7 @@ class EntrypointOutput:

  @property
  def catalog(self) -> AirbyteMessage:
- catalog = self._get_message_by_types([Type.CATALOG])
+ catalog = self.get_message_by_types([Type.CATALOG])
  if len(catalog) != 1:
  raise ValueError(f"Expected exactly one catalog but got {len(catalog)}")
  return catalog[0]
@@ -131,13 +199,80 @@ class EntrypointOutput:
  )
  return list(status_messages)

- def _get_message_by_types(self, message_types: List[Type]) -> List[AirbyteMessage]:
- return [message for message in self._messages if message.type in message_types]
+ def get_message_iterator(self) -> Generator[AirbyteMessage, None, None]:
+ """Creates a generator which yields messages one by one.
+
+ This will iterate over all messages in the output file (if provided) or the messages
+ provided during initialization. File results are provided first, followed by any
+ messages that were passed in directly.
+ """
+ if self._message_file:
+ try:
+ with open(self._message_file, "r", encoding="utf-8") as file:
+ for line in file:
+ if not line.strip():
+ # Skip empty lines
+ continue
+
+ yield self._parse_message(line.strip())
+ except FileNotFoundError:
+ raise ValueError(f"Message file {self._message_file} not found")
+
+ if self._messages is not None:
+ yield from self._messages
+
+ # Overloads to provide proper type hints for different usages of `get_message_by_types`.
+
+ @overload
+ def get_message_by_types(
+ self,
+ message_types: list[Type],
+ ) -> list[AirbyteMessage]: ...
+
+ @overload
+ def get_message_by_types(
+ self,
+ message_types: list[Type],
+ *,
+ safe_iterator: Literal[False],
+ ) -> list[AirbyteMessage]: ...
+
+ @overload
+ def get_message_by_types(
+ self,
+ message_types: list[Type],
+ *,
+ safe_iterator: Literal[True],
+ ) -> Generator[AirbyteMessage, None, None]: ...
+
+ def get_message_by_types(
+ self,
+ message_types: list[Type],
+ *,
+ safe_iterator: bool = False,
+ ) -> list[AirbyteMessage] | Generator[AirbyteMessage, None, None]:
+ """Get messages of specific types.
+
+ If `safe_iterator` is True, returns a generator that yields messages one by one.
+ If `safe_iterator` is False, returns a list of messages.
+
+ Use `safe_iterator=True` when the volume of messages could overload the available
+ memory.
+ """
+ message_generator = self.get_message_iterator()
+
+ if safe_iterator:
+ return (message for message in message_generator if message.type in message_types)
+
+ return [message for message in message_generator if message.type in message_types]

  def _get_trace_message_by_trace_type(self, trace_type: TraceType) -> List[AirbyteMessage]:
  return [
  message
- for message in self._get_message_by_types([Type.TRACE])
+ for message in self.get_message_by_types(
+ [Type.TRACE],
+ safe_iterator=True,
+ )
  if message.trace.type == trace_type # type: ignore[union-attr] # trace has `type`
  ]

@@ -184,7 +319,7 @@ def _run_command(
  parsed_args = AirbyteEntrypoint.parse_args(args)

  source_entrypoint = AirbyteEntrypoint(source)
- messages = []
+ messages: list[str] = []
  uncaught_exception = None
  try:
  for message in source_entrypoint.run(parsed_args):
@@ -199,8 +334,10 @@ def _run_command(
  captured_logs = log_capture_buffer.getvalue().split("\n")[:-1]

  parent_logger.removeHandler(stream_handler)
-
- return EntrypointOutput(messages + captured_logs, uncaught_exception=uncaught_exception)
+ return EntrypointOutput(
+ messages=messages + captured_logs,
+ uncaught_exception=uncaught_exception,
+ )


  def discover(
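The `EntrypointOutput` changes above add a file-backed mode and lazy iteration so that large runs do not have to be materialized in memory. A minimal sketch of the new surface, assuming a newline-delimited file of AirbyteMessage JSON (the path is hypothetical):

    from pathlib import Path

    from airbyte_cdk.models import Type
    from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput

    # Hypothetical capture of a connector run, one AirbyteMessage JSON per line.
    output = EntrypointOutput(message_file=Path("/tmp/sync-output.jsonl"))

    # Lazy iteration: messages are parsed one at a time instead of loaded as a list.
    for record in output.records_iterator:
        ...  # process each record without holding all of them in memory

    # Equivalent explicit form, selecting several message types at once.
    for message in output.get_message_by_types([Type.RECORD, Type.STATE], safe_iterator=True):
        ...

    # Last emitted stream state; raises ValueError if no state messages were produced.
    latest_state = output.most_recent_state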
@@ -9,14 +9,20 @@ up iteration cycles.

  from __future__ import annotations

+ import json
+ import tempfile
+ from contextlib import contextmanager, suppress
  from pathlib import Path # noqa: TC003 # Pydantic needs this (don't move to 'if typing' block)
- from typing import Any, Literal, cast
+ from typing import TYPE_CHECKING, Any, Literal, cast

  import yaml
  from pydantic import BaseModel, ConfigDict

  from airbyte_cdk.test.models.outcome import ExpectedOutcome

+ if TYPE_CHECKING:
+ from collections.abc import Generator
+

  class ConnectorTestScenario(BaseModel):
  """Acceptance test scenario, as a Pydantic model.
@@ -41,13 +47,13 @@ class ConnectorTestScenario(BaseModel):
  config_path: Path | None = None
  config_dict: dict[str, Any] | None = None

- id: str | None = None
+ _id: str | None = None # Used to override the default ID generation

  configured_catalog_path: Path | None = None
  timeout_seconds: int | None = None
  expect_records: AcceptanceTestExpectRecords | None = None
  file_types: AcceptanceTestFileTypes | None = None
- status: Literal["succeed", "failed"] | None = None
+ status: Literal["succeed", "failed", "exception"] | None = None

  def get_config_dict(
  self,
@@ -93,16 +99,49 @@ class ConnectorTestScenario(BaseModel):
  return ExpectedOutcome.from_status_str(self.status)

  @property
- def instance_name(self) -> str:
- return self.config_path.stem if self.config_path else "Unnamed Scenario"
+ def id(self) -> str:
+ """Return a unique identifier for the test scenario.
+
+ This is used by PyTest to identify the test scenario.
+ """
+ if self._id:
+ return self._id

- def __str__(self) -> str:
- if self.id:
- return f"'{self.id}' Test Scenario"
  if self.config_path:
- return f"'{self.config_path.name}' Test Scenario"
+ return self.config_path.stem
+
+ return str(hash(self))

- return f"'{hash(self)}' Test Scenario"
+ def __str__(self) -> str:
+ return f"'{self.id}' Test Scenario"
+
+ @contextmanager
+ def with_temp_config_file(
+ self,
+ connector_root: Path,
+ ) -> Generator[Path, None, None]:
+ """Yield a temporary JSON file path containing the config dict and delete it on exit."""
+ config = self.get_config_dict(
+ empty_if_missing=True,
+ connector_root=connector_root,
+ )
+ with tempfile.NamedTemporaryFile(
+ prefix="config-",
+ suffix=".json",
+ mode="w",
+ delete=False, # Don't fail if cannot delete the file on exit
+ encoding="utf-8",
+ ) as temp_file:
+ temp_file.write(json.dumps(config))
+ temp_file.flush()
+ # Allow the file to be read by other processes
+ temp_path = Path(temp_file.name)
+ temp_path.chmod(temp_path.stat().st_mode | 0o444)
+ yield temp_path
+
+ # attempt cleanup, ignore errors
+ with suppress(OSError):
+ temp_path.unlink()

  def without_expected_outcome(self) -> ConnectorTestScenario:
  """Return a copy of the scenario that does not expect failure or success.
@@ -27,10 +27,8 @@ Available test suites base classes:

  '''

- from airbyte_cdk.test.standard_tests.connector_base import (
- ConnectorTestScenario,
- ConnectorTestSuiteBase,
- )
+ from airbyte_cdk.test.models.scenario import ConnectorTestScenario
+ from airbyte_cdk.test.standard_tests.connector_base import ConnectorTestSuiteBase
  from airbyte_cdk.test.standard_tests.declarative_sources import (
  DeclarativeSourceTestSuite,
  )