airbyte-cdk 6.54.9__py3-none-any.whl → 6.54.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +64 -55
- airbyte_cdk/sources/declarative/interpolation/jinja.py +4 -2
- airbyte_cdk/test/entrypoint_wrapper.py +39 -11
- airbyte_cdk/test/models/__init__.py +10 -0
- airbyte_cdk/test/models/outcome.py +58 -0
- airbyte_cdk/test/{standard_tests/models → models}/scenario.py +60 -5
- airbyte_cdk/test/standard_tests/_job_runner.py +14 -9
- airbyte_cdk/test/standard_tests/connector_base.py +20 -15
- airbyte_cdk/test/standard_tests/declarative_sources.py +7 -2
- airbyte_cdk/test/standard_tests/source_base.py +19 -12
- airbyte_cdk/test/standard_tests/util.py +1 -1
- airbyte_cdk/test/utils/reading.py +12 -2
- {airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/METADATA +1 -1
- {airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/RECORD +18 -17
- airbyte_cdk/test/standard_tests/models/__init__.py +0 -7
- {airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/LICENSE_SHORT +0 -0
- {airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/WHEEL +0 -0
- {airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/entry_points.txt +0 -0
airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py

```diff
@@ -9,7 +9,7 @@ import time
 from collections import OrderedDict
 from copy import deepcopy
 from datetime import timedelta
-from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional
+from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional
 
 from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
 from airbyte_cdk.sources.declarative.incremental.global_substream_cursor import (
@@ -66,8 +66,8 @@ class ConcurrentPerPartitionCursor(Cursor):
     _GLOBAL_STATE_KEY = "state"
     _PERPARTITION_STATE_KEY = "states"
     _IS_PARTITION_DUPLICATION_LOGGED = False
-
-
+    _PARENT_STATE = 0
+    _GENERATION_SEQUENCE = 1
 
     def __init__(
         self,
@@ -99,19 +99,29 @@ class ConcurrentPerPartitionCursor(Cursor):
         self._semaphore_per_partition: OrderedDict[str, threading.Semaphore] = OrderedDict()
 
         # Parent-state tracking: store each partition’s parent state in creation order
-        self._partition_parent_state_map: OrderedDict[str, Mapping[str, Any]] = OrderedDict()
+        self._partition_parent_state_map: OrderedDict[str, tuple[Mapping[str, Any], int]] = (
+            OrderedDict()
+        )
+        self._parent_state: Optional[StreamState] = None
+
+        # Tracks when the last slice for partition is emitted
+        self._partitions_done_generating_stream_slices: set[str] = set()
+        # Used to track the index of partitions that are not closed yet
+        self._processing_partitions_indexes: List[int] = list()
+        self._generated_partitions_count: int = 0
+        # Dictionary to map partition keys to their index
+        self._partition_key_to_index: dict[str, int] = {}
 
-        self._finished_partitions: set[str] = set()
         self._lock = threading.Lock()
-        self._timer = Timer()
-        self._new_global_cursor: Optional[StreamState] = None
         self._lookback_window: int = 0
-        self._parent_state: Optional[StreamState] = None
+        self._new_global_cursor: Optional[StreamState] = None
         self._number_of_partitions: int = 0
         self._use_global_cursor: bool = use_global_cursor
         self._partition_serializer = PerPartitionKeySerializer()
+
         # Track the last time a state message was emitted
         self._last_emission_time: float = 0.0
+        self._timer = Timer()
 
         self._set_initial_state(stream_state)
 
@@ -157,60 +167,37 @@ class ConcurrentPerPartitionCursor(Cursor):
                 self._cursor_per_partition[partition_key].close_partition(partition=partition)
                 cursor = self._cursor_per_partition[partition_key]
                 if (
-                    partition_key in self._finished_partitions
+                    partition_key in self._partitions_done_generating_stream_slices
                     and self._semaphore_per_partition[partition_key]._value == 0
                 ):
                     self._update_global_cursor(cursor.state[self.cursor_field.cursor_field_key])
 
+                # Clean up the partition if it is fully processed
+                self._cleanup_if_done(partition_key)
+
             self._check_and_update_parent_state()
 
             self._emit_state_message()
 
     def _check_and_update_parent_state(self) -> None:
-        """
-        Pop the leftmost partition state from _partition_parent_state_map only if
-        *all partitions* up to (and including) that partition key in _semaphore_per_partition
-        are fully finished (i.e. in _finished_partitions and semaphore._value == 0).
-        Additionally, delete finished semaphores with a value of 0 to free up memory,
-        as they are only needed to track errors and completion status.
-        """
         last_closed_state = None
 
         while self._partition_parent_state_map:
-
-
-
-            # Verify ALL partitions from the left up to earliest_key are finished
-            all_left_finished = True
-            for p_key, sem in list(
-                self._semaphore_per_partition.items()
-            ):  # Use list to allow modification during iteration
-                # If any earlier partition is still not finished, we must stop
-                if p_key not in self._finished_partitions or sem._value != 0:
-                    all_left_finished = False
-                    break
-                # Once we've reached earliest_key in the semaphore order, we can stop checking
-                if p_key == earliest_key:
-                    break
-
-            # If the partitions up to earliest_key are not all finished, break the while-loop
-            if not all_left_finished:
-                break
+            earliest_key, (candidate_state, candidate_seq) = next(
+                iter(self._partition_parent_state_map.items())
+            )
 
-            #
-
-
+            # if any partition that started <= candidate_seq is still open, we must wait
+            if (
+                self._processing_partitions_indexes
+                and self._processing_partitions_indexes[0] <= candidate_seq
+            ):
+                break
 
-            #
-
-
-            if p_key in self._finished_partitions and sem._value == 0:
-                del self._semaphore_per_partition[p_key]
-                logger.debug(f"Deleted finished semaphore for partition {p_key} with value 0")
-            if p_key == earliest_key:
-                break
+            # safe to pop
+            self._partition_parent_state_map.popitem(last=False)
+            last_closed_state = candidate_state
 
-        # Update _parent_state if we popped at least one partition
         if last_closed_state is not None:
             self._parent_state = last_closed_state
 
@@ -289,18 +276,24 @@ class ConcurrentPerPartitionCursor(Cursor):
             if not self._IS_PARTITION_DUPLICATION_LOGGED:
                 logger.warning(f"Partition duplication detected for stream {self._stream_name}")
                 self._IS_PARTITION_DUPLICATION_LOGGED = True
+            return
         else:
             self._semaphore_per_partition[partition_key] = threading.Semaphore(0)
 
         with self._lock:
+            seq = self._generated_partitions_count
+            self._generated_partitions_count += 1
+            self._processing_partitions_indexes.append(seq)
+            self._partition_key_to_index[partition_key] = seq
+
             if (
                 len(self._partition_parent_state_map) == 0
                 or self._partition_parent_state_map[
                     next(reversed(self._partition_parent_state_map))
-                ]
+                ][self._PARENT_STATE]
                 != parent_state
             ):
-                self._partition_parent_state_map[partition_key] = deepcopy(parent_state)
+                self._partition_parent_state_map[partition_key] = (deepcopy(parent_state), seq)
 
         for cursor_slice, is_last_slice, _ in iterate_with_last_flag_and_state(
             cursor.stream_slices(),
@@ -308,7 +301,7 @@ class ConcurrentPerPartitionCursor(Cursor):
         ):
             self._semaphore_per_partition[partition_key].release()
             if is_last_slice:
-                self._finished_partitions.add(partition_key)
+                self._partitions_done_generating_stream_slices.add(partition_key)
             yield StreamSlice(
                 partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
             )
@@ -338,14 +331,11 @@ class ConcurrentPerPartitionCursor(Cursor):
         while len(self._cursor_per_partition) > self.DEFAULT_MAX_PARTITIONS_NUMBER - 1:
             # Try removing finished partitions first
            for partition_key in list(self._cursor_per_partition.keys()):
-                if partition_key in self._finished_partitions and (
-                    partition_key not in self._semaphore_per_partition
-                    or self._semaphore_per_partition[partition_key]._value == 0
-                ):
+                if partition_key not in self._partition_key_to_index:
                     oldest_partition = self._cursor_per_partition.pop(
                         partition_key
                     )  # Remove the oldest partition
-                    logger.warning(
+                    logger.debug(
                         f"The maximum number of partitions has been reached. Dropping the oldest finished partition: {oldest_partition}. Over limit: {self._number_of_partitions - self.DEFAULT_MAX_PARTITIONS_NUMBER}."
                     )
                     break
@@ -474,6 +464,25 @@ class ConcurrentPerPartitionCursor(Cursor):
         ):
             self._new_global_cursor = {self.cursor_field.cursor_field_key: copy.deepcopy(value)}
 
+    def _cleanup_if_done(self, partition_key: str) -> None:
+        """
+        Free every in-memory structure that belonged to a completed partition:
+        cursor, semaphore, flag inside `_finished_partitions`
+        """
+        if not (
+            partition_key in self._partitions_done_generating_stream_slices
+            and self._semaphore_per_partition[partition_key]._value == 0
+        ):
+            return
+
+        self._semaphore_per_partition.pop(partition_key, None)
+        self._partitions_done_generating_stream_slices.discard(partition_key)
+
+        seq = self._partition_key_to_index.pop(partition_key)
+        self._processing_partitions_indexes.remove(seq)
+
+        logger.debug(f"Partition {partition_key} fully processed and cleaned up.")
+
     def _to_partition_key(self, partition: Mapping[str, Any]) -> str:
         return self._partition_serializer.to_partition_key(partition)
 
```
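
Taken together, these hunks replace the old "scan every semaphore from the left" bookkeeping with a per-partition generation sequence: a partition's parent state may only be committed once every partition generated at or before it has fully closed. Below is a minimal, self-contained sketch of that gating rule; `ParentStateTracker` and its method names are invented for illustration and are not part of the CDK.

```python
from collections import OrderedDict
from typing import Any, Mapping, Optional


class ParentStateTracker:
    """Simplified, illustrative model of the sequence-number gating (not the CDK class)."""

    def __init__(self) -> None:
        # partition_key -> (parent_state, generation_sequence), in creation order
        self._parent_states: OrderedDict[str, tuple[Mapping[str, Any], int]] = OrderedDict()
        self._open_sequences: list[int] = []  # sequence numbers of partitions still open
        self._generated_count = 0
        self._key_to_sequence: dict[str, int] = {}
        self.parent_state: Optional[Mapping[str, Any]] = None

    def start_partition(self, key: str, parent_state: Mapping[str, Any]) -> None:
        seq = self._generated_count
        self._generated_count += 1
        self._open_sequences.append(seq)
        self._key_to_sequence[key] = seq
        self._parent_states[key] = (dict(parent_state), seq)

    def close_partition(self, key: str) -> None:
        self._open_sequences.remove(self._key_to_sequence.pop(key))
        # Pop parent states from the left, but only while no still-open partition
        # was generated at or before the candidate's sequence number.
        while self._parent_states:
            _, (candidate_state, candidate_seq) = next(iter(self._parent_states.items()))
            if self._open_sequences and self._open_sequences[0] <= candidate_seq:
                break  # an earlier partition is still in flight; wait
            self._parent_states.popitem(last=False)
            self.parent_state = candidate_state


tracker = ParentStateTracker()
tracker.start_partition("p1", {"cursor": 1})
tracker.start_partition("p2", {"cursor": 2})
tracker.close_partition("p2")                 # p1 is still open: nothing is committed yet
assert tracker.parent_state is None
tracker.close_partition("p1")                 # both may now be flushed, newest state wins
assert tracker.parent_state == {"cursor": 2}
```

Because sequence numbers are appended in increasing order, `self._open_sequences[0]` is always the oldest still-open partition, which is the same invariant the CDK code relies on with `_processing_partitions_indexes`.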

airbyte_cdk/sources/declarative/interpolation/jinja.py

```diff
@@ -147,16 +147,18 @@ class JinjaInterpolation(Interpolation):
             # It can be returned as is
             return s
 
+    @staticmethod
     @cache
-    def _find_undeclared_variables(self, s: Optional[str]) -> Set[str]:
+    def _find_undeclared_variables(s: Optional[str]) -> Set[str]:
         """
         Find undeclared variables and cache them
         """
         ast = _ENVIRONMENT.parse(s)  # type: ignore # parse is able to handle None
         return meta.find_undeclared_variables(ast)
 
+    @staticmethod
     @cache
-    def _compile(self, s: str) -> Template:
+    def _compile(s: str) -> Template:
         """
         We must cache the Jinja Template ourselves because we're using `from_string` instead of a template loader
         """
```
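
For context on the jinja.py change: stacking `@staticmethod` above `@cache` means the cached callable no longer receives `self`, so entries are keyed by the input string alone rather than by (instance, string) pairs. A toy illustration of the pattern, using a hypothetical `Interpolator` class rather than the CDK's:

```python
from functools import cache


class Interpolator:
    @staticmethod
    @cache  # keyed by the string argument only; no instance reference is retained
    def parse_length(expression: str) -> int:
        # stand-in for an expensive parse step
        return len(expression)


assert Interpolator.parse_length("abc") == 3
assert Interpolator().parse_length("abc") == 3  # instances share the same cache entry
```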

airbyte_cdk/test/entrypoint_wrapper.py

```diff
@@ -44,6 +44,7 @@ from airbyte_cdk.models import (
     Type,
 )
 from airbyte_cdk.sources import Source
+from airbyte_cdk.test.models.scenario import ExpectedOutcome
 
 
 class EntrypointOutput:
@@ -157,8 +158,22 @@ class EntrypointOutput:
 
 
 def _run_command(
-    source: Source, args: List[str], expecting_exception: bool = False
+    source: Source,
+    args: List[str],
+    expecting_exception: bool | None = None,  # Deprecated, use `expected_outcome` instead.
+    *,
+    expected_outcome: ExpectedOutcome | None = None,
 ) -> EntrypointOutput:
+    """Internal function to run a command with the AirbyteEntrypoint.
+
+    Note: Even though this function is private, some connectors do call it directly.
+
+    Note: The `expecting_exception` arg is now deprecated in favor of the tri-state
+    `expected_outcome` arg. The old argument is supported (for now) for backwards compatibility.
+    """
+    expected_outcome = expected_outcome or ExpectedOutcome.from_expecting_exception_bool(
+        expecting_exception,
+    )
     log_capture_buffer = StringIO()
     stream_handler = logging.StreamHandler(log_capture_buffer)
     stream_handler.setLevel(logging.INFO)
@@ -175,27 +190,30 @@ def _run_command(
         for message in source_entrypoint.run(parsed_args):
             messages.append(message)
     except Exception as exception:
-        if not expecting_exception:
+        if expected_outcome.expect_success():
            print("Printing unexpected error from entrypoint_wrapper")
            print("".join(traceback.format_exception(None, exception, exception.__traceback__)))
+
        uncaught_exception = exception
 
     captured_logs = log_capture_buffer.getvalue().split("\n")[:-1]
 
     parent_logger.removeHandler(stream_handler)
 
-    return EntrypointOutput(messages + captured_logs, uncaught_exception)
+    return EntrypointOutput(messages + captured_logs, uncaught_exception=uncaught_exception)
 
 
 def discover(
     source: Source,
     config: Mapping[str, Any],
-    expecting_exception: bool = False,
+    expecting_exception: bool | None = None,  # Deprecated, use `expected_outcome` instead.
+    *,
+    expected_outcome: ExpectedOutcome | None = None,
 ) -> EntrypointOutput:
     """
     config must be json serializable
-    :param expecting_exception: By default if there is an uncaught exception, the exception will be printed out. If this is expected, please
-        provide expecting_exception=True so that the test output logs are cleaner
+    :param expected_outcome: By default if there is an uncaught exception, the exception will be printed out. If this is expected, please
+        provide `expected_outcome=ExpectedOutcome.EXPECT_FAILURE` so that the test output logs are cleaner
     """
 
     with tempfile.TemporaryDirectory() as tmp_directory:
@@ -203,7 +221,10 @@ def discover(
         config_file = make_file(tmp_directory_path / "config.json", config)
 
         return _run_command(
-            source, ["discover", "--config", config_file, "--debug"], expecting_exception
+            source,
+            ["discover", "--config", config_file, "--debug"],
+            expecting_exception=expecting_exception,  # Deprecated, but still supported.
+            expected_outcome=expected_outcome,
         )
 
 
@@ -212,13 +233,15 @@ def read(
     config: Mapping[str, Any],
     catalog: ConfiguredAirbyteCatalog,
     state: Optional[List[AirbyteStateMessage]] = None,
-    expecting_exception: bool = False,
+    expecting_exception: bool | None = None,  # Deprecated, use `expected_outcome` instead.
+    *,
+    expected_outcome: ExpectedOutcome | None = None,
 ) -> EntrypointOutput:
     """
     config and state must be json serializable
 
-    :param expecting_exception: By default if there is an uncaught exception, the exception will be printed out. If this is expected, please
-        provide expecting_exception=True so that the test output logs are cleaner
+    :param expected_outcome: By default if there is an uncaught exception, the exception will be printed out. If this is expected, please
+        provide `expected_outcome=ExpectedOutcome.EXPECT_FAILURE` so that the test output logs are cleaner.
     """
     with tempfile.TemporaryDirectory() as tmp_directory:
         tmp_directory_path = Path(tmp_directory)
@@ -245,7 +268,12 @@ def read(
             ]
         )
 
-        return _run_command(source, args, expecting_exception)
+        return _run_command(
+            source,
+            args,
+            expecting_exception=expecting_exception,  # Deprecated, but still supported.
+            expected_outcome=expected_outcome,
+        )
 
 
 def make_file(
```
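
A small sketch of how the wrapper reconciles the deprecated boolean with the new keyword-only argument, mirroring the `expected_outcome or ExpectedOutcome.from_expecting_exception_bool(...)` line above. It assumes airbyte-cdk 6.54.11 is installed; `resolve_outcome` is a stand-in helper for illustration, not a CDK function.

```python
from __future__ import annotations

from airbyte_cdk.test.models import ExpectedOutcome


def resolve_outcome(
    expecting_exception: bool | None,
    expected_outcome: ExpectedOutcome | None,
) -> ExpectedOutcome:
    # The new tri-state argument wins; the deprecated boolean is only consulted
    # when expected_outcome is not provided (same precedence as _run_command above).
    return expected_outcome or ExpectedOutcome.from_expecting_exception_bool(expecting_exception)


assert resolve_outcome(None, None) is ExpectedOutcome.EXPECT_SUCCESS    # legacy default
assert resolve_outcome(True, None) is ExpectedOutcome.EXPECT_EXCEPTION  # legacy "expect failure"
assert resolve_outcome(True, ExpectedOutcome.ALLOW_ANY) is ExpectedOutcome.ALLOW_ANY  # enum wins
```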

airbyte_cdk/test/models/__init__.py (new file)

```diff
@@ -0,0 +1,10 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""Models used for standard tests."""
+
+from airbyte_cdk.test.models.outcome import ExpectedOutcome
+from airbyte_cdk.test.models.scenario import ConnectorTestScenario
+
+__all__ = [
+    "ConnectorTestScenario",
+    "ExpectedOutcome",
+]
```

airbyte_cdk/test/models/outcome.py (new file)

```diff
@@ -0,0 +1,58 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+"""Run acceptance tests in PyTest.
+
+These tests leverage the same `acceptance-test-config.yml` configuration files as the
+acceptance tests in CAT, but they run in PyTest instead of CAT. This allows us to run
+the acceptance tests in the same local environment as we are developing in, speeding
+up iteration cycles.
+"""
+
+from __future__ import annotations
+
+from enum import Enum, auto
+
+
+class ExpectedOutcome(Enum):
+    """Enum to represent the expected outcome of a test scenario.
+
+    Class supports comparisons to a boolean or None.
+    """
+
+    EXPECT_EXCEPTION = auto()
+    EXPECT_SUCCESS = auto()
+    ALLOW_ANY = auto()
+
+    @classmethod
+    def from_status_str(cls, status: str | None) -> ExpectedOutcome:
+        """Convert a status string to an ExpectedOutcome."""
+        if status is None:
+            return ExpectedOutcome.ALLOW_ANY
+
+        try:
+            return {
+                "succeed": ExpectedOutcome.EXPECT_SUCCESS,
+                "failed": ExpectedOutcome.EXPECT_EXCEPTION,
+            }[status]
+        except KeyError as ex:
+            raise ValueError(f"Invalid status '{status}'. Expected 'succeed' or 'failed'.") from ex
+
+    @classmethod
+    def from_expecting_exception_bool(cls, expecting_exception: bool | None) -> ExpectedOutcome:
+        """Convert a boolean indicating whether an exception is expected to an ExpectedOutcome."""
+        if expecting_exception is None:
+            # Align with legacy behavior where default would be 'False' (no exception expected)
+            return ExpectedOutcome.EXPECT_SUCCESS
+
+        return (
+            ExpectedOutcome.EXPECT_EXCEPTION
+            if expecting_exception
+            else ExpectedOutcome.EXPECT_SUCCESS
+        )
+
+    def expect_exception(self) -> bool:
+        """Return whether the expectation is that an exception should be raised."""
+        return self == ExpectedOutcome.EXPECT_EXCEPTION
+
+    def expect_success(self) -> bool:
+        """Return whether the expectation is that the test should succeed without exceptions."""
+        return self == ExpectedOutcome.EXPECT_SUCCESS
```
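
Assuming the 6.54.11 wheel is installed, the mappings defined in this new module behave as follows:

```python
from airbyte_cdk.test.models.outcome import ExpectedOutcome

# acceptance-test-config.yml `status` values map onto the enum:
assert ExpectedOutcome.from_status_str("succeed") is ExpectedOutcome.EXPECT_SUCCESS
assert ExpectedOutcome.from_status_str("failed") is ExpectedOutcome.EXPECT_EXCEPTION
assert ExpectedOutcome.from_status_str(None) is ExpectedOutcome.ALLOW_ANY

# ALLOW_ANY is the neutral state: it expects neither success nor an exception.
assert not ExpectedOutcome.ALLOW_ANY.expect_success()
assert not ExpectedOutcome.ALLOW_ANY.expect_exception()

# The deprecated boolean keeps its legacy meaning.
assert ExpectedOutcome.from_expecting_exception_bool(None) is ExpectedOutcome.EXPECT_SUCCESS
```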

airbyte_cdk/test/{standard_tests/models → models}/scenario.py

```diff
@@ -9,11 +9,13 @@ up iteration cycles.
 
 from __future__ import annotations
 
-from pathlib import Path
+from pathlib import Path  # noqa: TC003 # Pydantic needs this (don't move to 'if typing' block)
 from typing import Any, Literal, cast
 
 import yaml
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
+
+from airbyte_cdk.test.models.outcome import ExpectedOutcome
 
 
 class ConnectorTestScenario(BaseModel):
@@ -24,6 +26,10 @@ class ConnectorTestScenario(BaseModel):
     acceptance test configuration file.
     """
 
+    # Allows the class to be hashable, which PyTest will require
+    # when we use to parameterize tests.
+    model_config = ConfigDict(frozen=True)
+
     class AcceptanceTestExpectRecords(BaseModel):
         path: Path
         exact_order: bool = False
@@ -46,6 +52,7 @@ class ConnectorTestScenario(BaseModel):
     def get_config_dict(
         self,
         *,
+        connector_root: Path,
         empty_if_missing: bool,
     ) -> dict[str, Any]:
         """Return the config dictionary.
@@ -61,7 +68,15 @@ class ConnectorTestScenario(BaseModel):
             return self.config_dict
 
         if self.config_path is not None:
-
+            config_path = self.config_path
+            if not config_path.is_absolute():
+                # We usually receive a relative path here. Let's resolve it.
+                config_path = (connector_root / self.config_path).resolve().absolute()
+
+            return cast(
+                dict[str, Any],
+                yaml.safe_load(config_path.read_text()),
+            )
 
         if empty_if_missing:
             return {}
@@ -69,8 +84,13 @@ class ConnectorTestScenario(BaseModel):
         raise ValueError("No config dictionary or path provided.")
 
     @property
-    def expect_exception(self) -> bool:
-
+    def expected_outcome(self) -> ExpectedOutcome:
+        """Whether the test scenario expects an exception to be raised.
+
+        Returns True if the scenario expects an exception, False if it does not,
+        and None if there is no set expectation.
+        """
+        return ExpectedOutcome.from_status_str(self.status)
 
     @property
     def instance_name(self) -> str:
@@ -83,3 +103,38 @@ class ConnectorTestScenario(BaseModel):
             return f"'{self.config_path.name}' Test Scenario"
 
         return f"'{hash(self)}' Test Scenario"
+
+    def without_expected_outcome(self) -> ConnectorTestScenario:
+        """Return a copy of the scenario that does not expect failure or success.
+
+        This is useful when running multiple steps, to defer the expectations to a later step.
+        """
+        return ConnectorTestScenario(
+            **self.model_dump(exclude={"status"}),
+        )
+
+    def with_expecting_failure(self) -> ConnectorTestScenario:
+        """Return a copy of the scenario that expects failure.
+
+        This is useful when deriving new scenarios from existing ones.
+        """
+        if self.status == "failed":
+            return self
+
+        return ConnectorTestScenario(
+            **self.model_dump(exclude={"status"}),
+            status="failed",
+        )
+
+    def with_expecting_success(self) -> ConnectorTestScenario:
+        """Return a copy of the scenario that expects success.
+
+        This is useful when deriving new scenarios from existing ones.
+        """
+        if self.status == "succeed":
+            return self
+
+        return ConnectorTestScenario(
+            **self.model_dump(exclude={"status"}),
+            status="succeed",
+        )
```
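
A hedged usage sketch of the now-frozen model and its new derivation helpers. It assumes `config_dict` and `status` are accepted as constructor fields (as the attribute accesses above suggest); the values are placeholders.

```python
from airbyte_cdk.test.models import ConnectorTestScenario

# Placeholder scenario; real suites load these from acceptance-test-config.yml.
scenario = ConnectorTestScenario(config_dict={"api_key": "placeholder"}, status="succeed")

# The model is frozen, so derived variants are new objects rather than in-place
# mutations (the removed `scenario.status = "failed"` pattern no longer works).
failing = scenario.with_expecting_failure()
neutral = scenario.without_expected_outcome()

assert scenario.expected_outcome.expect_success()
assert failing.expected_outcome.expect_exception()
assert not neutral.expected_outcome.expect_success()   # ALLOW_ANY: no expectation either way
assert not neutral.expected_outcome.expect_exception()
```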

airbyte_cdk/test/standard_tests/_job_runner.py

```diff
@@ -16,7 +16,7 @@ from airbyte_cdk.models import (
     Status,
 )
 from airbyte_cdk.test import entrypoint_wrapper
-from airbyte_cdk.test.standard_tests.models import (
+from airbyte_cdk.test.models import (
     ConnectorTestScenario,
 )
 
@@ -58,6 +58,7 @@ def run_test_job(
     connector: IConnector | type[IConnector] | Callable[[], IConnector],
     verb: Literal["spec", "read", "check", "discover"],
     *,
+    connector_root: Path,
     test_scenario: ConnectorTestScenario | None = None,
     catalog: ConfiguredAirbyteCatalog | dict[str, Any] | None = None,
 ) -> entrypoint_wrapper.EntrypointOutput:
@@ -84,7 +85,10 @@ def run_test_job(
     )
 
     args: list[str] = [verb]
-    config_dict = test_scenario.get_config_dict(empty_if_missing=True)
+    config_dict = test_scenario.get_config_dict(
+        empty_if_missing=True,
+        connector_root=connector_root,
+    )
     if config_dict and verb != "spec":
         # Write the config to a temp json file and pass the path to the file as an argument.
         config_path = (
@@ -118,9 +122,9 @@ def run_test_job(
     result: entrypoint_wrapper.EntrypointOutput = entrypoint_wrapper._run_command(  # noqa: SLF001 # Non-public API
         source=connector_obj,  # type: ignore [arg-type]
         args=args,
-
+        expected_outcome=test_scenario.expected_outcome,
     )
-    if result.errors and not test_scenario.expect_exception:
+    if result.errors and test_scenario.expected_outcome.expect_success():
         raise AssertionError(
             f"Expected no errors but got {len(result.errors)}: \n" + _errors_to_str(result)
         )
@@ -135,7 +139,7 @@ def run_test_job(
             + "\n".join([str(msg) for msg in result.connection_status_messages])
            + _errors_to_str(result)
        )
-        if test_scenario.expect_exception:
+        if test_scenario.expected_outcome.expect_exception():
            conn_status = result.connection_status_messages[0].connectionStatus
            assert conn_status, (
                "Expected CONNECTION_STATUS message to be present. Got: \n"
@@ -149,14 +153,15 @@ def run_test_job(
         return result
 
     # For all other verbs, we assert check that an exception is raised (or not).
-    if test_scenario.expect_exception:
+    if test_scenario.expected_outcome.expect_exception():
         if not result.errors:
             raise AssertionError("Expected exception but got none.")
 
         return result
 
-
-
-
+    if test_scenario.expected_outcome.expect_success():
+        assert not result.errors, (
+            f"Expected no errors but got {len(result.errors)}: \n" + _errors_to_str(result)
+        )
 
     return result
```

airbyte_cdk/test/standard_tests/connector_base.py

```diff
@@ -20,10 +20,10 @@ from airbyte_cdk.models import (
     Type,
 )
 from airbyte_cdk.test import entrypoint_wrapper
-from airbyte_cdk.test.standard_tests._job_runner import IConnector, run_test_job
-from airbyte_cdk.test.standard_tests.models import (
+from airbyte_cdk.test.models import (
     ConnectorTestScenario,
 )
+from airbyte_cdk.test.standard_tests._job_runner import IConnector, run_test_job
 from airbyte_cdk.utils.connector_paths import (
     ACCEPTANCE_TEST_CONFIG,
     find_connector_root,
@@ -116,6 +116,7 @@ class ConnectorTestSuiteBase(abc.ABC):
             self.create_connector(scenario),
             "check",
             test_scenario=scenario,
+            connector_root=self.get_connector_root_dir(),
         )
         conn_status_messages: list[AirbyteMessage] = [
             msg for msg in result._messages if msg.type == Type.CONNECTION_STATUS
@@ -163,19 +164,23 @@ class ConnectorTestSuiteBase(abc.ABC):
             ):
                 continue
 
-
-
-
-
-
-            ]
-
+            for test in all_tests_config["acceptance_tests"][category]["tests"]:
+                if "config_path" not in test:
+                    # Skip tests without a config_path
+                    continue
+
+                if "iam_role" in test["config_path"]:
+                    # We skip iam_role tests for now, as they are not supported in the test suite.
+                    continue
+
+                scenario = ConnectorTestScenario.model_validate(test)
+
+                if scenario.config_path and scenario.config_path in [
+                    s.config_path for s in test_scenarios
+                ]:
+                    # Skip duplicate scenarios based on config_path
+                    continue
 
-
-        for test in test_scenarios:
-            if test.config_path:
-                test.config_path = connector_root / test.config_path
-            if test.configured_catalog_path:
-                test.configured_catalog_path = connector_root / test.configured_catalog_path
+                test_scenarios.append(scenario)
 
         return test_scenarios
```

airbyte_cdk/test/standard_tests/declarative_sources.py

```diff
@@ -9,8 +9,8 @@ from boltons.typeutils import classproperty
 from airbyte_cdk.sources.declarative.concurrent_declarative_source import (
     ConcurrentDeclarativeSource,
 )
+from airbyte_cdk.test.models import ConnectorTestScenario
 from airbyte_cdk.test.standard_tests._job_runner import IConnector
-from airbyte_cdk.test.standard_tests.models import ConnectorTestScenario
 from airbyte_cdk.test.standard_tests.source_base import SourceTestSuiteBase
 from airbyte_cdk.utils.connector_paths import MANIFEST_YAML
 
@@ -78,7 +78,12 @@ class DeclarativeSourceTestSuite(SourceTestSuiteBase):
         config = {
             "__injected_manifest": manifest_dict,
         }
-        config.update(scenario.get_config_dict(empty_if_missing=True))
+        config.update(
+            scenario.get_config_dict(
+                empty_if_missing=True,
+                connector_root=cls.get_connector_root_dir(),
+            ),
+        )
 
         if cls.components_py_path and cls.components_py_path.exists():
             os.environ["AIRBYTE_ENABLE_UNSAFE_CODE"] = "true"
```

airbyte_cdk/test/standard_tests/source_base.py

```diff
@@ -2,6 +2,7 @@
 """Base class for source test suites."""
 
 from dataclasses import asdict
+from typing import TYPE_CHECKING
 
 from airbyte_cdk.models import (
     AirbyteMessage,
@@ -12,14 +13,16 @@ from airbyte_cdk.models import (
     SyncMode,
     Type,
 )
-from airbyte_cdk.test import entrypoint_wrapper
+from airbyte_cdk.test.models import (
+    ConnectorTestScenario,
+)
 from airbyte_cdk.test.standard_tests._job_runner import run_test_job
 from airbyte_cdk.test.standard_tests.connector_base import (
     ConnectorTestSuiteBase,
 )
-
-
-
+
+if TYPE_CHECKING:
+    from airbyte_cdk.test import entrypoint_wrapper
 
 
 class SourceTestSuiteBase(ConnectorTestSuiteBase):
@@ -43,6 +46,7 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
             self.create_connector(scenario),
             "check",
             test_scenario=scenario,
+            connector_root=self.get_connector_root_dir(),
         )
         conn_status_messages: list[AirbyteMessage] = [
             msg for msg in result._messages if msg.type == Type.CONNECTION_STATUS
@@ -61,6 +65,7 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
         run_test_job(
             self.create_connector(scenario),
             "discover",
+            connector_root=self.get_connector_root_dir(),
             test_scenario=scenario,
         )
 
@@ -80,6 +85,7 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
             verb="spec",
             test_scenario=None,
             connector=self.create_connector(scenario=None),
+            connector_root=self.get_connector_root_dir(),
         )
         # If an error occurs, it will be raised above.
 
@@ -102,10 +108,11 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
         discover_result = run_test_job(
             self.create_connector(scenario),
             "discover",
-
+            connector_root=self.get_connector_root_dir(),
+            test_scenario=scenario.without_expected_outcome(),
         )
-        if scenario.expect_exception:
-
+        if scenario.expected_outcome.expect_exception() and discover_result.errors:
+            # Failed as expected; we're done.
             return
 
         configured_catalog = ConfiguredAirbyteCatalog(
@@ -122,6 +129,7 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
             self.create_connector(scenario),
             "read",
             test_scenario=scenario,
+            connector_root=self.get_connector_root_dir(),
             catalog=configured_catalog,
         )
 
@@ -149,15 +157,14 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
                     ),
                     sync_mode="INVALID",  # type: ignore [reportArgumentType]
                     destination_sync_mode="INVALID",  # type: ignore [reportArgumentType]
-                )
-            ]
+                ),
+            ],
         )
-        # Set expected status to "failed" to ensure the test fails if the connector.
-        scenario.status = "failed"
         result: entrypoint_wrapper.EntrypointOutput = run_test_job(
             self.create_connector(scenario),
             "read",
-
+            connector_root=self.get_connector_root_dir(),
+            test_scenario=scenario.with_expecting_failure(),  # Expect failure due to bad catalog
             catalog=asdict(invalid_configured_catalog),
         )
         assert result.errors, "Expected errors but got none."
```

airbyte_cdk/test/utils/reading.py

```diff
@@ -6,6 +6,7 @@ from airbyte_cdk import AbstractSource
 from airbyte_cdk.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, SyncMode
 from airbyte_cdk.test.catalog_builder import CatalogBuilder
 from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read
+from airbyte_cdk.test.models.outcome import ExpectedOutcome
 
 
 def catalog(stream_name: str, sync_mode: SyncMode) -> ConfiguredAirbyteCatalog:
@@ -19,8 +20,17 @@ def read_records(
     stream_name: str,
     sync_mode: SyncMode,
     state: Optional[List[AirbyteStateMessage]] = None,
-    expecting_exception: bool = False,
+    expecting_exception: bool | None = None,  # Deprecated, use expected_outcome instead.
+    *,
+    expected_outcome: ExpectedOutcome | None = None,
 ) -> EntrypointOutput:
     """Read records from a stream."""
     _catalog = catalog(stream_name, sync_mode)
-    return read(source, config, _catalog, state, expecting_exception)
+    return read(
+        source,
+        config,
+        _catalog,
+        state,
+        expecting_exception=expecting_exception,  # Deprecated, for backward compatibility.
+        expected_outcome=expected_outcome,
+    )
```

{airbyte_cdk-6.54.9.dist-info → airbyte_cdk-6.54.11.dist-info}/RECORD

```diff
@@ -111,7 +111,7 @@ airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=vCpwX1PVRFP
 airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=WJyA2OYIEgFpVP5Y3o0tIj69AV6IKkn9B16MeXaEItI,6513
 airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
 airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=U1oZKtBaEC6IACmvziY9Wzg7Z8EgF4ZuR7NwvjlB_Sk,1255
-airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=
+airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=araWk039M89c6lQHEUltfM1VI_xGw9gZIDXRWWF6SkM,22591
 airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=Rbe6lJLTtZ5en33MwZiB9-H9-AwDMNHgwBZs8EqhYqk,22172
 airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
 airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=2tsE6FgXzemf4fZZ4uGtd8QpRBl9GJ2CRqSNJE5p0EI,16077
@@ -125,7 +125,7 @@ airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=h36
 airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=myVaNtFqxOAwrbp93rgd1dhkqyuvXvET9rsimQ89ktc,1873
 airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=CQkHqGlfa87G6VYMtBAQWin7ECKpfMdrDcg0JO5_rhc,3212
 airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=9IoeuWam3L6GyN10L6U8xNWXmkt9cnahSDNkez1OmFY,982
-airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=
+airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=oFGKs3oX0xO6DOL4E9x8rhxwbEoRcgx4HJVIL1RQ9c4,7269
 airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=xRcmjape4_WGmKMJpmBsKY0k4OHJDM46Hv3V-dlSz3w,5640
 airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=ciXtM7Qhus170ZwP8B9Ac4VScX2FPBYvlbZRv_r376U,24692
 airbyte_cdk/sources/declarative/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -378,29 +378,30 @@ airbyte_cdk/sql/shared/sql_processor.py,sha256=1CwfC3fp9dWnHBpKtly7vGduf9ho_Mahi
 airbyte_cdk/sql/types.py,sha256=XEIhRAo_ASd0kVLBkdLf5bHiRhNple-IJrC9TibcDdY,5880
 airbyte_cdk/test/__init__.py,sha256=f_XdkOg4_63QT2k3BbKY34209lppwgw-svzfZstQEq4,199
 airbyte_cdk/test/catalog_builder.py,sha256=-y05Cz1x0Dlk6oE9LSKhCozssV2gYBNtMdV5YYOPOtk,3015
-airbyte_cdk/test/entrypoint_wrapper.py,sha256=
+airbyte_cdk/test/entrypoint_wrapper.py,sha256=BOqE_cj33UNNIQuJDxroY9vzmX5tsdGx1YKST8pE9oA,11307
 airbyte_cdk/test/mock_http/__init__.py,sha256=jE5kC6CQ0OXkTqKhciDnNVZHesBFVIA2YvkdFGwva7k,322
 airbyte_cdk/test/mock_http/matcher.py,sha256=4Qj8UnJKZIs-eodshryce3SN1Ayc8GZpBETmP6hTEyc,1446
 airbyte_cdk/test/mock_http/mocker.py,sha256=XgsjMtVoeMpRELPyALgrkHFauH9H5irxrz1Kcxh2yFY,8013
 airbyte_cdk/test/mock_http/request.py,sha256=tdB8cqk2vLgCDTOKffBKsM06llYs4ZecgtH6DKyx6yY,4112
 airbyte_cdk/test/mock_http/response.py,sha256=s4-cQQqTtmeej0pQDWqmG0vUWpHS-93lIWMpW3zSVyU,662
 airbyte_cdk/test/mock_http/response_builder.py,sha256=F-v7ebftqGj7YVIMLKdodmU9U8Dq8aIyllWGo2NGwHc,8331
+airbyte_cdk/test/models/__init__.py,sha256=5f5oFcuUA3dyNTfvvTWav2pTD8WX4nznObKgMTmvdus,290
+airbyte_cdk/test/models/outcome.py,sha256=TdLEnRZv1QjlbI0xdsJqA2rfIvSVRJWXS4TNS9NbKZQ,2135
+airbyte_cdk/test/models/scenario.py,sha256=rlU3ykv6YCQsDxEtHMcDQJS_HQYa8sxfWSM7xiHWSjU,4710
 airbyte_cdk/test/standard_tests/__init__.py,sha256=YS2bghoGmQ-4GNIbe6RuEmvV-V1kpM1OyxTpebrs0Ig,1338
-airbyte_cdk/test/standard_tests/_job_runner.py,sha256=
-airbyte_cdk/test/standard_tests/connector_base.py,sha256=
-airbyte_cdk/test/standard_tests/declarative_sources.py,sha256=
+airbyte_cdk/test/standard_tests/_job_runner.py,sha256=vTuLJiJv-LBqXLfcr7RaTm0HBH4y7L8U6tDv3qYdTkg,6195
+airbyte_cdk/test/standard_tests/connector_base.py,sha256=YA_frtCav8ygg9XlHY9Yxi-KOkHBsaiXZGIp4ULa2D4,7107
+airbyte_cdk/test/standard_tests/declarative_sources.py,sha256=4lmXKVJEhYeZAYaaXODwkn-DoJt_V--Thbea0kzOqdc,3502
 airbyte_cdk/test/standard_tests/destination_base.py,sha256=MARZip2mdo_PzGvzf2VBTAfrP4tbjrJYgeJUApnAArA,731
-airbyte_cdk/test/standard_tests/models/__init__.py,sha256=bS25WlzQwPNxpU5DHtUDZo1DuXd0LkEv9qesNhY1jkY,135
-airbyte_cdk/test/standard_tests/models/scenario.py,sha256=kvuc5oIH-TA3TS7xtiz1RQ8tcY_WC0aI6-TsDQAqhUA,2710
 airbyte_cdk/test/standard_tests/pytest_hooks.py,sha256=4OMy2jNQThS8y7Tyj8MiMy2-SWjoefD4lGo-zQmCUfU,1886
-airbyte_cdk/test/standard_tests/source_base.py,sha256=
-airbyte_cdk/test/standard_tests/util.py,sha256=
+airbyte_cdk/test/standard_tests/source_base.py,sha256=o5N9a1lPgx8IVHgXlZcRdKmE4hhgQtTgYoAOA79MgS4,6421
+airbyte_cdk/test/standard_tests/util.py,sha256=340vihLJ_2rEnq91dRHutbPM4ssm2ze1uq01cOI5vF4,2937
 airbyte_cdk/test/state_builder.py,sha256=kLPql9lNzUJaBg5YYRLJlY_Hy5JLHJDVyKPMZMoYM44,946
 airbyte_cdk/test/utils/__init__.py,sha256=Hu-1XT2KDoYjDF7-_ziDwv5bY3PueGjANOCbzeOegDg,57
 airbyte_cdk/test/utils/data.py,sha256=CkCR1_-rujWNmPXFR1IXTMwx1rAl06wAyIKWpDcN02w,820
 airbyte_cdk/test/utils/http_mocking.py,sha256=F2hpm2q4ijojQN5u2XtgTAp8aNgHgJ64eZNkZ9BW0ig,550
 airbyte_cdk/test/utils/manifest_only_fixtures.py,sha256=7HqCmsfNaAIjq2o9V9f-rgQdksncDZFfMifQpFzlLXo,2104
-airbyte_cdk/test/utils/reading.py,sha256=
+airbyte_cdk/test/utils/reading.py,sha256=9ReW2uoITE7NCpVBKn6EfM9yi9_SvqhsNLb-5LODka8,1289
 airbyte_cdk/utils/__init__.py,sha256=qhnC02DbS35OY8oB_tkYHwZzHed2FZeBM__G8IOgckY,347
 airbyte_cdk/utils/airbyte_secrets_utils.py,sha256=wEtRnl5KRhN6eLJwrDrC4FJjyqt_4vkA1F65mdl8c24,3142
 airbyte_cdk/utils/analytics_message.py,sha256=bi3uugQ2NjecnwTnz63iD5D1M8ZR8mXPbdtt6w5cC4s,653
@@ -420,9 +421,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
-airbyte_cdk-6.54.
-airbyte_cdk-6.54.
-airbyte_cdk-6.54.
-airbyte_cdk-6.54.
-airbyte_cdk-6.54.
-airbyte_cdk-6.54.
+airbyte_cdk-6.54.11.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.54.11.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+airbyte_cdk-6.54.11.dist-info/METADATA,sha256=GAc-NcvDVzTVP4sUrNphva1hP0kyZt-ANEM5Qz5CgPY,6344
+airbyte_cdk-6.54.11.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.54.11.dist-info/entry_points.txt,sha256=AKWbEkHfpzzk9nF9tqBUaw1MbvTM4mGtEzmZQm0ZWvM,139
+airbyte_cdk-6.54.11.dist-info/RECORD,,
```