airbyte-cdk 6.6.0__py3-none-any.whl → 6.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py +2 -1
- airbyte_cdk/sources/declarative/concurrent_declarative_source.py +52 -28
- airbyte_cdk/sources/declarative/manifest_declarative_source.py +2 -2
- airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py +85 -0
- airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py +6 -13
- airbyte_cdk/sources/file_based/stream/concurrent/adapters.py +0 -11
- airbyte_cdk/sources/streams/concurrent/adapters.py +4 -102
- airbyte_cdk/sources/streams/concurrent/cursor.py +50 -17
- airbyte_cdk/sources/streams/concurrent/partitions/partition.py +0 -15
- airbyte_cdk/sources/streams/concurrent/partitions/stream_slicer.py +21 -0
- airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py +7 -0
- airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py +5 -1
- airbyte_cdk/utils/slice_hasher.py +30 -0
- {airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/METADATA +1 -1
- {airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/RECORD +17 -14
- {airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/WHEEL +0 -0
airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py

```diff
@@ -114,7 +114,8 @@ class ConcurrentReadProcessor:

         try:
             if sentinel.is_successful:
-                partition.close()
+                stream = self._stream_name_to_instance[partition.stream_name()]
+                stream.cursor.close_partition(partition)
         except Exception as exception:
             self._flag_exception(partition.stream_name(), exception)
             yield AirbyteTracedException.from_exception(
```
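The completion path for a finished partition now runs through the owning stream's cursor instead of the partition itself (`Partition.close()` and `Partition.is_closed()` are deleted later in this diff). A minimal, self-contained sketch of the new flow; the Stub* classes below are stand-ins for the CDK's stream and cursor types, not its real API:

```python
from typing import Dict


class StubPartition:
    def __init__(self, stream_name: str) -> None:
        self._stream_name = stream_name

    def stream_name(self) -> str:
        return self._stream_name


class StubCursor:
    def close_partition(self, partition: StubPartition) -> None:
        # In the CDK, this is where the cursor observes the closed partition
        # and advances/checkpoints state.
        print(f"closed a partition of {partition.stream_name()}")


class StubStream:
    def __init__(self, cursor: StubCursor) -> None:
        self.cursor = cursor


# The processor looks up the owning stream by name and delegates to its cursor,
# mirroring the two added lines above.
streams: Dict[str, StubStream] = {"users": StubStream(StubCursor())}
partition = StubPartition("users")
streams[partition.stream_name()].cursor.close_partition(partition)
```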
airbyte_cdk/sources/declarative/concurrent_declarative_source.py

```diff
@@ -3,7 +3,7 @@
 #

 import logging
-from typing import Any, Generic, Iterator, List, Mapping, Optional, Tuple, Union
+from typing import Any, Generic, Iterator, List, Mapping, Optional, Tuple, Union, Callable

 from airbyte_cdk.models import (
     AirbyteCatalog,
@@ -27,18 +27,24 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import
 )
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     DatetimeBasedCursor as DatetimeBasedCursorModel,
+    DeclarativeStream as DeclarativeStreamModel,
 )
 from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import (
     ModelToComponentFactory,
+    ComponentDefinition,
 )
 from airbyte_cdk.sources.declarative.requesters import HttpRequester
-from airbyte_cdk.sources.declarative.retrievers import SimpleRetriever
+from airbyte_cdk.sources.declarative.retrievers import SimpleRetriever, Retriever
+from airbyte_cdk.sources.declarative.stream_slicers.declarative_partition_generator import (
+    DeclarativePartitionFactory,
+    StreamSlicerPartitionGenerator,
+)
 from airbyte_cdk.sources.declarative.transformations.add_fields import AddFields
 from airbyte_cdk.sources.declarative.types import ConnectionDefinition
 from airbyte_cdk.sources.source import TState
+from airbyte_cdk.sources.types import Config, StreamState
 from airbyte_cdk.sources.streams import Stream
 from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream
-from airbyte_cdk.sources.streams.concurrent.adapters import CursorPartitionGenerator
 from airbyte_cdk.sources.streams.concurrent.availability_strategy import (
     AlwaysAvailableAvailabilityStrategy,
 )
@@ -213,31 +219,18 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                         )
                     )

-                    # This is an optimization so that we don't invoke any cursor or state management flows within the
-                    # low-code framework because state management is handled through the ConcurrentCursor.
-                    if (
-                        declarative_stream
-                        and declarative_stream.retriever
-                        and isinstance(declarative_stream.retriever, SimpleRetriever)
-                    ):
-                        # Also a temporary hack. In the legacy Stream implementation, as part of the read, set_initial_state() is
-                        # called to instantiate incoming state on the cursor. Although we no longer rely on the legacy low-code cursor
-                        # for concurrent checkpointing, low-code components like StopConditionPaginationStrategyDecorator and
-                        # ClientSideIncrementalRecordFilterDecorator still rely on a DatetimeBasedCursor that is properly initialized
-                        # with state.
-                        if declarative_stream.retriever.cursor:
-                            declarative_stream.retriever.cursor.set_initial_state(
-                                stream_state=stream_state
-                            )
-                        declarative_stream.retriever.cursor = None
-
-                    partition_generator = CursorPartitionGenerator(
-                        stream=declarative_stream,
-                        message_repository=self.message_repository,  # type: ignore  # message_repository is always instantiated with a value by factory
-                        cursor=cursor,
-                        connector_state_converter=connector_state_converter,
-                        cursor_field=[cursor.cursor_field.cursor_field_key],
-                        slice_boundary_fields=cursor.slice_boundary_fields,
+                    partition_generator = StreamSlicerPartitionGenerator(
+                        DeclarativePartitionFactory(
+                            declarative_stream.name,
+                            declarative_stream.get_json_schema(),
+                            self._retriever_factory(
+                                name_to_stream_mapping[declarative_stream.name],
+                                config,
+                                stream_state,
+                            ),
+                            self.message_repository,
+                        ),
+                        cursor,
                     )

                     concurrent_streams.append(
@@ -350,3 +343,34 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                 if stream.stream.name not in concurrent_stream_names
             ]
         )
+
+    def _retriever_factory(
+        self, stream_config: ComponentDefinition, source_config: Config, stream_state: StreamState
+    ) -> Callable[[], Retriever]:
+        def _factory_method() -> Retriever:
+            declarative_stream: DeclarativeStream = self._constructor.create_component(
+                DeclarativeStreamModel,
+                stream_config,
+                source_config,
+                emit_connector_builder_messages=self._emit_connector_builder_messages,
+            )
+
+            # This is an optimization so that we don't invoke any cursor or state management flows within the
+            # low-code framework because state management is handled through the ConcurrentCursor.
+            if (
+                declarative_stream
+                and declarative_stream.retriever
+                and isinstance(declarative_stream.retriever, SimpleRetriever)
+            ):
+                # Also a temporary hack. In the legacy Stream implementation, as part of the read, set_initial_state() is
+                # called to instantiate incoming state on the cursor. Although we no longer rely on the legacy low-code cursor
+                # for concurrent checkpointing, low-code components like StopConditionPaginationStrategyDecorator and
+                # ClientSideIncrementalRecordFilterDecorator still rely on a DatetimeBasedCursor that is properly initialized
+                # with state.
+                if declarative_stream.retriever.cursor:
+                    declarative_stream.retriever.cursor.set_initial_state(stream_state=stream_state)
+                declarative_stream.retriever.cursor = None
+
+            return declarative_stream.retriever
+
+        return _factory_method
```
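`_retriever_factory` returns a zero-argument callable rather than a retriever instance: every partition calls the factory to obtain its own retriever, so mutable components such as a paginator are never shared between threads. A self-contained sketch of the pattern; `StubRetriever` is hypothetical, not the CDK's `Retriever`:

```python
from typing import Callable


class StubRetriever:
    def __init__(self) -> None:
        # Mutable per-sync state; sharing one instance across threads would race.
        self.current_page = 0


def make_retriever_factory() -> Callable[[], StubRetriever]:
    def _factory_method() -> StubRetriever:
        return StubRetriever()  # a fresh, isolated instance per call

    return _factory_method


factory = make_retriever_factory()
# Each partition gets its own retriever, so per-partition reads stay thread-safe.
assert factory() is not factory()
```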
airbyte_cdk/sources/declarative/manifest_declarative_source.py

```diff
@@ -8,7 +8,7 @@ import pkgutil
 import re
 from copy import deepcopy
 from importlib import metadata
-from typing import Any, Dict, Iterator, List, Mapping, Optional, Tuple, Union
+from typing import Any, Dict, Iterator, List, Mapping, Optional, Tuple

 import yaml
 from airbyte_cdk.models import (
@@ -94,7 +94,7 @@ class ManifestDeclarativeSource(DeclarativeSource):
         return self._source_config

     @property
-    def message_repository(self) -> Union[None, MessageRepository]:
+    def message_repository(self) -> MessageRepository:
         return self._message_repository

     @property
```
airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py (new file)

```diff
@@ -0,0 +1,85 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+
+from typing import Iterable, Optional, Mapping, Any, Callable
+
+from airbyte_cdk.sources.declarative.retrievers import Retriever
+from airbyte_cdk.sources.message import MessageRepository
+from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
+from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
+from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.streams.concurrent.partitions.stream_slicer import StreamSlicer
+from airbyte_cdk.sources.types import StreamSlice
+from airbyte_cdk.utils.slice_hasher import SliceHasher
+
+
+class DeclarativePartitionFactory:
+    def __init__(
+        self,
+        stream_name: str,
+        json_schema: Mapping[str, Any],
+        retriever_factory: Callable[[], Retriever],
+        message_repository: MessageRepository,
+    ) -> None:
+        """
+        The DeclarativePartitionFactory takes a retriever_factory and not a retriever directly. The reason is that our components are not
+        thread safe and classes like `DefaultPaginator` may not work because multiple threads can access and modify a shared field across each other.
+        In order to avoid these problems, we will create one retriever per thread which should make the processing thread-safe.
+        """
+        self._stream_name = stream_name
+        self._json_schema = json_schema
+        self._retriever_factory = retriever_factory
+        self._message_repository = message_repository
+
+    def create(self, stream_slice: StreamSlice) -> Partition:
+        return DeclarativePartition(
+            self._stream_name,
+            self._json_schema,
+            self._retriever_factory(),
+            self._message_repository,
+            stream_slice,
+        )
+
+
+class DeclarativePartition(Partition):
+    def __init__(
+        self,
+        stream_name: str,
+        json_schema: Mapping[str, Any],
+        retriever: Retriever,
+        message_repository: MessageRepository,
+        stream_slice: StreamSlice,
+    ):
+        self._stream_name = stream_name
+        self._json_schema = json_schema
+        self._retriever = retriever
+        self._message_repository = message_repository
+        self._stream_slice = stream_slice
+        self._hash = SliceHasher.hash(self._stream_name, self._stream_slice)
+
+    def read(self) -> Iterable[Record]:
+        for stream_data in self._retriever.read_records(self._json_schema, self._stream_slice):
+            if isinstance(stream_data, Mapping):
+                yield Record(stream_data, self)
+            else:
+                self._message_repository.emit_message(stream_data)
+
+    def to_slice(self) -> Optional[Mapping[str, Any]]:
+        return self._stream_slice
+
+    def stream_name(self) -> str:
+        return self._stream_name
+
+    def __hash__(self) -> int:
+        return self._hash
+
+
+class StreamSlicerPartitionGenerator(PartitionGenerator):
+    def __init__(
+        self, partition_factory: DeclarativePartitionFactory, stream_slicer: StreamSlicer
+    ) -> None:
+        self._partition_factory = partition_factory
+        self._stream_slicer = stream_slicer
+
+    def generate(self) -> Iterable[Partition]:
+        for stream_slice in self._stream_slicer.stream_slices():
+            yield self._partition_factory.create(stream_slice)
```
airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py

```diff
@@ -2,18 +2,17 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 #

-from abc import abstractmethod
-from dataclasses import dataclass
-from typing import Iterable
+from abc import ABC

 from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import (
     RequestOptionsProvider,
 )
-from airbyte_cdk.sources.types import StreamSlice
+from airbyte_cdk.sources.streams.concurrent.partitions.stream_slicer import (
+    StreamSlicer as ConcurrentStreamSlicer,
+)


-@dataclass
-class StreamSlicer(RequestOptionsProvider):
+class StreamSlicer(ConcurrentStreamSlicer, RequestOptionsProvider, ABC):
     """
     Slices the stream into a subset of records.
     Slices enable state checkpointing and data retrieval parallelization.
@@ -23,10 +22,4 @@ class StreamSlicer(RequestOptionsProvider):
     See the stream slicing section of the docs for more information.
     """

-    @abstractmethod
-    def stream_slices(self) -> Iterable[StreamSlice]:
-        """
-        Defines stream slices
-
-        :return: List of stream slices
-        """
+    pass
```
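After this change a declarative `StreamSlicer` is simultaneously a concurrent `StreamSlicer` (it enumerates slices) and a `RequestOptionsProvider` (it turns the current slice into request options). A self-contained sketch of that combined contract; the stub base class and the simplified `get_request_params` signature are illustrative only (the CDK's `RequestOptionsProvider` methods take additional keyword arguments):

```python
from abc import ABC, abstractmethod
from typing import Any, Iterable, Mapping, Optional


class StubStreamSlicer(ABC):
    """Stand-in for the merged StreamSlicer contract."""

    @abstractmethod
    def stream_slices(self) -> Iterable[Mapping[str, Any]]: ...

    @abstractmethod
    def get_request_params(
        self, stream_slice: Optional[Mapping[str, Any]]
    ) -> Mapping[str, Any]: ...


class DailySlicer(StubStreamSlicer):
    def stream_slices(self) -> Iterable[Mapping[str, Any]]:
        # One slice per day; the concurrent framework turns each into a partition.
        yield {"date": "2024-01-01"}
        yield {"date": "2024-01-02"}

    def get_request_params(
        self, stream_slice: Optional[Mapping[str, Any]]
    ) -> Mapping[str, Any]:
        # The same object knows how to translate a slice into HTTP request options.
        return {"created_at": stream_slice["date"]} if stream_slice else {}


slicer = DailySlicer()
for s in slicer.stream_slices():
    print(slicer.get_request_params(s))
```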
airbyte_cdk/sources/file_based/stream/concurrent/adapters.py

```diff
@@ -226,7 +226,6 @@ class FileBasedStreamPartition(Partition):
         sync_mode: SyncMode,
         cursor_field: Optional[List[str]],
         state: Optional[MutableMapping[str, Any]],
-        cursor: "AbstractConcurrentFileBasedCursor",
     ):
         self._stream = stream
         self._slice = _slice
@@ -234,8 +233,6 @@ class FileBasedStreamPartition(Partition):
         self._sync_mode = sync_mode
         self._cursor_field = cursor_field
         self._state = state
-        self._cursor = cursor
-        self._is_closed = False

     def read(self) -> Iterable[Record]:
         try:
@@ -289,13 +286,6 @@ class FileBasedStreamPartition(Partition):
             file = self._slice["files"][0]
             return {"files": [file]}

-    def close(self) -> None:
-        self._cursor.close_partition(self)
-        self._is_closed = True
-
-    def is_closed(self) -> bool:
-        return self._is_closed
-
     def __hash__(self) -> int:
         if self._slice:
             # Convert the slice to a string so that it can be hashed
@@ -352,7 +342,6 @@ class FileBasedStreamPartitionGenerator(PartitionGenerator):
                     self._sync_mode,
                     self._cursor_field,
                     self._state,
-                    self._cursor,
                 )
             )
         self._cursor.set_pending_partitions(pending_partitions)
```
airbyte_cdk/sources/streams/concurrent/adapters.py

```diff
@@ -38,15 +38,13 @@ from airbyte_cdk.sources.streams.concurrent.helpers import (
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
 from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
-from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import (
-    DateTimeStreamStateConverter,
-)
 from airbyte_cdk.sources.streams.core import StreamData
-from airbyte_cdk.sources.types import StreamSlice
 from airbyte_cdk.sources.utils.schema_helpers import InternalConfig
 from airbyte_cdk.sources.utils.slice_logger import SliceLogger
 from deprecated.classic import deprecated

+from airbyte_cdk.utils.slice_hasher import SliceHasher
+
 """
 This module contains adapters to help enabling concurrency on Stream objects without needing to migrate to AbstractStream
 """
@@ -96,7 +94,6 @@ class StreamFacade(AbstractStreamFacade[DefaultStream], Stream):
                 else SyncMode.incremental,
                 [cursor_field] if cursor_field is not None else None,
                 state,
-                cursor,
             ),
             name=stream.name,
             namespace=stream.namespace,
@@ -259,7 +256,6 @@ class StreamPartition(Partition):
         sync_mode: SyncMode,
         cursor_field: Optional[List[str]],
         state: Optional[MutableMapping[str, Any]],
-        cursor: Cursor,
     ):
         """
         :param stream: The stream to delegate to
@@ -272,8 +268,7 @@ class StreamPartition(Partition):
         self._sync_mode = sync_mode
         self._cursor_field = cursor_field
         self._state = state
-        self._cursor = cursor
-        self._is_closed = False
+        self._hash = SliceHasher.hash(self._stream.name, self._slice)

     def read(self) -> Iterable[Record]:
         """
@@ -313,23 +308,11 @@ class StreamPartition(Partition):
         return self._slice

     def __hash__(self) -> int:
-        if self._slice:
-            # Convert the slice to a string so that it can be hashed
-            s = json.dumps(self._slice, sort_keys=True, cls=SliceEncoder)
-            return hash((self._stream.name, s))
-        else:
-            return hash(self._stream.name)
+        return self._hash

     def stream_name(self) -> str:
         return self._stream.name

-    def close(self) -> None:
-        self._cursor.close_partition(self)
-        self._is_closed = True
-
-    def is_closed(self) -> bool:
-        return self._is_closed
-
     def __repr__(self) -> str:
         return f"StreamPartition({self._stream.name}, {self._slice})"

@@ -349,7 +332,6 @@ class StreamPartitionGenerator(PartitionGenerator):
         sync_mode: SyncMode,
         cursor_field: Optional[List[str]],
         state: Optional[MutableMapping[str, Any]],
-        cursor: Cursor,
     ):
         """
         :param stream: The stream to delegate to
@@ -360,7 +342,6 @@ class StreamPartitionGenerator(PartitionGenerator):
         self._sync_mode = sync_mode
         self._cursor_field = cursor_field
         self._state = state
-        self._cursor = cursor

     def generate(self) -> Iterable[Partition]:
         for s in self._stream.stream_slices(
@@ -373,85 +354,6 @@ class StreamPartitionGenerator(PartitionGenerator):
                 self._sync_mode,
                 self._cursor_field,
                 self._state,
-                self._cursor,
-            )
-
-
-class CursorPartitionGenerator(PartitionGenerator):
-    """
-    This class generates partitions using the concurrent cursor and iterates through state slices to generate partitions.
-
-    It is used when synchronizing a stream in incremental or full-refresh mode where state information is maintained
-    across partitions. Each partition represents a subset of the stream's data and is determined by the cursor's state.
-    """
-
-    _START_BOUNDARY = 0
-    _END_BOUNDARY = 1
-
-    def __init__(
-        self,
-        stream: Stream,
-        message_repository: MessageRepository,
-        cursor: Cursor,
-        connector_state_converter: DateTimeStreamStateConverter,
-        cursor_field: Optional[List[str]],
-        slice_boundary_fields: Optional[Tuple[str, str]],
-    ):
-        """
-        Initialize the CursorPartitionGenerator with a stream, sync mode, and cursor.
-
-        :param stream: The stream to delegate to for partition generation.
-        :param message_repository: The message repository to use to emit non-record messages.
-        :param sync_mode: The synchronization mode.
-        :param cursor: A Cursor object that maintains the state and the cursor field.
-        """
-        self._stream = stream
-        self.message_repository = message_repository
-        self._sync_mode = SyncMode.full_refresh
-        self._cursor = cursor
-        self._cursor_field = cursor_field
-        self._state = self._cursor.state
-        self._slice_boundary_fields = slice_boundary_fields
-        self._connector_state_converter = connector_state_converter
-
-    def generate(self) -> Iterable[Partition]:
-        """
-        Generate partitions based on the slices in the cursor's state.
-
-        This method iterates through the list of slices found in the cursor's state, and for each slice, it generates
-        a `StreamPartition` object.
-
-        :return: An iterable of StreamPartition objects.
-        """
-
-        start_boundary = (
-            self._slice_boundary_fields[self._START_BOUNDARY]
-            if self._slice_boundary_fields
-            else "start"
-        )
-        end_boundary = (
-            self._slice_boundary_fields[self._END_BOUNDARY]
-            if self._slice_boundary_fields
-            else "end"
-        )
-
-        for slice_start, slice_end in self._cursor.generate_slices():
-            stream_slice = StreamSlice(
-                partition={},
-                cursor_slice={
-                    start_boundary: self._connector_state_converter.output_format(slice_start),
-                    end_boundary: self._connector_state_converter.output_format(slice_end),
-                },
-            )
-
-            yield StreamPartition(
-                self._stream,
-                copy.deepcopy(stream_slice),
-                self.message_repository,
-                self._sync_mode,
-                self._cursor_field,
-                self._state,
-                self._cursor,
             )


```
airbyte_cdk/sources/streams/concurrent/cursor.py

```diff
@@ -11,9 +11,11 @@ from airbyte_cdk.sources.message import MessageRepository
 from airbyte_cdk.sources.streams import NO_CURSOR_STATE_KEY
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.streams.concurrent.partitions.stream_slicer import StreamSlicer
 from airbyte_cdk.sources.streams.concurrent.state_converters.abstract_stream_state_converter import (
     AbstractStreamStateConverter,
 )
+from airbyte_cdk.sources.types import StreamSlice


 def _extract_value(mapping: Mapping[str, Any], path: List[str]) -> Any:
@@ -61,7 +63,7 @@ class CursorField:
         return cursor_value  # type: ignore  # we assume that the value the path points at is a comparable


-class Cursor(ABC):
+class Cursor(StreamSlicer, ABC):
     @property
     @abstractmethod
     def state(self) -> MutableMapping[str, Any]: ...
@@ -88,12 +90,12 @@ class Cursor(ABC):
         """
         raise NotImplementedError()

-    def generate_slices(self) -> Iterable[Tuple[Any, Any]]:
+    def stream_slices(self) -> Iterable[StreamSlice]:
         """
         Default placeholder implementation of generate_slices.
         Subclasses can override this method to provide actual behavior.
         """
-        yield
+        yield StreamSlice(partition={}, cursor_slice={})


 class FinalStateCursor(Cursor):
@@ -184,8 +186,15 @@ class ConcurrentCursor(Cursor):
         return self._cursor_field

     @property
-    def slice_boundary_fields(self) -> Optional[Tuple[str, str]]:
-        return self._slice_boundary_fields
+    def _slice_boundary_fields_wrapper(self) -> Tuple[str, str]:
+        return (
+            self._slice_boundary_fields
+            if self._slice_boundary_fields
+            else (
+                self._connector_state_converter.START_KEY,
+                self._connector_state_converter.END_KEY,
+            )
+        )

     def _get_concurrent_state(
         self, state: MutableMapping[str, Any]
```
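`_slice_boundary_fields_wrapper` prefers explicitly configured boundary field names and otherwise falls back to the state converter's `START_KEY`/`END_KEY`, which become the keys of each emitted `cursor_slice`. A standalone sketch of the same defaulting logic (stub converter, not the CDK's):

```python
from typing import Optional, Tuple


class StubConverter:
    START_KEY = "start"
    END_KEY = "end"


def slice_boundary_fields_wrapper(
    explicit: Optional[Tuple[str, str]], converter: StubConverter
) -> Tuple[str, str]:
    # Explicit configuration wins; otherwise use the converter's canonical keys.
    return explicit if explicit else (converter.START_KEY, converter.END_KEY)


assert slice_boundary_fields_wrapper(None, StubConverter()) == ("start", "end")
assert slice_boundary_fields_wrapper(("since", "until"), StubConverter()) == ("since", "until")
```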
```diff
@@ -299,7 +308,7 @@ class ConcurrentCursor(Cursor):
         """
         self._emit_state_message()

-    def generate_slices(self) -> Iterable[Tuple[CursorValueType, CursorValueType]]:
+    def stream_slices(self) -> Iterable[StreamSlice]:
         """
         Generating slices based on a few parameters:
         * lookback_window: Buffer to remove from END_KEY of the highest slice
@@ -368,7 +377,7 @@ class ConcurrentCursor(Cursor):

     def _split_per_slice_range(
         self, lower: CursorValueType, upper: CursorValueType, upper_is_end: bool
-    ) -> Iterable[Tuple[CursorValueType, CursorValueType]]:
+    ) -> Iterable[StreamSlice]:
         if lower >= upper:
             return

@@ -377,10 +386,22 @@ class ConcurrentCursor(Cursor):

         lower = max(lower, self._start) if self._start else lower
         if not self._slice_range or self._evaluate_upper_safely(lower, self._slice_range) >= upper:
-            if self._cursor_granularity and not upper_is_end:
-                yield lower, upper - self._cursor_granularity
-            else:
-                yield lower, upper
+            start_value, end_value = (
+                (lower, upper - self._cursor_granularity)
+                if self._cursor_granularity and not upper_is_end
+                else (lower, upper)
+            )
+            yield StreamSlice(
+                partition={},
+                cursor_slice={
+                    self._slice_boundary_fields_wrapper[
+                        self._START_BOUNDARY
+                    ]: self._connector_state_converter.output_format(start_value),
+                    self._slice_boundary_fields_wrapper[
+                        self._END_BOUNDARY
+                    ]: self._connector_state_converter.output_format(end_value),
+                },
+            )
         else:
             stop_processing = False
             current_lower_boundary = lower
@@ -389,12 +410,24 @@ class ConcurrentCursor(Cursor):
                     self._evaluate_upper_safely(current_lower_boundary, self._slice_range), upper
                 )
                 has_reached_upper_boundary = current_upper_boundary >= upper
-                if self._cursor_granularity and (
-                    not upper_is_end or not has_reached_upper_boundary
-                ):
-                    yield current_lower_boundary, current_upper_boundary - self._cursor_granularity
-                else:
-                    yield current_lower_boundary, current_upper_boundary
+
+                start_value, end_value = (
+                    (current_lower_boundary, current_upper_boundary - self._cursor_granularity)
+                    if self._cursor_granularity
+                    and (not upper_is_end or not has_reached_upper_boundary)
+                    else (current_lower_boundary, current_upper_boundary)
+                )
+                yield StreamSlice(
+                    partition={},
+                    cursor_slice={
+                        self._slice_boundary_fields_wrapper[
+                            self._START_BOUNDARY
+                        ]: self._connector_state_converter.output_format(start_value),
+                        self._slice_boundary_fields_wrapper[
+                            self._END_BOUNDARY
+                        ]: self._connector_state_converter.output_format(end_value),
+                    },
+                )
                 current_lower_boundary = current_upper_boundary
                 if current_upper_boundary >= upper:
                     stop_processing = True
```
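The net effect on `ConcurrentCursor` is a shape change: instead of `(start, end)` tuples, slices are emitted as `StreamSlice` objects whose `cursor_slice` carries formatted boundary values under the wrapper's keys. A simplified, runnable approximation of `_split_per_slice_range` for datetime cursors; plain dicts stand in for `StreamSlice`, and the granularity handling is condensed to the common case:

```python
from datetime import datetime, timedelta
from typing import Dict, Iterable


def split_per_slice_range(
    lower: datetime, upper: datetime, step: timedelta, granularity: timedelta
) -> Iterable[Dict[str, Dict[str, str]]]:
    current = lower
    while current < upper:
        chunk_end = min(current + step, upper)
        # Subtract the granularity on interior boundaries so adjacent slices
        # do not overlap; the final slice ends exactly at the upper bound.
        end_value = chunk_end - granularity if chunk_end < upper else chunk_end
        yield {
            "partition": {},
            "cursor_slice": {"start": current.isoformat(), "end": end_value.isoformat()},
        }
        current = chunk_end


for s in split_per_slice_range(
    datetime(2024, 1, 1), datetime(2024, 1, 3), timedelta(days=1), timedelta(seconds=1)
):
    print(s["cursor_slice"])
```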
airbyte_cdk/sources/streams/concurrent/partitions/partition.py

```diff
@@ -40,21 +40,6 @@ class Partition(ABC):
         """
         pass

-    @abstractmethod
-    def close(self) -> None:
-        """
-        Closes the partition.
-        """
-        pass
-
-    @abstractmethod
-    def is_closed(self) -> bool:
-        """
-        Returns whether the partition is closed.
-        :return:
-        """
-        pass
-
     @abstractmethod
     def __hash__(self) -> int:
         """
```
airbyte_cdk/sources/streams/concurrent/partitions/stream_slicer.py (new file)

```diff
@@ -0,0 +1,21 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+
+from abc import ABC, abstractmethod
+from typing import Iterable
+
+from airbyte_cdk.sources.types import StreamSlice
+
+
+class StreamSlicer(ABC):
+    """
+    Slices the stream into chunks that can be fetched independently. Slices enable state checkpointing and data retrieval parallelization.
+    """
+
+    @abstractmethod
+    def stream_slices(self) -> Iterable[StreamSlice]:
+        """
+        Defines stream slices
+
+        :return: An iterable of stream slices
+        """
+        pass
```
airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py

```diff
@@ -124,6 +124,13 @@ class AbstractStreamStateConverter(ABC):
         """
         ...

+    @abstractmethod
+    def output_format(self, value: Any) -> Any:
+        """
+        Convert the cursor value type to a JSON valid type.
+        """
+        ...
+
     def merge_intervals(
         self, intervals: List[MutableMapping[str, Any]]
     ) -> List[MutableMapping[str, Any]]:
```
airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py

```diff
@@ -82,7 +82,11 @@ class DateTimeStreamStateConverter(AbstractStreamStateConverter):
         # The start and end are the same to avoid confusion as to whether the records for this slice
         # were actually synced
         slices = [
-            {self.START_KEY: start if start is not None else sync_start, self.END_KEY: sync_start}
+            {
+                self.START_KEY: start if start is not None else sync_start,
+                self.END_KEY: sync_start,
+                self.MOST_RECENT_RECORD_KEY: sync_start,
+            }
         ]

         return sync_start, {
```
airbyte_cdk/utils/slice_hasher.py (new file)

```diff
@@ -0,0 +1,30 @@
+import hashlib
+import json
+from typing import Any, Mapping, Optional, Final
+
+
+class SliceEncoder(json.JSONEncoder):
+    def default(self, obj: Any) -> Any:
+        if hasattr(obj, "__json_serializable__"):
+            return obj.__json_serializable__()
+
+        # Let the base class default method raise the TypeError
+        return super().default(obj)
+
+
+class SliceHasher:
+    _ENCODING: Final = "utf-8"
+
+    @classmethod
+    def hash(cls, stream_name: str, stream_slice: Optional[Mapping[str, Any]] = None) -> int:
+        if stream_slice:
+            try:
+                s = json.dumps(stream_slice, sort_keys=True, cls=SliceEncoder)
+                hash_input = f"{stream_name}:{s}".encode(cls._ENCODING)
+            except TypeError as e:
+                raise ValueError(f"Failed to serialize stream slice: {e}")
+        else:
+            hash_input = stream_name.encode(cls._ENCODING)
+
+        # Use last 8 bytes as 64-bit integer for better distribution
+        return int.from_bytes(hashlib.sha256(hash_input).digest()[-8:], "big")
```
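A short usage sketch of the new `SliceHasher`, assuming airbyte-cdk 6.6.1 is installed. Because the slice is serialized with `json.dumps(..., sort_keys=True)`, key order does not affect the hash:

```python
from airbyte_cdk.utils.slice_hasher import SliceHasher

h1 = SliceHasher.hash("users", {"start": "2024-01-01", "end": "2024-01-31"})
h2 = SliceHasher.hash("users", {"end": "2024-01-31", "start": "2024-01-01"})
assert h1 == h2  # canonical serialization makes the hash order-independent

# Without a slice, only the stream name is hashed.
assert isinstance(SliceHasher.hash("users"), int)
```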
{airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/RECORD (old hashes were not preserved by the diff viewer and are left blank)

```diff
@@ -30,7 +30,7 @@ airbyte_cdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_cdk/sources/__init__.py,sha256=45J83QsFH3Wky3sVapZWg4C58R_i1thm61M06t2c1AQ,1156
 airbyte_cdk/sources/abstract_source.py,sha256=qY0nZzNm-9qVkt-t6s-Y6UYKIk_2zSBSn3Y_IGzJAoA,15633
 airbyte_cdk/sources/concurrent_source/__init__.py,sha256=3D_RJsxQfiLboSCDdNei1Iv-msRp3DXsas6E9kl7dXc,386
-airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py,sha256=
+airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py,sha256=cEyZALjsxVx7k9bzcDUpSIXskOqEg5PKn7K3ZMk5U2E,12766
 airbyte_cdk/sources/concurrent_source/concurrent_source.py,sha256=3uiJwkytP8HjY3CPTZtoPF9i0WAJE0K6GREyVZUWPaI,7768
 airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py,sha256=f9PIRPWn2tXu0-bxVeYHL2vYdqCzZ_kgpHg5_Ep-cfQ,6103
 airbyte_cdk/sources/concurrent_source/partition_generation_completed_sentinel.py,sha256=z1t-rAZBsqVidv2fpUlPHE9JgyXsITuGk4AMu96mXSQ,696
@@ -58,7 +58,7 @@ airbyte_cdk/sources/declarative/checks/check_stream.py,sha256=dAA-UhmMj0WLXCkRQr
 airbyte_cdk/sources/declarative/checks/connection_checker.py,sha256=MBRJo6WJlZQHpIfOGaNOkkHUmgUl_4wDM6VPo41z5Ss,1383
 airbyte_cdk/sources/declarative/concurrency_level/__init__.py,sha256=5XUqrmlstYlMM0j6crktlKQwALek0uiz2D3WdM46MyA,191
 airbyte_cdk/sources/declarative/concurrency_level/concurrency_level.py,sha256=YIwCTCpOr_QSNW4ltQK0yUGWInI8PKNY216HOOegYLk,2101
-airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=
+airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=BpaTXzdf57pqUg1xDlj2xxm-VN74h8c-55xEQUlA_1I,19734
 airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=l9LG7Qm6e5r_qgqfVKnx3mXYtg1I9MmMjomVIPfU4XA,177
 airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=SX9JjdesN1edN2WVUVMzU_ptqp2QB1OnsnjZ4mwcX7w,2579
 airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=8VZJP18eJLabSPP1XBSPDaagUBG6q1ynIiPJy3rE2mc,5344
@@ -95,7 +95,7 @@ airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=LYEZ
 airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=-V5UddGm69UKEB6o_O1EIES9kfY8FV_X4Ji8w1yOuSA,981
 airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=KwTd0oagnZI4tARxnJZlQiDHn1IXqS7dbnRT0rKRAj8,6626
 airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=QgIfSVPHx_MMUCgbQdm-NMpUlp_cpk0OQhoRDFtkrxE,4040
-airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=
+airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=LjOyya1Eh3x3NOO_0MIbiev57OniJUyKkQ3dheT16kI,12896
 airbyte_cdk/sources/declarative/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py,sha256=iNsF3jWCaZAmJYArmDQg0MJgZikk6frh3IfhcMBR_Qc,3924
 airbyte_cdk/sources/declarative/migrations/state_migration.py,sha256=KWPjealMLKSMtajXgkdGgKg7EmTLR-CqqD7UIh0-eDU,794
@@ -160,7 +160,8 @@ airbyte_cdk/sources/declarative/schema/schema_loader.py,sha256=kjt8v0N5wWKA5zyLn
 airbyte_cdk/sources/declarative/spec/__init__.py,sha256=H0UwoRhgucbKBIzg85AXrifybVmfpwWpPdy22vZKVuo,141
 airbyte_cdk/sources/declarative/spec/spec.py,sha256=1vGFWbMA2nj2zSb9e-VChfntI-Ag8SUgcwLkhMfCKUw,1907
 airbyte_cdk/sources/declarative/stream_slicers/__init__.py,sha256=sI9vhc95RwJYOnA0VKjcbtKgFcmAbWjhdWBXFbAijOs,176
-airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py,sha256=
+airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py,sha256=YDk2BmVMAUSyJTM1Clzjk5rdYRJzksMLLxJZdaMxuHE,3384
+airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py,sha256=SOkIPBi2Wu7yxIvA15yFzUAB95a3IzA8LPq5DEqHQQc,725
 airbyte_cdk/sources/declarative/transformations/__init__.py,sha256=CPJ8TlMpiUmvG3624VYu_NfTzxwKcfBjM2Q2wJ7fkSA,919
 airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=Oh4Kqws_K9r4qe75ZZ8aE4ydkKzIVP8sqeSeovCMs4M,5026
 airbyte_cdk/sources/declarative/transformations/keys_to_lower_transformation.py,sha256=RTs5KX4V3hM7A6QN1WlGF21YccTIyNH6qQI9IMb__hw,670
@@ -210,7 +211,7 @@ airbyte_cdk/sources/file_based/schema_validation_policies/default_schema_validat
 airbyte_cdk/sources/file_based/stream/__init__.py,sha256=QPDqdgjsabOQD93dSFqHGaFS_3pIwm-chEabZHiPJi0,265
 airbyte_cdk/sources/file_based/stream/abstract_file_based_stream.py,sha256=hZZSoAMFb--rifpg8B6gZD3I1KRF2KVrWIDlLjiKcpA,7479
 airbyte_cdk/sources/file_based/stream/concurrent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airbyte_cdk/sources/file_based/stream/concurrent/adapters.py,sha256=
+airbyte_cdk/sources/file_based/stream/concurrent/adapters.py,sha256=zaZYfnQsACkVEyR7ZJM-8z3f3aSL0eO69J-vI3n4gkQ,13882
 airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py,sha256=AtTntHQgspWt8vZ9cjIjSOO1YpH2OO-D8E78pAViE7k,329
 airbyte_cdk/sources/file_based/stream/concurrent/cursor/abstract_concurrent_file_based_cursor.py,sha256=pCh1X-WTTuJGF9r-QlcTMlkPbpLBeOFkGrOJePtwTeU,1887
 airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py,sha256=CPE_dEmAYK0r-wdI40zTnLtL3B-0DYThuJ1OKOCO8MU,14911
@@ -238,22 +239,23 @@ airbyte_cdk/sources/streams/concurrent/README.md,sha256=0nvgnlCBfZJiPDAofT8yFmUh
 airbyte_cdk/sources/streams/concurrent/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
 airbyte_cdk/sources/streams/concurrent/abstract_stream.py,sha256=qqK8padYVkZNZ_nYLf4NXAmdNW9K8u_ys-RV-kzZAc8,3891
 airbyte_cdk/sources/streams/concurrent/abstract_stream_facade.py,sha256=QTry1QCBUwJDw1QSCEvz23s7zIEx_7QMxkPq9j-oPIQ,1358
-airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=
+airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=dbRuPkk-t5QWu39F5IjMkbxwtnLyQM4PIJ3EX2C4UwQ,15051
 airbyte_cdk/sources/streams/concurrent/availability_strategy.py,sha256=QjX52lC4YOOvcTEhR0RU9e42yuWtueQkW_I_qpE_uTM,2876
-airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=
+airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=cvgpTQC7X66GQu0huJOpgceECre2bBU0tI_ZrGYBn5E,19468
 airbyte_cdk/sources/streams/concurrent/default_stream.py,sha256=WdZYzION3q6nIhIIcpFqlovDcouOHdbnB0U1YIDP2Jk,3175
 airbyte_cdk/sources/streams/concurrent/exceptions.py,sha256=JOZ446MCLpmF26r9KfS6OO_6rGjcjgJNZdcw6jccjEI,468
 airbyte_cdk/sources/streams/concurrent/helpers.py,sha256=gtj9p0clZwgnClrIRH6V2Wl0Jwu11Plq-9FP4FU2VQA,1327
 airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py,sha256=2t64b_z9cEPmlHZnjSiMTO8PEtEdiAJDG0JcYOtUqAE,3363
 airbyte_cdk/sources/streams/concurrent/partition_reader.py,sha256=0TIrjbTzYJGdA0AZUzbeIKr0iHbawnoEKVl7bWxOFZY,1760
 airbyte_cdk/sources/streams/concurrent/partitions/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
-airbyte_cdk/sources/streams/concurrent/partitions/partition.py,sha256=
+airbyte_cdk/sources/streams/concurrent/partitions/partition.py,sha256=FRt6wsdMoXL2gZiAkd4cP-Bi7oM1ZzRDimDh0j98atw,1521
 airbyte_cdk/sources/streams/concurrent/partitions/partition_generator.py,sha256=_ymkkBr71_qt1fW0_MUqw96OfNBkeJngXQ09yolEDHw,441
 airbyte_cdk/sources/streams/concurrent/partitions/record.py,sha256=HVGVZ2yF5iaPKxTjRn305lLmYb5I8k7DkQoNIyKA_MA,938
+airbyte_cdk/sources/streams/concurrent/partitions/stream_slicer.py,sha256=nbdkkHoN0NFeSs7YUFfzY1Lg5Jrt8fWY_ln3YrhY-Ko,544
 airbyte_cdk/sources/streams/concurrent/partitions/types.py,sha256=6k83K_dnwHAadkTBPSdWKssTzxVGVLH5DzZFkN6pFr8,1197
 airbyte_cdk/sources/streams/concurrent/state_converters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py,sha256=
-airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py,sha256=
+airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py,sha256=CXHUMOhndu-LOKgsnNTItv5s5qrKpmJDeHOzlH1nBy8,6819
+airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py,sha256=n9UPKpSsCu1zjaOKVp6WTuyGSQS2Qh9YWy8myKBpe18,7721
 airbyte_cdk/sources/streams/core.py,sha256=0sNVS--49wtulgSb99wFR6XimaUv_owPFEsR6yxJMNE,31928
 airbyte_cdk/sources/streams/http/__init__.py,sha256=NXaNlkzZMkh5kS8S5ujEaKEE6855sk6_HljF_GFjKZI,311
 airbyte_cdk/sources/streams/http/availability_strategy.py,sha256=sovoGFThZr-doMN9vJvTuJBrvkwQVIO0qTQO64pGZPY,2428
@@ -321,10 +323,11 @@ airbyte_cdk/utils/message_utils.py,sha256=wSkwgZ-TWA0ll8sbNPp5coqHCmLJp0veDcGd7_
 airbyte_cdk/utils/oneof_option_config.py,sha256=N8EmWdYdwt0FM7fuShh6H8nj_r4KEL9tb2DJJtwsPow,1180
 airbyte_cdk/utils/print_buffer.py,sha256=PhMOi0C4Z91kWKrSvCQXcp8qRh1uCimpIdvrg6voZIA,2810
 airbyte_cdk/utils/schema_inferrer.py,sha256=igYTpdi1uqzyj13h5EJli67g1hfwQK8K_jlNTGGeUMY,9860
+airbyte_cdk/utils/slice_hasher.py,sha256=EemcgcQlI8-LPYOPlYv4Qkdjyho79XVLWaUHF5XclWc,1043
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=LVc9KbtMeV_z99jWo0Ou8u4l6eBJ0BWNhxj4zrrGKRs,763
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=89TQdFuYZ1NJgmFpqLzY_T_T_64TpJYmVqs119Bp43g,6164
-airbyte_cdk-6.6.0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
-airbyte_cdk-6.6.0.dist-info/METADATA,sha256=
-airbyte_cdk-6.6.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-airbyte_cdk-6.6.0.dist-info/RECORD,,
+airbyte_cdk-6.6.1.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.6.1.dist-info/METADATA,sha256=y0IouLNV_Hs3TtkCZToJcltP4_NaFV0Yrtzguh5yPvc,13347
+airbyte_cdk-6.6.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.6.1.dist-info/RECORD,,
```
{airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/LICENSE.txt
File without changes

{airbyte_cdk-6.6.0.dist-info → airbyte_cdk-6.6.1.dist-info}/WHEEL
File without changes