airbyte-cdk 6.6.8rc13__py3-none-any.whl → 6.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py +3 -3
  2. airbyte_cdk/sources/concurrent_source/concurrent_source.py +1 -1
  3. airbyte_cdk/sources/declarative/extractors/record_filter.py +6 -48
  4. airbyte_cdk/sources/declarative/extractors/record_selector.py +31 -4
  5. airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py +5 -2
  6. airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py +5 -2
  7. airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py +1 -3
  8. airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +4 -0
  9. airbyte_cdk/sources/declarative/requesters/paginators/strategies/stop_condition.py +8 -3
  10. airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +11 -4
  11. airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py +6 -3
  12. airbyte_cdk/sources/file_based/stream/concurrent/adapters.py +3 -3
  13. airbyte_cdk/sources/file_based/stream/concurrent/cursor/abstract_concurrent_file_based_cursor.py +1 -1
  14. airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py +1 -1
  15. airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py +1 -1
  16. airbyte_cdk/sources/streams/concurrent/adapters.py +6 -2
  17. airbyte_cdk/sources/streams/concurrent/cursor.py +30 -6
  18. airbyte_cdk/sources/streams/concurrent/partitions/partition.py +1 -1
  19. airbyte_cdk/sources/streams/concurrent/partitions/types.py +1 -1
  20. airbyte_cdk/sources/types.py +14 -1
  21. {airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/METADATA +1 -1
  22. {airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/RECORD +25 -26
  23. airbyte_cdk/sources/streams/concurrent/partitions/record.py +0 -35
  24. {airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/LICENSE.txt +0 -0
  25. {airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/WHEEL +0 -0
  26. {airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/entry_points.txt +0 -0
airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py
@@ -17,8 +17,8 @@ from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStrea
 from airbyte_cdk.sources.streams.concurrent.partition_enqueuer import PartitionEnqueuer
 from airbyte_cdk.sources.streams.concurrent.partition_reader import PartitionReader
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 from airbyte_cdk.sources.streams.concurrent.partitions.types import PartitionCompleteSentinel
+from airbyte_cdk.sources.types import Record
 from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message
 from airbyte_cdk.sources.utils.slice_logger import SliceLogger
 from airbyte_cdk.utils import AirbyteTracedException
@@ -147,11 +147,11 @@ class ConcurrentReadProcessor:
         # AbstractStreams are expected to return data as they are expected.
         # Any transformation on the data should be done before reaching this point
         message = stream_data_to_airbyte_message(
-            stream_name=record.partition.stream_name(),
+            stream_name=record.stream_name,
             data_or_message=record.data,
             is_file_transfer_message=record.is_file_transfer_message,
         )
-        stream = self._stream_name_to_instance[record.partition.stream_name()]
+        stream = self._stream_name_to_instance[record.stream_name]
 
         if message.type == MessageType.RECORD:
             if self._record_counter[stream.name] == 0:
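Reviewer note: with stream_name now carried on the record itself, ConcurrentReadProcessor no longer reaches through record.partition to find the owning stream. A minimal sketch of that dispatch (the registry contents below are made up, not from the CDK):

    from typing import Any, Mapping

    class _SketchRecord:
        """Stand-in for the new Record: data plus the name of the stream it belongs to."""

        def __init__(self, data: Mapping[str, Any], stream_name: str) -> None:
            self.data = data
            self.stream_name = stream_name

    # The processor keeps a name -> stream registry and indexes it directly with record.stream_name.
    stream_name_to_instance = {"users": "<users stream>", "orders": "<orders stream>"}
    record = _SketchRecord(data={"id": 7}, stream_name="orders")
    print(stream_name_to_instance[record.stream_name])  # -> <orders stream>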
airbyte_cdk/sources/concurrent_source/concurrent_source.py
@@ -18,11 +18,11 @@ from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStrea
 from airbyte_cdk.sources.streams.concurrent.partition_enqueuer import PartitionEnqueuer
 from airbyte_cdk.sources.streams.concurrent.partition_reader import PartitionReader
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 from airbyte_cdk.sources.streams.concurrent.partitions.types import (
     PartitionCompleteSentinel,
     QueueItem,
 )
+from airbyte_cdk.sources.types import Record
 from airbyte_cdk.sources.utils.slice_logger import DebugSliceLogger, SliceLogger
 
 
airbyte_cdk/sources/declarative/extractors/record_filter.py
@@ -1,7 +1,6 @@
 #
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 #
-import datetime
 from dataclasses import InitVar, dataclass
 from typing import Any, Iterable, Mapping, Optional, Union
 
@@ -11,7 +10,7 @@ from airbyte_cdk.sources.declarative.incremental import (
     PerPartitionWithGlobalCursor,
 )
 from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean
-from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
+from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
 
 
 @dataclass
@@ -68,20 +67,6 @@ class ClientSideIncrementalRecordFilterDecorator(RecordFilter):
         self._date_time_based_cursor = date_time_based_cursor
         self._substream_cursor = substream_cursor
 
-    @property
-    def _cursor_field(self) -> str:
-        return self._date_time_based_cursor.cursor_field.eval(self._date_time_based_cursor.config)  # type: ignore # eval returns a string in this context
-
-    @property
-    def _start_date_from_config(self) -> datetime.datetime:
-        return self._date_time_based_cursor._start_datetime.get_datetime(
-            self._date_time_based_cursor.config
-        )
-
-    @property
-    def _end_datetime(self) -> datetime.datetime:
-        return self._date_time_based_cursor.select_best_end_datetime()
-
     def filter_records(
         self,
         records: Iterable[Mapping[str, Any]],
@@ -89,16 +74,14 @@ class ClientSideIncrementalRecordFilterDecorator(RecordFilter):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Iterable[Mapping[str, Any]]:
-        state_value = self._get_state_value(
-            stream_state, stream_slice or StreamSlice(partition={}, cursor_slice={})
-        )
-        filter_date: datetime.datetime = self._get_filter_date(state_value)
         records = (
             record
             for record in records
-            if self._end_datetime
-            >= self._date_time_based_cursor.parse_date(record[self._cursor_field])
-            >= filter_date
+            if (self._substream_cursor or self._date_time_based_cursor).should_be_synced(
+                # Record is created on the fly to align with cursors interface; stream name is ignored as we don't need it here
+                # Record stream name is empty cause it is not used durig the filtering
+                Record(data=record, associated_slice=stream_slice, stream_name="")
+            )
         )
         if self.condition:
             records = super().filter_records(
@@ -108,28 +91,3 @@ class ClientSideIncrementalRecordFilterDecorator(RecordFilter):
                 next_page_token=next_page_token,
             )
         yield from records
-
-    def _get_state_value(
-        self, stream_state: StreamState, stream_slice: StreamSlice
-    ) -> Optional[str]:
-        """
-        Return cursor_value or None in case it was not found.
-        Cursor_value may be empty if:
-        1. It is an initial sync => no stream_state exist at all.
-        2. In Parent-child stream, and we already make initial sync, so stream_state is present.
-        During the second read, we receive one extra record from parent and therefore no stream_state for this record will be found.
-
-        :param StreamState stream_state: State
-        :param StreamSlice stream_slice: Current Stream slice
-        :return Optional[str]: cursor_value in case it was found, otherwise None.
-        """
-        state = (self._substream_cursor or self._date_time_based_cursor).select_state(stream_slice)
-
-        return state.get(self._cursor_field) if state else None
-
-    def _get_filter_date(self, state_value: Optional[str]) -> datetime.datetime:
-        start_date_parsed = self._start_date_from_config
-        if state_value:
-            return max(start_date_parsed, self._date_time_based_cursor.parse_date(state_value))
-        else:
-            return start_date_parsed
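Reviewer note: the decorator no longer parses dates and computes a filter window itself; it builds a throwaway Record per item and asks the active cursor's should_be_synced. A self-contained sketch of the resulting control flow, using a simplified stand-in cursor rather than the real DatetimeBasedCursor / PerPartitionWithGlobalCursor:

    from typing import Any, Iterable, Mapping

    class _SketchCursor:
        """Illustrative stand-in: the real cursors compare the record's cursor value to the sync window."""

        def __init__(self, start: int, end: int) -> None:
            self._start, self._end = start, end

        def should_be_synced(self, record: Mapping[str, Any]) -> bool:
            return self._start <= record["updated_at"] <= self._end

    def filter_records(
        records: Iterable[Mapping[str, Any]], cursor: _SketchCursor
    ) -> Iterable[Mapping[str, Any]]:
        # Mirrors the new decorator logic: the cursor decides, record by record.
        return (record for record in records if cursor.should_be_synced(record))

    data = [{"updated_at": 5}, {"updated_at": 15}, {"updated_at": 25}]
    print(list(filter_records(data, _SketchCursor(start=10, end=30))))
    # -> [{'updated_at': 15}, {'updated_at': 25}]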
airbyte_cdk/sources/declarative/extractors/record_selector.py
@@ -3,13 +3,14 @@
 #
 
 from dataclasses import InitVar, dataclass, field
-from typing import Any, Iterable, List, Mapping, Optional
+from typing import Any, Iterable, List, Mapping, Optional, Union
 
 import requests
 
 from airbyte_cdk.sources.declarative.extractors.http_selector import HttpSelector
 from airbyte_cdk.sources.declarative.extractors.record_extractor import RecordExtractor
 from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter
+from airbyte_cdk.sources.declarative.interpolation import InterpolatedString
 from airbyte_cdk.sources.declarative.models import SchemaNormalization
 from airbyte_cdk.sources.declarative.transformations import RecordTransformation
 from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
@@ -38,11 +39,34 @@ class RecordSelector(HttpSelector):
     config: Config
     parameters: InitVar[Mapping[str, Any]]
     schema_normalization: TypeTransformer
+    name: str
+    _name: Union[InterpolatedString, str] = field(init=False, repr=False, default="")
     record_filter: Optional[RecordFilter] = None
     transformations: List[RecordTransformation] = field(default_factory=lambda: [])
 
     def __post_init__(self, parameters: Mapping[str, Any]) -> None:
         self._parameters = parameters
+        self._name = (
+            InterpolatedString(self._name, parameters=parameters)
+            if isinstance(self._name, str)
+            else self._name
+        )
+
+    @property  # type: ignore
+    def name(self) -> str:
+        """
+        :return: Stream name
+        """
+        return (
+            str(self._name.eval(self.config))
+            if isinstance(self._name, InterpolatedString)
+            else self._name
+        )
+
+    @name.setter
+    def name(self, value: str) -> None:
+        if not isinstance(value, property):
+            self._name = value
 
     def select_records(
         self,
@@ -86,7 +110,7 @@ class RecordSelector(HttpSelector):
         transformed_data = self._transform(filtered_data, stream_state, stream_slice)
         normalized_data = self._normalize_by_schema(transformed_data, schema=records_schema)
         for data in normalized_data:
-            yield Record(data, stream_slice)
+            yield Record(data=data, stream_name=self.name, associated_slice=stream_slice)
 
     def _normalize_by_schema(
         self, records: Iterable[Mapping[str, Any]], schema: Optional[Mapping[str, Any]]
@@ -126,6 +150,9 @@ class RecordSelector(HttpSelector):
         for record in records:
             for transformation in self.transformations:
                 transformation.transform(
-                    record, config=self.config, stream_state=stream_state, stream_slice=stream_slice
-                )  # type: ignore  # record has type Mapping[str, Any], but Dict[str, Any] expected
+                    record,  # type: ignore  # record has type Mapping[str, Any], but Dict[str, Any] expected
+                    config=self.config,
+                    stream_state=stream_state,
+                    stream_slice=stream_slice,
+                )
             yield record
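Reviewer note: the name / _name pair follows the CDK's usual pattern for interpolatable dataclass fields: the public field is shadowed by a property, and __post_init__ wraps plain strings so they can be evaluated against the config. A standalone sketch of that pattern, with a simplified Interpolated class standing in for the real InterpolatedString:

    from dataclasses import InitVar, dataclass, field
    from typing import Any, Mapping, Union

    class Interpolated:
        """Simplified stand-in for InterpolatedString (illustrative only)."""

        def __init__(self, template: str, parameters: Mapping[str, Any]) -> None:
            self._template, self._parameters = template, parameters

        def eval(self, config: Mapping[str, Any]) -> str:
            return self._template.format(**config, **self._parameters)

    @dataclass
    class Selector:
        config: Mapping[str, Any]
        parameters: InitVar[Mapping[str, Any]]
        name: str
        _name: Union[Interpolated, str] = field(init=False, repr=False, default="")

        def __post_init__(self, parameters: Mapping[str, Any]) -> None:
            # Plain strings become interpolatable templates, evaluated lazily via the property.
            self._name = (
                Interpolated(self._name, parameters=parameters)
                if isinstance(self._name, str)
                else self._name
            )

        @property  # type: ignore
        def name(self) -> str:
            return self._name.eval(self.config) if isinstance(self._name, Interpolated) else self._name

        @name.setter
        def name(self, value: str) -> None:
            # When the dataclass default (the property object itself) is assigned, ignore it.
            if not isinstance(value, property):
                self._name = value

    selector = Selector(config={"entity": "users"}, parameters={}, name="{entity}_stream")
    print(selector.name)  # -> users_stream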
airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py
@@ -340,8 +340,11 @@ class GlobalSubstreamCursor(DeclarativeCursor):
     @staticmethod
     def _convert_record_to_cursor_record(record: Record) -> Record:
         return Record(
-            record.data,
-            StreamSlice(partition={}, cursor_slice=record.associated_slice.cursor_slice)
+            data=record.data,
+            stream_name=record.stream_name,
+            associated_slice=StreamSlice(
+                partition={}, cursor_slice=record.associated_slice.cursor_slice
+            )
             if record.associated_slice
             else None,
         )
airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py
@@ -325,8 +325,11 @@ class PerPartitionCursor(DeclarativeCursor):
     @staticmethod
     def _convert_record_to_cursor_record(record: Record) -> Record:
         return Record(
-            record.data,
-            StreamSlice(partition={}, cursor_slice=record.associated_slice.cursor_slice)
+            data=record.data,
+            stream_name=record.stream_name,
+            associated_slice=StreamSlice(
+                partition={}, cursor_slice=record.associated_slice.cursor_slice
+            )
             if record.associated_slice
             else None,
         )
airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py
@@ -194,9 +194,7 @@ class PerPartitionWithGlobalCursor(DeclarativeCursor):
         )
 
     def should_be_synced(self, record: Record) -> bool:
-        return self._global_cursor.should_be_synced(
-            record
-        ) or self._per_partition_cursor.should_be_synced(record)
+        return self._get_active_cursor().should_be_synced(record)
 
     def is_greater_than_or_equal(self, first: Record, second: Record) -> bool:
         return self._global_cursor.is_greater_than_or_equal(first, second)
airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py
@@ -1781,6 +1781,7 @@ class ModelToComponentFactory:
         self,
         model: RecordSelectorModel,
         config: Config,
+        name: str,
         *,
         transformations: List[RecordTransformation],
         decoder: Optional[Decoder] = None,
@@ -1811,6 +1812,7 @@ class ModelToComponentFactory:
 
         return RecordSelector(
             extractor=extractor,
+            name=name,
             config=config,
             record_filter=record_filter,
             transformations=transformations,
@@ -1881,6 +1883,7 @@ class ModelToComponentFactory:
         )
         record_selector = self._create_component_from_model(
             model=model.record_selector,
+            name=name,
            config=config,
            decoder=decoder,
            transformations=transformations,
@@ -2035,6 +2038,7 @@ class ModelToComponentFactory:
            requester=download_requester,
            record_selector=RecordSelector(
                extractor=ResponseToFileExtractor(),
+                name=name,
                record_filter=None,
                transformations=[],
                schema_normalization=TypeTransformer(TransformConfig.NoTransform),
airbyte_cdk/sources/declarative/requesters/paginators/strategies/stop_condition.py
@@ -11,6 +11,7 @@ from airbyte_cdk.sources.declarative.incremental.declarative_cursor import Decla
 from airbyte_cdk.sources.declarative.requesters.paginators.strategies.pagination_strategy import (
     PaginationStrategy,
 )
+from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor
 from airbyte_cdk.sources.types import Record
 
 
@@ -26,7 +27,11 @@ class PaginationStopCondition(ABC):
 
 
 class CursorStopCondition(PaginationStopCondition):
-    def __init__(self, cursor: DeclarativeCursor):
+    def __init__(
+        self,
+        cursor: DeclarativeCursor
+        | ConcurrentCursor,  # migrate to use both old and concurrent versions
+    ):
         self._cursor = cursor
 
     def is_met(self, record: Record) -> bool:
@@ -47,8 +52,8 @@ class StopConditionPaginationStrategyDecorator(PaginationStrategy):
             return None
         return self._delegate.next_page_token(response, last_page_size, last_record)
 
-    def reset(self) -> None:
-        self._delegate.reset()
+    def reset(self, reset_value: Optional[Any] = None) -> None:
+        self._delegate.reset(reset_value)
 
     def get_page_size(self) -> Optional[int]:
         return self._delegate.get_page_size()
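Reviewer note: the decorator's reset() now threads an optional reset value through to the delegate, and the stop condition can be driven by either a declarative or a concurrent cursor. A trimmed sketch of the decorator pattern with illustrative stand-ins (none of these class names come from the CDK):

    from typing import Any, Mapping, Optional

    class _SketchStrategy:
        """Stand-in for a real pagination strategy."""

        def __init__(self) -> None:
            self._page = 0

        def next_page_token(self) -> int:
            self._page += 1
            return self._page

        def reset(self, reset_value: Optional[Any] = None) -> None:
            self._page = reset_value or 0  # the new signature forwards an optional reset value

    class _StopOnOldRecords:
        """Stand-in for CursorStopCondition: stop once the last record is older than the cursor."""

        def __init__(self, cursor_value: int) -> None:
            self._cursor_value = cursor_value

        def is_met(self, record: Mapping[str, Any]) -> bool:
            return record["updated_at"] < self._cursor_value

    class _StopConditionDecorator:
        def __init__(self, delegate: _SketchStrategy, stop_condition: _StopOnOldRecords) -> None:
            self._delegate, self._stop_condition = delegate, stop_condition

        def next_page_token(self, last_record: Mapping[str, Any]) -> Optional[int]:
            if self._stop_condition.is_met(last_record):
                return None  # stop requesting further pages
            return self._delegate.next_page_token()

        def reset(self, reset_value: Optional[Any] = None) -> None:
            self._delegate.reset(reset_value)

    paginator = _StopConditionDecorator(_SketchStrategy(), _StopOnOldRecords(cursor_value=100))
    print(paginator.next_page_token({"updated_at": 150}))  # -> 1 (keep paginating)
    print(paginator.next_page_token({"updated_at": 50}))   # -> None (stop condition met)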
airbyte_cdk/sources/declarative/retrievers/simple_retriever.py
@@ -468,8 +468,9 @@ class SimpleRetriever(Retriever):
         else:
             return None
 
-    @staticmethod
-    def _extract_record(stream_data: StreamData, stream_slice: StreamSlice) -> Optional[Record]:
+    def _extract_record(
+        self, stream_data: StreamData, stream_slice: StreamSlice
+    ) -> Optional[Record]:
         """
         As we allow the output of _read_pages to be StreamData, it can be multiple things. Therefore, we need to filter out and normalize
         to data to streamline the rest of the process.
@@ -478,9 +479,15 @@ class SimpleRetriever(Retriever):
             # Record is not part of `StreamData` but is the most common implementation of `Mapping[str, Any]` which is part of `StreamData`
             return stream_data
         elif isinstance(stream_data, (dict, Mapping)):
-            return Record(dict(stream_data), stream_slice)
+            return Record(
+                data=dict(stream_data), associated_slice=stream_slice, stream_name=self.name
+            )
         elif isinstance(stream_data, AirbyteMessage) and stream_data.record:
-            return Record(stream_data.record.data, stream_slice)
+            return Record(
+                data=stream_data.record.data,  # type:ignore # AirbyteMessage always has record.data
+                associated_slice=stream_slice,
+                stream_name=self.name,
+            )
         return None
 
     # stream_slices is defined with arguments on http stream and fixing this has a long tail of dependencies. Will be resolved by the decoupling of http stream and simple retriever
airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py
@@ -6,9 +6,8 @@ from airbyte_cdk.sources.declarative.retrievers import Retriever
 from airbyte_cdk.sources.message import MessageRepository
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 from airbyte_cdk.sources.streams.concurrent.partitions.stream_slicer import StreamSlicer
-from airbyte_cdk.sources.types import StreamSlice
+from airbyte_cdk.sources.types import Record, StreamSlice
 from airbyte_cdk.utils.slice_hasher import SliceHasher
 
 
@@ -59,7 +58,11 @@ class DeclarativePartition(Partition):
     def read(self) -> Iterable[Record]:
         for stream_data in self._retriever.read_records(self._json_schema, self._stream_slice):
             if isinstance(stream_data, Mapping):
-                yield Record(stream_data, self)
+                yield Record(
+                    data=stream_data,
+                    stream_name=self.stream_name(),
+                    associated_slice=self._stream_slice,
+                )
             else:
                 self._message_repository.emit_message(stream_data)
 
airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
@@ -41,8 +41,8 @@ from airbyte_cdk.sources.streams.concurrent.helpers import (
 )
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 from airbyte_cdk.sources.streams.core import StreamData
+from airbyte_cdk.sources.types import Record
 from airbyte_cdk.sources.utils.schema_helpers import InternalConfig
 from airbyte_cdk.sources.utils.slice_logger import SliceLogger
 
@@ -248,7 +248,7 @@ class FileBasedStreamPartition(Partition):
                     self._stream.transformer.transform(
                         data_to_return, self._stream.get_json_schema()
                     )
-                    yield Record(data_to_return, self)
+                    yield Record(data=data_to_return, stream_name=self.stream_name())
                 elif (
                     isinstance(record_data, AirbyteMessage)
                     and record_data.type == Type.RECORD
@@ -266,7 +266,7 @@ class FileBasedStreamPartition(Partition):
                     else:
                         yield Record(
                             data=record_message_data,
-                            partition=self,
+                            stream_name=self.stream_name(),
                            is_file_transfer_message=self._use_file_transfer(),
                        )
                 else:
airbyte_cdk/sources/file_based/stream/concurrent/cursor/abstract_concurrent_file_based_cursor.py
@@ -12,7 +12,7 @@ from airbyte_cdk.sources.file_based.stream.cursor import AbstractFileBasedCursor
 from airbyte_cdk.sources.file_based.types import StreamState
 from airbyte_cdk.sources.streams.concurrent.cursor import Cursor
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.types import Record
 
 if TYPE_CHECKING:
     from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamPartition
airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py
@@ -19,7 +19,7 @@ from airbyte_cdk.sources.file_based.types import StreamState
 from airbyte_cdk.sources.message.repository import MessageRepository
 from airbyte_cdk.sources.streams.concurrent.cursor import CursorField
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.types import Record
 
 if TYPE_CHECKING:
     from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamPartition
airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py
@@ -16,7 +16,7 @@ from airbyte_cdk.sources.file_based.types import StreamState
 from airbyte_cdk.sources.message import MessageRepository
 from airbyte_cdk.sources.streams import NO_CURSOR_STATE_KEY
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.types import Record
 
 if TYPE_CHECKING:
     from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamPartition
airbyte_cdk/sources/streams/concurrent/adapters.py
@@ -39,8 +39,8 @@ from airbyte_cdk.sources.streams.concurrent.helpers import (
 )
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 from airbyte_cdk.sources.streams.core import StreamData
+from airbyte_cdk.sources.types import Record
 from airbyte_cdk.sources.utils.schema_helpers import InternalConfig
 from airbyte_cdk.sources.utils.slice_logger import SliceLogger
 from airbyte_cdk.utils.slice_hasher import SliceHasher
@@ -294,7 +294,11 @@ class StreamPartition(Partition):
                    self._stream.transformer.transform(
                        data_to_return, self._stream.get_json_schema()
                    )
-                    yield Record(data_to_return, self)
+                    yield Record(
+                        data=data_to_return,
+                        stream_name=self.stream_name(),
+                        associated_slice=self._slice,
+                    )
                 else:
                    self._message_repository.emit_message(record_data)
         except Exception as e:
airbyte_cdk/sources/streams/concurrent/cursor.py
@@ -3,6 +3,7 @@
 #
 
 import functools
+import logging
 from abc import ABC, abstractmethod
 from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Protocol, Tuple
 
@@ -10,12 +11,13 @@ from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
 from airbyte_cdk.sources.message import MessageRepository
 from airbyte_cdk.sources.streams import NO_CURSOR_STATE_KEY
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 from airbyte_cdk.sources.streams.concurrent.partitions.stream_slicer import StreamSlicer
 from airbyte_cdk.sources.streams.concurrent.state_converters.abstract_stream_state_converter import (
     AbstractStreamStateConverter,
 )
-from airbyte_cdk.sources.types import StreamSlice
+from airbyte_cdk.sources.types import Record, StreamSlice
+
+LOGGER = logging.getLogger("airbyte")
 
 
 def _extract_value(mapping: Mapping[str, Any], path: List[str]) -> Any:
@@ -173,9 +175,11 @@ class ConcurrentCursor(Cursor):
         self.start, self._concurrent_state = self._get_concurrent_state(stream_state)
         self._lookback_window = lookback_window
         self._slice_range = slice_range
-        self._most_recent_cursor_value_per_partition: MutableMapping[Partition, Any] = {}
+        self._most_recent_cursor_value_per_partition: MutableMapping[StreamSlice, Any] = {}
         self._has_closed_at_least_one_slice = False
         self._cursor_granularity = cursor_granularity
+        # Flag to track if the logger has been triggered (per stream)
+        self._should_be_synced_logger_triggered = False
 
     @property
     def state(self) -> MutableMapping[str, Any]:
@@ -210,12 +214,12 @@ class ConcurrentCursor(Cursor):
 
     def observe(self, record: Record) -> None:
         most_recent_cursor_value = self._most_recent_cursor_value_per_partition.get(
-            record.partition
+            record.associated_slice
         )
         cursor_value = self._extract_cursor_value(record)
 
         if most_recent_cursor_value is None or most_recent_cursor_value < cursor_value:
-            self._most_recent_cursor_value_per_partition[record.partition] = cursor_value
+            self._most_recent_cursor_value_per_partition[record.associated_slice] = cursor_value
 
     def _extract_cursor_value(self, record: Record) -> Any:
         return self._connector_state_converter.parse_value(self._cursor_field.extract_value(record))
@@ -231,7 +235,9 @@ class ConcurrentCursor(Cursor):
         self._has_closed_at_least_one_slice = True
 
     def _add_slice_to_state(self, partition: Partition) -> None:
-        most_recent_cursor_value = self._most_recent_cursor_value_per_partition.get(partition)
+        most_recent_cursor_value = self._most_recent_cursor_value_per_partition.get(
+            partition.to_slice()
+        )
 
         if self._slice_boundary_fields:
             if "slices" not in self.state:
@@ -442,3 +448,21 @@ class ConcurrentCursor(Cursor):
             return lower + step
         except OverflowError:
             return self._end_provider()
+
+    def should_be_synced(self, record: Record) -> bool:
+        """
+        Determines if a record should be synced based on its cursor value.
+        :param record: The record to evaluate
+
+        :return: True if the record's cursor value falls within the sync boundaries
+        """
+        try:
+            record_cursor_value: CursorValueType = self._extract_cursor_value(record)  # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__
+        except ValueError:
+            if not self._should_be_synced_logger_triggered:
+                LOGGER.warning(
+                    f"Could not find cursor field `{self.cursor_field.cursor_field_key}` in record. The incremental sync will assume it needs to be synced"
+                )
+                self._should_be_synced_logger_triggered = True
+            return True
+        return self.start <= record_cursor_value <= self._end_provider()
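Reviewer note: ConcurrentCursor.should_be_synced gives the concurrent cursor the same record-level boundary check that the declarative cursors already expose: a record is synced when its cursor value falls between the cursor's start and the end provider's value, and records whose cursor field is missing or unparsable are synced anyway, with a single warning per stream. A self-contained sketch of that decision logic, with simplified stand-ins for cursor-value extraction and the end provider:

    import logging
    from typing import Any, Mapping

    logging.basicConfig(level=logging.WARNING)
    LOGGER = logging.getLogger("airbyte")

    class _SketchConcurrentCursor:
        """Stand-in for ConcurrentCursor.should_be_synced (illustrative only)."""

        def __init__(self, cursor_field: str, start: int, end: int) -> None:
            self._cursor_field = cursor_field
            self._start, self._end = start, end
            self._warned = False

        def _extract_cursor_value(self, record: Mapping[str, Any]) -> int:
            try:
                return int(record[self._cursor_field])
            except KeyError as exc:
                raise ValueError(f"missing cursor field {self._cursor_field}") from exc

        def should_be_synced(self, record: Mapping[str, Any]) -> bool:
            try:
                value = self._extract_cursor_value(record)
            except ValueError:
                if not self._warned:
                    LOGGER.warning(
                        "Could not find cursor field `%s` in record; assuming it needs to be synced",
                        self._cursor_field,
                    )
                    self._warned = True
                return True  # fail open: sync records whose cursor value cannot be read
            return self._start <= value <= self._end

    cursor = _SketchConcurrentCursor(cursor_field="updated_at", start=10, end=20)
    print(cursor.should_be_synced({"updated_at": 15}))  # True
    print(cursor.should_be_synced({"updated_at": 25}))  # False
    print(cursor.should_be_synced({"id": 1}))           # True (missing cursor field, warning logged once)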
airbyte_cdk/sources/streams/concurrent/partitions/partition.py
@@ -5,7 +5,7 @@
 from abc import ABC, abstractmethod
 from typing import Any, Iterable, Mapping, Optional
 
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.types import Record
 
 
 class Partition(ABC):
airbyte_cdk/sources/streams/concurrent/partitions/types.py
@@ -8,7 +8,7 @@ from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentin
     PartitionGenerationCompletedSentinel,
 )
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+from airbyte_cdk.sources.types import Record
 
 
 class PartitionCompleteSentinel:
airbyte_cdk/sources/types.py
@@ -6,6 +6,8 @@ from __future__ import annotations
 
 from typing import Any, ItemsView, Iterator, KeysView, List, Mapping, Optional, ValuesView
 
+import orjson
+
 # A FieldPointer designates a path to a field inside a mapping. For example, retrieving ["k1", "k1.2"] in the object {"k1" :{"k1.2":
 # "hello"}] returns "hello"
 FieldPointer = List[str]
@@ -15,9 +17,17 @@ StreamState = Mapping[str, Any]
 
 
 class Record(Mapping[str, Any]):
-    def __init__(self, data: Mapping[str, Any], associated_slice: Optional[StreamSlice]):
+    def __init__(
+        self,
+        data: Mapping[str, Any],
+        stream_name: str,
+        associated_slice: Optional[StreamSlice] = None,
+        is_file_transfer_message: bool = False,
+    ):
         self._data = data
         self._associated_slice = associated_slice
+        self.stream_name = stream_name
+        self.is_file_transfer_message = is_file_transfer_message
 
     @property
     def data(self) -> Mapping[str, Any]:
@@ -139,3 +149,6 @@ class StreamSlice(Mapping[str, Any]):
 
     def __json_serializable__(self) -> Any:
         return self._stream_slice
+
+    def __hash__(self) -> int:
+        return hash(orjson.dumps(self._stream_slice, option=orjson.OPT_SORT_KEYS))
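Reviewer note: these two changes are the core of the release: Record now lives in airbyte_cdk.sources.types, carries its stream_name directly (instead of a back-reference to a Partition), and StreamSlice becomes hashable so slices can key the cursor's most-recent-value map. A usage sketch against the new types (assuming airbyte-cdk 6.7.0 is installed; the field values are made up):

    from airbyte_cdk.sources.types import Record, StreamSlice

    # Record is now keyed by stream name rather than by a Partition back-reference.
    slice_ = StreamSlice(partition={"parent_id": "42"}, cursor_slice={"start": "2024-01-01"})
    record = Record(
        data={"id": 1, "updated_at": "2024-01-15"},
        stream_name="issues",
        associated_slice=slice_,
    )

    print(record.stream_name)    # "issues"
    print(record["updated_at"])  # Record still behaves as a Mapping over its data

    # StreamSlice.__hash__ makes slices usable as dictionary keys, which is how
    # ConcurrentCursor now tracks the most recent cursor value per slice.
    most_recent_cursor_value_per_slice = {record.associated_slice: record["updated_at"]}
    print(most_recent_cursor_value_per_slice[slice_])  # "2024-01-15"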
{airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: airbyte-cdk
-Version: 6.6.8rc13
+Version: 6.7.0
 Summary: A framework for writing Airbyte Connectors.
 Home-page: https://airbyte.com
 License: MIT
{airbyte_cdk-6.6.8rc13.dist-info → airbyte_cdk-6.7.0.dist-info}/RECORD
@@ -34,8 +34,8 @@ airbyte_cdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_cdk/sources/__init__.py,sha256=45J83QsFH3Wky3sVapZWg4C58R_i1thm61M06t2c1AQ,1156
 airbyte_cdk/sources/abstract_source.py,sha256=qY0nZzNm-9qVkt-t6s-Y6UYKIk_2zSBSn3Y_IGzJAoA,15633
 airbyte_cdk/sources/concurrent_source/__init__.py,sha256=3D_RJsxQfiLboSCDdNei1Iv-msRp3DXsas6E9kl7dXc,386
-airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py,sha256=cEyZALjsxVx7k9bzcDUpSIXskOqEg5PKn7K3ZMk5U2E,12766
-airbyte_cdk/sources/concurrent_source/concurrent_source.py,sha256=3uiJwkytP8HjY3CPTZtoPF9i0WAJE0K6GREyVZUWPaI,7768
+airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py,sha256=dbDBNcNNg2IZU5pZb3HfZeILU7X5_EhYGSbNqq3JD4I,12711
+airbyte_cdk/sources/concurrent_source/concurrent_source.py,sha256=Bq54JBp8HUz7lGhladoX-dpyscX77Kxbs32bLwWdjdI,7737
 airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py,sha256=f9PIRPWn2tXu0-bxVeYHL2vYdqCzZ_kgpHg5_Ep-cfQ,6103
 airbyte_cdk/sources/concurrent_source/partition_generation_completed_sentinel.py,sha256=z1t-rAZBsqVidv2fpUlPHE9JgyXsITuGk4AMu96mXSQ,696
 airbyte_cdk/sources/concurrent_source/stream_thread_exception.py,sha256=-q6mG2145HKQ28rZGD1bUmjPlIZ1S7-Yhewl8Ntu6xI,764
@@ -80,15 +80,15 @@ airbyte_cdk/sources/declarative/extractors/__init__.py,sha256=YFuL4D4RuuB8E1DNSb
 airbyte_cdk/sources/declarative/extractors/dpath_extractor.py,sha256=wR4Ol4MG2lt5UlqXF5EU_k7qa5cN4_-luu3PJ1PlO3A,3131
 airbyte_cdk/sources/declarative/extractors/http_selector.py,sha256=2zWZ4ewTqQC8VwkjS0xD_u350Km3SiYP7hpOOgiLg5o,1169
 airbyte_cdk/sources/declarative/extractors/record_extractor.py,sha256=XJELMjahAsaomlvQgN2zrNO0DJX0G0fr9r682gUz7Pg,691
-airbyte_cdk/sources/declarative/extractors/record_filter.py,sha256=baG_e7MHZI5ggYi2CAkad3fDULoto1Dl-gOGC_X8Pnk,5280
-airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=2FwsHBYz8Df7DoYmsXwvkphA2moOFdJNT6fd5gHDhmg,5720
+airbyte_cdk/sources/declarative/extractors/record_filter.py,sha256=OJ9xmhNWNwwzxYOeIrDy1GINb1zH9MBy6suC5tm2LSk,3545
+airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=AkXPOWyp741cpYLBl9AbmVmOQmQ2BzZ2XjgsMEB6gGc,6583
 airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=ajE6YPt4Yk970Q9Oj1lvSXH8m7UqvAifgg0pG0QUEzk,6399
 airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=CmZl9ddwMZFo8L7mEl_OFHN3ahIFRSYrJjMbR_cJaFA,1006
 airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=wvaJKHLNkdtzciIYKOAICPepRpQE8rCiFIwwvuGepr8,22153
 airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
-airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=WPCr8-zan_ccAePglmR0yb9AYSIwjPgh157yv4DFVRQ,15773
-airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py,sha256=l9EWEdQNun1lCls2FPUjd--UG6Jm3whbPdHAxg5aQ_Q,15653
-airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py,sha256=RKnlXRsmvd_4UKGFBmNsDyRd3C3sntW6qQLwJChr-3Y,8436
+airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=3_EEZop94bMitZaJd2PF5Q2Xt9v94tYg7p7YJz8tAFc,15869
+airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py,sha256=hElcYijbOHjdLKOMA7W7aizEbf22r7OSApXALP875uI,15749
+airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py,sha256=2YBOA2NnwAeIKlIhSwUB_W-FaGnPcmrG_liY7b4mV2Y,8365
 airbyte_cdk/sources/declarative/incremental/resumable_full_refresh_cursor.py,sha256=10LFv1QPM-agVKl6eaANmEBOfd7gZgBrkoTcMggsieQ,4809
 airbyte_cdk/sources/declarative/interpolation/__init__.py,sha256=tjUJkn3B-iZ-p7RP2c3dVZejrGiQeooGmS5ibWTuUL4,437
 airbyte_cdk/sources/declarative/interpolation/filters.py,sha256=dqf9W6LCnB5aWGLX1BoKxU-waORf1jT03LpJB671msU,3639
@@ -109,7 +109,7 @@ airbyte_cdk/sources/declarative/parsers/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQ
 airbyte_cdk/sources/declarative/parsers/custom_exceptions.py,sha256=Rir9_z3Kcd5Es0-LChrzk-0qubAsiK_RSEnLmK2OXm8,553
 airbyte_cdk/sources/declarative/parsers/manifest_component_transformer.py,sha256=jVZ3ZV5YZrmDNIX5cM2mugXmnbH27zHRcD22_3oatpo,8454
 airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py,sha256=IWUOdF03o-aQn0Occo1BJCxU0Pz-QILk5L67nzw2thw,6803
-airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=AwEUpzCgpCIURvEoNOKtG1BiHsC0LYhrgKepKOCJ0tQ,95404
+airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=-CjcKFMKjWRjdVJg8lLJWOYFV--wcCSO6uCMObf4F78,95496
 airbyte_cdk/sources/declarative/partition_routers/__init__.py,sha256=8uGos2u7TFTx_EJBdcjdUGn3Eyx6jUuEa1_VB8UP_dI,631
 airbyte_cdk/sources/declarative/partition_routers/cartesian_product_stream_slicer.py,sha256=c5cuVFM6NFkuQqG8Z5IwkBuwDrvXZN1CunUOM_L0ezg,6892
 airbyte_cdk/sources/declarative/partition_routers/list_partition_router.py,sha256=t7pRdFWfFWJtQQG19c9PVeMODyO2BknRTakpM5U9N-8,4844
@@ -141,7 +141,7 @@ airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_paginati
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/offset_increment.py,sha256=pMPi6iQrhtrI9VRPj218QNM_OqD8lX8P3Tr9yloKoE8,3367
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/page_increment.py,sha256=kQGpfr-dOwarxTIf2S4sHVulBzm8zSwQXBM7rOhkafA,2491
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/pagination_strategy.py,sha256=ABpO4t0UUziBZnyml8UT_NhlF6loekhQji57TpKnaiY,1290
-airbyte_cdk/sources/declarative/requesters/paginators/strategies/stop_condition.py,sha256=LALMWYc2bD64Ig0ITEWNNTdsOpD-rZrAT5nAiF5OGQc,1991
+airbyte_cdk/sources/declarative/requesters/paginators/strategies/stop_condition.py,sha256=-8NwokW-aKwv8DdeHh1ssODTobBYSOmIhH2-IjSjlNA,2213
 airbyte_cdk/sources/declarative/requesters/request_option.py,sha256=_qmv8CLQQ3fERt6BuMZeRu6tZXscPoeARx1VJdWMQ_M,1055
 airbyte_cdk/sources/declarative/requesters/request_options/__init__.py,sha256=DzFS1eh7rj9OSSvgLyS_tBI9lhf3MyiKM8oab-KCkr8,772
 airbyte_cdk/sources/declarative/requesters/request_options/datetime_based_request_options_provider.py,sha256=x8VtwLSefLDoUpEKTBND1apP3Ar-ub0dut4xJjSIXeI,3749
@@ -155,7 +155,7 @@ airbyte_cdk/sources/declarative/requesters/requester.py,sha256=iVVpXQ4KEd9OyZNwm
 airbyte_cdk/sources/declarative/retrievers/__init__.py,sha256=FVQpUGVwp2Gibk4gp07VmLKX5AafUlsZWFSrDpUDuJM,443
 airbyte_cdk/sources/declarative/retrievers/async_retriever.py,sha256=hHfE6fNe2sKKcVbgK6M8R1nOmHGnD2tL276ZO0HB7Yc,4966
 airbyte_cdk/sources/declarative/retrievers/retriever.py,sha256=XPLs593Xv8c5cKMc37XzUAYmzlXd1a7eSsspM-CMuWA,1696
-airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=LCKJCXDTjqILj_1rtB0RxlJEo3b2jTHjM1jGsKUgjlY,23932
+airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=N4swGw5mfuTXJ2R7AKX18CHzizsr69pXwt5uSHLPi48,24172
 airbyte_cdk/sources/declarative/schema/__init__.py,sha256=ul8L9S0-__AMEdbCLHBq-PMEeA928NVp8BB83BMotfU,517
 airbyte_cdk/sources/declarative/schema/default_schema_loader.py,sha256=KTACrIE23a83wsm3Rd9Eb4K6-20lrGqYxTHNp9yxsso,1820
 airbyte_cdk/sources/declarative/schema/inline_schema_loader.py,sha256=bVETE10hRsatRJq3R3BeyRR0wIoK3gcP1gcpVRQ_P5U,464
@@ -164,7 +164,7 @@ airbyte_cdk/sources/declarative/schema/schema_loader.py,sha256=kjt8v0N5wWKA5zyLn
 airbyte_cdk/sources/declarative/spec/__init__.py,sha256=H0UwoRhgucbKBIzg85AXrifybVmfpwWpPdy22vZKVuo,141
 airbyte_cdk/sources/declarative/spec/spec.py,sha256=1vGFWbMA2nj2zSb9e-VChfntI-Ag8SUgcwLkhMfCKUw,1907
 airbyte_cdk/sources/declarative/stream_slicers/__init__.py,sha256=sI9vhc95RwJYOnA0VKjcbtKgFcmAbWjhdWBXFbAijOs,176
-airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py,sha256=5MiyGG6Ek1-tV2RhuPVuwTuwNj9WiG9k6m6cZu7bj84,3384
+airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py,sha256=E7feZ5xkHwFHODq8FSjwdGe291RZoCMCRHT1rWnQ1lI,3463
 airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py,sha256=SOkIPBi2Wu7yxIvA15yFzUAB95a3IzA8LPq5DEqHQQc,725
 airbyte_cdk/sources/declarative/transformations/__init__.py,sha256=CPJ8TlMpiUmvG3624VYu_NfTzxwKcfBjM2Q2wJ7fkSA,919
 airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=r4YdAuAk2bQtNWJMztIIy2CC-NglD9NeK1s1TeO9wkw,5027
@@ -215,11 +215,11 @@ airbyte_cdk/sources/file_based/schema_validation_policies/default_schema_validat
 airbyte_cdk/sources/file_based/stream/__init__.py,sha256=QPDqdgjsabOQD93dSFqHGaFS_3pIwm-chEabZHiPJi0,265
 airbyte_cdk/sources/file_based/stream/abstract_file_based_stream.py,sha256=V_6oReWy83x1lXLhAIx4qkruOqPCX-5ROBQCgzvYPUU,7480
 airbyte_cdk/sources/file_based/stream/concurrent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airbyte_cdk/sources/file_based/stream/concurrent/adapters.py,sha256=EOir_7F3LKJT75H4aORi0jxlX5KgKnOEAU2EedN_Fus,13883
+airbyte_cdk/sources/file_based/stream/concurrent/adapters.py,sha256=L6r58EsffGsyREZN4HCpLnqSoY3Z9kW5kUJkBYV9gDs,13899
 airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py,sha256=AtTntHQgspWt8vZ9cjIjSOO1YpH2OO-D8E78pAViE7k,329
-airbyte_cdk/sources/file_based/stream/concurrent/cursor/abstract_concurrent_file_based_cursor.py,sha256=pCh1X-WTTuJGF9r-QlcTMlkPbpLBeOFkGrOJePtwTeU,1887
-airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py,sha256=CPE_dEmAYK0r-wdI40zTnLtL3B-0DYThuJ1OKOCO8MU,14911
-airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py,sha256=1K4oGJKtPejdVT4qoSgs6nuS0Q4BuKtVKK3iDOmNMlA,3228
+airbyte_cdk/sources/file_based/stream/concurrent/cursor/abstract_concurrent_file_based_cursor.py,sha256=5dYZMLBEbvCyrCT89lCYdm2FdrLPLuxjdpQSVGP5o0w,1856
+airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py,sha256=gRTL-9I3ejjQOpLKd6ixe9rB3kGlubCdhUt9ri6AdAI,14880
+airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py,sha256=V4Dy7o-FVLwCsvHgV8cgCN46vb_nc7Jlfow-D3SXjbU,3197
 airbyte_cdk/sources/file_based/stream/cursor/__init__.py,sha256=MhFB5hOo8sjwvCh8gangaymdg3EJWYt_72brFOZt068,191
 airbyte_cdk/sources/file_based/stream/cursor/abstract_file_based_cursor.py,sha256=om-x3gZFPgWDpi15S9RxZmR36VHnk8sytgN6LlBQhAw,1934
 airbyte_cdk/sources/file_based/stream/cursor/default_file_based_cursor.py,sha256=LgAl6xAgY-QD_KWZe1txkFgZlNe0jZydoTyAcHsP15g,7022
@@ -243,20 +243,19 @@ airbyte_cdk/sources/streams/concurrent/README.md,sha256=0nvgnlCBfZJiPDAofT8yFmUh
 airbyte_cdk/sources/streams/concurrent/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
 airbyte_cdk/sources/streams/concurrent/abstract_stream.py,sha256=sVFzrJq9YVuMalfKqg-_IIlbm_yrKW1eqAG_U-4Jwr4,3892
 airbyte_cdk/sources/streams/concurrent/abstract_stream_facade.py,sha256=QTry1QCBUwJDw1QSCEvz23s7zIEx_7QMxkPq9j-oPIQ,1358
-airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=O0oWyq62OOWKr8GfQjpElPWjEC3-d_zbrliZq_9KpCw,15051
+airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=f2TmcQaDRN9ttiaD_wsgDCCXUG4C_UtIQy19yd49tp0,15176
 airbyte_cdk/sources/streams/concurrent/availability_strategy.py,sha256=xqErZU9v9QTe9Fv-MSJAICABs3Ke27mdA7QpgyFFj8g,2877
-airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=cvgpTQC7X66GQu0huJOpgceECre2bBU0tI_ZrGYBn5E,19468
+airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=D_kQxKAmIwgs3eoJeVZPTjMToRT1N2FGd2RR8RnpX90,20555
 airbyte_cdk/sources/streams/concurrent/default_stream.py,sha256=WdZYzION3q6nIhIIcpFqlovDcouOHdbnB0U1YIDP2Jk,3175
 airbyte_cdk/sources/streams/concurrent/exceptions.py,sha256=JOZ446MCLpmF26r9KfS6OO_6rGjcjgJNZdcw6jccjEI,468
 airbyte_cdk/sources/streams/concurrent/helpers.py,sha256=gtj9p0clZwgnClrIRH6V2Wl0Jwu11Plq-9FP4FU2VQA,1327
 airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py,sha256=2t64b_z9cEPmlHZnjSiMTO8PEtEdiAJDG0JcYOtUqAE,3363
 airbyte_cdk/sources/streams/concurrent/partition_reader.py,sha256=0TIrjbTzYJGdA0AZUzbeIKr0iHbawnoEKVl7bWxOFZY,1760
 airbyte_cdk/sources/streams/concurrent/partitions/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
-airbyte_cdk/sources/streams/concurrent/partitions/partition.py,sha256=FRt6wsdMoXL2gZiAkd4cP-Bi7oM1ZzRDimDh0j98atw,1521
+airbyte_cdk/sources/streams/concurrent/partitions/partition.py,sha256=CmaRcKn8y118No3qvbRV9DBeAUKv17lrVgloR4Y9TwU,1490
 airbyte_cdk/sources/streams/concurrent/partitions/partition_generator.py,sha256=_ymkkBr71_qt1fW0_MUqw96OfNBkeJngXQ09yolEDHw,441
-airbyte_cdk/sources/streams/concurrent/partitions/record.py,sha256=HVGVZ2yF5iaPKxTjRn305lLmYb5I8k7DkQoNIyKA_MA,938
 airbyte_cdk/sources/streams/concurrent/partitions/stream_slicer.py,sha256=nbdkkHoN0NFeSs7YUFfzY1Lg5Jrt8fWY_ln3YrhY-Ko,544
-airbyte_cdk/sources/streams/concurrent/partitions/types.py,sha256=6k83K_dnwHAadkTBPSdWKssTzxVGVLH5DzZFkN6pFr8,1197
+airbyte_cdk/sources/streams/concurrent/partitions/types.py,sha256=frPVvHtY7vLxpGEbMQzNvF1Y52ZVyct9f1DDhGoRjwY,1166
 airbyte_cdk/sources/streams/concurrent/state_converters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_cdk/sources/streams/concurrent/state_converters/abstract_stream_state_converter.py,sha256=CXHUMOhndu-LOKgsnNTItv5s5qrKpmJDeHOzlH1nBy8,6819
 airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py,sha256=wDfBtNH_mnUOjAlIJjxVHpGrb3JKK_zkcRjx0Ma5DRc,7721
@@ -282,7 +281,7 @@ airbyte_cdk/sources/streams/http/requests_native_auth/abstract_token.py,sha256=T
 airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py,sha256=7qqG4HX84DTIBp_C4hrmDbE0ZBzExnZDKl_TapmPzRQ,13616
 airbyte_cdk/sources/streams/http/requests_native_auth/token.py,sha256=h5PTzcdH-RQLeCg7xZ45w_484OPUDSwNWl_iMJQmZoI,2526
 airbyte_cdk/sources/streams/utils/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
-airbyte_cdk/sources/types.py,sha256=q0W2snCfmiGIaiV5L42_4C_r3ObKgUQm9eqCS90VXtg,4558
+airbyte_cdk/sources/types.py,sha256=WWVigI7ZSoQU2TBCzDsHJtoX4Ima9v--lcLyYwUG_cE,4904
 airbyte_cdk/sources/utils/__init__.py,sha256=TTN6VUxVy6Is8BhYQZR5pxJGQh8yH4duXh4O1TiMiEY,118
 airbyte_cdk/sources/utils/casing.py,sha256=QC-gV1O4e8DR4-bhdXieUPKm_JamzslVyfABLYYRSXA,256
 airbyte_cdk/sources/utils/record_helper.py,sha256=EIUPaJuLNrLUuTYgo0JbEsqFGvBYRiDJpWP2QOU-wWU,2184
@@ -331,8 +330,8 @@ airbyte_cdk/utils/slice_hasher.py,sha256=-pHexlNYoWYPnXNH-M7HEbjmeJe9Zk7SJijdQ7d
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=LVc9KbtMeV_z99jWo0Ou8u4l6eBJ0BWNhxj4zrrGKRs,763
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=a6q51tBS3IdtefuOiL1eBwSmnNAXfjFMlMjSIQ_Tl-o,6165
-airbyte_cdk-6.6.8rc13.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
-airbyte_cdk-6.6.8rc13.dist-info/METADATA,sha256=S-r45hkXHXroqgtnHK-8RSxvqOG37klFAMFbbQFqpcQ,13523
-airbyte_cdk-6.6.8rc13.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-airbyte_cdk-6.6.8rc13.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
-airbyte_cdk-6.6.8rc13.dist-info/RECORD,,
+airbyte_cdk-6.7.0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.7.0.dist-info/METADATA,sha256=tgykgp9Fe8vJtANTVCU542AfkDWxpaEhJYidxu9uHbE,13519
+airbyte_cdk-6.7.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.7.0.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
+airbyte_cdk-6.7.0.dist-info/RECORD,,
airbyte_cdk/sources/streams/concurrent/partitions/record.py (deleted)
@@ -1,35 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-from typing import TYPE_CHECKING, Any, Mapping
-
-if TYPE_CHECKING:
-    from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
-
-
-class Record:
-    """
-    Represents a record read from a stream.
-    """
-
-    def __init__(
-        self,
-        data: Mapping[str, Any],
-        partition: "Partition",
-        is_file_transfer_message: bool = False,
-    ):
-        self.data = data
-        self.partition = partition
-        self.is_file_transfer_message = is_file_transfer_message
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, Record):
-            return False
-        return (
-            self.data == other.data
-            and self.partition.stream_name() == other.partition.stream_name()
-        )
-
-    def __repr__(self) -> str:
-        return f"Record(data={self.data}, stream_name={self.partition.stream_name()})"