airbyte-cdk 6.60.3__py3-none-any.whl → 6.60.4__py3-none-any.whl

This diff compares the contents of two publicly available package versions as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
@@ -468,34 +468,11 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
     def _get_retriever(
         declarative_stream: DeclarativeStream, stream_state: Mapping[str, Any]
     ) -> Retriever:
-        retriever = declarative_stream.retriever
-
-        # This is an optimization so that we don't invoke any cursor or state management flows within the
-        # low-code framework because state management is handled through the ConcurrentCursor.
-        if declarative_stream and isinstance(retriever, SimpleRetriever):
-            # Also a temporary hack. In the legacy Stream implementation, as part of the read,
-            # set_initial_state() is called to instantiate incoming state on the cursor. Although we no
-            # longer rely on the legacy low-code cursor for concurrent checkpointing, low-code components
-            # like StopConditionPaginationStrategyDecorator still rely on a DatetimeBasedCursor that is
-            # properly initialized with state.
-            if retriever.cursor:
-                retriever.cursor.set_initial_state(stream_state=stream_state)
-
-            # Similar to above, the ClientSideIncrementalRecordFilterDecorator cursor is a separate instance
-            # from the one initialized on the SimpleRetriever, so it also must also have state initialized
-            # for semi-incremental streams using is_client_side_incremental to filter properly
-            if isinstance(retriever.record_selector, RecordSelector) and isinstance(
-                retriever.record_selector.record_filter, ClientSideIncrementalRecordFilterDecorator
-            ):
-                retriever.record_selector.record_filter._cursor.set_initial_state(
-                    stream_state=stream_state
-                ) # type: ignore # After non-concurrent cursors are deprecated we can remove these cursor workarounds
-
+        if declarative_stream and isinstance(declarative_stream.retriever, SimpleRetriever):
             # We zero it out here, but since this is a cursor reference, the state is still properly
             # instantiated for the other components that reference it
-            retriever.cursor = None
-
-        return retriever
+            declarative_stream.retriever.cursor = None
+        return declarative_stream.retriever
 
     @staticmethod
     def _select_streams(
@@ -4,12 +4,8 @@
 from dataclasses import InitVar, dataclass
 from typing import Any, Iterable, Mapping, Optional, Union
 
-from airbyte_cdk.sources.declarative.incremental import (
-    DatetimeBasedCursor,
-    GlobalSubstreamCursor,
-    PerPartitionWithGlobalCursor,
-)
 from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean
+from airbyte_cdk.sources.streams.concurrent.cursor import Cursor
 from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
 
 
@@ -53,13 +49,13 @@ class ClientSideIncrementalRecordFilterDecorator(RecordFilter):
     """
     Applies a filter to a list of records to exclude those that are older than the stream_state/start_date.
 
-    :param DatetimeBasedCursor date_time_based_cursor: Cursor used to extract datetime values
+    :param Cursor cursor: Cursor used to filter out values
     :param PerPartitionCursor per_partition_cursor: Optional Cursor used for mapping cursor value in nested stream_state
     """
 
     def __init__(
         self,
-        cursor: Union[DatetimeBasedCursor, PerPartitionWithGlobalCursor, GlobalSubstreamCursor],
+        cursor: Union[Cursor],
         **kwargs: Any,
     ):
         super().__init__(**kwargs)
@@ -77,7 +73,7 @@ class ClientSideIncrementalRecordFilterDecorator(RecordFilter):
             for record in records
             if self._cursor.should_be_synced(
                 # Record is created on the fly to align with cursors interface; stream name is ignored as we don't need it here
-                # Record stream name is empty cause it is not used durig the filtering
+                # Record stream name is empty because it is not used during the filtering
                 Record(data=record, associated_slice=stream_slice, stream_name="")
             )
         )
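For readers following the client-side filtering change: the filter keeps a record only when the cursor's `should_be_synced` returns True. Below is a minimal, self-contained sketch of that pattern; `StartDateCursor` and `client_side_filter` are illustrative stand-ins, not CDK classes, and the local `Record`/`Cursor` types are simplified versions of the real ones.

```python
from dataclasses import dataclass
from typing import Any, Iterable, Mapping, Protocol


@dataclass(frozen=True)
class Record:
    """Simplified stand-in for airbyte_cdk.sources.types.Record."""

    data: Mapping[str, Any]
    stream_name: str = ""  # left empty, mirroring the comment in the hunk above


class Cursor(Protocol):
    def should_be_synced(self, record: Record) -> bool: ...


@dataclass(frozen=True)
class StartDateCursor:
    """Illustrative cursor: keep records at or after a start value."""

    cursor_field: str
    start: int

    def should_be_synced(self, record: Record) -> bool:
        return int(record.data[self.cursor_field]) >= self.start


def client_side_filter(
    cursor: Cursor, records: Iterable[Mapping[str, Any]]
) -> Iterable[Mapping[str, Any]]:
    # Records are wrapped on the fly so the cursor interface can be reused for filtering.
    return (row for row in records if cursor.should_be_synced(Record(data=row)))


if __name__ == "__main__":
    rows = [{"id": 1, "updated_at": 50}, {"id": 2, "updated_at": 150}]
    print(list(client_side_filter(StartDateCursor("updated_at", 100), rows)))
    # -> [{'id': 2, 'updated_at': 150}]
```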
@@ -81,6 +81,7 @@ class ConcurrentPerPartitionCursor(Cursor):
         connector_state_converter: AbstractStreamStateConverter,
         cursor_field: CursorField,
         use_global_cursor: bool = False,
+        attempt_to_create_cursor_if_not_provided: bool = False,
     ) -> None:
         self._global_cursor: Optional[StreamState] = {}
         self._stream_name = stream_name
@@ -125,6 +126,9 @@ class ConcurrentPerPartitionCursor(Cursor):
 
         self._set_initial_state(stream_state)
 
+        # FIXME this is a temporary field the time of the migration from declarative cursors to concurrent ones
+        self._attempt_to_create_cursor_if_not_provided = attempt_to_create_cursor_if_not_provided
+
     @property
     def cursor_field(self) -> CursorField:
         return self._cursor_field
@@ -512,13 +516,28 @@ class ConcurrentPerPartitionCursor(Cursor):
             raise ValueError(
                 "Invalid state as stream slices that are emitted should refer to an existing cursor"
             )
+
+        if self._use_global_cursor:
+            return self._create_cursor(
+                self._global_cursor,
+                self._lookback_window if self._global_cursor else 0,
+            )
+
         partition_key = self._to_partition_key(record.associated_slice.partition)
-        if partition_key not in self._cursor_per_partition:
+        if (
+            partition_key not in self._cursor_per_partition
+            and not self._attempt_to_create_cursor_if_not_provided
+        ):
             raise ValueError(
                 "Invalid state as stream slices that are emitted should refer to an existing cursor"
             )
-        cursor = self._cursor_per_partition[partition_key]
-        return cursor
+        elif partition_key not in self._cursor_per_partition:
+            return self._create_cursor(
+                self._global_cursor,
+                self._lookback_window if self._global_cursor else 0,
+            )
+        else:
+            return self._cursor_per_partition[partition_key]
 
     def limit_reached(self) -> bool:
         return self._number_of_partitions > self.SWITCH_TO_GLOBAL_LIMIT
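Restating the branching added here as a standalone sketch (the function and parameter names are illustrative, not the real ConcurrentPerPartitionCursor method): when the new flag is set, a record belonging to an unseen partition gets a cursor built from the global cursor state instead of raising.

```python
from typing import Any, Callable, Dict, Mapping, Optional


def pick_partition_cursor(
    partition_key: str,
    cursor_per_partition: Dict[str, Any],
    global_cursor: Optional[Mapping[str, Any]],
    lookback_window: int,
    use_global_cursor: bool,
    attempt_to_create_cursor_if_not_provided: bool,
    create_cursor: Callable[[Optional[Mapping[str, Any]], int], Any],
) -> Any:
    if use_global_cursor:
        # Global mode: every record is matched against a cursor seeded from the global state.
        return create_cursor(global_cursor, lookback_window if global_cursor else 0)
    if partition_key not in cursor_per_partition:
        if not attempt_to_create_cursor_if_not_provided:
            raise ValueError(
                "Invalid state as stream slices that are emitted should refer to an existing cursor"
            )
        # New fallback: build a cursor from the global state for a partition we have not seen.
        return create_cursor(global_cursor, lookback_window if global_cursor else 0)
    return cursor_per_partition[partition_key]
```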
@@ -34,7 +34,6 @@ from airbyte_cdk.connector_builder.models import (
 )
 from airbyte_cdk.models import FailureType, Level
 from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
-from airbyte_cdk.sources.declarative import transformations
 from airbyte_cdk.sources.declarative.async_job.job_orchestrator import AsyncJobOrchestrator
 from airbyte_cdk.sources.declarative.async_job.job_tracker import JobTracker
 from airbyte_cdk.sources.declarative.async_job.repository import AsyncJobRepository
@@ -604,7 +603,7 @@ from airbyte_cdk.sources.streams.concurrent.clamping import (
     WeekClampingStrategy,
     Weekday,
 )
-from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, CursorField
+from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, Cursor, CursorField
 from airbyte_cdk.sources.streams.concurrent.state_converters.datetime_stream_state_converter import (
     CustomFormatConcurrentStreamStateConverter,
     DateTimeStreamStateConverter,
@@ -1475,6 +1474,7 @@ class ModelToComponentFactory:
         stream_namespace: Optional[str],
         config: Config,
         message_repository: Optional[MessageRepository] = None,
+        stream_state_migrations: Optional[List[Any]] = None,
         **kwargs: Any,
     ) -> ConcurrentCursor:
         # Per-partition incremental streams can dynamically create child cursors which will pass their current
@@ -1485,6 +1485,7 @@ class ModelToComponentFactory:
             if "stream_state" not in kwargs
            else kwargs["stream_state"]
         )
+        stream_state = self.apply_stream_state_migrations(stream_state_migrations, stream_state)
 
         component_type = component_definition.get("type")
         if component_definition.get("type") != model_type.__name__:
@@ -1561,6 +1562,7 @@ class ModelToComponentFactory:
         stream_state: MutableMapping[str, Any],
         partition_router: PartitionRouter,
         stream_state_migrations: Optional[List[Any]] = None,
+        attempt_to_create_cursor_if_not_provided: bool = False,
         **kwargs: Any,
     ) -> ConcurrentPerPartitionCursor:
         component_type = component_definition.get("type")
@@ -1631,6 +1633,7 @@ class ModelToComponentFactory:
             connector_state_converter=connector_state_converter,
             cursor_field=cursor_field,
             use_global_cursor=use_global_cursor,
+            attempt_to_create_cursor_if_not_provided=attempt_to_create_cursor_if_not_provided,
         )
 
     @staticmethod
@@ -1931,30 +1934,17 @@ class ModelToComponentFactory:
             and hasattr(model.incremental_sync, "is_data_feed")
             and model.incremental_sync.is_data_feed
         )
-        client_side_incremental_sync = None
-        if (
+        client_side_filtering_enabled = (
             model.incremental_sync
             and hasattr(model.incremental_sync, "is_client_side_incremental")
             and model.incremental_sync.is_client_side_incremental
-        ):
-            supported_slicers = (
-                DatetimeBasedCursor,
-                GlobalSubstreamCursor,
-                PerPartitionWithGlobalCursor,
-            )
-            if combined_slicers and not isinstance(combined_slicers, supported_slicers):
-                raise ValueError(
-                    "Unsupported Slicer is used. PerPartitionWithGlobalCursor should be used here instead"
-                )
-            cursor = (
-                combined_slicers
-                if isinstance(
-                    combined_slicers, (PerPartitionWithGlobalCursor, GlobalSubstreamCursor)
-                )
-                else self._create_component_from_model(model=model.incremental_sync, config=config)
+        )
+        concurrent_cursor = None
+        if stop_condition_on_cursor or client_side_filtering_enabled:
+            stream_slicer = self._build_stream_slicer_from_partition_router(
+                model.retriever, config, stream_name=model.name
             )
-
-            client_side_incremental_sync = {"cursor": cursor}
+            concurrent_cursor = self._build_concurrent_cursor(model, stream_slicer, config)
 
         if model.incremental_sync and isinstance(model.incremental_sync, DatetimeBasedCursorModel):
             cursor_model = model.incremental_sync
@@ -2029,8 +2019,10 @@ class ModelToComponentFactory:
             primary_key=primary_key,
             stream_slicer=combined_slicers,
             request_options_provider=request_options_provider,
-            stop_condition_on_cursor=stop_condition_on_cursor,
-            client_side_incremental_sync=client_side_incremental_sync,
+            stop_condition_cursor=concurrent_cursor,
+            client_side_incremental_sync={"cursor": concurrent_cursor}
+            if client_side_filtering_enabled
+            else None,
             transformations=transformations,
             file_uploader=file_uploader,
             incremental_sync=model.incremental_sync,
@@ -2185,6 +2177,67 @@ class ModelToComponentFactory:
             return self._create_component_from_model(model=model.incremental_sync, config=config) # type: ignore[no-any-return] # Will be created Cursor as stream_slicer_model is model.incremental_sync
         return None
 
+    def _build_concurrent_cursor(
+        self,
+        model: DeclarativeStreamModel,
+        stream_slicer: Optional[PartitionRouter],
+        config: Config,
+    ) -> Optional[StreamSlicer]:
+        stream_state = self._connector_state_manager.get_stream_state(
+            stream_name=model.name or "", namespace=None
+        )
+
+        if model.incremental_sync and stream_slicer:
+            # FIXME there is a discrepancy where this logic is applied on the create_*_cursor methods for
+            # ConcurrentCursor but it is applied outside of create_concurrent_cursor_from_perpartition_cursor
+            if model.state_migrations:
+                state_transformations = [
+                    self._create_component_from_model(
+                        state_migration, config, declarative_stream=model
+                    )
+                    for state_migration in model.state_migrations
+                ]
+            else:
+                state_transformations = []
+
+            return self.create_concurrent_cursor_from_perpartition_cursor( # type: ignore # This is a known issue that we are creating and returning a ConcurrentCursor which does not technically implement the (low-code) StreamSlicer. However, (low-code) StreamSlicer and ConcurrentCursor both implement StreamSlicer.stream_slices() which is the primary method needed for checkpointing
+                state_manager=self._connector_state_manager,
+                model_type=DatetimeBasedCursorModel,
+                component_definition=model.incremental_sync.__dict__,
+                stream_name=model.name or "",
+                stream_namespace=None,
+                config=config or {},
+                stream_state=stream_state,
+                stream_state_migrations=state_transformations,
+                partition_router=stream_slicer,
+                attempt_to_create_cursor_if_not_provided=True,
+            )
+        elif model.incremental_sync:
+            if type(model.incremental_sync) == IncrementingCountCursorModel:
+                return self.create_concurrent_cursor_from_incrementing_count_cursor( # type: ignore # This is a known issue that we are creating and returning a ConcurrentCursor which does not technically implement the (low-code) StreamSlicer. However, (low-code) StreamSlicer and ConcurrentCursor both implement StreamSlicer.stream_slices() which is the primary method needed for checkpointing
+                    model_type=IncrementingCountCursorModel,
+                    component_definition=model.incremental_sync.__dict__,
+                    stream_name=model.name or "",
+                    stream_namespace=None,
+                    config=config or {},
+                    stream_state_migrations=model.state_migrations,
+                )
+            elif type(model.incremental_sync) == DatetimeBasedCursorModel:
+                return self.create_concurrent_cursor_from_datetime_based_cursor( # type: ignore # This is a known issue that we are creating and returning a ConcurrentCursor which does not technically implement the (low-code) StreamSlicer. However, (low-code) StreamSlicer and ConcurrentCursor both implement StreamSlicer.stream_slices() which is the primary method needed for checkpointing
+                    model_type=type(model.incremental_sync),
+                    component_definition=model.incremental_sync.__dict__,
+                    stream_name=model.name or "",
+                    stream_namespace=None,
+                    config=config or {},
+                    stream_state_migrations=model.state_migrations,
+                    attempt_to_create_cursor_if_not_provided=True,
+                )
+            else:
+                raise ValueError(
+                    f"Incremental sync of type {type(model.incremental_sync)} is not supported"
+                )
+        return None
+
     def _build_resumable_cursor(
         self,
         model: Union[
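In short, the new `_build_concurrent_cursor` helper routes to one of three existing factory methods. The routing can be summarized as follows; this is a hedged restatement using string labels and placeholder logic, not CDK code.

```python
from typing import Optional


def route_cursor_factory(has_partition_router: bool, incremental_sync_type: Optional[str]) -> str:
    """Hypothetical summary of the routing performed by _build_concurrent_cursor above."""
    if incremental_sync_type and has_partition_router:
        return "create_concurrent_cursor_from_perpartition_cursor"
    if incremental_sync_type == "IncrementingCountCursor":
        return "create_concurrent_cursor_from_incrementing_count_cursor"
    if incremental_sync_type == "DatetimeBasedCursor":
        return "create_concurrent_cursor_from_datetime_based_cursor"
    if incremental_sync_type:
        raise ValueError(f"Incremental sync of type {incremental_sync_type} is not supported")
    return "no cursor (None)"
```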
@@ -2285,7 +2338,7 @@ class ModelToComponentFactory:
         url_base: str,
         extractor_model: Optional[Union[CustomRecordExtractorModel, DpathExtractorModel]] = None,
         decoder: Optional[Decoder] = None,
-        cursor_used_for_stop_condition: Optional[DeclarativeCursor] = None,
+        cursor_used_for_stop_condition: Optional[Cursor] = None,
     ) -> Union[DefaultPaginator, PaginatorTestReadDecorator]:
         if decoder:
             if self._is_supported_decoder_for_pagination(decoder):
@@ -3146,7 +3199,7 @@ class ModelToComponentFactory:
         primary_key: Optional[Union[str, List[str], List[List[str]]]],
         stream_slicer: Optional[StreamSlicer],
         request_options_provider: Optional[RequestOptionsProvider] = None,
-        stop_condition_on_cursor: bool = False,
+        stop_condition_cursor: Optional[Cursor] = None,
         client_side_incremental_sync: Optional[Dict[str, Any]] = None,
         transformations: List[RecordTransformation],
         file_uploader: Optional[DefaultFileUploader] = None,
@@ -3277,7 +3330,6 @@ class ModelToComponentFactory:
             ),
         )
 
-        cursor_used_for_stop_condition = cursor if stop_condition_on_cursor else None
         paginator = (
             self._create_component_from_model(
                 model=model.paginator,
@@ -3285,7 +3337,7 @@ class ModelToComponentFactory:
                 url_base=_get_url(),
                 extractor_model=model.record_selector.extractor,
                 decoder=decoder,
-                cursor_used_for_stop_condition=cursor_used_for_stop_condition,
+                cursor_used_for_stop_condition=stop_condition_cursor or None,
             )
             if model.paginator
             else NoPagination(parameters={})
@@ -7,11 +7,10 @@ from typing import Any, Optional
 
 import requests
 
-from airbyte_cdk.sources.declarative.incremental.declarative_cursor import DeclarativeCursor
 from airbyte_cdk.sources.declarative.requesters.paginators.strategies.pagination_strategy import (
     PaginationStrategy,
 )
-from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor
+from airbyte_cdk.sources.streams.concurrent.cursor import Cursor
 from airbyte_cdk.sources.types import Record
 
 
@@ -29,8 +28,7 @@ class PaginationStopCondition(ABC):
 class CursorStopCondition(PaginationStopCondition):
     def __init__(
         self,
-        cursor: DeclarativeCursor
-        | ConcurrentCursor, # migrate to use both old and concurrent versions
+        cursor: Cursor,
     ):
         self._cursor = cursor
 
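With the union type gone, any concurrent `Cursor` (which now exposes `should_be_synced`, see the cursor hunks below) can drive the pagination stop condition. The following self-contained sketch shows how such a stop condition is typically evaluated; `is_met` delegating to `should_be_synced` is an assumption here, since the evaluation logic is outside this diff.

```python
from dataclasses import dataclass
from typing import Any, Mapping, Protocol


@dataclass(frozen=True)
class Record:
    """Simplified stand-in for airbyte_cdk.sources.types.Record."""

    data: Mapping[str, Any]


class Cursor(Protocol):
    def should_be_synced(self, record: Record) -> bool: ...


class CursorStopCondition:
    def __init__(self, cursor: Cursor):
        self._cursor = cursor

    def is_met(self, record: Record) -> bool:
        # Assumed behaviour: stop paginating once the cursor says the record
        # no longer needs to be synced (i.e. it falls outside the sync window).
        return not self._cursor.should_be_synced(record)
```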
 
@@ -311,3 +311,6 @@ class FileBasedConcurrentCursor(AbstractConcurrentFileBasedCursor):
 
     def ensure_at_least_one_state_emitted(self) -> None:
         self.emit_state_message()
+
+    def should_be_synced(self, record: Record) -> bool:
+        return True
@@ -81,3 +81,6 @@ class FileBasedFinalStateCursor(AbstractConcurrentFileBasedCursor):
             self._stream_name, self._stream_namespace
         )
         self._message_repository.emit_message(state_message)
+
+    def should_be_synced(self, record: Record) -> bool:
+        return True
@@ -74,6 +74,10 @@ class Cursor(StreamSlicer, ABC):
         """
         raise NotImplementedError()
 
+    @abstractmethod
+    def should_be_synced(self, record: Record) -> bool:
+        pass
+
     def stream_slices(self) -> Iterable[StreamSlice]:
         """
         Default placeholder implementation of generate_slices.
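Because `should_be_synced` is now abstract on the concurrent `Cursor` base class, every concrete cursor must provide it (the file-based cursors above simply return True). Here is a minimal sketch of a subclass honouring the new contract, using a simplified local base class and record type rather than the real `airbyte_cdk` imports.

```python
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Mapping


@dataclass(frozen=True)
class Record:
    """Simplified stand-in for airbyte_cdk.sources.types.Record."""

    data: Mapping[str, Any]


class Cursor(ABC):
    """Simplified stand-in for the concurrent Cursor base class."""

    @abstractmethod
    def should_be_synced(self, record: Record) -> bool:
        ...


class UpdatedAtCursor(Cursor):
    """Illustrative cursor: sync records at or past the stored state value."""

    def __init__(self, cursor_field: str, state_value: int) -> None:
        self._cursor_field = cursor_field
        self._state_value = state_value

    def should_be_synced(self, record: Record) -> bool:
        return int(record.data.get(self._cursor_field, 0)) >= self._state_value


assert UpdatedAtCursor("updated_at", 10).should_be_synced(Record(data={"updated_at": 12}))
```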
@@ -123,6 +127,9 @@ class FinalStateCursor(Cursor):
         )
         self._message_repository.emit_message(state_message)
 
+    def should_be_synced(self, record: Record) -> bool:
+        return True
+
 
 class ConcurrentCursor(Cursor):
     _START_BOUNDARY = 0
@@ -192,9 +199,23 @@ class ConcurrentCursor(Cursor):
         self, state: MutableMapping[str, Any]
     ) -> Tuple[CursorValueType, MutableMapping[str, Any]]:
         if self._connector_state_converter.is_state_message_compatible(state):
+            partitioned_state = self._connector_state_converter.deserialize(state)
+            slices_from_partitioned_state = partitioned_state.get("slices", [])
+
+            value_from_partitioned_state = None
+            if slices_from_partitioned_state:
+                # We assume here that the slices have been already merged
+                first_slice = slices_from_partitioned_state[0]
+                value_from_partitioned_state = (
+                    first_slice[self._connector_state_converter.MOST_RECENT_RECORD_KEY]
+                    if self._connector_state_converter.MOST_RECENT_RECORD_KEY in first_slice
+                    else first_slice[self._connector_state_converter.END_KEY]
+                )
             return (
-                self._start or self._connector_state_converter.zero_value,
-                self._connector_state_converter.deserialize(state),
+                value_from_partitioned_state
+                or self._start
+                or self._connector_state_converter.zero_value,
+                partitioned_state,
             )
         return self._connector_state_converter.convert_from_sequential_state(
             self._cursor_field, state, self._start
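A worked example of the new extraction path, assuming (as the added comment notes) that the slices are already merged: the resumed cursor value is taken from the first slice's most recent record when present, otherwise from its end, and only then does it fall back to `self._start` or the converter's zero value. The key names below are assumed values for `MOST_RECENT_RECORD_KEY` and `END_KEY` on the state converter, shown purely for illustration.

```python
# Hypothetical merged partitioned state; the key names are assumptions for illustration.
MOST_RECENT_RECORD_KEY = "most_recent_cursor_value"
END_KEY = "end"

partitioned_state = {
    "slices": [
        {"start": "2024-01-01", "end": "2024-03-01", MOST_RECENT_RECORD_KEY: "2024-02-15"},
    ]
}

first_slice = partitioned_state["slices"][0]
value_from_partitioned_state = (
    first_slice[MOST_RECENT_RECORD_KEY]
    if MOST_RECENT_RECORD_KEY in first_slice
    else first_slice[END_KEY]
)
print(value_from_partitioned_state)  # 2024-02-15 is used as the cursor's starting value
```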
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: airbyte-cdk
-Version: 6.60.3
+Version: 6.60.4
 Summary: A framework for writing Airbyte Connectors.
 Home-page: https://airbyte.com
 License: MIT
@@ -86,7 +86,7 @@ airbyte_cdk/sources/declarative/checks/check_stream.py,sha256=QeExVmpSYjr_CnghHu
 airbyte_cdk/sources/declarative/checks/connection_checker.py,sha256=MBRJo6WJlZQHpIfOGaNOkkHUmgUl_4wDM6VPo41z5Ss,1383
 airbyte_cdk/sources/declarative/concurrency_level/__init__.py,sha256=5XUqrmlstYlMM0j6crktlKQwALek0uiz2D3WdM46MyA,191
 airbyte_cdk/sources/declarative/concurrency_level/concurrency_level.py,sha256=YIwCTCpOr_QSNW4ltQK0yUGWInI8PKNY216HOOegYLk,2101
-airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=rQz9gXp3m8M8E201EWnD7BfeefDXhW3233GG_JLpdOQ,28546
+airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=IwKlf20G5C4j-am9FrLhRN0qv61A5rU097xPnnFmt5U,27022
 airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
 airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=_zGNGq31RNy_0QBLt_EcTvgPyhj7urPdx6oA3M5-r3o,3150
 airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
@@ -107,12 +107,12 @@ airbyte_cdk/sources/declarative/extractors/__init__.py,sha256=RmV-IkO1YLj0PSOrrq
 airbyte_cdk/sources/declarative/extractors/dpath_extractor.py,sha256=wR4Ol4MG2lt5UlqXF5EU_k7qa5cN4_-luu3PJ1PlO3A,3131
 airbyte_cdk/sources/declarative/extractors/http_selector.py,sha256=2zWZ4ewTqQC8VwkjS0xD_u350Km3SiYP7hpOOgiLg5o,1169
 airbyte_cdk/sources/declarative/extractors/record_extractor.py,sha256=XJELMjahAsaomlvQgN2zrNO0DJX0G0fr9r682gUz7Pg,691
-airbyte_cdk/sources/declarative/extractors/record_filter.py,sha256=yTdEkyDUSW2KbFkEwJJMlS963C955LgCCOVfTmmScpQ,3367
+airbyte_cdk/sources/declarative/extractors/record_filter.py,sha256=sNLGjFX0fnqO_p1F5JO7-tBwjX83wZdz6W-WJTQpGps,3188
 airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=vCpwX1PVRFPYKMzm0DHHP3YEZ0Gmd3bBzggOsRha038,7192
 airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=WJyA2OYIEgFpVP5Y3o0tIj69AV6IKkn9B16MeXaEItI,6513
 airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
 airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=U1oZKtBaEC6IACmvziY9Wzg7Z8EgF4ZuR7NwvjlB_Sk,1255
-airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=cmv_nV1G3HMf-YUKtm6Pb2pbisx3R0ZnP_B-8cTnn0I,22842
+airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=LagQ5ON8zdsltOg81fmc7FX--C38gfdo4QLeT2E_Qas,23622
 airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=AD5qJSryosA9p3rzdl_vX60uwG9_mOk5Q8sGD8XSTjE,21592
 airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
 airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=69XbGqqTHBCSXi4MV6qO7uTEsTUPRN7uML0VJDjl8qU,15809
@@ -141,7 +141,7 @@ airbyte_cdk/sources/declarative/parsers/custom_exceptions.py,sha256=wnRUP0Xeru9R
 airbyte_cdk/sources/declarative/parsers/manifest_component_transformer.py,sha256=2UdpCz3yi7ISZTyqkQXSSy3dMxeyOWqV7OlAS5b9GVg,11568
 airbyte_cdk/sources/declarative/parsers/manifest_normalizer.py,sha256=EtKjS9c94yNp3AwQC8KUCQaAYW5T3zvFYxoWYjc_buI,19729
 airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py,sha256=pJmg78vqE5VfUrF_KJnWjucQ4k9IWFULeAxHCowrHXE,6806
-airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=1Mb__NBxSt2m1xXRNv9V-Xvk2Dj1uTk21AK0QeuIpMo,178243
+airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=rKp3chyB0iil4j-ekGmBA0Y2T7XbVM34pqpN3jNdvYc,181864
 airbyte_cdk/sources/declarative/partition_routers/__init__.py,sha256=TBC9AkGaUqHm2IKHMPN6punBIcY5tWGULowcLoAVkfw,1109
 airbyte_cdk/sources/declarative/partition_routers/async_job_partition_router.py,sha256=VelO7zKqKtzMJ35jyFeg0ypJLQC0plqqIBNXoBW1G2E,3001
 airbyte_cdk/sources/declarative/partition_routers/cartesian_product_stream_slicer.py,sha256=c5cuVFM6NFkuQqG8Z5IwkBuwDrvXZN1CunUOM_L0ezg,6892
@@ -176,7 +176,7 @@ airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_paginati
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/offset_increment.py,sha256=mJ14vcdCpD9rwYdj1Wi6GRzwnOF2yymlQnkjUgGDXmE,4220
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/page_increment.py,sha256=Z2i6a-oKMmOTxHxsTVSnyaShkJ3u8xZw1xIJdx2yxss,2731
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/pagination_strategy.py,sha256=ZBshGQNr5Bb_V8dqnWRISqdXFcjm1CKIXnlfbRhNl8g,1308
-airbyte_cdk/sources/declarative/requesters/paginators/strategies/stop_condition.py,sha256=LoKXdUbSgHEtSwtA8DFrnX6SpQbRVVwreY8NguTKTcI,2229
+airbyte_cdk/sources/declarative/requesters/paginators/strategies/stop_condition.py,sha256=wVTjBVxR2PJZ06W9wWQmNEe3mvWLnsm40s5HpYpNG-Y,2037
 airbyte_cdk/sources/declarative/requesters/query_properties/__init__.py,sha256=sHwHVuN6djuRBF7zQb-HmINV0By4wE5j_i6TjmIPMzQ,494
 airbyte_cdk/sources/declarative/requesters/query_properties/properties_from_endpoint.py,sha256=3h9Ae6TNGagh9sMYWdG5KoEFWDlqUWZ5fkswTPreveM,1616
 airbyte_cdk/sources/declarative/requesters/query_properties/property_chunking.py,sha256=G-kHHopdScW8oLqLOEaCwgk6Ri8H-7BprZyaw1uKV4s,2982
@@ -288,8 +288,8 @@ airbyte_cdk/sources/file_based/stream/concurrent/__init__.py,sha256=47DEQpj8HBSa
 airbyte_cdk/sources/file_based/stream/concurrent/adapters.py,sha256=1AIuAOHa_M6zN9l0eAWBHwhKl4fdP4-KlUMOMzTv11U,13525
 airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py,sha256=Rx7TwjH8B7e0eee83Tlqxv1bWn-BVXOmlUAH7auM1uM,344
 airbyte_cdk/sources/file_based/stream/concurrent/cursor/abstract_concurrent_file_based_cursor.py,sha256=5dYZMLBEbvCyrCT89lCYdm2FdrLPLuxjdpQSVGP5o0w,1856
-airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py,sha256=gRTL-9I3ejjQOpLKd6ixe9rB3kGlubCdhUt9ri6AdAI,14880
-airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py,sha256=V4Dy7o-FVLwCsvHgV8cgCN46vb_nc7Jlfow-D3SXjbU,3197
+airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py,sha256=lubzER11aRWGureKhdvZqWraM7hOWiv-WrGQofnmcJU,14957
+airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_final_state_cursor.py,sha256=x4CMdu5Ldrta6UZTkYGsYSGh4J9yNaBuex2MV6QnUt8,3274
 airbyte_cdk/sources/file_based/stream/cursor/__init__.py,sha256=MhFB5hOo8sjwvCh8gangaymdg3EJWYt_72brFOZt068,191
 airbyte_cdk/sources/file_based/stream/cursor/abstract_file_based_cursor.py,sha256=om-x3gZFPgWDpi15S9RxZmR36VHnk8sytgN6LlBQhAw,1934
 airbyte_cdk/sources/file_based/stream/cursor/default_file_based_cursor.py,sha256=VGV7xLyBribuBMVrXtO1xqkWJD86bl7yhXtjnwLMohM,7051
@@ -319,7 +319,7 @@ airbyte_cdk/sources/streams/concurrent/abstract_stream_facade.py,sha256=QTry1QCB
 airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=aZtJ_75gVPmoCS-URtfQQX8mYId5xk5Q5mLQYeTM0N4,15814
 airbyte_cdk/sources/streams/concurrent/availability_strategy.py,sha256=4La5v2UffSjGnhmF4kwNIKt_g3RXk2ux1mSHA1ejgYM,2898
 airbyte_cdk/sources/streams/concurrent/clamping.py,sha256=i26GVyui2ScEXSP-IP_61K2HaTp1-6lTlYHsZVYpuZA,3240
-airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=LFXbKBEMtNSVz_kZs9qydS9fPvzTU5wdgXRagRRJeHo,21388
+airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=xFFB8eEbtjGUdb42vkyWT5JB-WTUsaJlZ0gjKoVEycc,22307
 airbyte_cdk/sources/streams/concurrent/cursor_types.py,sha256=ZyWLPpeLX1qXcP5MwS-wxK11IBMsnVPCw9zx8gA2_Ro,843
 airbyte_cdk/sources/streams/concurrent/default_stream.py,sha256=3SBjFa1z955pSE_2qt1C7mAky-RKjOZeQDePbZkWYYs,3371
 airbyte_cdk/sources/streams/concurrent/exceptions.py,sha256=JOZ446MCLpmF26r9KfS6OO_6rGjcjgJNZdcw6jccjEI,468
@@ -424,9 +424,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
-airbyte_cdk-6.60.3.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
-airbyte_cdk-6.60.3.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
-airbyte_cdk-6.60.3.dist-info/METADATA,sha256=Tq6xNhMal_OKVSb0BvKW0o5lRKbIq_RxmA4N0pkLJBE,6477
-airbyte_cdk-6.60.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-airbyte_cdk-6.60.3.dist-info/entry_points.txt,sha256=AKWbEkHfpzzk9nF9tqBUaw1MbvTM4mGtEzmZQm0ZWvM,139
-airbyte_cdk-6.60.3.dist-info/RECORD,,
+airbyte_cdk-6.60.4.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.60.4.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+airbyte_cdk-6.60.4.dist-info/METADATA,sha256=zwXZHjfOrFMNnnbw4L-VYjPc-QlCv5vfKmldjRSeEjQ,6477
+airbyte_cdk-6.60.4.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.60.4.dist-info/entry_points.txt,sha256=AKWbEkHfpzzk9nF9tqBUaw1MbvTM4mGtEzmZQm0ZWvM,139
+airbyte_cdk-6.60.4.dist-info/RECORD,,