airbyte-cdk 7.3.2__py3-none-any.whl → 7.3.3__py3-none-any.whl

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
@@ -2279,12 +2279,14 @@ definitions:
  - FAIL
  - RETRY
  - IGNORE
+ - RESET_PAGINATION
  - RATE_LIMITED
  examples:
  - SUCCESS
  - FAIL
  - RETRY
  - IGNORE
+ - RESET_PAGINATION
  - RATE_LIMITED
  failure_type:
  title: Failure Type
@@ -3707,6 +3709,9 @@ definitions:
  anyOf:
  - "$ref": "#/definitions/DefaultPaginator"
  - "$ref": "#/definitions/NoPagination"
+ pagination_reset:
+ description: Describes what triggers pagination reset and how to handle it.
+ "$ref": "#/definitions/PaginationReset"
  ignore_stream_slicer_parameters_on_paginated_requests:
  description: If true, the partition router and incremental request options will be ignored when paginating requests. Request options set directly on the requester will not be ignored.
  type: boolean
@@ -3730,6 +3735,36 @@ definitions:
  $parameters:
  type: object
  additionalProperties: true
+ PaginationReset:
+ title: Pagination Reset
+ description: Describes what triggers pagination reset and how to handle it. If SPLIT_USING_CURSOR, the connector developer is accountable for ensuring that the records are returned in ascending order.
+ type: object
+ required:
+ - type
+ - action
+ properties:
+ type:
+ type: string
+ enum: [ PaginationReset ]
+ action:
+ type: string
+ enum:
+ - SPLIT_USING_CURSOR
+ - RESET
+ limits:
+ "$ref": "#/definitions/PaginationResetLimits"
+ PaginationResetLimits:
+ title: Pagination Reset Limits
+ description: Describes the limits that trigger pagination reset
+ type: object
+ required:
+ - type
+ properties:
+ type:
+ type: string
+ enum: [ PaginationResetLimits ]
+ number_of_records:
+ type: integer
  GzipDecoder:
  title: gzip
  description: Select 'gzip' for response data that is compressed with gzip. Requires specifying an inner data type/decoder to parse the decompressed data.
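
In a connector manifest, the schema change above surfaces as an optional `pagination_reset` field on SimpleRetriever. A hypothetical fragment of a parsed manifest that would validate against these definitions (the 5000-record limit is an arbitrary example value):

retriever_fragment = {
    "type": "SimpleRetriever",
    "pagination_reset": {
        "type": "PaginationReset",
        "action": "RESET",
        "limits": {"type": "PaginationResetLimits", "number_of_records": 5000},
    },
}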
@@ -151,7 +151,7 @@ class ConcurrentPerPartitionCursor(Cursor):
  self._connector_state_converter = connector_state_converter
  self._cursor_field = cursor_field

- self._cursor_factory = cursor_factory
+ self._cursor_factory = cursor_factory # self._cursor_factory is flagged as private but is used in model_to_component_factory to ease pagination reset instantiation
  self._partition_router = partition_router

  # The dict is ordered to ensure that once the maximum number of partitions is reached,
@@ -539,6 +539,7 @@ class Action(Enum):
  FAIL = "FAIL"
  RETRY = "RETRY"
  IGNORE = "IGNORE"
+ RESET_PAGINATION = "RESET_PAGINATION"
  RATE_LIMITED = "RATE_LIMITED"


@@ -553,7 +554,14 @@ class HttpResponseFilter(BaseModel):
  action: Optional[Action] = Field(
  None,
  description="Action to execute if a response matches the filter.",
- examples=["SUCCESS", "FAIL", "RETRY", "IGNORE", "RATE_LIMITED"],
+ examples=[
+ "SUCCESS",
+ "FAIL",
+ "RETRY",
+ "IGNORE",
+ "RESET_PAGINATION",
+ "RATE_LIMITED",
+ ],
  title="Action",
  )
  failure_type: Optional[FailureType] = Field(
@@ -1173,6 +1181,16 @@ class LegacySessionTokenAuthenticator(BaseModel):
  parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


+ class Action1(Enum):
+ SPLIT_USING_CURSOR = "SPLIT_USING_CURSOR"
+ RESET = "RESET"
+
+
+ class PaginationResetLimits(BaseModel):
+ type: Literal["PaginationResetLimits"]
+ number_of_records: Optional[int] = None
+
+
  class CsvDecoder(BaseModel):
  type: Literal["CsvDecoder"]
  encoding: Optional[str] = "utf-8"
@@ -2054,6 +2072,12 @@ class RecordSelector(BaseModel):
  parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


+ class PaginationReset(BaseModel):
+ type: Literal["PaginationReset"]
+ action: Action1
+ limits: Optional[PaginationResetLimits] = None
+
+
  class GzipDecoder(BaseModel):
  type: Literal["GzipDecoder"]
  decoder: Union[CsvDecoder, GzipDecoder, JsonDecoder, JsonlDecoder]
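
The same configuration expressed through the generated pydantic models above; a minimal sketch (the 10000-record limit is an illustrative value, not a CDK default):

from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
    Action1,
    PaginationReset,
    PaginationResetLimits,
)

# Split the current slice at the last observed cursor value once 10000 records
# have been read within a single pagination sequence.
pagination_reset = PaginationReset(
    type="PaginationReset",
    action=Action1.SPLIT_USING_CURSOR,
    limits=PaginationResetLimits(type="PaginationResetLimits", number_of_records=10000),
)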
@@ -2822,6 +2846,10 @@ class SimpleRetriever(BaseModel):
  None,
  description="Paginator component that describes how to navigate through the API's pages.",
  )
+ pagination_reset: Optional[PaginationReset] = Field(
+ None,
+ description="Describes what triggers pagination reset and how to handle it.",
+ )
  ignore_stream_slicer_parameters_on_paginated_requests: Optional[bool] = Field(
  False,
  description="If true, the partition router and incremental request options will be ignored when paginating requests. Request options set directly on the requester will not be ignored.",
@@ -116,11 +116,15 @@ from airbyte_cdk.sources.declarative.migrations.legacy_to_per_partition_state_mi
  )
  from airbyte_cdk.sources.declarative.models import (
  CustomStateMigration,
+ PaginationResetLimits,
  )
  from airbyte_cdk.sources.declarative.models.base_model_with_deprecations import (
  DEPRECATION_LOGS_TAG,
  BaseModelWithDeprecations,
  )
+ from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
+ Action1 as PaginationResetActionModel,
+ )
  from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
  AddedFieldDefinition as AddedFieldDefinitionModel,
  )
@@ -358,6 +362,9 @@ from airbyte_cdk.sources.declarative.models.declarative_component_schema import
  from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
  PageIncrement as PageIncrementModel,
  )
+ from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
+ PaginationReset as PaginationResetModel,
+ )
  from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
  ParametrizedComponentsResolver as ParametrizedComponentsResolverModel,
  )
@@ -529,6 +536,7 @@ from airbyte_cdk.sources.declarative.retrievers.file_uploader import (
  LocalFileSystemFileWriter,
  NoopFileWriter,
  )
+ from airbyte_cdk.sources.declarative.retrievers.pagination_tracker import PaginationTracker
  from airbyte_cdk.sources.declarative.schema import (
  ComplexFieldType,
  DefaultSchemaLoader,
@@ -644,6 +652,8 @@ _NO_STREAM_SLICING = SinglePartitionRouter(parameters={})
  # this would be a circular import
  MAX_SLICES = 5

+ LOGGER = logging.getLogger(f"airbyte.model_to_component_factory")
+

  class ModelToComponentFactory:
  EPOCH_DATETIME_FORMAT = "%s"
@@ -2043,6 +2053,7 @@ class ModelToComponentFactory:
  if isinstance(concurrent_cursor, FinalStateCursor)
  else concurrent_cursor
  )
+
  retriever = self._create_component_from_model(
  model=model.retriever,
  config=config,
@@ -2051,12 +2062,9 @@ class ModelToComponentFactory:
  request_options_provider=request_options_provider,
  stream_slicer=stream_slicer,
  partition_router=partition_router,
- stop_condition_cursor=concurrent_cursor
- if self._is_stop_condition_on_cursor(model)
- else None,
- client_side_incremental_sync={"cursor": concurrent_cursor}
- if self._is_client_side_filtering_enabled(model)
- else None,
+ has_stop_condition_cursor=self._is_stop_condition_on_cursor(model),
+ is_client_side_incremental_sync=self._is_client_side_filtering_enabled(model),
+ cursor=concurrent_cursor,
  transformations=transformations,
  file_uploader=file_uploader,
  incremental_sync=model.incremental_sync,
@@ -3049,7 +3057,7 @@ class ModelToComponentFactory:
  name: str,
  transformations: List[RecordTransformation] | None = None,
  decoder: Decoder | None = None,
- client_side_incremental_sync: Dict[str, Any] | None = None,
+ client_side_incremental_sync_cursor: Optional[Cursor] = None,
  file_uploader: Optional[DefaultFileUploader] = None,
  **kwargs: Any,
  ) -> RecordSelector:
@@ -3065,14 +3073,14 @@ class ModelToComponentFactory:
  transform_before_filtering = (
  False if model.transform_before_filtering is None else model.transform_before_filtering
  )
- if client_side_incremental_sync:
+ if client_side_incremental_sync_cursor:
  record_filter = ClientSideIncrementalRecordFilterDecorator(
  config=config,
  parameters=model.parameters,
  condition=model.record_filter.condition
  if (model.record_filter and hasattr(model.record_filter, "condition"))
  else None,
- **client_side_incremental_sync,
+ cursor=client_side_incremental_sync_cursor,
  )
  transform_before_filtering = (
  True
@@ -3150,8 +3158,9 @@ class ModelToComponentFactory:
  name: str,
  primary_key: Optional[Union[str, List[str], List[List[str]]]],
  request_options_provider: Optional[RequestOptionsProvider] = None,
- stop_condition_cursor: Optional[Cursor] = None,
- client_side_incremental_sync: Optional[Dict[str, Any]] = None,
+ cursor: Optional[Cursor] = None,
+ has_stop_condition_cursor: bool = False,
+ is_client_side_incremental_sync: bool = False,
  transformations: List[RecordTransformation],
  file_uploader: Optional[DefaultFileUploader] = None,
  incremental_sync: Optional[
@@ -3181,6 +3190,9 @@ class ModelToComponentFactory:

  return _url or _url_base

+ if cursor is None:
+ cursor = FinalStateCursor(name, None, self._message_repository)
+
  decoder = (
  self._create_component_from_model(model=model.decoder, config=config)
  if model.decoder
@@ -3192,7 +3204,7 @@ class ModelToComponentFactory:
  config=config,
  decoder=decoder,
  transformations=transformations,
- client_side_incremental_sync=client_side_incremental_sync,
+ client_side_incremental_sync_cursor=cursor if is_client_side_incremental_sync else None,
  file_uploader=file_uploader,
  )

@@ -3280,7 +3292,7 @@ class ModelToComponentFactory:
  url_base=_get_url(requester),
  extractor_model=model.record_selector.extractor,
  decoder=decoder,
- cursor_used_for_stop_condition=stop_condition_cursor or None,
+ cursor_used_for_stop_condition=cursor if has_stop_condition_cursor else None,
  )
  if model.paginator
  else NoPagination(parameters={})
@@ -3329,6 +3341,13 @@ class ModelToComponentFactory:
  parameters=model.parameters or {},
  )

+ if (
+ model.record_selector.record_filter
+ and model.pagination_reset
+ and model.pagination_reset.limits
+ ):
+ raise ValueError("PaginationResetLimits are not supported while having record filter.")
+
  return SimpleRetriever(
  name=name,
  paginator=paginator,
@@ -3342,9 +3361,40 @@ class ModelToComponentFactory:
  ignore_stream_slicer_parameters_on_paginated_requests=ignore_stream_slicer_parameters_on_paginated_requests,
  additional_query_properties=query_properties,
  log_formatter=self._get_log_formatter(log_formatter, name),
+ pagination_tracker_factory=self._create_pagination_tracker_factory(
+ model.pagination_reset, cursor
+ ),
  parameters=model.parameters or {},
  )

+ def _create_pagination_tracker_factory(
+ self, model: Optional[PaginationResetModel], cursor: Cursor
+ ) -> Callable[[], PaginationTracker]:
+ if model is None:
+ return lambda: PaginationTracker()
+
+ # Until we figure out a way to use any cursor for PaginationTracker, we will have to have this cursor selector logic
+ cursor_factory: Callable[[], Optional[ConcurrentCursor]] = lambda: None
+ if model.action == PaginationResetActionModel.RESET:
+ # in that case, we will let cursor_factory to return None even if the stream has a cursor
+ pass
+ elif model.action == PaginationResetActionModel.SPLIT_USING_CURSOR:
+ if isinstance(cursor, ConcurrentCursor):
+ cursor_factory = lambda: cursor.copy_without_state() # type: ignore # the if condition validates that it is a ConcurrentCursor
+ elif isinstance(cursor, ConcurrentPerPartitionCursor):
+ cursor_factory = lambda: cursor._cursor_factory.create( # type: ignore # if this becomes a problem, we would need to extract the cursor_factory instantiation logic and make it accessible here
+ {}, datetime.timedelta(0)
+ )
+ elif not isinstance(cursor, FinalStateCursor):
+ LOGGER.warning(
+ "Unknown cursor for PaginationTracker. Pagination resets might not work properly"
+ )
+ else:
+ raise ValueError(f"Unknown PaginationReset action: {model.action}")
+
+ limit = model.limits.number_of_records if model and model.limits else None
+ return lambda: PaginationTracker(cursor_factory(), limit)
+
  def _get_log_formatter(
  self, log_formatter: Callable[[Response], Any] | None, name: str
  ) -> Callable[[Response], Any] | None:
@@ -66,14 +66,14 @@ class CompositeErrorHandler(ErrorHandler):
  if not isinstance(matched_error_resolution, ErrorResolution):
  continue

- if matched_error_resolution.response_action == ResponseAction.SUCCESS:
+ if matched_error_resolution.response_action in [
+ ResponseAction.SUCCESS,
+ ResponseAction.RETRY,
+ ResponseAction.IGNORE,
+ ResponseAction.RESET_PAGINATION,
+ ]:
  return matched_error_resolution

- if (
- matched_error_resolution.response_action == ResponseAction.RETRY
- or matched_error_resolution.response_action == ResponseAction.IGNORE
- ):
- return matched_error_resolution
  if matched_error_resolution:
  return matched_error_resolution

@@ -0,0 +1,64 @@
+ from typing import Optional
+
+ from airbyte_cdk.sources.declarative.models import FailureType
+ from airbyte_cdk.sources.declarative.types import Record, StreamSlice
+ from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor
+ from airbyte_cdk.utils.traced_exception import AirbyteTracedException
+
+
+ class PaginationTracker:
+ _record_count: int
+ _number_of_attempt_with_same_slice: int
+
+ def __init__(
+ self, cursor: Optional[ConcurrentCursor] = None, max_number_of_records: Optional[int] = None
+ ) -> None:
+ """
+ Ideally, we would have passed the `Cursor` interface here instead of `ConcurrentCursor` but not all
+ implementations of `Cursor` can support this use case. For example, if the `ConcurrentPerPartitionCursor`
+ switch to global state, we stop keeping track of the state per partition and therefore can't get an accurate
+ view for a specific stream_slice. In order to solve that, we decided to scope this feature to use only
+ ConcurrentCursor which is the only "leaf" cursor that actually emits stream slices with `cursor_partition`.
+ """
+ self._cursor = cursor
+ self._limit = max_number_of_records
+ self._reset()
+
+ """
+ Given we have a cursor, we do not allow for the same slice to be processed twice because we assume we will
+ always process the same slice.
+
+ Given no cursor, we assume that the pagination reset is for retrying purposes and we allow to retry once.
+ """
+ self._allowed_number_of_attempt_with_same_slice = 1 if self._cursor else 2
+ self._number_of_attempt_with_same_slice = 0
+
+ def observe(self, record: Record) -> None:
+ self._record_count += 1
+ if self._cursor:
+ self._cursor.observe(record)
+
+ def has_reached_limit(self) -> bool:
+ return self._limit is not None and self._record_count >= self._limit
+
+ def _reset(self) -> None:
+ self._record_count = 0
+
+ def reduce_slice_range_if_possible(self, stream_slice: StreamSlice) -> StreamSlice:
+ new_slice = self._cursor.reduce_slice_range(stream_slice) if self._cursor else stream_slice
+
+ if new_slice == stream_slice:
+ self._number_of_attempt_with_same_slice += 1
+ if (
+ self._number_of_attempt_with_same_slice
+ >= self._allowed_number_of_attempt_with_same_slice
+ ):
+ raise AirbyteTracedException(
+ internal_message=f"There were {self._number_of_attempt_with_same_slice} attempts with the same slice already while the max allowed is {self._allowed_number_of_attempt_with_same_slice}",
+ failure_type=FailureType.system_error,
+ )
+ else:
+ self._number_of_attempt_with_same_slice = 0
+
+ self._reset()
+ return new_slice
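
A minimal sketch of the tracker's contract using only the API added above (the record data and the limit of 2 are made up for illustration):

from airbyte_cdk.sources.declarative.retrievers.pagination_tracker import PaginationTracker
from airbyte_cdk.sources.types import Record, StreamSlice

# Without a cursor, the tracker only counts records and allows one retry with the same slice.
tracker = PaginationTracker(cursor=None, max_number_of_records=2)
stream_slice = StreamSlice(partition={}, cursor_slice={})

for i in range(2):
    tracker.observe(Record(data={"id": i}, stream_name="items", associated_slice=stream_slice))

assert tracker.has_reached_limit()
# The first reset with an unchanged slice is tolerated (the retry case); a second
# unchanged slice would raise an AirbyteTracedException.
stream_slice = tracker.reduce_slice_range_if_possible(stream_slice)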
@@ -3,10 +3,10 @@
  #

  import json
+ import logging
  from collections import defaultdict
  from dataclasses import InitVar, dataclass, field
  from functools import partial
- from itertools import islice
  from typing import (
  Any,
  Callable,
@@ -39,14 +39,20 @@ from airbyte_cdk.sources.declarative.requesters.request_options import (
  RequestOptionsProvider,
  )
  from airbyte_cdk.sources.declarative.requesters.requester import Requester
+ from airbyte_cdk.sources.declarative.retrievers.pagination_tracker import PaginationTracker
  from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever
  from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer
  from airbyte_cdk.sources.source import ExperimentalClassWarning
+ from airbyte_cdk.sources.streams.concurrent.cursor import Cursor
  from airbyte_cdk.sources.streams.core import StreamData
+ from airbyte_cdk.sources.streams.http.pagination_reset_exception import (
+ PaginationResetRequiredException,
+ )
  from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
  from airbyte_cdk.utils.mapping_helpers import combine_mappings

  FULL_REFRESH_SYNC_COMPLETE_KEY = "__ab_full_refresh_sync_complete"
+ LOGGER = logging.getLogger("airbyte")


  @dataclass
@@ -92,8 +98,14 @@ class SimpleRetriever(Retriever):
  ignore_stream_slicer_parameters_on_paginated_requests: bool = False
  additional_query_properties: Optional[QueryProperties] = None
  log_formatter: Optional[Callable[[requests.Response], Any]] = None
+ pagination_tracker_factory: Callable[[], PaginationTracker] = field(
+ default_factory=lambda: lambda: PaginationTracker()
+ )

  def __post_init__(self, parameters: Mapping[str, Any]) -> None:
+ # while changing `ModelToComponentFactory.create_simple_retriever` to accept a cursor, the sources implementing
+ # a CustomRetriever inheriting for SimpleRetriever needed to have the following validation added.
+ self.cursor = None if isinstance(self.cursor, Cursor) else self.cursor
  self._paginator = self.paginator or NoPagination(parameters=parameters)
  self._parameters = parameters
  self._name = (
@@ -362,90 +374,97 @@ class SimpleRetriever(Retriever):
  stream_state: Mapping[str, Any],
  stream_slice: StreamSlice,
  ) -> Iterable[Record]:
- pagination_complete = False
- initial_token = self._paginator.get_initial_token()
- next_page_token: Optional[Mapping[str, Any]] = (
- {"next_page_token": initial_token} if initial_token is not None else None
- )
- while not pagination_complete:
- property_chunks: List[List[str]] = (
- list(
- self.additional_query_properties.get_request_property_chunks(
- stream_slice=stream_slice
- )
- )
- if self.additional_query_properties
- else [
- []
- ] # A single empty property chunk represents the case where property chunking is not configured
- )
-
+ pagination_tracker = self.pagination_tracker_factory()
+ reset_pagination = False
+ next_page_token = self._get_initial_next_page_token()
+ while True:
  merged_records: MutableMapping[str, Any] = defaultdict(dict)
  last_page_size = 0
  last_record: Optional[Record] = None
- response: Optional[requests.Response] = None
- for properties in property_chunks:
- if len(properties) > 0:
- stream_slice = StreamSlice(
- partition=stream_slice.partition or {},
- cursor_slice=stream_slice.cursor_slice or {},
- extra_fields={"query_properties": properties},
- )
-
- response = self._fetch_next_page(stream_state, stream_slice, next_page_token)
- for current_record in records_generator_fn(response):
- if (
- current_record
- and self.additional_query_properties
- and self.additional_query_properties.property_chunking
+
+ response = None
+ try:
+ if (
+ self.additional_query_properties
+ and self.additional_query_properties.property_chunking
+ ):
+ for properties in self.additional_query_properties.get_request_property_chunks(
+ stream_slice=stream_slice
  ):
- merge_key = (
- self.additional_query_properties.property_chunking.get_merge_key(
- current_record
+ stream_slice = StreamSlice(
+ partition=stream_slice.partition or {},
+ cursor_slice=stream_slice.cursor_slice or {},
+ extra_fields={"query_properties": properties},
+ )
+ response = self._fetch_next_page(
+ stream_state, stream_slice, next_page_token
+ )
+
+ for current_record in records_generator_fn(response):
+ merge_key = (
+ self.additional_query_properties.property_chunking.get_merge_key(
+ current_record
+ )
  )
+ if merge_key:
+ _deep_merge(merged_records[merge_key], current_record)
+ else:
+ # We should still emit records even if the record did not have a merge key
+ pagination_tracker.observe(current_record)
+ last_page_size += 1
+ last_record = current_record
+ yield current_record
+
+ for merged_record in merged_records.values():
+ record = Record(
+ data=merged_record, stream_name=self.name, associated_slice=stream_slice
  )
- if merge_key:
- _deep_merge(merged_records[merge_key], current_record)
- else:
- # We should still emit records even if the record did not have a merge key
- last_page_size += 1
- last_record = current_record
- yield current_record
- else:
+ pagination_tracker.observe(record)
+ last_page_size += 1
+ last_record = record
+ yield record
+ else:
+ response = self._fetch_next_page(stream_state, stream_slice, next_page_token)
+ for current_record in records_generator_fn(response):
+ pagination_tracker.observe(current_record)
  last_page_size += 1
  last_record = current_record
  yield current_record
-
- if (
- self.additional_query_properties
- and self.additional_query_properties.property_chunking
- ):
- for merged_record in merged_records.values():
- record = Record(
- data=merged_record, stream_name=self.name, associated_slice=stream_slice
- )
- last_page_size += 1
- last_record = record
- yield record
-
- if not response:
- pagination_complete = True
+ except PaginationResetRequiredException:
+ reset_pagination = True
+ else:
+ if not response:
+ break
+
+ if reset_pagination or pagination_tracker.has_reached_limit():
+ next_page_token = self._get_initial_next_page_token()
+ previous_slice = stream_slice
+ stream_slice = pagination_tracker.reduce_slice_range_if_possible(stream_slice)
+ LOGGER.info(
+ f"Hitting PaginationReset event. StreamSlice used will go from {previous_slice} to {stream_slice}"
+ )
+ reset_pagination = False
  else:
  last_page_token_value = (
  next_page_token.get("next_page_token") if next_page_token else None
  )
  next_page_token = self._next_page_token(
- response=response,
+ response=response, # type:ignore # we are breaking from the loop on the try/else if there are no response so this should be fine
  last_page_size=last_page_size,
  last_record=last_record,
  last_page_token_value=last_page_token_value,
  )
  if not next_page_token:
- pagination_complete = True
+ break

  # Always return an empty generator just in case no records were ever yielded
  yield from []

+ def _get_initial_next_page_token(self) -> Optional[Mapping[str, Any]]:
+ initial_token = self._paginator.get_initial_token()
+ next_page_token = {"next_page_token": initial_token} if initial_token is not None else None
+ return next_page_token
+
  def _read_single_page(
  self,
  records_generator_fn: Callable[[Optional[requests.Response]], Iterable[Record]],
@@ -19,7 +19,7 @@ from typing import (
  )

  from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
- from airbyte_cdk.sources.message import MessageRepository
+ from airbyte_cdk.sources.message import MessageRepository, NoopMessageRepository
  from airbyte_cdk.sources.streams import NO_CURSOR_STATE_KEY
  from airbyte_cdk.sources.streams.concurrent.clamping import ClampingStrategy, NoClamping
  from airbyte_cdk.sources.streams.concurrent.cursor_types import CursorValueType, GapType
@@ -41,7 +41,7 @@ class CursorField:
  def __init__(self, cursor_field_key: str) -> None:
  self.cursor_field_key = cursor_field_key

- def extract_value(self, record: Record) -> CursorValueType:
+ def extract_value(self, record: Record) -> Any:
  cursor_value = record.data.get(self.cursor_field_key)
  if cursor_value is None:
  raise ValueError(f"Could not find cursor field {self.cursor_field_key} in record")
@@ -136,6 +136,24 @@ class ConcurrentCursor(Cursor):
  _START_BOUNDARY = 0
  _END_BOUNDARY = 1

+ def copy_without_state(self) -> "ConcurrentCursor":
+ return self.__class__(
+ stream_name=self._stream_name,
+ stream_namespace=self._stream_namespace,
+ stream_state={},
+ message_repository=NoopMessageRepository(),
+ connector_state_manager=ConnectorStateManager(),
+ connector_state_converter=self._connector_state_converter,
+ cursor_field=self._cursor_field,
+ slice_boundary_fields=self._slice_boundary_fields,
+ start=self._start,
+ end_provider=self._end_provider,
+ lookback_window=self._lookback_window,
+ slice_range=self._slice_range,
+ cursor_granularity=self._cursor_granularity,
+ clamping_strategy=self._clamping_strategy,
+ )
+
  def __init__(
  self,
  stream_name: str,
@@ -174,6 +192,7 @@ class ConcurrentCursor(Cursor):
  # Flag to track if the logger has been triggered (per stream)
  self._should_be_synced_logger_triggered = False
  self._clamping_strategy = clamping_strategy
+ self._is_ascending_order = True

  # A lock is required when closing a partition because updating the cursor's concurrent_state is
  # not thread safe. When multiple partitions are being closed by the cursor at the same time, it is
@@ -245,6 +264,8 @@ class ConcurrentCursor(Cursor):

  if most_recent_cursor_value is None or most_recent_cursor_value < cursor_value:
  self._most_recent_cursor_value_per_partition[record.associated_slice] = cursor_value
+ elif most_recent_cursor_value > cursor_value:
+ self._is_ascending_order = False
  except ValueError:
  self._log_for_record_without_cursor_value()

@@ -516,3 +537,31 @@ class ConcurrentCursor(Cursor):
  f"Could not find cursor field `{self.cursor_field.cursor_field_key}` in record for stream {self._stream_name}. The incremental sync will assume it needs to be synced"
  )
  self._should_be_synced_logger_triggered = True
+
+ def reduce_slice_range(self, stream_slice: StreamSlice) -> StreamSlice:
+ # In theory, we might be more flexible here meaning that it doesn't need to be in ascending order but it just
+ # needs to be ordered. For now though, we will only support ascending order.
+ if not self._is_ascending_order:
+ LOGGER.warning(
+ "Attempting to reduce slice while records are not returned in incremental order might lead to missing records"
+ )
+
+ if stream_slice in self._most_recent_cursor_value_per_partition:
+ return StreamSlice(
+ partition=stream_slice.partition,
+ cursor_slice={
+ self._slice_boundary_fields_wrapper[
+ self._START_BOUNDARY
+ ]: self._connector_state_converter.output_format(
+ self._most_recent_cursor_value_per_partition[stream_slice]
+ ),
+ self._slice_boundary_fields_wrapper[
+ self._END_BOUNDARY
+ ]: stream_slice.cursor_slice[
+ self._slice_boundary_fields_wrapper[self._END_BOUNDARY]
+ ],
+ },
+ extra_fields=stream_slice.extra_fields,
+ )
+ else:
+ return stream_slice
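
For intuition on `reduce_slice_range`: when the cursor has observed records for the slice, the returned slice keeps the end boundary but moves the start boundary up to the most recent cursor value, so the retried request skips what was already read. A hypothetical before/after (the `start_time`/`end_time` field names are illustrative; the real boundary names come from `slice_boundary_fields`):

from airbyte_cdk.sources.types import StreamSlice

# Slice as originally emitted by the cursor.
original = StreamSlice(
    partition={},
    cursor_slice={"start_time": "2024-01-01", "end_time": "2024-03-01"},
)
# If the most recent observed cursor value for this slice was "2024-02-15",
# reduce_slice_range would yield the equivalent of:
reduced = StreamSlice(
    partition={},
    cursor_slice={"start_time": "2024-02-15", "end_time": "2024-03-01"},
)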
@@ -16,6 +16,7 @@ class ResponseAction(Enum):
  RETRY = "RETRY"
  FAIL = "FAIL"
  IGNORE = "IGNORE"
+ RESET_PAGINATION = "RESET_PAGINATION"
  RATE_LIMITED = "RATE_LIMITED"


@@ -42,6 +42,9 @@ from airbyte_cdk.sources.streams.http.exceptions import (
  RequestBodyException,
  UserDefinedBackoffException,
  )
+ from airbyte_cdk.sources.streams.http.pagination_reset_exception import (
+ PaginationResetRequiredException,
+ )
  from airbyte_cdk.sources.streams.http.rate_limiting import (
  http_client_default_backoff_handler,
  rate_limit_default_backoff_handler,
@@ -428,6 +431,9 @@ class HttpClient:
  if error_resolution.response_action not in self._ACTIONS_TO_RETRY_ON:
  self._evict_key(request)

+ if error_resolution.response_action == ResponseAction.RESET_PAGINATION:
+ raise PaginationResetRequiredException()
+

  # Emit stream status RUNNING with the reason RATE_LIMITED to log that the rate limit has been reached
  if error_resolution.response_action == ResponseAction.RATE_LIMITED:
@@ -0,0 +1,2 @@
+ class PaginationResetRequiredException(Exception):
+ pass
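
End to end, a connector opts into this behavior through its error handler: when a matching response is seen, `HttpClient` raises `PaginationResetRequiredException` and `SimpleRetriever` restarts pagination. A sketch using the generated models (treating HTTP 400 as a reset trigger is an arbitrary example, not a recommendation):

from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
    Action,
    HttpResponseFilter,
)

reset_filter = HttpResponseFilter(
    type="HttpResponseFilter",
    action=Action.RESET_PAGINATION,
    http_codes=[400],
)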
@@ -75,6 +75,25 @@ class NestedPath(Path):
  return f"NestedPath(path={self._path})"


+ class RootPath:
+ """
+ Path to use when the root of the response is an array.
+ """
+
+ def write(self, template: List[Dict[str, Any]], value: List[Dict[str, Any]]) -> None:
+ template.extend(value)
+
+ def update(self, template: List[Dict[str, Any]], value: List[Any]) -> None:
+ template.clear()
+ template.extend(value)
+
+ def extract(self, template: List[Dict[str, Any]]) -> Any:
+ return template
+
+ def __str__(self) -> str:
+ return f"RootPath"
+
+
  class PaginationStrategy(ABC):
  @abstractmethod
  def update(self, response: Dict[str, Any]) -> None:
@@ -149,12 +168,14 @@ class RecordBuilder:
  class HttpResponseBuilder:
  def __init__(
  self,
- template: Dict[str, Any],
- records_path: Union[FieldPath, NestedPath],
+ template: Union[Dict[str, Any], List[Dict[str, Any]]],
+ records_path: Union[FieldPath, NestedPath, RootPath],
  pagination_strategy: Optional[PaginationStrategy],
  ):
- self._response = template
+ _validate_path_with_response(records_path, template)
+
  self._records: List[RecordBuilder] = []
+ self._response = template
  self._records_path = records_path
  self._pagination_strategy = pagination_strategy
  self._status_code = 200
@@ -169,6 +190,9 @@ class HttpResponseBuilder:
  "`pagination_strategy` was not provided and hence, fields related to the pagination can't be modified. Please provide "
  "`pagination_strategy` while instantiating ResponseBuilder to leverage this capability"
  )
+ elif isinstance(self._response, List):
+ raise ValueError("pagination_strategy requires the response to be a dict but was list")
+
  self._pagination_strategy.update(self._response)
  return self

@@ -177,7 +201,7 @@ class HttpResponseBuilder:
  return self

  def build(self) -> HttpResponse:
- self._records_path.update(self._response, [record.build() for record in self._records])
+ self._records_path.update(self._response, [record.build() for record in self._records]) # type: ignore # validated using _validate_path_with_response
  return HttpResponse(json.dumps(self._response), self._status_code)


@@ -208,15 +232,16 @@ def find_binary_response(resource: str, execution_folder: str) -> bytes:

  def create_record_builder(
  response_template: Dict[str, Any],
- records_path: Union[FieldPath, NestedPath],
+ records_path: Union[FieldPath, NestedPath, RootPath],
  record_id_path: Optional[Path] = None,
  record_cursor_path: Optional[Union[FieldPath, NestedPath]] = None,
  ) -> RecordBuilder:
  """
  This will use the first record define at `records_path` as a template for the records. If more records are defined, they will be ignored
  """
+ _validate_path_with_response(records_path, response_template)
  try:
- record_template = records_path.extract(response_template)[0]
+ record_template = records_path.extract(response_template)[0] # type: ignore # validated using _validate_path_with_response
  if not record_template:
  raise ValueError(
  f"Could not extract any record from template at path `{records_path}`. "
@@ -230,8 +255,20 @@ def create_record_builder(


  def create_response_builder(
- response_template: Dict[str, Any],
- records_path: Union[FieldPath, NestedPath],
+ response_template: Union[Dict[str, Any], List[Dict[str, Any]]],
+ records_path: Union[FieldPath, NestedPath, RootPath],
  pagination_strategy: Optional[PaginationStrategy] = None,
  ) -> HttpResponseBuilder:
  return HttpResponseBuilder(response_template, records_path, pagination_strategy)
+
+
+ def _validate_path_with_response(
+ records_path: Union[FieldPath, NestedPath, RootPath],
+ response_template: Union[Dict[str, Any], List[Dict[str, Any]]],
+ ) -> None:
+ if isinstance(response_template, List) and not isinstance(records_path, RootPath):
+ raise ValueError("templates of type lists require RootPath")
+ elif isinstance(response_template, Dict) and not isinstance(
+ records_path, (FieldPath, NestedPath)
+ ):
+ raise ValueError("templates of type dict either require FieldPath or NestedPath")
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: airbyte-cdk
- Version: 7.3.2
+ Version: 7.3.3
  Summary: A framework for writing Airbyte Connectors.
  Home-page: https://airbyte.com
  License: MIT
@@ -130,7 +130,7 @@ airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=z0AgJ6AZ
  airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
  airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=_zGNGq31RNy_0QBLt_EcTvgPyhj7urPdx6oA3M5-r3o,3150
  airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
- airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=D5WeK1Iw_T0ZxLKCmKLowyO7GwxnwfhYom-sd1W14uQ,187793
+ airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=0rHsucvsnGfcDVZ3hn9mBU9bhiHVC979LfXuZfoqaUk,188875
  airbyte_cdk/sources/declarative/decoders/__init__.py,sha256=JHb_0d3SE6kNY10mxA5YBEKPeSbsWYjByq1gUQxepoE,953
  airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py,sha256=qB4lRUrCXLTE-a3VlpOLaazHiC7RIF_FIVJesuz7ebw,8078
  airbyte_cdk/sources/declarative/decoders/decoder.py,sha256=1PeKwuMK8x9dsA2zqUjSVinEWVSEgYcUS6npiW3aC2c,855
@@ -150,7 +150,7 @@ airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=vCpwX1PVRFP
  airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=WJyA2OYIEgFpVP5Y3o0tIj69AV6IKkn9B16MeXaEItI,6513
  airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
  airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=_y8H65KgdmVNpwQAzXtXzi-t9mY6bmIIAWtRAbpHfEo,295
- airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=vaynWCXmScAuVnrbJ2T7M1Y4RSZO7ctAej-kzZJYifk,27868
+ airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=ldroIGnz1rie9cNZF-Jsl3J6yAqNe7KYS7PNE342Eqs,27995
  airbyte_cdk/sources/declarative/interpolation/__init__.py,sha256=Kh7FxhfetyNVDnAQ9zSxNe4oUbb8CvoW7Mqz7cs2iPg,437
  airbyte_cdk/sources/declarative/interpolation/filters.py,sha256=cYap5zzOxIJWCLIfbkNlpyfUhjZ8FklLroIG4WGzYVs,5537
  airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py,sha256=8F3ntT_Mfo8cO9n6dCq8rTfJIpfKmzRCsVtVdhzaoGc,1964
@@ -165,14 +165,14 @@ airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migrati
  airbyte_cdk/sources/declarative/migrations/state_migration.py,sha256=KWPjealMLKSMtajXgkdGgKg7EmTLR-CqqD7UIh0-eDU,794
  airbyte_cdk/sources/declarative/models/__init__.py,sha256=nUFxNCiKeYRVXuZEKA7GD-lTHxsiKcQ8FitZjKhPIvE,100
  airbyte_cdk/sources/declarative/models/base_model_with_deprecations.py,sha256=Imnj3yef0aqRdLfaUxkIYISUb8YkiPrRH_wBd-x8HjM,5999
- airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=t2td-7swgXY3RJc1VDBFUYI2Blc55j5TDFdg90aHwlU,132123
+ airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=oLHcK2TVRgzahkDPuPvZ-6OqXS_DQU2gcBGq3SRpKsY,132793
  airbyte_cdk/sources/declarative/parsers/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQyRMXTs4GTvlRi3ImKnIioo,61
  airbyte_cdk/sources/declarative/parsers/custom_code_compiler.py,sha256=nlVvHC511NUyDEEIRBkoeDTAvLqKNp-hRy8D19z8tdk,5941
  airbyte_cdk/sources/declarative/parsers/custom_exceptions.py,sha256=wnRUP0Xeru9Rbu5OexXSDN9QWDo8YU4tT9M2LDVOgGA,802
  airbyte_cdk/sources/declarative/parsers/manifest_component_transformer.py,sha256=la9Ulpc0lQewiBLKJ0FpsWxyU5XISv-ulmFRHJLJ1Pc,11292
  airbyte_cdk/sources/declarative/parsers/manifest_normalizer.py,sha256=EtKjS9c94yNp3AwQC8KUCQaAYW5T3zvFYxoWYjc_buI,19729
  airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py,sha256=pJmg78vqE5VfUrF_KJnWjucQ4k9IWFULeAxHCowrHXE,6806
- airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=QUDfBVn3KQZQKQJUyr4Dfzez4742JEtat3VuwStsTHM,185692
+ airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=AgZ7mRbXVZHWTftPuDdH6wTanZPGi_dxtiu2dSxyWjs,188331
  airbyte_cdk/sources/declarative/partition_routers/__init__.py,sha256=TBC9AkGaUqHm2IKHMPN6punBIcY5tWGULowcLoAVkfw,1109
  airbyte_cdk/sources/declarative/partition_routers/async_job_partition_router.py,sha256=VelO7zKqKtzMJ35jyFeg0ypJLQC0plqqIBNXoBW1G2E,3001
  airbyte_cdk/sources/declarative/partition_routers/cartesian_product_stream_slicer.py,sha256=ocm4hZ4k-tEGs5HLrtI8ecWSK0hGqNH0Rvz2byx_HZk,6927
@@ -191,7 +191,7 @@ airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/hea
  airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/wait_time_from_header_backoff_strategy.py,sha256=I3KYCJHhPiRfxYUzOa293YH4U3wGFISDsdY1OMHWRtw,2942
  airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategies/wait_until_time_from_header_backoff_strategy.py,sha256=T2JTIdHdPzPiW0MpkCNYPsuaHUtF9V-ijNqUqdTDl6U,3069
  airbyte_cdk/sources/declarative/requesters/error_handlers/backoff_strategy.py,sha256=ZN5kcaVAQDinX0Ld5NXA8M_7Sax5BoPsknVwH7v06as,634
- airbyte_cdk/sources/declarative/requesters/error_handlers/composite_error_handler.py,sha256=4_PegbHBUiNbqa5ndZ2n9rm69O2iEfWU-NcIhSXZDIs,4137
+ airbyte_cdk/sources/declarative/requesters/error_handlers/composite_error_handler.py,sha256=yVMyKI4C-9vbIrTtQPdAndZQzdzpgRzqxtXDed2xwcE,4050
  airbyte_cdk/sources/declarative/requesters/error_handlers/default_error_handler.py,sha256=BGED9TcbA3mlvd9D7sog_u5AiyjWGVOUq_00aK3PNzg,5111
  airbyte_cdk/sources/declarative/requesters/error_handlers/default_http_response_filter.py,sha256=q0YkeYUUWO6iErUy0vjqiOkhg8_9d5YcCmtlpXAJJ9E,1314
  airbyte_cdk/sources/declarative/requesters/error_handlers/error_handler.py,sha256=Tan66odx8VHzfdyyXMQkXz2pJYksllGqvxmpoajgcK4,669
@@ -240,8 +240,9 @@ airbyte_cdk/sources/declarative/retrievers/file_uploader/file_uploader.py,sha256
  airbyte_cdk/sources/declarative/retrievers/file_uploader/file_writer.py,sha256=V8gAFjQXkhX5mwj1NafdcUrMfMBNF1hi0mrdXIl5qEc,359
  airbyte_cdk/sources/declarative/retrievers/file_uploader/local_file_system_file_writer.py,sha256=jLpdonre1UHfbjGSD5AK_T0codLABJByTvbqepDZtEQ,422
  airbyte_cdk/sources/declarative/retrievers/file_uploader/noop_file_writer.py,sha256=1yfimzxm09d2j605cu_HhiYVDNVL1rUMi3vs_jYlIyY,330
+ airbyte_cdk/sources/declarative/retrievers/pagination_tracker.py,sha256=h-3GfksrWaQUa1xIefq9eG-6_DuW77Vq8XDenv-hCps,2865
  airbyte_cdk/sources/declarative/retrievers/retriever.py,sha256=os5psYh8z7ZdCAvbfZeTpmjvPa7Qpx0mblpKf47ZaZM,1876
- airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=sa8xS8FTStqgp1kkc_ObJjO_b1Q4Nek3XdJ7KODLqQw,28136
+ airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=8nb87fsBno1SOxHxYA-sGxAy48sapcF3aZszBu4Ew_s,29643
  airbyte_cdk/sources/declarative/schema/__init__.py,sha256=xU45UvM5O4c1PSM13UHpCdh5hpW3HXy9vRRGEiAC1rg,795
  airbyte_cdk/sources/declarative/schema/composite_schema_loader.py,sha256=ymGbvxS_QyGc4nnjEyRo5ch8bVedELO41PAUxKXZyMw,1113
  airbyte_cdk/sources/declarative/schema/default_schema_loader.py,sha256=UnbzlExmwoQiVV8zDg4lhAEaqA_0pRfwbMRe8yqOuWk,1834
@@ -352,7 +353,7 @@ airbyte_cdk/sources/streams/concurrent/abstract_stream_facade.py,sha256=QTry1QCB
  airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=h4ZewhWn2PzPTt0lZZjcUL4rrpW9E_of7prnI3bm-c4,14004
  airbyte_cdk/sources/streams/concurrent/availability_strategy.py,sha256=M0XmvF3vjlr4GbCM0XH1hAj7udiAONM9SnmXjqufzLM,1035
  airbyte_cdk/sources/streams/concurrent/clamping.py,sha256=i26GVyui2ScEXSP-IP_61K2HaTp1-6lTlYHsZVYpuZA,3240
- airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=Dxjx4IAHZ6HHyfJ-B5SUTTYgdb1ZiiBKsZm3pYUquzk,23411
+ airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=oEcqUyswPyOg6SnjrFr8c8YYxVvaaInWNCHRLQcKjmk,25713
  airbyte_cdk/sources/streams/concurrent/cursor_types.py,sha256=ZyWLPpeLX1qXcP5MwS-wxK11IBMsnVPCw9zx8gA2_Ro,843
  airbyte_cdk/sources/streams/concurrent/default_stream.py,sha256=SSufbo5f7OOYS8DZaABXeJVvodcfp9wb8J9lT5Xik3s,4744
  airbyte_cdk/sources/streams/concurrent/exceptions.py,sha256=JOZ446MCLpmF26r9KfS6OO_6rGjcjgJNZdcw6jccjEI,468
@@ -379,10 +380,11 @@ airbyte_cdk/sources/streams/http/error_handlers/error_handler.py,sha256=GuqP7U1e
  airbyte_cdk/sources/streams/http/error_handlers/error_message_parser.py,sha256=xC93uB5BJd3iOnAXCrYLJTitWeGZlqzwe55VtsZqNnE,456
  airbyte_cdk/sources/streams/http/error_handlers/http_status_error_handler.py,sha256=2gqececTxxUqO6aIkVNNXADg48Px5EHUwnXHL9KiPT8,4188
  airbyte_cdk/sources/streams/http/error_handlers/json_error_message_parser.py,sha256=GW5rkBQLLTj7MEaDdbpG7DHxTQVRrDOg1ehLLxjqiM4,1828
- airbyte_cdk/sources/streams/http/error_handlers/response_models.py,sha256=xGIVELBFY0TmH9aUq1ikoqJz8oHLr6di2JLvKWVEO-s,2236
+ airbyte_cdk/sources/streams/http/error_handlers/response_models.py,sha256=4uE83yZfBe8s_81U3yZRcY1eRhvJnz9NanCp7G2PY-k,2278
  airbyte_cdk/sources/streams/http/exceptions.py,sha256=TTUpWq_qLPtdvXqYPpMhtYbFVQ7dGtajDVfjb6KQ8z8,2099
  airbyte_cdk/sources/streams/http/http.py,sha256=0uariNq8OFnlX7iqOHwBhecxA-Hfd5hSY8_XCEgn3jI,28499
- airbyte_cdk/sources/streams/http/http_client.py,sha256=7VzR4Cm5Sqm79SdFg26WXPzQC_-RclPHAFKiyCjoFs8,25442
+ airbyte_cdk/sources/streams/http/http_client.py,sha256=Afa4bPJrazMmSG15y-Hum194LnlpkaNrfgjLQk1SzqM,25690
+ airbyte_cdk/sources/streams/http/pagination_reset_exception.py,sha256=M5zUi9OREH5DnHHoZUDbKf9uTdIBta8je5p52U9gu94,60
  airbyte_cdk/sources/streams/http/rate_limiting.py,sha256=IwdjrHKUnU97XO4qONgYRv4YYW51xQ8SJm4WLafXDB8,6351
  airbyte_cdk/sources/streams/http/requests_native_auth/__init__.py,sha256=RN0D3nOX1xLgwEwKWu6pkGy3XqBFzKSNZ8Lf6umU2eY,413
  airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py,sha256=aWrBmJ8AhUtvtHhHq5JGVZFXjDa7jG8DZePG4gEs9VY,19800
@@ -419,7 +421,7 @@ airbyte_cdk/test/mock_http/matcher.py,sha256=4Qj8UnJKZIs-eodshryce3SN1Ayc8GZpBET
  airbyte_cdk/test/mock_http/mocker.py,sha256=XgsjMtVoeMpRELPyALgrkHFauH9H5irxrz1Kcxh2yFY,8013
  airbyte_cdk/test/mock_http/request.py,sha256=tdB8cqk2vLgCDTOKffBKsM06llYs4ZecgtH6DKyx6yY,4112
  airbyte_cdk/test/mock_http/response.py,sha256=s4-cQQqTtmeej0pQDWqmG0vUWpHS-93lIWMpW3zSVyU,662
- airbyte_cdk/test/mock_http/response_builder.py,sha256=F-v7ebftqGj7YVIMLKdodmU9U8Dq8aIyllWGo2NGwHc,8331
+ airbyte_cdk/test/mock_http/response_builder.py,sha256=N9DovhVtLqIGyubWcPGomr9CNy8KLg-EJoDk6x_t4js,9857
  airbyte_cdk/test/models/__init__.py,sha256=5f5oFcuUA3dyNTfvvTWav2pTD8WX4nznObKgMTmvdus,290
  airbyte_cdk/test/models/outcome.py,sha256=niSX6gkP4P-_kQUF1jkbBXq72FC3Rtkvtdl0gJsUyho,2263
  airbyte_cdk/test/models/scenario.py,sha256=M6vq4btxUI6ZiSQNNoNFOgUsZNDFdoieGOTe-AVHstc,6435
@@ -457,9 +459,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
  airbyte_cdk/utils/spec_schema_transformations.py,sha256=9YDJmnIGFsT51CVQf2tSSvTapGimITjEFGbUTSZAGTI,963
  airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
  airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
- airbyte_cdk-7.3.2.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
- airbyte_cdk-7.3.2.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
- airbyte_cdk-7.3.2.dist-info/METADATA,sha256=rx2PHndj9YB3wb4wbgEplFCj7zyt3Lge0QkB5ztsRHQ,6798
- airbyte_cdk-7.3.2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- airbyte_cdk-7.3.2.dist-info/entry_points.txt,sha256=eLZ2UYvJZGm1s07Pplcs--1Gim60YhZWTb53j_dghwU,195
- airbyte_cdk-7.3.2.dist-info/RECORD,,
+ airbyte_cdk-7.3.3.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+ airbyte_cdk-7.3.3.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+ airbyte_cdk-7.3.3.dist-info/METADATA,sha256=oy27N1IyxbkA8M82Maa4L-hRO-Njevgu_4AqS89jyFU,6798
+ airbyte_cdk-7.3.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ airbyte_cdk-7.3.3.dist-info/entry_points.txt,sha256=eLZ2UYvJZGm1s07Pplcs--1Gim60YhZWTb53j_dghwU,195
+ airbyte_cdk-7.3.3.dist-info/RECORD,,