airbyte-cdk 6.33.6__py3-none-any.whl → 6.34.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
- airbyte_cdk/sources/declarative/concurrent_declarative_source.py +0 -75
- airbyte_cdk/sources/declarative/declarative_component_schema.yaml +0 -15
- airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +56 -25
- airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +5 -0
- airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py +6 -2
- airbyte_cdk/sources/declarative/interpolation/jinja.py +13 -0
- airbyte_cdk/sources/declarative/requesters/http_requester.py +0 -1
- airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py +10 -0
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py +1 -4
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py +0 -3
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py +2 -47
- airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +0 -2
- airbyte_cdk/sources/declarative/transformations/add_fields.py +4 -4
- airbyte_cdk/test/mock_http/mocker.py +9 -1
- airbyte_cdk/test/mock_http/response.py +6 -3
- airbyte_cdk/utils/mapping_helpers.py +43 -2
- {airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/METADATA +1 -1
- {airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/RECORD +22 -22
- {airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/LICENSE_SHORT +0 -0
- {airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/WHEEL +0 -0
- {airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/entry_points.txt +0 -0
airbyte_cdk/sources/declarative/concurrent_declarative_source.py
CHANGED
@@ -24,7 +24,6 @@ from airbyte_cdk.sources.declarative.incremental.datetime_based_cursor import Da
 from airbyte_cdk.sources.declarative.incremental.per_partition_with_global import (
     PerPartitionWithGlobalCursor,
 )
-from airbyte_cdk.sources.declarative.interpolation import InterpolatedString
 from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     ConcurrencyLevel as ConcurrencyLevelModel,
@@ -36,13 +35,11 @@ from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import (
     ModelToComponentFactory,
 )
 from airbyte_cdk.sources.declarative.partition_routers import AsyncJobPartitionRouter
-from airbyte_cdk.sources.declarative.requesters import HttpRequester
 from airbyte_cdk.sources.declarative.retrievers import AsyncRetriever, Retriever, SimpleRetriever
 from airbyte_cdk.sources.declarative.stream_slicers.declarative_partition_generator import (
     DeclarativePartitionFactory,
     StreamSlicerPartitionGenerator,
 )
-from airbyte_cdk.sources.declarative.transformations.add_fields import AddFields
 from airbyte_cdk.sources.declarative.types import ConnectionDefinition
 from airbyte_cdk.sources.source import TState
 from airbyte_cdk.sources.streams import Stream
@@ -321,9 +318,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                 incremental_sync_component_definition
                 and incremental_sync_component_definition.get("type", "")
                 == DatetimeBasedCursorModel.__name__
-                and self._stream_supports_concurrent_partition_processing(
-                    declarative_stream=declarative_stream
-                )
                 and hasattr(declarative_stream.retriever, "stream_slicer")
                 and isinstance(
                     declarative_stream.retriever.stream_slicer, PerPartitionWithGlobalCursor
@@ -390,9 +384,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                 and bool(incremental_sync_component_definition)
                 and incremental_sync_component_definition.get("type", "")
                 == DatetimeBasedCursorModel.__name__
-                and self._stream_supports_concurrent_partition_processing(
-                    declarative_stream=declarative_stream
-                )
                 and hasattr(declarative_stream.retriever, "stream_slicer")
                 and (
                     isinstance(declarative_stream.retriever.stream_slicer, DatetimeBasedCursor)
@@ -400,72 +391,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                 )
             )
 
-    def _stream_supports_concurrent_partition_processing(
-        self, declarative_stream: DeclarativeStream
-    ) -> bool:
-        """
-        Many connectors make use of stream_state during interpolation on a per-partition basis under the assumption that
-        state is updated sequentially. Because the concurrent CDK engine processes different partitions in parallel,
-        stream_state is no longer a thread-safe interpolation context. It would be a race condition because a cursor's
-        stream_state can be updated in any order depending on which stream partition's finish first.
-
-        We should start to move away from depending on the value of stream_state for low-code components that operate
-        per-partition, but we need to gate this otherwise some connectors will be blocked from publishing. See the
-        cdk-migrations.md for the full list of connectors.
-        """
-
-        if isinstance(declarative_stream.retriever, SimpleRetriever) and isinstance(
-            declarative_stream.retriever.requester, HttpRequester
-        ):
-            http_requester = declarative_stream.retriever.requester
-            if "stream_state" in http_requester._path.string:
-                self.logger.warning(
-                    f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the HttpRequester which is not thread-safe. Defaulting to synchronous processing"
-                )
-                return False
-
-            request_options_provider = http_requester._request_options_provider
-            if request_options_provider.request_options_contain_stream_state():
-                self.logger.warning(
-                    f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the HttpRequester which is not thread-safe. Defaulting to synchronous processing"
-                )
-                return False
-
-        record_selector = declarative_stream.retriever.record_selector
-        if isinstance(record_selector, RecordSelector):
-            if (
-                record_selector.record_filter
-                and not isinstance(
-                    record_selector.record_filter, ClientSideIncrementalRecordFilterDecorator
-                )
-                and "stream_state" in record_selector.record_filter.condition
-            ):
-                self.logger.warning(
-                    f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the RecordFilter which is not thread-safe. Defaulting to synchronous processing"
-                )
-                return False
-
-            for add_fields in [
-                transformation
-                for transformation in record_selector.transformations
-                if isinstance(transformation, AddFields)
-            ]:
-                for field in add_fields.fields:
-                    if isinstance(field.value, str) and "stream_state" in field.value:
-                        self.logger.warning(
-                            f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the AddFields which is not thread-safe. Defaulting to synchronous processing"
-                        )
-                        return False
-                    if (
-                        isinstance(field.value, InterpolatedString)
-                        and "stream_state" in field.value.string
-                    ):
-                        self.logger.warning(
-                            f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the AddFields which is not thread-safe. Defaulting to synchronous processing"
-                        )
-                        return False
-        return True
-
     @staticmethod
     def _get_retriever(
         declarative_stream: DeclarativeStream, stream_state: Mapping[str, Any]
airbyte_cdk/sources/declarative/declarative_component_schema.yaml
CHANGED
@@ -82,7 +82,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - "{{ record['updates'] }}"
           - "{{ record['MetaData']['LastUpdatedTime'] }}"
@@ -1776,7 +1775,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - "/products"
           - "/quotes/{{ stream_partition['id'] }}/quote_line_groups"
@@ -1826,7 +1824,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - |
            [{"clause": {"type": "timestamp", "operator": 10, "parameters":
@@ -1844,7 +1841,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - sort_order: "ASC"
             sort_field: "CREATED_AT"
@@ -1865,7 +1861,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - Output-Format: JSON
           - Version: "{{ config['version'] }}"
@@ -1882,7 +1877,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - unit: "day"
           - query: 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"'
@@ -2237,7 +2231,6 @@ definitions:
         interpolation_context:
           - config
           - record
-          - stream_state
           - stream_slice
       new:
         type: string
@@ -2251,7 +2244,6 @@ definitions:
         interpolation_context:
           - config
           - record
-          - stream_state
           - stream_slice
       $parameters:
         type: object
@@ -2901,7 +2893,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - "{{ record['created_at'] >= stream_interval['start_time'] }}"
           - "{{ record.status in ['active', 'expired'] }}"
@@ -3689,12 +3680,6 @@ interpolation:
     - title: stream_slice
       description: This variable is deprecated. Use stream_interval or stream_partition instead.
      type: object
-    - title: stream_state
-      description: The current state of the stream. The object's keys are defined by the incremental sync's cursor_field the and partition router's values.
-      type: object
-      examples:
-        - created_at: "2020-01-01 00:00:00.000+00:00"
-        - updated_at: "2020-01-02 00:00:00.000+00:00"
   macros:
     - title: now_utc
       description: Returns the current date and time in the UTC timezone.
airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py
CHANGED
@@ -5,6 +5,7 @@
 import copy
 import logging
 import threading
+import time
 from collections import OrderedDict
 from copy import deepcopy
 from datetime import timedelta
@@ -58,7 +59,8 @@ class ConcurrentPerPartitionCursor(Cursor):
     CurrentPerPartitionCursor expects the state of the ConcurrentCursor to follow the format {cursor_field: cursor_value}.
     """
 
-    DEFAULT_MAX_PARTITIONS_NUMBER =
+    DEFAULT_MAX_PARTITIONS_NUMBER = 25_000
+    SWITCH_TO_GLOBAL_LIMIT = 10_000
     _NO_STATE: Mapping[str, Any] = {}
     _NO_CURSOR_STATE: Mapping[str, Any] = {}
     _GLOBAL_STATE_KEY = "state"
@@ -99,9 +101,11 @@ class ConcurrentPerPartitionCursor(Cursor):
         self._new_global_cursor: Optional[StreamState] = None
         self._lookback_window: int = 0
         self._parent_state: Optional[StreamState] = None
-        self.
+        self._number_of_partitions: int = 0
         self._use_global_cursor: bool = False
         self._partition_serializer = PerPartitionKeySerializer()
+        # Track the last time a state message was emitted
+        self._last_emission_time: float = 0.0
 
         self._set_initial_state(stream_state)
 
@@ -141,21 +145,16 @@ class ConcurrentPerPartitionCursor(Cursor):
             raise ValueError("stream_slice cannot be None")
 
         partition_key = self._to_partition_key(stream_slice.partition)
-        self._cursor_per_partition[partition_key].close_partition(partition=partition)
         with self._lock:
             self._semaphore_per_partition[partition_key].acquire()
-
-
-
-                and self._semaphore_per_partition[partition_key]._value == 0
-            ):
+            if not self._use_global_cursor:
+                self._cursor_per_partition[partition_key].close_partition(partition=partition)
+                cursor = self._cursor_per_partition[partition_key]
             if (
-                self.
-
-                < cursor.state[self.cursor_field.cursor_field_key]
+                partition_key in self._finished_partitions
+                and self._semaphore_per_partition[partition_key]._value == 0
             ):
-                self.
-                if not self._use_global_cursor:
+                self._update_global_cursor(cursor.state[self.cursor_field.cursor_field_key])
                 self._emit_state_message()
 
     def ensure_at_least_one_state_emitted(self) -> None:
@@ -169,9 +168,23 @@ class ConcurrentPerPartitionCursor(Cursor):
             self._global_cursor = self._new_global_cursor
             self._lookback_window = self._timer.finish()
             self._parent_state = self._partition_router.get_stream_state()
-        self._emit_state_message()
+        self._emit_state_message(throttle=False)
 
-    def
+    def _throttle_state_message(self) -> Optional[float]:
+        """
+        Throttles the state message emission to once every 60 seconds.
+        """
+        current_time = time.time()
+        if current_time - self._last_emission_time <= 60:
+            return None
+        return current_time
+
+    def _emit_state_message(self, throttle: bool = True) -> None:
+        if throttle:
+            current_time = self._throttle_state_message()
+            if current_time is None:
+                return
+            self._last_emission_time = current_time
         self._connector_state_manager.update_state_for_stream(
             self._stream_name,
             self._stream_namespace,
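The new `_throttle_state_message` helper caps checkpoint emission at roughly one state message per minute, while `ensure_at_least_one_state_emitted` still forces a final emission with `throttle=False`. A minimal standalone sketch of the same pattern (class and method names here are illustrative, not CDK API):

```python
import time
from typing import Optional


class ThrottledEmitter:
    """Emit at most once per interval unless the caller opts out of throttling."""

    def __init__(self, interval_seconds: float = 60.0) -> None:
        self._interval = interval_seconds
        self._last_emission_time = 0.0

    def _throttle(self) -> Optional[float]:
        now = time.time()
        if now - self._last_emission_time <= self._interval:
            return None  # too soon, skip this emission
        return now

    def emit(self, message: str, throttle: bool = True) -> bool:
        if throttle:
            now = self._throttle()
            if now is None:
                return False
            self._last_emission_time = now
        print(message)
        return True


emitter = ThrottledEmitter()
emitter.emit("checkpoint 1")                 # emitted
emitter.emit("checkpoint 2")                 # skipped (inside the 60s window)
emitter.emit("final state", throttle=False)  # always emitted
```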
@@ -202,6 +215,7 @@ class ConcurrentPerPartitionCursor(Cursor):
                 self._lookback_window if self._global_cursor else 0,
             )
         with self._lock:
+            self._number_of_partitions += 1
             self._cursor_per_partition[self._to_partition_key(partition.partition)] = cursor
             self._semaphore_per_partition[self._to_partition_key(partition.partition)] = (
                 threading.Semaphore(0)
@@ -232,9 +246,15 @@ class ConcurrentPerPartitionCursor(Cursor):
         - Logs a warning each time a partition is removed, indicating whether it was finished
           or removed due to being the oldest.
         """
+        if not self._use_global_cursor and self.limit_reached():
+            logger.info(
+                f"Exceeded the 'SWITCH_TO_GLOBAL_LIMIT' of {self.SWITCH_TO_GLOBAL_LIMIT}. "
+                f"Switching to global cursor for {self._stream_name}."
+            )
+            self._use_global_cursor = True
+
         with self._lock:
             while len(self._cursor_per_partition) > self.DEFAULT_MAX_PARTITIONS_NUMBER - 1:
-                self._over_limit += 1
                 # Try removing finished partitions first
                 for partition_key in list(self._cursor_per_partition.keys()):
                     if (
@@ -245,7 +265,7 @@ class ConcurrentPerPartitionCursor(Cursor):
                             partition_key
                         )  # Remove the oldest partition
                         logger.warning(
-                            f"The maximum number of partitions has been reached. Dropping the oldest finished partition: {oldest_partition}. Over limit: {self.
+                            f"The maximum number of partitions has been reached. Dropping the oldest finished partition: {oldest_partition}. Over limit: {self._number_of_partitions - self.DEFAULT_MAX_PARTITIONS_NUMBER}."
                         )
                         break
                 else:
@@ -254,7 +274,7 @@ class ConcurrentPerPartitionCursor(Cursor):
                         1
                     ]  # Remove the oldest partition
                     logger.warning(
-                        f"The maximum number of partitions has been reached. Dropping the oldest partition: {oldest_partition}. Over limit: {self.
+                        f"The maximum number of partitions has been reached. Dropping the oldest partition: {oldest_partition}. Over limit: {self._number_of_partitions - self.DEFAULT_MAX_PARTITIONS_NUMBER}."
                     )
 
     def _set_initial_state(self, stream_state: StreamState) -> None:
@@ -314,6 +334,7 @@ class ConcurrentPerPartitionCursor(Cursor):
         self._lookback_window = int(stream_state.get("lookback_window", 0))
 
         for state in stream_state.get(self._PERPARTITION_STATE_KEY, []):
+            self._number_of_partitions += 1
             self._cursor_per_partition[self._to_partition_key(state["partition"])] = (
                 self._create_cursor(state["cursor"])
             )
@@ -354,16 +375,26 @@ class ConcurrentPerPartitionCursor(Cursor):
             self._new_global_cursor = deepcopy(fixed_global_state)
 
     def observe(self, record: Record) -> None:
-        if not self._use_global_cursor and self.limit_reached():
-            self._use_global_cursor = True
-
         if not record.associated_slice:
             raise ValueError(
                 "Invalid state as stream slices that are emitted should refer to an existing cursor"
             )
-
-
-
+
+        record_cursor = self._connector_state_converter.output_format(
+            self._connector_state_converter.parse_value(self._cursor_field.extract_value(record))
+        )
+        self._update_global_cursor(record_cursor)
+        if not self._use_global_cursor:
+            self._cursor_per_partition[
+                self._to_partition_key(record.associated_slice.partition)
+            ].observe(record)
+
+    def _update_global_cursor(self, value: Any) -> None:
+        if (
+            self._new_global_cursor is None
+            or self._new_global_cursor[self.cursor_field.cursor_field_key] < value
+        ):
+            self._new_global_cursor = {self.cursor_field.cursor_field_key: copy.deepcopy(value)}
 
     def _to_partition_key(self, partition: Mapping[str, Any]) -> str:
         return self._partition_serializer.to_partition_key(partition)
@@ -397,4 +428,4 @@ class ConcurrentPerPartitionCursor(Cursor):
         return cursor
 
     def limit_reached(self) -> bool:
-        return self.
+        return self._number_of_partitions > self.SWITCH_TO_GLOBAL_LIMIT
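Taken together, these changes replace the old observe-time check with a count-based fallback: `_number_of_partitions` is incremented whenever a partition cursor is created or restored from state, `limit_reached()` compares it against `SWITCH_TO_GLOBAL_LIMIT`, the cleanup path flips `_use_global_cursor` once that limit is exceeded, and stored cursors are still capped at `DEFAULT_MAX_PARTITIONS_NUMBER`. A compact sketch of that fallback logic, using simplified stand-in names rather than the real class:

```python
import logging
from collections import OrderedDict

logger = logging.getLogger(__name__)

SWITCH_TO_GLOBAL_LIMIT = 10_000
DEFAULT_MAX_PARTITIONS_NUMBER = 25_000


class PartitionTracker:
    """Simplified stand-in showing the count-based switch to a global cursor."""

    def __init__(self) -> None:
        self._cursor_per_partition: "OrderedDict[str, dict]" = OrderedDict()
        self._number_of_partitions = 0
        self._use_global_cursor = False

    def limit_reached(self) -> bool:
        return self._number_of_partitions > SWITCH_TO_GLOBAL_LIMIT

    def add_partition(self, partition_key: str) -> None:
        self._number_of_partitions += 1
        self._cursor_per_partition[partition_key] = {}
        self._ensure_partition_limit()

    def _ensure_partition_limit(self) -> None:
        if not self._use_global_cursor and self.limit_reached():
            logger.info("Too many partitions; switching to global cursor.")
            self._use_global_cursor = True
        # Independently of the switch, never keep more than the hard cap in memory.
        while len(self._cursor_per_partition) > DEFAULT_MAX_PARTITIONS_NUMBER - 1:
            oldest_key, _ = self._cursor_per_partition.popitem(last=False)
            logger.warning("Dropping oldest partition cursor: %s", oldest_key)
```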
airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py
CHANGED
@@ -21,6 +21,7 @@ from airbyte_cdk.sources.declarative.requesters.request_option import (
 )
 from airbyte_cdk.sources.message import MessageRepository
 from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
+from airbyte_cdk.utils.mapping_helpers import _validate_component_request_option_paths
 
 
 @dataclass
@@ -122,6 +123,10 @@ class DatetimeBasedCursor(DeclarativeCursor):
         if not self.cursor_datetime_formats:
             self.cursor_datetime_formats = [self.datetime_format]
 
+        _validate_component_request_option_paths(
+            self.config, self.start_time_option, self.end_time_option
+        )
+
     def get_stream_state(self) -> StreamState:
         return {self.cursor_field.eval(self.config): self._cursor} if self._cursor else {}  # type: ignore  # cursor_field is converted to an InterpolatedString in __post_init__
 
airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py
CHANGED
@@ -115,7 +115,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
         * Yield the last slice. At that point, once there are as many slices yielded as closes, the global slice will be closed too
         """
         slice_generator = (
-            StreamSlice(
+            StreamSlice(
+                partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
+            )
             for partition in self._partition_router.stream_slices()
             for cursor_slice in self._stream_cursor.stream_slices()
         )
@@ -131,7 +133,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
 
     def generate_slices_from_partition(self, partition: StreamSlice) -> Iterable[StreamSlice]:
         slice_generator = (
-            StreamSlice(
+            StreamSlice(
+                partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
+            )
             for cursor_slice in self._stream_cursor.stream_slices()
         )
 
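Both slice generators now forward the parent partition's `extra_fields` onto the combined slice, so fields extracted from parent records remain available to substreams under the global cursor. A hedged example of constructing such a slice, assuming (as the diff suggests) that the `StreamSlice` constructor accepts `partition`, `cursor_slice`, and `extra_fields` keywords and exposes an `extra_fields` property; values are illustrative:

```python
from airbyte_cdk.sources.types import StreamSlice

# A parent partition that carries extra fields extracted from the parent record.
parent = StreamSlice(
    partition={"parent_id": "123"},
    cursor_slice={},
    extra_fields={"parent_name": "Acme"},
)

# The cursor-produced time window for this sync.
cursor_window = {"start_time": "2024-01-01", "end_time": "2024-01-31"}

# What the generator now yields: partition + cursor window + preserved extra fields.
combined = StreamSlice(
    partition=parent,
    cursor_slice=cursor_window,
    extra_fields=parent.extra_fields,
)
print(combined.extra_fields)  # expected: {'parent_name': 'Acme'}
```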
airbyte_cdk/sources/declarative/interpolation/jinja.py
CHANGED
@@ -11,10 +11,12 @@ from jinja2.environment import Template
 from jinja2.exceptions import UndefinedError
 from jinja2.sandbox import SandboxedEnvironment
 
+from airbyte_cdk.models import FailureType
 from airbyte_cdk.sources.declarative.interpolation.filters import filters
 from airbyte_cdk.sources.declarative.interpolation.interpolation import Interpolation
 from airbyte_cdk.sources.declarative.interpolation.macros import macros
 from airbyte_cdk.sources.types import Config
+from airbyte_cdk.utils import AirbyteTracedException
 
 
 class StreamPartitionAccessEnvironment(SandboxedEnvironment):
@@ -36,6 +38,10 @@ _ALIASES = {
     "stream_partition": "stream_slice",  # Use stream_partition to access partition router's values
 }
 
+_UNSUPPORTED_INTERPOLATION_VARIABLES: Mapping[str, str] = {
+    "stream_state": "`stream_state` is no longer supported for interpolation. We recommend using `stream_interval` instead. Please reference the CDK Migration Guide for more information.",
+}
+
 # These extensions are not installed so they're not currently a problem,
 # but we're still explicitly removing them from the jinja context.
 # At worst, this is documentation that we do NOT want to include these extensions because of the potential security risks
@@ -95,6 +101,13 @@ class JinjaInterpolation(Interpolation):
             elif equivalent in context:
                 context[alias] = context[equivalent]
 
+        for variable_name in _UNSUPPORTED_INTERPOLATION_VARIABLES:
+            if variable_name in input_str:
+                raise AirbyteTracedException(
+                    message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
+                    internal_message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
+                    failure_type=FailureType.config_error,
+                )
         try:
             if isinstance(input_str, str):
                 result = self._eval(input_str, context)
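With this guard in place, any manifest expression that still references `stream_state` fails fast with a config error instead of silently interpolating a value that is no longer thread-safe; `stream_interval` (or `stream_partition`) is the supported replacement. A hedged sketch of the before/after expressions, assuming `JinjaInterpolation.eval` accepts the interpolation context as keyword arguments as elsewhere in the CDK; the exact call signature may differ:

```python
from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation
from airbyte_cdk.utils import AirbyteTracedException

interpolation = JinjaInterpolation()

# Supported: read the current window from stream_interval (aliased to stream_slice).
value = interpolation.eval(
    "{{ stream_interval['start_time'] }}",
    config={},
    stream_slice={"start_time": "2024-01-01"},
)
print(value)  # 2024-01-01

# No longer supported: any reference to stream_state raises a config error.
try:
    interpolation.eval("{{ stream_state['updated_at'] }}", config={})
except AirbyteTracedException as error:
    print(error.message)  # points users at stream_interval and the migration guide
```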
airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py
CHANGED
@@ -23,6 +23,9 @@ from airbyte_cdk.sources.declarative.requesters.request_option import (
 )
 from airbyte_cdk.sources.declarative.requesters.request_path import RequestPath
 from airbyte_cdk.sources.types import Config, Record, StreamSlice, StreamState
+from airbyte_cdk.utils.mapping_helpers import (
+    _validate_component_request_option_paths,
+)
 
 
 @dataclass
@@ -113,6 +116,13 @@ class DefaultPaginator(Paginator):
         if isinstance(self.url_base, str):
             self.url_base = InterpolatedString(string=self.url_base, parameters=parameters)
 
+        if self.page_token_option and not isinstance(self.page_token_option, RequestPath):
+            _validate_component_request_option_paths(
+                self.config,
+                self.page_size_option,
+                self.page_token_option,
+            )
+
     def get_initial_token(self) -> Optional[Any]:
         """
         Return the page token that should be used for the first request of a stream
airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py
CHANGED
@@ -10,7 +10,7 @@ from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping i
     NestedMapping,
 )
 from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
-from airbyte_cdk.sources.types import Config, StreamSlice
+from airbyte_cdk.sources.types import Config, StreamSlice
 
 
 @dataclass
@@ -42,20 +42,17 @@ class InterpolatedNestedRequestInputProvider:
 
     def eval_request_inputs(
         self,
-        stream_state: Optional[StreamState] = None,
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
         """
         Returns the request inputs to set on an outgoing HTTP request
 
-        :param stream_state: The stream state
         :param stream_slice: The stream slice
         :param next_page_token: The pagination token
         :return: The request inputs to set on an outgoing HTTP request
         """
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }
airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py
CHANGED
@@ -37,7 +37,6 @@ class InterpolatedRequestInputProvider:
 
     def eval_request_inputs(
         self,
-        stream_state: Optional[StreamState] = None,
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
         valid_key_types: Optional[Tuple[Type[Any]]] = None,
@@ -46,7 +45,6 @@ class InterpolatedRequestInputProvider:
         """
         Returns the request inputs to set on an outgoing HTTP request
 
-        :param stream_state: The stream state
         :param stream_slice: The stream slice
         :param next_page_token: The pagination token
         :param valid_key_types: A tuple of types that the interpolator should allow
@@ -54,7 +52,6 @@ class InterpolatedRequestInputProvider:
         :return: The request inputs to set on an outgoing HTTP request
         """
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }
airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py
CHANGED
@@ -5,8 +5,6 @@
 from dataclasses import InitVar, dataclass, field
 from typing import Any, Mapping, MutableMapping, Optional, Union
 
-from typing_extensions import deprecated
-
 from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping import NestedMapping
 from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_nested_request_input_provider import (
     InterpolatedNestedRequestInputProvider,
@@ -17,7 +15,6 @@ from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_req
 from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import (
     RequestOptionsProvider,
 )
-from airbyte_cdk.sources.source import ExperimentalClassWarning
 from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
 
 RequestInput = Union[str, Mapping[str, str]]
@@ -80,7 +77,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> MutableMapping[str, Any]:
         interpolated_value = self._parameter_interpolator.eval_request_inputs(
-            stream_state,
             stream_slice,
             next_page_token,
             valid_key_types=(str,),
@@ -97,9 +93,7 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
-        return self._headers_interpolator.eval_request_inputs(
-            stream_state, stream_slice, next_page_token
-        )
+        return self._headers_interpolator.eval_request_inputs(stream_slice, next_page_token)
 
     def get_request_body_data(
         self,
@@ -109,7 +103,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Union[Mapping[str, Any], str]:
         return self._body_data_interpolator.eval_request_inputs(
-            stream_state,
             stream_slice,
             next_page_token,
             valid_key_types=(str,),
@@ -123,42 +116,4 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
-        return self._body_json_interpolator.eval_request_inputs(
-            stream_state, stream_slice, next_page_token
-        )
-
-    @deprecated(
-        "This class is temporary and used to incrementally deliver low-code to concurrent",
-        category=ExperimentalClassWarning,
-    )
-    def request_options_contain_stream_state(self) -> bool:
-        """
-        Temporary helper method used as we move low-code streams to the concurrent framework. This method determines if
-        the InterpolatedRequestOptionsProvider has is a dependency on a non-thread safe interpolation context such as
-        stream_state.
-        """
-
-        return (
-            self._check_if_interpolation_uses_stream_state(self.request_parameters)
-            or self._check_if_interpolation_uses_stream_state(self.request_headers)
-            or self._check_if_interpolation_uses_stream_state(self.request_body_data)
-            or self._check_if_interpolation_uses_stream_state(self.request_body_json)
-        )
-
-    @staticmethod
-    def _check_if_interpolation_uses_stream_state(
-        request_input: Optional[Union[RequestInput, NestedMapping]],
-    ) -> bool:
-        if not request_input:
-            return False
-        elif isinstance(request_input, str):
-            return "stream_state" in request_input
-        else:
-            for key, val in request_input.items():
-                # Covers the case of RequestInput in the form of a string or Mapping[str, str]. It also covers the case
-                # of a NestedMapping where the value is a string.
-                # Note: Doesn't account for nested mappings for request_body_json, but I don't see stream_state used in that way
-                # in our code
-                if "stream_state" in key or (isinstance(val, str) and "stream_state" in val):
-                    return True
-            return False
+        return self._body_json_interpolator.eval_request_inputs(stream_slice, next_page_token)
airbyte_cdk/sources/declarative/retrievers/simple_retriever.py
CHANGED
@@ -133,7 +133,6 @@ class SimpleRetriever(Retriever):
 
         mappings = [
             paginator_method(
-                stream_state=stream_state,
                 stream_slice=stream_slice,
                 next_page_token=next_page_token,
             ),
@@ -141,7 +140,6 @@ class SimpleRetriever(Retriever):
         if not next_page_token or not self.ignore_stream_slicer_parameters_on_paginated_requests:
             mappings.append(
                 stream_slicer_method(
-                    stream_state=stream_state,
                     stream_slice=stream_slice,
                     next_page_token=next_page_token,
                 )
airbyte_cdk/sources/declarative/transformations/add_fields.py
CHANGED
@@ -64,9 +64,9 @@ class AddFields(RecordTransformation):
           - path: ["shop_id"]
             value: "{{ config.shop_id }}"
 
-          # from
-          - path: ["
-            value: "{{
+          # from stream_interval
+          - path: ["date"]
+            value: "{{ stream_interval.start_date }}"
 
           # from record
           - path: ["unnested_value"]
@@ -128,7 +128,7 @@ class AddFields(RecordTransformation):
     ) -> None:
         if config is None:
             config = {}
-        kwargs = {"record": record, "
+        kwargs = {"record": record, "stream_slice": stream_slice}
         for parsed_field in self._parsed_fields:
             valid_types = (parsed_field.value_type,) if parsed_field.value_type else None
             value = parsed_field.value.eval(config, valid_types=valid_types, **kwargs)
airbyte_cdk/test/mock_http/mocker.py
CHANGED
@@ -17,6 +17,7 @@ class SupportedHttpMethods(str, Enum):
     GET = "get"
     PATCH = "patch"
     POST = "post"
+    PUT = "put"
     DELETE = "delete"
 
 
@@ -77,7 +78,7 @@ class HttpMocker(contextlib.ContextDecorator):
             additional_matcher=self._matches_wrapper(matcher),
             response_list=[
                 {
-
+                    self._get_body_field(response): response.body,
                     "status_code": response.status_code,
                     "headers": response.headers,
                 }
@@ -85,6 +86,10 @@ class HttpMocker(contextlib.ContextDecorator):
             ],
         )
 
+    @staticmethod
+    def _get_body_field(response: HttpResponse) -> str:
+        return "text" if isinstance(response.body, str) else "content"
+
     def get(self, request: HttpRequest, responses: Union[HttpResponse, List[HttpResponse]]) -> None:
         self._mock_request_method(SupportedHttpMethods.GET, request, responses)
 
@@ -98,6 +103,9 @@ class HttpMocker(contextlib.ContextDecorator):
     ) -> None:
         self._mock_request_method(SupportedHttpMethods.POST, request, responses)
 
+    def put(self, request: HttpRequest, responses: Union[HttpResponse, List[HttpResponse]]) -> None:
+        self._mock_request_method(SupportedHttpMethods.PUT, request, responses)
+
     def delete(
         self, request: HttpRequest, responses: Union[HttpResponse, List[HttpResponse]]
     ) -> None:
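Together with the new `PUT` enum value and the body-field helper, connector tests can now mock PUT endpoints and return binary payloads: string bodies are registered under requests-mock's `text` field and bytes bodies under `content`. A hedged usage sketch, assuming the decorator-injection pattern used elsewhere in CDK tests (the URL and payload are illustrative):

```python
import requests

from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse


@HttpMocker()
def test_put_with_binary_body(http_mocker: HttpMocker) -> None:
    # Register a mocked PUT endpoint that returns a bytes body.
    http_mocker.put(
        HttpRequest(url="https://api.example.com/v1/files/1"),
        HttpResponse(body=b"\x89PNG...", status_code=200),
    )

    response = requests.put("https://api.example.com/v1/files/1")

    assert response.status_code == 200
    assert response.content.startswith(b"\x89PNG")


test_put_with_binary_body()
```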
airbyte_cdk/test/mock_http/response.py
CHANGED
@@ -1,19 +1,22 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 
 from types import MappingProxyType
-from typing import Mapping
+from typing import Mapping, Union
 
 
 class HttpResponse:
     def __init__(
-        self,
+        self,
+        body: Union[str, bytes],
+        status_code: int = 200,
+        headers: Mapping[str, str] = MappingProxyType({}),
     ):
         self._body = body
         self._status_code = status_code
         self._headers = headers
 
     @property
-    def body(self) -> str:
+    def body(self) -> Union[str, bytes]:
         return self._body
 
     @property
airbyte_cdk/utils/mapping_helpers.py
CHANGED
@@ -6,6 +6,12 @@
 import copy
 from typing import Any, Dict, List, Mapping, Optional, Union
 
+from airbyte_cdk.sources.declarative.requesters.request_option import (
+    RequestOption,
+    RequestOptionType,
+)
+from airbyte_cdk.sources.types import Config
+
 
 def _merge_mappings(
     target: Dict[str, Any],
@@ -33,13 +39,17 @@ def _merge_mappings(
         if isinstance(target_value, dict) and isinstance(source_value, dict):
             # Only body_json supports nested_structures
             if not allow_same_value_merge:
-                raise ValueError(
+                raise ValueError(
+                    f"Request body collision, duplicate keys detected at key path: {'.'.join(current_path)}. Please ensure that all keys in the request are unique."
+                )
             # If both are dictionaries, recursively merge them
             _merge_mappings(target_value, source_value, current_path, allow_same_value_merge)
 
         elif not allow_same_value_merge or target_value != source_value:
             # If same key has different values, that's a conflict
-            raise ValueError(
+            raise ValueError(
+                f"Request body collision, duplicate keys detected at key path: {'.'.join(current_path)}. Please ensure that all keys in the request are unique."
+            )
         else:
             # No conflict, just copy the value (using deepcopy for nested structures)
             target[key] = copy.deepcopy(source_value)
@@ -102,3 +112,34 @@ def combine_mappings(
         _merge_mappings(result, mapping, allow_same_value_merge=allow_same_value_merge)
 
     return result
+
+
+def _validate_component_request_option_paths(
+    config: Config, *request_options: Optional[RequestOption]
+) -> None:
+    """
+    Validates that a component with multiple request options does not have conflicting paths.
+    Uses dummy values for validation since actual values might not be available at init time.
+    """
+    grouped_options: Dict[RequestOptionType, List[RequestOption]] = {}
+    for option in request_options:
+        if option:
+            grouped_options.setdefault(option.inject_into, []).append(option)
+
+    for inject_type, options in grouped_options.items():
+        if len(options) <= 1:
+            continue
+
+        option_dicts: List[Optional[Union[Mapping[str, Any], str]]] = []
+        for i, option in enumerate(options):
+            option_dict: Dict[str, Any] = {}
+            # Use indexed dummy values to ensure we catch conflicts
+            option.inject_into_request(option_dict, f"dummy_value_{i}", config)
+            option_dicts.append(option_dict)
+
+        try:
+            combine_mappings(
+                option_dicts, allow_same_value_merge=(inject_type == RequestOptionType.body_json)
+            )
+        except ValueError as error:
+            raise ValueError(error)
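The new helper groups a component's request options by where they inject (`request_parameter`, `header`, `body_data`, `body_json`), injects indexed dummy values, and reuses `combine_mappings` to detect collisions, so duplicate field paths now fail at component init rather than at request time. A hedged sketch of what it catches, assuming the `RequestOption(field_name=..., inject_into=..., parameters={})` constructor; field names are illustrative:

```python
from airbyte_cdk.sources.declarative.requesters.request_option import (
    RequestOption,
    RequestOptionType,
)
from airbyte_cdk.utils.mapping_helpers import _validate_component_request_option_paths

config = {}

# Two options that would both write to the same query parameter name.
start_option = RequestOption(
    field_name="since", inject_into=RequestOptionType.request_parameter, parameters={}
)
end_option = RequestOption(
    field_name="since", inject_into=RequestOptionType.request_parameter, parameters={}
)

try:
    _validate_component_request_option_paths(config, start_option, end_option)
except ValueError as error:
    print(error)  # duplicate key detected for the "since" parameter

# Distinct field names pass validation without raising.
_validate_component_request_option_paths(
    config,
    RequestOption(field_name="start", inject_into=RequestOptionType.request_parameter, parameters={}),
    RequestOption(field_name="end", inject_into=RequestOptionType.request_parameter, parameters={}),
)
```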
{airbyte_cdk-6.33.6.dist-info → airbyte_cdk-6.34.0.dist-info}/RECORD
CHANGED
@@ -67,11 +67,11 @@ airbyte_cdk/sources/declarative/checks/check_stream.py,sha256=dAA-UhmMj0WLXCkRQr
 airbyte_cdk/sources/declarative/checks/connection_checker.py,sha256=MBRJo6WJlZQHpIfOGaNOkkHUmgUl_4wDM6VPo41z5Ss,1383
 airbyte_cdk/sources/declarative/concurrency_level/__init__.py,sha256=5XUqrmlstYlMM0j6crktlKQwALek0uiz2D3WdM46MyA,191
 airbyte_cdk/sources/declarative/concurrency_level/concurrency_level.py,sha256=YIwCTCpOr_QSNW4ltQK0yUGWInI8PKNY216HOOegYLk,2101
-airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=
+airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=MRnIdGeKPk1dO9-4eWRHa7mI6Ay_7szGo9H1RJSZDb8,24453
 airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=l9LG7Qm6e5r_qgqfVKnx3mXYtg1I9MmMjomVIPfU4XA,177
 airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=SX9JjdesN1edN2WVUVMzU_ptqp2QB1OnsnjZ4mwcX7w,2579
 airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
-airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=
+airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=MpHb5FRxxYpApasx6ICtxkrbQa02du9qGAVlpTMbA5s,144011
 airbyte_cdk/sources/declarative/declarative_source.py,sha256=nF7wBqFd3AQmEKAm4CnIo29CJoQL562cJGSCeL8U8bA,1531
 airbyte_cdk/sources/declarative/declarative_stream.py,sha256=venZjfpvtqr3oFSuvMBWtn4h9ayLhD4L65ACuXCDZ64,10445
 airbyte_cdk/sources/declarative/decoders/__init__.py,sha256=JHb_0d3SE6kNY10mxA5YBEKPeSbsWYjByq1gUQxepoE,953
@@ -92,10 +92,10 @@ airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=HCqx7IyENM_
 airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=LhqGDfX06_dDYLKsIVnwQ_nAWCln-v8PV7Wgt_QVeTI,6533
 airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
 airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=U1oZKtBaEC6IACmvziY9Wzg7Z8EgF4ZuR7NwvjlB_Sk,1255
-airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=
-airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=
+airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=Pg2phEFT9T8AzUjK6hVhn0rgR3yY6JPF-Dfv0g1m5dQ,19191
+airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=Rbe6lJLTtZ5en33MwZiB9-H9-AwDMNHgwBZs8EqhYqk,22172
 airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
-airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=
+airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=2tsE6FgXzemf4fZZ4uGtd8QpRBl9GJ2CRqSNJE5p0EI,16077
 airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py,sha256=9IAJTCiRUXvhFFz-IhZtYh_KfAjLHqthsYf2jErQRls,17728
 airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py,sha256=2YBOA2NnwAeIKlIhSwUB_W-FaGnPcmrG_liY7b4mV2Y,8365
 airbyte_cdk/sources/declarative/incremental/resumable_full_refresh_cursor.py,sha256=10LFv1QPM-agVKl6eaANmEBOfd7gZgBrkoTcMggsieQ,4809
@@ -106,7 +106,7 @@ airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=UrF
 airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=i2L0gREX8nHA-pKokdVqwBf4aJgWP71KOxIABj_DHcY,1857
 airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=LYEZnZ_hB7rvBSZxG9s0RSrzsOkDWbBY0_P6qu5lEfc,3212
 airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=-V5UddGm69UKEB6o_O1EIES9kfY8FV_X4Ji8w1yOuSA,981
-airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=
+airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=tNJAejgHRp9qtNpUS2Ok6pzQHXLl-AK7u_sU-F44OD4,7182
 airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=Y5AWYxbJTUtJ_Jm7DV9qrZDiymFR9LST7fBt4piT2-U,4585
 airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=TN6GCgLXaWDONTaJwQ3A5ELqC-sxwKz-UYSraJYB-dI,17078
 airbyte_cdk/sources/declarative/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -143,9 +143,9 @@ airbyte_cdk/sources/declarative/requesters/error_handlers/default_http_response_
 airbyte_cdk/sources/declarative/requesters/error_handlers/error_handler.py,sha256=Tan66odx8VHzfdyyXMQkXz2pJYksllGqvxmpoajgcK4,669
 airbyte_cdk/sources/declarative/requesters/error_handlers/http_response_filter.py,sha256=E-fQbt4ShfxZVoqfnmOx69C6FUPWZz8BIqI3DN9Kcjs,7935
 airbyte_cdk/sources/declarative/requesters/http_job_repository.py,sha256=3GtOefPH08evlSUxaILkiKLTHbIspFY4qd5B3ZqNE60,10063
-airbyte_cdk/sources/declarative/requesters/http_requester.py,sha256=
+airbyte_cdk/sources/declarative/requesters/http_requester.py,sha256=pR2uR5b9eGyvYIOYwus3mz3OaqRu1ozwja_ys1SE7hc,14952
 airbyte_cdk/sources/declarative/requesters/paginators/__init__.py,sha256=uArbKs9JKNCt7t9tZoeWwjDpyI1HoPp29FNW0JzvaEM,644
-airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py,sha256=
+airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py,sha256=ZW4lwWNAzb4zL0jKc-HjowP5-y0Zg9xi0YlK6tkx_XY,12057
 airbyte_cdk/sources/declarative/requesters/paginators/no_pagination.py,sha256=j6j9QRPaTbKQ2N661RFVKthhkWiodEp6ut0tKeEd0Ng,2019
 airbyte_cdk/sources/declarative/requesters/paginators/paginator.py,sha256=OlN-y0PEOMzlUNUh3pzonoTpIJpGwkP4ibFengvpLVU,2230
 airbyte_cdk/sources/declarative/requesters/paginators/strategies/__init__.py,sha256=2gly8fuZpDNwtu1Qg6oE2jBLGqQRdzSLJdnpk_iDV6I,767
@@ -158,9 +158,9 @@ airbyte_cdk/sources/declarative/requesters/request_option.py,sha256=Bl0gxGWudmwT
 airbyte_cdk/sources/declarative/requesters/request_options/__init__.py,sha256=WCwpKqM4wKqy-DHJaCHbKAlFqRVOqMi9K5qonxIfi_Y,809
 airbyte_cdk/sources/declarative/requesters/request_options/datetime_based_request_options_provider.py,sha256=31nG6_0igidJFQon37-WeQkTpG3g2A5ZmlluI3ilZdE,3632
 airbyte_cdk/sources/declarative/requesters/request_options/default_request_options_provider.py,sha256=SRROdPJZ5kuqHLOlkh115pWP9nDGfDxRYPgH9oD3hPo,1798
-airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py,sha256=
-airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py,sha256=
-airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py,sha256=
+airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py,sha256=86YozYuBDfu0t9NbevIvQoGU0vqTP4rt3dRSTsHz3PA,2269
+airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py,sha256=rR00kE64U2yL0McU1gPr4_W5_sLUqwDgL3Nvj691nRU,2884
+airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py,sha256=vOsdHfWHiTFc89WENHPv1hcxLgdzycMXVT_IEtLuhfs,5012
 airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py,sha256=8YRiDzjYvqJ-aMmKFcjqzv_-e8OZ5QG_TbpZ-nuCu6s,2590
 airbyte_cdk/sources/declarative/requesters/request_path.py,sha256=S3MeFvcaQrMbOkSY2W2VbXLNomqt_3eXqVd9ZhgNwUs,299
 airbyte_cdk/sources/declarative/requesters/requester.py,sha256=iVVpXQ4KEd9OyZNwmOofMvx7_06i8ZRxGo3aNTrEQLM,4946
@@ -171,7 +171,7 @@ airbyte_cdk/sources/declarative/resolvers/http_components_resolver.py,sha256=Aio
 airbyte_cdk/sources/declarative/retrievers/__init__.py,sha256=ix9m1dkR69DcXCXUKC5RK_ZZM7ojTLBQ4IkWQTfmfCk,456
 airbyte_cdk/sources/declarative/retrievers/async_retriever.py,sha256=2oQn_vo7uJKp4pdMnsF5CG5Iwc9rkPeEOLoAm_9bcus,3222
 airbyte_cdk/sources/declarative/retrievers/retriever.py,sha256=XPLs593Xv8c5cKMc37XzUAYmzlXd1a7eSsspM-CMuWA,1696
-airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=
+airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=bOAKQLgMv1Vca-ozMPRVAg1V5nkyUoPwqC02lKpnLiM,24575
 airbyte_cdk/sources/declarative/schema/__init__.py,sha256=xU45UvM5O4c1PSM13UHpCdh5hpW3HXy9vRRGEiAC1rg,795
 airbyte_cdk/sources/declarative/schema/default_schema_loader.py,sha256=KTACrIE23a83wsm3Rd9Eb4K6-20lrGqYxTHNp9yxsso,1820
 airbyte_cdk/sources/declarative/schema/dynamic_schema_loader.py,sha256=J8Q_iJYhcSQLWyt0bTZCbDAGpxt9G8FCc6Q9jtGsNzw,10703
@@ -184,7 +184,7 @@ airbyte_cdk/sources/declarative/stream_slicers/__init__.py,sha256=sI9vhc95RwJYOn
 airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py,sha256=RW1Q44ml-VWeMl4lNcV6EfyzrzCZkjj-hd0Omx_n_n4,3405
 airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py,sha256=SOkIPBi2Wu7yxIvA15yFzUAB95a3IzA8LPq5DEqHQQc,725
 airbyte_cdk/sources/declarative/transformations/__init__.py,sha256=CPJ8TlMpiUmvG3624VYu_NfTzxwKcfBjM2Q2wJ7fkSA,919
-airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=
+airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=7UHCGc4xOxkYs5iXbPAPrP3-IEY60A-Go8QushsmaqY,4959
 airbyte_cdk/sources/declarative/transformations/dpath_flatten_fields.py,sha256=1A-DWGjMqY4ggzRUZsZ3Sjrt-xsNgwUo5c72sSc5OZ0,2077
 airbyte_cdk/sources/declarative/transformations/flatten_fields.py,sha256=yT3owG6rMKaRX-LJ_T-jSTnh1B5NoAHyH4YZN9yOvE8,1758
 airbyte_cdk/sources/declarative/transformations/keys_replace_transformation.py,sha256=vbIn6ump-Ut6g20yMub7PFoPBhOKVtrHSAUdcOUdLfw,1999
@@ -333,9 +333,9 @@ airbyte_cdk/test/catalog_builder.py,sha256=-y05Cz1x0Dlk6oE9LSKhCozssV2gYBNtMdV5Y
 airbyte_cdk/test/entrypoint_wrapper.py,sha256=9XBii_YguQp0d8cykn3hy102FsJcwIBQzSB7co5ho0s,9802
 airbyte_cdk/test/mock_http/__init__.py,sha256=jE5kC6CQ0OXkTqKhciDnNVZHesBFVIA2YvkdFGwva7k,322
 airbyte_cdk/test/mock_http/matcher.py,sha256=4Qj8UnJKZIs-eodshryce3SN1Ayc8GZpBETmP6hTEyc,1446
-airbyte_cdk/test/mock_http/mocker.py,sha256=
+airbyte_cdk/test/mock_http/mocker.py,sha256=ghX44cLwhs7lqz1gYMizGX8zfPnDvt3YNI2w5jLpzIs,7726
 airbyte_cdk/test/mock_http/request.py,sha256=tdB8cqk2vLgCDTOKffBKsM06llYs4ZecgtH6DKyx6yY,4112
-airbyte_cdk/test/mock_http/response.py,sha256=
+airbyte_cdk/test/mock_http/response.py,sha256=s4-cQQqTtmeej0pQDWqmG0vUWpHS-93lIWMpW3zSVyU,662
 airbyte_cdk/test/mock_http/response_builder.py,sha256=debPx_lRYBaQVSwCoKLa0F8KFk3h0qG7bWxFBATa0cc,7958
 airbyte_cdk/test/state_builder.py,sha256=kLPql9lNzUJaBg5YYRLJlY_Hy5JLHJDVyKPMZMoYM44,946
 airbyte_cdk/test/utils/__init__.py,sha256=Hu-1XT2KDoYjDF7-_ziDwv5bY3PueGjANOCbzeOegDg,57
@@ -351,7 +351,7 @@ airbyte_cdk/utils/datetime_format_inferrer.py,sha256=Ne2cpk7Tx3eZDEW2Q3O7jnNOY9g
 airbyte_cdk/utils/datetime_helpers.py,sha256=8mqzZ67Or2PBp7tLtrhh6XFv4wFzYsjCL_DOQJRaftI,17751
 airbyte_cdk/utils/event_timing.py,sha256=aiuFmPU80buLlNdKq4fDTEqqhEIelHPF6AalFGwY8as,2557
 airbyte_cdk/utils/is_cloud_environment.py,sha256=DayV32Irh-SdnJ0MnjvstwCJ66_l5oEsd8l85rZtHoc,574
-airbyte_cdk/utils/mapping_helpers.py,sha256=
+airbyte_cdk/utils/mapping_helpers.py,sha256=imUTULHmZ1Ks-MRMRLIVqHCX1eJi_j6tFQrYsKIKtM4,5967
 airbyte_cdk/utils/message_utils.py,sha256=OTzbkwN7AdMDA3iKYq1LKwfPFxpyEDfdgEF9BED3dkU,1366
 airbyte_cdk/utils/oneof_option_config.py,sha256=N8EmWdYdwt0FM7fuShh6H8nj_r4KEL9tb2DJJtwsPow,1180
 airbyte_cdk/utils/print_buffer.py,sha256=PhMOi0C4Z91kWKrSvCQXcp8qRh1uCimpIdvrg6voZIA,2810
@@ -360,9 +360,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
-airbyte_cdk-6.
-airbyte_cdk-6.
-airbyte_cdk-6.
-airbyte_cdk-6.
-airbyte_cdk-6.
-airbyte_cdk-6.
+airbyte_cdk-6.34.0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.34.0.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+airbyte_cdk-6.34.0.dist-info/METADATA,sha256=hfuoGpKE6LnZRPmJ62z4RAc3OM6NWjcppus4NkrugSc,6010
+airbyte_cdk-6.34.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.34.0.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
+airbyte_cdk-6.34.0.dist-info/RECORD,,