airbyte-cdk 6.36.0.dev0__py3-none-any.whl → 6.36.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_cdk/sources/declarative/concurrent_declarative_source.py +15 -75
- airbyte_cdk/sources/declarative/declarative_component_schema.yaml +15 -16
- airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py +13 -2
- airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +17 -84
- airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py +6 -2
- airbyte_cdk/sources/declarative/interpolation/__init__.py +1 -1
- airbyte_cdk/sources/declarative/interpolation/filters.py +2 -1
- airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py +1 -1
- airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py +1 -1
- airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py +1 -1
- airbyte_cdk/sources/declarative/interpolation/interpolated_string.py +1 -1
- airbyte_cdk/sources/declarative/interpolation/interpolation.py +2 -1
- airbyte_cdk/sources/declarative/interpolation/jinja.py +14 -1
- airbyte_cdk/sources/declarative/interpolation/macros.py +19 -4
- airbyte_cdk/sources/declarative/models/declarative_component_schema.py +1 -1
- airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +10 -7
- airbyte_cdk/sources/declarative/requesters/http_requester.py +0 -1
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py +1 -4
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py +0 -3
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py +2 -47
- airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +0 -2
- airbyte_cdk/sources/declarative/transformations/add_fields.py +4 -4
- {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/METADATA +1 -1
- {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/RECORD +28 -28
- {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/LICENSE_SHORT +0 -0
- {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/WHEEL +0 -0
- {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/entry_points.txt +0 -0

airbyte_cdk/sources/declarative/concurrent_declarative_source.py CHANGED

@@ -24,7 +24,6 @@ from airbyte_cdk.sources.declarative.incremental.datetime_based_cursor import Da
 from airbyte_cdk.sources.declarative.incremental.per_partition_with_global import (
     PerPartitionWithGlobalCursor,
 )
-from airbyte_cdk.sources.declarative.interpolation import InterpolatedString
 from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     ConcurrencyLevel as ConcurrencyLevelModel,

@@ -36,17 +35,16 @@ from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import (
     ModelToComponentFactory,
 )
 from airbyte_cdk.sources.declarative.partition_routers import AsyncJobPartitionRouter
-from airbyte_cdk.sources.declarative.requesters import HttpRequester
 from airbyte_cdk.sources.declarative.retrievers import AsyncRetriever, Retriever, SimpleRetriever
 from airbyte_cdk.sources.declarative.stream_slicers.declarative_partition_generator import (
     DeclarativePartitionFactory,
     StreamSlicerPartitionGenerator,
 )
-from airbyte_cdk.sources.declarative.transformations.add_fields import AddFields
 from airbyte_cdk.sources.declarative.types import ConnectionDefinition
 from airbyte_cdk.sources.source import TState
 from airbyte_cdk.sources.streams import Stream
 from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream
+from airbyte_cdk.sources.streams.concurrent.abstract_stream_facade import AbstractStreamFacade
 from airbyte_cdk.sources.streams.concurrent.availability_strategy import (
     AlwaysAvailableAvailabilityStrategy,
 )

@@ -121,6 +119,12 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
             message_repository=self.message_repository,
         )
 
+    # TODO: Remove this. This property is necessary to safely migrate Stripe during the transition state.
+    @property
+    def is_partially_declarative(self) -> bool:
+        """This flag used to avoid unexpected AbstractStreamFacade processing as concurrent streams."""
+        return False
+
     def read(
         self,
         logger: logging.Logger,
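
The `is_partially_declarative` property added above defaults to `False`; together with the `elif` branch added further down in this file, it keeps concurrent processing available for sources that mix hand-written Python streams with declarative ones. A minimal sketch of how a partially migrated source might opt in (the subclass name is hypothetical):

```python
from airbyte_cdk.sources.declarative.concurrent_declarative_source import (
    ConcurrentDeclarativeSource,
)


class SourcePartiallyDeclarative(ConcurrentDeclarativeSource):  # hypothetical source
    @property
    def is_partially_declarative(self) -> bool:
        # Opt in so streams wrapped in AbstractStreamFacade are still routed
        # to the concurrent execution path rather than the synchronous one.
        return True
```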

@@ -321,9 +325,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                 incremental_sync_component_definition
                 and incremental_sync_component_definition.get("type", "")
                 == DatetimeBasedCursorModel.__name__
-                and self._stream_supports_concurrent_partition_processing(
-                    declarative_stream=declarative_stream
-                )
                 and hasattr(declarative_stream.retriever, "stream_slicer")
                 and isinstance(
                     declarative_stream.retriever.stream_slicer, PerPartitionWithGlobalCursor

@@ -375,6 +376,14 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                     )
                 else:
                     synchronous_streams.append(declarative_stream)
+            # TODO: Remove this. This check is necessary to safely migrate Stripe during the transition state.
+            # Condition below needs to ensure that concurrent support is not lost for sources that already support
+            # it before migration, but now are only partially migrated to declarative implementation (e.g., Stripe).
+            elif (
+                isinstance(declarative_stream, AbstractStreamFacade)
+                and self.is_partially_declarative
+            ):
+                concurrent_streams.append(declarative_stream.get_underlying_stream())
             else:
                 synchronous_streams.append(declarative_stream)
 

@@ -390,9 +399,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
             and bool(incremental_sync_component_definition)
             and incremental_sync_component_definition.get("type", "")
             == DatetimeBasedCursorModel.__name__
-            and self._stream_supports_concurrent_partition_processing(
-                declarative_stream=declarative_stream
-            )
             and hasattr(declarative_stream.retriever, "stream_slicer")
             and (
                 isinstance(declarative_stream.retriever.stream_slicer, DatetimeBasedCursor)

@@ -400,72 +406,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
             )
         )
 
-    def _stream_supports_concurrent_partition_processing(
-        self, declarative_stream: DeclarativeStream
-    ) -> bool:
-        """
-        Many connectors make use of stream_state during interpolation on a per-partition basis under the assumption that
-        state is updated sequentially. Because the concurrent CDK engine processes different partitions in parallel,
-        stream_state is no longer a thread-safe interpolation context. It would be a race condition because a cursor's
-        stream_state can be updated in any order depending on which stream partition's finish first.
-
-        We should start to move away from depending on the value of stream_state for low-code components that operate
-        per-partition, but we need to gate this otherwise some connectors will be blocked from publishing. See the
-        cdk-migrations.md for the full list of connectors.
-        """
-
-        if isinstance(declarative_stream.retriever, SimpleRetriever) and isinstance(
-            declarative_stream.retriever.requester, HttpRequester
-        ):
-            http_requester = declarative_stream.retriever.requester
-            if "stream_state" in http_requester._path.string:
-                self.logger.warning(
-                    f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the HttpRequester which is not thread-safe. Defaulting to synchronous processing"
-                )
-                return False
-
-            request_options_provider = http_requester._request_options_provider
-            if request_options_provider.request_options_contain_stream_state():
-                self.logger.warning(
-                    f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the HttpRequester which is not thread-safe. Defaulting to synchronous processing"
-                )
-                return False
-
-        record_selector = declarative_stream.retriever.record_selector
-        if isinstance(record_selector, RecordSelector):
-            if (
-                record_selector.record_filter
-                and not isinstance(
-                    record_selector.record_filter, ClientSideIncrementalRecordFilterDecorator
-                )
-                and "stream_state" in record_selector.record_filter.condition
-            ):
-                self.logger.warning(
-                    f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the RecordFilter which is not thread-safe. Defaulting to synchronous processing"
-                )
-                return False
-
-            for add_fields in [
-                transformation
-                for transformation in record_selector.transformations
-                if isinstance(transformation, AddFields)
-            ]:
-                for field in add_fields.fields:
-                    if isinstance(field.value, str) and "stream_state" in field.value:
-                        self.logger.warning(
-                            f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the AddFields which is not thread-safe. Defaulting to synchronous processing"
-                        )
-                        return False
-                    if (
-                        isinstance(field.value, InterpolatedString)
-                        and "stream_state" in field.value.string
-                    ):
-                        self.logger.warning(
-                            f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the AddFields which is not thread-safe. Defaulting to synchronous processing"
-                        )
-                        return False
-        return True
-
     @staticmethod
     def _get_retriever(
         declarative_stream: DeclarativeStream, stream_state: Mapping[str, Any]

airbyte_cdk/sources/declarative/declarative_component_schema.yaml CHANGED

@@ -82,7 +82,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - "{{ record['updates'] }}"
           - "{{ record['MetaData']['LastUpdatedTime'] }}"

@@ -1491,7 +1490,11 @@ definitions:
         limit:
           title: Limit
           description: The maximum number of calls allowed within the interval.
-          type: integer
+          anyOf:
+            - type: integer
+            - type: string
+          interpolation_context:
+            - config
         interval:
           title: Interval
           description: The time interval for the rate limit.
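
Under the updated schema, `Rate.limit` accepts either a literal integer or an interpolated string evaluated against `config`. A hypothetical manifest fragment, written here as a Python dict for illustration (the `calls_per_interval` config key is an assumption):

```python
# Either form validates against the updated schema.
rate_literal = {"type": "Rate", "limit": 100, "interval": "PT1M"}
rate_interpolated = {
    "type": "Rate",
    "limit": "{{ config['calls_per_interval'] }}",  # resolved from the connector config
    "interval": "PT1M",
}
```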

@@ -1776,7 +1779,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - "/products"
           - "/quotes/{{ stream_partition['id'] }}/quote_line_groups"

@@ -1826,7 +1828,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - |
             [{"clause": {"type": "timestamp", "operator": 10, "parameters":

@@ -1844,7 +1845,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - sort_order: "ASC"
             sort_field: "CREATED_AT"

@@ -1865,7 +1865,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - Output-Format: JSON
           - Version: "{{ config['version'] }}"

@@ -1882,7 +1881,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - unit: "day"
           - query: 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"'

@@ -2237,7 +2235,6 @@ definitions:
         interpolation_context:
           - config
           - record
-          - stream_state
           - stream_slice
         new:
           type: string

@@ -2251,7 +2248,6 @@ definitions:
         interpolation_context:
           - config
           - record
-          - stream_state
           - stream_slice
         $parameters:
           type: object

@@ -2901,7 +2897,6 @@ definitions:
           - stream_interval
           - stream_partition
           - stream_slice
-          - stream_state
         examples:
           - "{{ record['created_at'] >= stream_interval['start_time'] }}"
           - "{{ record.status in ['active', 'expired'] }}"

@@ -3689,12 +3684,6 @@ interpolation:
     - title: stream_slice
       description: This variable is deprecated. Use stream_interval or stream_partition instead.
       type: object
-    - title: stream_state
-      description: The current state of the stream. The object's keys are defined by the incremental sync's cursor_field the and partition router's values.
-      type: object
-      examples:
-        - created_at: "2020-01-01 00:00:00.000+00:00"
-        - updated_at: "2020-01-02 00:00:00.000+00:00"
   macros:
     - title: now_utc
       description: Returns the current date and time in the UTC timezone.

@@ -3759,6 +3748,16 @@ interpolation:
         - "{{ format_datetime(config['start_time'], '%Y-%m-%d') }}"
         - "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%S.%fZ') }}"
         - "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%S.%fZ', '%a, %d %b %Y %H:%M:%S %z') }}"
+    - title: str_to_datetime
+      description: Converts a string to a datetime object with UTC timezone.
+      arguments:
+        s: The string to convert.
+      return_type: datetime.datetime
+      examples:
+        - "{{ str_to_datetime('2022-01-14') }}"
+        - "{{ str_to_datetime('2022-01-01 13:45:30') }}"
+        - "{{ str_to_datetime('2022-01-01T13:45:30+00:00') }}"
+        - "{{ str_to_datetime('2022-01-01T13:45:30.123456Z') }}"
   filters:
     - title: hash
       description: Convert the specified value to a hashed string.

airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py CHANGED

@@ -107,6 +107,16 @@ class CsvParser(Parser):
     encoding: Optional[str] = "utf-8"
     delimiter: Optional[str] = ","
 
+    def _get_delimiter(self) -> Optional[str]:
+        """
+        Get delimiter from the configuration. Check for the escape character and decode it.
+        """
+        if self.delimiter is not None:
+            if self.delimiter.startswith("\\"):
+                self.delimiter = self.delimiter.encode("utf-8").decode("unicode_escape")
+
+        return self.delimiter
+
     def parse(
         self,
         data: BufferedIOBase,

@@ -115,8 +125,9 @@ class CsvParser(Parser):
         Parse CSV data from decompressed bytes.
         """
         text_data = TextIOWrapper(data, encoding=self.encoding)  # type: ignore
-        reader = csv.DictReader(text_data, delimiter=self.delimiter or ",")
-        yield from reader
+        reader = csv.DictReader(text_data, delimiter=self._get_delimiter() or ",")
+        for row in reader:
+            yield row
 
 
 @dataclass
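
A short sketch of the escape handling that `_get_delimiter` introduces: a configured delimiter such as `"\t"` arrives as a backslash followed by `t` and is decoded into a real tab character before being handed to `csv.DictReader`.

```python
# Mirrors the decode step in _get_delimiter above.
delimiter = "\\t"  # backslash + "t", as it would appear in a manifest
if delimiter.startswith("\\"):
    delimiter = delimiter.encode("utf-8").decode("unicode_escape")
assert delimiter == "\t"  # now a literal tab character
```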

airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py CHANGED

@@ -95,10 +95,6 @@ class ConcurrentPerPartitionCursor(Cursor):
         # the oldest partitions can be efficiently removed, maintaining the most recent partitions.
         self._cursor_per_partition: OrderedDict[str, ConcurrentCursor] = OrderedDict()
         self._semaphore_per_partition: OrderedDict[str, threading.Semaphore] = OrderedDict()
-
-        # Parent-state tracking: store each partition’s parent state in creation order
-        self._partition_parent_state_map: OrderedDict[str, Mapping[str, Any]] = OrderedDict()
-
         self._finished_partitions: set[str] = set()
         self._lock = threading.Lock()
         self._timer = Timer()

@@ -159,62 +155,11 @@ class ConcurrentPerPartitionCursor(Cursor):
                     and self._semaphore_per_partition[partition_key]._value == 0
                 ):
                     self._update_global_cursor(cursor.state[self.cursor_field.cursor_field_key])
-
-            self._check_and_update_parent_state()
-
-            self._emit_state_message()
-
-    def _check_and_update_parent_state(self) -> None:
-        """
-        Pop the leftmost partition state from _partition_parent_state_map only if
-        *all partitions* up to (and including) that partition key in _semaphore_per_partition
-        are fully finished (i.e. in _finished_partitions and semaphore._value == 0).
-        Additionally, delete finished semaphores with a value of 0 to free up memory,
-        as they are only needed to track errors and completion status.
-        """
-        last_closed_state = None
-
-        while self._partition_parent_state_map:
-            # Look at the earliest partition key in creation order
-            earliest_key = next(iter(self._partition_parent_state_map))
-
-            # Verify ALL partitions from the left up to earliest_key are finished
-            all_left_finished = True
-            for p_key, sem in list(
-                self._semaphore_per_partition.items()
-            ):  # Use list to allow modification during iteration
-                # If any earlier partition is still not finished, we must stop
-                if p_key not in self._finished_partitions or sem._value != 0:
-                    all_left_finished = False
-                    break
-                # Once we've reached earliest_key in the semaphore order, we can stop checking
-                if p_key == earliest_key:
-                    break
-
-            # If the partitions up to earliest_key are not all finished, break the while-loop
-            if not all_left_finished:
-                break
-
-            # Pop the leftmost entry from parent-state map
-            _, closed_parent_state = self._partition_parent_state_map.popitem(last=False)
-            last_closed_state = closed_parent_state
-
-            # Clean up finished semaphores with value 0 up to and including earliest_key
-            for p_key in list(self._semaphore_per_partition.keys()):
-                sem = self._semaphore_per_partition[p_key]
-                if p_key in self._finished_partitions and sem._value == 0:
-                    del self._semaphore_per_partition[p_key]
-                    logger.debug(f"Deleted finished semaphore for partition {p_key} with value 0")
-                if p_key == earliest_key:
-                    break
-
-        # Update _parent_state if we popped at least one partition
-        if last_closed_state is not None:
-            self._parent_state = last_closed_state
+            self._emit_state_message()
 
     def ensure_at_least_one_state_emitted(self) -> None:
         """
-        The platform
+        The platform expect to have at least one state message on successful syncs. Hence, whatever happens, we expect this method to be
         called.
         """
         if not any(

@@ -251,25 +196,18 @@ class ConcurrentPerPartitionCursor(Cursor):
             self._message_repository.emit_message(state_message)
 
     def stream_slices(self) -> Iterable[StreamSlice]:
-        print("stream_slices")
         if self._timer.is_running():
             raise RuntimeError("stream_slices has been executed more than once.")
 
         slices = self._partition_router.stream_slices()
         self._timer.start()
-        for partition, _, parent_state in iterate_with_last_flag_and_state(
-            slices, self._partition_router.get_stream_state
-        ):
-            yield from self._generate_slices_from_partition(partition, parent_state)
+        for partition in slices:
+            yield from self._generate_slices_from_partition(partition)
 
-    def _generate_slices_from_partition(
-        self, partition: StreamSlice, parent_state: Mapping[str, Any]
-    ) -> Iterable[StreamSlice]:
+    def _generate_slices_from_partition(self, partition: StreamSlice) -> Iterable[StreamSlice]:
         # Ensure the maximum number of partitions is not exceeded
         self._ensure_partition_limit()
 
-        partition_key = self._to_partition_key(partition.partition)
-
         cursor = self._cursor_per_partition.get(self._to_partition_key(partition.partition))
         if not cursor:
             cursor = self._create_cursor(

@@ -278,26 +216,18 @@ class ConcurrentPerPartitionCursor(Cursor):
             )
             with self._lock:
                 self._number_of_partitions += 1
-                self._cursor_per_partition[partition_key] = cursor
-                self._semaphore_per_partition[partition_key] = (
-                    threading.Semaphore(0)
-                )
-            if (
-                len(self._partition_parent_state_map) == 0
-                or self._partition_parent_state_map[
-                    next(reversed(self._partition_parent_state_map))
-                ]
-                != parent_state
-            ):
-                self._partition_parent_state_map[partition_key] = deepcopy(parent_state)
+                self._cursor_per_partition[self._to_partition_key(partition.partition)] = cursor
+                self._semaphore_per_partition[self._to_partition_key(partition.partition)] = (
+                    threading.Semaphore(0)
+                )
 
         for cursor_slice, is_last_slice, _ in iterate_with_last_flag_and_state(
             cursor.stream_slices(),
             lambda: None,
         ):
-            self._semaphore_per_partition[partition_key].release()
+            self._semaphore_per_partition[self._to_partition_key(partition.partition)].release()
             if is_last_slice:
-                self._finished_partitions.add(partition_key)
+                self._finished_partitions.add(self._to_partition_key(partition.partition))
             yield StreamSlice(
                 partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
             )

@@ -327,9 +257,9 @@ class ConcurrentPerPartitionCursor(Cursor):
         while len(self._cursor_per_partition) > self.DEFAULT_MAX_PARTITIONS_NUMBER - 1:
             # Try removing finished partitions first
             for partition_key in list(self._cursor_per_partition.keys()):
-                if
-                    partition_key
-
+                if (
+                    partition_key in self._finished_partitions
+                    and self._semaphore_per_partition[partition_key]._value == 0
                 ):
                     oldest_partition = self._cursor_per_partition.pop(
                         partition_key

@@ -408,6 +338,9 @@ class ConcurrentPerPartitionCursor(Cursor):
             self._cursor_per_partition[self._to_partition_key(state["partition"])] = (
                 self._create_cursor(state["cursor"])
             )
+            self._semaphore_per_partition[self._to_partition_key(state["partition"])] = (
+                threading.Semaphore(0)
+            )
 
         # set default state for missing partitions if it is per partition with fallback to global
         if self._GLOBAL_STATE_KEY in stream_state:

airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py CHANGED

@@ -115,7 +115,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
         * Yield the last slice. At that point, once there are as many slices yielded as closes, the global slice will be closed too
         """
         slice_generator = (
-            StreamSlice(partition=partition, cursor_slice=cursor_slice)
+            StreamSlice(
+                partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
+            )
             for partition in self._partition_router.stream_slices()
             for cursor_slice in self._stream_cursor.stream_slices()
         )

@@ -131,7 +133,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
 
     def generate_slices_from_partition(self, partition: StreamSlice) -> Iterable[StreamSlice]:
         slice_generator = (
-            StreamSlice(partition=partition, cursor_slice=cursor_slice)
+            StreamSlice(
+                partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
+            )
             for cursor_slice in self._stream_cursor.stream_slices()
         )
 
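
The two hunks above thread the parent partition's `extra_fields` into every generated slice. A minimal sketch of the resulting behavior using the CDK's `StreamSlice` type (the field names are illustrative):

```python
from airbyte_cdk.sources.types import StreamSlice

partition = StreamSlice(
    partition={"id": "p1"}, cursor_slice={}, extra_fields={"parent_name": "p1-name"}
)
merged = StreamSlice(
    partition=partition,
    cursor_slice={"start": "2024-01-01"},
    extra_fields=partition.extra_fields,  # carried through, as in the change above
)
assert merged.extra_fields == {"parent_name": "p1-name"}
```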

airbyte_cdk/sources/declarative/interpolation/jinja.py CHANGED

@@ -1,5 +1,5 @@
 #
-# Copyright (c)
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 import ast

@@ -11,10 +11,12 @@ from jinja2.environment import Template
 from jinja2.exceptions import UndefinedError
 from jinja2.sandbox import SandboxedEnvironment
 
+from airbyte_cdk.models import FailureType
 from airbyte_cdk.sources.declarative.interpolation.filters import filters
 from airbyte_cdk.sources.declarative.interpolation.interpolation import Interpolation
 from airbyte_cdk.sources.declarative.interpolation.macros import macros
 from airbyte_cdk.sources.types import Config
+from airbyte_cdk.utils import AirbyteTracedException
 
 
 class StreamPartitionAccessEnvironment(SandboxedEnvironment):

@@ -36,6 +38,10 @@ _ALIASES = {
     "stream_partition": "stream_slice",  # Use stream_partition to access partition router's values
 }
 
+_UNSUPPORTED_INTERPOLATION_VARIABLES: Mapping[str, str] = {
+    "stream_state": "`stream_state` is no longer supported for interpolation. We recommend using `stream_interval` instead. Please reference the CDK Migration Guide for more information.",
+}
+
 # These extensions are not installed so they're not currently a problem,
 # but we're still explicitly removing them from the jinja context.
 # At worst, this is documentation that we do NOT want to include these extensions because of the potential security risks

@@ -95,6 +101,13 @@ class JinjaInterpolation(Interpolation):
             elif equivalent in context:
                 context[alias] = context[equivalent]
 
+        for variable_name in _UNSUPPORTED_INTERPOLATION_VARIABLES:
+            if variable_name in input_str:
+                raise AirbyteTracedException(
+                    message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
+                    internal_message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
+                    failure_type=FailureType.config_error,
+                )
         try:
             if isinstance(input_str, str):
                 result = self._eval(input_str, context)
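
With the guard above in place, any template that still references `stream_state` fails fast before evaluation. A sketch of the observable behavior:

```python
from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation
from airbyte_cdk.utils import AirbyteTracedException

interpolation = JinjaInterpolation()
try:
    interpolation.eval("{{ stream_state['updated_at'] }}", config={})
except AirbyteTracedException as error:
    # Raised as a config error that points users at stream_interval
    # and the CDK Migration Guide.
    print(error.message)
```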

airbyte_cdk/sources/declarative/interpolation/macros.py CHANGED

@@ -1,5 +1,5 @@
 #
-# Copyright (c)
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 import builtins

@@ -63,10 +63,24 @@ def timestamp(dt: Union[float, str]) -> Union[int, float]:
     if isinstance(dt, (int, float)):
         return int(dt)
     else:
-        return _str_to_datetime(dt).astimezone(pytz.utc).timestamp()
+        return str_to_datetime(dt).astimezone(pytz.utc).timestamp()
 
 
-def _str_to_datetime(s: str) -> datetime.datetime:
+def str_to_datetime(s: str) -> datetime.datetime:
+    """
+    Converts a string to a datetime object with UTC timezone
+
+    If the input string does not contain timezone information, UTC is assumed.
+    Supports both basic date strings like "2022-01-14" and datetime strings with optional timezone
+    like "2022-01-01T13:45:30+00:00".
+
+    Usage:
+    `"{{ str_to_datetime('2022-01-14') }}"`
+
+    :param s: string to parse as datetime
+    :return: datetime object in UTC timezone
+    """
+
     parsed_date = parser.isoparse(s)
     if not parsed_date.tzinfo:
         # Assume UTC if the input does not contain a timezone

@@ -155,7 +169,7 @@ def format_datetime(
     if isinstance(dt, datetime.datetime):
         return dt.strftime(format)
     dt_datetime = (
-        datetime.datetime.strptime(dt, input_format) if input_format else _str_to_datetime(dt)
+        datetime.datetime.strptime(dt, input_format) if input_format else str_to_datetime(dt)
     )
     if format == "%s":
         return str(int(dt_datetime.timestamp()))

@@ -172,5 +186,6 @@ _macros_list = [
     duration,
     format_datetime,
     today_with_timezone,
+    str_to_datetime,
 ]
 macros = {f.__name__: f for f in _macros_list}
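
Since `str_to_datetime` is now registered in `_macros_list`, it is available both inside Jinja templates and as a plain function. A usage sketch:

```python
from airbyte_cdk.sources.declarative.interpolation.macros import str_to_datetime

dt = str_to_datetime("2022-01-14")
print(dt.isoformat())  # 2022-01-14T00:00:00+00:00 — naive inputs are assumed to be UTC

dt = str_to_datetime("2022-01-01T13:45:30.123456Z")
print(dt.isoformat())  # timezone information in the input is preserved
```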

airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py CHANGED

@@ -2091,10 +2091,10 @@
     def create_json_decoder(model: JsonDecoderModel, config: Config, **kwargs: Any) -> Decoder:
         return JsonDecoder(parameters={})
 
-    @staticmethod
-    def create_csv_decoder(model: CsvDecoderModel, config: Config, **kwargs: Any) -> Decoder:
+    def create_csv_decoder(self, model: CsvDecoderModel, config: Config, **kwargs: Any) -> Decoder:
         return CompositeRawDecoder(
-            parser=ModelToComponentFactory._get_parser(model, config), stream_response=True
+            parser=ModelToComponentFactory._get_parser(model, config),
+            stream_response=False if self._emit_connector_builder_messages else True,
         )
 
     @staticmethod

@@ -2103,10 +2103,12 @@
             parser=ModelToComponentFactory._get_parser(model, config), stream_response=True
         )
 
-    @staticmethod
-    def create_gzip_decoder(model: GzipDecoderModel, config: Config, **kwargs: Any) -> Decoder:
+    def create_gzip_decoder(
+        self, model: GzipDecoderModel, config: Config, **kwargs: Any
+    ) -> Decoder:
         return CompositeRawDecoder(
-            parser=ModelToComponentFactory._get_parser(model, config), stream_response=True
+            parser=ModelToComponentFactory._get_parser(model, config),
+            stream_response=False if self._emit_connector_builder_messages else True,
         )
 
     @staticmethod

@@ -3024,8 +3026,9 @@
         )
 
     def create_rate(self, model: RateModel, config: Config, **kwargs: Any) -> Rate:
+        interpolated_limit = InterpolatedString.create(str(model.limit), parameters={})
         return Rate(
-            limit=model.limit,
+            limit=int(interpolated_limit.eval(config=config)),
             interval=parse_duration(model.interval),
         )
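
`create_rate` now routes `model.limit` through `InterpolatedString`, so both literal and interpolated limits resolve to an integer. A minimal sketch of the evaluation step (the `max_calls` config key is an assumption):

```python
from airbyte_cdk.sources.declarative.interpolation import InterpolatedString

config = {"max_calls": "100"}
interpolated_limit = InterpolatedString.create("{{ config['max_calls'] }}", parameters={})
limit = int(interpolated_limit.eval(config=config))
assert limit == 100
```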

airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py CHANGED

@@ -10,7 +10,7 @@ from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping i
     NestedMapping,
 )
 from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
-from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
+from airbyte_cdk.sources.types import Config, StreamSlice
 
 
 @dataclass

@@ -42,20 +42,17 @@ class InterpolatedNestedRequestInputProvider:
 
     def eval_request_inputs(
         self,
-        stream_state: Optional[StreamState] = None,
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
         """
         Returns the request inputs to set on an outgoing HTTP request
 
-        :param stream_state: The stream state
         :param stream_slice: The stream slice
         :param next_page_token: The pagination token
         :return: The request inputs to set on an outgoing HTTP request
         """
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }

airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py CHANGED

@@ -37,7 +37,6 @@ class InterpolatedRequestInputProvider:
 
     def eval_request_inputs(
         self,
-        stream_state: Optional[StreamState] = None,
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
         valid_key_types: Optional[Tuple[Type[Any]]] = None,

@@ -46,7 +45,6 @@ class InterpolatedRequestInputProvider:
         """
         Returns the request inputs to set on an outgoing HTTP request
 
-        :param stream_state: The stream state
         :param stream_slice: The stream slice
         :param next_page_token: The pagination token
         :param valid_key_types: A tuple of types that the interpolator should allow

@@ -54,7 +52,6 @@ class InterpolatedRequestInputProvider:
         :return: The request inputs to set on an outgoing HTTP request
         """
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }

airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py CHANGED

@@ -5,8 +5,6 @@
 from dataclasses import InitVar, dataclass, field
 from typing import Any, Mapping, MutableMapping, Optional, Union
 
-from typing_extensions import deprecated
-
 from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping import NestedMapping
 from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_nested_request_input_provider import (
     InterpolatedNestedRequestInputProvider,

@@ -17,7 +15,6 @@ from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_req
 from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import (
     RequestOptionsProvider,
 )
-from airbyte_cdk.sources.source import ExperimentalClassWarning
 from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
 
 RequestInput = Union[str, Mapping[str, str]]

@@ -80,7 +77,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> MutableMapping[str, Any]:
         interpolated_value = self._parameter_interpolator.eval_request_inputs(
-            stream_state,
             stream_slice,
             next_page_token,
             valid_key_types=(str,),

@@ -97,9 +93,7 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
-        return self._headers_interpolator.eval_request_inputs(
-            stream_state, stream_slice, next_page_token
-        )
+        return self._headers_interpolator.eval_request_inputs(stream_slice, next_page_token)
 
     def get_request_body_data(
         self,

@@ -109,7 +103,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Union[Mapping[str, Any], str]:
         return self._body_data_interpolator.eval_request_inputs(
-            stream_state,
             stream_slice,
             next_page_token,
             valid_key_types=(str,),

@@ -123,42 +116,4 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
-        return self._body_json_interpolator.eval_request_inputs(
-            stream_state, stream_slice, next_page_token
-        )
-
-    @deprecated(
-        "This class is temporary and used to incrementally deliver low-code to concurrent",
-        category=ExperimentalClassWarning,
-    )
-    def request_options_contain_stream_state(self) -> bool:
-        """
-        Temporary helper method used as we move low-code streams to the concurrent framework. This method determines if
-        the InterpolatedRequestOptionsProvider has is a dependency on a non-thread safe interpolation context such as
-        stream_state.
-        """
-
-        return (
-            self._check_if_interpolation_uses_stream_state(self.request_parameters)
-            or self._check_if_interpolation_uses_stream_state(self.request_headers)
-            or self._check_if_interpolation_uses_stream_state(self.request_body_data)
-            or self._check_if_interpolation_uses_stream_state(self.request_body_json)
-        )
-
-    @staticmethod
-    def _check_if_interpolation_uses_stream_state(
-        request_input: Optional[Union[RequestInput, NestedMapping]],
-    ) -> bool:
-        if not request_input:
-            return False
-        elif isinstance(request_input, str):
-            return "stream_state" in request_input
-        else:
-            for key, val in request_input.items():
-                # Covers the case of RequestInput in the form of a string or Mapping[str, str]. It also covers the case
-                # of a NestedMapping where the value is a string.
-                # Note: Doesn't account for nested mappings for request_body_json, but I don't see stream_state used in that way
-                # in our code
-                if "stream_state" in key or (isinstance(val, str) and "stream_state" in val):
-                    return True
-            return False
+        return self._body_json_interpolator.eval_request_inputs(stream_slice, next_page_token)

airbyte_cdk/sources/declarative/retrievers/simple_retriever.py CHANGED

@@ -133,7 +133,6 @@ class SimpleRetriever(Retriever):
 
         mappings = [
             paginator_method(
-                stream_state=stream_state,
                 stream_slice=stream_slice,
                 next_page_token=next_page_token,
             ),

@@ -141,7 +140,6 @@ class SimpleRetriever(Retriever):
         if not next_page_token or not self.ignore_stream_slicer_parameters_on_paginated_requests:
             mappings.append(
                 stream_slicer_method(
-                    stream_state=stream_state,
                     stream_slice=stream_slice,
                     next_page_token=next_page_token,
                 )

airbyte_cdk/sources/declarative/transformations/add_fields.py CHANGED

@@ -64,9 +64,9 @@ class AddFields(RecordTransformation):
           - path: ["shop_id"]
             value: "{{ config.shop_id }}"
 
-          # from stream_state
-          - path: ["
-            value: "{{
+          # from stream_interval
+          - path: ["date"]
+            value: "{{ stream_interval.start_date }}"
 
           # from record
           - path: ["unnested_value"]

@@ -128,7 +128,7 @@ class AddFields(RecordTransformation):
     ) -> None:
         if config is None:
             config = {}
-        kwargs = {"record": record, "stream_state": stream_state, "stream_slice": stream_slice}
+        kwargs = {"record": record, "stream_slice": stream_slice}
         for parsed_field in self._parsed_fields:
             valid_types = (parsed_field.value_type,) if parsed_field.value_type else None
             value = parsed_field.value.eval(config, valid_types=valid_types, **kwargs)

{airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.36.1.dist-info}/RECORD CHANGED

@@ -67,15 +67,15 @@ airbyte_cdk/sources/declarative/checks/check_stream.py,sha256=dAA-UhmMj0WLXCkRQr
 airbyte_cdk/sources/declarative/checks/connection_checker.py,sha256=MBRJo6WJlZQHpIfOGaNOkkHUmgUl_4wDM6VPo41z5Ss,1383
 airbyte_cdk/sources/declarative/concurrency_level/__init__.py,sha256=5XUqrmlstYlMM0j6crktlKQwALek0uiz2D3WdM46MyA,191
 airbyte_cdk/sources/declarative/concurrency_level/concurrency_level.py,sha256=YIwCTCpOr_QSNW4ltQK0yUGWInI8PKNY216HOOegYLk,2101
-airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=
+airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=KBF9wdPC5KauFwg9dv4pFHLz01ZMwbMvN5ZCcZgiBEE,25424
 airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=l9LG7Qm6e5r_qgqfVKnx3mXYtg1I9MmMjomVIPfU4XA,177
 airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=SX9JjdesN1edN2WVUVMzU_ptqp2QB1OnsnjZ4mwcX7w,2579
 airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
-airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=
+airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=5o5GsltzbVL2jyXvjWzUoV_r5xpwG_YdLSVUuG_d_34,144548
 airbyte_cdk/sources/declarative/declarative_source.py,sha256=nF7wBqFd3AQmEKAm4CnIo29CJoQL562cJGSCeL8U8bA,1531
 airbyte_cdk/sources/declarative/declarative_stream.py,sha256=venZjfpvtqr3oFSuvMBWtn4h9ayLhD4L65ACuXCDZ64,10445
 airbyte_cdk/sources/declarative/decoders/__init__.py,sha256=JHb_0d3SE6kNY10mxA5YBEKPeSbsWYjByq1gUQxepoE,953
-airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py,sha256=
+airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py,sha256=DJbWaaJ5LHCBpyWz-4bEw8rqtJYqabEYZtxnfRtWFE0,4946
 airbyte_cdk/sources/declarative/decoders/decoder.py,sha256=sl-Gt8lXi7yD2Q-sD8je5QS2PbgrgsYjxRLWsay7DMc,826
 airbyte_cdk/sources/declarative/decoders/json_decoder.py,sha256=BdWpXXPhEGf_zknggJmhojLosmxuw51RBVTS0jvdCPc,2080
 airbyte_cdk/sources/declarative/decoders/noop_decoder.py,sha256=iZh0yKY_JzgBnJWiubEusf5c0o6Khd-8EWFWT-8EgFo,542

@@ -92,34 +92,34 @@ airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=HCqx7IyENM_
 airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=LhqGDfX06_dDYLKsIVnwQ_nAWCln-v8PV7Wgt_QVeTI,6533
 airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
 airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=U1oZKtBaEC6IACmvziY9Wzg7Z8EgF4ZuR7NwvjlB_Sk,1255
-airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=
+airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=Pg2phEFT9T8AzUjK6hVhn0rgR3yY6JPF-Dfv0g1m5dQ,19191
 airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=Rbe6lJLTtZ5en33MwZiB9-H9-AwDMNHgwBZs8EqhYqk,22172
 airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
-airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=
+airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=2tsE6FgXzemf4fZZ4uGtd8QpRBl9GJ2CRqSNJE5p0EI,16077
 airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py,sha256=9IAJTCiRUXvhFFz-IhZtYh_KfAjLHqthsYf2jErQRls,17728
 airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py,sha256=2YBOA2NnwAeIKlIhSwUB_W-FaGnPcmrG_liY7b4mV2Y,8365
 airbyte_cdk/sources/declarative/incremental/resumable_full_refresh_cursor.py,sha256=10LFv1QPM-agVKl6eaANmEBOfd7gZgBrkoTcMggsieQ,4809
-airbyte_cdk/sources/declarative/interpolation/__init__.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/filters.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=
-airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=
+airbyte_cdk/sources/declarative/interpolation/__init__.py,sha256=Kh7FxhfetyNVDnAQ9zSxNe4oUbb8CvoW7Mqz7cs2iPg,437
+airbyte_cdk/sources/declarative/interpolation/filters.py,sha256=JXdjSmi6eTUTA-qBoR9wSmXlEYvVCOZRKq2GhkDg09M,3640
+airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py,sha256=8F3ntT_Mfo8cO9n6dCq8rTfJIpfKmzRCsVtVdhzaoGc,1964
+airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=h36RIng4GZ9v4o_fRmgJjTNOtWmhK7NOILU1oSKPE4Q,2083
+airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=vjwvkLk7_l6YDcFClwjCMcTleRjQBh7-dzny7PUaoG8,1857
+airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=CQkHqGlfa87G6VYMtBAQWin7ECKpfMdrDcg0JO5_rhc,3212
+airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=9IoeuWam3L6GyN10L6U8xNWXmkt9cnahSDNkez1OmFY,982
+airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=UQeuS4Vpyp4hlOn-R3tRyeBX0e9IoV6jQ6gH-Jz8lY0,7182
+airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=ajgVJT9sZBUFZUDLjzyPWupCNXt1HvzbCq-4yv9aY3c,5042
 airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=TN6GCgLXaWDONTaJwQ3A5ELqC-sxwKz-UYSraJYB-dI,17078
 airbyte_cdk/sources/declarative/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py,sha256=iemy3fKLczcU0-Aor7tx5jcT6DRedKMqyK7kCOp01hg,3924
 airbyte_cdk/sources/declarative/migrations/state_migration.py,sha256=KWPjealMLKSMtajXgkdGgKg7EmTLR-CqqD7UIh0-eDU,794
 airbyte_cdk/sources/declarative/models/__init__.py,sha256=nUFxNCiKeYRVXuZEKA7GD-lTHxsiKcQ8FitZjKhPIvE,100
-airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=
+airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=gNL9DqajD2A8UBnKAz7F7YQuYH7frQyHiPQPIMGq2xo,101958
 airbyte_cdk/sources/declarative/parsers/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQyRMXTs4GTvlRi3ImKnIioo,61
 airbyte_cdk/sources/declarative/parsers/custom_code_compiler.py,sha256=958MMX6_ZOJUlDDdNr9Krosgi2bCKGx2Z765M2Woz18,5505
 airbyte_cdk/sources/declarative/parsers/custom_exceptions.py,sha256=Rir9_z3Kcd5Es0-LChrzk-0qubAsiK_RSEnLmK2OXm8,553
 airbyte_cdk/sources/declarative/parsers/manifest_component_transformer.py,sha256=CXwTfD3wSQq3okcqwigpprbHhSURUokh4GK2OmOyKC8,9132
 airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py,sha256=IWUOdF03o-aQn0Occo1BJCxU0Pz-QILk5L67nzw2thw,6803
-airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=
+airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=IVQdcy1-kXz-T23zsMdoPBK2xuDE7gXHRtNTE5PBQ80,133990
 airbyte_cdk/sources/declarative/partition_routers/__init__.py,sha256=HJ-Syp3p7RpyR_OK0X_a2kSyISfu3W-PKrRI16iY0a8,957
 airbyte_cdk/sources/declarative/partition_routers/async_job_partition_router.py,sha256=VelO7zKqKtzMJ35jyFeg0ypJLQC0plqqIBNXoBW1G2E,3001
 airbyte_cdk/sources/declarative/partition_routers/cartesian_product_stream_slicer.py,sha256=c5cuVFM6NFkuQqG8Z5IwkBuwDrvXZN1CunUOM_L0ezg,6892

@@ -143,7 +143,7 @@ airbyte_cdk/sources/declarative/requesters/error_handlers/default_http_response_
 airbyte_cdk/sources/declarative/requesters/error_handlers/error_handler.py,sha256=Tan66odx8VHzfdyyXMQkXz2pJYksllGqvxmpoajgcK4,669
 airbyte_cdk/sources/declarative/requesters/error_handlers/http_response_filter.py,sha256=E-fQbt4ShfxZVoqfnmOx69C6FUPWZz8BIqI3DN9Kcjs,7935
 airbyte_cdk/sources/declarative/requesters/http_job_repository.py,sha256=3GtOefPH08evlSUxaILkiKLTHbIspFY4qd5B3ZqNE60,10063
-airbyte_cdk/sources/declarative/requesters/http_requester.py,sha256=
+airbyte_cdk/sources/declarative/requesters/http_requester.py,sha256=pR2uR5b9eGyvYIOYwus3mz3OaqRu1ozwja_ys1SE7hc,14952
 airbyte_cdk/sources/declarative/requesters/paginators/__init__.py,sha256=uArbKs9JKNCt7t9tZoeWwjDpyI1HoPp29FNW0JzvaEM,644
 airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py,sha256=ZW4lwWNAzb4zL0jKc-HjowP5-y0Zg9xi0YlK6tkx_XY,12057
 airbyte_cdk/sources/declarative/requesters/paginators/no_pagination.py,sha256=j6j9QRPaTbKQ2N661RFVKthhkWiodEp6ut0tKeEd0Ng,2019

@@ -158,9 +158,9 @@ airbyte_cdk/sources/declarative/requesters/request_option.py,sha256=Bl0gxGWudmwT
 airbyte_cdk/sources/declarative/requesters/request_options/__init__.py,sha256=WCwpKqM4wKqy-DHJaCHbKAlFqRVOqMi9K5qonxIfi_Y,809
 airbyte_cdk/sources/declarative/requesters/request_options/datetime_based_request_options_provider.py,sha256=31nG6_0igidJFQon37-WeQkTpG3g2A5ZmlluI3ilZdE,3632
 airbyte_cdk/sources/declarative/requesters/request_options/default_request_options_provider.py,sha256=SRROdPJZ5kuqHLOlkh115pWP9nDGfDxRYPgH9oD3hPo,1798
-airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py,sha256=
-airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py,sha256=
-airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py,sha256=
+airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py,sha256=86YozYuBDfu0t9NbevIvQoGU0vqTP4rt3dRSTsHz3PA,2269
+airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py,sha256=rR00kE64U2yL0McU1gPr4_W5_sLUqwDgL3Nvj691nRU,2884
+airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py,sha256=vOsdHfWHiTFc89WENHPv1hcxLgdzycMXVT_IEtLuhfs,5012
 airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py,sha256=8YRiDzjYvqJ-aMmKFcjqzv_-e8OZ5QG_TbpZ-nuCu6s,2590
 airbyte_cdk/sources/declarative/requesters/request_path.py,sha256=S3MeFvcaQrMbOkSY2W2VbXLNomqt_3eXqVd9ZhgNwUs,299
 airbyte_cdk/sources/declarative/requesters/requester.py,sha256=iVVpXQ4KEd9OyZNwmOofMvx7_06i8ZRxGo3aNTrEQLM,4946

@@ -171,7 +171,7 @@ airbyte_cdk/sources/declarative/resolvers/http_components_resolver.py,sha256=Aio
 airbyte_cdk/sources/declarative/retrievers/__init__.py,sha256=ix9m1dkR69DcXCXUKC5RK_ZZM7ojTLBQ4IkWQTfmfCk,456
 airbyte_cdk/sources/declarative/retrievers/async_retriever.py,sha256=2oQn_vo7uJKp4pdMnsF5CG5Iwc9rkPeEOLoAm_9bcus,3222
 airbyte_cdk/sources/declarative/retrievers/retriever.py,sha256=XPLs593Xv8c5cKMc37XzUAYmzlXd1a7eSsspM-CMuWA,1696
-airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=
+airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=bOAKQLgMv1Vca-ozMPRVAg1V5nkyUoPwqC02lKpnLiM,24575
 airbyte_cdk/sources/declarative/schema/__init__.py,sha256=xU45UvM5O4c1PSM13UHpCdh5hpW3HXy9vRRGEiAC1rg,795
 airbyte_cdk/sources/declarative/schema/default_schema_loader.py,sha256=KTACrIE23a83wsm3Rd9Eb4K6-20lrGqYxTHNp9yxsso,1820
 airbyte_cdk/sources/declarative/schema/dynamic_schema_loader.py,sha256=J8Q_iJYhcSQLWyt0bTZCbDAGpxt9G8FCc6Q9jtGsNzw,10703

@@ -184,7 +184,7 @@ airbyte_cdk/sources/declarative/stream_slicers/__init__.py,sha256=sI9vhc95RwJYOn
 airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py,sha256=RW1Q44ml-VWeMl4lNcV6EfyzrzCZkjj-hd0Omx_n_n4,3405
 airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py,sha256=SOkIPBi2Wu7yxIvA15yFzUAB95a3IzA8LPq5DEqHQQc,725
 airbyte_cdk/sources/declarative/transformations/__init__.py,sha256=CPJ8TlMpiUmvG3624VYu_NfTzxwKcfBjM2Q2wJ7fkSA,919
-airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=
+airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=7UHCGc4xOxkYs5iXbPAPrP3-IEY60A-Go8QushsmaqY,4959
 airbyte_cdk/sources/declarative/transformations/dpath_flatten_fields.py,sha256=1A-DWGjMqY4ggzRUZsZ3Sjrt-xsNgwUo5c72sSc5OZ0,2077
 airbyte_cdk/sources/declarative/transformations/flatten_fields.py,sha256=yT3owG6rMKaRX-LJ_T-jSTnh1B5NoAHyH4YZN9yOvE8,1758
 airbyte_cdk/sources/declarative/transformations/keys_replace_transformation.py,sha256=vbIn6ump-Ut6g20yMub7PFoPBhOKVtrHSAUdcOUdLfw,1999

@@ -360,9 +360,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
-airbyte_cdk-6.36.0.dev0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
-airbyte_cdk-6.36.0.dev0.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
-airbyte_cdk-6.36.0.dev0.dist-info/METADATA,sha256=
-airbyte_cdk-6.36.0.dev0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-airbyte_cdk-6.36.0.dev0.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
-airbyte_cdk-6.36.0.dev0.dist-info/RECORD,,
+airbyte_cdk-6.36.1.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.36.1.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+airbyte_cdk-6.36.1.dist-info/METADATA,sha256=M6sRWwrp7ag_VCNrcA-JX0lQwll_VT2BPXLkpCHe5dA,6010
+airbyte_cdk-6.36.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.36.1.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
+airbyte_cdk-6.36.1.dist-info/RECORD,,