airbyte-cdk 6.34.0.dev2__py3-none-any.whl → 6.34.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25) hide show
  1. airbyte_cdk/sources/declarative/concurrent_declarative_source.py +0 -75
  2. airbyte_cdk/sources/declarative/declarative_component_schema.yaml +10 -15
  3. airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +13 -66
  4. airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py +6 -2
  5. airbyte_cdk/sources/declarative/interpolation/__init__.py +1 -1
  6. airbyte_cdk/sources/declarative/interpolation/filters.py +2 -1
  7. airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py +1 -1
  8. airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py +1 -1
  9. airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py +1 -1
  10. airbyte_cdk/sources/declarative/interpolation/interpolated_string.py +1 -1
  11. airbyte_cdk/sources/declarative/interpolation/interpolation.py +2 -1
  12. airbyte_cdk/sources/declarative/interpolation/jinja.py +14 -1
  13. airbyte_cdk/sources/declarative/interpolation/macros.py +19 -4
  14. airbyte_cdk/sources/declarative/requesters/http_requester.py +0 -1
  15. airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py +1 -4
  16. airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py +0 -3
  17. airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py +2 -47
  18. airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +0 -2
  19. airbyte_cdk/sources/declarative/transformations/add_fields.py +4 -4
  20. {airbyte_cdk-6.34.0.dev2.dist-info → airbyte_cdk-6.34.1.dist-info}/METADATA +1 -1
  21. {airbyte_cdk-6.34.0.dev2.dist-info → airbyte_cdk-6.34.1.dist-info}/RECORD +25 -25
  22. {airbyte_cdk-6.34.0.dev2.dist-info → airbyte_cdk-6.34.1.dist-info}/LICENSE.txt +0 -0
  23. {airbyte_cdk-6.34.0.dev2.dist-info → airbyte_cdk-6.34.1.dist-info}/LICENSE_SHORT +0 -0
  24. {airbyte_cdk-6.34.0.dev2.dist-info → airbyte_cdk-6.34.1.dist-info}/WHEEL +0 -0
  25. {airbyte_cdk-6.34.0.dev2.dist-info → airbyte_cdk-6.34.1.dist-info}/entry_points.txt +0 -0
@@ -24,7 +24,6 @@ from airbyte_cdk.sources.declarative.incremental.datetime_based_cursor import Da
24
24
  from airbyte_cdk.sources.declarative.incremental.per_partition_with_global import (
25
25
  PerPartitionWithGlobalCursor,
26
26
  )
27
- from airbyte_cdk.sources.declarative.interpolation import InterpolatedString
28
27
  from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource
29
28
  from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
30
29
  ConcurrencyLevel as ConcurrencyLevelModel,
@@ -36,13 +35,11 @@ from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import (
36
35
  ModelToComponentFactory,
37
36
  )
38
37
  from airbyte_cdk.sources.declarative.partition_routers import AsyncJobPartitionRouter
39
- from airbyte_cdk.sources.declarative.requesters import HttpRequester
40
38
  from airbyte_cdk.sources.declarative.retrievers import AsyncRetriever, Retriever, SimpleRetriever
41
39
  from airbyte_cdk.sources.declarative.stream_slicers.declarative_partition_generator import (
42
40
  DeclarativePartitionFactory,
43
41
  StreamSlicerPartitionGenerator,
44
42
  )
45
- from airbyte_cdk.sources.declarative.transformations.add_fields import AddFields
46
43
  from airbyte_cdk.sources.declarative.types import ConnectionDefinition
47
44
  from airbyte_cdk.sources.source import TState
48
45
  from airbyte_cdk.sources.streams import Stream
@@ -321,9 +318,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
321
318
  incremental_sync_component_definition
322
319
  and incremental_sync_component_definition.get("type", "")
323
320
  == DatetimeBasedCursorModel.__name__
324
- and self._stream_supports_concurrent_partition_processing(
325
- declarative_stream=declarative_stream
326
- )
327
321
  and hasattr(declarative_stream.retriever, "stream_slicer")
328
322
  and isinstance(
329
323
  declarative_stream.retriever.stream_slicer, PerPartitionWithGlobalCursor
@@ -390,9 +384,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
390
384
  and bool(incremental_sync_component_definition)
391
385
  and incremental_sync_component_definition.get("type", "")
392
386
  == DatetimeBasedCursorModel.__name__
393
- and self._stream_supports_concurrent_partition_processing(
394
- declarative_stream=declarative_stream
395
- )
396
387
  and hasattr(declarative_stream.retriever, "stream_slicer")
397
388
  and (
398
389
  isinstance(declarative_stream.retriever.stream_slicer, DatetimeBasedCursor)
@@ -400,72 +391,6 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
400
391
  )
401
392
  )
402
393
 
403
- def _stream_supports_concurrent_partition_processing(
404
- self, declarative_stream: DeclarativeStream
405
- ) -> bool:
406
- """
407
- Many connectors make use of stream_state during interpolation on a per-partition basis under the assumption that
408
- state is updated sequentially. Because the concurrent CDK engine processes different partitions in parallel,
409
- stream_state is no longer a thread-safe interpolation context. It would be a race condition because a cursor's
410
- stream_state can be updated in any order depending on which stream partitions finish first.
411
-
412
- We should start to move away from depending on the value of stream_state for low-code components that operate
413
- per-partition, but we need to gate this, otherwise some connectors will be blocked from publishing. See the
414
- cdk-migrations.md for the full list of connectors.
415
- """
416
-
417
- if isinstance(declarative_stream.retriever, SimpleRetriever) and isinstance(
418
- declarative_stream.retriever.requester, HttpRequester
419
- ):
420
- http_requester = declarative_stream.retriever.requester
421
- if "stream_state" in http_requester._path.string:
422
- self.logger.warning(
423
- f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the HttpRequester which is not thread-safe. Defaulting to synchronous processing"
424
- )
425
- return False
426
-
427
- request_options_provider = http_requester._request_options_provider
428
- if request_options_provider.request_options_contain_stream_state():
429
- self.logger.warning(
430
- f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the HttpRequester which is not thread-safe. Defaulting to synchronous processing"
431
- )
432
- return False
433
-
434
- record_selector = declarative_stream.retriever.record_selector
435
- if isinstance(record_selector, RecordSelector):
436
- if (
437
- record_selector.record_filter
438
- and not isinstance(
439
- record_selector.record_filter, ClientSideIncrementalRecordFilterDecorator
440
- )
441
- and "stream_state" in record_selector.record_filter.condition
442
- ):
443
- self.logger.warning(
444
- f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the RecordFilter which is not thread-safe. Defaulting to synchronous processing"
445
- )
446
- return False
447
-
448
- for add_fields in [
449
- transformation
450
- for transformation in record_selector.transformations
451
- if isinstance(transformation, AddFields)
452
- ]:
453
- for field in add_fields.fields:
454
- if isinstance(field.value, str) and "stream_state" in field.value:
455
- self.logger.warning(
456
- f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the AddFields which is not thread-safe. Defaulting to synchronous processing"
457
- )
458
- return False
459
- if (
460
- isinstance(field.value, InterpolatedString)
461
- and "stream_state" in field.value.string
462
- ):
463
- self.logger.warning(
464
- f"Low-code stream '{declarative_stream.name}' uses interpolation of stream_state in the AddFields which is not thread-safe. Defaulting to synchronous processing"
465
- )
466
- return False
467
- return True
468
-
469
394
  @staticmethod
470
395
  def _get_retriever(
471
396
  declarative_stream: DeclarativeStream, stream_state: Mapping[str, Any]
@@ -82,7 +82,6 @@ definitions:
82
82
  - stream_interval
83
83
  - stream_partition
84
84
  - stream_slice
85
- - stream_state
86
85
  examples:
87
86
  - "{{ record['updates'] }}"
88
87
  - "{{ record['MetaData']['LastUpdatedTime'] }}"
@@ -1776,7 +1775,6 @@ definitions:
1776
1775
  - stream_interval
1777
1776
  - stream_partition
1778
1777
  - stream_slice
1779
- - stream_state
1780
1778
  examples:
1781
1779
  - "/products"
1782
1780
  - "/quotes/{{ stream_partition['id'] }}/quote_line_groups"
@@ -1826,7 +1824,6 @@ definitions:
1826
1824
  - stream_interval
1827
1825
  - stream_partition
1828
1826
  - stream_slice
1829
- - stream_state
1830
1827
  examples:
1831
1828
  - |
1832
1829
  [{"clause": {"type": "timestamp", "operator": 10, "parameters":
@@ -1844,7 +1841,6 @@ definitions:
1844
1841
  - stream_interval
1845
1842
  - stream_partition
1846
1843
  - stream_slice
1847
- - stream_state
1848
1844
  examples:
1849
1845
  - sort_order: "ASC"
1850
1846
  sort_field: "CREATED_AT"
@@ -1865,7 +1861,6 @@ definitions:
1865
1861
  - stream_interval
1866
1862
  - stream_partition
1867
1863
  - stream_slice
1868
- - stream_state
1869
1864
  examples:
1870
1865
  - Output-Format: JSON
1871
1866
  - Version: "{{ config['version'] }}"
@@ -1882,7 +1877,6 @@ definitions:
1882
1877
  - stream_interval
1883
1878
  - stream_partition
1884
1879
  - stream_slice
1885
- - stream_state
1886
1880
  examples:
1887
1881
  - unit: "day"
1888
1882
  - query: 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"'
@@ -2237,7 +2231,6 @@ definitions:
2237
2231
  interpolation_context:
2238
2232
  - config
2239
2233
  - record
2240
- - stream_state
2241
2234
  - stream_slice
2242
2235
  new:
2243
2236
  type: string
@@ -2251,7 +2244,6 @@ definitions:
2251
2244
  interpolation_context:
2252
2245
  - config
2253
2246
  - record
2254
- - stream_state
2255
2247
  - stream_slice
2256
2248
  $parameters:
2257
2249
  type: object
@@ -2901,7 +2893,6 @@ definitions:
2901
2893
  - stream_interval
2902
2894
  - stream_partition
2903
2895
  - stream_slice
2904
- - stream_state
2905
2896
  examples:
2906
2897
  - "{{ record['created_at'] >= stream_interval['start_time'] }}"
2907
2898
  - "{{ record.status in ['active', 'expired'] }}"
@@ -3689,12 +3680,6 @@ interpolation:
3689
3680
  - title: stream_slice
3690
3681
  description: This variable is deprecated. Use stream_interval or stream_partition instead.
3691
3682
  type: object
3692
- - title: stream_state
3693
- description: The current state of the stream. The object's keys are defined by the incremental sync's cursor_field and the partition router's values.
3694
- type: object
3695
- examples:
3696
- - created_at: "2020-01-01 00:00:00.000+00:00"
3697
- - updated_at: "2020-01-02 00:00:00.000+00:00"
3698
3683
  macros:
3699
3684
  - title: now_utc
3700
3685
  description: Returns the current date and time in the UTC timezone.
@@ -3759,6 +3744,16 @@ interpolation:
3759
3744
  - "{{ format_datetime(config['start_time'], '%Y-%m-%d') }}"
3760
3745
  - "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%S.%fZ') }}"
3761
3746
  - "{{ format_datetime(config['start_date'], '%Y-%m-%dT%H:%M:%S.%fZ', '%a, %d %b %Y %H:%M:%S %z') }}"
3747
+ - title: str_to_datetime
3748
+ description: Converts a string to a datetime object with UTC timezone.
3749
+ arguments:
3750
+ s: The string to convert.
3751
+ return_type: datetime.datetime
3752
+ examples:
3753
+ - "{{ str_to_datetime('2022-01-14') }}"
3754
+ - "{{ str_to_datetime('2022-01-01 13:45:30') }}"
3755
+ - "{{ str_to_datetime('2022-01-01T13:45:30+00:00') }}"
3756
+ - "{{ str_to_datetime('2022-01-01T13:45:30.123456Z') }}"
3762
3757
  filters:
3763
3758
  - title: hash
3764
3759
  description: Convert the specified value to a hashed string.
@@ -95,10 +95,6 @@ class ConcurrentPerPartitionCursor(Cursor):
95
95
  # the oldest partitions can be efficiently removed, maintaining the most recent partitions.
96
96
  self._cursor_per_partition: OrderedDict[str, ConcurrentCursor] = OrderedDict()
97
97
  self._semaphore_per_partition: OrderedDict[str, threading.Semaphore] = OrderedDict()
98
-
99
- # Parent-state tracking: store each partition’s parent state in creation order
100
- self._partition_parent_state_map: OrderedDict[str, Mapping[str, Any]] = OrderedDict()
101
-
102
98
  self._finished_partitions: set[str] = set()
103
99
  self._lock = threading.Lock()
104
100
  self._timer = Timer()
@@ -159,45 +155,7 @@ class ConcurrentPerPartitionCursor(Cursor):
159
155
  and self._semaphore_per_partition[partition_key]._value == 0
160
156
  ):
161
157
  self._update_global_cursor(cursor.state[self.cursor_field.cursor_field_key])
162
-
163
- self._check_and_update_parent_state()
164
-
165
- self._emit_state_message()
166
-
167
- def _check_and_update_parent_state(self) -> None:
168
- """
169
- Pop the leftmost partition state from _partition_parent_state_map only if
170
- *all partitions* up to (and including) that partition key in _semaphore_per_partition
171
- are fully finished (i.e. in _finished_partitions and semaphore._value == 0).
172
- """
173
- last_closed_state = None
174
-
175
- while self._partition_parent_state_map:
176
- # Look at the earliest partition key in creation order
177
- earliest_key = next(iter(self._partition_parent_state_map))
178
-
179
- # Verify ALL partitions from the left up to earliest_key are finished
180
- all_left_finished = True
181
- for p_key, sem in self._semaphore_per_partition.items():
182
- # If any earlier partition is still not finished, we must stop
183
- if p_key not in self._finished_partitions or sem._value != 0:
184
- all_left_finished = False
185
- break
186
- # Once we've reached earliest_key in the semaphore order, we can stop checking
187
- if p_key == earliest_key:
188
- break
189
-
190
- # If the partitions up to earliest_key are not all finished, break the while-loop
191
- if not all_left_finished:
192
- break
193
-
194
- # Otherwise, pop the leftmost entry from parent-state map
195
- _, closed_parent_state = self._partition_parent_state_map.popitem(last=False)
196
- last_closed_state = closed_parent_state
197
-
198
- # Update _parent_state if we actually popped at least one partition
199
- if last_closed_state is not None:
200
- self._parent_state = last_closed_state
158
+ self._emit_state_message()
201
159
 
202
160
  def ensure_at_least_one_state_emitted(self) -> None:
203
161
  """
@@ -243,19 +201,13 @@ class ConcurrentPerPartitionCursor(Cursor):
243
201
 
244
202
  slices = self._partition_router.stream_slices()
245
203
  self._timer.start()
246
- for partition, last, parent_state in iterate_with_last_flag_and_state(
247
- slices, self._partition_router.get_stream_state
248
- ):
249
- yield from self._generate_slices_from_partition(partition, parent_state)
204
+ for partition in slices:
205
+ yield from self._generate_slices_from_partition(partition)
250
206
 
251
- def _generate_slices_from_partition(
252
- self, partition: StreamSlice, parent_state: Mapping[str, Any]
253
- ) -> Iterable[StreamSlice]:
207
+ def _generate_slices_from_partition(self, partition: StreamSlice) -> Iterable[StreamSlice]:
254
208
  # Ensure the maximum number of partitions is not exceeded
255
209
  self._ensure_partition_limit()
256
210
 
257
- partition_key = self._to_partition_key(partition.partition)
258
-
259
211
  cursor = self._cursor_per_partition.get(self._to_partition_key(partition.partition))
260
212
  if not cursor:
261
213
  cursor = self._create_cursor(
@@ -264,26 +216,18 @@ class ConcurrentPerPartitionCursor(Cursor):
264
216
  )
265
217
  with self._lock:
266
218
  self._number_of_partitions += 1
267
- self._cursor_per_partition[partition_key] = cursor
268
- self._semaphore_per_partition[partition_key] = threading.Semaphore(0)
269
-
270
- with self._lock:
271
- if (
272
- len(self._partition_parent_state_map) == 0
273
- or self._partition_parent_state_map[
274
- next(reversed(self._partition_parent_state_map))
275
- ]
276
- != parent_state
277
- ):
278
- self._partition_parent_state_map[partition_key] = deepcopy(parent_state)
219
+ self._cursor_per_partition[self._to_partition_key(partition.partition)] = cursor
220
+ self._semaphore_per_partition[self._to_partition_key(partition.partition)] = (
221
+ threading.Semaphore(0)
222
+ )
279
223
 
280
224
  for cursor_slice, is_last_slice, _ in iterate_with_last_flag_and_state(
281
225
  cursor.stream_slices(),
282
226
  lambda: None,
283
227
  ):
284
- self._semaphore_per_partition[partition_key].release()
228
+ self._semaphore_per_partition[self._to_partition_key(partition.partition)].release()
285
229
  if is_last_slice:
286
- self._finished_partitions.add(partition_key)
230
+ self._finished_partitions.add(self._to_partition_key(partition.partition))
287
231
  yield StreamSlice(
288
232
  partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
289
233
  )
@@ -394,6 +338,9 @@ class ConcurrentPerPartitionCursor(Cursor):
394
338
  self._cursor_per_partition[self._to_partition_key(state["partition"])] = (
395
339
  self._create_cursor(state["cursor"])
396
340
  )
341
+ self._semaphore_per_partition[self._to_partition_key(state["partition"])] = (
342
+ threading.Semaphore(0)
343
+ )
397
344
 
398
345
  # set default state for missing partitions if it is per partition with fallback to global
399
346
  if self._GLOBAL_STATE_KEY in stream_state:
@@ -115,7 +115,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
115
115
  * Yield the last slice. At that point, once there are as many slices yielded as closes, the global slice will be closed too
116
116
  """
117
117
  slice_generator = (
118
- StreamSlice(partition=partition, cursor_slice=cursor_slice)
118
+ StreamSlice(
119
+ partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
120
+ )
119
121
  for partition in self._partition_router.stream_slices()
120
122
  for cursor_slice in self._stream_cursor.stream_slices()
121
123
  )
@@ -131,7 +133,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
131
133
 
132
134
  def generate_slices_from_partition(self, partition: StreamSlice) -> Iterable[StreamSlice]:
133
135
  slice_generator = (
134
- StreamSlice(partition=partition, cursor_slice=cursor_slice)
136
+ StreamSlice(
137
+ partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
138
+ )
135
139
  for cursor_slice in self._stream_cursor.stream_slices()
136
140
  )
137
141
 
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
  from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean
@@ -1,6 +1,7 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
+
4
5
  import base64
5
6
  import hashlib
6
7
  import json
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
  from dataclasses import InitVar, dataclass
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
 
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
 
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
  from dataclasses import InitVar, dataclass
@@ -1,7 +1,8 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
+
5
6
  from abc import ABC, abstractmethod
6
7
  from typing import Any, Optional
7
8
 
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
  import ast
@@ -11,10 +11,12 @@ from jinja2.environment import Template
11
11
  from jinja2.exceptions import UndefinedError
12
12
  from jinja2.sandbox import SandboxedEnvironment
13
13
 
14
+ from airbyte_cdk.models import FailureType
14
15
  from airbyte_cdk.sources.declarative.interpolation.filters import filters
15
16
  from airbyte_cdk.sources.declarative.interpolation.interpolation import Interpolation
16
17
  from airbyte_cdk.sources.declarative.interpolation.macros import macros
17
18
  from airbyte_cdk.sources.types import Config
19
+ from airbyte_cdk.utils import AirbyteTracedException
18
20
 
19
21
 
20
22
  class StreamPartitionAccessEnvironment(SandboxedEnvironment):
@@ -36,6 +38,10 @@ _ALIASES = {
36
38
  "stream_partition": "stream_slice", # Use stream_partition to access partition router's values
37
39
  }
38
40
 
41
+ _UNSUPPORTED_INTERPOLATION_VARIABLES: Mapping[str, str] = {
42
+ "stream_state": "`stream_state` is no longer supported for interpolation. We recommend using `stream_interval` instead. Please reference the CDK Migration Guide for more information.",
43
+ }
44
+
39
45
  # These extensions are not installed so they're not currently a problem,
40
46
  # but we're still explicitly removing them from the jinja context.
41
47
  # At worst, this is documentation that we do NOT want to include these extensions because of the potential security risks
@@ -95,6 +101,13 @@ class JinjaInterpolation(Interpolation):
95
101
  elif equivalent in context:
96
102
  context[alias] = context[equivalent]
97
103
 
104
+ for variable_name in _UNSUPPORTED_INTERPOLATION_VARIABLES:
105
+ if variable_name in input_str:
106
+ raise AirbyteTracedException(
107
+ message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
108
+ internal_message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
109
+ failure_type=FailureType.config_error,
110
+ )
98
111
  try:
99
112
  if isinstance(input_str, str):
100
113
  result = self._eval(input_str, context)
@@ -1,5 +1,5 @@
1
1
  #
2
- # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
 
5
5
  import builtins
@@ -63,10 +63,24 @@ def timestamp(dt: Union[float, str]) -> Union[int, float]:
63
63
  if isinstance(dt, (int, float)):
64
64
  return int(dt)
65
65
  else:
66
- return _str_to_datetime(dt).astimezone(pytz.utc).timestamp()
66
+ return str_to_datetime(dt).astimezone(pytz.utc).timestamp()
67
67
 
68
68
 
69
- def _str_to_datetime(s: str) -> datetime.datetime:
69
+ def str_to_datetime(s: str) -> datetime.datetime:
70
+ """
71
+ Converts a string to a datetime object with UTC timezone
72
+
73
+ If the input string does not contain timezone information, UTC is assumed.
74
+ Supports both basic date strings like "2022-01-14" and datetime strings with optional timezone
75
+ like "2022-01-01T13:45:30+00:00".
76
+
77
+ Usage:
78
+ `"{{ str_to_datetime('2022-01-14') }}"`
79
+
80
+ :param s: string to parse as datetime
81
+ :return: datetime object in UTC timezone
82
+ """
83
+
70
84
  parsed_date = parser.isoparse(s)
71
85
  if not parsed_date.tzinfo:
72
86
  # Assume UTC if the input does not contain a timezone
@@ -155,7 +169,7 @@ def format_datetime(
155
169
  if isinstance(dt, datetime.datetime):
156
170
  return dt.strftime(format)
157
171
  dt_datetime = (
158
- datetime.datetime.strptime(dt, input_format) if input_format else _str_to_datetime(dt)
172
+ datetime.datetime.strptime(dt, input_format) if input_format else str_to_datetime(dt)
159
173
  )
160
174
  if format == "%s":
161
175
  return str(int(dt_datetime.timestamp()))
@@ -172,5 +186,6 @@ _macros_list = [
172
186
  duration,
173
187
  format_datetime,
174
188
  today_with_timezone,
189
+ str_to_datetime,
175
190
  ]
176
191
  macros = {f.__name__: f for f in _macros_list}
@@ -123,7 +123,6 @@ class HttpRequester(Requester):
123
123
  next_page_token: Optional[Mapping[str, Any]],
124
124
  ) -> str:
125
125
  kwargs = {
126
- "stream_state": stream_state,
127
126
  "stream_slice": stream_slice,
128
127
  "next_page_token": next_page_token,
129
128
  }
@@ -10,7 +10,7 @@ from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping i
10
10
  NestedMapping,
11
11
  )
12
12
  from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
13
- from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
13
+ from airbyte_cdk.sources.types import Config, StreamSlice
14
14
 
15
15
 
16
16
  @dataclass
@@ -42,20 +42,17 @@ class InterpolatedNestedRequestInputProvider:
42
42
 
43
43
  def eval_request_inputs(
44
44
  self,
45
- stream_state: Optional[StreamState] = None,
46
45
  stream_slice: Optional[StreamSlice] = None,
47
46
  next_page_token: Optional[Mapping[str, Any]] = None,
48
47
  ) -> Mapping[str, Any]:
49
48
  """
50
49
  Returns the request inputs to set on an outgoing HTTP request
51
50
 
52
- :param stream_state: The stream state
53
51
  :param stream_slice: The stream slice
54
52
  :param next_page_token: The pagination token
55
53
  :return: The request inputs to set on an outgoing HTTP request
56
54
  """
57
55
  kwargs = {
58
- "stream_state": stream_state,
59
56
  "stream_slice": stream_slice,
60
57
  "next_page_token": next_page_token,
61
58
  }
@@ -37,7 +37,6 @@ class InterpolatedRequestInputProvider:
37
37
 
38
38
  def eval_request_inputs(
39
39
  self,
40
- stream_state: Optional[StreamState] = None,
41
40
  stream_slice: Optional[StreamSlice] = None,
42
41
  next_page_token: Optional[Mapping[str, Any]] = None,
43
42
  valid_key_types: Optional[Tuple[Type[Any]]] = None,
@@ -46,7 +45,6 @@ class InterpolatedRequestInputProvider:
46
45
  """
47
46
  Returns the request inputs to set on an outgoing HTTP request
48
47
 
49
- :param stream_state: The stream state
50
48
  :param stream_slice: The stream slice
51
49
  :param next_page_token: The pagination token
52
50
  :param valid_key_types: A tuple of types that the interpolator should allow
@@ -54,7 +52,6 @@ class InterpolatedRequestInputProvider:
54
52
  :return: The request inputs to set on an outgoing HTTP request
55
53
  """
56
54
  kwargs = {
57
- "stream_state": stream_state,
58
55
  "stream_slice": stream_slice,
59
56
  "next_page_token": next_page_token,
60
57
  }
@@ -5,8 +5,6 @@
5
5
  from dataclasses import InitVar, dataclass, field
6
6
  from typing import Any, Mapping, MutableMapping, Optional, Union
7
7
 
8
- from typing_extensions import deprecated
9
-
10
8
  from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping import NestedMapping
11
9
  from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_nested_request_input_provider import (
12
10
  InterpolatedNestedRequestInputProvider,
@@ -17,7 +15,6 @@ from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_req
17
15
  from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import (
18
16
  RequestOptionsProvider,
19
17
  )
20
- from airbyte_cdk.sources.source import ExperimentalClassWarning
21
18
  from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
22
19
 
23
20
  RequestInput = Union[str, Mapping[str, str]]
@@ -80,7 +77,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
80
77
  next_page_token: Optional[Mapping[str, Any]] = None,
81
78
  ) -> MutableMapping[str, Any]:
82
79
  interpolated_value = self._parameter_interpolator.eval_request_inputs(
83
- stream_state,
84
80
  stream_slice,
85
81
  next_page_token,
86
82
  valid_key_types=(str,),
@@ -97,9 +93,7 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
97
93
  stream_slice: Optional[StreamSlice] = None,
98
94
  next_page_token: Optional[Mapping[str, Any]] = None,
99
95
  ) -> Mapping[str, Any]:
100
- return self._headers_interpolator.eval_request_inputs(
101
- stream_state, stream_slice, next_page_token
102
- )
96
+ return self._headers_interpolator.eval_request_inputs(stream_slice, next_page_token)
103
97
 
104
98
  def get_request_body_data(
105
99
  self,
@@ -109,7 +103,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
109
103
  next_page_token: Optional[Mapping[str, Any]] = None,
110
104
  ) -> Union[Mapping[str, Any], str]:
111
105
  return self._body_data_interpolator.eval_request_inputs(
112
- stream_state,
113
106
  stream_slice,
114
107
  next_page_token,
115
108
  valid_key_types=(str,),
@@ -123,42 +116,4 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
123
116
  stream_slice: Optional[StreamSlice] = None,
124
117
  next_page_token: Optional[Mapping[str, Any]] = None,
125
118
  ) -> Mapping[str, Any]:
126
- return self._body_json_interpolator.eval_request_inputs(
127
- stream_state, stream_slice, next_page_token
128
- )
129
-
130
- @deprecated(
131
- "This class is temporary and used to incrementally deliver low-code to concurrent",
132
- category=ExperimentalClassWarning,
133
- )
134
- def request_options_contain_stream_state(self) -> bool:
135
- """
136
- Temporary helper method used as we move low-code streams to the concurrent framework. This method determines if
137
- the InterpolatedRequestOptionsProvider has is a dependency on a non-thread safe interpolation context such as
138
- stream_state.
139
- """
140
-
141
- return (
142
- self._check_if_interpolation_uses_stream_state(self.request_parameters)
143
- or self._check_if_interpolation_uses_stream_state(self.request_headers)
144
- or self._check_if_interpolation_uses_stream_state(self.request_body_data)
145
- or self._check_if_interpolation_uses_stream_state(self.request_body_json)
146
- )
147
-
148
- @staticmethod
149
- def _check_if_interpolation_uses_stream_state(
150
- request_input: Optional[Union[RequestInput, NestedMapping]],
151
- ) -> bool:
152
- if not request_input:
153
- return False
154
- elif isinstance(request_input, str):
155
- return "stream_state" in request_input
156
- else:
157
- for key, val in request_input.items():
158
- # Covers the case of RequestInput in the form of a string or Mapping[str, str]. It also covers the case
159
- # of a NestedMapping where the value is a string.
160
- # Note: Doesn't account for nested mappings for request_body_json, but I don't see stream_state used in that way
161
- # in our code
162
- if "stream_state" in key or (isinstance(val, str) and "stream_state" in val):
163
- return True
164
- return False
119
+ return self._body_json_interpolator.eval_request_inputs(stream_slice, next_page_token)
@@ -133,7 +133,6 @@ class SimpleRetriever(Retriever):
133
133
 
134
134
  mappings = [
135
135
  paginator_method(
136
- stream_state=stream_state,
137
136
  stream_slice=stream_slice,
138
137
  next_page_token=next_page_token,
139
138
  ),
@@ -141,7 +140,6 @@ class SimpleRetriever(Retriever):
141
140
  if not next_page_token or not self.ignore_stream_slicer_parameters_on_paginated_requests:
142
141
  mappings.append(
143
142
  stream_slicer_method(
144
- stream_state=stream_state,
145
143
  stream_slice=stream_slice,
146
144
  next_page_token=next_page_token,
147
145
  )
@@ -64,9 +64,9 @@ class AddFields(RecordTransformation):
64
64
  - path: ["shop_id"]
65
65
  value: "{{ config.shop_id }}"
66
66
 
67
- # from state
68
- - path: ["current_state"]
69
- value: "{{ stream_state.cursor_field }}" # Or {{ stream_state['cursor_field'] }}
67
+ # from stream_interval
68
+ - path: ["date"]
69
+ value: "{{ stream_interval.start_date }}"
70
70
 
71
71
  # from record
72
72
  - path: ["unnested_value"]
@@ -128,7 +128,7 @@ class AddFields(RecordTransformation):
128
128
  ) -> None:
129
129
  if config is None:
130
130
  config = {}
131
- kwargs = {"record": record, "stream_state": stream_state, "stream_slice": stream_slice}
131
+ kwargs = {"record": record, "stream_slice": stream_slice}
132
132
  for parsed_field in self._parsed_fields:
133
133
  valid_types = (parsed_field.value_type,) if parsed_field.value_type else None
134
134
  value = parsed_field.value.eval(config, valid_types=valid_types, **kwargs)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: airbyte-cdk
3
- Version: 6.34.0.dev2
3
+ Version: 6.34.1
4
4
  Summary: A framework for writing Airbyte Connectors.
5
5
  Home-page: https://airbyte.com
6
6
  License: MIT
@@ -67,11 +67,11 @@ airbyte_cdk/sources/declarative/checks/check_stream.py,sha256=dAA-UhmMj0WLXCkRQr
67
67
  airbyte_cdk/sources/declarative/checks/connection_checker.py,sha256=MBRJo6WJlZQHpIfOGaNOkkHUmgUl_4wDM6VPo41z5Ss,1383
68
68
  airbyte_cdk/sources/declarative/concurrency_level/__init__.py,sha256=5XUqrmlstYlMM0j6crktlKQwALek0uiz2D3WdM46MyA,191
69
69
  airbyte_cdk/sources/declarative/concurrency_level/concurrency_level.py,sha256=YIwCTCpOr_QSNW4ltQK0yUGWInI8PKNY216HOOegYLk,2101
70
- airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=ThOqmaaqPykS2gTDKnlLSPy0p7djjV1Svazes58Rmic,28844
70
+ airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=MRnIdGeKPk1dO9-4eWRHa7mI6Ay_7szGo9H1RJSZDb8,24453
71
71
  airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=l9LG7Qm6e5r_qgqfVKnx3mXYtg1I9MmMjomVIPfU4XA,177
72
72
  airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=SX9JjdesN1edN2WVUVMzU_ptqp2QB1OnsnjZ4mwcX7w,2579
73
73
  airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
74
- airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=LExB78FzoQ1ueR5GRyEO-r4HqdghiywvvfyVUXhvU4I,144561
74
+ airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=sbaPluPICE9lJEOz2jDnRxvbw8MfVdolzEp7cSYoj84,144454
75
75
  airbyte_cdk/sources/declarative/declarative_source.py,sha256=nF7wBqFd3AQmEKAm4CnIo29CJoQL562cJGSCeL8U8bA,1531
76
76
  airbyte_cdk/sources/declarative/declarative_stream.py,sha256=venZjfpvtqr3oFSuvMBWtn4h9ayLhD4L65ACuXCDZ64,10445
77
77
  airbyte_cdk/sources/declarative/decoders/__init__.py,sha256=JHb_0d3SE6kNY10mxA5YBEKPeSbsWYjByq1gUQxepoE,953
@@ -92,22 +92,22 @@ airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=HCqx7IyENM_
92
92
  airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=LhqGDfX06_dDYLKsIVnwQ_nAWCln-v8PV7Wgt_QVeTI,6533
93
93
  airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
94
94
  airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=U1oZKtBaEC6IACmvziY9Wzg7Z8EgF4ZuR7NwvjlB_Sk,1255
95
- airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=T4a-WMMFzPNyMpt-aNyw-eoW90hZNO3M2-Sy57jYpCw,21418
95
+ airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=Pg2phEFT9T8AzUjK6hVhn0rgR3yY6JPF-Dfv0g1m5dQ,19191
96
96
  airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=Rbe6lJLTtZ5en33MwZiB9-H9-AwDMNHgwBZs8EqhYqk,22172
97
97
  airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
98
- airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=9HO-QbL9akvjq2NP7l498RwLA4iQZlBMQW1tZbt34I8,15943
98
+ airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=2tsE6FgXzemf4fZZ4uGtd8QpRBl9GJ2CRqSNJE5p0EI,16077
99
99
  airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py,sha256=9IAJTCiRUXvhFFz-IhZtYh_KfAjLHqthsYf2jErQRls,17728
100
100
  airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py,sha256=2YBOA2NnwAeIKlIhSwUB_W-FaGnPcmrG_liY7b4mV2Y,8365
101
101
  airbyte_cdk/sources/declarative/incremental/resumable_full_refresh_cursor.py,sha256=10LFv1QPM-agVKl6eaANmEBOfd7gZgBrkoTcMggsieQ,4809
102
- airbyte_cdk/sources/declarative/interpolation/__init__.py,sha256=tjUJkn3B-iZ-p7RP2c3dVZejrGiQeooGmS5ibWTuUL4,437
103
- airbyte_cdk/sources/declarative/interpolation/filters.py,sha256=dqf9W6LCnB5aWGLX1BoKxU-waORf1jT03LpJB671msU,3639
104
- airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py,sha256=aPw-ounF0m2Ns03WUheoxOAsSRkiRMHax9uVsbibDyI,1964
105
- airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=UrF56LVOP1ELUxe2mEeQPBbWqomF7iEa4pVcI9HLb6c,2083
106
- airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=i2L0gREX8nHA-pKokdVqwBf4aJgWP71KOxIABj_DHcY,1857
107
- airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=LYEZnZ_hB7rvBSZxG9s0RSrzsOkDWbBY0_P6qu5lEfc,3212
108
- airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=-V5UddGm69UKEB6o_O1EIES9kfY8FV_X4Ji8w1yOuSA,981
109
- airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=BtsY_jtT4MihFqeQgc05HXj3Ndt-e2ESQgGwbg3Sdxc,6430
110
- airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=Y5AWYxbJTUtJ_Jm7DV9qrZDiymFR9LST7fBt4piT2-U,4585
102
+ airbyte_cdk/sources/declarative/interpolation/__init__.py,sha256=Kh7FxhfetyNVDnAQ9zSxNe4oUbb8CvoW7Mqz7cs2iPg,437
103
+ airbyte_cdk/sources/declarative/interpolation/filters.py,sha256=JXdjSmi6eTUTA-qBoR9wSmXlEYvVCOZRKq2GhkDg09M,3640
104
+ airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py,sha256=8F3ntT_Mfo8cO9n6dCq8rTfJIpfKmzRCsVtVdhzaoGc,1964
105
+ airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=h36RIng4GZ9v4o_fRmgJjTNOtWmhK7NOILU1oSKPE4Q,2083
106
+ airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=vjwvkLk7_l6YDcFClwjCMcTleRjQBh7-dzny7PUaoG8,1857
107
+ airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=CQkHqGlfa87G6VYMtBAQWin7ECKpfMdrDcg0JO5_rhc,3212
108
+ airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=9IoeuWam3L6GyN10L6U8xNWXmkt9cnahSDNkez1OmFY,982
109
+ airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=UQeuS4Vpyp4hlOn-R3tRyeBX0e9IoV6jQ6gH-Jz8lY0,7182
110
+ airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=ajgVJT9sZBUFZUDLjzyPWupCNXt1HvzbCq-4yv9aY3c,5042
111
111
  airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=TN6GCgLXaWDONTaJwQ3A5ELqC-sxwKz-UYSraJYB-dI,17078
112
112
  airbyte_cdk/sources/declarative/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
113
113
  airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py,sha256=iemy3fKLczcU0-Aor7tx5jcT6DRedKMqyK7kCOp01hg,3924
@@ -143,7 +143,7 @@ airbyte_cdk/sources/declarative/requesters/error_handlers/default_http_response_
143
143
  airbyte_cdk/sources/declarative/requesters/error_handlers/error_handler.py,sha256=Tan66odx8VHzfdyyXMQkXz2pJYksllGqvxmpoajgcK4,669
144
144
  airbyte_cdk/sources/declarative/requesters/error_handlers/http_response_filter.py,sha256=E-fQbt4ShfxZVoqfnmOx69C6FUPWZz8BIqI3DN9Kcjs,7935
145
145
  airbyte_cdk/sources/declarative/requesters/http_job_repository.py,sha256=3GtOefPH08evlSUxaILkiKLTHbIspFY4qd5B3ZqNE60,10063
146
- airbyte_cdk/sources/declarative/requesters/http_requester.py,sha256=Ek5hS60-CYjvEaFD-bI7qA-bPgbOPb9hTbMBU4n5zNs,14994
146
+ airbyte_cdk/sources/declarative/requesters/http_requester.py,sha256=pR2uR5b9eGyvYIOYwus3mz3OaqRu1ozwja_ys1SE7hc,14952
147
147
  airbyte_cdk/sources/declarative/requesters/paginators/__init__.py,sha256=uArbKs9JKNCt7t9tZoeWwjDpyI1HoPp29FNW0JzvaEM,644
148
148
  airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py,sha256=ZW4lwWNAzb4zL0jKc-HjowP5-y0Zg9xi0YlK6tkx_XY,12057
149
149
  airbyte_cdk/sources/declarative/requesters/paginators/no_pagination.py,sha256=j6j9QRPaTbKQ2N661RFVKthhkWiodEp6ut0tKeEd0Ng,2019
@@ -158,9 +158,9 @@ airbyte_cdk/sources/declarative/requesters/request_option.py,sha256=Bl0gxGWudmwT
158
158
  airbyte_cdk/sources/declarative/requesters/request_options/__init__.py,sha256=WCwpKqM4wKqy-DHJaCHbKAlFqRVOqMi9K5qonxIfi_Y,809
159
159
  airbyte_cdk/sources/declarative/requesters/request_options/datetime_based_request_options_provider.py,sha256=31nG6_0igidJFQon37-WeQkTpG3g2A5ZmlluI3ilZdE,3632
160
160
  airbyte_cdk/sources/declarative/requesters/request_options/default_request_options_provider.py,sha256=SRROdPJZ5kuqHLOlkh115pWP9nDGfDxRYPgH9oD3hPo,1798
161
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py,sha256=UW4cAtzkQ261AyLI1cmCL2WLdI3ZDYGUTmrqKB9W3u8,2422
162
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py,sha256=Vr2-qa8iHC0vJ4cCtPl7lAUlhrnl4lUuPLMSFrzxMIg,3024
163
- airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py,sha256=EniJILjqOWUBRqb8M02AtFl7KDgUHvqm7207abXhAU4,6975
161
+ airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py,sha256=86YozYuBDfu0t9NbevIvQoGU0vqTP4rt3dRSTsHz3PA,2269
162
+ airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py,sha256=rR00kE64U2yL0McU1gPr4_W5_sLUqwDgL3Nvj691nRU,2884
163
+ airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py,sha256=vOsdHfWHiTFc89WENHPv1hcxLgdzycMXVT_IEtLuhfs,5012
164
164
  airbyte_cdk/sources/declarative/requesters/request_options/request_options_provider.py,sha256=8YRiDzjYvqJ-aMmKFcjqzv_-e8OZ5QG_TbpZ-nuCu6s,2590
165
165
  airbyte_cdk/sources/declarative/requesters/request_path.py,sha256=S3MeFvcaQrMbOkSY2W2VbXLNomqt_3eXqVd9ZhgNwUs,299
166
166
  airbyte_cdk/sources/declarative/requesters/requester.py,sha256=iVVpXQ4KEd9OyZNwmOofMvx7_06i8ZRxGo3aNTrEQLM,4946
@@ -171,7 +171,7 @@ airbyte_cdk/sources/declarative/resolvers/http_components_resolver.py,sha256=Aio
171
171
  airbyte_cdk/sources/declarative/retrievers/__init__.py,sha256=ix9m1dkR69DcXCXUKC5RK_ZZM7ojTLBQ4IkWQTfmfCk,456
172
172
  airbyte_cdk/sources/declarative/retrievers/async_retriever.py,sha256=2oQn_vo7uJKp4pdMnsF5CG5Iwc9rkPeEOLoAm_9bcus,3222
173
173
  airbyte_cdk/sources/declarative/retrievers/retriever.py,sha256=XPLs593Xv8c5cKMc37XzUAYmzlXd1a7eSsspM-CMuWA,1696
174
- airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=uvsBqSUimi85YfSjPuOUoAlewwtvaYwgsLg2EDcswLE,24665
174
+ airbyte_cdk/sources/declarative/retrievers/simple_retriever.py,sha256=bOAKQLgMv1Vca-ozMPRVAg1V5nkyUoPwqC02lKpnLiM,24575
175
175
  airbyte_cdk/sources/declarative/schema/__init__.py,sha256=xU45UvM5O4c1PSM13UHpCdh5hpW3HXy9vRRGEiAC1rg,795
176
176
  airbyte_cdk/sources/declarative/schema/default_schema_loader.py,sha256=KTACrIE23a83wsm3Rd9Eb4K6-20lrGqYxTHNp9yxsso,1820
177
177
  airbyte_cdk/sources/declarative/schema/dynamic_schema_loader.py,sha256=J8Q_iJYhcSQLWyt0bTZCbDAGpxt9G8FCc6Q9jtGsNzw,10703
@@ -184,7 +184,7 @@ airbyte_cdk/sources/declarative/stream_slicers/__init__.py,sha256=sI9vhc95RwJYOn
184
184
  airbyte_cdk/sources/declarative/stream_slicers/declarative_partition_generator.py,sha256=RW1Q44ml-VWeMl4lNcV6EfyzrzCZkjj-hd0Omx_n_n4,3405
185
185
  airbyte_cdk/sources/declarative/stream_slicers/stream_slicer.py,sha256=SOkIPBi2Wu7yxIvA15yFzUAB95a3IzA8LPq5DEqHQQc,725
186
186
  airbyte_cdk/sources/declarative/transformations/__init__.py,sha256=CPJ8TlMpiUmvG3624VYu_NfTzxwKcfBjM2Q2wJ7fkSA,919
187
- airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=r4YdAuAk2bQtNWJMztIIy2CC-NglD9NeK1s1TeO9wkw,5027
187
+ airbyte_cdk/sources/declarative/transformations/add_fields.py,sha256=7UHCGc4xOxkYs5iXbPAPrP3-IEY60A-Go8QushsmaqY,4959
188
188
  airbyte_cdk/sources/declarative/transformations/dpath_flatten_fields.py,sha256=1A-DWGjMqY4ggzRUZsZ3Sjrt-xsNgwUo5c72sSc5OZ0,2077
189
189
  airbyte_cdk/sources/declarative/transformations/flatten_fields.py,sha256=yT3owG6rMKaRX-LJ_T-jSTnh1B5NoAHyH4YZN9yOvE8,1758
190
190
  airbyte_cdk/sources/declarative/transformations/keys_replace_transformation.py,sha256=vbIn6ump-Ut6g20yMub7PFoPBhOKVtrHSAUdcOUdLfw,1999
@@ -360,9 +360,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
360
360
  airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
361
361
  airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
362
362
  airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
363
- airbyte_cdk-6.34.0.dev2.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
364
- airbyte_cdk-6.34.0.dev2.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
365
- airbyte_cdk-6.34.0.dev2.dist-info/METADATA,sha256=ocEC-CNtHU4hTlGp7U03ZuhKoyy4L2trucsaREiqru0,6015
366
- airbyte_cdk-6.34.0.dev2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
367
- airbyte_cdk-6.34.0.dev2.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
368
- airbyte_cdk-6.34.0.dev2.dist-info/RECORD,,
363
+ airbyte_cdk-6.34.1.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
364
+ airbyte_cdk-6.34.1.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
365
+ airbyte_cdk-6.34.1.dist-info/METADATA,sha256=tIe6MuNVPnGhKeE7m-mrWy7Y7YlcsYAFoA7DHh2EtOc,6010
366
+ airbyte_cdk-6.34.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
367
+ airbyte_cdk-6.34.1.dist-info/entry_points.txt,sha256=fj-e3PAQvsxsQzyyq8UkG1k8spunWnD4BAH2AwlR6NM,95
368
+ airbyte_cdk-6.34.1.dist-info/RECORD,,