airbyte-cdk 6.36.0.dev0__py3-none-any.whl → 6.37.0.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. airbyte_cdk/connector_builder/models.py +16 -14
  2. airbyte_cdk/connector_builder/test_reader/helpers.py +120 -22
  3. airbyte_cdk/connector_builder/test_reader/message_grouper.py +16 -3
  4. airbyte_cdk/connector_builder/test_reader/types.py +9 -1
  5. airbyte_cdk/entrypoint.py +7 -7
  6. airbyte_cdk/sources/declarative/auth/token_provider.py +1 -0
  7. airbyte_cdk/sources/declarative/concurrent_declarative_source.py +15 -75
  8. airbyte_cdk/sources/declarative/declarative_component_schema.yaml +15 -16
  9. airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py +13 -2
  10. airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +0 -1
  11. airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py +6 -2
  12. airbyte_cdk/sources/declarative/interpolation/__init__.py +1 -1
  13. airbyte_cdk/sources/declarative/interpolation/filters.py +2 -1
  14. airbyte_cdk/sources/declarative/interpolation/interpolated_boolean.py +1 -1
  15. airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py +1 -1
  16. airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py +1 -1
  17. airbyte_cdk/sources/declarative/interpolation/interpolated_string.py +1 -1
  18. airbyte_cdk/sources/declarative/interpolation/interpolation.py +2 -1
  19. airbyte_cdk/sources/declarative/interpolation/jinja.py +14 -1
  20. airbyte_cdk/sources/declarative/interpolation/macros.py +19 -4
  21. airbyte_cdk/sources/declarative/models/declarative_component_schema.py +1 -1
  22. airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +52 -30
  23. airbyte_cdk/sources/declarative/requesters/http_requester.py +0 -1
  24. airbyte_cdk/sources/declarative/requesters/request_options/interpolated_nested_request_input_provider.py +1 -4
  25. airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py +0 -3
  26. airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_options_provider.py +2 -47
  27. airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +0 -2
  28. airbyte_cdk/sources/declarative/transformations/add_fields.py +4 -4
  29. airbyte_cdk/sources/http_logger.py +3 -0
  30. airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py +1 -0
  31. {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.37.0.dev0.dist-info}/METADATA +1 -1
  32. {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.37.0.dev0.dist-info}/RECORD +36 -36
  33. {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.37.0.dev0.dist-info}/LICENSE.txt +0 -0
  34. {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.37.0.dev0.dist-info}/LICENSE_SHORT +0 -0
  35. {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.37.0.dev0.dist-info}/WHEEL +0 -0
  36. {airbyte_cdk-6.36.0.dev0.dist-info → airbyte_cdk-6.37.0.dev0.dist-info}/entry_points.txt +0 -0
@@ -115,7 +115,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
         * Yield the last slice. At that point, once there are as many slices yielded as closes, the global slice will be closed too
         """
         slice_generator = (
-            StreamSlice(partition=partition, cursor_slice=cursor_slice)
+            StreamSlice(
+                partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
+            )
             for partition in self._partition_router.stream_slices()
             for cursor_slice in self._stream_cursor.stream_slices()
         )
@@ -131,7 +133,9 @@ class GlobalSubstreamCursor(DeclarativeCursor):
 
     def generate_slices_from_partition(self, partition: StreamSlice) -> Iterable[StreamSlice]:
         slice_generator = (
-            StreamSlice(partition=partition, cursor_slice=cursor_slice)
+            StreamSlice(
+                partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
+            )
             for cursor_slice in self._stream_cursor.stream_slices()
         )
 
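The two hunks above make the global substream cursor carry `extra_fields` from the parent partition onto the combined slice. A minimal sketch of what this enables (field names and values are invented for illustration):

    # Sketch only: extra_fields set by a parent partition router now survive on the
    # slices yielded by GlobalSubstreamCursor, so they stay available downstream.
    from airbyte_cdk.sources.types import StreamSlice

    parent_slice = StreamSlice(
        partition={"parent_id": "123"},
        cursor_slice={},
        extra_fields={"parent_name": "Acme"},  # hypothetical field from the parent record
    )
    combined = StreamSlice(
        partition=parent_slice,
        cursor_slice={"start": "2024-01-01", "end": "2024-01-31"},
        extra_fields=parent_slice.extra_fields,  # mirrors the change in this diff
    )
    print(combined.extra_fields["parent_name"])  # -> Acme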
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean
@@ -1,6 +1,7 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
+
 import base64
 import hashlib
 import json
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 from dataclasses import InitVar, dataclass
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 from dataclasses import InitVar, dataclass
@@ -1,7 +1,8 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
+
 from abc import ABC, abstractmethod
 from typing import Any, Optional
 
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 import ast
@@ -11,10 +11,12 @@ from jinja2.environment import Template
 from jinja2.exceptions import UndefinedError
 from jinja2.sandbox import SandboxedEnvironment
 
+from airbyte_cdk.models import FailureType
 from airbyte_cdk.sources.declarative.interpolation.filters import filters
 from airbyte_cdk.sources.declarative.interpolation.interpolation import Interpolation
 from airbyte_cdk.sources.declarative.interpolation.macros import macros
 from airbyte_cdk.sources.types import Config
+from airbyte_cdk.utils import AirbyteTracedException
 
 
 class StreamPartitionAccessEnvironment(SandboxedEnvironment):
@@ -36,6 +38,10 @@ _ALIASES = {
     "stream_partition": "stream_slice",  # Use stream_partition to access partition router's values
 }
 
+_UNSUPPORTED_INTERPOLATION_VARIABLES: Mapping[str, str] = {
+    "stream_state": "`stream_state` is no longer supported for interpolation. We recommend using `stream_interval` instead. Please reference the CDK Migration Guide for more information.",
+}
+
 # These extensions are not installed so they're not currently a problem,
 # but we're still explicitly removing them from the jinja context.
 # At worst, this is documentation that we do NOT want to include these extensions because of the potential security risks
@@ -95,6 +101,13 @@ class JinjaInterpolation(Interpolation):
             elif equivalent in context:
                 context[alias] = context[equivalent]
 
+        for variable_name in _UNSUPPORTED_INTERPOLATION_VARIABLES:
+            if variable_name in input_str:
+                raise AirbyteTracedException(
+                    message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
+                    internal_message=_UNSUPPORTED_INTERPOLATION_VARIABLES[variable_name],
+                    failure_type=FailureType.config_error,
+                )
         try:
             if isinstance(input_str, str):
                 result = self._eval(input_str, context)
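With the hunk above, any interpolation expression that still references `stream_state` now fails fast with a config error. A rough sketch of the resulting behavior, assuming the surrounding `eval()` signature is unchanged:

    # Sketch only: interpolating `stream_state` now raises instead of evaluating.
    from airbyte_cdk.sources.declarative.interpolation.jinja import JinjaInterpolation
    from airbyte_cdk.utils import AirbyteTracedException

    interpolation = JinjaInterpolation()
    try:
        interpolation.eval("{{ stream_state['updated_at'] }}", config={})
    except AirbyteTracedException as error:
        print(error.message)  # recommends `stream_interval` and the CDK Migration Guide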
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 #
 
 import builtins
@@ -63,10 +63,24 @@ def timestamp(dt: Union[float, str]) -> Union[int, float]:
     if isinstance(dt, (int, float)):
         return int(dt)
     else:
-        return _str_to_datetime(dt).astimezone(pytz.utc).timestamp()
+        return str_to_datetime(dt).astimezone(pytz.utc).timestamp()
 
 
-def _str_to_datetime(s: str) -> datetime.datetime:
+def str_to_datetime(s: str) -> datetime.datetime:
+    """
+    Converts a string to a datetime object with UTC timezone
+
+    If the input string does not contain timezone information, UTC is assumed.
+    Supports both basic date strings like "2022-01-14" and datetime strings with optional timezone
+    like "2022-01-01T13:45:30+00:00".
+
+    Usage:
+    `"{{ str_to_datetime('2022-01-14') }}"`
+
+    :param s: string to parse as datetime
+    :return: datetime object in UTC timezone
+    """
+
     parsed_date = parser.isoparse(s)
     if not parsed_date.tzinfo:
         # Assume UTC if the input does not contain a timezone
@@ -155,7 +169,7 @@ def format_datetime(
     if isinstance(dt, datetime.datetime):
         return dt.strftime(format)
     dt_datetime = (
-        datetime.datetime.strptime(dt, input_format) if input_format else _str_to_datetime(dt)
+        datetime.datetime.strptime(dt, input_format) if input_format else str_to_datetime(dt)
     )
     if format == "%s":
         return str(int(dt_datetime.timestamp()))
@@ -172,5 +186,6 @@ _macros_list = [
     duration,
     format_datetime,
     today_with_timezone,
+    str_to_datetime,
 ]
 macros = {f.__name__: f for f in _macros_list}
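`str_to_datetime` is now a public macro registered in `macros`, so manifests can call it directly in interpolated strings. A small usage sketch (expected output shown in comments):

    # Sketch only: the macro parses date strings and assumes UTC when no timezone is given.
    from airbyte_cdk.sources.declarative.interpolation.macros import macros, str_to_datetime

    parsed = str_to_datetime("2022-01-14")
    print(parsed.isoformat())             # 2022-01-14T00:00:00+00:00
    print("str_to_datetime" in macros)    # True, so "{{ str_to_datetime('2022-01-14') }}" is available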
@@ -646,7 +646,7 @@ class Rate(BaseModel):
     class Config:
         extra = Extra.allow
 
-    limit: int = Field(
+    limit: Union[int, str] = Field(
         ...,
         description="The maximum number of calls allowed within the interval.",
         title="Limit",
@@ -2091,10 +2091,10 @@ class ModelToComponentFactory:
     def create_json_decoder(model: JsonDecoderModel, config: Config, **kwargs: Any) -> Decoder:
         return JsonDecoder(parameters={})
 
-    @staticmethod
-    def create_csv_decoder(model: CsvDecoderModel, config: Config, **kwargs: Any) -> Decoder:
+    def create_csv_decoder(self, model: CsvDecoderModel, config: Config, **kwargs: Any) -> Decoder:
         return CompositeRawDecoder(
-            parser=ModelToComponentFactory._get_parser(model, config), stream_response=True
+            parser=ModelToComponentFactory._get_parser(model, config),
+            stream_response=False if self._emit_connector_builder_messages else True,
         )
 
     @staticmethod
@@ -2103,10 +2103,12 @@ class ModelToComponentFactory:
             parser=ModelToComponentFactory._get_parser(model, config), stream_response=True
         )
 
-    @staticmethod
-    def create_gzip_decoder(model: GzipDecoderModel, config: Config, **kwargs: Any) -> Decoder:
+    def create_gzip_decoder(
+        self, model: GzipDecoderModel, config: Config, **kwargs: Any
+    ) -> Decoder:
         return CompositeRawDecoder(
-            parser=ModelToComponentFactory._get_parser(model, config), stream_response=True
+            parser=ModelToComponentFactory._get_parser(model, config),
+            stream_response=False if self._emit_connector_builder_messages else True,
         )
 
     @staticmethod
@@ -2627,6 +2629,47 @@ class ModelToComponentFactory:
         transformations: List[RecordTransformation],
         **kwargs: Any,
     ) -> AsyncRetriever:
+        def _get_download_retriever() -> SimpleRetrieverTestReadDecorator | SimpleRetriever:
+            record_selector = RecordSelector(
+                extractor=download_extractor,
+                name=name,
+                record_filter=None,
+                transformations=transformations,
+                schema_normalization=TypeTransformer(TransformConfig.NoTransform),
+                config=config,
+                parameters={},
+            )
+            paginator = (
+                self._create_component_from_model(
+                    model=model.download_paginator, decoder=decoder, config=config, url_base=""
+                )
+                if model.download_paginator
+                else NoPagination(parameters={})
+            )
+            maximum_number_of_slices = self._limit_slices_fetched or 5
+
+            if self._limit_slices_fetched or self._emit_connector_builder_messages:
+                return SimpleRetrieverTestReadDecorator(
+                    requester=download_requester,
+                    record_selector=record_selector,
+                    primary_key=None,
+                    name=job_download_components_name,
+                    paginator=paginator,
+                    config=config,
+                    parameters={},
+                    maximum_number_of_slices=maximum_number_of_slices,
+                )
+
+            return SimpleRetriever(
+                requester=download_requester,
+                record_selector=record_selector,
+                primary_key=None,
+                name=job_download_components_name,
+                paginator=paginator,
+                config=config,
+                parameters={},
+            )
+
         decoder = (
             self._create_component_from_model(model=model.decoder, config=config)
             if model.decoder
@@ -2680,29 +2723,7 @@ class ModelToComponentFactory:
             config=config,
             name=job_download_components_name,
         )
-        download_retriever = SimpleRetriever(
-            requester=download_requester,
-            record_selector=RecordSelector(
-                extractor=download_extractor,
-                name=name,
-                record_filter=None,
-                transformations=transformations,
-                schema_normalization=TypeTransformer(TransformConfig.NoTransform),
-                config=config,
-                parameters={},
-            ),
-            primary_key=None,
-            name=job_download_components_name,
-            paginator=(
-                self._create_component_from_model(
-                    model=model.download_paginator, decoder=decoder, config=config, url_base=""
-                )
-                if model.download_paginator
-                else NoPagination(parameters={})
-            ),
-            config=config,
-            parameters={},
-        )
+        download_retriever = _get_download_retriever()
         abort_requester = (
             self._create_component_from_model(
                 model=model.abort_requester,
@@ -3024,8 +3045,9 @@ class ModelToComponentFactory:
         )
 
     def create_rate(self, model: RateModel, config: Config, **kwargs: Any) -> Rate:
+        interpolated_limit = InterpolatedString.create(str(model.limit), parameters={})
         return Rate(
-            limit=model.limit,
+            limit=int(interpolated_limit.eval(config=config)),
            interval=parse_duration(model.interval),
         )
 
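Taken together with the `Rate.limit: Union[int, str]` change above, `create_rate` now runs the limit through interpolation, so a manifest can source it from config. A hedged sketch (the config key is invented for illustration):

    # Sketch only: an interpolated rate limit such as limit: "{{ config['api_call_limit'] }}"
    from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString

    interpolated_limit = InterpolatedString.create("{{ config['api_call_limit'] }}", parameters={})
    print(int(interpolated_limit.eval(config={"api_call_limit": "600"})))  # 600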
@@ -123,7 +123,6 @@ class HttpRequester(Requester):
         next_page_token: Optional[Mapping[str, Any]],
     ) -> str:
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }
@@ -10,7 +10,7 @@ from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping i
     NestedMapping,
 )
 from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
-from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
+from airbyte_cdk.sources.types import Config, StreamSlice
 
 
 @dataclass
@@ -42,20 +42,17 @@ class InterpolatedNestedRequestInputProvider:
 
     def eval_request_inputs(
         self,
-        stream_state: Optional[StreamState] = None,
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
         """
         Returns the request inputs to set on an outgoing HTTP request
 
-        :param stream_state: The stream state
         :param stream_slice: The stream slice
         :param next_page_token: The pagination token
         :return: The request inputs to set on an outgoing HTTP request
         """
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }
@@ -37,7 +37,6 @@ class InterpolatedRequestInputProvider:
 
     def eval_request_inputs(
         self,
-        stream_state: Optional[StreamState] = None,
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
         valid_key_types: Optional[Tuple[Type[Any]]] = None,
@@ -46,7 +45,6 @@ class InterpolatedRequestInputProvider:
         """
         Returns the request inputs to set on an outgoing HTTP request
 
-        :param stream_state: The stream state
         :param stream_slice: The stream slice
         :param next_page_token: The pagination token
         :param valid_key_types: A tuple of types that the interpolator should allow
@@ -54,7 +52,6 @@ class InterpolatedRequestInputProvider:
         :return: The request inputs to set on an outgoing HTTP request
         """
         kwargs = {
-            "stream_state": stream_state,
             "stream_slice": stream_slice,
             "next_page_token": next_page_token,
         }
@@ -5,8 +5,6 @@
 from dataclasses import InitVar, dataclass, field
 from typing import Any, Mapping, MutableMapping, Optional, Union
 
-from typing_extensions import deprecated
-
 from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping import NestedMapping
 from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_nested_request_input_provider import (
     InterpolatedNestedRequestInputProvider,
@@ -17,7 +15,6 @@ from airbyte_cdk.sources.declarative.requesters.request_options.interpolated_req
 from airbyte_cdk.sources.declarative.requesters.request_options.request_options_provider import (
     RequestOptionsProvider,
 )
-from airbyte_cdk.sources.source import ExperimentalClassWarning
 from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
 
 RequestInput = Union[str, Mapping[str, str]]
@@ -80,7 +77,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> MutableMapping[str, Any]:
         interpolated_value = self._parameter_interpolator.eval_request_inputs(
-            stream_state,
             stream_slice,
             next_page_token,
             valid_key_types=(str,),
@@ -97,9 +93,7 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
-        return self._headers_interpolator.eval_request_inputs(
-            stream_state, stream_slice, next_page_token
-        )
+        return self._headers_interpolator.eval_request_inputs(stream_slice, next_page_token)
 
     def get_request_body_data(
         self,
@@ -109,7 +103,6 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Union[Mapping[str, Any], str]:
         return self._body_data_interpolator.eval_request_inputs(
-            stream_state,
             stream_slice,
             next_page_token,
             valid_key_types=(str,),
@@ -123,42 +116,4 @@ class InterpolatedRequestOptionsProvider(RequestOptionsProvider):
         stream_slice: Optional[StreamSlice] = None,
         next_page_token: Optional[Mapping[str, Any]] = None,
     ) -> Mapping[str, Any]:
-        return self._body_json_interpolator.eval_request_inputs(
-            stream_state, stream_slice, next_page_token
-        )
-
-    @deprecated(
-        "This class is temporary and used to incrementally deliver low-code to concurrent",
-        category=ExperimentalClassWarning,
-    )
-    def request_options_contain_stream_state(self) -> bool:
-        """
-        Temporary helper method used as we move low-code streams to the concurrent framework. This method determines if
-        the InterpolatedRequestOptionsProvider has is a dependency on a non-thread safe interpolation context such as
-        stream_state.
-        """
-
-        return (
-            self._check_if_interpolation_uses_stream_state(self.request_parameters)
-            or self._check_if_interpolation_uses_stream_state(self.request_headers)
-            or self._check_if_interpolation_uses_stream_state(self.request_body_data)
-            or self._check_if_interpolation_uses_stream_state(self.request_body_json)
-        )
-
-    @staticmethod
-    def _check_if_interpolation_uses_stream_state(
-        request_input: Optional[Union[RequestInput, NestedMapping]],
-    ) -> bool:
-        if not request_input:
-            return False
-        elif isinstance(request_input, str):
-            return "stream_state" in request_input
-        else:
-            for key, val in request_input.items():
-                # Covers the case of RequestInput in the form of a string or Mapping[str, str]. It also covers the case
-                # of a NestedMapping where the value is a string.
-                # Note: Doesn't account for nested mappings for request_body_json, but I don't see stream_state used in that way
-                # in our code
-                if "stream_state" in key or (isinstance(val, str) and "stream_state" in val):
-                    return True
-        return False
+        return self._body_json_interpolator.eval_request_inputs(stream_slice, next_page_token)
@@ -133,7 +133,6 @@ class SimpleRetriever(Retriever):
 
         mappings = [
             paginator_method(
-                stream_state=stream_state,
                 stream_slice=stream_slice,
                 next_page_token=next_page_token,
             ),
@@ -141,7 +140,6 @@ class SimpleRetriever(Retriever):
         if not next_page_token or not self.ignore_stream_slicer_parameters_on_paginated_requests:
             mappings.append(
                 stream_slicer_method(
-                    stream_state=stream_state,
                     stream_slice=stream_slice,
                     next_page_token=next_page_token,
                 )
@@ -64,9 +64,9 @@ class AddFields(RecordTransformation):
             - path: ["shop_id"]
               value: "{{ config.shop_id }}"
 
-            # from state
-            - path: ["current_state"]
-              value: "{{ stream_state.cursor_field }}" # Or {{ stream_state['cursor_field'] }}
+            # from stream_interval
+            - path: ["date"]
+              value: "{{ stream_interval.start_date }}"
 
             # from record
             - path: ["unnested_value"]
@@ -128,7 +128,7 @@ class AddFields(RecordTransformation):
     ) -> None:
         if config is None:
             config = {}
-        kwargs = {"record": record, "stream_state": stream_state, "stream_slice": stream_slice}
+        kwargs = {"record": record, "stream_slice": stream_slice}
         for parsed_field in self._parsed_fields:
             valid_types = (parsed_field.value_type,) if parsed_field.value_type else None
             value = parsed_field.value.eval(config, valid_types=valid_types, **kwargs)
@@ -15,11 +15,14 @@ def format_http_message(
     description: str,
     stream_name: Optional[str],
     is_auxiliary: bool | None = None,
+    type: Optional[str] = None,
 ) -> LogMessage:
+    request_type: str = type if type else "HTTP"
     request = response.request
     log_message = {
         "http": {
             "title": title,
+            "type": request_type,
             "description": description,
             "request": {
                 "method": request.method,
@@ -396,6 +396,7 @@ class AbstractOauth2Authenticator(AuthBase):
                 "Obtains access token",
                 self._NO_STREAM_NAME,
                 is_auxiliary=True,
+                type="AUTH",
             ),
         )
 
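The two hunks above add an `http.type` field to formatted HTTP log messages: it defaults to "HTTP", and the OAuth authenticator now tags its token request as "AUTH". A rough sketch of the new field (the synthetic response and URL are for illustration only):

    # Sketch only: build a throwaway response to show the "type" field in the log message.
    import requests

    from airbyte_cdk.sources.http_logger import format_http_message

    prepared = requests.PreparedRequest()
    prepared.prepare(method="GET", url="https://api.example.com/token")  # hypothetical URL
    response = requests.Response()
    response.request = prepared

    message = format_http_message(
        response,
        title="example",
        description="illustration only",
        stream_name=None,
        is_auxiliary=True,
        type="AUTH",
    )
    print(message["http"]["type"])  # AUTH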
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: airbyte-cdk
-Version: 6.36.0.dev0
+Version: 6.37.0.dev0
 Summary: A framework for writing Airbyte Connectors.
 Home-page: https://airbyte.com
 License: MIT