airbyte-cdk 6.20.1__py3-none-any.whl → 6.20.2.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. airbyte_cdk/sources/declarative/auth/oauth.py +0 -34
  2. airbyte_cdk/sources/declarative/checks/__init__.py +2 -18
  3. airbyte_cdk/sources/declarative/concurrent_declarative_source.py +80 -16
  4. airbyte_cdk/sources/declarative/declarative_component_schema.yaml +21 -97
  5. airbyte_cdk/sources/declarative/decoders/composite_raw_decoder.py +0 -43
  6. airbyte_cdk/sources/declarative/extractors/record_filter.py +3 -5
  7. airbyte_cdk/sources/declarative/incremental/__init__.py +6 -0
  8. airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +331 -0
  9. airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py +3 -0
  10. airbyte_cdk/sources/declarative/incremental/per_partition_cursor.py +15 -0
  11. airbyte_cdk/sources/declarative/manifest_declarative_source.py +1 -2
  12. airbyte_cdk/sources/declarative/models/declarative_component_schema.py +26 -97
  13. airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +106 -116
  14. airbyte_cdk/sources/declarative/requesters/http_job_repository.py +4 -33
  15. airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +1 -1
  16. airbyte_cdk/sources/declarative/schema/dynamic_schema_loader.py +3 -13
  17. airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py +0 -11
  18. airbyte_cdk/sources/file_based/exceptions.py +0 -34
  19. airbyte_cdk/sources/file_based/file_based_source.py +5 -28
  20. airbyte_cdk/sources/file_based/file_based_stream_reader.py +4 -18
  21. airbyte_cdk/sources/file_based/file_types/unstructured_parser.py +2 -25
  22. airbyte_cdk/sources/file_based/stream/default_file_based_stream.py +2 -30
  23. airbyte_cdk/sources/streams/concurrent/cursor.py +30 -21
  24. airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py +4 -33
  25. airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py +4 -42
  26. airbyte_cdk/sources/types.py +0 -3
  27. {airbyte_cdk-6.20.1.dist-info → airbyte_cdk-6.20.2.dev0.dist-info}/METADATA +1 -1
  28. {airbyte_cdk-6.20.1.dist-info → airbyte_cdk-6.20.2.dev0.dist-info}/RECORD +31 -32
  29. airbyte_cdk/sources/declarative/checks/check_dynamic_stream.py +0 -51
  30. airbyte_cdk/sources/declarative/requesters/README.md +0 -56
  31. {airbyte_cdk-6.20.1.dist-info → airbyte_cdk-6.20.2.dev0.dist-info}/LICENSE.txt +0 -0
  32. {airbyte_cdk-6.20.1.dist-info → airbyte_cdk-6.20.2.dev0.dist-info}/WHEEL +0 -0
  33. {airbyte_cdk-6.20.1.dist-info → airbyte_cdk-6.20.2.dev0.dist-info}/entry_points.txt +0 -0
@@ -135,17 +135,6 @@ class AbstractFileBasedStreamReader(ABC):
135
135
  return use_file_transfer
136
136
  return False
137
137
 
138
- def preserve_directory_structure(self) -> bool:
139
- # fall back to preserve subdirectories if config is not present or incomplete
140
- if (
141
- self.use_file_transfer()
142
- and self.config
143
- and hasattr(self.config.delivery_method, "preserve_directory_structure")
144
- and self.config.delivery_method.preserve_directory_structure is not None
145
- ):
146
- return self.config.delivery_method.preserve_directory_structure
147
- return True
148
-
149
138
  @abstractmethod
150
139
  def get_file(
151
140
  self, file: RemoteFile, local_directory: str, logger: logging.Logger
@@ -170,13 +159,10 @@ class AbstractFileBasedStreamReader(ABC):
170
159
  """
171
160
  ...
172
161
 
173
- def _get_file_transfer_paths(self, file: RemoteFile, local_directory: str) -> List[str]:
174
- preserve_directory_structure = self.preserve_directory_structure()
175
- if preserve_directory_structure:
176
- # Remove left slashes from source path format to make relative path for writing locally
177
- file_relative_path = file.uri.lstrip("/")
178
- else:
179
- file_relative_path = path.basename(file.uri)
162
+ @staticmethod
163
+ def _get_file_transfer_paths(file: RemoteFile, local_directory: str) -> List[str]:
164
+ # Remove left slashes from source path format to make relative path for writing locally
165
+ file_relative_path = file.uri.lstrip("/")
180
166
  local_file_path = path.join(local_directory, file_relative_path)
181
167
 
182
168
  # Ensure the local directory exists
@@ -2,7 +2,6 @@
2
2
  # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
3
3
  #
4
4
  import logging
5
- import os
6
5
  import traceback
7
6
  from datetime import datetime
8
7
  from io import BytesIO, IOBase
@@ -43,34 +42,12 @@ unstructured_partition_pdf = None
43
42
  unstructured_partition_docx = None
44
43
  unstructured_partition_pptx = None
45
44
 
46
- AIRBYTE_NLTK_DATA_DIR = "/airbyte/nltk_data"
47
- TMP_NLTK_DATA_DIR = "/tmp/nltk_data"
48
-
49
-
50
- def get_nltk_temp_folder() -> str:
51
- """
52
- For non-root connectors /tmp is not currently writable, but we should allow it in the future.
53
- It's safe to use /airbyte for now. Fallback to /tmp for local development.
54
- """
55
- try:
56
- nltk_data_dir = AIRBYTE_NLTK_DATA_DIR
57
- os.makedirs(nltk_data_dir, exist_ok=True)
58
- except OSError:
59
- nltk_data_dir = TMP_NLTK_DATA_DIR
60
- os.makedirs(nltk_data_dir, exist_ok=True)
61
- return nltk_data_dir
62
-
63
-
64
45
  try:
65
- nltk_data_dir = get_nltk_temp_folder()
66
- nltk.data.path.append(nltk_data_dir)
67
46
  nltk.data.find("tokenizers/punkt.zip")
68
47
  nltk.data.find("tokenizers/punkt_tab.zip")
69
- nltk.data.find("tokenizers/averaged_perceptron_tagger_eng.zip")
70
48
  except LookupError:
71
- nltk.download("punkt", download_dir=nltk_data_dir, quiet=True)
72
- nltk.download("punkt_tab", download_dir=nltk_data_dir, quiet=True)
73
- nltk.download("averaged_perceptron_tagger_eng", download_dir=nltk_data_dir, quiet=True)
49
+ nltk.download("punkt")
50
+ nltk.download("punkt_tab")
74
51
 
75
52
 
76
53
  def optional_decode(contents: Union[str, bytes]) -> str:
@@ -5,17 +5,14 @@
5
5
  import asyncio
6
6
  import itertools
7
7
  import traceback
8
- from collections import defaultdict
9
8
  from copy import deepcopy
10
9
  from functools import cache
11
- from os import path
12
- from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple, Union
10
+ from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Union
13
11
 
14
12
  from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, FailureType, Level
15
13
  from airbyte_cdk.models import Type as MessageType
16
14
  from airbyte_cdk.sources.file_based.config.file_based_stream_config import PrimaryKeyType
17
15
  from airbyte_cdk.sources.file_based.exceptions import (
18
- DuplicatedFilesError,
19
16
  FileBasedSourceError,
20
17
  InvalidSchemaError,
21
18
  MissingSchemaError,
@@ -46,8 +43,6 @@ class DefaultFileBasedStream(AbstractFileBasedStream, IncrementalMixin):
46
43
  """
47
44
 
48
45
  FILE_TRANSFER_KW = "use_file_transfer"
49
- PRESERVE_DIRECTORY_STRUCTURE_KW = "preserve_directory_structure"
50
- FILES_KEY = "files"
51
46
  DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
52
47
  ab_last_mod_col = "_ab_source_file_last_modified"
53
48
  ab_file_name_col = "_ab_source_file_url"
@@ -55,15 +50,10 @@ class DefaultFileBasedStream(AbstractFileBasedStream, IncrementalMixin):
55
50
  source_file_url = "source_file_url"
56
51
  airbyte_columns = [ab_last_mod_col, ab_file_name_col]
57
52
  use_file_transfer = False
58
- preserve_directory_structure = True
59
53
 
60
54
  def __init__(self, **kwargs: Any):
61
55
  if self.FILE_TRANSFER_KW in kwargs:
62
56
  self.use_file_transfer = kwargs.pop(self.FILE_TRANSFER_KW, False)
63
- if self.PRESERVE_DIRECTORY_STRUCTURE_KW in kwargs:
64
- self.preserve_directory_structure = kwargs.pop(
65
- self.PRESERVE_DIRECTORY_STRUCTURE_KW, True
66
- )
67
57
  super().__init__(**kwargs)
68
58
 
69
59
  @property
@@ -108,33 +98,15 @@ class DefaultFileBasedStream(AbstractFileBasedStream, IncrementalMixin):
108
98
  else:
109
99
  return super()._filter_schema_invalid_properties(configured_catalog_json_schema)
110
100
 
111
- def _duplicated_files_names(
112
- self, slices: List[dict[str, List[RemoteFile]]]
113
- ) -> List[dict[str, List[str]]]:
114
- seen_file_names: Dict[str, List[str]] = defaultdict(list)
115
- for file_slice in slices:
116
- for file_found in file_slice[self.FILES_KEY]:
117
- file_name = path.basename(file_found.uri)
118
- seen_file_names[file_name].append(file_found.uri)
119
- return [
120
- {file_name: paths} for file_name, paths in seen_file_names.items() if len(paths) > 1
121
- ]
122
-
123
101
  def compute_slices(self) -> Iterable[Optional[Mapping[str, Any]]]:
124
102
  # Sort files by last_modified, uri and return them grouped by last_modified
125
103
  all_files = self.list_files()
126
104
  files_to_read = self._cursor.get_files_to_sync(all_files, self.logger)
127
105
  sorted_files_to_read = sorted(files_to_read, key=lambda f: (f.last_modified, f.uri))
128
106
  slices = [
129
- {self.FILES_KEY: list(group[1])}
107
+ {"files": list(group[1])}
130
108
  for group in itertools.groupby(sorted_files_to_read, lambda f: f.last_modified)
131
109
  ]
132
- if slices and not self.preserve_directory_structure:
133
- duplicated_files_names = self._duplicated_files_names(slices)
134
- if duplicated_files_names:
135
- raise DuplicatedFilesError(
136
- stream=self.name, duplicated_files_names=duplicated_files_names
137
- )
138
110
  return slices
139
111
 
140
112
  def transform_record(
@@ -196,7 +196,9 @@ class ConcurrentCursor(Cursor):
196
196
 
197
197
  @property
198
198
  def state(self) -> MutableMapping[str, Any]:
199
- return self._concurrent_state
199
+ return self._connector_state_converter.convert_to_state_message(
200
+ self.cursor_field, self._concurrent_state
201
+ )
200
202
 
201
203
  @property
202
204
  def cursor_field(self) -> CursorField:
@@ -241,10 +243,10 @@ class ConcurrentCursor(Cursor):
241
243
  return self._connector_state_converter.parse_value(self._cursor_field.extract_value(record))
242
244
 
243
245
  def close_partition(self, partition: Partition) -> None:
244
- slice_count_before = len(self.state.get("slices", []))
246
+ slice_count_before = len(self._concurrent_state.get("slices", []))
245
247
  self._add_slice_to_state(partition)
246
248
  if slice_count_before < len(
247
- self.state["slices"]
249
+ self._concurrent_state["slices"]
248
250
  ): # only emit if at least one slice has been processed
249
251
  self._merge_partitions()
250
252
  self._emit_state_message()
@@ -256,11 +258,11 @@ class ConcurrentCursor(Cursor):
256
258
  )
257
259
 
258
260
  if self._slice_boundary_fields:
259
- if "slices" not in self.state:
261
+ if "slices" not in self._concurrent_state:
260
262
  raise RuntimeError(
261
263
  f"The state for stream {self._stream_name} should have at least one slice to delineate the sync start time, but no slices are present. This is unexpected. Please contact Support."
262
264
  )
263
- self.state["slices"].append(
265
+ self._concurrent_state["slices"].append(
264
266
  {
265
267
  self._connector_state_converter.START_KEY: self._extract_from_slice(
266
268
  partition, self._slice_boundary_fields[self._START_BOUNDARY]
@@ -288,7 +290,7 @@ class ConcurrentCursor(Cursor):
288
290
  "expected. Please contact the Airbyte team."
289
291
  )
290
292
 
291
- self.state["slices"].append(
293
+ self._concurrent_state["slices"].append(
292
294
  {
293
295
  self._connector_state_converter.START_KEY: self.start,
294
296
  self._connector_state_converter.END_KEY: most_recent_cursor_value,
@@ -300,9 +302,7 @@ class ConcurrentCursor(Cursor):
300
302
  self._connector_state_manager.update_state_for_stream(
301
303
  self._stream_name,
302
304
  self._stream_namespace,
303
- self._connector_state_converter.convert_to_state_message(
304
- self._cursor_field, self.state
305
- ),
305
+ self.state,
306
306
  )
307
307
  state_message = self._connector_state_manager.create_state_message(
308
308
  self._stream_name, self._stream_namespace
@@ -310,7 +310,9 @@ class ConcurrentCursor(Cursor):
310
310
  self._message_repository.emit_message(state_message)
311
311
 
312
312
  def _merge_partitions(self) -> None:
313
- self.state["slices"] = self._connector_state_converter.merge_intervals(self.state["slices"])
313
+ self._concurrent_state["slices"] = self._connector_state_converter.merge_intervals(
314
+ self._concurrent_state["slices"]
315
+ )
314
316
 
315
317
  def _extract_from_slice(self, partition: Partition, key: str) -> CursorValueType:
316
318
  try:
@@ -347,36 +349,42 @@ class ConcurrentCursor(Cursor):
347
349
  if self._start is not None and self._is_start_before_first_slice():
348
350
  yield from self._split_per_slice_range(
349
351
  self._start,
350
- self.state["slices"][0][self._connector_state_converter.START_KEY],
352
+ self._concurrent_state["slices"][0][self._connector_state_converter.START_KEY],
351
353
  False,
352
354
  )
353
355
 
354
- if len(self.state["slices"]) == 1:
356
+ if len(self._concurrent_state["slices"]) == 1:
355
357
  yield from self._split_per_slice_range(
356
358
  self._calculate_lower_boundary_of_last_slice(
357
- self.state["slices"][0][self._connector_state_converter.END_KEY]
359
+ self._concurrent_state["slices"][0][self._connector_state_converter.END_KEY]
358
360
  ),
359
361
  self._end_provider(),
360
362
  True,
361
363
  )
362
- elif len(self.state["slices"]) > 1:
363
- for i in range(len(self.state["slices"]) - 1):
364
+ elif len(self._concurrent_state["slices"]) > 1:
365
+ for i in range(len(self._concurrent_state["slices"]) - 1):
364
366
  if self._cursor_granularity:
365
367
  yield from self._split_per_slice_range(
366
- self.state["slices"][i][self._connector_state_converter.END_KEY]
368
+ self._concurrent_state["slices"][i][self._connector_state_converter.END_KEY]
367
369
  + self._cursor_granularity,
368
- self.state["slices"][i + 1][self._connector_state_converter.START_KEY],
370
+ self._concurrent_state["slices"][i + 1][
371
+ self._connector_state_converter.START_KEY
372
+ ],
369
373
  False,
370
374
  )
371
375
  else:
372
376
  yield from self._split_per_slice_range(
373
- self.state["slices"][i][self._connector_state_converter.END_KEY],
374
- self.state["slices"][i + 1][self._connector_state_converter.START_KEY],
377
+ self._concurrent_state["slices"][i][
378
+ self._connector_state_converter.END_KEY
379
+ ],
380
+ self._concurrent_state["slices"][i + 1][
381
+ self._connector_state_converter.START_KEY
382
+ ],
375
383
  False,
376
384
  )
377
385
  yield from self._split_per_slice_range(
378
386
  self._calculate_lower_boundary_of_last_slice(
379
- self.state["slices"][-1][self._connector_state_converter.END_KEY]
387
+ self._concurrent_state["slices"][-1][self._connector_state_converter.END_KEY]
380
388
  ),
381
389
  self._end_provider(),
382
390
  True,
@@ -387,7 +395,8 @@ class ConcurrentCursor(Cursor):
387
395
  def _is_start_before_first_slice(self) -> bool:
388
396
  return (
389
397
  self._start is not None
390
- and self._start < self.state["slices"][0][self._connector_state_converter.START_KEY]
398
+ and self._start
399
+ < self._concurrent_state["slices"][0][self._connector_state_converter.START_KEY]
391
400
  )
392
401
 
393
402
  def _calculate_lower_boundary_of_last_slice(
@@ -81,10 +81,10 @@ class AbstractOauth2Authenticator(AuthBase):
81
81
  Override to define additional parameters
82
82
  """
83
83
  payload: MutableMapping[str, Any] = {
84
- self.get_grant_type_name(): self.get_grant_type(),
85
- self.get_client_id_name(): self.get_client_id(),
86
- self.get_client_secret_name(): self.get_client_secret(),
87
- self.get_refresh_token_name(): self.get_refresh_token(),
84
+ "grant_type": self.get_grant_type(),
85
+ "client_id": self.get_client_id(),
86
+ "client_secret": self.get_client_secret(),
87
+ "refresh_token": self.get_refresh_token(),
88
88
  }
89
89
 
90
90
  if self.get_scopes():
@@ -98,14 +98,6 @@ class AbstractOauth2Authenticator(AuthBase):
98
98
 
99
99
  return payload
100
100
 
101
- def build_refresh_request_headers(self) -> Mapping[str, Any] | None:
102
- """
103
- Returns the request headers to set on the refresh request
104
-
105
- """
106
- headers = self.get_refresh_request_headers()
107
- return headers if headers else None
108
-
109
101
  def _wrap_refresh_token_exception(
110
102
  self, exception: requests.exceptions.RequestException
111
103
  ) -> bool:
@@ -136,7 +128,6 @@ class AbstractOauth2Authenticator(AuthBase):
136
128
  method="POST",
137
129
  url=self.get_token_refresh_endpoint(), # type: ignore # returns None, if not provided, but str | bytes is expected.
138
130
  data=self.build_refresh_request_body(),
139
- headers=self.build_refresh_request_headers(),
140
131
  )
141
132
  if response.ok:
142
133
  response_json = response.json()
@@ -215,26 +206,14 @@ class AbstractOauth2Authenticator(AuthBase):
215
206
  def get_token_refresh_endpoint(self) -> Optional[str]:
216
207
  """Returns the endpoint to refresh the access token"""
217
208
 
218
- @abstractmethod
219
- def get_client_id_name(self) -> str:
220
- """The client id name to authenticate"""
221
-
222
209
  @abstractmethod
223
210
  def get_client_id(self) -> str:
224
211
  """The client id to authenticate"""
225
212
 
226
- @abstractmethod
227
- def get_client_secret_name(self) -> str:
228
- """The client secret name to authenticate"""
229
-
230
213
  @abstractmethod
231
214
  def get_client_secret(self) -> str:
232
215
  """The client secret to authenticate"""
233
216
 
234
- @abstractmethod
235
- def get_refresh_token_name(self) -> str:
236
- """The refresh token name to authenticate"""
237
-
238
217
  @abstractmethod
239
218
  def get_refresh_token(self) -> Optional[str]:
240
219
  """The token used to refresh the access token when it expires"""
@@ -263,18 +242,10 @@ class AbstractOauth2Authenticator(AuthBase):
263
242
  def get_refresh_request_body(self) -> Mapping[str, Any]:
264
243
  """Returns the request body to set on the refresh request"""
265
244
 
266
- @abstractmethod
267
- def get_refresh_request_headers(self) -> Mapping[str, Any]:
268
- """Returns the request headers to set on the refresh request"""
269
-
270
245
  @abstractmethod
271
246
  def get_grant_type(self) -> str:
272
247
  """Returns grant_type specified for requesting access_token"""
273
248
 
274
- @abstractmethod
275
- def get_grant_type_name(self) -> str:
276
- """Returns grant_type specified name for requesting access_token"""
277
-
278
249
  @property
279
250
  @abstractmethod
280
251
  def access_token(self) -> str:
@@ -30,17 +30,12 @@ class Oauth2Authenticator(AbstractOauth2Authenticator):
30
30
  client_id: str,
31
31
  client_secret: str,
32
32
  refresh_token: str,
33
- client_id_name: str = "client_id",
34
- client_secret_name: str = "client_secret",
35
- refresh_token_name: str = "refresh_token",
36
33
  scopes: List[str] | None = None,
37
34
  token_expiry_date: pendulum.DateTime | None = None,
38
35
  token_expiry_date_format: str | None = None,
39
36
  access_token_name: str = "access_token",
40
37
  expires_in_name: str = "expires_in",
41
38
  refresh_request_body: Mapping[str, Any] | None = None,
42
- refresh_request_headers: Mapping[str, Any] | None = None,
43
- grant_type_name: str = "grant_type",
44
39
  grant_type: str = "refresh_token",
45
40
  token_expiry_is_time_of_expiration: bool = False,
46
41
  refresh_token_error_status_codes: Tuple[int, ...] = (),
@@ -48,18 +43,13 @@ class Oauth2Authenticator(AbstractOauth2Authenticator):
48
43
  refresh_token_error_values: Tuple[str, ...] = (),
49
44
  ):
50
45
  self._token_refresh_endpoint = token_refresh_endpoint
51
- self._client_secret_name = client_secret_name
52
46
  self._client_secret = client_secret
53
- self._client_id_name = client_id_name
54
47
  self._client_id = client_id
55
- self._refresh_token_name = refresh_token_name
56
48
  self._refresh_token = refresh_token
57
49
  self._scopes = scopes
58
50
  self._access_token_name = access_token_name
59
51
  self._expires_in_name = expires_in_name
60
52
  self._refresh_request_body = refresh_request_body
61
- self._refresh_request_headers = refresh_request_headers
62
- self._grant_type_name = grant_type_name
63
53
  self._grant_type = grant_type
64
54
 
65
55
  self._token_expiry_date = token_expiry_date or pendulum.now().subtract(days=1) # type: ignore [no-untyped-call]
@@ -73,21 +63,12 @@ class Oauth2Authenticator(AbstractOauth2Authenticator):
73
63
  def get_token_refresh_endpoint(self) -> str:
74
64
  return self._token_refresh_endpoint
75
65
 
76
- def get_client_id_name(self) -> str:
77
- return self._client_id_name
78
-
79
66
  def get_client_id(self) -> str:
80
67
  return self._client_id
81
68
 
82
- def get_client_secret_name(self) -> str:
83
- return self._client_secret_name
84
-
85
69
  def get_client_secret(self) -> str:
86
70
  return self._client_secret
87
71
 
88
- def get_refresh_token_name(self) -> str:
89
- return self._refresh_token_name
90
-
91
72
  def get_refresh_token(self) -> str:
92
73
  return self._refresh_token
93
74
 
@@ -103,12 +84,6 @@ class Oauth2Authenticator(AbstractOauth2Authenticator):
103
84
  def get_refresh_request_body(self) -> Mapping[str, Any]:
104
85
  return self._refresh_request_body # type: ignore [return-value]
105
86
 
106
- def get_refresh_request_headers(self) -> Mapping[str, Any]:
107
- return self._refresh_request_headers # type: ignore [return-value]
108
-
109
- def get_grant_type_name(self) -> str:
110
- return self._grant_type_name
111
-
112
87
  def get_grant_type(self) -> str:
113
88
  return self._grant_type
114
89
 
@@ -154,12 +129,8 @@ class SingleUseRefreshTokenOauth2Authenticator(Oauth2Authenticator):
154
129
  expires_in_name: str = "expires_in",
155
130
  refresh_token_name: str = "refresh_token",
156
131
  refresh_request_body: Mapping[str, Any] | None = None,
157
- refresh_request_headers: Mapping[str, Any] | None = None,
158
- grant_type_name: str = "grant_type",
159
132
  grant_type: str = "refresh_token",
160
- client_id_name: str = "client_id",
161
133
  client_id: Optional[str] = None,
162
- client_secret_name: str = "client_secret",
163
134
  client_secret: Optional[str] = None,
164
135
  access_token_config_path: Sequence[str] = ("credentials", "access_token"),
165
136
  refresh_token_config_path: Sequence[str] = ("credentials", "refresh_token"),
@@ -180,7 +151,6 @@ class SingleUseRefreshTokenOauth2Authenticator(Oauth2Authenticator):
180
151
  expires_in_name (str, optional): Name of the name of the field that characterizes when the current access token will expire, used to parse the refresh token response. Defaults to "expires_in".
181
152
  refresh_token_name (str, optional): Name of the name of the refresh token field, used to parse the refresh token response. Defaults to "refresh_token".
182
153
  refresh_request_body (Mapping[str, Any], optional): Custom key value pair that will be added to the refresh token request body. Defaults to None.
183
- refresh_request_headers (Mapping[str, Any], optional): Custom key value pair that will be added to the refresh token request headers. Defaults to None.
184
154
  grant_type (str, optional): OAuth grant type. Defaults to "refresh_token".
185
155
  client_id (Optional[str]): The client id to authenticate. If not specified, defaults to credentials.client_id in the config object.
186
156
  client_secret (Optional[str]): The client secret to authenticate. If not specified, defaults to credentials.client_secret in the config object.
@@ -204,31 +174,23 @@ class SingleUseRefreshTokenOauth2Authenticator(Oauth2Authenticator):
204
174
  ("credentials", "client_secret"),
205
175
  )
206
176
  )
207
- self._client_id_name = client_id_name
208
- self._client_secret_name = client_secret_name
209
177
  self._access_token_config_path = access_token_config_path
210
178
  self._refresh_token_config_path = refresh_token_config_path
211
179
  self._token_expiry_date_config_path = token_expiry_date_config_path
212
180
  self._token_expiry_date_format = token_expiry_date_format
213
181
  self._refresh_token_name = refresh_token_name
214
- self._grant_type_name = grant_type_name
215
182
  self._connector_config = connector_config
216
183
  self.__message_repository = message_repository
217
184
  super().__init__(
218
- token_refresh_endpoint=token_refresh_endpoint,
219
- client_id_name=self._client_id_name,
220
- client_id=self.get_client_id(),
221
- client_secret_name=self._client_secret_name,
222
- client_secret=self.get_client_secret(),
223
- refresh_token=self.get_refresh_token(),
224
- refresh_token_name=self._refresh_token_name,
185
+ token_refresh_endpoint,
186
+ self.get_client_id(),
187
+ self.get_client_secret(),
188
+ self.get_refresh_token(),
225
189
  scopes=scopes,
226
190
  token_expiry_date=self.get_token_expiry_date(),
227
191
  access_token_name=access_token_name,
228
192
  expires_in_name=expires_in_name,
229
193
  refresh_request_body=refresh_request_body,
230
- refresh_request_headers=refresh_request_headers,
231
- grant_type_name=self._grant_type_name,
232
194
  grant_type=grant_type,
233
195
  token_expiry_date_format=token_expiry_date_format,
234
196
  token_expiry_is_time_of_expiration=token_expiry_is_time_of_expiration,
@@ -152,6 +152,3 @@ class StreamSlice(Mapping[str, Any]):
152
152
 
153
153
  def __hash__(self) -> int:
154
154
  return hash(orjson.dumps(self._stream_slice, option=orjson.OPT_SORT_KEYS))
155
-
156
- def __bool__(self) -> bool:
157
- return bool(self._stream_slice) or bool(self._extra_fields)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: airbyte-cdk
3
- Version: 6.20.1
3
+ Version: 6.20.2.dev0
4
4
  Summary: A framework for writing Airbyte Connectors.
5
5
  License: MIT
6
6
  Keywords: airbyte,connector-development-kit,cdk