airbyte-cdk 6.34.0.dev0__py3-none-any.whl → 6.34.0.dev2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. airbyte_cdk/connector_builder/connector_builder_handler.py +16 -12
  2. airbyte_cdk/connector_builder/test_reader/__init__.py +7 -0
  3. airbyte_cdk/connector_builder/test_reader/helpers.py +591 -0
  4. airbyte_cdk/connector_builder/test_reader/message_grouper.py +160 -0
  5. airbyte_cdk/connector_builder/test_reader/reader.py +441 -0
  6. airbyte_cdk/connector_builder/test_reader/types.py +75 -0
  7. airbyte_cdk/entrypoint.py +6 -6
  8. airbyte_cdk/logger.py +1 -4
  9. airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +122 -38
  10. airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +5 -0
  11. airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py +10 -0
  12. airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py +2 -1
  13. airbyte_cdk/sources/file_based/config/validate_config_transfer_modes.py +81 -0
  14. airbyte_cdk/sources/file_based/file_based_source.py +70 -37
  15. airbyte_cdk/sources/file_based/file_based_stream_reader.py +107 -12
  16. airbyte_cdk/sources/file_based/stream/__init__.py +10 -1
  17. airbyte_cdk/sources/file_based/stream/identities_stream.py +47 -0
  18. airbyte_cdk/sources/file_based/stream/permissions_file_based_stream.py +85 -0
  19. airbyte_cdk/sources/specs/transfer_modes.py +26 -0
  20. airbyte_cdk/sources/streams/permissions/identities_stream.py +75 -0
  21. airbyte_cdk/test/mock_http/mocker.py +9 -1
  22. airbyte_cdk/test/mock_http/response.py +6 -3
  23. airbyte_cdk/utils/mapping_helpers.py +43 -2
  24. airbyte_cdk/utils/print_buffer.py +0 -4
  25. {airbyte_cdk-6.34.0.dev0.dist-info → airbyte_cdk-6.34.0.dev2.dist-info}/METADATA +1 -1
  26. {airbyte_cdk-6.34.0.dev0.dist-info → airbyte_cdk-6.34.0.dev2.dist-info}/RECORD +30 -21
  27. airbyte_cdk/connector_builder/message_grouper.py +0 -448
  28. {airbyte_cdk-6.34.0.dev0.dist-info → airbyte_cdk-6.34.0.dev2.dist-info}/LICENSE.txt +0 -0
  29. {airbyte_cdk-6.34.0.dev0.dist-info → airbyte_cdk-6.34.0.dev2.dist-info}/LICENSE_SHORT +0 -0
  30. {airbyte_cdk-6.34.0.dev0.dist-info → airbyte_cdk-6.34.0.dev2.dist-info}/WHEEL +0 -0
  31. {airbyte_cdk-6.34.0.dev0.dist-info → airbyte_cdk-6.34.0.dev2.dist-info}/entry_points.txt +0 -0
@@ -13,6 +13,11 @@ from typing import Any, Dict, Iterable, List, Optional, Set
13
13
  from wcmatch.glob import GLOBSTAR, globmatch
14
14
 
15
15
  from airbyte_cdk.sources.file_based.config.abstract_file_based_spec import AbstractFileBasedSpec
16
+ from airbyte_cdk.sources.file_based.config.validate_config_transfer_modes import (
17
+ include_identities_stream,
18
+ preserve_directory_structure,
19
+ use_file_transfer,
20
+ )
16
21
  from airbyte_cdk.sources.file_based.remote_file import RemoteFile
17
22
 
18
23
 
@@ -128,24 +133,20 @@ class AbstractFileBasedStreamReader(ABC):
128
133
 
129
134
  def use_file_transfer(self) -> bool:
130
135
  if self.config:
131
- use_file_transfer = (
132
- hasattr(self.config.delivery_method, "delivery_type")
133
- and self.config.delivery_method.delivery_type == "use_file_transfer"
134
- )
135
- return use_file_transfer
136
+ return use_file_transfer(self.config)
136
137
  return False
137
138
 
138
139
  def preserve_directory_structure(self) -> bool:
139
140
  # fall back to preserve subdirectories if config is not present or incomplete
140
- if (
141
- self.use_file_transfer()
142
- and self.config
143
- and hasattr(self.config.delivery_method, "preserve_directory_structure")
144
- and self.config.delivery_method.preserve_directory_structure is not None
145
- ):
146
- return self.config.delivery_method.preserve_directory_structure
141
+ if self.config:
142
+ return preserve_directory_structure(self.config)
147
143
  return True
148
144
 
145
+ def include_identities_stream(self) -> bool:
146
+ if self.config:
147
+ return include_identities_stream(self.config)
148
+ return False
149
+
149
150
  @abstractmethod
150
151
  def get_file(
151
152
  self, file: RemoteFile, local_directory: str, logger: logging.Logger
@@ -183,3 +184,97 @@ class AbstractFileBasedStreamReader(ABC):
183
184
  makedirs(path.dirname(local_file_path), exist_ok=True)
184
185
  absolute_file_path = path.abspath(local_file_path)
185
186
  return [file_relative_path, local_file_path, absolute_file_path]
187
+
188
+ @abstractmethod
189
+ def get_file_acl_permissions(self, file: RemoteFile, logger: logging.Logger) -> Dict[str, Any]:
190
+ """
191
+ This function should return the allow list for a given file, i.e. the list of all identities and their permission levels associated with it
192
+
193
+ e.g.
194
+ def get_file_acl_permissions(self, file: RemoteFile, logger: logging.Logger):
195
+ api_conn = some_api.conn(credentials=SOME_CREDENTIALS)
196
+ result = api_conn.get_file_permissions_info(file.id)
197
+ return MyPermissionsModel(
198
+ id=result["id"],
199
+ access_control_list = result["access_control_list"],
200
+ is_public = result["is_public"],
201
+ ).dict()
202
+ """
203
+ raise NotImplementedError(
204
+ f"{self.__class__.__name__} does not implement get_file_acl_permissions(). To support ACL permissions, implement this method and update file_permissions_schema."
205
+ )
206
+
207
+ @abstractmethod
208
+ def load_identity_groups(self, logger: logging.Logger) -> Iterable[Dict[str, Any]]:
209
+ """
210
+ This function should return the Identities in a determined "space" or "domain" where the file metadata (ACLs) are fetched and ACLs items (Identities) exists.
211
+
212
+ e.g.
213
+ def load_identity_groups(self, logger: logging.Logger) -> Dict[str, Any]:
214
+ api_conn = some_api.conn(credentials=SOME_CREDENTIALS)
215
+ users_api = api_conn.users()
216
+ groups_api = api_conn.groups()
217
+ members_api = self.google_directory_service.members()
218
+ for user in users_api.list():
219
+ yield my_identity_model(id=user.id, name=user.name, email_address=user.email, type="user").dict()
220
+ for group in groups_api.list():
221
+ group_obj = my_identity_model(id=group.id, name=groups.name, email_address=user.email, type="group").dict()
222
+ for member in members_api.list(group=group):
223
+ group_obj.member_email_addresses = group_obj.member_email_addresses or []
224
+ group_obj.member_email_addresses.append(member.email)
225
+ yield group_obj.dict()
226
+ """
227
+ raise NotImplementedError(
228
+ f"{self.__class__.__name__} does not implement load_identity_groups(). To support identities, implement this method and update identities_schema."
229
+ )
230
+
231
+ @property
232
+ @abstractmethod
233
+ def file_permissions_schema(self) -> Dict[str, Any]:
234
+ """
235
+ This function should return the permissions schema for file permissions stream.
236
+
237
+ e.g.
238
+ def file_permissions_schema(self) -> Dict[str, Any]:
239
+ # you can also follow the pattern we have for python connectors and have a json file and read from there e.g. schemas/identities.json
240
+ return {
241
+ "type": "object",
242
+ "properties": {
243
+ "id": { "type": "string" },
244
+ "file_path": { "type": "string" },
245
+ "access_control_list": {
246
+ "type": "array",
247
+ "items": { "type": "string" }
248
+ },
249
+ "publicly_accessible": { "type": "boolean" }
250
+ }
251
+ }
252
+ """
253
+ raise NotImplementedError(
254
+ f"{self.__class__.__name__} does not implement file_permissions_schema, please return json schema for your permissions streams."
255
+ )
256
+
257
+ @property
258
+ @abstractmethod
259
+ def identities_schema(self) -> Dict[str, Any]:
260
+ """
261
+ This function should return the identities schema for file identity stream.
262
+
263
+ e.g.
264
+ def identities_schema(self) -> Dict[str, Any]:
265
+ # you can also follow the pattern we have for python connectors and have a json file and read from there e.g. schemas/identities.json
266
+ return {
267
+ "type": "object",
268
+ "properties": {
269
+ "id": { "type": "string" },
270
+ "remote_id": { "type": "string" },
271
+ "name": { "type": ["null", "string"] },
272
+ "email_address": { "type": ["null", "string"] },
273
+ "member_email_addresses": { "type": ["null", "array"] },
274
+ "type": { "type": "string" },
275
+ }
276
+ }
277
+ """
278
+ raise NotImplementedError(
279
+ f"{self.__class__.__name__} does not implement identities_schema, please return json schema for your identities stream."
280
+ )
@@ -1,4 +1,13 @@
1
1
  from airbyte_cdk.sources.file_based.stream.abstract_file_based_stream import AbstractFileBasedStream
2
2
  from airbyte_cdk.sources.file_based.stream.default_file_based_stream import DefaultFileBasedStream
3
+ from airbyte_cdk.sources.file_based.stream.identities_stream import FileIdentitiesStream
4
+ from airbyte_cdk.sources.file_based.stream.permissions_file_based_stream import (
5
+ PermissionsFileBasedStream,
6
+ )
3
7
 
4
- __all__ = ["AbstractFileBasedStream", "DefaultFileBasedStream"]
8
+ __all__ = [
9
+ "AbstractFileBasedStream",
10
+ "DefaultFileBasedStream",
11
+ "FileIdentitiesStream",
12
+ "PermissionsFileBasedStream",
13
+ ]
@@ -0,0 +1,47 @@
1
+ #
2
+ # Copyright (c) 2024 Airbyte, Inc., all rights reserved.
3
+ #
4
+
5
+ from functools import cache
6
+ from typing import Any, Dict, Iterable, Mapping, MutableMapping, Optional
7
+
8
+ from airbyte_cdk.sources.file_based.config.file_based_stream_config import PrimaryKeyType
9
+ from airbyte_cdk.sources.file_based.discovery_policy import AbstractDiscoveryPolicy
10
+ from airbyte_cdk.sources.file_based.exceptions import FileBasedErrorsCollector
11
+ from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader
12
+ from airbyte_cdk.sources.streams.core import JsonSchema
13
+ from airbyte_cdk.sources.streams.permissions.identities_stream import IdentitiesStream
14
+
15
+
16
+ class FileIdentitiesStream(IdentitiesStream):
17
+ """
18
+ The identities stream. A full refresh stream to sync identities from a certain domain.
19
+ The stream reader manages the logic to get such data, which is implemented on connector side.
20
+ """
21
+
22
+ is_resumable = False
23
+
24
+ def __init__(
25
+ self,
26
+ catalog_schema: Optional[Mapping[str, Any]],
27
+ stream_reader: AbstractFileBasedStreamReader,
28
+ discovery_policy: AbstractDiscoveryPolicy,
29
+ errors_collector: FileBasedErrorsCollector,
30
+ ) -> None:
31
+ super().__init__()
32
+ self.catalog_schema = catalog_schema
33
+ self.stream_reader = stream_reader
34
+ self._discovery_policy = discovery_policy
35
+ self.errors_collector = errors_collector
36
+ self._cursor: MutableMapping[str, Any] = {}
37
+
38
+ @property
39
+ def primary_key(self) -> PrimaryKeyType:
40
+ return None
41
+
42
+ def load_identity_groups(self) -> Iterable[Dict[str, Any]]:
43
+ return self.stream_reader.load_identity_groups(logger=self.logger)
44
+
45
+ @cache
46
+ def get_json_schema(self) -> JsonSchema:
47
+ return self.stream_reader.identities_schema
@@ -0,0 +1,85 @@
1
+ #
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
+ #
4
+
5
+ import traceback
6
+ from typing import Any, Dict, Iterable
7
+
8
+ from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level
9
+ from airbyte_cdk.models import Type as MessageType
10
+ from airbyte_cdk.sources.file_based.stream import DefaultFileBasedStream
11
+ from airbyte_cdk.sources.file_based.types import StreamSlice
12
+ from airbyte_cdk.sources.streams.core import JsonSchema
13
+ from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message
14
+
15
+
16
+ class PermissionsFileBasedStream(DefaultFileBasedStream):
17
+ """
18
+ A specialized stream for handling file-based ACL permissions.
19
+
20
+ This stream works with the stream_reader to:
21
+ 1. Fetch ACL permissions for each file in the source
22
+ 2. Transform permissions into a standardized format
23
+ 3. Generate records containing permission information
24
+
25
+ The stream_reader is responsible for the actual implementation of permission retrieval
26
+ and schema definition, while this class handles the streaming interface.
27
+ """
28
+
29
+ def _filter_schema_invalid_properties(
30
+ self, configured_catalog_json_schema: Dict[str, Any]
31
+ ) -> Dict[str, Any]:
32
+ return self.stream_reader.file_permissions_schema
33
+
34
+ def read_records_from_slice(self, stream_slice: StreamSlice) -> Iterable[AirbyteMessage]:
35
+ """
36
+ Yield permissions records from all remote files
37
+ """
38
+
39
+ for file in stream_slice["files"]:
40
+ no_permissions = False
41
+ file_datetime_string = file.last_modified.strftime(self.DATE_TIME_FORMAT)
42
+ try:
43
+ permissions_record = self.stream_reader.get_file_acl_permissions(
44
+ file, logger=self.logger
45
+ )
46
+ if not permissions_record:
47
+ no_permissions = True
48
+ self.logger.warning(
49
+ f"Unable to fetch permissions. stream={self.name} file={file.uri}"
50
+ )
51
+ continue
52
+ permissions_record = self.transform_record(
53
+ permissions_record, file, file_datetime_string
54
+ )
55
+ yield stream_data_to_airbyte_message(
56
+ self.name, permissions_record, is_file_transfer_message=False
57
+ )
58
+ except Exception as e:
59
+ self.logger.error(f"Failed to retrieve permissions for file {file.uri}: {str(e)}")
60
+ yield AirbyteMessage(
61
+ type=MessageType.LOG,
62
+ log=AirbyteLogMessage(
63
+ level=Level.ERROR,
64
+ message=f"Error retrieving files permissions: stream={self.name} file={file.uri}",
65
+ stack_trace=traceback.format_exc(),
66
+ ),
67
+ )
68
+ finally:
69
+ if no_permissions:
70
+ yield AirbyteMessage(
71
+ type=MessageType.LOG,
72
+ log=AirbyteLogMessage(
73
+ level=Level.WARN,
74
+ message=f"Unable to fetch permissions. stream={self.name} file={file.uri}",
75
+ ),
76
+ )
77
+
78
+ def _get_raw_json_schema(self) -> JsonSchema:
79
+ """
80
+ Retrieve the raw JSON schema for file permissions from the stream reader.
81
+
82
+ Returns:
83
+ The file permissions schema that defines the structure of permission records
84
+ """
85
+ return self.stream_reader.file_permissions_schema
@@ -0,0 +1,26 @@
1
+ #
2
+ # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
3
+ #
4
+
5
+ from typing import Literal
6
+
7
+ from pydantic.v1 import AnyUrl, BaseModel, Field
8
+
9
+ from airbyte_cdk import OneOfOptionConfig
10
+
11
+
12
+ class DeliverPermissions(BaseModel):
13
+ class Config(OneOfOptionConfig):
14
+ title = "Replicate Permissions ACL"
15
+ description = "Sends one identity stream and one or more permissions (ACL) streams to the destination. This data can be used in downstream systems to recreate permission restrictions mirroring the original source."
16
+ discriminator = "delivery_type"
17
+
18
+ delivery_type: Literal["use_permissions_transfer"] = Field(
19
+ "use_permissions_transfer", const=True
20
+ )
21
+
22
+ include_identities_stream: bool = Field(
23
+ title="Include Identity Stream",
24
+ description="This data can be used in downstream systems to recreate permission restrictions mirroring the original source",
25
+ default=True,
26
+ )
@@ -0,0 +1,75 @@
1
+ #
2
+ # Copyright (c) 2024 Airbyte, Inc., all rights reserved.
3
+ #
4
+
5
+ import traceback
6
+ from abc import ABC, abstractmethod
7
+ from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional
8
+
9
+ from airbyte_protocol_dataclasses.models import SyncMode
10
+
11
+ from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level
12
+ from airbyte_cdk.models import Type as MessageType
13
+ from airbyte_cdk.sources.streams import Stream
14
+ from airbyte_cdk.sources.streams.checkpoint import Cursor
15
+ from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message
16
+ from airbyte_cdk.utils.traced_exception import AirbyteTracedException
17
+
18
+
19
+ class IdentitiesStream(Stream, ABC):
20
+ """
21
+ The identities stream. A full refresh stream to sync identities from a certain domain.
22
+ The load_identity_groups method manages the logic to get such data.
23
+ """
24
+
25
+ IDENTITIES_STREAM_NAME = "identities"
26
+
27
+ is_resumable = False
28
+
29
+ def __init__(self) -> None:
30
+ super().__init__()
31
+ self._cursor: MutableMapping[str, Any] = {}
32
+
33
+ @property
34
+ def state(self) -> MutableMapping[str, Any]:
35
+ return self._cursor
36
+
37
+ @state.setter
38
+ def state(self, value: MutableMapping[str, Any]) -> None:
39
+ """State setter, accept state serialized by state getter."""
40
+ self._cursor = value
41
+
42
+ def read_records(
43
+ self,
44
+ sync_mode: SyncMode,
45
+ cursor_field: Optional[List[str]] = None,
46
+ stream_slice: Optional[Mapping[str, Any]] = None,
47
+ stream_state: Optional[Mapping[str, Any]] = None,
48
+ ) -> Iterable[Mapping[str, Any] | AirbyteMessage]:
49
+ try:
50
+ identity_groups = self.load_identity_groups()
51
+ for record in identity_groups:
52
+ yield stream_data_to_airbyte_message(self.name, record)
53
+ except AirbyteTracedException as exc:
54
+ # Re-raise the exception to stop the whole sync immediately as this is a fatal error
55
+ raise exc
56
+ except Exception as e:
57
+ yield AirbyteMessage(
58
+ type=MessageType.LOG,
59
+ log=AirbyteLogMessage(
60
+ level=Level.ERROR,
61
+ message=f"Error trying to read identities: {e} stream={self.name}",
62
+ stack_trace=traceback.format_exc(),
63
+ ),
64
+ )
65
+
66
+ @abstractmethod
67
+ def load_identity_groups(self) -> Iterable[Dict[str, Any]]:
68
+ raise NotImplementedError("Implement this method to read identity records")
69
+
70
+ @property
71
+ def name(self) -> str:
72
+ return self.IDENTITIES_STREAM_NAME
73
+
74
+ def get_cursor(self) -> Optional[Cursor]:
75
+ return None
@@ -17,6 +17,7 @@ class SupportedHttpMethods(str, Enum):
17
17
  GET = "get"
18
18
  PATCH = "patch"
19
19
  POST = "post"
20
+ PUT = "put"
20
21
  DELETE = "delete"
21
22
 
22
23
 
@@ -77,7 +78,7 @@ class HttpMocker(contextlib.ContextDecorator):
77
78
  additional_matcher=self._matches_wrapper(matcher),
78
79
  response_list=[
79
80
  {
80
- "text": response.body,
81
+ self._get_body_field(response): response.body,
81
82
  "status_code": response.status_code,
82
83
  "headers": response.headers,
83
84
  }
@@ -85,6 +86,10 @@ class HttpMocker(contextlib.ContextDecorator):
85
86
  ],
86
87
  )
87
88
 
89
+ @staticmethod
90
+ def _get_body_field(response: HttpResponse) -> str:
91
+ return "text" if isinstance(response.body, str) else "content"
92
+
88
93
  def get(self, request: HttpRequest, responses: Union[HttpResponse, List[HttpResponse]]) -> None:
89
94
  self._mock_request_method(SupportedHttpMethods.GET, request, responses)
90
95
 
@@ -98,6 +103,9 @@ class HttpMocker(contextlib.ContextDecorator):
98
103
  ) -> None:
99
104
  self._mock_request_method(SupportedHttpMethods.POST, request, responses)
100
105
 
106
+ def put(self, request: HttpRequest, responses: Union[HttpResponse, List[HttpResponse]]) -> None:
107
+ self._mock_request_method(SupportedHttpMethods.PUT, request, responses)
108
+
101
109
  def delete(
102
110
  self, request: HttpRequest, responses: Union[HttpResponse, List[HttpResponse]]
103
111
  ) -> None:
@@ -1,19 +1,22 @@
1
1
  # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
2
2
 
3
3
  from types import MappingProxyType
4
- from typing import Mapping
4
+ from typing import Mapping, Union
5
5
 
6
6
 
7
7
  class HttpResponse:
8
8
  def __init__(
9
- self, body: str, status_code: int = 200, headers: Mapping[str, str] = MappingProxyType({})
9
+ self,
10
+ body: Union[str, bytes],
11
+ status_code: int = 200,
12
+ headers: Mapping[str, str] = MappingProxyType({}),
10
13
  ):
11
14
  self._body = body
12
15
  self._status_code = status_code
13
16
  self._headers = headers
14
17
 
15
18
  @property
16
- def body(self) -> str:
19
+ def body(self) -> Union[str, bytes]:
17
20
  return self._body
18
21
 
19
22
  @property
@@ -6,6 +6,12 @@
6
6
  import copy
7
7
  from typing import Any, Dict, List, Mapping, Optional, Union
8
8
 
9
+ from airbyte_cdk.sources.declarative.requesters.request_option import (
10
+ RequestOption,
11
+ RequestOptionType,
12
+ )
13
+ from airbyte_cdk.sources.types import Config
14
+
9
15
 
10
16
  def _merge_mappings(
11
17
  target: Dict[str, Any],
@@ -33,13 +39,17 @@ def _merge_mappings(
33
39
  if isinstance(target_value, dict) and isinstance(source_value, dict):
34
40
  # Only body_json supports nested_structures
35
41
  if not allow_same_value_merge:
36
- raise ValueError(f"Duplicate keys found: {'.'.join(current_path)}")
42
+ raise ValueError(
43
+ f"Request body collision, duplicate keys detected at key path: {'.'.join(current_path)}. Please ensure that all keys in the request are unique."
44
+ )
37
45
  # If both are dictionaries, recursively merge them
38
46
  _merge_mappings(target_value, source_value, current_path, allow_same_value_merge)
39
47
 
40
48
  elif not allow_same_value_merge or target_value != source_value:
41
49
  # If same key has different values, that's a conflict
42
- raise ValueError(f"Duplicate keys found: {'.'.join(current_path)}")
50
+ raise ValueError(
51
+ f"Request body collision, duplicate keys detected at key path: {'.'.join(current_path)}. Please ensure that all keys in the request are unique."
52
+ )
43
53
  else:
44
54
  # No conflict, just copy the value (using deepcopy for nested structures)
45
55
  target[key] = copy.deepcopy(source_value)
@@ -102,3 +112,34 @@ def combine_mappings(
102
112
  _merge_mappings(result, mapping, allow_same_value_merge=allow_same_value_merge)
103
113
 
104
114
  return result
115
+
116
+
117
+ def _validate_component_request_option_paths(
118
+ config: Config, *request_options: Optional[RequestOption]
119
+ ) -> None:
120
+ """
121
+ Validates that a component with multiple request options does not have conflicting paths.
122
+ Uses dummy values for validation since actual values might not be available at init time.
123
+ """
124
+ grouped_options: Dict[RequestOptionType, List[RequestOption]] = {}
125
+ for option in request_options:
126
+ if option:
127
+ grouped_options.setdefault(option.inject_into, []).append(option)
128
+
129
+ for inject_type, options in grouped_options.items():
130
+ if len(options) <= 1:
131
+ continue
132
+
133
+ option_dicts: List[Optional[Union[Mapping[str, Any], str]]] = []
134
+ for i, option in enumerate(options):
135
+ option_dict: Dict[str, Any] = {}
136
+ # Use indexed dummy values to ensure we catch conflicts
137
+ option.inject_into_request(option_dict, f"dummy_value_{i}", config)
138
+ option_dicts.append(option_dict)
139
+
140
+ try:
141
+ combine_mappings(
142
+ option_dicts, allow_same_value_merge=(inject_type == RequestOptionType.body_json)
143
+ )
144
+ except ValueError as error:
145
+ raise ValueError(error)
@@ -73,7 +73,3 @@ class PrintBuffer:
73
73
  ) -> None:
74
74
  self.flush()
75
75
  sys.stdout, sys.stderr = self.old_stdout, self.old_stderr
76
-
77
- def flush_logger(self) -> None:
78
- """Explicit flush that can be triggered by logger to synchronize with PrintBuffer."""
79
- self.flush()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: airbyte-cdk
3
- Version: 6.34.0.dev0
3
+ Version: 6.34.0.dev2
4
4
  Summary: A framework for writing Airbyte Connectors.
5
5
  Home-page: https://airbyte.com
6
6
  License: MIT