airbyte-cdk 6.61.1__py3-none-any.whl → 6.61.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_cdk/__init__.py +0 -2
- airbyte_cdk/connector_builder/connector_builder_handler.py +7 -5
- airbyte_cdk/connector_builder/main.py +4 -2
- airbyte_cdk/connector_builder/test_reader/reader.py +10 -8
- airbyte_cdk/legacy/__init__.py +1 -0
- airbyte_cdk/legacy/sources/__init__.py +1 -0
- airbyte_cdk/legacy/sources/declarative/__init__.py +1 -0
- airbyte_cdk/legacy/sources/declarative/incremental/__init__.py +1 -0
- airbyte_cdk/{sources → legacy/sources}/declarative/manifest_declarative_source.py +1 -1
- airbyte_cdk/manifest_server/Dockerfile +45 -0
- airbyte_cdk/manifest_server/README.md +142 -0
- airbyte_cdk/manifest_server/__init__.py +3 -0
- airbyte_cdk/manifest_server/api_models/__init__.py +49 -0
- airbyte_cdk/manifest_server/api_models/capabilities.py +7 -0
- airbyte_cdk/manifest_server/api_models/dicts.py +17 -0
- airbyte_cdk/manifest_server/api_models/manifest.py +73 -0
- airbyte_cdk/manifest_server/api_models/stream.py +76 -0
- airbyte_cdk/manifest_server/app.py +17 -0
- airbyte_cdk/manifest_server/auth.py +43 -0
- airbyte_cdk/manifest_server/cli/__init__.py +5 -0
- airbyte_cdk/manifest_server/cli/_common.py +28 -0
- airbyte_cdk/manifest_server/cli/_info.py +30 -0
- airbyte_cdk/manifest_server/cli/_openapi.py +43 -0
- airbyte_cdk/manifest_server/cli/_start.py +38 -0
- airbyte_cdk/manifest_server/cli/run.py +59 -0
- airbyte_cdk/manifest_server/command_processor/__init__.py +0 -0
- airbyte_cdk/manifest_server/command_processor/processor.py +122 -0
- airbyte_cdk/manifest_server/command_processor/utils.py +99 -0
- airbyte_cdk/manifest_server/main.py +24 -0
- airbyte_cdk/manifest_server/openapi.yaml +641 -0
- airbyte_cdk/manifest_server/routers/__init__.py +0 -0
- airbyte_cdk/manifest_server/routers/capabilities.py +25 -0
- airbyte_cdk/manifest_server/routers/health.py +13 -0
- airbyte_cdk/manifest_server/routers/manifest.py +155 -0
- airbyte_cdk/sources/declarative/concurrent_declarative_source.py +507 -24
- airbyte_cdk/sources/declarative/incremental/__init__.py +4 -4
- airbyte_cdk/sources/declarative/incremental/per_partition_with_global.py +4 -4
- airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +12 -0
- airbyte_cdk/sources/declarative/retrievers/retriever.py +1 -2
- airbyte_cdk/sources/streams/http/http_client.py +21 -0
- {airbyte_cdk-6.61.1.dist-info → airbyte_cdk-6.61.3.dist-info}/METADATA +4 -1
- {airbyte_cdk-6.61.1.dist-info → airbyte_cdk-6.61.3.dist-info}/RECORD +48 -19
- {airbyte_cdk-6.61.1.dist-info → airbyte_cdk-6.61.3.dist-info}/entry_points.txt +1 -0
- /airbyte_cdk/{sources → legacy/sources}/declarative/declarative_source.py +0 -0
- /airbyte_cdk/{sources → legacy/sources}/declarative/incremental/per_partition_cursor.py +0 -0
- {airbyte_cdk-6.61.1.dist-info → airbyte_cdk-6.61.3.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.61.1.dist-info → airbyte_cdk-6.61.3.dist-info}/LICENSE_SHORT +0 -0
- {airbyte_cdk-6.61.1.dist-info → airbyte_cdk-6.61.3.dist-info}/WHEEL +0 -0
airbyte_cdk/sources/declarative/concurrent_declarative_source.py

@@ -1,33 +1,55 @@
-#
-# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
-#
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 
+import json
 import logging
+import pkgutil
+from copy import deepcopy
 from dataclasses import dataclass, field
 from queue import Queue
+from types import ModuleType
 from typing import (
     Any,
     ClassVar,
+    Dict,
     Generic,
     Iterator,
     List,
     Mapping,
     MutableMapping,
     Optional,
+    Set,
     Tuple,
     Union,
 )
 
+import orjson
+import yaml
 from airbyte_protocol_dataclasses.models import Level
+from jsonschema.exceptions import ValidationError
+from jsonschema.validators import validate
 
+from airbyte_cdk.config_observation import create_connector_config_control_message
+from airbyte_cdk.connector_builder.models import (
+    LogMessage as ConnectorBuilderLogMessage,
+)
+from airbyte_cdk.manifest_migrations.migration_handler import (
+    ManifestMigrationHandler,
+)
 from airbyte_cdk.models import (
     AirbyteCatalog,
+    AirbyteConnectionStatus,
     AirbyteMessage,
     AirbyteStateMessage,
     ConfiguredAirbyteCatalog,
+    ConnectorSpecification,
+    FailureType,
 )
+from airbyte_cdk.models.airbyte_protocol_serializers import AirbyteMessageSerializer
+from airbyte_cdk.sources.abstract_source import AbstractSource
 from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource
 from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
+from airbyte_cdk.sources.declarative.checks import COMPONENTS_CHECKER_TYPE_MAPPING
+from airbyte_cdk.sources.declarative.checks.connection_checker import ConnectionChecker
 from airbyte_cdk.sources.declarative.concurrency_level import ConcurrencyLevel
 from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream
 from airbyte_cdk.sources.declarative.incremental import (
@@ -38,26 +60,50 @@ from airbyte_cdk.sources.declarative.incremental.datetime_based_cursor import Da
 from airbyte_cdk.sources.declarative.incremental.per_partition_with_global import (
     PerPartitionWithGlobalCursor,
 )
-from airbyte_cdk.sources.declarative.
+from airbyte_cdk.sources.declarative.interpolation import InterpolatedBoolean
+from airbyte_cdk.sources.declarative.models import FileUploader
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     ConcurrencyLevel as ConcurrencyLevelModel,
 )
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     DatetimeBasedCursor as DatetimeBasedCursorModel,
 )
+from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
+    DeclarativeStream as DeclarativeStreamModel,
+)
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     IncrementingCountCursor as IncrementingCountCursorModel,
 )
+from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
+    Spec as SpecModel,
+)
+from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
+    StateDelegatingStream as StateDelegatingStreamModel,
+)
+from airbyte_cdk.sources.declarative.parsers.custom_code_compiler import (
+    get_registered_components_module,
+)
+from airbyte_cdk.sources.declarative.parsers.manifest_component_transformer import (
+    ManifestComponentTransformer,
+)
+from airbyte_cdk.sources.declarative.parsers.manifest_normalizer import (
+    ManifestNormalizer,
+)
+from airbyte_cdk.sources.declarative.parsers.manifest_reference_resolver import (
+    ManifestReferenceResolver,
+)
 from airbyte_cdk.sources.declarative.parsers.model_to_component_factory import (
     ModelToComponentFactory,
 )
 from airbyte_cdk.sources.declarative.partition_routers import AsyncJobPartitionRouter
+from airbyte_cdk.sources.declarative.resolvers import COMPONENTS_RESOLVER_TYPE_MAPPING
 from airbyte_cdk.sources.declarative.retrievers import AsyncRetriever, Retriever, SimpleRetriever
+from airbyte_cdk.sources.declarative.spec.spec import Spec
 from airbyte_cdk.sources.declarative.stream_slicers.declarative_partition_generator import (
     DeclarativePartitionFactory,
     StreamSlicerPartitionGenerator,
 )
-from airbyte_cdk.sources.declarative.types import ConnectionDefinition
+from airbyte_cdk.sources.declarative.types import Config, ConnectionDefinition
 from airbyte_cdk.sources.message.concurrent_repository import ConcurrentMessageRepository
 from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository
 from airbyte_cdk.sources.source import TState
@@ -68,6 +114,12 @@ from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, Fina
 from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream
 from airbyte_cdk.sources.streams.concurrent.helpers import get_primary_key_from_stream
 from airbyte_cdk.sources.streams.concurrent.partitions.types import QueueItem
+from airbyte_cdk.sources.utils.slice_logger import (
+    AlwaysLogSliceLogger,
+    DebugSliceLogger,
+    SliceLogger,
+)
+from airbyte_cdk.utils.traced_exception import AirbyteTracedException
 
 
 @dataclass
@@ -85,8 +137,33 @@ class TestLimits:
     max_streams: int = field(default=DEFAULT_MAX_STREAMS)
 
 
-
-
+def _get_declarative_component_schema() -> Dict[str, Any]:
+    try:
+        raw_component_schema = pkgutil.get_data(
+            "airbyte_cdk", "sources/declarative/declarative_component_schema.yaml"
+        )
+        if raw_component_schema is not None:
+            declarative_component_schema = yaml.load(raw_component_schema, Loader=yaml.SafeLoader)
+            return declarative_component_schema # type: ignore
+        else:
+            raise RuntimeError(
+                "Failed to read manifest component json schema required for deduplication"
+            )
+    except FileNotFoundError as e:
+        raise FileNotFoundError(
+            f"Failed to read manifest component json schema required for deduplication: {e}"
+        )
+
+
+# todo: AbstractSource can be removed once we've completely moved off all legacy synchronous CDK code paths
+# and replaced with implementing the source.py:Source class
+#
+# todo: The `ConcurrentDeclarativeSource.message_repository()` method can also be removed once AbstractSource
+# is no longer inherited from since the only external dependency is from that class.
+#
+# todo: It is worth investigating removal of the Generic[TState] since it will always be Optional[List[AirbyteStateMessage]]
+class ConcurrentDeclarativeSource(AbstractSource, Generic[TState]):
+    # By default, we defer to a value of 2. A value lower than could cause a PartitionEnqueuer to be stuck in a state of deadlock
     # because it has hit the limit of futures but not partition reader is consuming them.
     _LOWEST_SAFE_CONCURRENCY_LEVEL = 2
 
@@ -104,6 +181,10 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
         config_path: Optional[str] = None,
         **kwargs: Any,
     ) -> None:
+        self.logger = logging.getLogger(f"airbyte.{self.name}")
+
+        self._limits = limits
+
         # todo: We could remove state from initialization. Now that streams are grouped during the read(), a source
         # no longer needs to store the original incoming state. But maybe there's an edge case?
         self._connector_state_manager = ConnectorStateManager(state=state) # type: ignore # state is always in the form of List[AirbyteStateMessage]. The ConnectorStateManager should use generics, but this can be done later
@@ -132,18 +213,40 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
             disable_cache=True if limits else False,
         )
 
-        self.
+        self._should_normalize = normalize_manifest
+        self._should_migrate = migrate_manifest
+        self._declarative_component_schema = _get_declarative_component_schema()
+        # If custom components are needed, locate and/or register them.
+        self.components_module: ModuleType | None = get_registered_components_module(config=config)
+        # set additional attributes
+        self._debug = debug
+        self._emit_connector_builder_messages = emit_connector_builder_messages
+        self._constructor = (
+            component_factory
+            if component_factory
+            else ModelToComponentFactory(
+                emit_connector_builder_messages=emit_connector_builder_messages,
+                max_concurrent_async_job_count=source_config.get("max_concurrent_async_job_count"),
+            )
+        )
 
-
-
-
-
-
-
-
-
-
+        self._message_repository = self._constructor.get_message_repository()
+        self._slice_logger: SliceLogger = (
+            AlwaysLogSliceLogger() if emit_connector_builder_messages else DebugSliceLogger()
+        )
+
+        # resolve all components in the manifest
+        self._source_config = self._pre_process_manifest(dict(source_config))
+        # validate resolved manifest against the declarative component schema
+        self._validate_source()
+        # apply additional post-processing to the manifest
+        self._post_process_manifest()
+
+        spec: Optional[Mapping[str, Any]] = self._source_config.get("spec")
+        self._spec_component: Optional[Spec] = (
+            self._constructor.create_component(SpecModel, spec, dict()) if spec else None
         )
+        self._config = self._migrate_and_transform_config(config_path, config) or {}
 
         concurrency_level_from_manifest = self._source_config.get("concurrency_level")
         if concurrency_level_from_manifest:
@@ -171,15 +274,141 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
             logger=self.logger,
             slice_logger=self._slice_logger,
             queue=queue,
-            message_repository=self.
+            message_repository=self._message_repository,
+        )
+
+    def _pre_process_manifest(self, manifest: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Preprocesses the provided manifest dictionary by resolving any manifest references.
+
+        This method modifies the input manifest in place, resolving references using the
+        ManifestReferenceResolver to ensure all references within the manifest are properly handled.
+
+        Args:
+            manifest (Dict[str, Any]): The manifest dictionary to preprocess and resolve references in.
+
+        Returns:
+            None
+        """
+        # For ease of use we don't require the type to be specified at the top level manifest, but it should be included during processing
+        manifest = self._fix_source_type(manifest)
+        # Resolve references in the manifest
+        resolved_manifest = ManifestReferenceResolver().preprocess_manifest(manifest)
+        # Propagate types and parameters throughout the manifest
+        propagated_manifest = ManifestComponentTransformer().propagate_types_and_parameters(
+            "", resolved_manifest, {}
         )
 
+        return propagated_manifest
+
+    def _fix_source_type(self, manifest: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Fix the source type in the manifest. This is necessary because the source type is not always set in the manifest.
+        """
+        if "type" not in manifest:
+            manifest["type"] = "DeclarativeSource"
+
+        return manifest
+
+    def _post_process_manifest(self) -> None:
+        """
+        Post-processes the manifest after validation.
+        This method is responsible for any additional modifications or transformations needed
+        after the manifest has been validated and before it is used in the source.
+        """
+        # apply manifest migration, if required
+        self._migrate_manifest()
+        # apply manifest normalization, if required
+        self._normalize_manifest()
+
+    def _migrate_manifest(self) -> None:
+        """
+        This method is used to migrate the manifest. It should be called after the manifest has been validated.
+        The migration is done in place, so the original manifest is modified.
+
+        The original manifest is returned if any error occurs during migration.
+        """
+        if self._should_migrate:
+            manifest_migrator = ManifestMigrationHandler(self._source_config)
+            self._source_config = manifest_migrator.apply_migrations()
+            # validate migrated manifest against the declarative component schema
+            self._validate_source()
+
+    def _normalize_manifest(self) -> None:
+        """
+        This method is used to normalize the manifest. It should be called after the manifest has been validated.
+
+        Connector Builder UI rendering requires the manifest to be in a specific format.
+        - references have been resolved
+        - the commonly used definitions are extracted to the `definitions.linked.*`
+        """
+        if self._should_normalize:
+            normalizer = ManifestNormalizer(self._source_config, self._declarative_component_schema)
+            self._source_config = normalizer.normalize()
+
+    def _validate_source(self) -> None:
+        """
+        Validates the connector manifest against the declarative component schema
+        """
+
+        try:
+            validate(self._source_config, self._declarative_component_schema)
+        except ValidationError as e:
+            raise ValidationError(
+                "Validation against json schema defined in declarative_component_schema.yaml schema failed"
+            ) from e
+
+    def _migrate_and_transform_config(
+        self,
+        config_path: Optional[str],
+        config: Optional[Config],
+    ) -> Optional[Config]:
+        if not config:
+            return None
+        if not self._spec_component:
+            return config
+        mutable_config = dict(config)
+        self._spec_component.migrate_config(mutable_config)
+        if mutable_config != config:
+            if config_path:
+                with open(config_path, "w") as f:
+                    json.dump(mutable_config, f)
+            control_message = create_connector_config_control_message(mutable_config)
+            print(orjson.dumps(AirbyteMessageSerializer.dump(control_message)).decode())
+        self._spec_component.transform_config(mutable_config)
+        return mutable_config
+
+    def configure(self, config: Mapping[str, Any], temp_dir: str) -> Mapping[str, Any]:
+        config = self._config or config
+        return super().configure(config, temp_dir)
+
+    @property
+    def resolved_manifest(self) -> Mapping[str, Any]:
+        """
+        Returns the resolved manifest configuration for the source.
+
+        This property provides access to the internal source configuration as a mapping,
+        which contains all settings and parameters required to define the source's behavior.
+
+        Returns:
+            Mapping[str, Any]: The resolved source configuration manifest.
+        """
+        return self._source_config
+
+    # TODO: Deprecate this class once ConcurrentDeclarativeSource no longer inherits AbstractSource
+    @property
+    def message_repository(self) -> MessageRepository:
+        return self._message_repository
+
     # TODO: Remove this. This property is necessary to safely migrate Stripe during the transition state.
     @property
     def is_partially_declarative(self) -> bool:
         """This flag used to avoid unexpected AbstractStreamFacade processing as concurrent streams."""
         return False
 
+    def deprecation_warnings(self) -> List[ConnectorBuilderLogMessage]:
+        return self._constructor.get_model_deprecations()
+
     def read(
         self,
         logger: logging.Logger,
@@ -237,7 +466,140 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
 
         In both case, we will assume that calling the DeclarativeStream is perfectly fine as the result for these is the same regardless of if it is a DeclarativeStream or a DefaultStream (concurrent). This should simply be removed once we have moved away from the mentioned code paths above.
         """
-
+
+        if self._spec_component:
+            self._spec_component.validate_config(config)
+
+        stream_configs = (
+            self._stream_configs(self._source_config, config=config) + self.dynamic_streams
+        )
+
+        api_budget_model = self._source_config.get("api_budget")
+        if api_budget_model:
+            self._constructor.set_api_budget(api_budget_model, config)
+
+        source_streams = [
+            self._constructor.create_component(
+                (
+                    StateDelegatingStreamModel
+                    if stream_config.get("type") == StateDelegatingStreamModel.__name__
+                    else DeclarativeStreamModel
+                ),
+                stream_config,
+                config,
+                emit_connector_builder_messages=self._emit_connector_builder_messages,
+            )
+            for stream_config in self._initialize_cache_for_parent_streams(deepcopy(stream_configs))
+        ]
+        return source_streams
+
+    @staticmethod
+    def _initialize_cache_for_parent_streams(
+        stream_configs: List[Dict[str, Any]],
+    ) -> List[Dict[str, Any]]:
+        parent_streams = set()
+
+        def update_with_cache_parent_configs(
+            parent_configs: list[dict[str, Any]],
+        ) -> None:
+            for parent_config in parent_configs:
+                parent_streams.add(parent_config["stream"]["name"])
+                if parent_config["stream"]["type"] == "StateDelegatingStream":
+                    parent_config["stream"]["full_refresh_stream"]["retriever"]["requester"][
+                        "use_cache"
+                    ] = True
+                    parent_config["stream"]["incremental_stream"]["retriever"]["requester"][
+                        "use_cache"
+                    ] = True
+                else:
+                    parent_config["stream"]["retriever"]["requester"]["use_cache"] = True
+
+        for stream_config in stream_configs:
+            if stream_config.get("incremental_sync", {}).get("parent_stream"):
+                parent_streams.add(stream_config["incremental_sync"]["parent_stream"]["name"])
+                stream_config["incremental_sync"]["parent_stream"]["retriever"]["requester"][
+                    "use_cache"
+                ] = True
+
+            elif stream_config.get("retriever", {}).get("partition_router", {}):
+                partition_router = stream_config["retriever"]["partition_router"]
+
+                if isinstance(partition_router, dict) and partition_router.get(
+                    "parent_stream_configs"
+                ):
+                    update_with_cache_parent_configs(partition_router["parent_stream_configs"])
+                elif isinstance(partition_router, list):
+                    for router in partition_router:
+                        if router.get("parent_stream_configs"):
+                            update_with_cache_parent_configs(router["parent_stream_configs"])
+
+        for stream_config in stream_configs:
+            if stream_config["name"] in parent_streams:
+                if stream_config["type"] == "StateDelegatingStream":
+                    stream_config["full_refresh_stream"]["retriever"]["requester"]["use_cache"] = (
+                        True
+                    )
+                    stream_config["incremental_stream"]["retriever"]["requester"]["use_cache"] = (
+                        True
+                    )
+                else:
+                    stream_config["retriever"]["requester"]["use_cache"] = True
+        return stream_configs
+
+    def spec(self, logger: logging.Logger) -> ConnectorSpecification:
+        """
+        Returns the connector specification (spec) as defined in the Airbyte Protocol. The spec is an object describing the possible
+        configurations (e.g: username and password) which can be configured when running this connector. For low-code connectors, this
+        will first attempt to load the spec from the manifest's spec block, otherwise it will load it from "spec.yaml" or "spec.json"
+        in the project root.
+        """
+        return (
+            self._spec_component.generate_spec() if self._spec_component else super().spec(logger)
+        )
+
+    def check(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus:
+        return super().check(logger, config)
+
+    def check_connection(
+        self, logger: logging.Logger, config: Mapping[str, Any]
+    ) -> Tuple[bool, Any]:
+        """
+        :param logger: The source logger
+        :param config: The user-provided configuration as specified by the source's spec.
+          This usually contains information required to check connection e.g. tokens, secrets and keys etc.
+        :return: A tuple of (boolean, error). If boolean is true, then the connection check is successful
+          and we can connect to the underlying data source using the provided configuration.
+          Otherwise, the input config cannot be used to connect to the underlying data source,
+          and the "error" object should describe what went wrong.
+          The error object will be cast to string to display the problem to the user.
+        """
+        return self.connection_checker.check_connection(self, logger, config)
+
+    @property
+    def connection_checker(self) -> ConnectionChecker:
+        check = self._source_config["check"]
+        if "type" not in check:
+            check["type"] = "CheckStream"
+        check_stream = self._constructor.create_component(
+            COMPONENTS_CHECKER_TYPE_MAPPING[check["type"]],
+            check,
+            dict(),
+            emit_connector_builder_messages=self._emit_connector_builder_messages,
+        )
+        if isinstance(check_stream, ConnectionChecker):
+            return check_stream
+        else:
+            raise ValueError(
+                f"Expected to generate a ConnectionChecker component, but received {check_stream.__class__}"
+            )
+
+    @property
+    def dynamic_streams(self) -> List[Dict[str, Any]]:
+        return self._dynamic_stream_configs(
+            manifest=self._source_config,
+            config=self._config,
+            with_dynamic_stream_name=True,
+        )
 
     def _group_streams(
         self, config: Mapping[str, Any]
@@ -333,7 +695,7 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                         stream_name=declarative_stream.name,
                         schema_loader=declarative_stream._schema_loader, # type: ignore # We are accessing the private property but the public one is optional and we will remove this code soonish
                         retriever=retriever,
-                        message_repository=self.
+                        message_repository=self._message_repository,
                         max_records_limit=self._limits.max_records
                         if self._limits
                         else None,
@@ -370,7 +732,7 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                         stream_name=declarative_stream.name,
                         schema_loader=declarative_stream._schema_loader, # type: ignore # We are accessing the private property but the public one is optional and we will remove this code soonish
                         retriever=retriever,
-                        message_repository=self.
+                        message_repository=self._message_repository,
                         max_records_limit=self._limits.max_records
                         if self._limits
                         else None,
@@ -404,7 +766,7 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                         stream_name=declarative_stream.name,
                         schema_loader=declarative_stream._schema_loader, # type: ignore # We are accessing the private property but the public one is optional and we will remove this code soonish
                         retriever=declarative_stream.retriever,
-                        message_repository=self.
+                        message_repository=self._message_repository,
                         max_records_limit=self._limits.max_records if self._limits else None,
                     ),
                     declarative_stream.retriever.stream_slicer,
@@ -416,7 +778,7 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                 final_state_cursor = FinalStateCursor(
                     stream_name=declarative_stream.name,
                     stream_namespace=declarative_stream.namespace,
-                    message_repository=self.
+                    message_repository=self._message_repository,
                 )
 
                 concurrent_streams.append(
@@ -468,7 +830,7 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
                         stream_name=declarative_stream.name,
                         schema_loader=declarative_stream._schema_loader, # type: ignore # We are accessing the private property but the public one is optional and we will remove this code soonish
                         retriever=retriever,
-                        message_repository=self.
+                        message_repository=self._message_repository,
                         max_records_limit=self._limits.max_records if self._limits else None,
                     ),
                     perpartition_cursor,
@@ -502,6 +864,127 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):
 
         return concurrent_streams, synchronous_streams
 
+    def _stream_configs(
+        self, manifest: Mapping[str, Any], config: Mapping[str, Any]
+    ) -> List[Dict[str, Any]]:
+        # This has a warning flag for static, but after we finish part 4 we'll replace manifest with self._source_config
+        stream_configs = []
+        for current_stream_config in manifest.get("streams", []):
+            if (
+                "type" in current_stream_config
+                and current_stream_config["type"] == "ConditionalStreams"
+            ):
+                interpolated_boolean = InterpolatedBoolean(
+                    condition=current_stream_config.get("condition"),
+                    parameters={},
+                )
+
+                if interpolated_boolean.eval(config=config):
+                    stream_configs.extend(current_stream_config.get("streams", []))
+            else:
+                if "type" not in current_stream_config:
+                    current_stream_config["type"] = "DeclarativeStream"
+                stream_configs.append(current_stream_config)
+        return stream_configs
+
+    def _dynamic_stream_configs(
+        self,
+        manifest: Mapping[str, Any],
+        config: Mapping[str, Any],
+        with_dynamic_stream_name: Optional[bool] = None,
+    ) -> List[Dict[str, Any]]:
+        dynamic_stream_definitions: List[Dict[str, Any]] = manifest.get("dynamic_streams", [])
+        dynamic_stream_configs: List[Dict[str, Any]] = []
+        seen_dynamic_streams: Set[str] = set()
+
+        for dynamic_definition_index, dynamic_definition in enumerate(dynamic_stream_definitions):
+            components_resolver_config = dynamic_definition["components_resolver"]
+
+            if not components_resolver_config:
+                raise ValueError(
+                    f"Missing 'components_resolver' in dynamic definition: {dynamic_definition}"
+                )
+
+            resolver_type = components_resolver_config.get("type")
+            if not resolver_type:
+                raise ValueError(
+                    f"Missing 'type' in components resolver configuration: {components_resolver_config}"
+                )
+
+            if resolver_type not in COMPONENTS_RESOLVER_TYPE_MAPPING:
+                raise ValueError(
+                    f"Invalid components resolver type '{resolver_type}'. "
+                    f"Expected one of {list(COMPONENTS_RESOLVER_TYPE_MAPPING.keys())}."
+                )
+
+            if "retriever" in components_resolver_config:
+                components_resolver_config["retriever"]["requester"]["use_cache"] = True
+
+            # Create a resolver for dynamic components based on type
+            if resolver_type == "HttpComponentsResolver":
+                components_resolver = self._constructor.create_component(
+                    model_type=COMPONENTS_RESOLVER_TYPE_MAPPING[resolver_type],
+                    component_definition=components_resolver_config,
+                    config=config,
+                    stream_name=dynamic_definition.get("name"),
+                )
+            else:
+                components_resolver = self._constructor.create_component(
+                    model_type=COMPONENTS_RESOLVER_TYPE_MAPPING[resolver_type],
+                    component_definition=components_resolver_config,
+                    config=config,
+                )
+
+            stream_template_config = dynamic_definition["stream_template"]
+
+            for dynamic_stream in components_resolver.resolve_components(
+                stream_template_config=stream_template_config
+            ):
+                # Get the use_parent_parameters configuration from the dynamic definition
+                # Default to True for backward compatibility, since connectors were already using it by default when this param was added
+                use_parent_parameters = dynamic_definition.get("use_parent_parameters", True)
+
+                dynamic_stream = {
+                    **ManifestComponentTransformer().propagate_types_and_parameters(
+                        "", dynamic_stream, {}, use_parent_parameters=use_parent_parameters
+                    )
+                }
+
+                if "type" not in dynamic_stream:
+                    dynamic_stream["type"] = "DeclarativeStream"
+
+                # Ensure that each stream is created with a unique name
+                name = dynamic_stream.get("name")
+
+                if with_dynamic_stream_name:
+                    dynamic_stream["dynamic_stream_name"] = dynamic_definition.get(
+                        "name", f"dynamic_stream_{dynamic_definition_index}"
+                    )
+
+                if not isinstance(name, str):
+                    raise ValueError(
+                        f"Expected stream name {name} to be a string, got {type(name)}."
+                    )
+
+                if name in seen_dynamic_streams:
+                    error_message = f"Dynamic streams list contains a duplicate name: {name}. Please contact Airbyte Support."
+                    failure_type = FailureType.system_error
+
+                    if resolver_type == "ConfigComponentsResolver":
+                        error_message = f"Dynamic streams list contains a duplicate name: {name}. Please check your configuration."
+                        failure_type = FailureType.config_error
+
+                    raise AirbyteTracedException(
+                        message=error_message,
+                        internal_message=error_message,
+                        failure_type=failure_type,
+                    )
+
+                seen_dynamic_streams.add(name)
+                dynamic_stream_configs.append(dynamic_stream)
+
+        return dynamic_stream_configs
+
     def _is_concurrent_cursor_incremental_without_partition_routing(
         self,
         declarative_stream: DeclarativeStream,
|