airbyte-cdk 6.54.10__py3-none-any.whl → 6.55.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_cdk/sources/declarative/concurrent_declarative_source.py +1 -1
- airbyte_cdk/sources/declarative/declarative_component_schema.yaml +31 -0
- airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +64 -55
- airbyte_cdk/sources/declarative/interpolation/jinja.py +4 -2
- airbyte_cdk/sources/declarative/manifest_declarative_source.py +28 -9
- airbyte_cdk/sources/declarative/models/declarative_component_schema.py +23 -2
- airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +2 -2
- {airbyte_cdk-6.54.10.dist-info → airbyte_cdk-6.55.0.dist-info}/METADATA +1 -1
- {airbyte_cdk-6.54.10.dist-info → airbyte_cdk-6.55.0.dist-info}/RECORD +13 -13
- {airbyte_cdk-6.54.10.dist-info → airbyte_cdk-6.55.0.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.54.10.dist-info → airbyte_cdk-6.55.0.dist-info}/LICENSE_SHORT +0 -0
- {airbyte_cdk-6.54.10.dist-info → airbyte_cdk-6.55.0.dist-info}/WHEEL +0 -0
- {airbyte_cdk-6.54.10.dist-info → airbyte_cdk-6.55.0.dist-info}/entry_points.txt +0 -0
--- a/airbyte_cdk/sources/declarative/concurrent_declarative_source.py
+++ b/airbyte_cdk/sources/declarative/concurrent_declarative_source.py
@@ -202,7 +202,7 @@ class ConcurrentDeclarativeSource(ManifestDeclarativeSource, Generic[TState]):

         # Combine streams and dynamic_streams. Note: both cannot be empty at the same time,
         # and this is validated during the initialization of the source.
-        streams = self._stream_configs(self._source_config) + self._dynamic_stream_configs(
+        streams = self._stream_configs(self._source_config, config) + self._dynamic_stream_configs(
             self._source_config, config
         )

--- a/airbyte_cdk/sources/declarative/declarative_component_schema.yaml
+++ b/airbyte_cdk/sources/declarative/declarative_component_schema.yaml
@@ -25,6 +25,7 @@ properties:
     type: array
     items:
       anyOf:
+        - "$ref": "#/definitions/ConditionalStreams"
         - "$ref": "#/definitions/DeclarativeStream"
         - "$ref": "#/definitions/StateDelegatingStream"
   dynamic_streams:
@@ -424,6 +425,36 @@ definitions:
     $parameters:
       type: object
       additionalProperties: true
+  ConditionalStreams:
+    title: Conditional Streams
+    description: Streams that are only available while performing a connector operation when the condition is met.
+    type: object
+    required:
+      - type
+      - streams
+      - condition
+    properties:
+      type:
+        type: string
+        enum: [ConditionalStreams]
+      condition:
+        title: Condition
+        description: Condition that will be evaluated to determine if a set of streams should be available.
+        type: string
+        interpolation_context:
+          - config
+          - parameters
+        examples:
+          - "{{ config['is_sandbox'] }}"
+      streams:
+        title: Streams
+        description: Streams that will be used during an operation based on the condition.
+        type: array
+        items:
+          "$ref": "#/definitions/DeclarativeStream"
+      $parameters:
+        type: object
+        additionalProperties: true
   ConstantBackoffStrategy:
     title: Constant Backoff
     description: Backoff strategy with a constant backoff interval.
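Note: a ConditionalStreams block slots directly into a manifest's top-level streams list next to regular stream definitions. A minimal sketch, written as a Python manifest fragment (the stream bodies are trimmed to a name for brevity, and "is_sandbox" is just the schema's own example flag; a real DeclarativeStream also needs a retriever and schema loader):

    # "customers" is always available; "sandbox_events" only exists when
    # config["is_sandbox"] evaluates truthy.
    streams = [
        {"type": "DeclarativeStream", "name": "customers"},
        {
            "type": "ConditionalStreams",
            "condition": "{{ config['is_sandbox'] }}",
            "streams": [
                {"type": "DeclarativeStream", "name": "sandbox_events"},
            ],
        },
    ]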
--- a/airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py
+++ b/airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py
@@ -9,7 +9,7 @@ import time
 from collections import OrderedDict
 from copy import deepcopy
 from datetime import timedelta
-from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional
+from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional

 from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
 from airbyte_cdk.sources.declarative.incremental.global_substream_cursor import (
@@ -66,8 +66,8 @@ class ConcurrentPerPartitionCursor(Cursor):
     _GLOBAL_STATE_KEY = "state"
     _PERPARTITION_STATE_KEY = "states"
     _IS_PARTITION_DUPLICATION_LOGGED = False
-    _KEY = 0
-    _VALUE = 1
+    _PARENT_STATE = 0
+    _GENERATION_SEQUENCE = 1

     def __init__(
         self,
@@ -99,19 +99,29 @@ class ConcurrentPerPartitionCursor(Cursor):
         self._semaphore_per_partition: OrderedDict[str, threading.Semaphore] = OrderedDict()

         # Parent-state tracking: store each partition’s parent state in creation order
-        self._partition_parent_state_map: OrderedDict[str, Mapping[str, Any]] = OrderedDict()
+        self._partition_parent_state_map: OrderedDict[str, tuple[Mapping[str, Any], int]] = (
+            OrderedDict()
+        )
+        self._parent_state: Optional[StreamState] = None
+
+        # Tracks when the last slice for partition is emitted
+        self._partitions_done_generating_stream_slices: set[str] = set()
+        # Used to track the index of partitions that are not closed yet
+        self._processing_partitions_indexes: List[int] = list()
+        self._generated_partitions_count: int = 0
+        # Dictionary to map partition keys to their index
+        self._partition_key_to_index: dict[str, int] = {}

-        self._finished_partitions: set[str] = set()
         self._lock = threading.Lock()
-        self._timer = Timer()
-        self._new_global_cursor: Optional[StreamState] = None
         self._lookback_window: int = 0
-        self._parent_state: Optional[StreamState] = None
+        self._new_global_cursor: Optional[StreamState] = None
         self._number_of_partitions: int = 0
         self._use_global_cursor: bool = use_global_cursor
         self._partition_serializer = PerPartitionKeySerializer()
+
         # Track the last time a state message was emitted
         self._last_emission_time: float = 0.0
+        self._timer = Timer()

         self._set_initial_state(stream_state)

@@ -157,60 +167,37 @@ class ConcurrentPerPartitionCursor(Cursor):
         self._cursor_per_partition[partition_key].close_partition(partition=partition)
         cursor = self._cursor_per_partition[partition_key]
         if (
-            partition_key in self._finished_partitions
+            partition_key in self._partitions_done_generating_stream_slices
             and self._semaphore_per_partition[partition_key]._value == 0
         ):
             self._update_global_cursor(cursor.state[self.cursor_field.cursor_field_key])

+        # Clean up the partition if it is fully processed
+        self._cleanup_if_done(partition_key)
+
         self._check_and_update_parent_state()

         self._emit_state_message()

     def _check_and_update_parent_state(self) -> None:
-        """
-        Pop the leftmost partition state from _partition_parent_state_map only if
-        *all partitions* up to (and including) that partition key in _semaphore_per_partition
-        are fully finished (i.e. in _finished_partitions and semaphore._value == 0).
-        Additionally, delete finished semaphores with a value of 0 to free up memory,
-        as they are only needed to track errors and completion status.
-        """
         last_closed_state = None

         while self._partition_parent_state_map:
-            # Look at the earliest partition key in creation order
-            earliest_key = next(iter(self._partition_parent_state_map))
-
-            # Verify ALL partitions from the left up to earliest_key are finished
-            all_left_finished = True
-            for p_key, sem in list(
-                self._semaphore_per_partition.items()
-            ):  # Use list to allow modification during iteration
-                # If any earlier partition is still not finished, we must stop
-                if p_key not in self._finished_partitions or sem._value != 0:
-                    all_left_finished = False
-                    break
-                # Once we've reached earliest_key in the semaphore order, we can stop checking
-                if p_key == earliest_key:
-                    break
-
-            # If the partitions up to earliest_key are not all finished, break the while-loop
-            if not all_left_finished:
-                break
+            earliest_key, (candidate_state, candidate_seq) = next(
+                iter(self._partition_parent_state_map.items())
+            )

-            # Pop the leftmost entry from the parent-state map
-            _, closed_parent_state = self._partition_parent_state_map.popitem(last=False)
-            last_closed_state = closed_parent_state
+            # if any partition that started <= candidate_seq is still open, we must wait
+            if (
+                self._processing_partitions_indexes
+                and self._processing_partitions_indexes[0] <= candidate_seq
+            ):
+                break

-            # Clean up finished semaphores with value 0 to free memory
-            for p_key in list(self._semaphore_per_partition.keys()):
-                sem = self._semaphore_per_partition[p_key]
-                if p_key in self._finished_partitions and sem._value == 0:
-                    del self._semaphore_per_partition[p_key]
-                    logger.debug(f"Deleted finished semaphore for partition {p_key} with value 0")
-                if p_key == earliest_key:
-                    break
+            # safe to pop
+            self._partition_parent_state_map.popitem(last=False)
+            last_closed_state = candidate_state

-        # Update _parent_state if we popped at least one partition
         if last_closed_state is not None:
             self._parent_state = last_closed_state

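Note: the rewrite replaces the old left-to-right semaphore walk with generation sequence numbers. Every partition is stamped with a monotonically increasing index at creation, _processing_partitions_indexes keeps the indexes of still-open partitions in order, and a parent-state snapshot taken at sequence seq commits only once no partition with an index <= seq remains open. A standalone toy sketch of that commit rule (illustrative data, not the CDK class itself):

    from collections import OrderedDict

    # Each value is (parent_state_snapshot, generation_sequence).
    parent_state_map = OrderedDict(
        p1=({"parent_cursor": "2024-01-01"}, 0),
        p2=({"parent_cursor": "2024-02-01"}, 1),
    )
    open_partition_indexes = [1]  # partition 0 has closed, partition 1 is still open

    committed = None
    while parent_state_map:
        _, (candidate_state, candidate_seq) = next(iter(parent_state_map.items()))
        if open_partition_indexes and open_partition_indexes[0] <= candidate_seq:
            break  # an older-or-equal partition is still open, so it is unsafe to commit
        parent_state_map.popitem(last=False)
        committed = candidate_state

    print(committed)  # {'parent_cursor': '2024-01-01'}: only the seq-0 snapshot commits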
@@ -289,18 +276,24 @@ class ConcurrentPerPartitionCursor(Cursor):
             if not self._IS_PARTITION_DUPLICATION_LOGGED:
                 logger.warning(f"Partition duplication detected for stream {self._stream_name}")
                 self._IS_PARTITION_DUPLICATION_LOGGED = True
+            return
         else:
             self._semaphore_per_partition[partition_key] = threading.Semaphore(0)

         with self._lock:
+            seq = self._generated_partitions_count
+            self._generated_partitions_count += 1
+            self._processing_partitions_indexes.append(seq)
+            self._partition_key_to_index[partition_key] = seq
+
             if (
                 len(self._partition_parent_state_map) == 0
                 or self._partition_parent_state_map[
                     next(reversed(self._partition_parent_state_map))
-                ]
+                ][self._PARENT_STATE]
                 != parent_state
             ):
-                self._partition_parent_state_map[partition_key] = deepcopy(parent_state)
+                self._partition_parent_state_map[partition_key] = (deepcopy(parent_state), seq)

         for cursor_slice, is_last_slice, _ in iterate_with_last_flag_and_state(
             cursor.stream_slices(),
@@ -308,7 +301,7 @@ class ConcurrentPerPartitionCursor(Cursor):
         ):
             self._semaphore_per_partition[partition_key].release()
             if is_last_slice:
-                self._finished_partitions.add(partition_key)
+                self._partitions_done_generating_stream_slices.add(partition_key)
             yield StreamSlice(
                 partition=partition, cursor_slice=cursor_slice, extra_fields=partition.extra_fields
             )
@@ -338,14 +331,11 @@ class ConcurrentPerPartitionCursor(Cursor):
         while len(self._cursor_per_partition) > self.DEFAULT_MAX_PARTITIONS_NUMBER - 1:
             # Try removing finished partitions first
             for partition_key in list(self._cursor_per_partition.keys()):
-                if partition_key in self._finished_partitions and (
-                    partition_key not in self._semaphore_per_partition
-                    or self._semaphore_per_partition[partition_key]._value == 0
-                ):
+                if partition_key not in self._partition_key_to_index:
                     oldest_partition = self._cursor_per_partition.pop(
                         partition_key
                     )  # Remove the oldest partition
-                    logger.warning(
+                    logger.debug(
                         f"The maximum number of partitions has been reached. Dropping the oldest finished partition: {oldest_partition}. Over limit: {self._number_of_partitions - self.DEFAULT_MAX_PARTITIONS_NUMBER}."
                     )
                     break
@@ -474,6 +464,25 @@ class ConcurrentPerPartitionCursor(Cursor):
         ):
             self._new_global_cursor = {self.cursor_field.cursor_field_key: copy.deepcopy(value)}

+    def _cleanup_if_done(self, partition_key: str) -> None:
+        """
+        Free every in-memory structure that belonged to a completed partition:
+        cursor, semaphore, flag inside `_finished_partitions`
+        """
+        if not (
+            partition_key in self._partitions_done_generating_stream_slices
+            and self._semaphore_per_partition[partition_key]._value == 0
+        ):
+            return
+
+        self._semaphore_per_partition.pop(partition_key, None)
+        self._partitions_done_generating_stream_slices.discard(partition_key)
+
+        seq = self._partition_key_to_index.pop(partition_key)
+        self._processing_partitions_indexes.remove(seq)
+
+        logger.debug(f"Partition {partition_key} fully processed and cleaned up.")
+
     def _to_partition_key(self, partition: Mapping[str, Any]) -> str:
         return self._partition_serializer.to_partition_key(partition)

--- a/airbyte_cdk/sources/declarative/interpolation/jinja.py
+++ b/airbyte_cdk/sources/declarative/interpolation/jinja.py
@@ -147,16 +147,18 @@ class JinjaInterpolation(Interpolation):
         # It can be returned as is
         return s

+    @staticmethod
     @cache
-    def _find_undeclared_variables(self, s: Optional[str]) -> Set[str]:
+    def _find_undeclared_variables(s: Optional[str]) -> Set[str]:
         """
         Find undeclared variables and cache them
         """
         ast = _ENVIRONMENT.parse(s)  # type: ignore # parse is able to handle None
         return meta.find_undeclared_variables(ast)

+    @staticmethod
     @cache
-    def _compile(self, s: str) -> Template:
+    def _compile(s: str) -> Template:
         """
         We must cache the Jinja Template ourselves because we're using `from_string` instead of a template loader
         """
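Note: the added @staticmethod decorators are not cosmetic when combined with @cache. On an instance method the cache key includes self, so identical templates would be compiled once per JinjaInterpolation instance and every instance would be pinned in memory by the cache; as static methods the key is the template string alone. A minimal sketch of the static variant, assuming that is the motivation (the class below is a stand-in, not the CDK's):

    from functools import cache

    class TemplateCompiler:
        # As a staticmethod the cache key is just `s`: one shared entry per
        # distinct template string, shared across all callers.
        @staticmethod
        @cache
        def compile_template(s: str) -> str:
            return f"<compiled:{s}>"  # stand-in for _ENVIRONMENT.from_string(s)

    a = TemplateCompiler.compile_template("{{ config['start_date'] }}")
    b = TemplateCompiler.compile_template("{{ config['start_date'] }}")
    assert a is b  # compiled once, served from the cache afterwards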
--- a/airbyte_cdk/sources/declarative/manifest_declarative_source.py
+++ b/airbyte_cdk/sources/declarative/manifest_declarative_source.py
@@ -8,7 +8,7 @@ import pkgutil
 from copy import deepcopy
 from importlib import metadata
 from types import ModuleType
-from typing import Any, Dict, Iterator, List, Mapping,
+from typing import Any, Dict, Iterator, List, Mapping, Optional, Set

 import orjson
 import yaml
@@ -35,6 +35,10 @@ from airbyte_cdk.models.airbyte_protocol_serializers import AirbyteMessageSerializer
 from airbyte_cdk.sources.declarative.checks import COMPONENTS_CHECKER_TYPE_MAPPING
 from airbyte_cdk.sources.declarative.checks.connection_checker import ConnectionChecker
 from airbyte_cdk.sources.declarative.declarative_source import DeclarativeSource
+from airbyte_cdk.sources.declarative.interpolation import InterpolatedBoolean
+from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
+    ConditionalStreams as ConditionalStreamsModel,
+)
 from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
     DeclarativeStream as DeclarativeStreamModel,
 )
@@ -300,7 +304,9 @@ class ManifestDeclarativeSource(DeclarativeSource):
             }
         )

-        stream_configs = self._stream_configs(self._source_config) + self.dynamic_streams
+        stream_configs = (
+            self._stream_configs(self._source_config, config=config) + self.dynamic_streams
+        )

         api_budget_model = self._source_config.get("api_budget")
         if api_budget_model:
@@ -319,7 +325,6 @@ class ManifestDeclarativeSource(DeclarativeSource):
             )
             for stream_config in self._initialize_cache_for_parent_streams(deepcopy(stream_configs))
         ]
-
         return source_streams

     @staticmethod
@@ -373,7 +378,6 @@ class ManifestDeclarativeSource(DeclarativeSource):
             )
         else:
             stream_config["retriever"]["requester"]["use_cache"] = True
-
         return stream_configs

     def spec(self, logger: logging.Logger) -> ConnectorSpecification:
@@ -477,12 +481,27 @@ class ManifestDeclarativeSource(DeclarativeSource):
         # No exception
         return parsed_version

-    def _stream_configs(self, manifest: Mapping[str, Any]) -> List[Dict[str, Any]]:
+    def _stream_configs(
+        self, manifest: Mapping[str, Any], config: Mapping[str, Any]
+    ) -> List[Dict[str, Any]]:
         # This has a warning flag for static, but after we finish part 4 we'll replace manifest with self._source_config
-        stream_configs: List[Dict[str, Any]] = manifest.get("streams", [])
-        for s in stream_configs:
-            if "type" not in s:
-                s["type"] = "DeclarativeStream"
+        stream_configs = []
+        for current_stream_config in manifest.get("streams", []):
+            if (
+                "type" in current_stream_config
+                and current_stream_config["type"] == "ConditionalStreams"
+            ):
+                interpolated_boolean = InterpolatedBoolean(
+                    condition=current_stream_config.get("condition"),
+                    parameters={},
+                )
+
+                if interpolated_boolean.eval(config=config):
+                    stream_configs.extend(current_stream_config.get("streams", []))
+            else:
+                if "type" not in current_stream_config:
+                    current_stream_config["type"] = "DeclarativeStream"
+                stream_configs.append(current_stream_config)
         return stream_configs

     def _dynamic_stream_configs(
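Note: the gate is a plain InterpolatedBoolean evaluated against the connector config, so its behavior can be checked in isolation. A quick sketch (the "is_sandbox" key mirrors the schema example):

    from airbyte_cdk.sources.declarative.interpolation import InterpolatedBoolean

    condition = InterpolatedBoolean(condition="{{ config['is_sandbox'] }}", parameters={})

    print(condition.eval(config={"is_sandbox": True}))   # True: nested streams are included
    print(condition.eval(config={"is_sandbox": False}))  # False: nested streams are dropped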
--- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py
+++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py
@@ -1,3 +1,5 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+
 # generated by datamodel-codegen:
 #   filename: declarative_component_schema.yaml

@@ -2168,7 +2170,7 @@ class DeclarativeSource1(BaseModel):

     type: Literal["DeclarativeSource"]
     check: Union[CheckStream, CheckDynamicStream]
-    streams: List[Union[DeclarativeStream, StateDelegatingStream]]
+    streams: List[Union[ConditionalStreams, DeclarativeStream, StateDelegatingStream]]
     dynamic_streams: Optional[List[DynamicDeclarativeStream]] = None
     version: str = Field(
         ...,
@@ -2201,7 +2203,9 @@ class DeclarativeSource2(BaseModel):

     type: Literal["DeclarativeSource"]
     check: Union[CheckStream, CheckDynamicStream]
-    streams: Optional[List[Union[DeclarativeStream, StateDelegatingStream]]] = None
+    streams: Optional[List[Union[ConditionalStreams, DeclarativeStream, StateDelegatingStream]]] = (
+        None
+    )
     dynamic_streams: List[DynamicDeclarativeStream]
     version: str = Field(
         ...,
@@ -2280,6 +2284,22 @@ class SelectiveAuthenticator(BaseModel):
     parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")


+class ConditionalStreams(BaseModel):
+    type: Literal["ConditionalStreams"]
+    condition: str = Field(
+        ...,
+        description="Condition that will be evaluated to determine if a set of streams should be available.",
+        examples=["{{ config['is_sandbox'] }}"],
+        title="Condition",
+    )
+    streams: List[DeclarativeStream] = Field(
+        ...,
+        description="Streams that will be used during an operation based on the condition.",
+        title="Streams",
+    )
+    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
+
+
 class FileUploader(BaseModel):
     type: Literal["FileUploader"]
     requester: Union[HttpRequester, CustomRequester] = Field(
@@ -2936,6 +2956,7 @@ CompositeErrorHandler.update_forward_refs()
 DeclarativeSource1.update_forward_refs()
 DeclarativeSource2.update_forward_refs()
 SelectiveAuthenticator.update_forward_refs()
+ConditionalStreams.update_forward_refs()
 FileUploader.update_forward_refs()
 DeclarativeStream.update_forward_refs()
 SessionTokenAuthenticator.update_forward_refs()
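Note: the generated Pydantic model can be exercised on its own to validate a ConditionalStreams block before it reaches a source. A short sketch (the empty streams list only keeps the example compact; real blocks carry full DeclarativeStream definitions):

    from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
        ConditionalStreams,
    )

    block = ConditionalStreams(
        type="ConditionalStreams",
        condition="{{ config['is_sandbox'] }}",
        streams=[],
    )
    print(block.condition)  # {{ config['is_sandbox'] }}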
--- a/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py
+++ b/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py
@@ -3150,12 +3150,12 @@ class ModelToComponentFactory:
         This is needed because the URL is not set until the requester is created.
         """

-        _url = (
+        _url: str = (
             model.requester.url
             if hasattr(model.requester, "url") and model.requester.url is not None
             else requester.get_url()
         )
-        _url_base = (
+        _url_base: str = (
             model.requester.url_base
             if hasattr(model.requester, "url_base") and model.requester.url_base is not None
             else requester.get_url_base()
--- a/airbyte_cdk-6.54.10.dist-info/RECORD
+++ b/airbyte_cdk-6.55.0.dist-info/RECORD
@@ -85,11 +85,11 @@ airbyte_cdk/sources/declarative/checks/check_stream.py,sha256=QeExVmpSYjr_CnghHu
 airbyte_cdk/sources/declarative/checks/connection_checker.py,sha256=MBRJo6WJlZQHpIfOGaNOkkHUmgUl_4wDM6VPo41z5Ss,1383
 airbyte_cdk/sources/declarative/concurrency_level/__init__.py,sha256=5XUqrmlstYlMM0j6crktlKQwALek0uiz2D3WdM46MyA,191
 airbyte_cdk/sources/declarative/concurrency_level/concurrency_level.py,sha256=YIwCTCpOr_QSNW4ltQK0yUGWInI8PKNY216HOOegYLk,2101
-airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=
+airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=rQz9gXp3m8M8E201EWnD7BfeefDXhW3233GG_JLpdOQ,28546
 airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
 airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=_zGNGq31RNy_0QBLt_EcTvgPyhj7urPdx6oA3M5-r3o,3150
 airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
-airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=
+airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=ES1gwuWHAXC9z9Dk8i0kNGK-2omTtjexiNp0iwV54tE,178751
 airbyte_cdk/sources/declarative/declarative_source.py,sha256=qmyMnnet92eGc3C22yBtpvD5UZjqdhsAafP_zxI5wp8,1814
 airbyte_cdk/sources/declarative/declarative_stream.py,sha256=dCRlddBUSaJmBNBz1pSO1r2rTw8AP5d2_vlmIeGs2gg,10767
 airbyte_cdk/sources/declarative/decoders/__init__.py,sha256=JHb_0d3SE6kNY10mxA5YBEKPeSbsWYjByq1gUQxepoE,953
@@ -111,7 +111,7 @@ airbyte_cdk/sources/declarative/extractors/record_selector.py,sha256=vCpwX1PVRFP
 airbyte_cdk/sources/declarative/extractors/response_to_file_extractor.py,sha256=WJyA2OYIEgFpVP5Y3o0tIj69AV6IKkn9B16MeXaEItI,6513
 airbyte_cdk/sources/declarative/extractors/type_transformer.py,sha256=d6Y2Rfg8pMVEEnHllfVksWZdNVOU55yk34O03dP9muY,1626
 airbyte_cdk/sources/declarative/incremental/__init__.py,sha256=U1oZKtBaEC6IACmvziY9Wzg7Z8EgF4ZuR7NwvjlB_Sk,1255
-airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=
+airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py,sha256=araWk039M89c6lQHEUltfM1VI_xGw9gZIDXRWWF6SkM,22591
 airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py,sha256=Rbe6lJLTtZ5en33MwZiB9-H9-AwDMNHgwBZs8EqhYqk,22172
 airbyte_cdk/sources/declarative/incremental/declarative_cursor.py,sha256=5Bhw9VRPyIuCaD0wmmq_L3DZsa-rJgtKSEUzSd8YYD0,536
 airbyte_cdk/sources/declarative/incremental/global_substream_cursor.py,sha256=2tsE6FgXzemf4fZZ4uGtd8QpRBl9GJ2CRqSNJE5p0EI,16077
@@ -125,22 +125,22 @@ airbyte_cdk/sources/declarative/interpolation/interpolated_mapping.py,sha256=h36
 airbyte_cdk/sources/declarative/interpolation/interpolated_nested_mapping.py,sha256=myVaNtFqxOAwrbp93rgd1dhkqyuvXvET9rsimQ89ktc,1873
 airbyte_cdk/sources/declarative/interpolation/interpolated_string.py,sha256=CQkHqGlfa87G6VYMtBAQWin7ECKpfMdrDcg0JO5_rhc,3212
 airbyte_cdk/sources/declarative/interpolation/interpolation.py,sha256=9IoeuWam3L6GyN10L6U8xNWXmkt9cnahSDNkez1OmFY,982
-airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=
+airbyte_cdk/sources/declarative/interpolation/jinja.py,sha256=oFGKs3oX0xO6DOL4E9x8rhxwbEoRcgx4HJVIL1RQ9c4,7269
 airbyte_cdk/sources/declarative/interpolation/macros.py,sha256=xRcmjape4_WGmKMJpmBsKY0k4OHJDM46Hv3V-dlSz3w,5640
-airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=
+airbyte_cdk/sources/declarative/manifest_declarative_source.py,sha256=i87TixffTZVRg5m0J_QV_jl901M9BKJeqxf773pxzgA,25563
 airbyte_cdk/sources/declarative/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migration.py,sha256=V2lpYE9LJKvz6BUViHk4vaRGndxNABmPbDCtyYdkqaE,4013
 airbyte_cdk/sources/declarative/migrations/state_migration.py,sha256=KWPjealMLKSMtajXgkdGgKg7EmTLR-CqqD7UIh0-eDU,794
 airbyte_cdk/sources/declarative/models/__init__.py,sha256=nUFxNCiKeYRVXuZEKA7GD-lTHxsiKcQ8FitZjKhPIvE,100
 airbyte_cdk/sources/declarative/models/base_model_with_deprecations.py,sha256=Imnj3yef0aqRdLfaUxkIYISUb8YkiPrRH_wBd-x8HjM,5999
-airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=
+airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=xfmKKdsnPeP9hV_cjBfZEGMxdU0HJDuMwoQGrmTPYMo,126543
 airbyte_cdk/sources/declarative/parsers/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQyRMXTs4GTvlRi3ImKnIioo,61
 airbyte_cdk/sources/declarative/parsers/custom_code_compiler.py,sha256=nlVvHC511NUyDEEIRBkoeDTAvLqKNp-hRy8D19z8tdk,5941
 airbyte_cdk/sources/declarative/parsers/custom_exceptions.py,sha256=wnRUP0Xeru9Rbu5OexXSDN9QWDo8YU4tT9M2LDVOgGA,802
 airbyte_cdk/sources/declarative/parsers/manifest_component_transformer.py,sha256=2UdpCz3yi7ISZTyqkQXSSy3dMxeyOWqV7OlAS5b9GVg,11568
 airbyte_cdk/sources/declarative/parsers/manifest_normalizer.py,sha256=laBy7ebjA-PiNwc-50U4FHvMqS_mmHvnabxgFs4CjGw,17069
 airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py,sha256=pJmg78vqE5VfUrF_KJnWjucQ4k9IWFULeAxHCowrHXE,6806
-airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=
+airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py,sha256=IaVbbuJv7DvheUaW3iBSv1qChpU8Vm1k7ZF_uESZYFU,175315
 airbyte_cdk/sources/declarative/partition_routers/__init__.py,sha256=TBC9AkGaUqHm2IKHMPN6punBIcY5tWGULowcLoAVkfw,1109
 airbyte_cdk/sources/declarative/partition_routers/async_job_partition_router.py,sha256=VelO7zKqKtzMJ35jyFeg0ypJLQC0plqqIBNXoBW1G2E,3001
 airbyte_cdk/sources/declarative/partition_routers/cartesian_product_stream_slicer.py,sha256=c5cuVFM6NFkuQqG8Z5IwkBuwDrvXZN1CunUOM_L0ezg,6892
@@ -421,9 +421,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
-airbyte_cdk-6.54.10.dist-info/LICENSE.txt,sha256=
-airbyte_cdk-6.54.10.dist-info/LICENSE_SHORT,sha256=
-airbyte_cdk-6.54.10.dist-info/METADATA,sha256=
-airbyte_cdk-6.54.10.dist-info/WHEEL,sha256=
-airbyte_cdk-6.54.10.dist-info/entry_points.txt,sha256=
-airbyte_cdk-6.54.10.dist-info/RECORD,,
+airbyte_cdk-6.55.0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.55.0.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+airbyte_cdk-6.55.0.dist-info/METADATA,sha256=Me6noWhmTUus25avMry9Dor-Qd2mqi-gyOpRhqc05RI,6343
+airbyte_cdk-6.55.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.55.0.dist-info/entry_points.txt,sha256=AKWbEkHfpzzk9nF9tqBUaw1MbvTM4mGtEzmZQm0ZWvM,139
+airbyte_cdk-6.55.0.dist-info/RECORD,,