airbyte-cdk 0.54.0__py3-none-any.whl → 0.55.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (36)
  1. airbyte_cdk/sources/concurrent_source/__init__.py +3 -0
  2. airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py +190 -0
  3. airbyte_cdk/sources/concurrent_source/concurrent_source.py +161 -0
  4. airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py +63 -0
  5. airbyte_cdk/sources/concurrent_source/partition_generation_completed_sentinel.py +17 -0
  6. airbyte_cdk/sources/concurrent_source/thread_pool_manager.py +97 -0
  7. airbyte_cdk/sources/streams/concurrent/abstract_stream.py +4 -4
  8. airbyte_cdk/sources/streams/concurrent/adapters.py +34 -12
  9. airbyte_cdk/sources/streams/concurrent/default_stream.py +79 -0
  10. airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py +7 -7
  11. airbyte_cdk/sources/streams/concurrent/partitions/partition.py +23 -0
  12. airbyte_cdk/sources/streams/concurrent/partitions/record.py +4 -3
  13. airbyte_cdk/sources/streams/concurrent/partitions/types.py +2 -3
  14. airbyte_cdk/sources/utils/slice_logger.py +5 -0
  15. {airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/METADATA +1 -1
  16. {airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/RECORD +35 -23
  17. unit_tests/sources/concurrent_source/__init__.py +3 -0
  18. unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py +105 -0
  19. unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py +14 -7
  20. unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py +2 -3
  21. unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py +44 -55
  22. unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py +24 -15
  23. unit_tests/sources/streams/concurrent/test_adapters.py +52 -32
  24. unit_tests/sources/streams/concurrent/test_concurrent_partition_generator.py +6 -5
  25. unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py +604 -0
  26. unit_tests/sources/streams/concurrent/test_cursor.py +1 -1
  27. unit_tests/sources/streams/concurrent/{test_thread_based_concurrent_stream.py → test_default_stream.py} +7 -144
  28. unit_tests/sources/streams/concurrent/test_partition_reader.py +2 -2
  29. unit_tests/sources/streams/concurrent/test_thread_pool_manager.py +98 -0
  30. unit_tests/sources/streams/test_stream_read.py +1 -2
  31. unit_tests/sources/test_concurrent_source.py +105 -0
  32. unit_tests/sources/test_source_read.py +461 -0
  33. airbyte_cdk/sources/streams/concurrent/thread_based_concurrent_stream.py +0 -221
  34. {airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/LICENSE.txt +0 -0
  35. {airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/WHEEL +0 -0
  36. {airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/top_level.txt +0 -0
airbyte_cdk/sources/streams/concurrent/adapters.py
@@ -22,11 +22,11 @@ from airbyte_cdk.sources.streams.concurrent.availability_strategy import (
     StreamUnavailable,
 )
 from airbyte_cdk.sources.streams.concurrent.cursor import Cursor, NoopCursor
+from airbyte_cdk.sources.streams.concurrent.default_stream import DefaultStream
 from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
 from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
-from airbyte_cdk.sources.streams.concurrent.thread_based_concurrent_stream import ThreadBasedConcurrentStream
 from airbyte_cdk.sources.streams.core import StreamData
 from airbyte_cdk.sources.utils.schema_helpers import InternalConfig
 from airbyte_cdk.sources.utils.slice_logger import SliceLogger
@@ -52,7 +52,6 @@ class StreamFacade(Stream):
         stream: Stream,
         source: AbstractSource,
         logger: logging.Logger,
-        max_workers: int,
         state: Optional[MutableMapping[str, Any]],
         cursor: Cursor,
     ) -> Stream:
@@ -73,28 +72,27 @@ class StreamFacade(Stream):
 
         message_repository = source.message_repository
         return StreamFacade(
-            ThreadBasedConcurrentStream(
+            DefaultStream(
                 partition_generator=StreamPartitionGenerator(
                     stream,
                     message_repository,
                     SyncMode.full_refresh if isinstance(cursor, NoopCursor) else SyncMode.incremental,
                     [cursor_field] if cursor_field is not None else None,
                     state,
+                    cursor,
                 ),
-                max_workers=max_workers,
                 name=stream.name,
                 namespace=stream.namespace,
                 json_schema=stream.get_json_schema(),
                 availability_strategy=StreamAvailabilityStrategy(stream, source),
                 primary_key=pk,
                 cursor_field=cursor_field,
-                slice_logger=source._slice_logger,
-                message_repository=message_repository,
                 logger=logger,
-                cursor=cursor,
             ),
             stream,
             cursor,
+            slice_logger=source._slice_logger,
+            logger=logger,
         )
 
     @property
@@ -132,13 +130,15 @@ class StreamFacade(Stream):
         else:
             return stream.cursor_field
 
-    def __init__(self, stream: AbstractStream, legacy_stream: Stream, cursor: Cursor):
+    def __init__(self, stream: AbstractStream, legacy_stream: Stream, cursor: Cursor, slice_logger: SliceLogger, logger: logging.Logger):
         """
         :param stream: The underlying AbstractStream
         """
         self._abstract_stream = stream
         self._legacy_stream = legacy_stream
         self._cursor = cursor
+        self._slice_logger = slice_logger
+        self._logger = logger
 
     def read_full_refresh(
         self,
@@ -177,8 +177,11 @@ class StreamFacade(Stream):
         yield from self._read_records()
 
     def _read_records(self) -> Iterable[StreamData]:
-        for record in self._abstract_stream.read():
-            yield record.data
+        for partition in self._abstract_stream.generate_partitions():
+            if self._slice_logger.should_log_slice_message(self._logger):
+                yield self._slice_logger.create_slice_log_message(partition.to_slice())
+            for record in partition.read():
+                yield record.data
 
     @property
     def name(self) -> str:
@@ -259,6 +262,7 @@ class StreamPartition(Partition):
         sync_mode: SyncMode,
         cursor_field: Optional[List[str]],
         state: Optional[MutableMapping[str, Any]],
+        cursor: Cursor,
     ):
         """
         :param stream: The stream to delegate to
@@ -271,6 +275,8 @@ class StreamPartition(Partition):
         self._sync_mode = sync_mode
         self._cursor_field = cursor_field
         self._state = state
+        self._cursor = cursor
+        self._is_closed = False
 
     def read(self) -> Iterable[Record]:
         """
@@ -294,7 +300,9 @@ class StreamPartition(Partition):
                 if isinstance(record_data, Mapping):
                     data_to_return = dict(record_data)
                     self._stream.transformer.transform(data_to_return, self._stream.get_json_schema())
-                    yield Record(data_to_return)
+                    record = Record(data_to_return, self._stream.name)
+                    self._cursor.observe(record)
+                    yield Record(data_to_return, self._stream.name)
                 else:
                     self._message_repository.emit_message(record_data)
         except Exception as e:
@@ -315,6 +323,16 @@ class StreamPartition(Partition):
         else:
             return hash(self._stream.name)
 
+    def stream_name(self) -> str:
+        return self._stream.name
+
+    def close(self) -> None:
+        self._cursor.close_partition(self)
+        self._is_closed = True
+
+    def is_closed(self) -> bool:
+        return self._is_closed
+
     def __repr__(self) -> str:
         return f"StreamPartition({self._stream.name}, {self._slice})"
 
@@ -334,6 +352,7 @@ class StreamPartitionGenerator(PartitionGenerator):
         sync_mode: SyncMode,
         cursor_field: Optional[List[str]],
         state: Optional[MutableMapping[str, Any]],
+        cursor: Cursor,
     ):
         """
         :param stream: The stream to delegate to
@@ -344,10 +363,13 @@ class StreamPartitionGenerator(PartitionGenerator):
         self._sync_mode = sync_mode
         self._cursor_field = cursor_field
         self._state = state
+        self._cursor = cursor
 
     def generate(self) -> Iterable[Partition]:
         for s in self._stream.stream_slices(sync_mode=self._sync_mode, cursor_field=self._cursor_field, stream_state=self._state):
-            yield StreamPartition(self._stream, copy.deepcopy(s), self.message_repository, self._sync_mode, self._cursor_field, self._state)
+            yield StreamPartition(
+                self._stream, copy.deepcopy(s), self.message_repository, self._sync_mode, self._cursor_field, self._state, self._cursor
+            )
 
 
 @deprecated("This class is experimental. Use at your own risk.")
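With this change StreamPartition talks to a Cursor directly: read() calls cursor.observe(record) for every record it yields, and close() calls cursor.close_partition(self) exactly once. The following stand-in is illustrative only (plain Python, no CDK imports; LoggingCursor is a hypothetical name) and shows the two hooks a cursor implementation receives under this contract.

class LoggingCursor:
    def observe(self, record) -> None:
        # Invoked by StreamPartition.read() for every record it yields.
        print(f"observed a record for stream {record.stream_name}")

    def close_partition(self, partition) -> None:
        # Invoked once by StreamPartition.close(); a real cursor would emit
        # or persist state for the finished partition here.
        print(f"closed partition {partition!r}")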
airbyte_cdk/sources/streams/concurrent/default_stream.py (new file)
@@ -0,0 +1,79 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+from functools import lru_cache
+from logging import Logger
+from typing import Any, Iterable, List, Mapping, Optional
+
+from airbyte_cdk.models import AirbyteStream, SyncMode
+from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream
+from airbyte_cdk.sources.streams.concurrent.availability_strategy import AbstractAvailabilityStrategy, StreamAvailability
+from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
+from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
+
+
+class DefaultStream(AbstractStream):
+    def __init__(
+        self,
+        partition_generator: PartitionGenerator,
+        name: str,
+        json_schema: Mapping[str, Any],
+        availability_strategy: AbstractAvailabilityStrategy,
+        primary_key: List[str],
+        cursor_field: Optional[str],
+        logger: Logger,
+        namespace: Optional[str] = None,
+    ) -> None:
+        self._stream_partition_generator = partition_generator
+        self._name = name
+        self._json_schema = json_schema
+        self._availability_strategy = availability_strategy
+        self._primary_key = primary_key
+        self._cursor_field = cursor_field
+        self._logger = logger
+        self._namespace = namespace
+
+    def generate_partitions(self) -> Iterable[Partition]:
+        yield from self._stream_partition_generator.generate()
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    def check_availability(self) -> StreamAvailability:
+        return self._availability_strategy.check_availability(self._logger)
+
+    @property
+    def cursor_field(self) -> Optional[str]:
+        return self._cursor_field
+
+    @lru_cache(maxsize=None)
+    def get_json_schema(self) -> Mapping[str, Any]:
+        return self._json_schema
+
+    def as_airbyte_stream(self) -> AirbyteStream:
+        stream = AirbyteStream(name=self.name, json_schema=dict(self._json_schema), supported_sync_modes=[SyncMode.full_refresh])
+
+        if self._namespace:
+            stream.namespace = self._namespace
+
+        if self._cursor_field:
+            stream.source_defined_cursor = True
+            stream.supported_sync_modes.append(SyncMode.incremental)
+            stream.default_cursor_field = [self._cursor_field]
+
+        keys = self._primary_key
+        if keys and len(keys) > 0:
+            stream.source_defined_primary_key = [keys]
+
+        return stream
+
+    def log_stream_sync_configuration(self) -> None:
+        self._logger.debug(
+            f"Syncing stream instance: {self.name}",
+            extra={
+                "primary_key": self._primary_key,
+                "cursor_field": self.cursor_field,
+            },
+        )
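DefaultStream replaces ThreadBasedConcurrentStream as a passive description of a stream: it no longer reads records itself, it only hands out partitions via generate_partitions(). A minimal consumption sketch, assuming `stream` is any object satisfying the AbstractStream interface shown here (for example a DefaultStream):

from typing import Any, Iterable, Mapping


def read_all(stream) -> Iterable[Mapping[str, Any]]:
    # Mirrors what StreamFacade._read_records does in the adapters.py diff above:
    # iterate partitions, then unwrap the data of each record in each partition.
    for partition in stream.generate_partitions():
        for record in partition.read():
            yield record.data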
airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py
@@ -4,8 +4,9 @@
 
 from queue import Queue
 
-from airbyte_cdk.sources.streams.concurrent.partitions.partition_generator import PartitionGenerator
-from airbyte_cdk.sources.streams.concurrent.partitions.types import PARTITIONS_GENERATED_SENTINEL, QueueItem
+from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel
+from airbyte_cdk.sources.streams.concurrent.abstract_stream import AbstractStream
+from airbyte_cdk.sources.streams.concurrent.partitions.types import QueueItem
 
 
 class PartitionEnqueuer:
@@ -13,15 +14,14 @@ class PartitionEnqueuer:
     Generates partitions from a partition generator and puts them in a queue.
     """
 
-    def __init__(self, queue: Queue[QueueItem], sentinel: PARTITIONS_GENERATED_SENTINEL) -> None:
+    def __init__(self, queue: Queue[QueueItem]) -> None:
         """
         :param queue: The queue to put the partitions in.
         :param sentinel: The sentinel to put in the queue when all the partitions have been generated.
         """
         self._queue = queue
-        self._sentinel = sentinel
 
-    def generate_partitions(self, partition_generator: PartitionGenerator) -> None:
+    def generate_partitions(self, stream: AbstractStream) -> None:
         """
         Generate partitions from a partition generator and put them in a queue.
         When all the partitions are added to the queue, a sentinel is added to the queue to indicate that all the partitions have been generated.
@@ -33,8 +33,8 @@ class PartitionEnqueuer:
        :return:
        """
        try:
-            for partition in partition_generator.generate():
+            for partition in stream.generate_partitions():
                 self._queue.put(partition)
-            self._queue.put(self._sentinel)
+            self._queue.put(PartitionGenerationCompletedSentinel(stream))
         except Exception as e:
             self._queue.put(e)
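The enqueuer no longer receives a sentinel; it is constructed with only the queue and builds a PartitionGenerationCompletedSentinel(stream) itself once the stream's partitions are exhausted. A small sketch against the 0.55.0 API shown above (airbyte-cdk installed is assumed; _OnePartitionStream is a hypothetical duck-typed stand-in, since only generate_partitions() is exercised here):

from queue import Queue

from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel
from airbyte_cdk.sources.streams.concurrent.partition_enqueuer import PartitionEnqueuer


class _OnePartitionStream:
    def generate_partitions(self):
        yield "partition-1"  # a real stream yields Partition objects


queue = Queue()
PartitionEnqueuer(queue).generate_partitions(_OnePartitionStream())

items = [queue.get_nowait() for _ in range(queue.qsize())]
assert items[0] == "partition-1"
assert isinstance(items[-1], PartitionGenerationCompletedSentinel)  # the sentinel is enqueued last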
airbyte_cdk/sources/streams/concurrent/partitions/partition.py
@@ -32,6 +32,29 @@ class Partition(ABC):
         """
         pass
 
+    @abstractmethod
+    def stream_name(self) -> str:
+        """
+        Returns the name of the stream that this partition is reading from.
+        :return: The name of the stream.
+        """
+        pass
+
+    @abstractmethod
+    def close(self) -> None:
+        """
+        Closes the partition.
+        """
+        pass
+
+    @abstractmethod
+    def is_closed(self) -> bool:
+        """
+        Returns whether the partition is closed.
+        :return:
+        """
+        pass
+
     @abstractmethod
     def __hash__(self) -> int:
         """
airbyte_cdk/sources/streams/concurrent/partitions/record.py
@@ -10,13 +10,14 @@ class Record:
     Represents a record read from a stream.
     """
 
-    def __init__(self, data: Mapping[str, Any]):
+    def __init__(self, data: Mapping[str, Any], stream_name: str):
         self.data = data
+        self.stream_name = stream_name
 
     def __eq__(self, other: Any) -> bool:
         if not isinstance(other, Record):
             return False
-        return self.data == other.data
+        return self.data == other.data and self.stream_name == other.stream_name
 
     def __repr__(self) -> str:
-        return f"Record(data={self.data})"
+        return f"Record(data={self.data}, stream_name={self.stream_name})"
airbyte_cdk/sources/streams/concurrent/partitions/types.py
@@ -4,11 +4,10 @@
 
 from typing import Union
 
+from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel
 from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
 from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
 
-PARTITIONS_GENERATED_SENTINEL = object
-
 
 class PartitionCompleteSentinel:
     """
@@ -26,4 +25,4 @@ class PartitionCompleteSentinel:
 """
 Typedef representing the items that can be added to the ThreadBasedConcurrentStream
 """
-QueueItem = Union[Record, Partition, PartitionCompleteSentinel, PARTITIONS_GENERATED_SENTINEL, Partition, Exception]
+QueueItem = Union[Record, Partition, PartitionCompleteSentinel, PartitionGenerationCompletedSentinel, Exception]
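QueueItem is what worker threads put on the shared queue, so consumers dispatch on its concrete type; the module-level PARTITIONS_GENERATED_SENTINEL object is replaced by the PartitionGenerationCompletedSentinel class. A dispatch sketch written only against the types visible in this diff (airbyte-cdk 0.55.0 assumed installed):

from airbyte_cdk.sources.concurrent_source.partition_generation_completed_sentinel import PartitionGenerationCompletedSentinel
from airbyte_cdk.sources.streams.concurrent.partitions.partition import Partition
from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
from airbyte_cdk.sources.streams.concurrent.partitions.types import PartitionCompleteSentinel, QueueItem


def handle(item: QueueItem) -> None:
    if isinstance(item, Exception):
        raise item
    if isinstance(item, PartitionGenerationCompletedSentinel):
        ...  # no more partitions will be produced for the stream carried by the sentinel
    elif isinstance(item, PartitionCompleteSentinel):
        ...  # a partition finished being read
    elif isinstance(item, Record):
        ...  # emit the record downstream
    elif isinstance(item, Partition):
        ...  # schedule the partition to be read by a worker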
airbyte_cdk/sources/utils/slice_logger.py
@@ -12,6 +12,11 @@ from airbyte_cdk.models import Type as MessageType
 
 
 class SliceLogger(ABC):
+    """
+    SliceLogger is an interface that allows us to log slices of data in a uniform way.
+    It is responsible for determining whether or not a slice should be logged and for creating the log message.
+    """
+
     SLICE_LOG_PREFIX = "slice:"
 
     def create_slice_log_message(self, _slice: Optional[Mapping[str, Any]]) -> AirbyteMessage:
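The new docstring describes the two halves of the interface: should_log_slice_message() decides, create_slice_log_message() builds the AirbyteMessage. A minimal sketch, assuming should_log_slice_message is the only abstract hook (that is how StreamFacade._read_records calls it in the adapters.py diff above); DemoSliceLogger is a hypothetical name:

import logging

from airbyte_cdk.sources.utils.slice_logger import SliceLogger


class DemoSliceLogger(SliceLogger):
    def should_log_slice_message(self, logger: logging.Logger) -> bool:
        return True  # always log; a real implementation might check debug level


logger = logging.getLogger("airbyte")
slice_logger = DemoSliceLogger()
if slice_logger.should_log_slice_message(logger):
    # Returns an AirbyteMessage wrapping the slice behind the "slice:" prefix.
    message = slice_logger.create_slice_log_message({"start": "2023-01-01", "end": "2023-01-31"})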
{airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: airbyte-cdk
-Version: 0.54.0
+Version: 0.55.0
 Summary: A framework for writing Airbyte Connectors.
 Home-page: https://github.com/airbytehq/airbyte
 Author: Airbyte
{airbyte_cdk-0.54.0.dist-info → airbyte_cdk-0.55.0.dist-info}/RECORD
@@ -30,6 +30,12 @@ airbyte_cdk/sources/connector_state_manager.py,sha256=wsmUgII398MazCTKxwLBLzeiU6
 airbyte_cdk/sources/http_config.py,sha256=OBZeuyFilm6NlDlBhFQvHhTWabEvZww6OHDIlZujIS0,730
 airbyte_cdk/sources/http_logger.py,sha256=v0kkpDtA0GUOgj6_3AayrYaBrSHBqG4t3MGbrtxaNmU,1437
 airbyte_cdk/sources/source.py,sha256=dk50z8Roc28MJ8FxWe652B-GwItO__bTZqFm7WOtHnw,4412
+airbyte_cdk/sources/concurrent_source/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
+airbyte_cdk/sources/concurrent_source/concurrent_read_processor.py,sha256=7A8bdOqg9Hpw37QgqoGO6eYBx8_pKikt_AJUcImp-x4,9715
+airbyte_cdk/sources/concurrent_source/concurrent_source.py,sha256=nmHZLmUwHYW5a8B-XpwVLZ2weys5oWTVYSMseW7WdYc,7836
+airbyte_cdk/sources/concurrent_source/concurrent_source_adapter.py,sha256=si5ipxvzCE2Pdusg19evr8ziG7eqBBuBuDPww6i3Amg,3223
+airbyte_cdk/sources/concurrent_source/partition_generation_completed_sentinel.py,sha256=oExaUlnDepGZjNmauIkFDCbWtxZvkBCFo1K0wAr4sRA,493
+airbyte_cdk/sources/concurrent_source/thread_pool_manager.py,sha256=huFp0uuG2kZAnbHY8oeDYuX0hfmP-rLOnJMa72ZuWt0,3905
 airbyte_cdk/sources/declarative/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQyRMXTs4GTvlRi3ImKnIioo,61
 airbyte_cdk/sources/declarative/create_partial.py,sha256=sUJOwD8hBzW4pxw2XhYlSTMgl-WMc5WpP5Oq_jo3fHw,3371
 airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=LtLvEpzKo86RzMO6n20-z4ECW6P0Yoi26HXRCSLP9K0,85049
@@ -191,20 +197,20 @@ airbyte_cdk/sources/streams/availability_strategy.py,sha256=7BM0qLvXS0QrlKvnVkBE
 airbyte_cdk/sources/streams/call_rate.py,sha256=5T4J8WxMNov76iXRUtD5KlM1CsROxuAQPwGQAZyvpHg,20555
 airbyte_cdk/sources/streams/core.py,sha256=bIuQV7Zs9JpIyNDcfPCbyzv-BWDr_2ictK7s5AihLZQ,16025
 airbyte_cdk/sources/streams/concurrent/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
-airbyte_cdk/sources/streams/concurrent/abstract_stream.py,sha256=GCcRvUixoDOkNPy0vK37xdGxYaOfZXxVH6InzndduaE,3525
-airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=yYpmVHwRkanyz1Pfm1dbZt_Q93pGnY8cmVKDLwiFTBM,17325
+airbyte_cdk/sources/streams/concurrent/abstract_stream.py,sha256=W7WEz6FrfAjb0o_msnMBIESSVO1qJC2_A8ocYg55Rw4,3579
+airbyte_cdk/sources/streams/concurrent/adapters.py,sha256=kBlHLeHi_xg_rtPMzfyU4Osh-k7K5NLUMOMfAPwiwrk,18090
 airbyte_cdk/sources/streams/concurrent/availability_strategy.py,sha256=8xDRpfktnARBbRi_RwznvKuoGrpPF2b6tQyloMwogkM,2013
 airbyte_cdk/sources/streams/concurrent/cursor.py,sha256=vvQeY-IkJ8gfwKfCZTLTVlBYA9gVgQX6bYdWUT0D-q4,6504
+airbyte_cdk/sources/streams/concurrent/default_stream.py,sha256=w83pvFbw9vjfhbovw-LrCFiwQMO8hfo1Vm-1CB1SeXQ,2777
 airbyte_cdk/sources/streams/concurrent/exceptions.py,sha256=-WETGIY5_QFmVeDFiqm4WhRJ_nNCkfcDwOQqx6cSqrI,365
-airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py,sha256=uAUhCkxFOaptDJfIEDmFnnF4xn4coG5kvE4B_5tVx14,1557
+airbyte_cdk/sources/streams/concurrent/partition_enqueuer.py,sha256=Mmn0hYq2xWe2a0WOpZPF3iZNozfmv7vY37LgfdY7DVo,1570
 airbyte_cdk/sources/streams/concurrent/partition_reader.py,sha256=H8sGVVGx6uKMSUehRaqmVbE19DE3cx3NivQ4sFj8wbk,1303
 airbyte_cdk/sources/streams/concurrent/state_converter.py,sha256=PwqcRVPR6LQxWL0yvPTp_u2Uh0hBJU-BDSjPKiyJVEk,4689
-airbyte_cdk/sources/streams/concurrent/thread_based_concurrent_stream.py,sha256=M7CpPPBswHTYjG4opiTOf5eWHOJ6i4TyP0v991pFxOo,10843
 airbyte_cdk/sources/streams/concurrent/partitions/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
-airbyte_cdk/sources/streams/concurrent/partitions/partition.py,sha256=tjXF8lZMvyfZaCYCHr5aTPwbVstmRjYZDwYAvLDY-ds,1312
+airbyte_cdk/sources/streams/concurrent/partitions/partition.py,sha256=o2QvDYZF3Tn9NbC5jc1UkDwMiCWq9fNGj493u2WFoko,1795
 airbyte_cdk/sources/streams/concurrent/partitions/partition_generator.py,sha256=_ymkkBr71_qt1fW0_MUqw96OfNBkeJngXQ09yolEDHw,441
-airbyte_cdk/sources/streams/concurrent/partitions/record.py,sha256=c87pzwl18pq1_3XLoKDXH_WwrskjbBnTGkxrF4uU5-A,469
-airbyte_cdk/sources/streams/concurrent/partitions/types.py,sha256=uc3aBg2kbp3mZry3RtmAwtFExKG2oQw2qG12tZWY514,849
+airbyte_cdk/sources/streams/concurrent/partitions/record.py,sha256=-Q3zLex3CHOXiB-KOZLbBZaPiQ_BLFJdknr6yoRz9I0,600
+airbyte_cdk/sources/streams/concurrent/partitions/types.py,sha256=iVARnsGOSdvlSCqAf-yxc4_PUT3oOR9B6cyVNcLTjY8,932
 airbyte_cdk/sources/streams/http/__init__.py,sha256=cTP2d7Wf0hYXaN20U0dtxKa1pFZ9rI-lBbkQ0UM1apQ,261
 airbyte_cdk/sources/streams/http/availability_strategy.py,sha256=MHgW42gwaevaCVnNLrUSE6WJHT4reeZ417nMWrmbC7o,6884
 airbyte_cdk/sources/streams/http/exceptions.py,sha256=OokLDI7W8hZvq9e15sL3em2AdwmzmcAl72Ms-i5l0Nw,1334
@@ -227,7 +233,7 @@ airbyte_cdk/sources/utils/catalog_helpers.py,sha256=Jo3F6NQE2O7aP4x7yGScwbvtPQyC
 airbyte_cdk/sources/utils/record_helper.py,sha256=lNtOK1rMUxB9cw6wIi3yNu85jlqTN5inTkZZCWvPKXA,1711
 airbyte_cdk/sources/utils/schema_helpers.py,sha256=_Kasvdo60OE1aHkrd2Q48OHrMJnZ8nSWliuAVbR7vJs,8483
 airbyte_cdk/sources/utils/schema_models.py,sha256=m1vOqNkkVYGblc492wKo11Zm5FK9F0-JoNb50aRZnew,3151
-airbyte_cdk/sources/utils/slice_logger.py,sha256=Dv0m6Y4YYbHKB-RoMZosKjMgarQZJc4dp7jmKdwilJU,1515
+airbyte_cdk/sources/utils/slice_logger.py,sha256=YeWSoZeOsQp9oZK7mick2J8KFdiY726LY2iiIj_--r4,1731
 airbyte_cdk/sources/utils/transform.py,sha256=4GYmO6bq33HF-a1in0dKQKqUOYI1bWItyuYF875bSQg,9493
 airbyte_cdk/sources/utils/types.py,sha256=41ZQR681t5TUnOScij58d088sb99klH_ZENFcaYro_g,175
 airbyte_cdk/utils/__init__.py,sha256=qZoNqzEKhIXdN_ZfvXlIGnmiDDjCFy6BVCzzWjUZcuU,294
@@ -256,11 +262,15 @@ unit_tests/singer/test_singer_helpers.py,sha256=pZV6VxJuK-3-FICNGmoGbokrA_zkaFZE
 unit_tests/singer/test_singer_source.py,sha256=edN_kv7dnYAdBveWdUYOs74ak0dK6p8uaX225h_ZILA,4442
 unit_tests/sources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 unit_tests/sources/test_abstract_source.py,sha256=V7zSpOk-jqfOz8FtnImAo_zDe-Q2TjPqD_l_T0QaiDw,48179
+unit_tests/sources/test_concurrent_source.py,sha256=NT4K0z-oz2OZBHE9xNQT0KUdI2wJ-5vNWLUHZlIYKKU,3552
 unit_tests/sources/test_config.py,sha256=lxjeaf48pOMF4Pf3-Z1ux_tHTyjRFCdG_hpnxw3e7uQ,2839
 unit_tests/sources/test_connector_state_manager.py,sha256=ynFxA63Cxe6t-wMMh9C6ByTlMAuk8W7H2FikDhnUEQ0,24264
 unit_tests/sources/test_http_logger.py,sha256=VT6DqgspI3DcRnoBQkkQX0z4dF_AOiYZ5P_zxmMW8oU,9004
 unit_tests/sources/test_integration_source.py,sha256=7DAWzuYwU_HXzhw-rRjjwQuQej-hVpNyzw_NLqQiJVc,3369
 unit_tests/sources/test_source.py,sha256=W0I4umL_d_OToLYYiRkjkJR6e-cCYjdV8zKc3uLvF0k,27999
+unit_tests/sources/test_source_read.py,sha256=AEFoJfzM0_5QQIJyKwGLK_kq_Vz_CBivImnUnXJQJ0I,17176
+unit_tests/sources/concurrent_source/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
+unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py,sha256=zsGnMcEsBedjW8wahil6LNqniil-3NXhyZd5W-80Km0,3665
 unit_tests/sources/declarative/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQyRMXTs4GTvlRi3ImKnIioo,61
 unit_tests/sources/declarative/external_component.py,sha256=lU2gL736bLEWtmrGm1B2k83RXt_3XkROimLIahZd5dg,293
 unit_tests/sources/declarative/test_create_partial.py,sha256=s_KIywQqt8RlauOCWNJVk3HC3KBTAtSwFTN6JVQgu80,2636
@@ -379,22 +389,24 @@ unit_tests/sources/message/test_repository.py,sha256=oiScwg4cAdnYDl7PPN1nZniDGpA
 unit_tests/sources/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 unit_tests/sources/streams/test_availability_strategy.py,sha256=vJrSEk9NwRghu0YsSNoMYHKWzA9UFemwyClpke8Mk2s,2315
 unit_tests/sources/streams/test_call_rate.py,sha256=5QsokqxIFoR438QTd7p_eb0K-LW6awZXDtQiMTAb_Qo,13069
-unit_tests/sources/streams/test_stream_read.py,sha256=so1M-vf8HxSGq-MVrMymtCvK0abdaT_X9AfV9i0uVm0,6879
+unit_tests/sources/streams/test_stream_read.py,sha256=xxyYV5jPsAptmI0awPO_VGWMaE-y80XMDCB6u87IPaY,6875
 unit_tests/sources/streams/test_streams_core.py,sha256=YOC7XqWFJ13Z4YuO9Nh4AR4AwpJ-s111vqPplFfpxk4,5059
 unit_tests/sources/streams/concurrent/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
-unit_tests/sources/streams/concurrent/test_adapters.py,sha256=v8_tv2GCUk73DyoiPEPFDBrTRCsXysR-Cw-DXkQnPn4,14743
-unit_tests/sources/streams/concurrent/test_concurrent_partition_generator.py,sha256=6ai_6AeRuiUFB0p5TcFGHMG2eiGFbGrmXhI41Oe5XYQ,1321
-unit_tests/sources/streams/concurrent/test_cursor.py,sha256=sqH8xE3GamETSSVqsdKafziAAm-y_j_MegES_C5ExMM,5790
-unit_tests/sources/streams/concurrent/test_partition_reader.py,sha256=eM5dzfmLKm9Lj-BfQUjAZRhCZzfvhk7AkKpcHGcoPfg,931
+unit_tests/sources/streams/concurrent/test_adapters.py,sha256=Y_c1vKCtGKEzrUSncmpgp0lgFnArmBrIrmLFaOIAxRg,15439
+unit_tests/sources/streams/concurrent/test_concurrent_partition_generator.py,sha256=v8yf19_sDgVcWop6WKotahlQiO6B8MwxhGi3AL4vHm8,1375
+unit_tests/sources/streams/concurrent/test_concurrent_read_processor.py,sha256=1578s0CtFVQALrv2slo12RIgNmNwwobJAFWfBre8jdc,23822
+unit_tests/sources/streams/concurrent/test_cursor.py,sha256=xsQ0zHKzU-iRnpTiAMvGRPbiL50zRJerDoloenkhcj0,5818
+unit_tests/sources/streams/concurrent/test_default_stream.py,sha256=VLF46ESoRqcoALYCdrdZ2NDl5s2T1fRRWsYAy2-IwYw,6502
+unit_tests/sources/streams/concurrent/test_partition_reader.py,sha256=2uj7uV3ie0BMb--aa3MUru-f4jLiYUR-Nl0r3EhwxLQ,951
 unit_tests/sources/streams/concurrent/test_state_converter.py,sha256=rvg8becWR1iPdm5TAanZssKj5_iw8dInE_uqmjqghZE,8349
-unit_tests/sources/streams/concurrent/test_thread_based_concurrent_stream.py,sha256=_jBMJIZ6Hu9mWX4v9SRUdtxvgntA-rQpNbbygBi6HXA,11629
+unit_tests/sources/streams/concurrent/test_thread_pool_manager.py,sha256=7L9Sv7VXULOHx3-KSFwFtzAY1X96wcPiPKGq38BQEVg,3699
 unit_tests/sources/streams/concurrent/scenarios/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
 unit_tests/sources/streams/concurrent/scenarios/incremental_scenarios.py,sha256=x77AQf8_O4dQ2aF1o800CzI0hOEyU8ayxoNdSOvxkhM,10495
-unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py,sha256=FdgEO-bWA_IDFIJb0W83qE4QCCZ8eexbn_Mq8LJq0iE,5040
-unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py,sha256=svWqPyIAJCr-TPm0zFk9_gXB8hluVQuet6TqbV6yW3g,14096
+unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py,sha256=OD_9R5fHt5Nf7hH8m28-UDoZJkY8iUBJLI73kd-u2BE,5794
+unit_tests/sources/streams/concurrent/scenarios/stream_facade_scenarios.py,sha256=v0yP5MRGYJAb9bp2yXnp5yUmYKJ6aAKjHcNHigL_ONY,13981
 unit_tests/sources/streams/concurrent/scenarios/test_concurrent_scenarios.py,sha256=sQpvIJa5-Iv03KZfC2sP2zB8XSPCZAjLpUMpNBOA-xM,3897
-unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py,sha256=kiZ6VvQywg073FtrpP9AD3yIwSKbalVyfOM2Ksu3sZI,13554
-unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py,sha256=G4Em5zfAd9ExzDaD-86nabxWHj9wn5HT3Mfz37UNiME,5310
+unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_scenarios.py,sha256=KqCLsXB_9rV4hNdSPrNynK3G-UIsipqsZT6X0Z-iM5E,13175
+unit_tests/sources/streams/concurrent/scenarios/thread_based_concurrent_stream_source_builder.py,sha256=aMtEOpCkxH-v2BBOYj4xABzPKcDYh_jieGfaIp4hy9w,5727
 unit_tests/sources/streams/concurrent/scenarios/utils.py,sha256=Pl1F4asW8AvV6bV5W3Qg21GiLqfdMT_rOt1CsFA0aVM,1953
 unit_tests/sources/streams/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 unit_tests/sources/streams/http/test_availability_strategy.py,sha256=kuQJ5FIc4lffpHmEUVzvoN1QXQzvz8WEkFvzHItiipg,6063
@@ -411,8 +423,8 @@ unit_tests/utils/test_schema_inferrer.py,sha256=Z2jHBZ540wnYkylIdV_2xr75Vtwlxuyg
 unit_tests/utils/test_secret_utils.py,sha256=XKe0f1RHYii8iwE6ATmBr5JGDI1pzzrnZUGdUSMJQP4,4886
 unit_tests/utils/test_stream_status_utils.py,sha256=Xr8MZ2HWgTVIyMbywDvuYkRaUF4RZLQOT8-JjvcfR24,2970
 unit_tests/utils/test_traced_exception.py,sha256=bDFP5zMBizFenz6V2WvEZTRCKGB5ijh3DBezjbfoYIs,4198
-airbyte_cdk-0.54.0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
-airbyte_cdk-0.54.0.dist-info/METADATA,sha256=nq675Flnxy5fGE4WB4NNbRqlRvnXUWrEBEk0F4JYpn8,11983
-airbyte_cdk-0.54.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-airbyte_cdk-0.54.0.dist-info/top_level.txt,sha256=edvsDKTnE6sD2wfCUaeTfKf5gQIL6CPVMwVL2sWZzqo,51
-airbyte_cdk-0.54.0.dist-info/RECORD,,
+airbyte_cdk-0.55.0.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-0.55.0.dist-info/METADATA,sha256=2r6pncfFhwFGRcafz2bnvxd-3BwRcgkvHktsJhG0Z8Q,11983
+airbyte_cdk-0.55.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+airbyte_cdk-0.55.0.dist-info/top_level.txt,sha256=edvsDKTnE6sD2wfCUaeTfKf5gQIL6CPVMwVL2sWZzqo,51
+airbyte_cdk-0.55.0.dist-info/RECORD,,
unit_tests/sources/concurrent_source/__init__.py (new file)
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
unit_tests/sources/concurrent_source/test_concurrent_source_adapter.py (new file)
@@ -0,0 +1,105 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+import logging
+from typing import Any, List, Mapping, Optional, Tuple
+from unittest.mock import Mock
+
+import freezegun
+from airbyte_cdk.models import (
+    AirbyteMessage,
+    AirbyteRecordMessage,
+    AirbyteStream,
+    ConfiguredAirbyteCatalog,
+    ConfiguredAirbyteStream,
+    DestinationSyncMode,
+    SyncMode,
+)
+from airbyte_cdk.models import Type as MessageType
+from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter
+from airbyte_cdk.sources.message import InMemoryMessageRepository
+from airbyte_cdk.sources.streams import Stream
+from airbyte_cdk.sources.streams.concurrent.adapters import StreamFacade
+from airbyte_cdk.sources.streams.concurrent.cursor import NoopCursor
+
+
+class _MockSource(ConcurrentSourceAdapter):
+    def __init__(self, concurrent_source, _streams_to_is_concurrent, logger):
+        super().__init__(concurrent_source)
+        self._streams_to_is_concurrent = _streams_to_is_concurrent
+        self._logger = logger
+
+    message_repository = InMemoryMessageRepository()
+
+    def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]:
+        raise NotImplementedError
+
+    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+        return [
+            StreamFacade.create_from_stream(s, self, self._logger, None, NoopCursor()) if is_concurrent else s
+            for s, is_concurrent in self._streams_to_is_concurrent.items()
+        ]
+
+
+@freezegun.freeze_time("2020-01-01T00:00:00")
+def test_concurrent_source_adapter():
+    concurrent_source = Mock()
+    message_from_concurrent_stream = AirbyteMessage(
+        type=MessageType.RECORD,
+        record=AirbyteRecordMessage(
+            stream="s2",
+            data={"data": 2},
+            emitted_at=1577836800000,
+        ),
+    )
+    concurrent_source.read.return_value = iter([message_from_concurrent_stream])
+    regular_stream = _mock_stream("s1", [{"data": 1}])
+    concurrent_stream = _mock_stream("s2", [])
+    unavailable_stream = _mock_stream("s3", [{"data": 3}], False)
+    concurrent_stream.name = "s2"
+    logger = Mock()
+    adapter = _MockSource(concurrent_source, {regular_stream: False, concurrent_stream: True, unavailable_stream: False}, logger)
+
+    messages = list(adapter.read(logger, {}, _configured_catalog([regular_stream, concurrent_stream, unavailable_stream])))
+    records = [m for m in messages if m.type == MessageType.RECORD]
+
+    expected_records = [
+        message_from_concurrent_stream,
+        AirbyteMessage(
+            type=MessageType.RECORD,
+            record=AirbyteRecordMessage(
+                stream="s1",
+                data={"data": 1},
+                emitted_at=1577836800000,
+            ),
+        ),
+    ]
+
+    assert records == expected_records
+
+
+def _mock_stream(name: str, data=[], available: bool = True):
+    s = Mock()
+    s.name = name
+    s.as_airbyte_stream.return_value = AirbyteStream(
+        name=name,
+        json_schema={},
+        supported_sync_modes=[SyncMode.full_refresh],
+    )
+    s.check_availability.return_value = (True, None) if available else (False, "not available")
+    s.read_full_refresh.return_value = iter(data)
+    s.primary_key = None
+    return s
+
+
+def _configured_catalog(streams: List[Stream]):
+    return ConfiguredAirbyteCatalog(
+        streams=[
+            ConfiguredAirbyteStream(
+                stream=stream.as_airbyte_stream(),
+                sync_mode=SyncMode.full_refresh,
+                destination_sync_mode=DestinationSyncMode.overwrite,
+            )
+            for stream in streams
+        ]
+    )
unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py
@@ -1,11 +1,14 @@
 #
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 #
+import concurrent
 import logging
 from typing import Any, List, Mapping, Optional, Tuple, Union
 
 from airbyte_cdk.models import AirbyteStateMessage, ConfiguredAirbyteCatalog, ConnectorSpecification, DestinationSyncMode, SyncMode
-from airbyte_cdk.sources import AbstractSource
+from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource
+from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter
+from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager
 from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager
 from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository
 from airbyte_cdk.sources.streams import Stream
@@ -14,6 +17,7 @@ from airbyte_cdk.sources.streams.concurrent.cursor import ConcurrentCursor, Curs
 from airbyte_cdk.sources.streams.concurrent.state_converter import EpochValueConcurrentStreamStateConverter
 from airbyte_protocol.models import ConfiguredAirbyteStream
 from unit_tests.sources.file_based.scenarios.scenario_builder import SourceBuilder
+from unit_tests.sources.streams.concurrent.scenarios.thread_based_concurrent_stream_source_builder import NeverLogSliceLogger
 
 _NO_STATE = None
 
@@ -22,18 +26,21 @@ class StreamFacadeConcurrentConnectorStateConverter(EpochValueConcurrentStreamSt
     pass
 
 
-class StreamFacadeSource(AbstractSource):
+class StreamFacadeSource(ConcurrentSourceAdapter):
     def __init__(
         self,
         streams: List[Stream],
-        max_workers: int,
+        threadpool: concurrent.futures.ThreadPoolExecutor,
         cursor_field: Optional[CursorField] = None,
         cursor_boundaries: Optional[Tuple[str, str]] = None,
        input_state: Optional[List[Mapping[str, Any]]] = _NO_STATE,
     ):
-        self._streams = streams
-        self._max_workers = max_workers
         self._message_repository = InMemoryMessageRepository()
+        threadpool_manager = ThreadPoolManager(threadpool, streams[0].logger)
+        concurrent_source = ConcurrentSource(threadpool_manager, streams[0].logger, NeverLogSliceLogger(), self._message_repository)
+        super().__init__(concurrent_source)
+        self._streams = streams
+        self._threadpool = threadpool_manager
         self._cursor_field = cursor_field
         self._cursor_boundaries = cursor_boundaries
         self._state = [AirbyteStateMessage.parse_obj(s) for s in input_state] if input_state else None
@@ -49,7 +56,6 @@ class StreamFacadeSource(AbstractSource):
                 stream,
                 self,
                 stream.logger,
-                self._max_workers,
                 state_converter.get_concurrent_stream_state(state_manager.get_stream_state(stream.name, stream.namespace)),
                 ConcurrentCursor(
                     stream.name,
@@ -115,4 +121,5 @@ class StreamFacadeSourceBuilder(SourceBuilder[StreamFacadeSource]):
         return self
 
     def build(self, configured_catalog: Optional[Mapping[str, Any]]) -> StreamFacadeSource:
-        return StreamFacadeSource(self._streams, self._max_workers, self._cursor_field, self._cursor_boundaries, self._input_state)
+        threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=self._max_workers, thread_name_prefix="workerpool")
+        return StreamFacadeSource(self._streams, threadpool, self._cursor_field, self._cursor_boundaries, self._input_state)
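For reference, this is the wiring pattern the builder now follows, sketched with the constructor arguments taken from the code above (airbyte-cdk 0.55.0 assumed installed). MyConcurrentSource and _QuietSliceLogger are hypothetical names, and a real connector would supply its own streams, logger, and slice logger; there may also be other ways to construct ConcurrentSource than the positional call used here.

import concurrent.futures
import logging

from airbyte_cdk.sources.concurrent_source.concurrent_source import ConcurrentSource
from airbyte_cdk.sources.concurrent_source.concurrent_source_adapter import ConcurrentSourceAdapter
from airbyte_cdk.sources.concurrent_source.thread_pool_manager import ThreadPoolManager
from airbyte_cdk.sources.message import InMemoryMessageRepository
from airbyte_cdk.sources.utils.slice_logger import SliceLogger


class _QuietSliceLogger(SliceLogger):
    def should_log_slice_message(self, logger: logging.Logger) -> bool:
        return False  # never emit slice log messages


class MyConcurrentSource(ConcurrentSourceAdapter):
    def __init__(self, streams):
        logger = logging.getLogger("airbyte")
        self._message_repository = InMemoryMessageRepository()
        # Wrap a standard ThreadPoolExecutor, then hand everything to ConcurrentSource,
        # mirroring the StreamFacadeSource constructor in the diff above.
        threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=4, thread_name_prefix="workerpool")
        concurrent_source = ConcurrentSource(
            ThreadPoolManager(threadpool, logger), logger, _QuietSliceLogger(), self._message_repository
        )
        super().__init__(concurrent_source)
        self._streams = streams

    def check_connection(self, logger, config):
        return True, None

    def streams(self, config):
        return self._streams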