airbyte-cdk 0.61.2__py3-none-any.whl → 0.62.1__py3-none-any.whl

This diff compares the contents of two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
Files changed (33)
  1. airbyte_cdk/sources/abstract_source.py +14 -33
  2. airbyte_cdk/sources/connector_state_manager.py +16 -4
  3. airbyte_cdk/sources/file_based/file_based_source.py +87 -35
  4. airbyte_cdk/sources/file_based/stream/abstract_file_based_stream.py +3 -0
  5. airbyte_cdk/sources/file_based/stream/concurrent/adapters.py +15 -13
  6. airbyte_cdk/sources/file_based/stream/concurrent/cursor/__init__.py +5 -0
  7. airbyte_cdk/sources/file_based/stream/concurrent/{cursor.py → cursor/abstract_concurrent_file_based_cursor.py} +22 -44
  8. airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_concurrent_cursor.py +279 -0
  9. airbyte_cdk/sources/file_based/stream/concurrent/cursor/file_based_noop_cursor.py +56 -0
  10. airbyte_cdk/sources/file_based/stream/default_file_based_stream.py +11 -2
  11. airbyte_cdk/test/mock_http/mocker.py +3 -1
  12. airbyte_cdk/test/mock_http/response.py +9 -1
  13. airbyte_cdk/utils/traced_exception.py +1 -16
  14. {airbyte_cdk-0.61.2.dist-info → airbyte_cdk-0.62.1.dist-info}/METADATA +1 -1
  15. {airbyte_cdk-0.61.2.dist-info → airbyte_cdk-0.62.1.dist-info}/RECORD +33 -26
  16. unit_tests/sources/file_based/helpers.py +5 -0
  17. unit_tests/sources/file_based/scenarios/concurrent_incremental_scenarios.py +2860 -0
  18. unit_tests/sources/file_based/scenarios/incremental_scenarios.py +11 -0
  19. unit_tests/sources/file_based/scenarios/scenario_builder.py +6 -2
  20. unit_tests/sources/file_based/stream/concurrent/__init__.py +0 -0
  21. unit_tests/sources/file_based/stream/concurrent/test_adapters.py +365 -0
  22. unit_tests/sources/file_based/stream/concurrent/test_file_based_concurrent_cursor.py +462 -0
  23. unit_tests/sources/file_based/test_file_based_scenarios.py +45 -0
  24. unit_tests/sources/file_based/test_scenarios.py +16 -8
  25. unit_tests/sources/streams/concurrent/scenarios/stream_facade_builder.py +13 -2
  26. unit_tests/sources/test_abstract_source.py +36 -170
  27. unit_tests/sources/test_connector_state_manager.py +20 -13
  28. unit_tests/sources/test_integration_source.py +8 -25
  29. unit_tests/sources/test_source_read.py +1 -1
  30. unit_tests/test/mock_http/test_mocker.py +3 -1
  31. {airbyte_cdk-0.61.2.dist-info → airbyte_cdk-0.62.1.dist-info}/LICENSE.txt +0 -0
  32. {airbyte_cdk-0.61.2.dist-info → airbyte_cdk-0.62.1.dist-info}/WHEEL +0 -0
  33. {airbyte_cdk-0.61.2.dist-info → airbyte_cdk-0.62.1.dist-info}/top_level.txt +0 -0
unit_tests/sources/file_based/scenarios/incremental_scenarios.py

@@ -2,6 +2,7 @@
  # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
  #
 
+ from airbyte_cdk.sources.file_based.stream.cursor import DefaultFileBasedCursor
  from airbyte_cdk.test.state_builder import StateBuilder
  from unit_tests.sources.file_based.helpers import LowHistoryLimitCursor
  from unit_tests.sources.file_based.scenarios.file_based_source_builder import FileBasedSourceBuilder
@@ -37,6 +38,7 @@ single_csv_input_state_is_earlier_scenario = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_incremental_scenario_config(
  IncrementalScenarioConfig(
@@ -130,6 +132,7 @@ single_csv_file_is_skipped_if_same_modified_at_as_in_history = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_incremental_scenario_config(
  IncrementalScenarioConfig(
@@ -205,6 +208,7 @@ single_csv_file_is_synced_if_modified_at_is_more_recent_than_in_history = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_incremental_scenario_config(
  IncrementalScenarioConfig(
@@ -298,6 +302,7 @@ single_csv_no_input_state_scenario = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
@@ -397,6 +402,7 @@ multi_csv_same_timestamp_scenario = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
@@ -511,6 +517,7 @@ single_csv_input_state_is_later_scenario = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
@@ -615,6 +622,7 @@ multi_csv_different_timestamps_scenario = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
@@ -753,6 +761,7 @@ multi_csv_per_timestamp_scenario = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
@@ -913,6 +922,7 @@ multi_csv_skip_file_if_already_in_history = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
@@ -1059,6 +1069,7 @@ multi_csv_include_missing_files_within_history_range = (
  }
  )
  .set_file_type("csv")
+ .set_cursor_cls(DefaultFileBasedCursor)
  )
  .set_expected_catalog(
  {
unit_tests/sources/file_based/scenarios/scenario_builder.py

@@ -6,7 +6,7 @@ from copy import deepcopy
  from dataclasses import dataclass, field
  from typing import Any, Generic, List, Mapping, Optional, Set, Tuple, Type, TypeVar
 
- from airbyte_cdk.models import AirbyteAnalyticsTraceMessage, SyncMode
+ from airbyte_cdk.models import AirbyteAnalyticsTraceMessage, AirbyteStateMessage, SyncMode
  from airbyte_cdk.sources import AbstractSource
  from airbyte_cdk.sources.source import TState
  from airbyte_protocol.models import ConfiguredAirbyteCatalog
@@ -191,10 +191,14 @@ class TestScenarioBuilder(Generic[SourceType]):
  def build(self) -> "TestScenario[SourceType]":
  if self.source_builder is None:
  raise ValueError("source_builder is not set")
+ if self._incremental_scenario_config and self._incremental_scenario_config.input_state:
+ state = [AirbyteStateMessage.parse_obj(s) for s in self._incremental_scenario_config.input_state]
+ else:
+ state = None
  source = self.source_builder.build(
  self._configured_catalog(SyncMode.incremental if self._incremental_scenario_config else SyncMode.full_refresh),
  self._config,
- self._incremental_scenario_config.input_state if self._incremental_scenario_config else None,
+ state,
  )
  return TestScenario(
  self._name,
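The build() change above normalizes a scenario's raw input_state before handing it to the source builder. A standalone sketch of that conversion, using only the names that appear in the hunk (the helper name normalize_input_state is hypothetical; AirbyteStateMessage is a pydantic model, so parse_obj is its standard constructor):

    from typing import Any, List, Mapping, Optional

    from airbyte_cdk.models import AirbyteStateMessage

    def normalize_input_state(input_state: Optional[List[Mapping[str, Any]]]) -> Optional[List[AirbyteStateMessage]]:
        # Mirrors the new build() logic: parse each raw state mapping into an
        # AirbyteStateMessage; a missing or empty input_state stays None.
        if input_state:
            return [AirbyteStateMessage.parse_obj(s) for s in input_state]
        return None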
unit_tests/sources/file_based/stream/concurrent/test_adapters.py (new file)

@@ -0,0 +1,365 @@
+ #
+ # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+ #
+ import logging
+ import unittest
+ from datetime import datetime
+ from unittest.mock import MagicMock, Mock
+
+ import pytest
+ from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteStream, Level, SyncMode
+ from airbyte_cdk.models import Type as MessageType
+ from airbyte_cdk.sources.file_based.availability_strategy import DefaultFileBasedAvailabilityStrategy
+ from airbyte_cdk.sources.file_based.config.csv_format import CsvFormat
+ from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig
+ from airbyte_cdk.sources.file_based.discovery_policy import DefaultDiscoveryPolicy
+ from airbyte_cdk.sources.file_based.exceptions import FileBasedErrorsCollector
+ from airbyte_cdk.sources.file_based.file_types import default_parsers
+ from airbyte_cdk.sources.file_based.remote_file import RemoteFile
+ from airbyte_cdk.sources.file_based.schema_validation_policies import EmitRecordPolicy
+ from airbyte_cdk.sources.file_based.stream import DefaultFileBasedStream
+ from airbyte_cdk.sources.file_based.stream.concurrent.adapters import (
+ FileBasedStreamFacade,
+ FileBasedStreamPartition,
+ FileBasedStreamPartitionGenerator,
+ )
+ from airbyte_cdk.sources.file_based.stream.concurrent.cursor import FileBasedNoopCursor
+ from airbyte_cdk.sources.message import InMemoryMessageRepository
+ from airbyte_cdk.sources.streams.concurrent.cursor import Cursor
+ from airbyte_cdk.sources.streams.concurrent.exceptions import ExceptionWithDisplayMessage
+ from airbyte_cdk.sources.streams.concurrent.partitions.record import Record
+ from airbyte_cdk.sources.utils.slice_logger import SliceLogger
+ from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer
+ from freezegun import freeze_time
+
+ _ANY_SYNC_MODE = SyncMode.full_refresh
+ _ANY_STATE = {"state_key": "state_value"}
+ _ANY_CURSOR_FIELD = ["a", "cursor", "key"]
+ _STREAM_NAME = "stream"
+ _ANY_CURSOR = Mock(spec=FileBasedNoopCursor)
+
+
+ @pytest.mark.parametrize(
+ "sync_mode",
+ [
+ pytest.param(SyncMode.full_refresh, id="test_full_refresh"),
+ pytest.param(SyncMode.incremental, id="test_incremental"),
+ ],
+ )
+ def test_file_based_stream_partition_generator(sync_mode):
+ stream = Mock()
+ message_repository = Mock()
+ stream_slices = [{"files": [RemoteFile(uri="1", last_modified=datetime.now())]},
+ {"files": [RemoteFile(uri="2", last_modified=datetime.now())]}]
+ stream.stream_slices.return_value = stream_slices
+
+ partition_generator = FileBasedStreamPartitionGenerator(stream, message_repository, _ANY_SYNC_MODE, _ANY_CURSOR_FIELD, _ANY_STATE, _ANY_CURSOR)
+
+ partitions = list(partition_generator.generate())
+ slices = [partition.to_slice() for partition in partitions]
+ assert slices == stream_slices
+ stream.stream_slices.assert_called_once_with(sync_mode=_ANY_SYNC_MODE, cursor_field=_ANY_CURSOR_FIELD, stream_state=_ANY_STATE)
+
+
+ @pytest.mark.parametrize(
+ "transformer, expected_records",
+ [
+ pytest.param(
+ TypeTransformer(TransformConfig.NoTransform),
+ [Record({"data": "1"}, _STREAM_NAME), Record({"data": "2"}, _STREAM_NAME)],
+ id="test_no_transform",
+ ),
+ pytest.param(
+ TypeTransformer(TransformConfig.DefaultSchemaNormalization),
+ [Record({"data": 1}, _STREAM_NAME), Record({"data": 2}, _STREAM_NAME)],
+ id="test_default_transform",
+ ),
+ ],
+ )
+ def test_file_based_stream_partition(transformer, expected_records):
+ stream = Mock()
+ stream.name = _STREAM_NAME
+ stream.get_json_schema.return_value = {"type": "object", "properties": {"data": {"type": ["integer"]}}}
+ stream.transformer = transformer
+ message_repository = InMemoryMessageRepository()
+ _slice = None
+ sync_mode = SyncMode.full_refresh
+ cursor_field = None
+ state = None
+ partition = FileBasedStreamPartition(stream, _slice, message_repository, sync_mode, cursor_field, state, _ANY_CURSOR)
+
+ a_log_message = AirbyteMessage(
+ type=MessageType.LOG,
+ log=AirbyteLogMessage(
+ level=Level.INFO,
+ message='slice:{"partition": 1}',
+ ),
+ )
+
+ stream_data = [a_log_message, {"data": "1"}, {"data": "2"}]
+ stream.read_records.return_value = stream_data
+
+ records = list(partition.read())
+ messages = list(message_repository.consume_queue())
+
+ assert records == expected_records
+ assert messages == [a_log_message]
+
+
+ @pytest.mark.parametrize(
+ "exception_type, expected_display_message",
+ [
+ pytest.param(Exception, None, id="test_exception_no_display_message"),
+ pytest.param(ExceptionWithDisplayMessage, "display_message", id="test_exception_no_display_message"),
+ ],
+ )
+ def test_file_based_stream_partition_raising_exception(exception_type, expected_display_message):
+ stream = Mock()
+ stream.get_error_display_message.return_value = expected_display_message
+
+ message_repository = InMemoryMessageRepository()
+ _slice = None
+
+ partition = FileBasedStreamPartition(stream, _slice, message_repository, _ANY_SYNC_MODE, _ANY_CURSOR_FIELD, _ANY_STATE, _ANY_CURSOR)
+
+ stream.read_records.side_effect = Exception()
+
+ with pytest.raises(exception_type) as e:
+ list(partition.read())
+ if isinstance(e, ExceptionWithDisplayMessage):
+ assert e.display_message == "display message"
+
+
+ @freeze_time("2023-06-09T00:00:00Z")
+ @pytest.mark.parametrize(
+ "_slice, expected_hash",
+ [
+ pytest.param({"files": [RemoteFile(uri="1", last_modified=datetime.strptime("2023-06-09T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"))]}, hash(("stream", "2023-06-09T00:00:00.000000Z_1")), id="test_hash_with_slice"),
+ pytest.param(None, hash("stream"), id="test_hash_no_slice"),
+ ],
+ )
+ def test_file_based_stream_partition_hash(_slice, expected_hash):
+ stream = Mock()
+ stream.name = "stream"
+ partition = FileBasedStreamPartition(stream, _slice, Mock(), _ANY_SYNC_MODE, _ANY_CURSOR_FIELD, _ANY_STATE, _ANY_CURSOR)
+
+ _hash = partition.__hash__()
+ assert _hash == expected_hash
+
+
+ class StreamFacadeTest(unittest.TestCase):
+ def setUp(self):
+ self._abstract_stream = Mock()
+ self._abstract_stream.name = "stream"
+ self._abstract_stream.as_airbyte_stream.return_value = AirbyteStream(
+ name="stream",
+ json_schema={"type": "object"},
+ supported_sync_modes=[SyncMode.full_refresh],
+ )
+ self._legacy_stream = DefaultFileBasedStream(
+ cursor=FileBasedNoopCursor(MagicMock()),
+ config=FileBasedStreamConfig(name="stream", format=CsvFormat()),
+ catalog_schema={},
+ stream_reader=MagicMock(),
+ availability_strategy=DefaultFileBasedAvailabilityStrategy(MagicMock()),
+ discovery_policy=DefaultDiscoveryPolicy(),
+ parsers=default_parsers,
+ validation_policy=EmitRecordPolicy(),
+ errors_collector=FileBasedErrorsCollector(),
+ )
+ self._cursor = Mock(spec=Cursor)
+ self._logger = Mock()
+ self._slice_logger = Mock()
+ self._slice_logger.should_log_slice_message.return_value = False
+ self._facade = FileBasedStreamFacade(self._abstract_stream, self._legacy_stream, self._cursor, self._slice_logger, self._logger)
+ self._source = Mock()
+
+ self._stream = Mock()
+ self._stream.primary_key = "id"
+
+ def test_name_is_delegated_to_wrapped_stream(self):
+ assert self._facade.name == self._abstract_stream.name
+
+ def test_cursor_field_is_a_string(self):
+ self._abstract_stream.cursor_field = "cursor_field"
+ assert self._facade.cursor_field == "cursor_field"
+
+ def test_source_defined_cursor_is_true(self):
+ assert self._facade.source_defined_cursor
+
+ def test_json_schema_is_delegated_to_wrapped_stream(self):
+ json_schema = {"type": "object"}
+ self._abstract_stream.get_json_schema.return_value = json_schema
+ assert self._facade.get_json_schema() == json_schema
+ self._abstract_stream.get_json_schema.assert_called_once_with()
+
+ def test_given_cursor_is_noop_when_supports_incremental_then_return_legacy_stream_response(self):
+ assert (
+ FileBasedStreamFacade(
+ self._abstract_stream, self._legacy_stream, _ANY_CURSOR, Mock(spec=SliceLogger), Mock(spec=logging.Logger)
+ ).supports_incremental
+ == self._legacy_stream.supports_incremental
+ )
+
+ def test_given_cursor_is_not_noop_when_supports_incremental_then_return_true(self):
+ assert FileBasedStreamFacade(
+ self._abstract_stream, self._legacy_stream, Mock(spec=Cursor), Mock(spec=SliceLogger), Mock(spec=logging.Logger)
+ ).supports_incremental
+
+ def test_full_refresh(self):
+ expected_stream_data = [{"data": 1}, {"data": 2}]
+ records = [Record(data, "stream") for data in expected_stream_data]
+
+ partition = Mock()
+ partition.read.return_value = records
+ self._abstract_stream.generate_partitions.return_value = [partition]
+
+ actual_stream_data = list(self._facade.read_records(SyncMode.full_refresh, None, {}, None))
+
+ assert actual_stream_data == expected_stream_data
+
+ def test_read_records_full_refresh(self):
+ expected_stream_data = [{"data": 1}, {"data": 2}]
+ records = [Record(data, "stream") for data in expected_stream_data]
+ partition = Mock()
+ partition.read.return_value = records
+ self._abstract_stream.generate_partitions.return_value = [partition]
+
+ actual_stream_data = list(self._facade.read_full_refresh(None, None, None))
+
+ assert actual_stream_data == expected_stream_data
+
+ def test_read_records_incremental(self):
+ expected_stream_data = [{"data": 1}, {"data": 2}]
+ records = [Record(data, "stream") for data in expected_stream_data]
+ partition = Mock()
+ partition.read.return_value = records
+ self._abstract_stream.generate_partitions.return_value = [partition]
+
+ actual_stream_data = list(self._facade.read_incremental(None, None, None, None, None, None, None))
+
+ assert actual_stream_data == expected_stream_data
+
+ def test_create_from_stream_stream(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = "id"
+ stream.cursor_field = "cursor"
+
+ facade = FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ assert facade.name == "stream"
+ assert facade.cursor_field == "cursor"
+ assert facade._abstract_stream._primary_key == ["id"]
+
+ def test_create_from_stream_stream_with_none_primary_key(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = None
+ stream.cursor_field = []
+
+ facade = FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+ assert facade._abstract_stream._primary_key == []
+
+ def test_create_from_stream_with_composite_primary_key(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = ["id", "name"]
+ stream.cursor_field = []
+
+ facade = FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+ assert facade._abstract_stream._primary_key == ["id", "name"]
+
+ def test_create_from_stream_with_empty_list_cursor(self):
+ stream = Mock()
+ stream.primary_key = "id"
+ stream.cursor_field = []
+
+ facade = FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ assert facade.cursor_field == []
+
+ def test_create_from_stream_raises_exception_if_primary_key_is_nested(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = [["field", "id"]]
+
+ with self.assertRaises(ValueError):
+ FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ def test_create_from_stream_raises_exception_if_primary_key_has_invalid_type(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = 123
+
+ with self.assertRaises(ValueError):
+ FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ def test_create_from_stream_raises_exception_if_cursor_field_is_nested(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = "id"
+ stream.cursor_field = ["field", "cursor"]
+
+ with self.assertRaises(ValueError):
+ FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ def test_create_from_stream_with_cursor_field_as_list(self):
+ stream = Mock()
+ stream.name = "stream"
+ stream.primary_key = "id"
+ stream.cursor_field = ["cursor"]
+
+ facade = FileBasedStreamFacade.create_from_stream(stream, self._source, self._logger, _ANY_STATE, self._cursor)
+ assert facade.cursor_field == "cursor"
+
+ def test_create_from_stream_none_message_repository(self):
+ self._stream.name = "stream"
+ self._stream.primary_key = "id"
+ self._stream.cursor_field = "cursor"
+ self._source.message_repository = None
+
+ with self.assertRaises(ValueError):
+ FileBasedStreamFacade.create_from_stream(self._stream, self._source, self._logger, {}, self._cursor)
+
+ def test_get_error_display_message_no_display_message(self):
+ self._stream.get_error_display_message.return_value = "display_message"
+
+ facade = FileBasedStreamFacade.create_from_stream(self._stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ expected_display_message = None
+ e = Exception()
+
+ display_message = facade.get_error_display_message(e)
+
+ assert expected_display_message == display_message
+
+ def test_get_error_display_message_with_display_message(self):
+ self._stream.get_error_display_message.return_value = "display_message"
+
+ facade = FileBasedStreamFacade.create_from_stream(self._stream, self._source, self._logger, _ANY_STATE, self._cursor)
+
+ expected_display_message = "display_message"
+ e = ExceptionWithDisplayMessage("display_message")
+
+ display_message = facade.get_error_display_message(e)
+
+ assert expected_display_message == display_message
+
+
+ @pytest.mark.parametrize(
+ "exception, expected_display_message",
+ [
+ pytest.param(Exception("message"), None, id="test_no_display_message"),
+ pytest.param(ExceptionWithDisplayMessage("message"), "message", id="test_no_display_message"),
+ ],
+ )
+ def test_get_error_display_message(exception, expected_display_message):
+ stream = Mock()
+ legacy_stream = Mock()
+ cursor = Mock(spec=Cursor)
+ facade = FileBasedStreamFacade(stream, legacy_stream, cursor, Mock().Mock(), Mock())
+
+ display_message = facade.get_error_display_message(exception)
+
+ assert display_message == expected_display_message