airbyte-source-iterable 0.2.1__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_source_iterable-0.3.0.dist-info/METADATA +113 -0
- {airbyte_source_iterable-0.2.1.dist-info → airbyte_source_iterable-0.3.0.dist-info}/RECORD +9 -26
- {airbyte_source_iterable-0.2.1.dist-info → airbyte_source_iterable-0.3.0.dist-info}/WHEEL +1 -2
- airbyte_source_iterable-0.3.0.dist-info/entry_points.txt +3 -0
- airbyte_source_iterable-0.2.1.dist-info/METADATA +0 -118
- airbyte_source_iterable-0.2.1.dist-info/entry_points.txt +0 -2
- airbyte_source_iterable-0.2.1.dist-info/top_level.txt +0 -3
- integration_tests/__init__.py +0 -0
- integration_tests/abnormal_state.json +0 -74
- integration_tests/acceptance.py +0 -13
- integration_tests/catalog.json +0 -186
- integration_tests/configured_catalog.json +0 -186
- integration_tests/configured_catalog_additional_events.json +0 -291
- integration_tests/invalid_config.json +0 -4
- unit_tests/__init__.py +0 -3
- unit_tests/conftest.py +0 -49
- unit_tests/test_export_adjustable_range.py +0 -117
- unit_tests/test_exports_stream.py +0 -35
- unit_tests/test_slice_generator.py +0 -94
- unit_tests/test_source.py +0 -28
- unit_tests/test_stream_events.py +0 -202
- unit_tests/test_streams.py +0 -270
- unit_tests/test_utils.py +0 -12
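
The removed `integration_tests/` and `unit_tests/` entries show that 0.3.0 stops shipping test code inside the wheel, and the regenerated `dist-info` metadata (new `entry_points.txt`, dropped `top_level.txt`, changed `WHEEL`) points to a change in how the package is built. The listing above can be double-checked locally; a minimal sketch, assuming both wheels have already been downloaded and using illustrative filenames:

```python
# Minimal sketch: reproduce the added/removed file listing by comparing the two
# wheels (wheels are zip archives). The filenames below are assumptions based on
# the release names in the heading; adjust to whatever `pip download` produced.
import zipfile

OLD = "airbyte_source_iterable-0.2.1-py3-none-any.whl"
NEW = "airbyte_source_iterable-0.3.0-py3-none-any.whl"

def file_names(path: str) -> set:
    with zipfile.ZipFile(path) as wheel:
        return set(wheel.namelist())

old_files, new_files = file_names(OLD), file_names(NEW)
print("removed in 0.3.0:", *sorted(old_files - new_files), sep="\n  ")
print("added in 0.3.0:", *sorted(new_files - old_files), sep="\n  ")
```
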
@@ -1,94 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-import freezegun
-import pendulum
-import pytest
-from source_iterable.slice_generators import AdjustableSliceGenerator, RangeSliceGenerator
-
-TEST_DATE = pendulum.parse("2020-01-01")
-
-
-def test_slice_gen():
-    start_date = TEST_DATE
-    generator = AdjustableSliceGenerator(start_date)
-    dates = []
-    for i in generator:
-        dates.append(i)
-        generator.adjust_range(pendulum.Period(start=start_date, end=start_date))
-    assert dates
-    days = [(slice.end_date - slice.start_date).total_days() for slice in dates]
-    assert days[1] == AdjustableSliceGenerator.DEFAULT_RANGE_DAYS
-
-
-@freezegun.freeze_time(TEST_DATE + pendulum.Duration(days=1000))
-def test_slice_gen_no_range_adjust():
-    start_date = TEST_DATE
-    generator = AdjustableSliceGenerator(start_date)
-    dates = []
-    for i in generator:
-        dates.append(i)
-    assert dates
-    days = [(slice.end_date - slice.start_date).total_days() for slice in dates]
-    assert days
-    assert days[1] == AdjustableSliceGenerator.MAX_RANGE_DAYS
-
-
-@pytest.mark.parametrize(
-    "start_day,end_day,days,range",
-    [
-        (
-            "2020-01-01",
-            "2020-01-10",
-            5,
-            [
-                (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-06")),
-                (pendulum.parse("2020-01-06"), pendulum.parse("2020-01-10")),
-            ],
-        ),
-        (
-            "2020-01-01",
-            "2020-01-10 20:00:12",
-            5,
-            [
-                (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-06")),
-                (pendulum.parse("2020-01-06"), pendulum.parse("2020-01-10 20:00:12")),
-            ],
-        ),
-        (
-            "2020-01-01",
-            "2020-01-01 20:00:12",
-            5,
-            [
-                (pendulum.parse("2020-01-01"), pendulum.parse("2020-01-01 20:00:12")),
-            ],
-        ),
-        (
-            "2020-01-01",
-            "2020-01-10",
-            50,
-            [(pendulum.parse("2020-01-01"), pendulum.parse("2020-01-10"))],
-        ),
-        (
-            "2020-01-01",
-            "2020-01-01",
-            50,
-            [],
-        ),
-    ],
-)
-def test_datetime_ranges(start_day, end_day, days, range):
-    start_day = pendulum.parse(start_day)
-    with freezegun.freeze_time(end_day):
-        end_day = pendulum.parse(end_day)
-        RangeSliceGenerator.RANGE_LENGTH_DAYS = days
-        generator = RangeSliceGenerator(start_day)
-        assert [(slice.start_date, slice.end_date) for slice in generator] == range
-
-
-def test_datetime_wrong_range():
-    start_day = pendulum.parse("2020")
-    end_day = pendulum.parse("2000")
-    with pytest.raises(StopIteration):
-        next(RangeSliceGenerator.make_datetime_ranges(start_day, end_day, 1))
unit_tests/test_source.py
DELETED
@@ -1,28 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-from unittest.mock import MagicMock, patch
-
-import pytest
-import responses
-from source_iterable.source import SourceIterable
-from source_iterable.streams import Lists
-
-
-@responses.activate
-@pytest.mark.parametrize("body, status, expected_streams", ((b"", 401, 7), (b"alpha@gmail.com\nbeta@gmail.com", 200, 44)))
-def test_source_streams(mock_lists_resp, config, body, status, expected_streams):
-    responses.add(responses.GET, "https://api.iterable.com/api/lists/getUsers?listId=1", body=body, status=status)
-    streams = SourceIterable().streams(config=config)
-    assert len(streams) == expected_streams
-
-
-def test_source_check_connection_ok(config):
-    with patch.object(Lists, "read_records", return_value=iter([{"id": 1}])):
-        assert SourceIterable().check_connection(MagicMock(), config=config) == (True, None)
-
-
-def test_source_check_connection_failed(config):
-    with patch.object(Lists, "read_records", return_value=iter([])):
-        assert SourceIterable().check_connection(MagicMock(), config=config)[0] is False
unit_tests/test_stream_events.py
DELETED
@@ -1,202 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-import json
-
-import pytest
-import requests
-import responses
-from source_iterable.streams import Events
-
-
-@responses.activate
-@pytest.mark.parametrize(
-    "response_objects,expected_objects,jsonl_body",
-    [
-        (
-            [
-                {
-                    "createdAt": "2021",
-                    "signupSource": "str",
-                    "emailListIds": [1],
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "202"},
-                    "_type": "str",
-                    "messageTypeIds": [],
-                    "channelIds": [],
-                    "email": "test@mail.com",
-                    "profileUpdatedAt": "2021",
-                },
-                {
-                    "productRecommendationCount": 1,
-                    "campaignId": 1,
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "2021"},
-                    "contentId": 1,
-                    "_type": "1",
-                    "messageId": "1",
-                    "messageBusId": "1",
-                    "templateId": 1,
-                    "createdAt": "2021",
-                    "messageTypeId": 1,
-                    "catalogCollectionCount": 1,
-                    "catalogLookupCount": 0,
-                    "email": "test@mail.com",
-                    "channelId": 1,
-                },
-                {
-                    "createdAt": "2021",
-                    "campaignId": 1,
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "2021"},
-                    "_type": "str",
-                    "messageId": "1",
-                    "templateId": 1,
-                    "recipientState": "str",
-                    "email": "test@mail.com",
-                },
-                {
-                    "unsubSource": "str",
-                    "createdAt": "2021",
-                    "emailListIds": [],
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "2021"},
-                    "_type": "str",
-                    "messageId": "1",
-                    "messageTypeIds": [],
-                    "channelIds": [1],
-                    "templateId": 1,
-                    "recipientState": "str",
-                    "email": "test@mail.com",
-                },
-            ],
-            [],
-            False,
-        ),
-        (
-            [
-                {
-                    "createdAt": "2021",
-                    "signupSource": "str",
-                    "emailListIds": [1],
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "202"},
-                    "_type": "str",
-                    "messageTypeIds": [],
-                    "channelIds": [],
-                    "email": "test@mail.com",
-                    "profileUpdatedAt": "2021",
-                }
-            ],
-            [],
-            False,
-        ),
-        (
-            [
-                {
-                    "createdAt": "2021",
-                    "signupSource": "str",
-                    "emailListIds": [1],
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "202"},
-                    "_type": "str",
-                    "messageTypeIds": [],
-                    "channelIds": [],
-                    "email": "test@mail.com",
-                    "profileUpdatedAt": "2021",
-                },
-                {
-                    "productRecommendationCount": 1,
-                    "campaignId": 1,
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "2021"},
-                    "contentId": 1,
-                    "_type": "1",
-                    "messageId": "1",
-                    "messageBusId": "1",
-                    "templateId": 1,
-                    "createdAt": "2021",
-                    "messageTypeId": 1,
-                    "catalogCollectionCount": 1,
-                    "catalogLookupCount": 0,
-                    "email": "test@mail.com",
-                    "channelId": 1,
-                },
-            ],
-            [
-                {
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "202"},
-                    "_type": "str",
-                    "createdAt": "2021",
-                    "email": "test@mail.com",
-                    "data": {
-                        "signupSource": "str",
-                        "emailListIds": [1],
-                        "messageTypeIds": [],
-                        "channelIds": [],
-                        "profileUpdatedAt": "2021",
-                    },
-                },
-                {
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "2021"},
-                    "_type": "1",
-                    "createdAt": "2021",
-                    "email": "test@mail.com",
-                    "data": {
-                        "productRecommendationCount": 1,
-                        "campaignId": 1,
-                        "contentId": 1,
-                        "messageId": "1",
-                        "messageBusId": "1",
-                        "templateId": 1,
-                        "messageTypeId": 1,
-                        "catalogCollectionCount": 1,
-                        "catalogLookupCount": 0,
-                        "channelId": 1,
-                    },
-                },
-            ],
-            True,
-        ),
-        (
-            [
-                {
-                    "createdAt": "2021",
-                    "signupSource": "str",
-                    "emailListIds": [1],
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "202"},
-                    "_type": "str",
-                    "messageTypeIds": [],
-                    "channelIds": [],
-                    "email": "test@mail.com",
-                    "profileUpdatedAt": "2021",
-                }
-            ],
-            [
-                {
-                    "itblInternal": {"documentUpdatedAt": "2021", "documentCreatedAt": "202"},
-                    "_type": "str",
-                    "createdAt": "2021",
-                    "email": "test@mail.com",
-                    "data": {
-                        "signupSource": "str",
-                        "emailListIds": [1],
-                        "messageTypeIds": [],
-                        "channelIds": [],
-                        "profileUpdatedAt": "2021",
-                    },
-                }
-            ],
-            True,
-        ),
-    ],
-)
-def test_events_parse_response(response_objects, expected_objects, jsonl_body):
-    if jsonl_body:
-        response_body = "\n".join([json.dumps(obj) for obj in response_objects])
-    else:
-        response_body = json.dumps(response_objects)
-    responses.add(responses.GET, "https://example.com", body=response_body)
-    response = requests.get("https://example.com")
-    stream = Events(authenticator=None)
-
-    if jsonl_body:
-        records = [record for record in stream.parse_response(response)]
-        assert records == expected_objects
-    else:
-        with pytest.raises(TypeError):
-            [record for record in stream.parse_response(response)]
unit_tests/test_streams.py
DELETED
@@ -1,270 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-import json
-from unittest.mock import MagicMock
-
-import pendulum
-import pytest
-import requests
-import responses
-from airbyte_cdk.models import SyncMode
-from source_iterable.streams import (
-    Campaigns,
-    CampaignsMetrics,
-    Channels,
-    Events,
-    Lists,
-    ListUsers,
-    MessageTypes,
-    Metadata,
-    Templates,
-    Users,
-)
-from source_iterable.utils import dateutil_parse, read_full_refresh
-
-
-@pytest.mark.parametrize(
-    "stream,date,slice,expected_path",
-    [
-        (Lists, False, {}, "lists"),
-        (Campaigns, False, {}, "campaigns"),
-        (Channels, False, {}, "channels"),
-        (Events, False, {}, "export/userEvents"),
-        (MessageTypes, False, {}, "messageTypes"),
-        (Metadata, False, {}, "metadata"),
-        (ListUsers, False, {"list_id": 1}, "lists/getUsers?listId=1"),
-        (CampaignsMetrics, True, {}, "campaigns/metrics"),
-        (Templates, True, {}, "templates"),
-    ],
-)
-def test_path(config, stream, date, slice, expected_path):
-    args = {"authenticator": None}
-    if date:
-        args["start_date"] = "2019-10-10T00:00:00"
-
-    assert stream(**args).path(stream_slice=slice) == expected_path
-
-
-def test_campaigns_metrics_csv():
-    csv_string = "a,b,c,d\n1, 2,,3\n6,,1, 2\n"
-    output = [{"a": 1, "b": 2, "d": 3}, {"a": 6, "c": 1, "d": 2}]
-
-    assert CampaignsMetrics._parse_csv_string_to_dict(csv_string) == output
-
-
-@pytest.mark.parametrize(
-    "url,id",
-    [
-        ("http://google.com?listId=1&another=another", 1),
-        ("http://google.com?another=another", None),
-    ],
-)
-def test_list_users_get_list_id(url, id):
-    assert ListUsers._get_list_id(url) == id
-
-
-def test_campaigns_metrics_request_params():
-    stream = CampaignsMetrics(authenticator=None, start_date="2019-10-10T00:00:00")
-    params = stream.request_params(stream_slice={"campaign_ids": "c101"}, stream_state=None)
-    assert params == {"campaignId": "c101", "startDateTime": "2019-10-10T00:00:00"}
-
-
-def test_events_request_params():
-    stream = Events(authenticator=None)
-    params = stream.request_params(stream_slice={"email": "a@a.a"}, stream_state=None)
-    assert params == {"email": "a@a.a", "includeCustomEvents": "true"}
-
-
-def test_templates_parse_response():
-    stream = Templates(authenticator=None, start_date="2019-10-10T00:00:00")
-    with responses.RequestsMock() as rsps:
-        rsps.add(
-            responses.GET,
-            "https://api.iterable.com/api/1/foobar",
-            json={"templates": [{"createdAt": "2022-01-01", "id": 1}]},
-            status=200,
-            content_type="application/json",
-        )
-        resp = requests.get("https://api.iterable.com/api/1/foobar")
-
-    records = stream.parse_response(response=resp)
-
-    assert list(records) == [{"id": 1, "createdAt": dateutil_parse("2022-01-01")}]
-
-
-def test_list_users_parse_response():
-    stream = ListUsers(authenticator=None)
-    with responses.RequestsMock() as rsps:
-        rsps.add(
-            responses.GET,
-            "https://api.iterable.com/lists/getUsers?listId=100",
-            body="user100",
-            status=200,
-            content_type="application/json",
-        )
-        resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100")
-
-    records = stream.parse_response(response=resp)
-
-    assert list(records) == [{"email": "user100", "listId": 100}]
-
-
-def test_campaigns_metrics_parse_response():
-
-    stream = CampaignsMetrics(authenticator=None, start_date="2019-10-10T00:00:00")
-    with responses.RequestsMock() as rsps:
-        rsps.add(
-            responses.GET,
-            "https://api.iterable.com/lists/getUsers?listId=100",
-            body="""a,b,c,d
-1, 2,, 3
-6,, 1, 2
-""",
-            status=200,
-            content_type="application/json",
-        )
-        resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100")
-
-    records = stream.parse_response(response=resp)
-
-    assert list(records) == [
-        {"data": {"a": 1, "b": 2, "d": 3}},
-        {"data": {"a": 6, "c": 1, "d": 2}},
-    ]
-
-
-def test_iterable_stream_parse_response():
-    stream = Lists(authenticator=None)
-    with responses.RequestsMock() as rsps:
-        rsps.add(
-            responses.GET,
-            "https://api.iterable.com/lists/getUsers?listId=100",
-            json={"lists": [{"id": 1}, {"id": 2}]},
-            status=200,
-            content_type="application/json",
-        )
-        resp = requests.get("https://api.iterable.com/lists/getUsers?listId=100")
-
-    records = stream.parse_response(response=resp)
-
-    assert list(records) == [{"id": 1}, {"id": 2}]
-
-
-def test_iterable_stream_backoff_time():
-    stream = Lists(authenticator=None)
-    assert stream.backoff_time(response=None) is None
-
-
-def test_iterable_export_stream_backoff_time():
-    stream = Users(authenticator=None, start_date="2019-10-10T00:00:00")
-    assert stream.backoff_time(response=None) is None
-
-
-@pytest.mark.parametrize(
-    "current_state,record_date,expected_state",
-    [
-        ({}, "2022", {"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}),
-        ({"profileUpdatedAt": "2020-01-01T00:00:00+00:00"}, "2022", {"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}),
-        ({"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}, "2020", {"profileUpdatedAt": "2022-01-01T00:00:00+00:00"}),
-    ],
-)
-def test_get_updated_state(current_state, record_date, expected_state):
-    stream = Users(authenticator=None, start_date="2019-10-10T00:00:00")
-    state = stream.get_updated_state(
-        current_stream_state=current_state,
-        latest_record={"profileUpdatedAt": pendulum.parse(record_date)},
-    )
-    assert state == expected_state
-
-
-@responses.activate
-def test_stream_stops_on_401(mock_lists_resp):
-    # no requests should be made after getting 401 error despite the multiple slices
-    users_stream = ListUsers(authenticator=None)
-    responses.add(responses.GET, "https://api.iterable.com/api/lists/getUsers?listId=1", json={}, status=401)
-    slices = 0
-    for slice_ in users_stream.stream_slices(sync_mode=SyncMode.full_refresh):
-        slices += 1
-        _ = list(users_stream.read_records(stream_slice=slice_, sync_mode=SyncMode.full_refresh))
-    assert len(responses.calls) == 1
-    assert slices > 1
-
-
-@responses.activate
-def test_listuser_stream_keep_working_on_500():
-    users_stream = ListUsers(authenticator=None)
-
-    msg_error = "An error occurred. Please try again later. If problem persists, please contact your CSM"
-    generic_error1 = {"msg": msg_error, "code": "GenericError"}
-    generic_error2 = {"msg": msg_error, "code": "Generic Error"}
-
-    responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1000}, {"id": 2000}, {"id": 3000}]})
-    responses.get("https://api.iterable.com/api/lists/getUsers?listId=1000", json=generic_error1, status=500)
-    responses.get("https://api.iterable.com/api/lists/getUsers?listId=2000", body="one@d1.com\ntwo@d1.com\nthree@d1.com")
-    responses.get("https://api.iterable.com/api/lists/getUsers?listId=3000", json=generic_error2, status=500)
-    responses.get("https://api.iterable.com/api/lists/getUsers?listId=3000", body="one@d2.com\ntwo@d2.com\nthree@d2.com")
-
-    expected_records = [
-        {"email": "one@d1.com", "listId": 2000},
-        {"email": "two@d1.com", "listId": 2000},
-        {"email": "three@d1.com", "listId": 2000},
-        {"email": "one@d2.com", "listId": 3000},
-        {"email": "two@d2.com", "listId": 3000},
-        {"email": "three@d2.com", "listId": 3000},
-    ]
-
-    records = list(read_full_refresh(users_stream))
-    assert records == expected_records
-
-
-@responses.activate
-def test_events_read_full_refresh():
-    stream = Events(authenticator=None)
-    responses.get("https://api.iterable.com/api/lists", json={"lists": [{"id": 1}]})
-    responses.get("https://api.iterable.com/api/lists/getUsers?listId=1", body="user1\nuser2\nuser3\nuser4\nuser5\nuser6")
-
-    def get_body(emails):
-        return "\n".join([json.dumps({"email": email}) for email in emails]) + "\n"
-
-    msg_error = "An error occurred. Please try again later. If problem persists, please contact your CSM"
-    generic_error1 = {"msg": msg_error, "code": "GenericError"}
-    generic_error2 = {"msg": msg_error, "code": "Generic Error"}
-
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user1&includeCustomEvents=true", body=get_body(["user1"]))
-
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user2&includeCustomEvents=true", json=generic_error1, status=500)
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user2&includeCustomEvents=true", body=get_body(["user2"]))
-
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user3&includeCustomEvents=true", body=get_body(["user3"]))
-
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user4&includeCustomEvents=true", json=generic_error1, status=500)
-
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user5&includeCustomEvents=true", json=generic_error2, status=500)
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user5&includeCustomEvents=true", json=generic_error2, status=500)
-    responses.get("https://api.iterable.com/api/export/userEvents?email=user5&includeCustomEvents=true", body=get_body(["user5"]))
-
-    m = responses.get(
-        "https://api.iterable.com/api/export/userEvents?email=user6&includeCustomEvents=true", json=generic_error2, status=500
-    )
-
-    records = list(read_full_refresh(stream))
-    assert [r["email"] for r in records] == ["user1", "user2", "user3", "user5"]
-    assert m.call_count == 3
-
-
-def test_retry_read_timeout():
-    stream = Lists(authenticator=None)
-    stream._session.send = MagicMock(side_effect=requests.exceptions.ReadTimeout)
-    with pytest.raises(requests.exceptions.ReadTimeout):
-        list(read_full_refresh(stream))
-    stream._session.send.call_args[1] == {"timeout": (60, 300)}
-    assert stream._session.send.call_count == stream.max_retries + 1
-
-    stream = Campaigns(authenticator=None)
-    stream._session.send = MagicMock(side_effect=requests.exceptions.ConnectionError)
-    with pytest.raises(requests.exceptions.ConnectionError):
-        list(read_full_refresh(stream))
-    stream._session.send.call_args[1] == {"timeout": (60, 300)}
-    assert stream._session.send.call_count == stream.max_retries + 1
unit_tests/test_utils.py
DELETED
@@ -1,12 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-
-import pendulum
-from source_iterable.utils import dateutil_parse
-
-
-def test_dateutil_parse():
-    assert pendulum.parse("2021-04-08 14:23:30 +00:00", strict=False) == dateutil_parse("2021-04-08 14:23:30 +00:00")
-    assert pendulum.parse("2021-04-14T16:51:23+00:00", strict=False) == dateutil_parse("2021-04-14T16:51:23+00:00")
-    assert pendulum.parse("2021-04-14T16:23:30.700000+00:00", strict=False) == dateutil_parse("2021-04-14T16:23:30.700000+00:00")