ingestr 0.13.2__py3-none-any.whl → 0.14.104__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ingestr/conftest.py +72 -0
- ingestr/main.py +134 -87
- ingestr/src/adjust/__init__.py +4 -4
- ingestr/src/adjust/adjust_helpers.py +7 -3
- ingestr/src/airtable/__init__.py +3 -2
- ingestr/src/allium/__init__.py +128 -0
- ingestr/src/anthropic/__init__.py +277 -0
- ingestr/src/anthropic/helpers.py +525 -0
- ingestr/src/applovin/__init__.py +262 -0
- ingestr/src/applovin_max/__init__.py +117 -0
- ingestr/src/appsflyer/__init__.py +325 -0
- ingestr/src/appsflyer/client.py +49 -45
- ingestr/src/appstore/__init__.py +1 -0
- ingestr/src/arrow/__init__.py +9 -1
- ingestr/src/asana_source/__init__.py +1 -1
- ingestr/src/attio/__init__.py +102 -0
- ingestr/src/attio/helpers.py +65 -0
- ingestr/src/blob.py +38 -11
- ingestr/src/buildinfo.py +1 -0
- ingestr/src/chess/__init__.py +1 -1
- ingestr/src/clickup/__init__.py +85 -0
- ingestr/src/clickup/helpers.py +47 -0
- ingestr/src/collector/spinner.py +43 -0
- ingestr/src/couchbase_source/__init__.py +118 -0
- ingestr/src/couchbase_source/helpers.py +135 -0
- ingestr/src/cursor/__init__.py +83 -0
- ingestr/src/cursor/helpers.py +188 -0
- ingestr/src/destinations.py +520 -33
- ingestr/src/docebo/__init__.py +589 -0
- ingestr/src/docebo/client.py +435 -0
- ingestr/src/docebo/helpers.py +97 -0
- ingestr/src/elasticsearch/__init__.py +80 -0
- ingestr/src/elasticsearch/helpers.py +138 -0
- ingestr/src/errors.py +8 -0
- ingestr/src/facebook_ads/__init__.py +47 -28
- ingestr/src/facebook_ads/helpers.py +59 -37
- ingestr/src/facebook_ads/settings.py +2 -0
- ingestr/src/facebook_ads/utils.py +39 -0
- ingestr/src/factory.py +116 -2
- ingestr/src/filesystem/__init__.py +8 -3
- ingestr/src/filters.py +46 -3
- ingestr/src/fluxx/__init__.py +9906 -0
- ingestr/src/fluxx/helpers.py +209 -0
- ingestr/src/frankfurter/__init__.py +157 -0
- ingestr/src/frankfurter/helpers.py +48 -0
- ingestr/src/freshdesk/__init__.py +89 -0
- ingestr/src/freshdesk/freshdesk_client.py +137 -0
- ingestr/src/freshdesk/settings.py +9 -0
- ingestr/src/fundraiseup/__init__.py +95 -0
- ingestr/src/fundraiseup/client.py +81 -0
- ingestr/src/github/__init__.py +41 -6
- ingestr/src/github/helpers.py +5 -5
- ingestr/src/google_analytics/__init__.py +22 -4
- ingestr/src/google_analytics/helpers.py +124 -6
- ingestr/src/google_sheets/__init__.py +4 -4
- ingestr/src/google_sheets/helpers/data_processing.py +2 -2
- ingestr/src/hostaway/__init__.py +302 -0
- ingestr/src/hostaway/client.py +288 -0
- ingestr/src/http/__init__.py +35 -0
- ingestr/src/http/readers.py +114 -0
- ingestr/src/http_client.py +24 -0
- ingestr/src/hubspot/__init__.py +66 -23
- ingestr/src/hubspot/helpers.py +52 -22
- ingestr/src/hubspot/settings.py +14 -7
- ingestr/src/influxdb/__init__.py +46 -0
- ingestr/src/influxdb/client.py +34 -0
- ingestr/src/intercom/__init__.py +142 -0
- ingestr/src/intercom/helpers.py +674 -0
- ingestr/src/intercom/settings.py +279 -0
- ingestr/src/isoc_pulse/__init__.py +159 -0
- ingestr/src/jira_source/__init__.py +340 -0
- ingestr/src/jira_source/helpers.py +439 -0
- ingestr/src/jira_source/settings.py +170 -0
- ingestr/src/kafka/__init__.py +4 -1
- ingestr/src/kinesis/__init__.py +139 -0
- ingestr/src/kinesis/helpers.py +82 -0
- ingestr/src/klaviyo/{_init_.py → __init__.py} +5 -6
- ingestr/src/linear/__init__.py +634 -0
- ingestr/src/linear/helpers.py +111 -0
- ingestr/src/linkedin_ads/helpers.py +0 -1
- ingestr/src/loader.py +69 -0
- ingestr/src/mailchimp/__init__.py +126 -0
- ingestr/src/mailchimp/helpers.py +226 -0
- ingestr/src/mailchimp/settings.py +164 -0
- ingestr/src/masking.py +344 -0
- ingestr/src/mixpanel/__init__.py +62 -0
- ingestr/src/mixpanel/client.py +99 -0
- ingestr/src/monday/__init__.py +246 -0
- ingestr/src/monday/helpers.py +392 -0
- ingestr/src/monday/settings.py +328 -0
- ingestr/src/mongodb/__init__.py +72 -8
- ingestr/src/mongodb/helpers.py +915 -38
- ingestr/src/partition.py +32 -0
- ingestr/src/personio/__init__.py +331 -0
- ingestr/src/personio/helpers.py +86 -0
- ingestr/src/phantombuster/__init__.py +65 -0
- ingestr/src/phantombuster/client.py +87 -0
- ingestr/src/pinterest/__init__.py +82 -0
- ingestr/src/pipedrive/__init__.py +198 -0
- ingestr/src/pipedrive/helpers/__init__.py +23 -0
- ingestr/src/pipedrive/helpers/custom_fields_munger.py +102 -0
- ingestr/src/pipedrive/helpers/pages.py +115 -0
- ingestr/src/pipedrive/settings.py +27 -0
- ingestr/src/pipedrive/typing.py +3 -0
- ingestr/src/plusvibeai/__init__.py +335 -0
- ingestr/src/plusvibeai/helpers.py +544 -0
- ingestr/src/plusvibeai/settings.py +252 -0
- ingestr/src/quickbooks/__init__.py +117 -0
- ingestr/src/resource.py +40 -0
- ingestr/src/revenuecat/__init__.py +83 -0
- ingestr/src/revenuecat/helpers.py +237 -0
- ingestr/src/salesforce/__init__.py +156 -0
- ingestr/src/salesforce/helpers.py +64 -0
- ingestr/src/shopify/__init__.py +1 -17
- ingestr/src/smartsheets/__init__.py +82 -0
- ingestr/src/snapchat_ads/__init__.py +489 -0
- ingestr/src/snapchat_ads/client.py +72 -0
- ingestr/src/snapchat_ads/helpers.py +535 -0
- ingestr/src/socrata_source/__init__.py +83 -0
- ingestr/src/socrata_source/helpers.py +85 -0
- ingestr/src/socrata_source/settings.py +8 -0
- ingestr/src/solidgate/__init__.py +219 -0
- ingestr/src/solidgate/helpers.py +154 -0
- ingestr/src/sources.py +3132 -212
- ingestr/src/stripe_analytics/__init__.py +49 -21
- ingestr/src/stripe_analytics/helpers.py +286 -1
- ingestr/src/stripe_analytics/settings.py +62 -10
- ingestr/src/telemetry/event.py +10 -9
- ingestr/src/tiktok_ads/__init__.py +12 -6
- ingestr/src/tiktok_ads/tiktok_helpers.py +0 -1
- ingestr/src/trustpilot/__init__.py +48 -0
- ingestr/src/trustpilot/client.py +48 -0
- ingestr/src/version.py +6 -1
- ingestr/src/wise/__init__.py +68 -0
- ingestr/src/wise/client.py +63 -0
- ingestr/src/zoom/__init__.py +99 -0
- ingestr/src/zoom/helpers.py +102 -0
- ingestr/tests/unit/test_smartsheets.py +133 -0
- ingestr-0.14.104.dist-info/METADATA +563 -0
- ingestr-0.14.104.dist-info/RECORD +203 -0
- ingestr/src/appsflyer/_init_.py +0 -24
- ingestr-0.13.2.dist-info/METADATA +0 -302
- ingestr-0.13.2.dist-info/RECORD +0 -107
- {ingestr-0.13.2.dist-info → ingestr-0.14.104.dist-info}/WHEEL +0 -0
- {ingestr-0.13.2.dist-info → ingestr-0.14.104.dist-info}/entry_points.txt +0 -0
- {ingestr-0.13.2.dist-info → ingestr-0.14.104.dist-info}/licenses/LICENSE.md +0 -0
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Any, Dict, Iterator, List, Optional
|
|
3
|
+
|
|
4
|
+
import dlt
|
|
5
|
+
import pendulum
|
|
6
|
+
import requests
|
|
7
|
+
|
|
8
|
+
# Base URL template for a customer's Fluxx instance; `{instance}` is the
# customer-specific subdomain (e.g. "acme" -> acme.fluxxlabs.com).
FLUXX_API_BASE = "https://{instance}.fluxxlabs.com"
# OAuth2 token endpoint path used by the client-credentials flow.
FLUXX_OAUTH_TOKEN_PATH = "/oauth/token"
# Path prefix for all REST API v2 data requests.
FLUXX_API_V2_PATH = "/api/rest/v2"
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def get_access_token(instance: str, client_id: str, client_secret: str) -> str:
    """Obtain an OAuth access token using the client-credentials flow.

    Args:
        instance: Fluxx instance subdomain (e.g. "acme" for acme.fluxxlabs.com).
        client_id: OAuth client id.
        client_secret: OAuth client secret.

    Returns:
        The bearer access token string issued by the token endpoint.

    Raises:
        requests.HTTPError: If the token endpoint returns an error status.
        requests.Timeout: If the endpoint does not respond within 30 seconds.
    """
    token_url = f"{FLUXX_API_BASE.format(instance=instance)}{FLUXX_OAUTH_TOKEN_PATH}"

    response = requests.post(
        token_url,
        data={
            "grant_type": "client_credentials",
            "client_id": client_id,
            "client_secret": client_secret,
        },
        # Without a timeout, requests waits indefinitely on an unresponsive
        # endpoint; bound the call so the pipeline fails fast instead.
        timeout=30,
    )
    response.raise_for_status()

    token_data = response.json()
    return token_data["access_token"]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def fluxx_api_request(
    instance: str,
    access_token: str,
    endpoint: str,
    method: str = "GET",
    params: Optional[Dict[str, Any]] = None,
    data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Make an authenticated request to the Fluxx REST API v2.

    Args:
        instance: Fluxx instance subdomain.
        access_token: Bearer token from :func:`get_access_token`.
        endpoint: API endpoint path relative to the v2 prefix.
        method: HTTP method, defaults to "GET".
        params: Optional query-string parameters.
        data: Optional JSON request body.

    Returns:
        The decoded JSON response, or an empty dict for empty bodies.

    Raises:
        requests.HTTPError: If the API returns an error status.
        requests.Timeout: If the API does not respond within 30 seconds.
    """
    url = f"{FLUXX_API_BASE.format(instance=instance)}{FLUXX_API_V2_PATH}/{endpoint}"

    headers = {
        "Authorization": f"Bearer {access_token}",
        "Content-Type": "application/json",
    }

    response = requests.request(
        method=method,
        url=url,
        headers=headers,
        params=params,
        json=data,
        # Bound the call so a hung server cannot stall the pipeline forever.
        timeout=30,
    )
    response.raise_for_status()

    # Some endpoints (e.g. DELETE) legitimately return an empty body.
    if response.text:
        return response.json()
    return {}
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def paginate_fluxx_resource(
    instance: str,
    access_token: str,
    endpoint: str,
    params: Optional[Dict[str, Any]] = None,
    page_size: int = 100,
) -> Iterator[List[Dict[str, Any]]]:
    """Paginate through a Fluxx API resource, yielding one page of items at a time.

    Args:
        instance: Fluxx instance subdomain.
        access_token: Bearer token for the API.
        endpoint: Resource endpoint to page through.
        params: Optional extra query parameters (not mutated).
        page_size: Number of records requested per page.

    Yields:
        Lists of record dicts, one list per fetched page (possibly empty).
    """
    # Copy the caller's dict so pagination keys ("page"/"per_page") are not
    # written back into it — the original mutated a shared argument in place.
    params = {} if params is None else dict(params)

    page = 1
    params["per_page"] = page_size

    while True:
        params["page"] = page

        response = fluxx_api_request(
            instance=instance,
            access_token=access_token,
            endpoint=endpoint,
            params=params,
        )

        if not response:
            break

        # The payload nests items under a model-name key inside "records";
        # pick the first available key instead of assuming the endpoint name.
        records = response["records"]
        if records:
            first_key = next(iter(records))
            items = records[first_key]
        else:
            items = []

        yield items

        # A short page (or a missing per_page hint) means we reached the end.
        if response["per_page"] is None or len(items) < response["per_page"]:
            break

        page += 1
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def get_date_range(updated_at, start_date):
    """Resolve the effective (start, end) window from incremental state.

    Falls back to *start_date* (or 30 days ago) when no incremental state
    exists, and to "now" (UTC) when no end bound is set.
    """
    last_seen = updated_at.last_value
    if last_seen:
        window_start = pendulum.parse(last_seen)
    elif start_date:
        window_start = pendulum.parse(start_date)
    else:
        window_start = pendulum.now().subtract(days=30)

    end_bound = updated_at.end_value
    window_end = pendulum.parse(end_bound) if end_bound else pendulum.now(tz="UTC")

    return window_start, window_end
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def create_dynamic_resource(
    resource_name: str,
    endpoint: str,
    instance: str,
    access_token: str,
    start_date: Optional[pendulum.DateTime] = None,
    end_date: Optional[pendulum.DateTime] = None,
    fields_to_extract: Optional[Dict[str, Any]] = None,
):
    """Factory producing a dlt resource for an arbitrary Fluxx model endpoint.

    The field configuration drives both the dlt column hints and the `cols`
    query parameter sent to the API.
    """
    # Translate the field configuration into dlt column type hints.
    columns = {
        name: {"data_type": cfg["data_type"]}
        for name, cfg in (fields_to_extract or {}).items()
        if cfg.get("data_type")
    }

    @dlt.resource(name=resource_name, write_disposition="replace", columns=columns)  # type: ignore
    def fluxx_resource() -> Iterator[Dict[str, Any]]:
        request_params = {}
        if fields_to_extract:
            # Ask the API for exactly the configured columns.
            request_params["cols"] = json.dumps(list(fields_to_extract.keys()))

        for page in paginate_fluxx_resource(
            instance=instance,
            access_token=access_token,
            endpoint=endpoint,
            params=request_params,
            page_size=100,
        ):
            yield [normalize_fluxx_item(entry, fields_to_extract) for entry in page]  # type: ignore

    return fluxx_resource
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def normalize_fluxx_item(
    item: Dict[str, Any], fields_to_extract: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    """Normalize a single Fluxx API record.

    Applies per-field handling based on the configured data type, and rounds
    every float value to 4 decimal places regardless of its declared type.
    Without a field mapping the record is returned untouched.
    """
    # No mapping configured: pass the record through unchanged.
    if not fields_to_extract:
        return item

    result: Dict[str, Any] = {}

    for name, config in fields_to_extract.items():
        if name not in item:
            continue

        raw = item[name]
        declared_type = config.get("data_type")

        if isinstance(raw, float):
            # Floats are always rounded, whatever the declared type says.
            result[name] = round(raw, 4)
        elif declared_type == "json":
            # json fields hold arrays/relations; coerce empties to None and
            # wrap bare scalars so the column stays array-shaped.
            if raw is None or raw == "":
                result[name] = None
            elif isinstance(raw, (list, dict)):
                result[name] = raw
            else:
                result[name] = [raw]
        elif declared_type in ("date", "timestamp", "datetime", "text"):
            # Text-ish fields: empty string means "no value".
            result[name] = None if raw == "" else raw
        else:
            result[name] = raw

    # The primary key is always carried over when present.
    if "id" in item:
        result["id"] = item["id"]

    return result
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
from typing import Any, Iterator, Optional
|
|
2
|
+
|
|
3
|
+
import dlt
|
|
4
|
+
from dlt.common.pendulum import pendulum
|
|
5
|
+
from dlt.common.time import ensure_pendulum_datetime
|
|
6
|
+
from dlt.common.typing import TAnyDateTime
|
|
7
|
+
|
|
8
|
+
from ingestr.src.frankfurter.helpers import get_path_with_retry
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dlt.source(
    name="frankfurter",
    max_table_nesting=0,
)
def frankfurter_source(
    start_date: TAnyDateTime,
    end_date: TAnyDateTime | None,
    base_currency: str,
) -> Any:
    """
    A dlt source for the frankfurter.dev API. It groups several resources (in this case frankfurter.dev API endpoints) containing
    various types of data: currencies, latest rates, historical rates.
    """

    @dlt.resource(
        write_disposition="replace",
    )
    def currencies() -> Iterator[dict]:
        """
        Yields each currency as a separate row with two columns: currency_code and currency_name.
        """
        # Retrieve the list of currencies from the API
        currencies_data = get_path_with_retry("currencies")

        for currency_code, currency_name in currencies_data.items():
            yield {"currency_code": currency_code, "currency_name": currency_name}

    @dlt.resource(
        write_disposition="merge",
        columns={
            "date": {"data_type": "text"},
            "currency_code": {"data_type": "text"},
            "rate": {"data_type": "double"},
            "base_currency": {"data_type": "text"},
        },
        primary_key=["date", "currency_code", "base_currency"],
    )
    def latest() -> Iterator[dict]:
        """
        Fetches the latest exchange rates and yields them as rows.

        Uses the source-level ``base_currency`` directly. Previously a
        shadowing parameter with default "" meant the configured base
        currency was ignored when dlt evaluated the resource with defaults.
        """
        # Base URL
        url = "latest?"

        if base_currency:
            url += f"base={base_currency}"

        # Fetch data
        data = get_path_with_retry(url)

        # Extract rates and base currency
        rates = data["rates"]
        date = pendulum.parse(data["date"])

        # Add the base currency with a rate of 1.0
        yield {
            "date": date,
            "currency_code": base_currency,
            "rate": 1.0,
            "base_currency": base_currency,
        }

        # Add all currencies and their rates
        for currency_code, rate in rates.items():
            yield {
                "date": date,
                "currency_code": currency_code,
                "rate": rate,
                "base_currency": base_currency,
            }

    @dlt.resource(
        write_disposition="merge",
        columns={
            "date": {"data_type": "text"},
            "currency_code": {"data_type": "text"},
            "rate": {"data_type": "double"},
            "base_currency": {"data_type": "text"},
        },
        primary_key=("date", "currency_code", "base_currency"),
    )
    def exchange_rates(
        date_time=dlt.sources.incremental(
            "date",
            initial_value=start_date,
            end_value=end_date,
            range_start="closed",
            range_end="closed",
        ),
    ) -> Iterator[dict]:
        """
        Fetches exchange rates for a specified date range.
        If only start_date is provided, fetches data until now.
        If both start_date and end_date are provided, fetches data for each day in the range.
        """
        # Use distinct local names. The original assigned to `start_date`,
        # making it a function local, so the fallback `start_date = start_date`
        # raised UnboundLocalError whenever last_value was None.
        if date_time.last_value is not None:
            window_start = date_time.last_value
        else:
            window_start = start_date

        if date_time.end_value is not None:
            window_end = date_time.end_value
        else:
            window_end = pendulum.now()

        # Ensure both bounds are pendulum.DateTime objects before formatting.
        start_date_str = ensure_pendulum_datetime(window_start).format("YYYY-MM-DD")  # type: ignore
        end_date_str = ensure_pendulum_datetime(window_end).format("YYYY-MM-DD")

        # Compose the URL
        url = f"{start_date_str}..{end_date_str}?"

        if base_currency:
            url += f"base={base_currency}"

        # Fetch data from the API
        data = get_path_with_retry(url)

        # Extract base currency and rates from the API response
        rates = data["rates"]

        # Iterate over the rates dictionary (one entry per date)
        for date, daily_rates in rates.items():
            formatted_date = pendulum.parse(date)

            # Add the base currency with a rate of 1.0
            yield {
                "date": formatted_date,
                "currency_code": base_currency,
                "rate": 1.0,
                "base_currency": base_currency,
            }

            # Add all other currencies and their rates
            for currency_code, rate in daily_rates.items():
                yield {
                    "date": formatted_date,
                    "currency_code": currency_code,
                    "rate": rate,
                    "base_currency": base_currency,
                }

    return currencies, latest, exchange_rates
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
|
|
3
|
+
from dlt.common.pendulum import pendulum
|
|
4
|
+
from dlt.common.typing import StrAny
|
|
5
|
+
from dlt.sources.helpers import requests
|
|
6
|
+
|
|
7
|
+
FRANKFURTER_API_URL = "https://api.frankfurter.dev/v1/"
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_url_with_retry(url: str) -> StrAny:
    """Fetch *url* and return its decoded JSON payload.

    Retrying is delegated to dlt's `requests` helper, which wraps calls
    with its default retry policy.
    """
    response = requests.get(url, timeout=5)
    return response.json()  # type: ignore
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def get_path_with_retry(path: str) -> StrAny:
    """Fetch an API path relative to FRANKFURTER_API_URL and return its JSON."""
    return get_url_with_retry(FRANKFURTER_API_URL + path)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def validate_dates(start_date: datetime, end_date: datetime | None) -> None:
    """Validate the requested interval against the current time.

    Raises:
        ValueError: If either bound lies in the future, or if the end
            of the interval precedes its start.
    """
    now = pendulum.now()

    # Neither bound may lie in the future.
    if start_date > now:
        raise ValueError("Interval-start cannot be in the future.")

    if end_date is not None:
        if end_date > now:
            raise ValueError("Interval-end cannot be in the future.")
        # The interval must be ordered: start <= end.
        if start_date > end_date:
            raise ValueError("Interval-end cannot be before interval-start.")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def validate_currency(currency_code: str) -> bool:
    """Return True if *currency_code* is a currency supported by frankfurter.dev.

    On an unsupported code, prints the list of supported currencies and
    returns False instead of raising.
    """
    response = requests.get("https://api.frankfurter.dev/v1/currencies", timeout=5)
    currencies = response.json()

    if currency_code.upper() in currencies:
        return True

    supported_currencies = list(currencies.keys())
    print(
        f"Invalid base currency '{currency_code}'. Supported currencies are: {supported_currencies}"
    )
    return False
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
"""This source uses Freshdesk API and dlt to load data such as Agents, Companies, Tickets
|
|
2
|
+
etc. to the database"""
|
|
3
|
+
|
|
4
|
+
from typing import Any, Dict, Generator, Iterable, List, Optional
|
|
5
|
+
|
|
6
|
+
import dlt
|
|
7
|
+
import pendulum
|
|
8
|
+
from dlt.common.time import ensure_pendulum_datetime
|
|
9
|
+
from dlt.sources import DltResource
|
|
10
|
+
|
|
11
|
+
from .freshdesk_client import FreshdeskClient
|
|
12
|
+
from .settings import DEFAULT_ENDPOINTS
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@dlt.source()
def freshdesk_source(
    domain: str,
    api_secret_key: str,
    start_date: pendulum.DateTime,
    end_date: Optional[pendulum.DateTime] = None,
    per_page: int = 100,
    endpoints: Optional[List[str]] = None,
    query: Optional[str] = None,
) -> Iterable[DltResource]:
    """
    Retrieves data from specified Freshdesk API endpoints.

    This source supports pagination and incremental data loading. It fetches data from a list of
    specified endpoints, or defaults to predefined endpoints in 'settings.py'.

    Args:
        endpoints: A list of Freshdesk API endpoints to fetch. Defaults to 'settings.py'.
        per_page: The number of items to fetch per page, with a maximum of 100.
        domain: The Freshdesk domain from which to fetch the data. Defaults to 'config.toml'.
        api_secret_key: Freshdesk API key. Defaults to 'secrets.toml'.

    Yields:
        Iterable[DltResource]: Resources with data updated after the last 'updated_at'
        timestamp for each endpoint.
    """
    # Instantiate FreshdeskClient with the provided domain and API key
    freshdesk = FreshdeskClient(api_key=api_secret_key, domain=domain)

    def incremental_resource(
        endpoint: str,
        updated_at: Optional[Any] = dlt.sources.incremental(
            "updated_at",
            initial_value=start_date.isoformat(),
            end_value=end_date.isoformat() if end_date else None,
            range_start="closed",
            range_end="closed",
        ),
    ) -> Generator[Dict[Any, Any], Any, None]:
        """
        Fetches and yields paginated data from a specified API endpoint.
        Each page of data is fetched based on the `updated_at` timestamp
        to ensure incremental loading.
        """
        # Use distinct local names. The original assigned to `start_date`,
        # making it a function local, so the fallback branch
        # `start_date = start_date` raised UnboundLocalError when
        # last_value was None.
        if updated_at.last_value is not None:
            window_start = ensure_pendulum_datetime(updated_at.last_value)
        else:
            window_start = start_date

        if updated_at.end_value is not None:
            window_end = ensure_pendulum_datetime(updated_at.end_value)
        else:
            window_end = pendulum.now(tz="UTC")

        # Use the FreshdeskClient instance to fetch paginated responses
        yield from freshdesk.paginated_response(
            endpoint=endpoint,
            per_page=per_page,
            start_date=window_start,
            end_date=window_end,
            query=query,
        )

    # Set default endpoints if not provided
    endpoints = endpoints or DEFAULT_ENDPOINTS

    # For each endpoint, create and yield a DLT resource
    for endpoint in endpoints:
        yield dlt.resource(
            incremental_resource,
            name=endpoint,
            write_disposition="merge",
            primary_key="id",
        )(endpoint=endpoint)
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
"""Freshdesk Client for making authenticated requests"""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import time
|
|
5
|
+
from typing import Any, Dict, Iterable, Optional
|
|
6
|
+
|
|
7
|
+
import pendulum
|
|
8
|
+
from dlt.common.typing import TDataItem
|
|
9
|
+
from dlt.sources.helpers import requests
|
|
10
|
+
|
|
11
|
+
from ingestr.src.errors import HTTPError
|
|
12
|
+
|
|
13
|
+
TICKETS_QUERY_MAX_PAGE = 10
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class FreshdeskClient:
    """
    Client for making authenticated requests to the Freshdesk API. It incorporates API requests with
    rate limit and pagination.

    Attributes:
        api_key (str): The API key used for authenticating requests to the Freshdesk API.
        domain (str): The Freshdesk domain specific to the user, used in constructing the base URL.
        base_url (str): The base URL constructed from the domain, targeting the Freshdesk API v2.
    """

    def __init__(self, api_key: str, domain: str):
        # Initialize the FreshdeskClient instance with API key and domain.
        # The API key is used for authentication with the Freshdesk API.
        # The domain specifies the unique Freshdesk domain of the user.

        # Store the API key provided during initialization.
        self.api_key = api_key
        # Store the Freshdesk domain provided during initialization.
        self.domain = domain

        # Construct the base URL for the API requests.
        # This URL is formed by appending the domain to the standard Freshdesk API base URL format.
        # All API requests will use this base URL as their starting point.
        self.base_url = f"https://{domain}.freshdesk.com/api/v2"

    def _request_with_rate_limit(self, url: str, **kwargs: Any) -> requests.Response:
        """
        Handles rate limits in HTTP requests and ensures
        that the client doesn't exceed the limit set by the server.

        Loops until the request succeeds; on HTTP 429 it sleeps for the
        server-provided Retry-After interval and retries, on any other HTTP
        error it wraps and re-raises.
        """

        while True:
            try:
                # Freshdesk uses basic auth with the API key as username and
                # a literal "X" as the password placeholder.
                response = requests.get(url, **kwargs, auth=(self.api_key, "X"))
                response.raise_for_status()

                return response
            except requests.HTTPError as e:
                if e.response.status_code == 429:
                    # Get the 'Retry-After' header to know how long to wait
                    # Fallback to 60 seconds if header is missing
                    seconds_to_wait = int(e.response.headers.get("Retry-After", 60))
                    # Log a warning message
                    logging.warning(
                        "Rate limited. Waiting to retry after: %s secs", seconds_to_wait
                    )

                    # Wait for the specified number of seconds before retrying
                    time.sleep(seconds_to_wait)
                else:
                    # If the error is not a rate limit (429), raise the exception to be
                    # handled elsewhere or stop execution
                    raise HTTPError(e) from e

    def paginated_response(
        self,
        endpoint: str,
        per_page: int,
        start_date: pendulum.DateTime,
        end_date: pendulum.DateTime,
        query: Optional[str] = None,
    ) -> Iterable[TDataItem]:
        """
        Fetches a paginated response from a specified endpoint.

        This method will continuously fetch data from the given endpoint,
        page by page, until no more data is available or until it reaches data
        updated at the specified timestamp.

        Yields lists of items whose "updated_at" falls at or before *end_date*;
        stops when a page is empty, when a page yields no in-range items, or —
        for ticket search queries — after TICKETS_QUERY_MAX_PAGE pages.
        """
        page = 1
        if query is not None:
            # Strip quoting; the query is re-quoted below when sent.
            query = query.replace('"', "").strip()

        # Ticket search queries go through the /search/tickets endpoint,
        # which has its own parameter shape and page cap.
        is_tickets_query = query and endpoint == "tickets"

        while True:
            # Construct the URL for the specific endpoint
            url = f"{self.base_url}/{endpoint}"

            params: Dict[str, Any] = {"per_page": per_page, "page": page}

            # Implement date range splitting logic here, if applicable
            if endpoint in ["tickets", "contacts"]:
                # The "updated since" filter parameter is spelled differently
                # for the tickets vs. contacts endpoints.
                param_key = (
                    "updated_since" if endpoint == "tickets" else "_updated_since"
                )

                params[param_key] = start_date.to_iso8601_string()

            if is_tickets_query:
                # Search endpoint replaces the list endpoint and its params;
                # the query string must be wrapped in double quotes.
                url = f"{self.base_url}/search/tickets"
                params = {
                    "query": f'"{query}"',
                    "page": page,
                }

            # Handle requests with rate-limiting
            # A maximum of 300 pages (30000 tickets) will be returned.
            response = self._request_with_rate_limit(url, params=params)
            data = response.json()

            # Search responses nest the items under a "results" key.
            if query and endpoint == "tickets":
                data = data["results"]

            if not data:
                break  # Stop if no data or max page limit reached

            # Drop items updated after the requested window's end.
            filtered_data = [
                item
                for item in data
                if "updated_at" in item
                and pendulum.parse(item["updated_at"]) <= end_date
            ]
            # An entirely out-of-range page means we've passed the window.
            if not filtered_data:
                break
            yield filtered_data
            page += 1

            # https://developers.freshdesk.com/api/#filter_tickets
            if is_tickets_query and page > TICKETS_QUERY_MAX_PAGE:
                break
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This module defines default settings for the Freshdesk integration.
|
|
3
|
+
|
|
4
|
+
It specifies a list of default endpoints to be used when interacting with the Freshdesk API,
|
|
5
|
+
covering common entities such as agents, companies, contacts, groups, roles, and tickets.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
# Define default endpoints for the Freshdesk API integration.
|
|
9
|
+
DEFAULT_ENDPOINTS = ["agents", "companies", "contacts", "groups", "roles", "tickets"]
|