ingestr 0.14.94__py3-none-any.whl → 0.14.97__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

ingestr/src/buildinfo.py CHANGED
@@ -1 +1 @@
- version = "v0.14.94"
+ version = "v0.14.97"
ingestr/src/factory.py CHANGED
@@ -54,6 +54,7 @@ from ingestr.src.sources import (
  GoogleAnalyticsSource,
  GoogleSheetsSource,
  GorgiasSource,
+ HostawaySource,
  HttpSource,
  HubspotSource,
  InfluxDBSource,
@@ -84,6 +85,7 @@ from ingestr.src.sources import (
  ShopifySource,
  SlackSource,
  SmartsheetSource,
+ SocrataSource,
  SolidgateSource,
  SqlSource,
  StripeAnalyticsSource,
@@ -177,6 +179,7 @@ class SourceDestinationFactory:
  "facebookads": FacebookAdsSource,
  "fluxx": FluxxSource,
  "slack": SlackSource,
+ "hostaway": HostawaySource,
  "hubspot": HubspotSource,
  "intercom": IntercomSource,
  "jira": JiraSource,
@@ -218,6 +221,7 @@ class SourceDestinationFactory:
  "sftp": SFTPSource,
  "pinterest": PinterestSource,
  "revenuecat": RevenueCatSource,
+ "socrata": SocrataSource,
  "zoom": ZoomSource,
  "clickup": ClickupSource,
  "influxdb": InfluxDBSource,
ingestr/src/hostaway/__init__.py ADDED
@@ -0,0 +1,302 @@
+ from typing import Iterable
+
+ import dlt
+ import pendulum
+ from dlt.common.typing import TDataItem
+ from dlt.sources import DltResource
+
+ from .client import HostawayClient
+
+
+ @dlt.source(max_table_nesting=0)
+ def hostaway_source(
+ api_key: str,
+ start_date: pendulum.DateTime,
+ end_date: pendulum.DateTime | None = None,
+ ) -> Iterable[DltResource]:
+ """
+ Hostaway API source for fetching listings and fee settings data.
+
+ Args:
+ api_key: Hostaway API key for Bearer token authentication
+ start_date: Start date for incremental loading
+ end_date: End date for incremental loading (defaults to current time)
+
+ Returns:
+ Iterable[DltResource]: DLT resources for listings and/or fee settings
+ """
+
+ client = HostawayClient(api_key)
+
+ @dlt.resource(
+ write_disposition="merge",
+ name="listings",
+ primary_key="id",
+ )
+ def listings(
+ datetime=dlt.sources.incremental(
+ "latestActivityOn",
+ initial_value=start_date,
+ end_value=end_date,
+ range_end="closed",
+ range_start="closed",
+ ),
+ ) -> Iterable[TDataItem]:
+ """
+ Fetch listings from Hostaway API with incremental loading.
+ Uses latestActivityOn field as the incremental cursor.
+ """
+ start_dt = datetime.last_value
+ end_dt = (
+ datetime.end_value
+ if datetime.end_value is not None
+ else pendulum.now(tz="UTC")
+ )
+
+ yield from client.fetch_listings(start_dt, end_dt)
+
+ @dlt.resource(
+ write_disposition="merge",
+ name="listing_fee_settings",
+ primary_key="id",
+ )
+ def listing_fee_settings(
+ datetime=dlt.sources.incremental(
+ "updatedOn",
+ initial_value=start_date,
+ end_value=end_date,
+ range_end="closed",
+ range_start="closed",
+ ),
+ ) -> Iterable[TDataItem]:
+ """
+ Fetch listing fee settings from Hostaway API with incremental loading.
+ Uses updatedOn field as the incremental cursor.
+ """
+ start_dt = datetime.last_value
+ end_dt = (
+ datetime.end_value
+ if datetime.end_value is not None
+ else pendulum.now(tz="UTC")
+ )
+
+ yield from client.fetch_all_listing_fee_settings(start_dt, end_dt)
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="listing_agreements",
+ )
+ def listing_agreements() -> Iterable[TDataItem]:
+ """
+ Fetch listing agreements from Hostaway API.
+
+ Note: Uses replace mode, so no incremental loading.
+ """
+ very_old_date = pendulum.datetime(1970, 1, 1, tz="UTC")
+ now = pendulum.now(tz="UTC")
+ yield from client.fetch_all_listing_agreements(very_old_date, now)
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="listing_pricing_settings",
+ )
+ def listing_pricing_settings() -> Iterable[TDataItem]:
+ """
+ Fetch listing pricing settings from Hostaway API.
+
+ Note: Uses replace mode, so no incremental loading.
+ """
+ very_old_date = pendulum.datetime(1970, 1, 1, tz="UTC")
+ now = pendulum.now(tz="UTC")
+ yield from client.fetch_all_listing_pricing_settings(very_old_date, now)
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="cancellation_policies",
+ )
+ def cancellation_policies() -> Iterable[TDataItem]:
+ yield from client.fetch_cancellation_policies()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="cancellation_policies_airbnb",
+ )
+ def cancellation_policies_airbnb() -> Iterable[TDataItem]:
+ yield from client.fetch_cancellation_policies_airbnb()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="cancellation_policies_marriott",
+ )
+ def cancellation_policies_marriott() -> Iterable[TDataItem]:
+ yield from client.fetch_cancellation_policies_marriott()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="cancellation_policies_vrbo",
+ )
+ def cancellation_policies_vrbo() -> Iterable[TDataItem]:
+ yield from client.fetch_cancellation_policies_vrbo()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="reservations",
+ selected=False,
+ )
+ def reservations() -> Iterable[TDataItem]:
+ yield from client.fetch_reservations()
+
+ @dlt.transformer(
+ data_from=reservations,
+ write_disposition="replace",
+ name="finance_fields",
+ )
+ def finance_fields(reservation_item: TDataItem) -> Iterable[TDataItem]:
+ @dlt.defer
+ def _get_finance_field(res_id):
+ return list(client.fetch_finance_field(res_id))
+
+ reservation_id_val = reservation_item.get("id")
+ if reservation_id_val:
+ yield _get_finance_field(reservation_id_val)
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="reservation_payment_methods",
+ )
+ def reservation_payment_methods() -> Iterable[TDataItem]:
+ yield from client.fetch_reservation_payment_methods()
+
+ @dlt.transformer(
+ data_from=reservations,
+ write_disposition="replace",
+ name="reservation_rental_agreements",
+ )
+ def reservation_rental_agreements(
+ reservation_item: TDataItem,
+ ) -> Iterable[TDataItem]:
+ @dlt.defer
+ def _get_rental_agreement(res_id):
+ return list(client.fetch_reservation_rental_agreement(res_id))
+
+ reservation_id = reservation_item.get("id")
+ if reservation_id:
+ yield _get_rental_agreement(reservation_id)
+
+ @dlt.transformer(
+ data_from=listings,
+ write_disposition="replace",
+ name="listing_calendars",
+ )
+ def listing_calendars(listing_item: TDataItem) -> Iterable[TDataItem]:
+ @dlt.defer
+ def _get_calendar(lst_id):
+ return list(client.fetch_listing_calendar(lst_id))
+
+ listing_id_val = listing_item.get("id")
+ if listing_id_val:
+ yield _get_calendar(listing_id_val)
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="conversations",
+ )
+ def conversations() -> Iterable[TDataItem]:
+ yield from client.fetch_conversations()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="message_templates",
+ )
+ def message_templates() -> Iterable[TDataItem]:
+ yield from client.fetch_message_templates()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="bed_types",
+ )
+ def bed_types() -> Iterable[TDataItem]:
+ yield from client.fetch_bed_types()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="property_types",
+ )
+ def property_types() -> Iterable[TDataItem]:
+ yield from client.fetch_property_types()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="countries",
+ )
+ def countries() -> Iterable[TDataItem]:
+ yield from client.fetch_countries()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="account_tax_settings",
+ )
+ def account_tax_settings() -> Iterable[TDataItem]:
+ yield from client.fetch_account_tax_settings()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="user_groups",
+ )
+ def user_groups() -> Iterable[TDataItem]:
+ yield from client.fetch_user_groups()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="guest_payment_charges",
+ )
+ def guest_payment_charges() -> Iterable[TDataItem]:
+ yield from client.fetch_guest_payment_charges()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="coupons",
+ )
+ def coupons() -> Iterable[TDataItem]:
+ yield from client.fetch_coupons()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="webhook_reservations",
+ )
+ def webhook_reservations() -> Iterable[TDataItem]:
+ yield from client.fetch_webhook_reservations()
+
+ @dlt.resource(
+ write_disposition="replace",
+ name="tasks",
+ )
+ def tasks() -> Iterable[TDataItem]:
+ yield from client.fetch_tasks()
+
+ return (
+ listings,
+ listing_fee_settings,
+ listing_agreements,
+ listing_pricing_settings,
+ cancellation_policies,
+ cancellation_policies_airbnb,
+ cancellation_policies_marriott,
+ cancellation_policies_vrbo,
+ reservations,
+ finance_fields,
+ reservation_payment_methods,
+ reservation_rental_agreements,
+ listing_calendars,
+ conversations,
+ message_templates,
+ bed_types,
+ property_types,
+ countries,
+ account_tax_settings,
+ user_groups,
+ guest_payment_charges,
+ coupons,
+ webhook_reservations,
+ tasks,
+ )
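
For context, a minimal sketch of how the new hostaway_source could be run in a standalone dlt pipeline; the pipeline name and the duckdb destination below are illustrative assumptions, not part of this release:

import dlt
import pendulum

from ingestr.src.hostaway import hostaway_source

# Illustrative pipeline; any dlt destination would work the same way.
pipeline = dlt.pipeline(
    pipeline_name="hostaway_demo",
    destination="duckdb",
    dataset_name="hostaway",
)

source = hostaway_source(
    api_key="HOSTAWAY_API_KEY",  # placeholder credential
    start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
)

# Load only the incremental "listings" resource from the source.
load_info = pipeline.run(source.with_resources("listings"))
print(load_info)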
ingestr/src/hostaway/client.py ADDED
@@ -0,0 +1,288 @@
+ from typing import Callable, Iterable, Optional
+
+ import pendulum
+ from dlt.sources.helpers.requests import Client
+
+
+ class HostawayClient:
+ BASE_URL = "https://api.hostaway.com"
+
+ def __init__(self, api_key: str) -> None:
+ self.session = Client(raise_for_status=False).session
+ self.session.headers.update({"Authorization": f"Bearer {api_key}"})
+
+ def _fetch_single(self, url: str, params: Optional[dict] = None) -> Iterable[dict]:
+ response = self.session.get(url, params=params, timeout=30)
+ response.raise_for_status()
+ response_data = response.json()
+
+ if isinstance(response_data, dict) and "result" in response_data:
+ items = response_data["result"]
+ elif isinstance(response_data, list):
+ items = response_data
+ else:
+ items = []
+
+ if isinstance(items, list):
+ for item in items:
+ yield item
+ elif isinstance(items, dict):
+ yield items
+
+ def _paginate(
+ self,
+ url: str,
+ params: Optional[dict] = None,
+ limit: int = 100,
+ process_item: Optional[Callable[[dict], dict]] = None,
+ ) -> Iterable[dict]:
+ offset = 0
+ if params is None:
+ params = {}
+
+ while True:
+ page_params = {**params, "limit": limit, "offset": offset}
+ response = self.session.get(url, params=page_params, timeout=30)
+ response.raise_for_status()
+ response_data = response.json()
+
+ if isinstance(response_data, dict) and "result" in response_data:
+ items = response_data["result"]
+ elif isinstance(response_data, list):
+ items = response_data
+ else:
+ items = []
+
+ if not items or (isinstance(items, list) and len(items) == 0):
+ break
+
+ if isinstance(items, list):
+ for item in items:
+ if process_item:
+ item = process_item(item)
+ yield item
+ elif isinstance(items, dict):
+ if process_item:
+ items = process_item(items)
+ yield items
+
+ if isinstance(items, list) and len(items) < limit:
+ break
+ elif isinstance(items, dict):
+ break
+
+ offset += limit
+
+ def fetch_listings(
+ self,
+ start_time: pendulum.DateTime,
+ end_time: pendulum.DateTime,
+ ) -> Iterable[dict]:
+ def process_listing(listing: dict) -> dict:
+ if "latestActivityOn" in listing and listing["latestActivityOn"]:
+ try:
+ listing["latestActivityOn"] = pendulum.parse(
+ listing["latestActivityOn"]
+ )
+ except Exception:
+ listing["latestActivityOn"] = pendulum.datetime(
+ 1970, 1, 1, tz="UTC"
+ )
+ else:
+ listing["latestActivityOn"] = pendulum.datetime(1970, 1, 1, tz="UTC")
+ return listing
+
+ url = f"{self.BASE_URL}/v1/listings"
+ for listing in self._paginate(url, process_item=process_listing):
+ if start_time <= listing["latestActivityOn"] <= end_time:
+ yield listing
+
+ def fetch_listing_fee_settings(
+ self,
+ listing_id,
+ start_time: pendulum.DateTime,
+ end_time: pendulum.DateTime,
+ ) -> Iterable[dict]:
+ def process_fee(fee: dict) -> dict:
+ if "updatedOn" in fee and fee["updatedOn"]:
+ try:
+ fee["updatedOn"] = pendulum.parse(fee["updatedOn"])
+ except Exception:
+ fee["updatedOn"] = pendulum.datetime(1970, 1, 1, tz="UTC")
+ else:
+ fee["updatedOn"] = pendulum.datetime(1970, 1, 1, tz="UTC")
+ return fee
+
+ url = f"{self.BASE_URL}/v1/listingFeeSettings/{str(listing_id)}"
+ for fee in self._paginate(url, process_item=process_fee):
+ if start_time <= fee["updatedOn"] <= end_time:
+ yield fee
+
+ def fetch_all_listing_fee_settings(
+ self,
+ start_time: pendulum.DateTime,
+ end_time: pendulum.DateTime,
+ ) -> Iterable[dict]:
+ for listing in self.fetch_listings(start_time, end_time):
+ listing_id = listing.get("id")
+ if listing_id:
+ try:
+ yield from self.fetch_listing_fee_settings(
+ listing_id, start_time, end_time
+ )
+ except Exception:
+ continue
+
+ def fetch_listing_agreement(
+ self,
+ listing_id,
+ ) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/listingAgreement/{str(listing_id)}"
+ yield from self._paginate(url)
+
+ def fetch_listing_pricing_settings(
+ self,
+ listing_id,
+ ) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/listing/pricingSettings/{str(listing_id)}"
+ yield from self._paginate(url)
+
+ def fetch_all_listing_pricing_settings(
+ self,
+ start_time: pendulum.DateTime,
+ end_time: pendulum.DateTime,
+ ) -> Iterable[dict]:
+ for listing in self.fetch_listings(start_time, end_time):
+ listing_id = listing.get("id")
+ if listing_id:
+ try:
+ yield from self.fetch_listing_pricing_settings(listing_id)
+ except Exception:
+ continue
+
+ def fetch_all_listing_agreements(
+ self,
+ start_time: pendulum.DateTime,
+ end_time: pendulum.DateTime,
+ ) -> Iterable[dict]:
+ for listing in self.fetch_listings(start_time, end_time):
+ listing_id = listing.get("id")
+ if listing_id:
+ try:
+ yield from self.fetch_listing_agreement(listing_id)
+ except Exception:
+ continue
+
+ def fetch_cancellation_policies(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/cancellationPolicies"
+ yield from self._fetch_single(url)
+
+ def fetch_cancellation_policies_airbnb(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/cancellationPolicies/airbnb"
+ yield from self._fetch_single(url)
+
+ def fetch_cancellation_policies_marriott(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/cancellationPolicies/marriott"
+ yield from self._fetch_single(url)
+
+ def fetch_cancellation_policies_vrbo(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/cancellationPolicies/vrbo"
+ yield from self._fetch_single(url)
+
+ def fetch_reservations(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/reservations"
+ yield from self._paginate(url)
+
+ def fetch_finance_field(self, reservation_id) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/financeField/{str(reservation_id)}"
+ yield from self._fetch_single(url)
+
+ def fetch_all_finance_fields(self) -> Iterable[dict]:
+ for reservation in self.fetch_reservations():
+ reservation_id = reservation.get("id")
+ if reservation_id:
+ try:
+ yield from self.fetch_finance_field(reservation_id)
+ except Exception:
+ continue
+
+ def fetch_reservation_payment_methods(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/reservations/paymentMethods"
+ yield from self._fetch_single(url)
+
+ def fetch_reservation_rental_agreement(self, reservation_id) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/reservations/{str(reservation_id)}/rentalAgreement"
+ try:
+ yield from self._fetch_single(url)
+ except Exception:
+ return
+
+ def fetch_all_reservation_rental_agreements(self) -> Iterable[dict]:
+ for reservation in self.fetch_reservations():
+ reservation_id = reservation.get("id")
+ if reservation_id:
+ try:
+ yield from self.fetch_reservation_rental_agreement(reservation_id)
+ except Exception:
+ continue
+
+ def fetch_listing_calendar(self, listing_id) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/listings/{str(listing_id)}/calendar"
+ yield from self._fetch_single(url)
+
+ def fetch_all_listing_calendars(
+ self,
+ start_time: pendulum.DateTime,
+ end_time: pendulum.DateTime,
+ ) -> Iterable[dict]:
+ for listing in self.fetch_listings(start_time, end_time):
+ listing_id = listing.get("id")
+ if listing_id:
+ try:
+ yield from self.fetch_listing_calendar(listing_id)
+ except Exception:
+ continue
+
+ def fetch_conversations(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/conversations"
+ yield from self._paginate(url)
+
+ def fetch_message_templates(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/messageTemplates"
+ yield from self._fetch_single(url)
+
+ def fetch_bed_types(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/bedTypes"
+ yield from self._fetch_single(url)
+
+ def fetch_property_types(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/propertyTypes"
+ yield from self._fetch_single(url)
+
+ def fetch_countries(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/countries"
+ yield from self._fetch_single(url)
+
+ def fetch_account_tax_settings(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/accountTaxSettings"
+ yield from self._fetch_single(url)
+
+ def fetch_user_groups(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/userGroups"
+ yield from self._fetch_single(url)
+
+ def fetch_guest_payment_charges(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/guestPayments/charges"
+ yield from self._paginate(url)
+
+ def fetch_coupons(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/coupons"
+ yield from self._fetch_single(url)
+
+ def fetch_webhook_reservations(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/webhooks/reservations"
+ yield from self._fetch_single(url)
+
+ def fetch_tasks(self) -> Iterable[dict]:
+ url = f"{self.BASE_URL}/v1/tasks"
+ yield from self._fetch_single(url)
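
The client can also be exercised on its own; a small sketch (the API key is a placeholder) showing how fetch_listings filters client-side on latestActivityOn between the two bounds:

import pendulum

from ingestr.src.hostaway.client import HostawayClient

client = HostawayClient(api_key="HOSTAWAY_API_KEY")  # placeholder credential

start = pendulum.datetime(2024, 1, 1, tz="UTC")
end = pendulum.now(tz="UTC")

# Pages through /v1/listings and yields only rows whose latestActivityOn
# falls inside [start, end].
for listing in client.fetch_listings(start, end):
    print(listing["id"], listing["latestActivityOn"])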
ingestr/src/socrata_source/__init__.py ADDED
@@ -0,0 +1,83 @@
+ """A source loading data from Socrata open data platform"""
+
+ from typing import Any, Dict, Iterator, Optional
+
+ import dlt
+
+ from .helpers import fetch_data
+
+
+ @dlt.source(name="socrata", max_table_nesting=0)
+ def source(
+ domain: str,
+ dataset_id: str,
+ app_token: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ incremental: Optional[Any] = None,
+ primary_key: Optional[str] = None,
+ write_disposition: Optional[str] = dlt.config.value,
+ ):
+ """
+ A dlt source for the Socrata open data platform.
+
+ Supports both full refresh (replace) and incremental loading (merge).
+
+ Args:
+ domain: The Socrata domain (e.g., "evergreen.data.socrata.com")
+ dataset_id: The dataset identifier (e.g., "6udu-fhnu")
+ app_token: Socrata app token for higher rate limits (recommended)
+ username: Username for authentication (if dataset is private)
+ password: Password for authentication (if dataset is private)
+ incremental: DLT incremental object for incremental loading
+ primary_key: Primary key field for merge operations (default: ":id")
+ write_disposition: Write disposition ("replace", "append", "merge").
+ If not provided, automatically determined based on incremental setting.
+
+ Returns:
+ A dlt source with a single "dataset" resource
+ """
+
+ @dlt.resource(
+ write_disposition=write_disposition or "replace",
+ primary_key=primary_key,  # type: ignore[call-overload]
+ )
+ def dataset(
+ incremental: Optional[dlt.sources.incremental] = incremental,  # type: ignore[type-arg]
+ ) -> Iterator[Dict[str, Any]]:
+ """
+ Yields records from a Socrata dataset.
+
+ Supports both full refresh (replace) and incremental loading (merge).
+ When incremental is provided, filters data using SoQL WHERE clause on the server side.
+
+ Yields:
+ Dict[str, Any]: Individual records from the dataset
+ """
+ fetch_kwargs: Dict[str, Any] = {
+ "domain": domain,
+ "dataset_id": dataset_id,
+ "app_token": app_token,
+ "username": username,
+ "password": password,
+ }
+
+ if incremental and incremental.cursor_path:
+ fetch_kwargs["incremental_key"] = incremental.cursor_path
+ fetch_kwargs["start_value"] = (
+ str(incremental.last_value)
+ if incremental.last_value is not None
+ else None
+ )
+ if getattr(incremental, "end_value", None) is not None:
+ ev = incremental.end_value  # type: ignore[attr-defined]
+ fetch_kwargs["end_value"] = (
+ ev.isoformat()  # type: ignore[union-attr]
+ if hasattr(ev, "isoformat")
+ else str(ev)
+ )
+
+ # Fetch and yield records
+ yield from fetch_data(**fetch_kwargs)
+
+ return (dataset,)
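
A short sketch of incremental loading with this source via dlt; the domain and dataset ID come from the docstring examples, while the pipeline name and duckdb destination are illustrative assumptions:

import dlt

from ingestr.src.socrata_source import source as socrata_source

pipeline = dlt.pipeline(
    pipeline_name="socrata_demo",  # illustrative name
    destination="duckdb",
    dataset_name="open_data",
)

# Merge new rows based on the Socrata system field ":updated_at";
# ":id" is the default primary key mentioned in the docstring.
data = socrata_source(
    domain="evergreen.data.socrata.com",
    dataset_id="6udu-fhnu",
    app_token="MY_APP_TOKEN",  # placeholder token
    incremental=dlt.sources.incremental(
        ":updated_at", initial_value="2024-01-01T00:00:00"
    ),
    primary_key=":id",
    write_disposition="merge",
)

print(pipeline.run(data))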
ingestr/src/socrata_source/helpers.py ADDED
@@ -0,0 +1,85 @@
+ """Socrata API helpers"""
+
+ from typing import Any, Dict, Iterator, Optional
+
+ from dlt.sources.helpers import requests
+
+ from .settings import DEFAULT_PAGE_SIZE, REQUEST_TIMEOUT
+
+
+ def fetch_data(
+ domain: str,
+ dataset_id: str,
+ app_token: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ incremental_key: Optional[str] = None,
+ start_value: Optional[str] = None,
+ end_value: Optional[str] = None,
+ ) -> Iterator[Dict[str, Any]]:
+ """
+ Fetch records from Socrata dataset with pagination and optional filtering.
+
+ Uses offset-based pagination to get all records, not just first 50000.
+ Supports incremental loading via SoQL WHERE clause for server-side filtering.
+
+ Args:
+ domain: Socrata domain (e.g., "data.seattle.gov")
+ dataset_id: Dataset identifier (e.g., "6udu-fhnu")
+ app_token: Socrata app token for higher rate limits
+ username: Username for authentication
+ password: Password for authentication
+ start_value: Minimum value for incremental_key (inclusive)
+ end_value: Maximum value for incremental_key (exclusive)
+
+ Yields:
+ Lists of records (one list per page)
+
+ Raises:
+ requests.HTTPError: If API request fails
+ """
+ url = f"https://{domain}/resource/{dataset_id}.json"
+
+ headers = {"Accept": "application/json"}
+ if app_token:
+ headers["X-App-Token"] = app_token
+
+ auth = (username, password) if username and password else None
+
+ limit = DEFAULT_PAGE_SIZE
+ offset = 0
+
+ while True:
+ params: Dict[str, Any] = {"$limit": limit, "$offset": offset}
+
+ if incremental_key and start_value:
+ start_value_iso = str(start_value).replace(" ", "T")
+ where_conditions = [f"{incremental_key} >= '{start_value_iso}'"]
+
+ if end_value:
+ end_value_iso = str(end_value).replace(" ", "T")
+ where_conditions.append(f"{incremental_key} < '{end_value_iso}'")
+
+ params["$where"] = " AND ".join(where_conditions)
+ params["$order"] = f"{incremental_key} ASC"
+
+ response = requests.get(
+ url,
+ headers=headers,
+ auth=auth,
+ params=params,
+ timeout=REQUEST_TIMEOUT,
+ )
+ response.raise_for_status()
+
+ data = response.json()
+
+ if not data:
+ break
+
+ yield data
+
+ if len(data) < limit:
+ break
+
+ offset += limit
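
fetch_data can likewise be called directly; a minimal sketch (domain, dataset, token, and cursor values are illustrative) showing that each iteration yields one page, a list of up to DEFAULT_PAGE_SIZE record dicts, filtered server-side through the generated SoQL $where clause:

from ingestr.src.socrata_source.helpers import fetch_data

for page in fetch_data(
    domain="data.seattle.gov",
    dataset_id="6udu-fhnu",
    app_token="MY_APP_TOKEN",       # optional, placeholder
    incremental_key=":updated_at",  # becomes the $where / $order column
    start_value="2024-01-01T00:00:00",
):
    print(f"fetched a page of {len(page)} rows")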
ingestr/src/socrata_source/settings.py ADDED
@@ -0,0 +1,8 @@
+ """Socrata API settings and constants"""
+
+ # Request timeout in seconds
+ REQUEST_TIMEOUT = 30
+
+ # Maximum number of records to fetch per page
+ # Socrata API supports up to 50000 records per request
+ DEFAULT_PAGE_SIZE = 50000
ingestr/src/sources.py CHANGED
@@ -4233,3 +4233,183 @@ class CouchbaseSource:
  table_instance.max_table_nesting = 1

  return table_instance
+
+
+ class SocrataSource:
+ def handles_incrementality(self) -> bool:
+ return False
+
+ def dlt_source(self, uri: str, table: str, **kwargs):
+ """
+ Creates a DLT source for Socrata open data platform.
+
+ URI format: socrata://domain?app_token=TOKEN
+ Table: dataset_id (e.g., "6udu-fhnu")
+
+ Args:
+ uri: Socrata connection URI with domain and optional auth params
+ table: Dataset ID (e.g., "6udu-fhnu")
+ **kwargs: Additional arguments:
+ - incremental_key: Field to use for incremental loading (e.g., ":updated_at")
+ - interval_start: Start date for initial load
+ - interval_end: End date for load
+ - primary_key: Primary key field for merge operations
+
+ Returns:
+ DltResource for the Socrata dataset
+ """
+ from urllib.parse import parse_qs, urlparse
+
+ parsed = urlparse(uri)
+
+ domain = parsed.netloc
+ if not domain:
+ raise ValueError(
+ "Domain must be provided in the URI.\n"
+ "Format: socrata://domain?app_token=TOKEN\n"
+ "Example: socrata://evergreen.data.socrata.com?app_token=mytoken"
+ )
+
+ query_params = parse_qs(parsed.query)
+
+ dataset_id = table
+ if not dataset_id:
+ raise ValueError(
+ "Dataset ID must be provided as the table parameter.\n"
+ "Example: --source-table 6udu-fhnu"
+ )
+
+ app_token = query_params.get("app_token", [None])[0]
+ username = query_params.get("username", [None])[0]
+ password = query_params.get("password", [None])[0]
+
+ incremental = None
+ if kwargs.get("incremental_key"):
+ start_value = kwargs.get("interval_start")
+ end_value = kwargs.get("interval_end")
+
+ if start_value:
+ start_value = (
+ start_value.isoformat()
+ if hasattr(start_value, "isoformat")
+ else str(start_value)
+ )
+
+ if end_value:
+ end_value = (
+ end_value.isoformat()
+ if hasattr(end_value, "isoformat")
+ else str(end_value)
+ )
+
+ incremental = dlt_incremental(
+ kwargs.get("incremental_key", ""),
+ initial_value=start_value,
+ end_value=end_value,
+ range_end="open",
+ range_start="closed",
+ )
+
+ primary_key = kwargs.get("primary_key")
+
+ from ingestr.src.socrata_source import source
+
+ return source(
+ domain=domain,
+ dataset_id=dataset_id,
+ app_token=app_token,
+ username=username,
+ password=password,
+ incremental=incremental,
+ primary_key=primary_key,
+ ).with_resources("dataset")
+
+
+ class HostawaySource:
+ def handles_incrementality(self) -> bool:
+ return True
+
+ def dlt_source(self, uri: str, table: str, **kwargs):
+ if kwargs.get("incremental_key"):
+ raise ValueError(
+ "Hostaway takes care of incrementality on its own, you should not provide incremental_key"
+ )
+
+ source_parts = urlparse(uri)
+ source_params = parse_qs(source_parts.query)
+ api_key = source_params.get("api_key")
+
+ if not api_key:
+ raise ValueError("api_key in the URI is required to connect to Hostaway")
+
+ match table:
+ case "listings":
+ resource_name = "listings"
+ case "listing_fee_settings":
+ resource_name = "listing_fee_settings"
+ case "listing_agreements":
+ resource_name = "listing_agreements"
+ case "listing_pricing_settings":
+ resource_name = "listing_pricing_settings"
+ case "cancellation_policies":
+ resource_name = "cancellation_policies"
+ case "cancellation_policies_airbnb":
+ resource_name = "cancellation_policies_airbnb"
+ case "cancellation_policies_marriott":
+ resource_name = "cancellation_policies_marriott"
+ case "cancellation_policies_vrbo":
+ resource_name = "cancellation_policies_vrbo"
+ case "reservations":
+ resource_name = "reservations"
+ case "finance_fields":
+ resource_name = "finance_fields"
+ case "reservation_payment_methods":
+ resource_name = "reservation_payment_methods"
+ case "reservation_rental_agreements":
+ resource_name = "reservation_rental_agreements"
+ case "listing_calendars":
+ resource_name = "listing_calendars"
+ case "conversations":
+ resource_name = "conversations"
+ case "message_templates":
+ resource_name = "message_templates"
+ case "bed_types":
+ resource_name = "bed_types"
+ case "property_types":
+ resource_name = "property_types"
+ case "countries":
+ resource_name = "countries"
+ case "account_tax_settings":
+ resource_name = "account_tax_settings"
+ case "user_groups":
+ resource_name = "user_groups"
+ case "guest_payment_charges":
+ resource_name = "guest_payment_charges"
+ case "coupons":
+ resource_name = "coupons"
+ case "webhook_reservations":
+ resource_name = "webhook_reservations"
+ case "tasks":
+ resource_name = "tasks"
+ case _:
+ raise ValueError(
+ f"Resource '{table}' is not supported for Hostaway source yet, if you are interested in it please create a GitHub issue at https://github.com/bruin-data/ingestr"
+ )
+
+ start_date = kwargs.get("interval_start")
+ if start_date:
+ start_date = ensure_pendulum_datetime(start_date).in_timezone("UTC")
+ else:
+ start_date = pendulum.datetime(1970, 1, 1).in_timezone("UTC")
+
+ end_date = kwargs.get("interval_end")
+ if end_date:
+ end_date = ensure_pendulum_datetime(end_date).in_timezone("UTC")
+
+ from ingestr.src.hostaway import hostaway_source
+
+ return hostaway_source(
+ api_key=api_key[0],
+ start_date=start_date,
+ end_date=end_date,
+ ).with_resources(resource_name)
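
At the factory level, the two new wrappers parse their connection URIs as documented above; a hedged sketch of constructing them directly (tokens and keys are placeholders, and the resulting sources would normally be handed to a dlt pipeline by ingestr itself):

from ingestr.src.sources import HostawaySource, SocrataSource

# Socrata: domain in the URI, dataset ID as the table, optional incremental key.
socrata = SocrataSource().dlt_source(
    uri="socrata://evergreen.data.socrata.com?app_token=MY_TOKEN",
    table="6udu-fhnu",
    incremental_key=":updated_at",
)

# Hostaway: api_key as a URI query parameter, resource name as the table.
hostaway = HostawaySource().dlt_source(
    uri="hostaway://?api_key=MY_API_KEY",
    table="listings",
)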
ingestr-0.14.97.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ingestr
- Version: 0.14.94
+ Version: 0.14.97
  Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
  Project-URL: Homepage, https://github.com/bruin-data/ingestr
  Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
ingestr-0.14.97.dist-info/RECORD CHANGED
@@ -2,17 +2,17 @@ ingestr/conftest.py,sha256=OE2yxeTCosS9CUFVuqNypm-2ftYvVBeeq7egm3878cI,1981
  ingestr/main.py,sha256=qo0g3wCFl8a_1jUwXagX8L1Q8PKKQlTF7md9pfnzW0Y,27155
  ingestr/src/.gitignore,sha256=8cX1AZTSI0TcdZFGTmS_oyBjpfCzhOEt0DdAo2dFIY8,203
  ingestr/src/blob.py,sha256=UUWMjHUuoR9xP1XZQ6UANQmnMVyDx3d0X4-2FQC271I,2138
- ingestr/src/buildinfo.py,sha256=CcaXu7ayEOiHaFuTFCgGdFP5zqt7HZWvn7bvO_OyRhk,21
+ ingestr/src/buildinfo.py,sha256=9YBWD_D5PtKLUdQnBqW9Onk5dyG9HVr8nLFni4TM9S8,21
  ingestr/src/destinations.py,sha256=QtjE0AGs0WkPHaI2snWPHJ8HHi4lwXUBYLJPklz8Mvk,27772
  ingestr/src/errors.py,sha256=fhJ2BxOqOsBfOxuTDKfZblvawBrPG3x_1VikIxMZBRI,874
- ingestr/src/factory.py,sha256=WBc2y5N-9HH3WOTUSe7sDEftarScDAipka7CSW-A4L4,7829
+ ingestr/src/factory.py,sha256=j8vVlaE737p2MBn-b7JuR8OZP1On6uB3_kNbJkgS344,7938
  ingestr/src/filters.py,sha256=0n0sNAVG_f-B_1r7lW5iNtw9z_G1bxWzPaiL1i6tnbU,1665
  ingestr/src/http_client.py,sha256=bxqsk6nJNXCo-79gW04B53DQO-yr25vaSsqP0AKtjx4,732
  ingestr/src/loader.py,sha256=9NaWAyfkXdqAZSS-N72Iwo36Lbx4PyqIfaaH1dNdkFs,1712
  ingestr/src/masking.py,sha256=VN0LdfvExhQ1bZMRylGtaBUIoH-vjuIUmRnYKwo3yiY,11358
  ingestr/src/partition.py,sha256=BrIP6wFJvyR7Nus_3ElnfxknUXeCipK_E_bB8kZowfc,969
  ingestr/src/resource.py,sha256=ZqmZxFQVGlF8rFPhBiUB08HES0yoTj8sZ--jKfaaVps,1164
- ingestr/src/sources.py,sha256=XEimy9ph8QDW8vjNocjovxM6cdrUkIPmFGZ9eLXZWVk,148842
+ ingestr/src/sources.py,sha256=KM1Y4eZtdjTGLFfVmIHXAGXWYQrYGjcBEslVZd7IX80,155470
  ingestr/src/table_definition.py,sha256=REbAbqdlmUMUuRh8nEQRreWjPVOQ5ZcfqGkScKdCrmk,390
  ingestr/src/time.py,sha256=H_Fk2J4ShXyUM-EMY7MqCLZQhlnZMZvO952bmZPc4yE,254
  ingestr/src/version.py,sha256=J_2xgZ0mKlvuHcjdKCx2nlioneLH0I47JiU_Slr_Nwc,189
@@ -86,6 +86,8 @@ ingestr/src/google_sheets/helpers/api_calls.py,sha256=RiVfdacbaneszhmuhYilkJnkc9
  ingestr/src/google_sheets/helpers/data_processing.py,sha256=RNt2MYfdJhk4bRahnQVezpNg2x9z0vx60YFq2ukZ8vI,11004
  ingestr/src/gorgias/__init__.py,sha256=_mFkMYwlY5OKEY0o_FK1OKol03A-8uk7bm1cKlmt5cs,21432
  ingestr/src/gorgias/helpers.py,sha256=DamuijnvhGY9hysQO4txrVMf4izkGbh5qfBKImdOINE,5427
+ ingestr/src/hostaway/__init__.py,sha256=sq7qG5J4XcyoYoHBSgAszYPByN9bMLzWjhSmvzJuTeI,8887
+ ingestr/src/hostaway/client.py,sha256=omzoT4gPQ_nvMWDcm7-bm2AyFwwRDgV8D1sI0gkkydw,10452
  ingestr/src/http/__init__.py,sha256=Y9mQIE0RolHOh6dPjW41qzYXSG8BC0GPKxEtz2CJGpU,902
  ingestr/src/http/readers.py,sha256=rgBwYG5SOQ7P2uzBAFMOQIevKxV51ZW41VSiRTZ0Xvo,3863
  ingestr/src/hubspot/__init__.py,sha256=FCqjLeOjijdc9JC_NoDwtRqy3FDyY-szDi6UV7CdDN0,11548
@@ -154,6 +156,9 @@ ingestr/src/slack/__init__.py,sha256=pyDukxcilqTAe_bBzfWJ8Vxi83S-XEdEFBH2pEgILrM
  ingestr/src/slack/helpers.py,sha256=08TLK7vhFvH_uekdLVOLF3bTDe1zgH0QxHObXHzk1a8,6545
  ingestr/src/slack/settings.py,sha256=NhKn4y1zokEa5EmIZ05wtj_-I0GOASXZ5V81M1zXCtY,457
  ingestr/src/smartsheets/__init__.py,sha256=RIEfN1T2TMFg8T0RvN4o6sqC58YusJRDrmE9Isos5P4,2375
+ ingestr/src/socrata_source/__init__.py,sha256=K5DVpsVXTMfunZd5YoEsn1nipfo1zavFS59g3m2tsc8,2984
+ ingestr/src/socrata_source/helpers.py,sha256=KbVojFSmMLXb0ajh8bhqfZfxDHH7rQ3nyI8p2jxVifA,2500
+ ingestr/src/socrata_source/settings.py,sha256=DLfu-4HOa5nR7h9tbOySEa2ye3w_Z6TYZ9_zPqWaNQk,220
  ingestr/src/solidgate/__init__.py,sha256=Ts83j-JSnFsFuF4tDhVOfZKg7H0-bIpfn3kg1ZOR58A,8003
  ingestr/src/solidgate/helpers.py,sha256=mAsW_1hpD7ab3Y2vw8fxHi4yD3aT1geLdIYZ7ycyxBc,5690
  ingestr/src/sql_database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -186,8 +191,8 @@ ingestr/testdata/merge_expected.csv,sha256=DReHqWGnQMsf2PBv_Q2pfjsgvikYFnf1zYcQZ
  ingestr/testdata/merge_part1.csv,sha256=Pw8Z9IDKcNU0qQHx1z6BUf4rF_-SxKGFOvymCt4OY9I,185
  ingestr/testdata/merge_part2.csv,sha256=T_GiWxA81SN63_tMOIuemcvboEFeAmbKc7xRXvL9esw,287
  ingestr/tests/unit/test_smartsheets.py,sha256=zf3DXT29Y4TH2lNPBFphdjlaelUUyPJcsW2UO68RzDs,4862
- ingestr-0.14.94.dist-info/METADATA,sha256=B9NePP8xKNVs7YZ6hZV6gDfTGBxdRwxs7aMd5xPL6As,15359
- ingestr-0.14.94.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ingestr-0.14.94.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
- ingestr-0.14.94.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
- ingestr-0.14.94.dist-info/RECORD,,
+ ingestr-0.14.97.dist-info/METADATA,sha256=XtW1-uOtM8YkyoK59fjqCsF3ZAQ8hBPXY-hd0pljoag,15359
+ ingestr-0.14.97.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ingestr-0.14.97.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
+ ingestr-0.14.97.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
+ ingestr-0.14.97.dist-info/RECORD,,