ingestr 0.13.53__py3-none-any.whl → 0.13.54__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ingestr/src/buildinfo.py +1 -1
- ingestr/src/destinations.py +1 -4
- ingestr/src/factory.py +4 -0
- ingestr/src/mixpanel/__init__.py +62 -0
- ingestr/src/mixpanel/client.py +99 -0
- ingestr/src/quickbooks/__init__.py +117 -0
- ingestr/src/sources.py +122 -3
- {ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/METADATA +8 -1
- {ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/RECORD +12 -9
- {ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/WHEEL +0 -0
- {ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/entry_points.txt +0 -0
- {ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/licenses/LICENSE.md +0 -0
ingestr/src/buildinfo.py
CHANGED
@@ -1 +1 @@
-version = "v0.13.53"
+version = "v0.13.54"
ingestr/src/destinations.py
CHANGED
@@ -476,7 +476,7 @@ class SqliteDestination(GenericSqlDestination):
 
     def dlt_run_params(self, uri: str, table: str, **kwargs):
         return {
-            #https://dlthub.com/docs/dlt-ecosystem/destinations/sqlalchemy#dataset-files
+            # https://dlthub.com/docs/dlt-ecosystem/destinations/sqlalchemy#dataset-files
             "dataset_name": "main",
             "table_name": table,
         }
@@ -495,6 +495,3 @@ class MySqlDestination(GenericSqlDestination):
             "dataset_name": database,
             "table_name": table,
         }
-
-
-
ingestr/src/factory.py
CHANGED
@@ -47,11 +47,13 @@ from ingestr.src.sources import (
     KlaviyoSource,
     LinkedInAdsSource,
     LocalCsvSource,
+    MixpanelSource,
     MongoDbSource,
     NotionSource,
     PersonioSource,
     PhantombusterSource,
     PipedriveSource,
+    QuickBooksSource,
     S3Source,
     SalesforceSource,
     SFTPSource,
@@ -140,6 +142,7 @@ class SourceDestinationFactory:
         "hubspot": HubspotSource,
         "airtable": AirtableSource,
         "klaviyo": KlaviyoSource,
+        "mixpanel": MixpanelSource,
         "appsflyer": AppsflyerSource,
         "kafka": KafkaSource,
         "adjust": AdjustSource,
@@ -166,6 +169,7 @@ class SourceDestinationFactory:
         "elasticsearch": ElasticsearchSource,
         "attio": AttioSource,
         "solidgate": SolidgateSource,
+        "quickbooks": QuickBooksSource,
         "smartsheet": SmartsheetSource,
         "sftp": SFTPSource,
     }
ingestr/src/mixpanel/__init__.py
ADDED
@@ -0,0 +1,62 @@
+from typing import Iterable
+
+import dlt
+import pendulum
+from dlt.common.typing import TDataItem
+from dlt.sources import DltResource
+
+from .client import MixpanelClient
+
+
+@dlt.source(max_table_nesting=0)
+def mixpanel_source(
+    username: str,
+    password: str,
+    project_id: str,
+    server: str,
+    start_date: pendulum.DateTime,
+    end_date: pendulum.DateTime | None = None,
+) -> Iterable[DltResource]:
+    client = MixpanelClient(username, password, project_id, server)
+
+    @dlt.resource(write_disposition="merge", name="events", primary_key="distinct_id")
+    def events(
+        date=dlt.sources.incremental(
+            "time",
+            initial_value=start_date.int_timestamp,
+            end_value=end_date.int_timestamp if end_date else None,
+            range_end="closed",
+            range_start="closed",
+        ),
+    ) -> Iterable[TDataItem]:
+        if date.end_value is None:
+            end_dt = pendulum.now(tz="UTC")
+        else:
+            end_dt = pendulum.from_timestamp(date.end_value)
+
+        start_dt = pendulum.from_timestamp(date.last_value)
+
+        yield from client.fetch_events(
+            start_dt,
+            end_dt,
+        )
+
+    @dlt.resource(write_disposition="merge", primary_key="distinct_id", name="profiles")
+    def profiles(
+        last_seen=dlt.sources.incremental(
+            "last_seen",
+            initial_value=start_date,
+            end_value=end_date,
+            range_end="closed",
+            range_start="closed",
+        ),
+    ) -> Iterable[TDataItem]:
+        if last_seen.end_value is None:
+            end_dt = pendulum.now(tz="UTC")
+        else:
+            end_dt = last_seen.end_value
+
+        start_dt = last_seen.last_value
+        yield from client.fetch_profiles(start_dt, end_dt)
+
+    return events, profiles
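For orientation, here is a minimal sketch of consuming the new source directly with a dlt pipeline. The credentials, project id, destination, and dataset name are placeholders; in normal use ingestr builds this call from a mixpanel:// source URI.

import dlt
import pendulum

from ingestr.src.mixpanel import mixpanel_source

# Placeholder credentials and project id; a real run needs a Mixpanel service account.
source = mixpanel_source(
    username="service-account-user",
    password="service-account-secret",
    project_id="12345",
    server="eu",
    start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
)

pipeline = dlt.pipeline(
    pipeline_name="mixpanel_example",
    destination="duckdb",
    dataset_name="mixpanel",
)
pipeline.run(source.with_resources("events"))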
ingestr/src/mixpanel/client.py
ADDED
@@ -0,0 +1,99 @@
+import json
+from typing import Iterable
+
+import pendulum
+from dlt.sources.helpers.requests import Client
+
+
+class MixpanelClient:
+    def __init__(self, username: str, password: str, project_id: str, server: str):
+        self.username = username
+        self.password = password
+        self.project_id = project_id
+        self.server = server
+        self.session = Client(raise_for_status=False).session
+
+    def fetch_events(
+        self, start_date: pendulum.DateTime, end_date: pendulum.DateTime
+    ) -> Iterable[dict]:
+        if self.server == "us":
+            server = "data"
+        elif self.server == "in":
+            server = "data-in"
+        else:
+            server = "data-eu"
+
+        url = f"https://{server}.mixpanel.com/api/2.0/export/"
+        params = {
+            "project_id": self.project_id,
+            "from_date": start_date.format("YYYY-MM-DD"),
+            "to_date": end_date.format("YYYY-MM-DD"),
+        }
+        headers = {
+            "accept": "text/plain",
+        }
+        from requests.auth import HTTPBasicAuth
+
+        auth = HTTPBasicAuth(self.username, self.password)
+        resp = self.session.get(url, params=params, headers=headers, auth=auth)
+        resp.raise_for_status()
+        for line in resp.iter_lines():
+            if line:
+                data = json.loads(line.decode())
+                if "properties" in data:
+                    for key, value in data["properties"].items():
+                        if key.startswith("$"):
+                            data[key[1:]] = value
+                        else:
+                            data[key] = value
+                    del data["properties"]
+                yield data
+
+    def fetch_profiles(
+        self, start_date: pendulum.DateTime, end_date: pendulum.DateTime
+    ) -> Iterable[dict]:
+        if self.server == "us":
+            server = ""
+        elif self.server == "in":
+            server = "in."
+        else:
+            server = "eu."
+        url = f"https://{server}mixpanel.com/api/query/engage"
+        headers = {
+            "accept": "application/json",
+            "content-type": "application/x-www-form-urlencoded",
+        }
+        from requests.auth import HTTPBasicAuth
+
+        auth = HTTPBasicAuth(self.username, self.password)
+        page = 0
+        session_id = None
+        while True:
+            params = {"project_id": self.project_id, "page": str(page)}
+            if session_id:
+                params["session_id"] = session_id
+            start_str = start_date.format("YYYY-MM-DDTHH:mm:ss")
+            end_str = end_date.format("YYYY-MM-DDTHH:mm:ss")
+            where = f'properties["$last_seen"] >= "{start_str}" and properties["$last_seen"] <= "{end_str}"'
+            params["where"] = where
+            resp = self.session.post(url, params=params, headers=headers, auth=auth)
+
+            resp.raise_for_status()
+            data = resp.json()
+
+            for result in data.get("results", []):
+                for key, value in result["$properties"].items():
+                    if key.startswith("$"):
+                        if key == "$last_seen":
+                            result["last_seen"] = pendulum.parse(value)
+                        else:
+                            result[key[1:]] = value
+                result["distinct_id"] = result["$distinct_id"]
+                del result["$properties"]
+                del result["$distinct_id"]
+                yield result
+            if not data.get("results"):
+                break
+            session_id = data.get("session_id", session_id)
+
+            page += 1
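To make the event reshaping concrete, here is a small self-contained sketch of the property flattening that fetch_events applies to every exported line; the sample payload is invented.

import json

# One line of Mixpanel export output (invented sample data).
raw = '{"event": "signup", "properties": {"$city": "Berlin", "plan": "pro", "time": 1718000000, "distinct_id": "u-1"}}'

data = json.loads(raw)
# Same flattening as MixpanelClient.fetch_events: lift every property to the top
# level and strip the leading "$" from Mixpanel's reserved keys.
for key, value in data["properties"].items():
    if key.startswith("$"):
        data[key[1:]] = value
    else:
        data[key] = value
del data["properties"]

print(data)
# {'event': 'signup', 'city': 'Berlin', 'plan': 'pro', 'time': 1718000000, 'distinct_id': 'u-1'}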
ingestr/src/quickbooks/__init__.py
ADDED
@@ -0,0 +1,117 @@
+"""QuickBooks source built on top of python-quickbooks."""
+
+from typing import Iterable, Iterator, List, Optional
+
+import dlt
+import pendulum
+from dlt.common.time import ensure_pendulum_datetime
+from dlt.common.typing import TDataItem
+from dlt.sources import DltResource
+from intuitlib.client import AuthClient  # type: ignore
+
+from quickbooks import QuickBooks  # type: ignore
+
+
+@dlt.source(name="quickbooks", max_table_nesting=0)
+def quickbooks_source(
+    company_id: str,
+    start_date: pendulum.DateTime,
+    object: str,
+    end_date: pendulum.DateTime | None,
+    client_id: str,
+    client_secret: str,
+    refresh_token: str,
+    environment: str = "production",
+    minor_version: Optional[str] = None,
+) -> Iterable[DltResource]:
+    """Create dlt resources for QuickBooks objects.
+
+    Parameters
+    ----------
+    company_id: str
+        QuickBooks company id (realm id).
+    client_id: str
+        OAuth client id.
+    client_secret: str
+        OAuth client secret.
+    refresh_token: str
+        OAuth refresh token.
+    access_token: Optional[str]
+        Optional access token. If not provided the library will refresh using the
+        provided refresh token.
+    environment: str
+        Either ``"production"`` or ``"sandbox"``.
+    minor_version: Optional[int]
+        QuickBooks API minor version if needed.
+    """
+
+    auth_client = AuthClient(
+        client_id=client_id,
+        client_secret=client_secret,
+        environment=environment,
+        # redirect_uri is not used since we authenticate using refresh token which skips the step of redirect callback.
+        # as redirect_uri is required param, we are passing empty string.
+        redirect_uri="",
+    )
+
+    # https://help.developer.intuit.com/s/article/Validity-of-Refresh-Token
+    client = QuickBooks(
+        auth_client=auth_client,
+        refresh_token=refresh_token,
+        company_id=company_id,
+        minorversion=minor_version,
+    )
+
+    def fetch_object(
+        obj_name: str,
+        updated_at: dlt.sources.incremental[str] = dlt.sources.incremental(
+            "lastupdatedtime",
+            initial_value=start_date,  # type: ignore
+            end_value=end_date,  # type: ignore
+            range_start="closed",
+            range_end="closed",
+            allow_external_schedulers=True,
+        ),
+    ) -> Iterator[List[TDataItem]]:
+        start_pos = 1
+
+        end_dt = updated_at.end_value or pendulum.now(tz="UTC")
+        start_dt = ensure_pendulum_datetime(str(updated_at.last_value)).in_tz("UTC")
+
+        start_str = start_dt.isoformat()
+        end_str = end_dt.isoformat()
+
+        where_clause = f"WHERE MetaData.LastUpdatedTime >= '{start_str}' AND MetaData.LastUpdatedTime < '{end_str}'"
+        while True:
+            query = (
+                f"SELECT * FROM {obj_name} {where_clause} "
+                f"ORDERBY MetaData.LastUpdatedTime ASC STARTPOSITION {start_pos} MAXRESULTS 1000"
+            )
+
+            result = client.query(query)
+
+            items = result.get("QueryResponse", {}).get(obj_name.capitalize(), [])
+            if not items:
+                break
+
+            for item in items:
+                if item.get("MetaData") and item["MetaData"].get("LastUpdatedTime"):
+                    item["lastupdatedtime"] = ensure_pendulum_datetime(
+                        item["MetaData"]["LastUpdatedTime"]
+                    )
+                item["id"] = item["Id"]
+                del item["Id"]
+
+                yield item
+
+            if len(items) < 1000:
+                break
+
+            start_pos += 1000
+
+    yield dlt.resource(
+        fetch_object,
+        name=object.lower(),
+        write_disposition="merge",
+        primary_key="id",
+    )(object)
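As with the Mixpanel source, a hedged sketch of calling quickbooks_source directly with a dlt pipeline; every id and secret below is a placeholder, and the resource name follows object.lower().

import dlt
import pendulum

from ingestr.src.quickbooks import quickbooks_source

# Placeholder OAuth credentials and realm id.
source = quickbooks_source(
    company_id="1234567890",
    client_id="client-id",
    client_secret="client-secret",
    refresh_token="refresh-token",
    environment="sandbox",
    object="invoice",
    start_date=pendulum.datetime(2025, 1, 1, tz="UTC"),
    end_date=None,
)

pipeline = dlt.pipeline(
    pipeline_name="quickbooks_example",
    destination="duckdb",
    dataset_name="quickbooks",
)
pipeline.run(source.with_resources("invoice"))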
ingestr/src/sources.py
CHANGED
@@ -79,7 +79,7 @@ class SqlSource:
         # clickhouse://<username>:<password>@<host>:<port>?secure=<secure>
         if uri.startswith("clickhouse://"):
             parsed_uri = urlparse(uri)
-            
+
             query_params = parse_qs(parsed_uri.query)
 
             if "http_port" in query_params:
@@ -691,8 +691,6 @@ class StripeAnalyticsSource:
                 endpoint,
             ],
             stripe_secret_key=api_key[0],
-            start_date=kwargs.get("interval_start", None),
-            end_date=kwargs.get("interval_end", None),
         ).with_resources(endpoint)
 
         elif table in INCREMENTAL_ENDPOINTS:
@@ -965,6 +963,57 @@ class KlaviyoSource:
         ).with_resources(resource)
 
 
+class MixpanelSource:
+    def handles_incrementality(self) -> bool:
+        return True
+
+    def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Mixpanel takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
+        parsed = urlparse(uri)
+        params = parse_qs(parsed.query)
+        username = params.get("username")
+        password = params.get("password")
+        project_id = params.get("project_id")
+        server = params.get("server", ["eu"])
+
+        if not username or not password or not project_id:
+            raise ValueError(
+                "username, password, project_id are required to connect to Mixpanel"
+            )
+
+        if table not in ["events", "profiles"]:
+            raise ValueError(
+                f"Resource '{table}' is not supported for Mixpanel source yet, if you are interested in it please create a GitHub issue at https://github.com/bruin-data/ingestr"
+            )
+
+        start_date = kwargs.get("interval_start")
+        if start_date:
+            start_date = ensure_pendulum_datetime(start_date).in_timezone("UTC")
+        else:
+            start_date = pendulum.datetime(2020, 1, 1).in_timezone("UTC")
+
+        end_date = kwargs.get("interval_end")
+        if end_date:
+            end_date = ensure_pendulum_datetime(end_date).in_timezone("UTC")
+        else:
+            end_date = pendulum.now().in_timezone("UTC")
+
+        from ingestr.src.mixpanel import mixpanel_source
+
+        return mixpanel_source(
+            username=username[0],
+            password=password[0],
+            project_id=project_id[0],
+            start_date=start_date,
+            end_date=end_date,
+            server=server[0],
+        ).with_resources(table)
+
+
 class KafkaSource:
     def handles_incrementality(self) -> bool:
         return False
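The [0] indexing in MixpanelSource.dlt_source follows from parse_qs returning a list of values per query key, as this small sketch with a hypothetical mixpanel:// URI shows.

from urllib.parse import parse_qs, urlparse

# Hypothetical URI of the shape MixpanelSource.dlt_source expects.
uri = "mixpanel://?username=sa-user&password=sa-secret&project_id=12345&server=eu"

params = parse_qs(urlparse(uri).query)
# Every value comes back as a list, e.g. {"username": ["sa-user"], ...}.
print(params["username"][0], params["project_id"][0], params.get("server", ["eu"])[0])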
@@ -2536,3 +2585,73 @@ class SFTPSource:
 
         dlt_source_resource = readers(bucket_url, fs, file_glob)
         return dlt_source_resource.with_resources(endpoint)
+
+
+class QuickBooksSource:
+    def handles_incrementality(self) -> bool:
+        return True
+
+    # quickbooks://?company_id=<company_id>&client_id=<client_id>&client_secret=<client_secret>&refresh_token=<refresh>&access_token=<access_token>&environment=<env>&minor_version=<version>
+    def dlt_source(self, uri: str, table: str, **kwargs):
+        parsed_uri = urlparse(uri)
+
+        params = parse_qs(parsed_uri.query)
+        company_id = params.get("company_id")
+        client_id = params.get("client_id")
+        client_secret = params.get("client_secret")
+        refresh_token = params.get("refresh_token")
+        environment = params.get("environment", ["production"])
+        minor_version = params.get("minor_version", [None])
+
+        if not client_id or not client_id[0].strip():
+            raise MissingValueError("client_id", "QuickBooks")
+
+        if not client_secret or not client_secret[0].strip():
+            raise MissingValueError("client_secret", "QuickBooks")
+
+        if not refresh_token or not refresh_token[0].strip():
+            raise MissingValueError("refresh_token", "QuickBooks")
+
+        if not company_id or not company_id[0].strip():
+            raise MissingValueError("company_id", "QuickBooks")
+
+        if environment[0] not in ["production", "sandbox"]:
+            raise ValueError(
+                "Invalid environment. Must be either 'production' or 'sandbox'."
+            )
+
+        from ingestr.src.quickbooks import quickbooks_source
+
+        table_name = table.replace(" ", "")
+        table_mapping = {
+            "customers": "customer",
+            "invoices": "invoice",
+            "accounts": "account",
+            "vendors": "vendor",
+            "payments": "payment",
+        }
+        if table_name in table_mapping:
+            table_name = table_mapping[table_name]
+
+        start_date = kwargs.get("interval_start")
+        if start_date is None:
+            start_date = ensure_pendulum_datetime("2025-01-01").in_tz("UTC")
+        else:
+            start_date = ensure_pendulum_datetime(start_date).in_tz("UTC")
+
+        end_date = kwargs.get("interval_end")
+
+        if end_date is not None:
+            end_date = ensure_pendulum_datetime(end_date).in_tz("UTC")
+
+        return quickbooks_source(
+            company_id=company_id[0],
+            start_date=start_date,
+            end_date=end_date,
+            client_id=client_id[0],
+            client_secret=client_secret[0],
+            refresh_token=refresh_token[0],
+            environment=environment[0],
+            minor_version=minor_version[0],
+            object=table_name,
+        ).with_resources(table_name)
{ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ingestr
-Version: 0.13.53
+Version: 0.13.54
 Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
 Project-URL: Homepage, https://github.com/bruin-data/ingestr
 Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
@@ -52,14 +52,17 @@ Requires-Dist: dlt==1.10.0
 Requires-Dist: dnspython==2.7.0
 Requires-Dist: duckdb-engine==0.17.0
 Requires-Dist: duckdb==1.2.1
+Requires-Dist: ecdsa==0.19.1
 Requires-Dist: elastic-transport==8.17.1
 Requires-Dist: elasticsearch==8.10.1
+Requires-Dist: enum-compat==0.0.3
 Requires-Dist: et-xmlfile==2.0.0
 Requires-Dist: facebook-business==20.0.0
 Requires-Dist: filelock==3.17.0
 Requires-Dist: flatten-json==0.1.14
 Requires-Dist: frozenlist==1.5.0
 Requires-Dist: fsspec==2025.3.2
+Requires-Dist: future==1.0.0
 Requires-Dist: gcsfs==2025.3.2
 Requires-Dist: geojson==3.2.0
 Requires-Dist: gitdb==4.0.12
@@ -93,6 +96,7 @@ Requires-Dist: ibm-db-sa==0.4.1
 Requires-Dist: ibm-db==3.2.6
 Requires-Dist: idna==3.10
 Requires-Dist: inflection==0.5.1
+Requires-Dist: intuit-oauth==1.2.4
 Requires-Dist: isodate==0.7.2
 Requires-Dist: jmespath==1.0.1
 Requires-Dist: jsonpath-ng==1.7.0
@@ -147,8 +151,11 @@ Requires-Dist: pyparsing==3.2.1
 Requires-Dist: pyrate-limiter==3.7.0
 Requires-Dist: python-dateutil==2.9.0.post0
 Requires-Dist: python-dotenv==1.0.1
+Requires-Dist: python-jose==3.5.0
+Requires-Dist: python-quickbooks==0.9.2
 Requires-Dist: pytz==2025.1
 Requires-Dist: pyyaml==6.0.2
+Requires-Dist: rauth==0.7.3
 Requires-Dist: redshift-connector==2.1.5
 Requires-Dist: requests-file==2.1.0
 Requires-Dist: requests-oauthlib==1.3.1
{ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/RECORD
CHANGED
@@ -2,16 +2,16 @@ ingestr/conftest.py,sha256=Q03FIJIZpLBbpj55cfCHIKEjc1FCvWJhMF2cidUJKQU,1748
 ingestr/main.py,sha256=GkC1hdq8AVGrvolc95zMfjmibI95p2pmFkbgCOVf-Og,26311
 ingestr/src/.gitignore,sha256=8cX1AZTSI0TcdZFGTmS_oyBjpfCzhOEt0DdAo2dFIY8,203
 ingestr/src/blob.py,sha256=onMe5ZHxPXTdcB_s2oGNdMo-XQJ3ajwOsWE9eSTGFmc,1495
-ingestr/src/buildinfo.py,sha256=
-ingestr/src/destinations.py,sha256=
+ingestr/src/buildinfo.py,sha256=5aqjzKmdujvelIHbUBjqDKLXcEB2hEdLvQ6BJyTyZ1Q,21
+ingestr/src/destinations.py,sha256=TcxM2rcwHfgMMP6U0yRNcfWKlEzkBbZbqCIDww7lkTY,16882
 ingestr/src/errors.py,sha256=Ufs4_DfE77_E3vnA1fOQdi6cmuLVNm7_SbFLkL1XPGk,686
-ingestr/src/factory.py,sha256=
+ingestr/src/factory.py,sha256=mcjgbmrZr6TvP9fCMQxo-aMGcrb2PqToRcSLp5nldww,6138
 ingestr/src/filters.py,sha256=LLecXe9QkLFkFLUZ92OXNdcANr1a8edDxrflc2ko_KA,1452
 ingestr/src/http_client.py,sha256=bxqsk6nJNXCo-79gW04B53DQO-yr25vaSsqP0AKtjx4,732
 ingestr/src/loader.py,sha256=9NaWAyfkXdqAZSS-N72Iwo36Lbx4PyqIfaaH1dNdkFs,1712
 ingestr/src/partition.py,sha256=BrIP6wFJvyR7Nus_3ElnfxknUXeCipK_E_bB8kZowfc,969
 ingestr/src/resource.py,sha256=ZqmZxFQVGlF8rFPhBiUB08HES0yoTj8sZ--jKfaaVps,1164
-ingestr/src/sources.py,sha256=
+ingestr/src/sources.py,sha256=Nwx66jQwqjovLomZIVwIVNLSBy8KgqGhu4-spPHp7ZM,91785
 ingestr/src/table_definition.py,sha256=REbAbqdlmUMUuRh8nEQRreWjPVOQ5ZcfqGkScKdCrmk,390
 ingestr/src/time.py,sha256=H_Fk2J4ShXyUM-EMY7MqCLZQhlnZMZvO952bmZPc4yE,254
 ingestr/src/version.py,sha256=J_2xgZ0mKlvuHcjdKCx2nlioneLH0I47JiU_Slr_Nwc,189
@@ -82,6 +82,8 @@ ingestr/src/klaviyo/helpers.py,sha256=_i-SHffhv25feLDcjy6Blj1UxYLISCwVCMgGtrlnYH
 ingestr/src/linkedin_ads/__init__.py,sha256=CAPWFyV24loziiphbLmODxZUXZJwm4JxlFkr56q0jfo,1855
 ingestr/src/linkedin_ads/dimension_time_enum.py,sha256=EmHRdkFyTAfo4chGjThrwqffWJxmAadZMbpTvf0xkQc,198
 ingestr/src/linkedin_ads/helpers.py,sha256=eUWudRVlXl4kqIhfXQ1eVsUpZwJn7UFqKSpnbLfxzds,4498
+ingestr/src/mixpanel/__init__.py,sha256=s1QtqMP0BTGW6YtdCabJFWj7lEn7KujzELwGpBOQgfs,1796
+ingestr/src/mixpanel/client.py,sha256=c_reouegOVYBOwHLfgYFwpmkba0Sxro1Zkml07NCYf0,3602
 ingestr/src/mongodb/__init__.py,sha256=T-RYPS_skl_2gNVfYWWXan2bVQYmm0bFBcCCqG5ejvg,7275
 ingestr/src/mongodb/helpers.py,sha256=H0GpOK3bPBhFWBEhJZOjywUBdzih6MOpmyVO_cKSN14,24178
 ingestr/src/notion/__init__.py,sha256=36wUui8finbc85ObkRMq8boMraXMUehdABN_AMe_hzA,1834
@@ -99,6 +101,7 @@ ingestr/src/pipedrive/typing.py,sha256=lEMXu4hhAA3XkhVSlBUa-juqyupisd3c-qSQKxFvz
 ingestr/src/pipedrive/helpers/__init__.py,sha256=UX1K_qnGXB0ShtnBOfp2XuVbK8RRoCK8TdEmIjRckgg,710
 ingestr/src/pipedrive/helpers/custom_fields_munger.py,sha256=rZ4AjdITHfJE2NNomCR7vMBS1KnWpEGVF6fADwsIHUE,4488
 ingestr/src/pipedrive/helpers/pages.py,sha256=Klpjw2OnMuhzit3PpiHKsfzGcJ3rQPSQBl3HhE3-6eA,3358
+ingestr/src/quickbooks/__init__.py,sha256=cZUuVCOTGPHTscRj6i0DytO63_fWF-4ieMxoU4PcyTg,3727
 ingestr/src/salesforce/__init__.py,sha256=2hik5pRrxVODdDTlUEMoyccNC07zozjnxkMHcjMT1qA,4558
 ingestr/src/salesforce/helpers.py,sha256=QTdazBt-qRTBbCQMZnyclIaDQFmBixBy_RDKD00Lt-8,2492
 ingestr/src/shopify/__init__.py,sha256=PF_6VQnS065Br1UzSIekTVXBu3WtrMQL_v5CfbfaX5Y,63151
@@ -135,8 +138,8 @@ ingestr/testdata/merge_expected.csv,sha256=DReHqWGnQMsf2PBv_Q2pfjsgvikYFnf1zYcQZ
 ingestr/testdata/merge_part1.csv,sha256=Pw8Z9IDKcNU0qQHx1z6BUf4rF_-SxKGFOvymCt4OY9I,185
 ingestr/testdata/merge_part2.csv,sha256=T_GiWxA81SN63_tMOIuemcvboEFeAmbKc7xRXvL9esw,287
 ingestr/tests/unit/test_smartsheets.py,sha256=eiC2CCO4iNJcuN36ONvqmEDryCA1bA1REpayHpu42lk,5058
-ingestr-0.13.
-ingestr-0.13.
-ingestr-0.13.
-ingestr-0.13.
-ingestr-0.13.
+ingestr-0.13.54.dist-info/METADATA,sha256=5RaRSmJX-SiZV9iFYz3cSkiuZp0GXQ2ij9whkbQHFs8,15131
+ingestr-0.13.54.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ingestr-0.13.54.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
+ingestr-0.13.54.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
+ingestr-0.13.54.dist-info/RECORD,,
{ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/WHEEL
File without changes

{ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/entry_points.txt
File without changes

{ingestr-0.13.53.dist-info → ingestr-0.13.54.dist-info}/licenses/LICENSE.md
File without changes