ingestr 0.7.7__tar.gz → 0.8.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ingestr-0.7.7 → ingestr-0.8.1}/.gitignore +2 -1
- {ingestr-0.7.7 → ingestr-0.8.1}/PKG-INFO +22 -1
- {ingestr-0.7.7 → ingestr-0.8.1}/README.md +18 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/.vitepress/config.mjs +8 -0
- ingestr-0.8.1/docs/supported-sources/adjust.md +30 -0
- ingestr-0.8.1/docs/supported-sources/appsflyer.md +28 -0
- ingestr-0.8.1/docs/supported-sources/facebook-ads.md +51 -0
- ingestr-0.8.1/docs/supported-sources/kafka.md +38 -0
- ingestr-0.8.1/docs/supported-sources/klaviyo.md +64 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/shopify.md +6 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/main.py +10 -0
- ingestr-0.8.1/ingestr/src/.gitignore +10 -0
- ingestr-0.8.1/ingestr/src/adjust/_init_.py +31 -0
- ingestr-0.8.1/ingestr/src/adjust/helpers.py +82 -0
- ingestr-0.8.1/ingestr/src/appsflyer/_init_.py +24 -0
- ingestr-0.8.1/ingestr/src/appsflyer/client.py +106 -0
- ingestr-0.8.1/ingestr/src/facebook_ads/__init__.py +197 -0
- ingestr-0.8.1/ingestr/src/facebook_ads/exceptions.py +5 -0
- ingestr-0.8.1/ingestr/src/facebook_ads/helpers.py +255 -0
- ingestr-0.8.1/ingestr/src/facebook_ads/settings.py +208 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/factory.py +15 -0
- ingestr-0.8.1/ingestr/src/kafka/__init__.py +103 -0
- ingestr-0.8.1/ingestr/src/kafka/helpers.py +227 -0
- ingestr-0.8.1/ingestr/src/klaviyo/_init_.py +173 -0
- ingestr-0.8.1/ingestr/src/klaviyo/client.py +212 -0
- ingestr-0.8.1/ingestr/src/klaviyo/helpers.py +19 -0
- ingestr-0.8.1/ingestr/src/shopify/__init__.py +1925 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/shopify/helpers.py +73 -32
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/sources.py +230 -7
- ingestr-0.8.1/ingestr/src/version.py +1 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/pyproject.toml +10 -1
- {ingestr-0.7.7 → ingestr-0.8.1}/requirements.txt +3 -0
- ingestr-0.7.7/ingestr/src/shopify/__init__.py +0 -227
- ingestr-0.7.7/ingestr/src/version.py +0 -1
- {ingestr-0.7.7 → ingestr-0.8.1}/.dockerignore +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/.github/workflows/deploy-docs.yml +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/.github/workflows/tests.yml +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/.python-version +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/Dockerfile +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/LICENSE.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/Makefile +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/.vitepress/theme/custom.css +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/.vitepress/theme/index.js +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/commands/example-uris.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/commands/ingest.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/getting-started/core-concepts.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/getting-started/incremental-loading.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/getting-started/quickstart.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/getting-started/telemetry.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/index.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/airtable.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/bigquery.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/chess.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/csv.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/databricks.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/duckdb.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/gorgias.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/gsheets.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/hubspot.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/mongodb.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/mssql.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/mysql.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/notion.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/oracle.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/postgres.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/redshift.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/sap-hana.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/slack.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/snowflake.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/sqlite.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/docs/supported-sources/stripe.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/airtable/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/chess/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/chess/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/chess/settings.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/destinations.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/google_sheets/README.md +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/google_sheets/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/google_sheets/helpers/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/google_sheets/helpers/api_calls.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/google_sheets/helpers/data_processing.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/gorgias/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/gorgias/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/hubspot/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/hubspot/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/hubspot/settings.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/mongodb/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/mongodb/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/notion/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/notion/helpers/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/notion/helpers/client.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/notion/helpers/database.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/notion/settings.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/shopify/exceptions.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/shopify/settings.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/slack/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/slack/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/slack/settings.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/sql_database/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/sql_database/arrow_helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/sql_database/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/sql_database/override.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/sql_database/schema_types.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/stripe_analytics/__init__.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/stripe_analytics/helpers.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/stripe_analytics/settings.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/table_definition.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/telemetry/event.py +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/src/testdata/fakebqcredentials.json +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/.gitignore +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/create_replace.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/delete_insert_expected.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/delete_insert_part1.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/delete_insert_part2.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/merge_expected.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/merge_part1.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/ingestr/testdata/merge_part2.csv +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/package-lock.json +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/package.json +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/requirements-dev.txt +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/resources/demo.gif +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/resources/demo.tape +0 -0
- {ingestr-0.7.7 → ingestr-0.8.1}/resources/ingestr.svg +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: ingestr
-Version: 0.7.7
+Version: 0.8.1
 Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
 Project-URL: Homepage, https://github.com/bruin-data/ingestr
 Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
@@ -14,11 +14,13 @@ Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Classifier: Topic :: Database
 Requires-Python: >=3.9
+Requires-Dist: confluent-kafka>=2.3.0
 Requires-Dist: cx-oracle==8.3.0
 Requires-Dist: databricks-sql-connector==2.9.3
 Requires-Dist: dlt==0.5.1
 Requires-Dist: duckdb-engine==0.11.5
 Requires-Dist: duckdb==0.10.2
+Requires-Dist: facebook-business==20.0.0
 Requires-Dist: google-api-python-client==2.130.0
 Requires-Dist: google-cloud-bigquery-storage==2.24.0
 Requires-Dist: mysql-connector-python==9.0.0
@@ -42,6 +44,7 @@ Requires-Dist: sqlalchemy==1.4.52
 Requires-Dist: stripe==10.7.0
 Requires-Dist: tqdm==4.66.2
 Requires-Dist: typer==0.12.3
+Requires-Dist: types-requests==2.32.0.20240907
 Description-Content-Type: text/markdown
 
 <div align="center">
@@ -176,15 +179,28 @@ Join our Slack community [here](https://join.slack.com/t/bruindatacommunity/shar
 <tr>
 <td colspan="3" style='text-align:center;'><strong>Platforms</strong></td>
 </tr>
+<td>Adjust</td>
+<td>✅</td>
+<td>-</td>
 <tr>
 <td>Airtable</td>
 <td>✅</td>
 <td>-</td>
+</tr>
+<tr>
+<td>AppsFlyer</td>
+<td>✅</td>
+<td>-</td>
 </tr>
 <tr>
 <td>Chess.com</td>
 <td>✅</td>
 <td>-</td>
+</tr>
+<tr>
+<td>Facebook Ads</td>
+<td>✅</td>
+<td>-</td>
 </tr>
 <tr>
 <td>Gorgias</td>
@@ -200,6 +216,11 @@ Join our Slack community [here](https://join.slack.com/t/bruindatacommunity/shar
 <td>HubSpot</td>
 <td>✅</td>
 <td>-</td>
+</tr>
+<tr>
+<td>Klaviyo</td>
+<td>✅</td>
+<td>-</td>
 </tr>
 <tr>
 <td>Notion</td>
README.md
@@ -130,15 +130,28 @@ Join our Slack community [here](https://join.slack.com/t/bruindatacommunity/shar
 <tr>
 <td colspan="3" style='text-align:center;'><strong>Platforms</strong></td>
 </tr>
+<td>Adjust</td>
+<td>✅</td>
+<td>-</td>
 <tr>
 <td>Airtable</td>
 <td>✅</td>
 <td>-</td>
+</tr>
+<tr>
+<td>AppsFlyer</td>
+<td>✅</td>
+<td>-</td>
 </tr>
 <tr>
 <td>Chess.com</td>
 <td>✅</td>
 <td>-</td>
+</tr>
+<tr>
+<td>Facebook Ads</td>
+<td>✅</td>
+<td>-</td>
 </tr>
 <tr>
 <td>Gorgias</td>
@@ -154,6 +167,11 @@ Join our Slack community [here](https://join.slack.com/t/bruindatacommunity/shar
 <td>HubSpot</td>
 <td>✅</td>
 <td>-</td>
+</tr>
+<tr>
+<td>Klaviyo</td>
+<td>✅</td>
+<td>-</td>
 </tr>
 <tr>
 <td>Notion</td>
docs/.vitepress/config.mjs
@@ -64,6 +64,7 @@ export default defineConfig({
 text: "Google BigQuery",
 link: "/supported-sources/bigquery.md",
 },
+{ text: "Kafka", link: "/supported-sources/kafka.md" },
 { text: "Local CSV Files", link: "/supported-sources/csv.md" },
 {
 text: "Microsoft SQL Server",
@@ -83,11 +84,18 @@ export default defineConfig({
 text: "Platforms",
 collapsed: false,
 items: [
+{ text: "Adjust", link: "/supported-sources/adjust.md" },
 { text: "Airtable", link: "/supported-sources/airtable.md" },
+{ text: "AppsFlyer", link: "/supported-sources/appsflyer.md" },
 { text: "Chess.com", link: "/supported-sources/chess.md" },
+{
+text: "Facebook Ads",
+link: "/supported-sources/facebook-ads.md",
+},
 { text: "Google Sheets", link: "/supported-sources/gsheets.md" },
 { text: "Gorgias", link: "/supported-sources/gorgias.md" },
 { text: "HubSpot", link: "/supported-sources/hubspot.md" },
+{ text: "Klaviyo", link: "/supported-sources/klaviyo.md" },
 { text: "Notion", link: "/supported-sources/notion.md" },
 { text: "Shopify", link: "/supported-sources/shopify.md" },
 { text: "Slack", link: "/supported-sources/slack.md" },
docs/supported-sources/adjust.md (new file)
@@ -0,0 +1,30 @@
+# Adjust
+
+[Adjust](https://www.adjust.com/) is a mobile marketing analytics platform that provides solutions for measuring and optimizing campaigns, as well as protecting user data.
+
+ingestr supports Adjust as a source.
+
+## URI Format
+
+The URI format for Adjust is as follows:
+
+```plaintext
+adjust://?api_key=<api-key-here>
+```
+
+An API token is required to retrieve reports from the Adjust reporting API. Please follow the guide to [obtain an API key](https://dev.adjust.com/en/api/rs-api/authentication/).
+
+Once you complete the guide, you should have an API key. Let's say your API key is `nr_123`; here's a sample command that will copy the data from Adjust into a duckdb database:
+
+```sh
+ingestr ingest --source-uri 'adjust://?api_key=nr_123' --source-table 'campaigns' --dest-uri duckdb:///adjust.duckdb --dest-table 'adjust.output' --interval-start '2024-09-05' --interval-end '2024-09-08'
+```
+
+The result of this command will be a table in the `adjust.duckdb` database.
+
+Available source tables:
+The Adjust source allows ingesting the following sources into separate tables:
+
+- `Campaigns`: Retrieves data for a campaign, showing the app's revenue and network costs over multiple days.
+
+- `Creatives`: Retrieves data for a creative asset, detailing the app's revenue and network costs across multiple days.
docs/supported-sources/appsflyer.md (new file)
@@ -0,0 +1,28 @@
+# AppsFlyer
+
+[AppsFlyer](https://www.appsflyer.com/) is a mobile marketing analytics and attribution platform that helps businesses track, measure, and optimize their app marketing efforts across various channels.
+
+ingestr supports AppsFlyer as a source.
+
+The URI format for AppsFlyer is as follows:
+
+```plaintext
+appsflyer://?api_key=<api-key>
+```
+
+An API token is required to retrieve reports from the AppsFlyer API. Please follow the guide to [obtain an API key](https://support.appsflyer.com/hc/en-us/articles/360004562377-Managing-AppsFlyer-tokens).
+
+Once you complete the guide, you should have an API key. Let's say your API key is `ey123`; here's a sample command that will copy the data from AppsFlyer into a duckdb database:
+
+ingestr ingest --source-uri 'appsflyer://?api_key=ey123' --source-table 'campaigns' --dest-uri duckdb:///appsflyer.duckdb --dest-table 'appsflyer.output' --interval-start '2024-08-01' --interval-end '2024-08-28'
+
+The result of this command will be a table in the `appsflyer.duckdb` database.
+
+Available source tables:
+The AppsFlyer source allows ingesting the following sources into separate tables:
+
+- Campaigns: Retrieves data for campaigns, detailing the app's costs, loyal users, total installs, and revenue over multiple days.
+
+- Creatives: Retrieves data for a creative asset, including revenue and cost.
+
+Use these as the `--source-table` parameter in the `ingestr ingest` command.
docs/supported-sources/facebook-ads.md (new file)
@@ -0,0 +1,51 @@
+# Facebook Ads
+
+Facebook Ads is the advertising platform that helps users create targeted ads on Facebook, Instagram, and Messenger.
+
+ingestr supports Facebook Ads as a source.
+
+## URI Format
+
+The URI format for Facebook Ads is as follows:
+
+```plaintext
+facebookads://?access_token=<access_token>&account_id=<account_id>
+```
+
+URI parameters:
+
+- `access_token` is associated with the Facebook Business app.
+- `account_id` is associated with the Ad Manager.
+
+Both are used for authentication with the Facebook Ads API.
+
+The URI is used to connect to the Facebook Ads API for extracting data.
+
+## Setting up a Facebook Ads Integration
+
+Facebook Ads requires a few steps to set up an integration; please follow the guide dltHub [has built here](https://dlthub.com/docs/dlt-ecosystem/verified-sources/facebook_ads#setup-guide).
+
+Once you complete the guide, you should have an access token and an account ID. Let's say your `access_token` is `abcdef` and your `account_id` is `1234`; here's a sample command that will copy the data from Facebook Ads into a duckdb database:
+
+```sh
+ingestr ingest \
+  --source-uri 'facebookads://?access_token=easdyh&account_id=1234' \
+  --source-table 'campaigns' \
+  --dest-uri 'duckdb:///facebook.duckdb' \
+  --dest-table 'dest.campaigns'
+```
+
+The result of this command will be a table in the `facebook.duckdb` database.
+
+## Available Tables
+
+The Facebook Ads source allows ingesting the following sources into separate tables:
+
+- `campaigns`: Retrieves all DEFAULT_CAMPAIGN_FIELDS.
+- `ad_sets`: Retrieves all DEFAULT_ADSET_FIELDS.
+- `leads`: Retrieves all DEFAULT_LEAD_FIELDS.
+- `ads_creatives`: Retrieves all DEFAULT_ADCREATIVE_FIELDS.
+- `ads`: Retrieves all DEFAULT_ADS_FIELDS.
+- `facebook_insights`: Retrieves all DEFAULT_INSIGHTS_FIELDS.
+
+Use these as the `--source-table` parameter in the `ingestr ingest` command.
docs/supported-sources/kafka.md (new file)
@@ -0,0 +1,38 @@
+# Apache Kafka
+[Apache Kafka](https://kafka.apache.org/) is a distributed event streaming platform used by thousands of companies for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications.
+
+ingestr supports Apache Kafka as a source.
+
+## URI Format
+The URI format for Apache Kafka is as follows:
+
+```plaintext
+kafka://?bootstrap_servers=localhost:9092&group_id=test_group&security_protocol=SASL_SSL&sasl_mechanisms=PLAIN&sasl_username=example_username&sasl_password=example_secret&batch_size=1000&batch_timeout=3
+```
+
+URI parameters:
+- `bootstrap_servers`: The Kafka server(s) to connect to, typically in the form of a host and port (e.g., `localhost:9092`).
+- `group_id`: The consumer group ID used for identifying the client when consuming messages.
+- `security_protocol`: The protocol used to communicate with brokers (e.g., `SASL_SSL` for secure communication).
+- `sasl_mechanisms`: The SASL mechanism to be used for authentication (e.g., `PLAIN`).
+- `sasl_username`: The username for SASL authentication.
+- `sasl_password`: The password for SASL authentication.
+- `batch_size`: The number of messages to fetch in a single batch, defaults to 3000.
+- `batch_timeout`: The maximum time to wait for messages, defaults to 3 seconds.
+
+The URI is used to connect to the Kafka brokers for ingesting messages.
+
+### Group ID
+The group ID is used to identify the consumer group that reads messages from a topic. Kafka uses the group ID to manage consumer offsets and assign partitions to consumers, which means that the group ID is the key to reading messages from the correct partition and position in the topic.
+
+Once you have your Kafka server, credentials, and group ID set up, here's a sample command to ingest messages from a Kafka topic into a duckdb database:
+
+```sh
+ingestr ingest \
+  --source-uri 'kafka://?bootstrap_servers=localhost:9092' \
+  --source-table 'my-topic' \
+  --dest-uri duckdb:///kafka.duckdb \
+  --dest-table 'kafka.my_topic'
+```
+
+The result of this command will be a table in the `kafka.duckdb` database with JSON columns.
docs/supported-sources/klaviyo.md (new file)
@@ -0,0 +1,64 @@
+# Klaviyo
+
+[Klaviyo](https://www.klaviyo.com/) is a marketing automation platform that helps businesses build and manage smarter digital relationships with their customers by connecting through personalized email and enhancing customer loyalty.
+
+ingestr supports Klaviyo as a source.
+
+## URI Format
+
+The URI format for Klaviyo is as follows:
+
+```plaintext
+klaviyo://?api_key=<api-key>
+```
+
+URI parameters:
+
+- `api_key`: The API key used for authentication with the Klaviyo API.
+
+The URI is used to connect to the Klaviyo API for extracting data.
+
+```bash
+ingestr ingest --source-table 'events' --source-uri 'klaviyo://?api_key=pk_test' --dest-uri duckdb:///klaviyo.duckdb --interval-start 2022-01-01 --dest-table 'klaviyo.events' --extract-parallelism 20
+```
+
+This command fetches all the events that are created/updated since 2022-01-01 and writes them to the `klaviyo.events` table on DuckDB, using 20 parallel threads to improve performance and efficiently handle large data volumes.
+
+## Available Tables
+
+The Klaviyo source allows ingesting the following sources into separate tables:
+
+[events](https://developers.klaviyo.com/en/reference/events_api_overview): Retrieves all events in an account where each event represents an action taken by a profile such as a password reset or a product order.
+
+[profiles](https://developers.klaviyo.com/en/reference/profiles_api_overview): Retrieves all profiles in an account where each profile includes details like organization, job title, email and other attributes.
+
+[campaigns](https://developers.klaviyo.com/en/reference/campaigns_api_overview): Retrieves all campaigns in an account where each campaign is a targeted message sent to a specific audience.
+
+[metrics](https://developers.klaviyo.com/en/reference/metrics_api_overview): Retrieves all metrics in an account where each metric represents a category of events or actions a person can take.
+
+[tags](https://developers.klaviyo.com/en/reference/get_tags): Retrieves all tags in an account.
+
+[coupons](https://developers.klaviyo.com/en/reference/get_coupons): Retrieves all coupons in an account.
+
+[catalog-variants](https://developers.klaviyo.com/en/reference/get_catalog_variants): Retrieves all variants in an account.
+
+[catalog-categories](https://developers.klaviyo.com/en/reference/get_catalog_categories): Retrieves all catalog categories in an account.
+
+[catalog-items](https://developers.klaviyo.com/en/reference/get_catalog_items): Retrieves all catalog items in an account.
+
+[flows](https://developers.klaviyo.com/en/reference/get_flows): Retrieves all flows in an account where a flow is a sequence of automated actions that is triggered when a person performs a specific action.
+
+[lists](https://developers.klaviyo.com/en/reference/get_lists): Retrieves all lists in an account.
+
+[images](https://developers.klaviyo.com/en/reference/get_images): Retrieves all images in an account.
+
+[segments](https://developers.klaviyo.com/en/reference/get_segments): Retrieves all segments in an account where a segment is a dynamic list that contains profiles meeting a certain set of conditions.
+
+[forms](https://developers.klaviyo.com/en/reference/get_forms): Retrieves all forms in an account.
+
+[templates](https://developers.klaviyo.com/en/reference/get_templates): Retrieves all templates in an account.
+
+Use these as the `--source-table` parameter in the `ingestr ingest` command.
+
+> [!WARNING]
+> Klaviyo does not support incremental loading for many endpoints in its APIs, which means ingestr will load endpoints incrementally if they support it, and do a full-refresh if not.
docs/supported-sources/shopify.md
@@ -32,6 +32,12 @@ The result of this command will be a table in the `shopify.duckdb` database with
 Shopify source allows ingesting the following sources into separate tables:
 - `orders`
 - `customers`
+- `discounts`: Uses the GraphQL API, `discountNodes` query, take a look at the [Shopify docs](https://shopify.dev/docs/api/admin-graphql/2024-07/queries/discountNodes) for more details.
 - `products`
+- `inventory_items`
+- `transactions`
+- `balance`
+- `events`
+- `price_rules`: this is a deprecated table, please use `discounts` instead.
 
 Use these as `--source-table` parameter in the `ingestr ingest` command.
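The new `discounts` table is documented above as being backed by the GraphQL `discountNodes` query. As a rough, hypothetical sketch of the kind of call that implies (not taken from the release; the shop handle, token, and selected fields below are placeholders), a minimal request against Shopify's Admin GraphQL API could look like this:

```python
# Hypothetical sketch: a minimal discountNodes query against the Shopify Admin
# GraphQL API (API version 2024-07, as referenced by the docs link above).
# SHOP and TOKEN are placeholders, not values from the release.
import requests

SHOP = "my-shop"          # hypothetical shop handle
TOKEN = "shpat_xxx"       # hypothetical Admin API access token
API_VERSION = "2024-07"

QUERY = """
{
  discountNodes(first: 50) {
    edges { node { id } }
    pageInfo { hasNextPage endCursor }
  }
}
"""

resp = requests.post(
    f"https://{SHOP}.myshopify.com/admin/api/{API_VERSION}/graphql.json",
    json={"query": QUERY},
    headers={"X-Shopify-Access-Token": TOKEN},
)
resp.raise_for_status()

# Print the discount IDs returned in the first page of results.
for edge in resp.json()["data"]["discountNodes"]["edges"]:
    print(edge["node"]["id"])
```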
ingestr/main.py
@@ -244,6 +244,13 @@ def ingest(
             envvar="PIPELINES_DIR",
         ),
     ] = None, # type: ignore
+    extract_parallelism: Annotated[
+        Optional[int],
+        typer.Option(
+            help="The number of parallel jobs to run for extracting data from the source, only applicable for certain sources",
+            envvar="EXTRACT_PARALLELISM",
+        ),
+    ] = 5, # type: ignore
 ):
     track(
         "command_triggered",
@@ -252,7 +259,10 @@ def ingest(
         },
     )
 
+    dlt.config["data_writer.buffer_max_items"] = page_size
     dlt.config["data_writer.file_max_items"] = loader_file_size
+    dlt.config["extract.workers"] = extract_parallelism
+    dlt.config["extract.max_parallel_items"] = extract_parallelism
     if schema_naming != SchemaNaming.default:
         dlt.config["schema.naming"] = schema_naming.value
 
ingestr/src/adjust/_init_.py (new file)
@@ -0,0 +1,31 @@
+from typing import Sequence
+
+import dlt
+from dlt.sources import DltResource
+
+from .helpers import DEFAULT_DIMENSIONS, AdjustAPI
+
+
+@dlt.source(max_table_nesting=0)
+def adjust_source(
+    start_date: str,
+    end_date: str,
+    api_key: str,
+) -> Sequence[DltResource]:
+    @dlt.resource(write_disposition="merge", merge_key="day")
+    def campaigns():
+        adjust_api = AdjustAPI(api_key=api_key)
+        yield from adjust_api.fetch_report_data(
+            start_date=start_date,
+            end_date=end_date,
+        )
+
+    @dlt.resource(write_disposition="merge", merge_key="day")
+    def creatives():
+        dimensions = DEFAULT_DIMENSIONS + ["adgroup", "creative"]
+        adjust_api = AdjustAPI(api_key=api_key)
+        yield from adjust_api.fetch_report_data(
+            start_date=start_date, end_date=end_date, dimensions=dimensions
+        )
+
+    return campaigns, creatives
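For orientation, the dlt source above could in principle be run on its own as well. The sketch below is not part of the release: the import path, pipeline name, and dataset name are assumptions based on the file layout in this diff, and the API key is the placeholder used in the Adjust docs above.

```python
# Hypothetical standalone run of the adjust_source defined above; ingestr
# normally builds and runs the pipeline itself.
import dlt

from ingestr.src.adjust import adjust_source  # assumed import path

pipeline = dlt.pipeline(
    pipeline_name="adjust_example",   # made-up pipeline name
    destination="duckdb",
    dataset_name="adjust_raw",        # made-up dataset name
)

# Loads both resources (campaigns, creatives); each merges on the "day" key,
# matching the write_disposition declared in the source above.
load_info = pipeline.run(
    adjust_source(
        start_date="2024-09-05",
        end_date="2024-09-08",
        api_key="nr_123",  # placeholder key
    )
)
print(load_info)
```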
ingestr/src/adjust/helpers.py (new file)
@@ -0,0 +1,82 @@
+import requests
+from dlt.sources.helpers.requests import Client
+from requests.exceptions import HTTPError
+
+DEFAULT_DIMENSIONS = ["campaign", "day", "app", "store_type", "channel", "country"]
+
+DEFAULT_METRICS = [
+    "network_cost",
+    "all_revenue_total_d0",
+    "ad_revenue_total_d0",
+    "revenue_total_d0",
+    "all_revenue_total_d1",
+    "ad_revenue_total_d1",
+    "revenue_total_d1",
+    "all_revenue_total_d3",
+    "ad_revenue_total_d3",
+    "revenue_total_d3",
+    "all_revenue_total_d7",
+    "ad_revenue_total_d7",
+    "revenue_total_d7",
+    "all_revenue_total_d14",
+    "ad_revenue_total_d14",
+    "revenue_total_d14",
+    "all_revenue_total_d21",
+]
+
+
+class AdjustAPI:
+    def __init__(self, api_key):
+        self.api_key = api_key
+        self.uri = "https://automate.adjust.com/reports-service/report"
+
+    def fetch_report_data(
+        self,
+        start_date,
+        end_date,
+        dimensions=DEFAULT_DIMENSIONS,
+        metrics=DEFAULT_METRICS,
+        utc_offset="+00:00",
+        ad_spend_mode="network",
+        attribution_source="first",
+        attribution_type="all",
+        cohort_maturity="immature",
+        reattributed="all",
+        sandbox="false",
+    ):
+        headers = {"Authorization": f"Bearer {self.api_key}"}
+        comma_separated_dimensions = ",".join(dimensions)
+        comma_separated_metrics = ",".join(metrics)
+        params = {
+            "date_period": f"{start_date}:{end_date}",
+            "dimensions": comma_separated_dimensions,
+            "metrics": comma_separated_metrics,
+            "utc_offset": utc_offset,
+            "ad_spend_mode": ad_spend_mode,
+            "attribution_source": attribution_source,
+            "attribution_type": attribution_type,
+            "cohort_maturity": cohort_maturity,
+            "reattributed": reattributed,
+            "sandbox": sandbox,
+        }
+
+        def retry_on_limit(
+            response: requests.Response, exception: BaseException
+        ) -> bool:
+            return response.status_code == 429
+
+        request_client = Client(
+            request_timeout=8.0,
+            raise_for_status=False,
+            retry_condition=retry_on_limit,
+            request_max_attempts=12,
+            request_backoff_factor=2,
+        ).session
+
+        response = request_client.get(self.uri, headers=headers, params=params)
+        if response.status_code == 200:
+            result = response.json()
+            items = result.get("rows", [])
+            yield items
+        else:
+            raise HTTPError(f"Request failed with status code: {response.status_code}")
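The helper class above can also be exercised directly, which makes the shape of the data easier to see. The following is only an illustrative sketch: the absolute import path is assumed from the file layout in this diff, and the key and dates are placeholders.

```python
# Hypothetical direct use of AdjustAPI. fetch_report_data is a generator that
# yields the "rows" list parsed from each report-service response.
from ingestr.src.adjust.helpers import DEFAULT_DIMENSIONS, AdjustAPI  # assumed path

api = AdjustAPI(api_key="nr_123")  # placeholder key

for rows in api.fetch_report_data(
    start_date="2024-09-05",
    end_date="2024-09-08",
    # same dimension set the creatives resource adds on top of the defaults
    dimensions=DEFAULT_DIMENSIONS + ["adgroup", "creative"],
):
    for row in rows:
        print(row)
```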
ingestr/src/appsflyer/_init_.py (new file)
@@ -0,0 +1,24 @@
+from typing import Iterable
+
+import dlt
+from dlt.common.typing import TDataItem
+from dlt.sources import DltResource
+
+from ingestr.src.appsflyer.client import AppsflyerClient
+
+
+@dlt.source(max_table_nesting=0)
+def appsflyer_source(
+    api_key: str, start_date: str, end_date: str
+) -> Iterable[DltResource]:
+    client = AppsflyerClient(api_key)
+
+    @dlt.resource(write_disposition="merge", merge_key="install_time")
+    def campaigns() -> Iterable[TDataItem]:
+        yield from client.fetch_campaigns(start_date, end_date)
+
+    @dlt.resource(write_disposition="merge", merge_key="install_time")
+    def creatives() -> Iterable[TDataItem]:
+        yield from client.fetch_creatives(start_date, end_date)
+
+    return campaigns, creatives
ingestr/src/appsflyer/client.py (new file)
@@ -0,0 +1,106 @@
+from typing import Optional
+
+import requests
+from dlt.sources.helpers.requests import Client
+from requests.exceptions import HTTPError
+
+DEFAULT_GROUPING = ["c", "geo", "app_id", "install_time"]
+DEFAULT_KPIS = [
+    "impressions",
+    "clicks",
+    "installs",
+    "cost",
+    "revenue",
+    "average_ecpi",
+    "loyal_users",
+    "uninstalls",
+    "roi",
+]
+
+
+class AppsflyerClient:
+    def __init__(self, api_key: str):
+        self.api_key = api_key
+        self.uri = "https://hq1.appsflyer.com/api/master-agg-data/v4/app/all"
+
+    def __get_headers(self):
+        return {
+            "Authorization": f"{self.api_key}",
+            "accept": "text/json",
+        }
+
+    def _fetch_data(
+        self,
+        from_date: str,
+        to_date: str,
+        maximum_rows=1000000,
+        dimensions=DEFAULT_GROUPING,
+        metrics=DEFAULT_KPIS,
+    ):
+        params = {
+            "from": from_date,
+            "to": to_date,
+            "groupings": ",".join(dimensions),
+            "kpis": ",".join(metrics),
+            "format": "json",
+            "maximum_rows": maximum_rows,
+        }
+
+        def retry_on_limit(
+            response: Optional[requests.Response], exception: Optional[BaseException]
+        ) -> bool:
+            return (
+                isinstance(response, requests.Response) and response.status_code == 429
+            )
+
+        request_client = Client(
+            request_timeout=10.0,
+            raise_for_status=False,
+            retry_condition=retry_on_limit,
+            request_max_attempts=12,
+            request_backoff_factor=2,
+        ).session
+
+        try:
+            response = request_client.get(
+                url=self.uri, headers=self.__get_headers(), params=params
+            )
+
+            if response.status_code == 200:
+                result = response.json()
+                yield result
+            else:
+                raise HTTPError(
+                    f"Request failed with status code: {response.status_code}"
+                )
+
+        except requests.RequestException as e:
+            raise HTTPError(f"Request failed: {e}")
+
+    def fetch_campaigns(
+        self,
+        start_date: str,
+        end_date: str,
+    ):
+        metrics = DEFAULT_KPIS + [
+            "cohort_day_1_revenue_per_user",
+            "cohort_day_1_total_revenue_per_user",
+            "cohort_day_3_revenue_per_user",
+            "cohort_day_3_total_revenue_per_user",
+            "cohort_day_7_total_revenue_per_user",
+            "cohort_day_7_revenue_per_user",
+            "cohort_day_14_total_revenue_per_user",
+            "cohort_day_14_revenue_per_user",
+            "cohort_day_21_total_revenue_per_user",
+            "cohort_day_21_revenue_per_user",
+            "retention_day_7",
+        ]
+        return self._fetch_data(start_date, end_date, metrics=metrics)
+
+    def fetch_creatives(
+        self,
+        start_date: str,
+        end_date: str,
+    ):
+        dimensions = DEFAULT_GROUPING + ["af_adset_id", "af_adset", "af_ad_id"]
+        return self._fetch_data(start_date, end_date, dimensions=dimensions)
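Similarly, a hedged sketch of driving the AppsFlyer client above directly; the token and date range are placeholders, and in the release this client is only used through the `appsflyer_source` resources.

```python
# Hypothetical direct use of AppsflyerClient. fetch_campaigns/fetch_creatives
# return the generator created by _fetch_data, which yields the JSON-decoded
# aggregate report for the requested window.
from ingestr.src.appsflyer.client import AppsflyerClient

client = AppsflyerClient(api_key="ey123")  # placeholder token

for report in client.fetch_campaigns("2024-08-01", "2024-08-28"):
    print(report)

# The creatives report adds ad-set/ad groupings on top of DEFAULT_GROUPING.
for report in client.fetch_creatives("2024-08-01", "2024-08-28"):
    print(report)
```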