ingestr 0.13.13__py3-none-any.whl → 0.14.104__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ingestr/conftest.py +72 -0
- ingestr/main.py +134 -87
- ingestr/src/adjust/__init__.py +4 -4
- ingestr/src/adjust/adjust_helpers.py +7 -3
- ingestr/src/airtable/__init__.py +3 -2
- ingestr/src/allium/__init__.py +128 -0
- ingestr/src/anthropic/__init__.py +277 -0
- ingestr/src/anthropic/helpers.py +525 -0
- ingestr/src/applovin_max/__init__.py +6 -4
- ingestr/src/appsflyer/__init__.py +325 -0
- ingestr/src/appsflyer/client.py +49 -45
- ingestr/src/appstore/__init__.py +1 -0
- ingestr/src/arrow/__init__.py +9 -1
- ingestr/src/asana_source/__init__.py +1 -1
- ingestr/src/attio/__init__.py +102 -0
- ingestr/src/attio/helpers.py +65 -0
- ingestr/src/blob.py +37 -10
- ingestr/src/buildinfo.py +1 -1
- ingestr/src/chess/__init__.py +1 -1
- ingestr/src/clickup/__init__.py +85 -0
- ingestr/src/clickup/helpers.py +47 -0
- ingestr/src/collector/spinner.py +43 -0
- ingestr/src/couchbase_source/__init__.py +118 -0
- ingestr/src/couchbase_source/helpers.py +135 -0
- ingestr/src/cursor/__init__.py +83 -0
- ingestr/src/cursor/helpers.py +188 -0
- ingestr/src/destinations.py +508 -27
- ingestr/src/docebo/__init__.py +589 -0
- ingestr/src/docebo/client.py +435 -0
- ingestr/src/docebo/helpers.py +97 -0
- ingestr/src/elasticsearch/__init__.py +80 -0
- ingestr/src/elasticsearch/helpers.py +138 -0
- ingestr/src/errors.py +8 -0
- ingestr/src/facebook_ads/__init__.py +47 -28
- ingestr/src/facebook_ads/helpers.py +59 -37
- ingestr/src/facebook_ads/settings.py +2 -0
- ingestr/src/facebook_ads/utils.py +39 -0
- ingestr/src/factory.py +107 -2
- ingestr/src/filesystem/__init__.py +8 -3
- ingestr/src/filters.py +46 -3
- ingestr/src/fluxx/__init__.py +9906 -0
- ingestr/src/fluxx/helpers.py +209 -0
- ingestr/src/frankfurter/__init__.py +157 -0
- ingestr/src/frankfurter/helpers.py +48 -0
- ingestr/src/freshdesk/__init__.py +89 -0
- ingestr/src/freshdesk/freshdesk_client.py +137 -0
- ingestr/src/freshdesk/settings.py +9 -0
- ingestr/src/fundraiseup/__init__.py +95 -0
- ingestr/src/fundraiseup/client.py +81 -0
- ingestr/src/github/__init__.py +41 -6
- ingestr/src/github/helpers.py +5 -5
- ingestr/src/google_analytics/__init__.py +22 -4
- ingestr/src/google_analytics/helpers.py +124 -6
- ingestr/src/google_sheets/__init__.py +4 -4
- ingestr/src/google_sheets/helpers/data_processing.py +2 -2
- ingestr/src/hostaway/__init__.py +302 -0
- ingestr/src/hostaway/client.py +288 -0
- ingestr/src/http/__init__.py +35 -0
- ingestr/src/http/readers.py +114 -0
- ingestr/src/http_client.py +24 -0
- ingestr/src/hubspot/__init__.py +66 -23
- ingestr/src/hubspot/helpers.py +52 -22
- ingestr/src/hubspot/settings.py +14 -7
- ingestr/src/influxdb/__init__.py +46 -0
- ingestr/src/influxdb/client.py +34 -0
- ingestr/src/intercom/__init__.py +142 -0
- ingestr/src/intercom/helpers.py +674 -0
- ingestr/src/intercom/settings.py +279 -0
- ingestr/src/isoc_pulse/__init__.py +159 -0
- ingestr/src/jira_source/__init__.py +340 -0
- ingestr/src/jira_source/helpers.py +439 -0
- ingestr/src/jira_source/settings.py +170 -0
- ingestr/src/kafka/__init__.py +4 -1
- ingestr/src/kinesis/__init__.py +139 -0
- ingestr/src/kinesis/helpers.py +82 -0
- ingestr/src/klaviyo/{_init_.py → __init__.py} +5 -6
- ingestr/src/linear/__init__.py +634 -0
- ingestr/src/linear/helpers.py +111 -0
- ingestr/src/linkedin_ads/helpers.py +0 -1
- ingestr/src/mailchimp/__init__.py +126 -0
- ingestr/src/mailchimp/helpers.py +226 -0
- ingestr/src/mailchimp/settings.py +164 -0
- ingestr/src/masking.py +344 -0
- ingestr/src/mixpanel/__init__.py +62 -0
- ingestr/src/mixpanel/client.py +99 -0
- ingestr/src/monday/__init__.py +246 -0
- ingestr/src/monday/helpers.py +392 -0
- ingestr/src/monday/settings.py +328 -0
- ingestr/src/mongodb/__init__.py +72 -8
- ingestr/src/mongodb/helpers.py +915 -38
- ingestr/src/partition.py +32 -0
- ingestr/src/phantombuster/__init__.py +65 -0
- ingestr/src/phantombuster/client.py +87 -0
- ingestr/src/pinterest/__init__.py +82 -0
- ingestr/src/pipedrive/__init__.py +198 -0
- ingestr/src/pipedrive/helpers/__init__.py +23 -0
- ingestr/src/pipedrive/helpers/custom_fields_munger.py +102 -0
- ingestr/src/pipedrive/helpers/pages.py +115 -0
- ingestr/src/pipedrive/settings.py +27 -0
- ingestr/src/pipedrive/typing.py +3 -0
- ingestr/src/plusvibeai/__init__.py +335 -0
- ingestr/src/plusvibeai/helpers.py +544 -0
- ingestr/src/plusvibeai/settings.py +252 -0
- ingestr/src/quickbooks/__init__.py +117 -0
- ingestr/src/resource.py +40 -0
- ingestr/src/revenuecat/__init__.py +83 -0
- ingestr/src/revenuecat/helpers.py +237 -0
- ingestr/src/salesforce/__init__.py +15 -8
- ingestr/src/shopify/__init__.py +1 -17
- ingestr/src/smartsheets/__init__.py +82 -0
- ingestr/src/snapchat_ads/__init__.py +489 -0
- ingestr/src/snapchat_ads/client.py +72 -0
- ingestr/src/snapchat_ads/helpers.py +535 -0
- ingestr/src/socrata_source/__init__.py +83 -0
- ingestr/src/socrata_source/helpers.py +85 -0
- ingestr/src/socrata_source/settings.py +8 -0
- ingestr/src/solidgate/__init__.py +219 -0
- ingestr/src/solidgate/helpers.py +154 -0
- ingestr/src/sources.py +2933 -245
- ingestr/src/stripe_analytics/__init__.py +49 -21
- ingestr/src/stripe_analytics/helpers.py +286 -1
- ingestr/src/stripe_analytics/settings.py +62 -10
- ingestr/src/telemetry/event.py +10 -9
- ingestr/src/tiktok_ads/__init__.py +12 -6
- ingestr/src/tiktok_ads/tiktok_helpers.py +0 -1
- ingestr/src/trustpilot/__init__.py +48 -0
- ingestr/src/trustpilot/client.py +48 -0
- ingestr/src/wise/__init__.py +68 -0
- ingestr/src/wise/client.py +63 -0
- ingestr/src/zoom/__init__.py +99 -0
- ingestr/src/zoom/helpers.py +102 -0
- ingestr/tests/unit/test_smartsheets.py +133 -0
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/METADATA +229 -19
- ingestr-0.14.104.dist-info/RECORD +203 -0
- ingestr/src/appsflyer/_init_.py +0 -24
- ingestr-0.13.13.dist-info/RECORD +0 -115
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/WHEEL +0 -0
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/entry_points.txt +0 -0
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/licenses/LICENSE.md +0 -0
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
from itertools import chain
from typing import (
    Any,
    Dict,
    Iterable,
    Iterator,
    List,
    Optional,
    TypeVar,
    Union,
)

import dlt
from dlt.sources.helpers import requests

from ..typing import TDataPage
from .custom_fields_munger import rename_fields
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def get_pages(
    entity: str, pipedrive_api_key: str, extra_params: Optional[Dict[str, Any]] = None
) -> Iterator[List[Dict[str, Any]]]:
    """
    Generic method to retrieve endpoint data based on the required headers and params.

    Args:
        entity: the endpoint you want to call, appended to the Pipedrive v1 base URL
        pipedrive_api_key: API token passed as the ``api_token`` query param
        extra_params: any needed request params except pagination.

    Yields:
        Lists of record dicts, one list per page of results.
    """
    headers = {"Content-Type": "application/json"}
    params: Dict[str, Any] = {"api_token": pipedrive_api_key}
    if extra_params:
        params.update(extra_params)
    url = f"https://app.pipedrive.com/v1/{entity}"
    yield from _paginated_get(url, headers=headers, params=params)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def get_recent_items_incremental(
    entity: str,
    pipedrive_api_key: str,
    since_timestamp: dlt.sources.incremental[str] = dlt.sources.incremental(
        "update_time|modified", "1970-01-01 00:00:00"
    ),
) -> Iterator[TDataPage]:
    """Yield pages of one entity type from /recents, resuming from the incremental cursor."""
    for page in _get_recent_pages(
        entity, pipedrive_api_key, since_timestamp.last_value
    ):
        yield page
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _paginated_get(
    url: str, headers: Dict[str, Any], params: Dict[str, Any]
) -> Iterator[List[Dict[str, Any]]]:
    """
    Requests and yields data 500 records at a time.

    Documentation: https://pipedrive.readme.io/docs/core-api-concepts-pagination

    Args:
        url: full endpoint URL to page through
        headers: request headers
        params: query params; pagination keys (``start``/``limit``) are added here

    Yields:
        The ``data`` list of each non-empty page.
    """
    # Copy so the caller's dict is not mutated by the pagination bookkeeping.
    params = dict(params, start=0, limit=500)
    while True:
        page = requests.get(url, headers=headers, params=params).json()
        # Yield data only; .get avoids a KeyError on malformed/error responses.
        data = page.get("data")
        if data:
            yield data
        # more_items_in_collection is True while further pages exist.
        pagination_info = page.get("additional_data", {}).get("pagination", {})
        if not pagination_info.get("more_items_in_collection", False):
            break
        params["start"] = pagination_info.get("next_start")
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
# Generic element type used by _list_wrapped to preserve the wrapped item's type.
T = TypeVar("T")
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _extract_recents_data(data: Iterable[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
|
80
|
+
"""Results from recents endpoint contain `data` key which is either a single entity or list of entities
|
|
81
|
+
|
|
82
|
+
This returns a flat list of entities from an iterable of recent results
|
|
83
|
+
"""
|
|
84
|
+
return [
|
|
85
|
+
data_item
|
|
86
|
+
for data_item in chain.from_iterable(
|
|
87
|
+
(_list_wrapped(item["data"]) for item in data)
|
|
88
|
+
)
|
|
89
|
+
if data_item is not None
|
|
90
|
+
]
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _list_wrapped(item: Union[List[T], T]) -> List[T]:
|
|
94
|
+
if isinstance(item, list):
|
|
95
|
+
return item
|
|
96
|
+
return [item]
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _get_recent_pages(
    entity: str, pipedrive_api_key: str, since_timestamp: str
) -> Iterator[TDataPage]:
    """Fetch /recents pages for one entity and rename custom fields on every page."""
    fields_mapping = (
        dlt.current.source_state().get("custom_fields_mapping", {}).get(entity, {})
    )
    raw_pages = get_pages(
        "recents",
        pipedrive_api_key,
        extra_params={"since_timestamp": since_timestamp, "items": entity},
    )
    for raw_page in raw_pages:
        yield rename_fields(_extract_recents_data(raw_page), fields_mapping)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# Source identifier; presumably read by ingestr/dlt tooling to label this source — TODO confirm usage.
__source_name__ = "pipedrive"
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"""Pipedrive source settings and constants"""
|
|
2
|
+
|
|
3
|
+
# Table of (entity_name, custom_fields_endpoint, extra_params) triples.
# NOTE(review): the third element looks like extra request params for the fields
# endpoint (e.g. {"user_id": 0}); entities with None appear to have no custom
# fields endpoint — confirm against the pipedrive source module.
ENTITY_MAPPINGS = [
    ("activity", "activityFields", {"user_id": 0}),
    ("organization", "organizationFields", None),
    ("person", "personFields", None),
    ("product", "productFields", None),
    ("deal", "dealFields", None),
    ("pipeline", None, None),
    ("stage", None, None),
    ("user", None, None),
]
|
|
13
|
+
|
|
14
|
+
# Maps the singular entity name used by the /recents API to its plural form
# (presumably the output resource name — confirm against the source module).
RECENTS_ENTITIES = dict(
    activity="activities",
    activityType="activity_types",
    deal="deals",
    file="files",
    filter="filters",
    note="notes",
    person="persons",
    organization="organizations",
    pipeline="pipelines",
    product="products",
    stage="stages",
    user="users",
)
|
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This source provides data extraction from PlusVibeAI via the REST API.
|
|
3
|
+
|
|
4
|
+
It defines functions to fetch data from different parts of PlusVibeAI including
|
|
5
|
+
campaigns and other marketing analytics data.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Any, Iterable, Optional
|
|
9
|
+
|
|
10
|
+
import dlt
|
|
11
|
+
from dlt.common.typing import TDataItem
|
|
12
|
+
|
|
13
|
+
from .helpers import get_client
|
|
14
|
+
from .settings import DEFAULT_PAGE_SIZE, DEFAULT_START_DATE
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dlt.source
def plusvibeai_source() -> Any:
    """
    Collect every PlusVibeAI resource into a single dlt source.

    Returns:
        Sequence[DltResource]: A sequence of DltResource objects containing the fetched data.
    """
    resources = [
        campaigns,
        leads,
        email_accounts,
        emails,
        blocklist,
        webhooks,
        tags,
    ]
    return resources
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="id",
    max_table_nesting=0,  # Keep nested objects (schedule, sequences) as JSON columns
)
def campaigns(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "modified_at",  # PlusVibeAI uses modified_at for updates
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield campaigns from PlusVibeAI, skipping rows older than the incremental cursor.

    Args:
        api_key: API key for authentication (get from https://app.plusvibe.ai/v2/settings/api-access/)
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``modified_at`` field

    Yields:
        dict: The campaign data with nested objects (schedule, sequences, etc.) as JSON.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    for record in client.get_campaigns(
        page_size=DEFAULT_PAGE_SIZE, max_results=max_results
    ):
        # Drop records last modified before the incremental window.
        modified = record.get("modified_at")
        if lower_bound and modified and modified < lower_bound:
            continue
        yield record
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="_id",
    max_table_nesting=0,
)
def leads(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "modified_at",
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield leads from PlusVibeAI, skipping rows older than the incremental cursor.

    Args:
        api_key: API key for authentication
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``modified_at`` field

    Yields:
        dict: The lead data.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    for record in client.get_leads(
        page_size=DEFAULT_PAGE_SIZE, max_results=max_results
    ):
        # Drop records last modified before the incremental window.
        modified = record.get("modified_at")
        if lower_bound and modified and modified < lower_bound:
            continue
        yield record
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="_id",
    max_table_nesting=0,
)
def email_accounts(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "timestamp_updated",
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield email accounts from PlusVibeAI, skipping rows older than the incremental cursor.

    Args:
        api_key: API key for authentication
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``timestamp_updated`` field

    Yields:
        dict: The email account data.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    for record in client.get_email_accounts(
        page_size=DEFAULT_PAGE_SIZE, max_results=max_results
    ):
        # Drop records last updated before the incremental window.
        last_updated = record.get("timestamp_updated")
        if lower_bound and last_updated and last_updated < lower_bound:
            continue
        yield record
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="id",
    max_table_nesting=0,
)
def emails(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "timestamp_created",
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield emails from PlusVibeAI, skipping rows created before the incremental cursor.

    Args:
        api_key: API key for authentication
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``timestamp_created`` field

    Yields:
        dict: The email data.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    # NOTE(review): get_emails takes no page_size, unlike the other endpoints —
    # presumably the emails API paginates differently; confirm in the client.
    for record in client.get_emails(max_results=max_results):
        # Drop records created before the incremental window.
        created = record.get("timestamp_created")
        if lower_bound and created and created < lower_bound:
            continue
        yield record
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="_id",
    max_table_nesting=0,
)
def blocklist(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "created_at",
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield blocklist entries from PlusVibeAI, skipping rows created before the incremental cursor.

    Args:
        api_key: API key for authentication
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``created_at`` field

    Yields:
        dict: The blocklist entry data.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    for record in client.get_blocklist(
        page_size=DEFAULT_PAGE_SIZE, max_results=max_results
    ):
        # Drop records created before the incremental window.
        created = record.get("created_at")
        if lower_bound and created and created < lower_bound:
            continue
        yield record
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="_id",
    max_table_nesting=0,
)
def webhooks(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "modified_at",
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield webhooks from PlusVibeAI, skipping rows older than the incremental cursor.

    Args:
        api_key: API key for authentication
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``modified_at`` field

    Yields:
        dict: The webhook data.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    for record in client.get_webhooks(
        page_size=DEFAULT_PAGE_SIZE, max_results=max_results
    ):
        # Drop records last modified before the incremental window.
        modified = record.get("modified_at")
        if lower_bound and modified and modified < lower_bound:
            continue
        yield record
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
@dlt.resource(
    write_disposition="merge",
    primary_key="_id",
    max_table_nesting=0,
)
def tags(
    api_key: str = dlt.secrets.value,
    workspace_id: str = dlt.secrets.value,
    base_url: str = "https://api.plusvibe.ai",
    max_results: Optional[int] = None,
    updated: dlt.sources.incremental[str] = dlt.sources.incremental(
        "modified_at",
        initial_value=DEFAULT_START_DATE,
        range_end="closed",
        range_start="closed",
    ),
) -> Iterable[TDataItem]:
    """
    Yield tags from PlusVibeAI, skipping rows older than the incremental cursor.

    Args:
        api_key: API key for authentication
        workspace_id: Workspace ID to access
        base_url: PlusVibeAI API base URL
        max_results: Maximum number of results to return
        updated: Incremental cursor over the ``modified_at`` field

    Yields:
        dict: The tag data.
    """
    client = get_client(api_key, workspace_id, base_url)
    lower_bound = updated.start_value

    for record in client.get_tags(
        page_size=DEFAULT_PAGE_SIZE, max_results=max_results
    ):
        # Drop records last modified before the incremental window.
        modified = record.get("modified_at")
        if lower_bound and modified and modified < lower_bound:
            continue
        yield record
|