ingestr 0.13.79__py3-none-any.whl → 0.13.80__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


ingestr/src/buildinfo.py CHANGED
@@ -1 +1 @@
-version = "v0.13.79"
+version = "v0.13.80"
ingestr/src/facebook_ads/__init__.py CHANGED
@@ -117,6 +117,9 @@ def facebook_insights_source(
     app_api_version: str = None,
     start_date: pendulum.DateTime | None = None,
     end_date: pendulum.DateTime | None = None,
+    insights_max_wait_to_finish_seconds: int = 60 * 60 * 4,
+    insights_max_wait_to_start_seconds: int = 60 * 30,
+    insights_max_async_sleep_seconds: int = 20,
 ) -> DltResource:
     """Incrementally loads insight reports with defined granularity level, fields, breakdowns etc.
 
@@ -206,7 +209,9 @@ def facebook_insights_source(
         }
         job = execute_job(
             account.get_insights(params=query, is_async=True),
-            insights_max_async_sleep_seconds=20,
+            insights_max_async_sleep_seconds=insights_max_async_sleep_seconds,
+            insights_max_wait_to_finish_seconds=insights_max_wait_to_finish_seconds,
+            insights_max_wait_to_start_seconds=insights_max_wait_to_start_seconds,
         )
         output = list(map(process_report_item, job.get_result()))
         yield output
ingestr/src/facebook_ads/helpers.py CHANGED
@@ -144,7 +144,7 @@ def execute_job(
             raise InsightsJobTimeout(
                 "facebook_insights",
                 pretty_error_message.format(
-                    job_id, insights_max_wait_to_finish_seconds // 60
+                    job_id, insights_max_wait_to_finish_seconds
                 ),
             )
 
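The three insights-job timeouts that were previously hardcoded are now exposed as parameters of facebook_insights_source. A minimal sketch of setting them when building the source directly in Python; only the three insights_max_* parameters are confirmed by this diff, while access_token is an assumed parameter name (account_id appears in the sources.py hunk below):

from ingestr.src.facebook_ads import facebook_insights_source

source = facebook_insights_source(
    access_token="TOKEN",  # assumed parameter name, not shown in this diff
    account_id="123",
    insights_max_wait_to_finish_seconds=2 * 60 * 60,  # give up after 2 hours
    insights_max_wait_to_start_seconds=10 * 60,       # job must start within 10 minutes
    insights_max_async_sleep_seconds=10,              # cap each polling sleep at 10 seconds
).with_resources("facebook_insights")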
ingestr/src/linear/__init__.py CHANGED
@@ -3,7 +3,22 @@ from typing import Any, Dict, Iterable, Iterator
 import dlt
 import pendulum
 
-from .helpers import _normalize_issue, _normalize_team, _paginate
+from .helpers import _paginate, normalize_dictionaries
+
+
+def _get_date_range(updated_at, start_date):
+    """Extract current start and end dates from incremental state."""
+    if updated_at.last_value:
+        current_start_date = pendulum.parse(updated_at.last_value)
+    else:
+        current_start_date = pendulum.parse(start_date)
+
+    if updated_at.end_value:
+        current_end_date = pendulum.parse(updated_at.end_value)
+    else:
+        current_end_date = pendulum.now(tz="UTC")
+
+    return current_start_date, current_end_date
 
 ISSUES_QUERY = """
 query Issues($cursor: String) {
@@ -84,7 +99,25 @@ query Users($cursor: String) {
   }
 }
 """
-
+WORKFLOW_STATES_QUERY = """
+query WorkflowStates($cursor: String) {
+  workflowStates(first: 50, after: $cursor) {
+    nodes {
+      archivedAt
+      color
+      createdAt
+      id
+      inheritedFrom { id }
+      name
+      position
+      team { id }
+      type
+      updatedAt
+    }
+    pageInfo { hasNextPage endCursor }
+  }
+}
+"""
 
 @dlt.source(name="linear", max_table_nesting=0)
 def linear_source(
@@ -102,20 +135,12 @@ def linear_source(
             range_end="closed",
         ),
     ) -> Iterator[Dict[str, Any]]:
-        if updated_at.last_value:
-            current_start_date = pendulum.parse(updated_at.last_value)
-        else:
-            current_start_date = pendulum.parse(start_date)
-
-        if updated_at.end_value:
-            current_end_date = pendulum.parse(updated_at.end_value)
-        else:
-            current_end_date = pendulum.now(tz="UTC")
+        current_start_date, current_end_date = _get_date_range(updated_at, start_date)
 
         for item in _paginate(api_key, ISSUES_QUERY, "issues"):
             if pendulum.parse(item["updatedAt"]) >= current_start_date:
                 if pendulum.parse(item["updatedAt"]) <= current_end_date:
-                    yield _normalize_issue(item)
+                    yield normalize_dictionaries(item)
 
     @dlt.resource(name="projects", primary_key="id", write_disposition="merge")
     def projects(
@@ -127,20 +152,12 @@ def linear_source(
             range_end="closed",
         ),
     ) -> Iterator[Dict[str, Any]]:
-        if updated_at.last_value:
-            current_start_date = pendulum.parse(updated_at.last_value)
-        else:
-            current_start_date = pendulum.parse(start_date)
-
-        if updated_at.end_value:
-            current_end_date = pendulum.parse(updated_at.end_value)
-        else:
-            current_end_date = pendulum.now(tz="UTC")
+        current_start_date, current_end_date = _get_date_range(updated_at, start_date)
 
         for item in _paginate(api_key, PROJECTS_QUERY, "projects"):
             if pendulum.parse(item["updatedAt"]) >= current_start_date:
                 if pendulum.parse(item["updatedAt"]) <= current_end_date:
-                    yield item
+                    yield normalize_dictionaries(item)
 
     @dlt.resource(name="teams", primary_key="id", write_disposition="merge")
     def teams(
@@ -153,21 +170,13 @@ def linear_source(
         ),
     ) -> Iterator[Dict[str, Any]]:
         print(start_date)
-        if updated_at.last_value:
-            current_start_date = pendulum.parse(updated_at.last_value)
-        else:
-            current_start_date = pendulum.parse(start_date)
+        current_start_date, current_end_date = _get_date_range(updated_at, start_date)
         print(current_start_date)
 
-        if updated_at.end_value:
-            current_end_date = pendulum.parse(updated_at.end_value)
-        else:
-            current_end_date = pendulum.now(tz="UTC")
-
         for item in _paginate(api_key, TEAMS_QUERY, "teams"):
             if pendulum.parse(item["updatedAt"]) >= current_start_date:
                 if pendulum.parse(item["updatedAt"]) <= current_end_date:
-                    yield _normalize_team(item)
+                    yield normalize_dictionaries(item)
 
     @dlt.resource(name="users", primary_key="id", write_disposition="merge")
     def users(
@@ -179,19 +188,28 @@ def linear_source(
             range_end="closed",
         ),
     ) -> Iterator[Dict[str, Any]]:
-        if updated_at.last_value:
-            current_start_date = pendulum.parse(updated_at.last_value)
-        else:
-            current_start_date = pendulum.parse(start_date)
-
-        if updated_at.end_value:
-            current_end_date = pendulum.parse(updated_at.end_value)
-        else:
-            current_end_date = pendulum.now(tz="UTC")
+        current_start_date, current_end_date = _get_date_range(updated_at, start_date)
 
         for item in _paginate(api_key, USERS_QUERY, "users"):
             if pendulum.parse(item["updatedAt"]) >= current_start_date:
                 if pendulum.parse(item["updatedAt"]) <= current_end_date:
-                    yield item
+                    yield normalize_dictionaries(item)
+
+    @dlt.resource(name="workflow_states", primary_key="id", write_disposition="merge")
+    def workflow_states(
+        updated_at: dlt.sources.incremental[str] = dlt.sources.incremental(
+            "updatedAt",
+            initial_value=start_date.isoformat(),
+            end_value=end_date.isoformat() if end_date else None,
+            range_start="closed",
+            range_end="closed",
+        ),
+    ) -> Iterator[Dict[str, Any]]:
+        current_start_date, current_end_date = _get_date_range(updated_at, start_date)
+
+        for item in _paginate(api_key, WORKFLOW_STATES_QUERY, "workflowStates"):
+            if pendulum.parse(item["updatedAt"]) >= current_start_date:
+                if pendulum.parse(item["updatedAt"]) <= current_end_date:
+                    yield normalize_dictionaries(item)
+    return [issues, projects, teams, users, workflow_states]
 
-    return issues, projects, teams, users
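The new resource registered above makes workflow states a fifth Linear table (it is also whitelisted in sources.py further down). A sketch of selecting it when building the source directly; the api_key/start_date/end_date parameter names are assumptions inferred from how linear_source uses them in this diff:

import pendulum
from ingestr.src.linear import linear_source

source = linear_source(
    api_key="lin_api_XXXX",                    # placeholder key
    start_date=pendulum.datetime(2024, 1, 1),  # assumed parameter names
    end_date=None,
).with_resources("workflow_states")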
ingestr/src/linear/helpers.py CHANGED
@@ -32,41 +32,24 @@ def _paginate(api_key: str, query: str, root: str) -> Iterator[Dict[str, Any]]:
         cursor = data["pageInfo"]["endCursor"]
 
 
-def _normalize_issue(item: Dict[str, Any]) -> Dict[str, Any]:
-    field_mapping = {
-        "assignee": "assignee_id",
-        "creator": "creator_id",
-        "state": "state_id",
-        "cycle": "cycle_id",
-        "project": "project_id",
-    }
-    for key, value in field_mapping.items():
-        if item.get(key):
-            item[value] = item[key]["id"]
-            del item[key]
-        else:
-            item[value] = None
-            del item[key]
-    json_fields = [
-        "comments",
-        "subscribers",
-        "attachments",
-        "labels",
-        "subtasks",
-        "projects",
-        "memberships",
-        "members",
-    ]
-    for field in json_fields:
-        if item.get(field):
-            item[f"{field}"] = item[field].get("nodes", [])
 
-    return item
 
-
-def _normalize_team(item: Dict[str, Any]) -> Dict[str, Any]:
-    json_fields = ["memberships", "members", "projects"]
-    for field in json_fields:
-        if item.get(field):
-            item[f"{field}"] = item[field].get("nodes", [])
-    return item
+def normalize_dictionaries(item: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Automatically normalize dictionary fields by detecting their structure:
+    - Convert nested objects with 'id' field to {field_name}_id
+    - Convert objects with 'nodes' field to arrays
+    """
+    normalized_item = item.copy()
+
+    for key, value in list(normalized_item.items()):
+        if isinstance(value, dict):
+            # If the dict has an 'id' field, replace with {key}_id
+            if 'id' in value:
+                normalized_item[f"{key}_id"] = value['id']
+                del normalized_item[key]
+            # If the dict has 'nodes' field, extract the nodes array
+            elif 'nodes' in value:
+                normalized_item[key] = value['nodes']
+
+    return normalized_item
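The two hand-written normalizers are replaced by a single structural pass: any nested dict carrying an "id" collapses to a {key}_id column, and any connection-style dict carrying "nodes" is flattened to its list. Note that, unlike the old _normalize_issue, absent relations no longer produce explicit None columns. A quick behavior sketch with illustrative sample data:

from ingestr.src.linear.helpers import normalize_dictionaries

item = {
    "id": "ISS-1",                            # plain value, left untouched
    "assignee": {"id": "user_42"},            # becomes assignee_id
    "labels": {"nodes": [{"name": "bug"}]},   # flattened to its nodes list
    "title": "Fix pagination",
}
print(normalize_dictionaries(item))
# {'id': 'ISS-1', 'labels': [{'name': 'bug'}], 'title': 'Fix pagination', 'assignee_id': 'user_42'}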
ingestr/src/sources.py CHANGED
@@ -702,6 +702,11 @@ class ShopifySource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Shopify takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
        source_fields = urlparse(uri)
        source_params = parse_qs(source_fields.query)
        api_key = source_params.get("api_key")
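This fail-fast guard rejects a user-supplied incremental_key for connectors that manage their own incremental cursor (previously the key was accepted and, presumably, ignored). The same five-line check is added below to TikTok, Google Analytics, LinkedIn Ads, ClickUp, AppLovin Max, Personio, Pipedrive, Freshdesk, Trustpilot, Phantombuster, Solidgate, QuickBooks, Internet Society Pulse, Pinterest, Linear, Zoom and InfluxDB. A hypothetical call showing the new failure mode, assuming ShopifySource can be instantiated directly the way the source factory does:

from ingestr.src.sources import ShopifySource

try:
    ShopifySource().dlt_source(
        "shopify://?api_key=KEY",      # placeholder URI
        "products",
        incremental_key="updated_at",  # now rejected up front
    )
except ValueError as exc:
    print(exc)  # "Shopify takes care of incrementality on its own, ..."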
@@ -1003,6 +1008,16 @@ class FacebookAdsSource:
             facebook_insights_source,
         )
 
+        insights_max_wait_to_finish_seconds = source_params.get(
+            "insights_max_wait_to_finish_seconds", [60 * 60 * 4]
+        )
+        insights_max_wait_to_start_seconds = source_params.get(
+            "insights_max_wait_to_start_seconds", [60 * 30]
+        )
+        insights_max_async_sleep_seconds = source_params.get(
+            "insights_max_async_sleep_seconds", [20]
+        )
+
         endpoint = None
         if table in ["campaigns", "ad_sets", "ad_creatives", "ads", "leads"]:
             endpoint = table
@@ -1012,6 +1027,13 @@ class FacebookAdsSource:
                 account_id=account_id[0],
                 start_date=kwargs.get("interval_start"),
                 end_date=kwargs.get("interval_end"),
+                insights_max_wait_to_finish_seconds=insights_max_wait_to_finish_seconds[
+                    0
+                ],
+                insights_max_wait_to_start_seconds=insights_max_wait_to_start_seconds[
+                    0
+                ],
+                insights_max_async_sleep_seconds=insights_max_async_sleep_seconds[0],
             ).with_resources("facebook_insights")
         elif table.startswith("facebook_insights:"):
             # Parse custom breakdowns and metrics from table name
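The new knobs are read from the source URI's query string; parse_qs returns each value as a single-element list, hence the [0] indexing above. Note also that values supplied via the URI arrive as strings, while the in-code defaults are ints. A hypothetical URI, with the access_token parameter assumed from context (account_id is confirmed by this hunk):

uri = (
    "facebook_ads://?access_token=TOKEN&account_id=123"  # access_token assumed
    "&insights_max_wait_to_finish_seconds=7200"
    "&insights_max_wait_to_start_seconds=600"
    "&insights_max_async_sleep_seconds=10"
)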
@@ -1652,6 +1674,11 @@ class TikTokSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "TikTok takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         endpoint = "custom_reports"
 
         parsed_uri = urlparse(uri)
@@ -1874,6 +1901,11 @@ class GoogleAnalyticsSource:
     def dlt_source(self, uri: str, table: str, **kwargs):
         import ingestr.src.google_analytics.helpers as helpers
 
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Google Analytics takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         result = helpers.parse_google_analytics_uri(uri)
         credentials = result["credentials"]
         property_id = result["property_id"]
@@ -2246,6 +2278,11 @@ class LinkedInAdsSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "LinkedIn Ads takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         source_fields = parse_qs(parsed_uri.query)
 
@@ -2329,6 +2366,11 @@ class ClickupSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "ClickUp takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         api_token = params.get("api_token")
@@ -2413,6 +2455,11 @@ class ApplovinMaxSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "AppLovin Max takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
 
@@ -2505,6 +2552,11 @@ class PersonioSource:
 
     # applovin://?client_id=123&client_secret=123
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Personio takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
 
@@ -2595,6 +2647,11 @@ class PipedriveSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Pipedrive takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         api_key = params.get("api_token")
@@ -2677,6 +2734,11 @@ class FreshdeskSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Freshdesk takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         domain = parsed_uri.netloc
         query = parsed_uri.query
@@ -2730,6 +2792,11 @@ class TrustpilotSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Trustpilot takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         business_unit_id = parsed_uri.netloc
         params = parse_qs(parsed_uri.query)
@@ -2770,6 +2837,11 @@ class PhantombusterSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Phantombuster takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         # phantombuster://?api_key=<api_key>
         # source table = phantom_results:agent_id
         parsed_uri = urlparse(uri)
@@ -2923,6 +2995,11 @@ class SolidgateSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Solidgate takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         query_params = parse_qs(parsed_uri.query)
         public_key = query_params.get("public_key")
@@ -3016,6 +3093,11 @@ class QuickBooksSource:
 
     # quickbooks://?company_id=<company_id>&client_id=<client_id>&client_secret=<client_secret>&refresh_token=<refresh>&access_token=<access_token>&environment=<env>&minor_version=<version>
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "QuickBooks takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
 
         params = parse_qs(parsed_uri.query)
@@ -3085,6 +3167,11 @@ class IsocPulseSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Internet Society Pulse takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         token = params.get("token")
@@ -3120,6 +3207,11 @@ class PinterestSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Pinterest takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed = urlparse(uri)
         params = parse_qs(parsed.query)
         access_token = params.get("access_token")
@@ -3154,13 +3246,18 @@ class LinearSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Linear takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         api_key = params.get("api_key")
         if api_key is None:
             raise MissingValueError("api_key", "Linear")
 
-        if table not in ["issues", "projects", "teams", "users"]:
+        if table not in ["issues", "projects", "teams", "users", "workflow_states"]:
             raise UnsupportedResourceError(table, "Linear")
 
         start_date = kwargs.get("interval_start")
@@ -3187,6 +3284,11 @@ class ZoomSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Zoom takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed = urlparse(uri)
         params = parse_qs(parsed.query)
         client_id = params.get("client_id")
@@ -3228,6 +3330,11 @@ class InfluxDBSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "InfluxDB takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         host = parsed_uri.hostname
ingestr-0.13.79.dist-info/METADATA → ingestr-0.13.80.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ingestr
-Version: 0.13.79
+Version: 0.13.80
 Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
 Project-URL: Homepage, https://github.com/bruin-data/ingestr
 Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
ingestr-0.13.79.dist-info/RECORD → ingestr-0.13.80.dist-info/RECORD CHANGED
@@ -2,7 +2,7 @@ ingestr/conftest.py,sha256=OE2yxeTCosS9CUFVuqNypm-2ftYvVBeeq7egm3878cI,1981
 ingestr/main.py,sha256=qoWHNcHh0-xVnyQxbQ-SKuTxPb1RNV3ENkCpqO7CLrk,26694
 ingestr/src/.gitignore,sha256=8cX1AZTSI0TcdZFGTmS_oyBjpfCzhOEt0DdAo2dFIY8,203
 ingestr/src/blob.py,sha256=UUWMjHUuoR9xP1XZQ6UANQmnMVyDx3d0X4-2FQC271I,2138
-ingestr/src/buildinfo.py,sha256=yE0cfxWae8TNJJLYcRmNexeK769vtdz_-vJGzcROgwE,21
+ingestr/src/buildinfo.py,sha256=K8l-cnsoOmTtSy1GdxNMRLZxsjHoYiJsCc5rvXeCpHE,21
 ingestr/src/destinations.py,sha256=M2Yni6wiWcrvZ8EPJemidqxN156l0rehgCc7xuil7mo,22840
 ingestr/src/errors.py,sha256=Ufs4_DfE77_E3vnA1fOQdi6cmuLVNm7_SbFLkL1XPGk,686
 ingestr/src/factory.py,sha256=rF5Ry4o4t8KulSPBtrd7ZKCI_0TH1DAetG0zs9H7oik,6792
@@ -11,7 +11,7 @@ ingestr/src/http_client.py,sha256=bxqsk6nJNXCo-79gW04B53DQO-yr25vaSsqP0AKtjx4,73
 ingestr/src/loader.py,sha256=9NaWAyfkXdqAZSS-N72Iwo36Lbx4PyqIfaaH1dNdkFs,1712
 ingestr/src/partition.py,sha256=BrIP6wFJvyR7Nus_3ElnfxknUXeCipK_E_bB8kZowfc,969
 ingestr/src/resource.py,sha256=ZqmZxFQVGlF8rFPhBiUB08HES0yoTj8sZ--jKfaaVps,1164
-ingestr/src/sources.py,sha256=qZz35cdO-nO9CZsdOJ8Ni56wclNfbGQuGj4nsoHpFxE,115678
+ingestr/src/sources.py,sha256=guwCdKQDvT2XMYeR2O3nJ9kZ-wLCSDpOex4UH0luG5k,119966
 ingestr/src/table_definition.py,sha256=REbAbqdlmUMUuRh8nEQRreWjPVOQ5ZcfqGkScKdCrmk,390
 ingestr/src/time.py,sha256=H_Fk2J4ShXyUM-EMY7MqCLZQhlnZMZvO952bmZPc4yE,254
 ingestr/src/version.py,sha256=J_2xgZ0mKlvuHcjdKCx2nlioneLH0I47JiU_Slr_Nwc,189
@@ -41,9 +41,9 @@ ingestr/src/clickup/helpers.py,sha256=RzDKMUAHccuDhocIQ2ToBXfCERo8CBJqA3t-IPltBC
 ingestr/src/collector/spinner.py,sha256=_ZUqF5MI43hVIULdjF5s5mrAZbhEFXaiWirQmrv3Yk4,1201
 ingestr/src/dynamodb/__init__.py,sha256=swhxkeYBbJ35jn1IghCtvYWT2BM33KynVCh_oR4z28A,2264
 ingestr/src/elasticsearch/__init__.py,sha256=m-q93HgUmTwGDUwHOjHawstWL06TC3WIX3H05szybrY,2556
-ingestr/src/facebook_ads/__init__.py,sha256=_9929DYzcq5iLt-l3DmJ4VBZwmoEwgyPZbPstH0ySmI,9725
+ingestr/src/facebook_ads/__init__.py,sha256=bX6lnf0LxIcOyZHDVA9FL5iKhgnQ0f5Hfma4eXcQuIk,10094
 ingestr/src/facebook_ads/exceptions.py,sha256=4Nlbc0Mv3i5g-9AoyT-n1PIa8IDi3VCTfEAzholx4Wc,115
-ingestr/src/facebook_ads/helpers.py,sha256=NshS21can1xhRKQzg_o-c6qSxWoC3NnE3FwgJxUnygE,8239
+ingestr/src/facebook_ads/helpers.py,sha256=Oh9-LepxxBRnPXQZMbbNOSbxg9T8a4nmiLSt22GPt6E,8233
 ingestr/src/facebook_ads/settings.py,sha256=Bsic8RcmH-NfEZ7r_NGospTCmwISK9XaMT5y2NZirtg,4938
 ingestr/src/facebook_ads/utils.py,sha256=ES2ylPoW3j3fjp6OMUgp21n1cG1OktXsmWWMk5vBW_I,1590
 ingestr/src/filesystem/__init__.py,sha256=zkIwbRr0ir0EUdniI25p2zGiVc-7M9EmR351AjNb0eA,4163
@@ -85,8 +85,8 @@ ingestr/src/kinesis/helpers.py,sha256=SO2cFmWNGcykUYmjHdfxWsOQSkLQXyhFtfWnkcUOM0
 ingestr/src/klaviyo/__init__.py,sha256=o_noUgbxLk36s4f9W56_ibPorF0n7kVapPUlV0p-jfA,7875
 ingestr/src/klaviyo/client.py,sha256=tPj79ia7AW0ZOJhzlKNPCliGbdojRNwUFp8HvB2ym5s,7434
 ingestr/src/klaviyo/helpers.py,sha256=_i-SHffhv25feLDcjy6Blj1UxYLISCwVCMgGtrlnYHk,496
-ingestr/src/linear/__init__.py,sha256=Qbf8EPHVh-8pVNe_fqLVinds7qQ3O4ymDuPPPD560Ng,5953
-ingestr/src/linear/helpers.py,sha256=Mb7oKpUTRnHl-CvO1fubjJlJFDkhTuA7PUldWglvagI,2044
+ingestr/src/linear/__init__.py,sha256=XWgWiDJi87OFHnsOjTq4ZSAdtMcPdplBYC8fJM-6dMA,6607
+ingestr/src/linear/helpers.py,sha256=y8XhEDBVnxMmSzzyrS0_RnPwtNJIRuKM4Kw3wW9p6UM,1796
 ingestr/src/linkedin_ads/__init__.py,sha256=CAPWFyV24loziiphbLmODxZUXZJwm4JxlFkr56q0jfo,1855
 ingestr/src/linkedin_ads/dimension_time_enum.py,sha256=EmHRdkFyTAfo4chGjThrwqffWJxmAadZMbpTvf0xkQc,198
 ingestr/src/linkedin_ads/helpers.py,sha256=eUWudRVlXl4kqIhfXQ1eVsUpZwJn7UFqKSpnbLfxzds,4498
@@ -151,8 +151,8 @@ ingestr/testdata/merge_expected.csv,sha256=DReHqWGnQMsf2PBv_Q2pfjsgvikYFnf1zYcQZ
 ingestr/testdata/merge_part1.csv,sha256=Pw8Z9IDKcNU0qQHx1z6BUf4rF_-SxKGFOvymCt4OY9I,185
 ingestr/testdata/merge_part2.csv,sha256=T_GiWxA81SN63_tMOIuemcvboEFeAmbKc7xRXvL9esw,287
 ingestr/tests/unit/test_smartsheets.py,sha256=eiC2CCO4iNJcuN36ONvqmEDryCA1bA1REpayHpu42lk,5058
-ingestr-0.13.79.dist-info/METADATA,sha256=5dl0NFB3Ach1_lFtE4xOJpud_chn_w0qvepZnnMjRzo,15182
-ingestr-0.13.79.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-ingestr-0.13.79.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
-ingestr-0.13.79.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
-ingestr-0.13.79.dist-info/RECORD,,
+ingestr-0.13.80.dist-info/METADATA,sha256=HF4xO0hXDV_gn389MIuX_-GhahKPaz8GOMIMt1If4x0,15182
+ingestr-0.13.80.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ingestr-0.13.80.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
+ingestr-0.13.80.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
+ingestr-0.13.80.dist-info/RECORD,,