ingestr 0.13.78__py3-none-any.whl → 0.13.80__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


ingestr/src/sources.py CHANGED
@@ -73,6 +73,20 @@ class SqlSource:
 
         engine_adapter_callback = None
 
+        if uri.startswith("md://") or uri.startswith("motherduck://"):
+            parsed_uri = urlparse(uri)
+            query_params = parse_qs(parsed_uri.query)
+            # Convert md:// URI to duckdb:///md: format
+            if parsed_uri.path:
+                db_path = parsed_uri.path
+            else:
+                db_path = ""
+
+            token = query_params.get("token", [""])[0]
+            if not token:
+                raise ValueError("Token is required for MotherDuck connection")
+            uri = f"duckdb:///md:{db_path}?motherduck_token={token}"
+
         if uri.startswith("mysql://"):
             uri = uri.replace("mysql://", "mysql+pymysql://")
 
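
Note on the new MotherDuck branch above: it rewrites the connection string into a DuckDB one before the rest of SqlSource runs. A minimal standalone sketch of that rewrite with an invented token (the URI shape is an assumption; any path component in the URI is carried over after "md:" unchanged):

from urllib.parse import urlparse, parse_qs

# Hypothetical MotherDuck connection string; only the token is required by the new code.
uri = "md://?token=abc123"

parsed_uri = urlparse(uri)
query_params = parse_qs(parsed_uri.query)

db_path = parsed_uri.path if parsed_uri.path else ""
token = query_params.get("token", [""])[0]
if not token:
    raise ValueError("Token is required for MotherDuck connection")

print(f"duckdb:///md:{db_path}?motherduck_token={token}")
# -> duckdb:///md:?motherduck_token=abc123
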
@@ -409,31 +423,181 @@ class MongoDbSource:
         return False
 
     def dlt_source(self, uri: str, table: str, **kwargs):
-        table_fields = table_string_to_dataclass(table)
+        # Check if this is a custom query format (collection:query)
+        if ":" in table:
+            collection_name, query_json = table.split(":", 1)
 
-        incremental = None
-        if kwargs.get("incremental_key"):
-            start_value = kwargs.get("interval_start")
-            end_value = kwargs.get("interval_end")
+            # Parse and validate the query
+            try:
+                import json
 
-            incremental = dlt_incremental(
-                kwargs.get("incremental_key", ""),
-                initial_value=start_value,
-                end_value=end_value,
-                range_end="closed",
-                range_start="closed",
+                query = json.loads(query_json)
+            except json.JSONDecodeError as e:
+                raise ValueError(f"Invalid JSON query format: {e}")
+
+            # Validate that it's a list for aggregation pipeline
+            if not isinstance(query, list):
+                raise ValueError(
+                    "Query must be a JSON array representing a MongoDB aggregation pipeline"
+                )
+
+            # Check for incremental load requirements
+            incremental = None
+            if kwargs.get("incremental_key"):
+                start_value = kwargs.get("interval_start")
+                end_value = kwargs.get("interval_end")
+
+                # Validate that incremental key is present in the pipeline
+                incremental_key = kwargs.get("incremental_key")
+                self._validate_incremental_query(query, str(incremental_key))
+
+                incremental = dlt_incremental(
+                    str(incremental_key),
+                    initial_value=start_value,
+                    end_value=end_value,
+                )
+
+            # Substitute interval parameters in the query
+            query = self._substitute_interval_params(query, kwargs)
+
+            # Parse collection name to get database and collection
+            if "." in collection_name:
+                # Handle database.collection format
+                table_fields = table_string_to_dataclass(collection_name)
+                database = table_fields.dataset
+                collection = table_fields.table
+            else:
+                # Single collection name, use default database
+                database = None
+                collection = collection_name
+
+            table_instance = self.table_builder(
+                connection_url=uri,
+                database=database,
+                collection=collection,
+                parallel=False,
+                incremental=incremental,
+                custom_query=query,
+            )
+            table_instance.max_table_nesting = 1
+            return table_instance
+        else:
+            # Default behavior for simple collection names
+            table_fields = table_string_to_dataclass(table)
+
+            incremental = None
+            if kwargs.get("incremental_key"):
+                start_value = kwargs.get("interval_start")
+                end_value = kwargs.get("interval_end")
+
+                incremental = dlt_incremental(
+                    kwargs.get("incremental_key", ""),
+                    initial_value=start_value,
+                    end_value=end_value,
+                )
+
+            table_instance = self.table_builder(
+                connection_url=uri,
+                database=table_fields.dataset,
+                collection=table_fields.table,
+                parallel=False,
+                incremental=incremental,
+            )
+            table_instance.max_table_nesting = 1
+
+            return table_instance
+
+    def _validate_incremental_query(self, query: list, incremental_key: str):
+        """Validate that incremental key is projected in the aggregation pipeline"""
+        # Check if there's a $project stage and if incremental_key is included
+        has_project = False
+        incremental_key_projected = False
+
+        for stage in query:
+            if "$project" in stage:
+                has_project = True
+                project_stage = stage["$project"]
+                if isinstance(project_stage, dict):
+                    # Check if incremental_key is explicitly included
+                    if incremental_key in project_stage:
+                        if project_stage[incremental_key] not in [0, False]:
+                            incremental_key_projected = True
+                    # If there are only inclusions (1 or True values) and incremental_key is not included
+                    elif any(v in [1, True] for v in project_stage.values()):
+                        # This is an inclusion projection, incremental_key must be explicitly included
+                        incremental_key_projected = False
+                    # If there are only exclusions (0 or False values) and incremental_key is not excluded
+                    elif all(
+                        v in [0, False]
+                        for v in project_stage.values()
+                        if v in [0, False, 1, True]
+                    ):
+                        # This is an exclusion projection, incremental_key is included by default
+                        if incremental_key not in project_stage:
+                            incremental_key_projected = True
+                        else:
+                            incremental_key_projected = project_stage[
+                                incremental_key
+                            ] not in [0, False]
+                else:
+                    # Mixed or unclear projection, assume incremental_key needs to be explicit
+                    incremental_key_projected = False
+
+        # If there's a $project stage but incremental_key is not projected, raise error
+        if has_project and not incremental_key_projected:
+            raise ValueError(
+                f"Incremental key '{incremental_key}' must be included in the projected fields of the aggregation pipeline"
             )
 
-        table_instance = self.table_builder(
-            connection_url=uri,
-            database=table_fields.dataset,
-            collection=table_fields.table,
-            parallel=False,
-            incremental=incremental,
-        )
-        table_instance.max_table_nesting = 1
+    def _substitute_interval_params(self, query: list, kwargs: dict):
+        """Substitute :interval_start and :interval_end placeholders with actual datetime values"""
+        from dlt.common.time import ensure_pendulum_datetime
 
-        return table_instance
+        # Get interval values and convert them to datetime objects
+        interval_start = kwargs.get("interval_start")
+        interval_end = kwargs.get("interval_end")
+
+        # Convert string dates to datetime objects if needed
+        if interval_start is not None:
+            if isinstance(interval_start, str):
+                pendulum_dt = ensure_pendulum_datetime(interval_start)
+                interval_start = (
+                    pendulum_dt.to_datetime()
+                    if hasattr(pendulum_dt, "to_datetime")
+                    else pendulum_dt
+                )
+            elif hasattr(interval_start, "to_datetime"):
+                interval_start = interval_start.to_datetime()
+
+        if interval_end is not None:
+            if isinstance(interval_end, str):
+                pendulum_dt = ensure_pendulum_datetime(interval_end)
+                interval_end = (
+                    pendulum_dt.to_datetime()
+                    if hasattr(pendulum_dt, "to_datetime")
+                    else pendulum_dt
+                )
+            elif hasattr(interval_end, "to_datetime"):
+                interval_end = interval_end.to_datetime()
+
+        # Deep copy the query and replace placeholders with actual datetime objects
+        def replace_placeholders(obj):
+            if isinstance(obj, dict):
+                result = {}
+                for key, value in obj.items():
+                    if value == ":interval_start" and interval_start is not None:
+                        result[key] = interval_start
+                    elif value == ":interval_end" and interval_end is not None:
+                        result[key] = interval_end
+                    else:
+                        result[key] = replace_placeholders(value)
+                return result
+            elif isinstance(obj, list):
+                return [replace_placeholders(item) for item in obj]
+            else:
+                return obj
+
+        return replace_placeholders(query)
 
 
 class LocalCsvSource:
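
Note on the new MongoDB custom-query path above: the table argument may now be a collection:pipeline pair, where the pipeline is a JSON array (an aggregation pipeline) and the string values :interval_start / :interval_end are substituted with the run's interval bounds; if an incremental_key is supplied, any $project stage must keep that key. A hypothetical sketch of assembling such a table argument (collection and field names are invented):

import json

# Invented collection and fields; "updated_at" plays the role of the incremental key,
# so it is kept in the $project stage to satisfy _validate_incremental_query.
pipeline = [
    {"$match": {"updated_at": {"$gte": ":interval_start", "$lt": ":interval_end"}}},
    {"$project": {"updated_at": 1, "status": 1, "amount": 1}},
]

# "orders" alone targets the default database; "mydb.orders" would select a database explicitly.
table = "orders:" + json.dumps(pipeline)
print(table)
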
@@ -538,6 +702,11 @@ class ShopifySource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Shopify takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         source_fields = urlparse(uri)
         source_params = parse_qs(source_fields.query)
         api_key = source_params.get("api_key")
@@ -839,6 +1008,16 @@ class FacebookAdsSource:
             facebook_insights_source,
         )
 
+        insights_max_wait_to_finish_seconds = source_params.get(
+            "insights_max_wait_to_finish_seconds", [60 * 60 * 4]
+        )
+        insights_max_wait_to_start_seconds = source_params.get(
+            "insights_max_wait_to_start_seconds", [60 * 30]
+        )
+        insights_max_async_sleep_seconds = source_params.get(
+            "insights_max_async_sleep_seconds", [20]
+        )
+
         endpoint = None
         if table in ["campaigns", "ad_sets", "ad_creatives", "ads", "leads"]:
             endpoint = table
@@ -848,6 +1027,13 @@ class FacebookAdsSource:
                 account_id=account_id[0],
                 start_date=kwargs.get("interval_start"),
                 end_date=kwargs.get("interval_end"),
+                insights_max_wait_to_finish_seconds=insights_max_wait_to_finish_seconds[
+                    0
+                ],
+                insights_max_wait_to_start_seconds=insights_max_wait_to_start_seconds[
+                    0
+                ],
+                insights_max_async_sleep_seconds=insights_max_async_sleep_seconds[0],
             ).with_resources("facebook_insights")
         elif table.startswith("facebook_insights:"):
             # Parse custom breakdowns and metrics from table name
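
Note on the three new insight-polling knobs above: they are read from the source URI's query string with parse_qs, so a value supplied in the URI arrives as a string while the fallbacks are integers (4 hours, 30 minutes, 20 seconds). A small sketch of that lookup in isolation; the URI below is illustrative and omits the other required parameters:

from urllib.parse import urlparse, parse_qs

# Illustrative connection string; only one of the three knobs is overridden here.
uri = "facebookads://?account_id=123&insights_max_wait_to_finish_seconds=7200"

source_params = parse_qs(urlparse(uri).query)

max_wait_to_finish = source_params.get(
    "insights_max_wait_to_finish_seconds", [60 * 60 * 4]
)[0]
max_wait_to_start = source_params.get("insights_max_wait_to_start_seconds", [60 * 30])[0]
max_async_sleep = source_params.get("insights_max_async_sleep_seconds", [20])[0]

print(max_wait_to_finish, max_wait_to_start, max_async_sleep)  # 7200 1800 20
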
@@ -961,7 +1147,7 @@ class SlackSource:
 
 class HubspotSource:
     def handles_incrementality(self) -> bool:
-        return True
+        return False
 
     # hubspot://?api_key=<api_key>
     def dlt_source(self, uri: str, table: str, **kwargs):
@@ -1488,6 +1674,11 @@ class TikTokSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "TikTok takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         endpoint = "custom_reports"
 
         parsed_uri = urlparse(uri)
@@ -1710,6 +1901,11 @@ class GoogleAnalyticsSource:
     def dlt_source(self, uri: str, table: str, **kwargs):
         import ingestr.src.google_analytics.helpers as helpers
 
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Google Analytics takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         result = helpers.parse_google_analytics_uri(uri)
         credentials = result["credentials"]
         property_id = result["property_id"]
@@ -2082,6 +2278,11 @@ class LinkedInAdsSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "LinkedIn Ads takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         source_fields = parse_qs(parsed_uri.query)
 
@@ -2165,6 +2366,11 @@ class ClickupSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "ClickUp takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         api_token = params.get("api_token")
@@ -2249,6 +2455,11 @@ class ApplovinMaxSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "AppLovin Max takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
 
@@ -2341,6 +2552,11 @@ class PersonioSource:
 
     # applovin://?client_id=123&client_secret=123
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Personio takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
 
@@ -2431,6 +2647,11 @@ class PipedriveSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Pipedrive takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         api_key = params.get("api_token")
@@ -2513,6 +2734,11 @@ class FreshdeskSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Freshdesk takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         domain = parsed_uri.netloc
         query = parsed_uri.query
@@ -2528,6 +2754,18 @@ class FreshdeskSource:
         if api_key is None:
             raise MissingValueError("api_key", "Freshdesk")
 
+        start_date = kwargs.get("interval_start")
+        if start_date is not None:
+            start_date = ensure_pendulum_datetime(start_date).in_tz("UTC")
+        else:
+            start_date = ensure_pendulum_datetime("2022-01-01T00:00:00Z")
+
+        end_date = kwargs.get("interval_end")
+        if end_date is not None:
+            end_date = ensure_pendulum_datetime(end_date).in_tz("UTC")
+        else:
+            end_date = None
+
         if table not in [
             "agents",
             "companies",
@@ -2541,7 +2779,10 @@ class FreshdeskSource:
         from ingestr.src.freshdesk import freshdesk_source
 
         return freshdesk_source(
-            api_secret_key=api_key[0], domain=domain
+            api_secret_key=api_key[0],
+            domain=domain,
+            start_date=start_date,
+            end_date=end_date,
         ).with_resources(table)
 
 
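
Note on the new Freshdesk date handling above: interval bounds are normalised to UTC pendulum datetimes, the start defaults to 2022-01-01 when no interval is given, and the end stays open-ended. A minimal sketch of that logic on its own, assuming dlt is installed and using invented interval values:

from dlt.common.time import ensure_pendulum_datetime

# What kwargs.get("interval_start") / kwargs.get("interval_end") might hold on a given run.
interval_start = None
interval_end = "2024-06-30"

if interval_start is not None:
    start_date = ensure_pendulum_datetime(interval_start).in_tz("UTC")
else:
    start_date = ensure_pendulum_datetime("2022-01-01T00:00:00Z")

end_date = (
    ensure_pendulum_datetime(interval_end).in_tz("UTC") if interval_end is not None else None
)

print(start_date, end_date)
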
@@ -2551,6 +2792,11 @@ class TrustpilotSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Trustpilot takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         business_unit_id = parsed_uri.netloc
         params = parse_qs(parsed_uri.query)
@@ -2591,6 +2837,11 @@ class PhantombusterSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Phantombuster takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         # phantombuster://?api_key=<api_key>
         # source table = phantom_results:agent_id
         parsed_uri = urlparse(uri)
@@ -2684,7 +2935,7 @@ class ElasticsearchSource:
 
 class AttioSource:
     def handles_incrementality(self) -> bool:
-        return True
+        return False
 
     def dlt_source(self, uri: str, table: str, **kwargs):
         parsed_uri = urlparse(uri)
@@ -2744,6 +2995,11 @@ class SolidgateSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Solidgate takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         query_params = parse_qs(parsed_uri.query)
         public_key = query_params.get("public_key")
@@ -2837,6 +3093,11 @@ class QuickBooksSource:
 
     # quickbooks://?company_id=<company_id>&client_id=<client_id>&client_secret=<client_secret>&refresh_token=<refresh>&access_token=<access_token>&environment=<env>&minor_version=<version>
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "QuickBooks takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
 
         params = parse_qs(parsed_uri.query)
@@ -2906,6 +3167,11 @@ class IsocPulseSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Internet Society Pulse takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         token = params.get("token")
@@ -2941,6 +3207,11 @@ class PinterestSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Pinterest takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed = urlparse(uri)
         params = parse_qs(parsed.query)
         access_token = params.get("access_token")
@@ -2975,13 +3246,18 @@ class LinearSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Linear takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         api_key = params.get("api_key")
         if api_key is None:
             raise MissingValueError("api_key", "Linear")
 
-        if table not in ["issues", "projects", "teams", "users"]:
+        if table not in ["issues", "projects", "teams", "users", "workflow_states"]:
             raise UnsupportedResourceError(table, "Linear")
 
         start_date = kwargs.get("interval_start")
@@ -3008,6 +3284,11 @@ class ZoomSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "Zoom takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed = urlparse(uri)
         params = parse_qs(parsed.query)
         client_id = params.get("client_id")
@@ -3049,6 +3330,11 @@ class InfluxDBSource:
         return True
 
     def dlt_source(self, uri: str, table: str, **kwargs):
+        if kwargs.get("incremental_key"):
+            raise ValueError(
+                "InfluxDB takes care of incrementality on its own, you should not provide incremental_key"
+            )
+
         parsed_uri = urlparse(uri)
         params = parse_qs(parsed_uri.query)
         host = parsed_uri.hostname
@@ -3056,7 +3342,7 @@ class InfluxDBSource:
 
         secure = params.get("secure", ["true"])[0].lower() != "false"
         scheme = "https" if secure else "http"
-
+
         if port:
             host_url = f"{scheme}://{host}:{port}"
         else:

ingestr-0.13.78.dist-info/METADATA → ingestr-0.13.80.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ingestr
-Version: 0.13.78
+Version: 0.13.80
 Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
 Project-URL: Homepage, https://github.com/bruin-data/ingestr
 Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
@@ -355,6 +355,11 @@ Pull requests are welcome. However, please open an issue first to discuss what y
     <td>✅</td>
     <td>❌</td>
   </tr>
+  <tr>
+    <td>MotherDuck</td>
+    <td>✅</td>
+    <td>✅</td>
+  </tr>
   <tr>
     <td>MySQL</td>
     <td>✅</td>

ingestr-0.13.78.dist-info/RECORD → ingestr-0.13.80.dist-info/RECORD CHANGED
@@ -1,17 +1,17 @@
 ingestr/conftest.py,sha256=OE2yxeTCosS9CUFVuqNypm-2ftYvVBeeq7egm3878cI,1981
-ingestr/main.py,sha256=QsNVrz5_NgRUkvfExnd-2E02TGmWivPuop5hYinVAjM,26513
+ingestr/main.py,sha256=qoWHNcHh0-xVnyQxbQ-SKuTxPb1RNV3ENkCpqO7CLrk,26694
 ingestr/src/.gitignore,sha256=8cX1AZTSI0TcdZFGTmS_oyBjpfCzhOEt0DdAo2dFIY8,203
 ingestr/src/blob.py,sha256=UUWMjHUuoR9xP1XZQ6UANQmnMVyDx3d0X4-2FQC271I,2138
-ingestr/src/buildinfo.py,sha256=cARFQnpIzB5xD3JEaIPIkee7dO80kbLs4M_XypNnwSI,21
-ingestr/src/destinations.py,sha256=ivTPio0zzqLRx22i597pxZHMNClz-XvYSyCaCPuGd8g,22248
+ingestr/src/buildinfo.py,sha256=K8l-cnsoOmTtSy1GdxNMRLZxsjHoYiJsCc5rvXeCpHE,21
+ingestr/src/destinations.py,sha256=M2Yni6wiWcrvZ8EPJemidqxN156l0rehgCc7xuil7mo,22840
 ingestr/src/errors.py,sha256=Ufs4_DfE77_E3vnA1fOQdi6cmuLVNm7_SbFLkL1XPGk,686
-ingestr/src/factory.py,sha256=q_rSi4gYMfxnGvzhytPRAgC08N40nqDISvXwl7i-E_M,6655
+ingestr/src/factory.py,sha256=rF5Ry4o4t8KulSPBtrd7ZKCI_0TH1DAetG0zs9H7oik,6792
 ingestr/src/filters.py,sha256=LLecXe9QkLFkFLUZ92OXNdcANr1a8edDxrflc2ko_KA,1452
 ingestr/src/http_client.py,sha256=bxqsk6nJNXCo-79gW04B53DQO-yr25vaSsqP0AKtjx4,732
 ingestr/src/loader.py,sha256=9NaWAyfkXdqAZSS-N72Iwo36Lbx4PyqIfaaH1dNdkFs,1712
 ingestr/src/partition.py,sha256=BrIP6wFJvyR7Nus_3ElnfxknUXeCipK_E_bB8kZowfc,969
 ingestr/src/resource.py,sha256=ZqmZxFQVGlF8rFPhBiUB08HES0yoTj8sZ--jKfaaVps,1164
-ingestr/src/sources.py,sha256=1A1tZKA1NUQnHdgvGPKHuRG5o8lNuCe7bIxB0n73eJw,107635
+ingestr/src/sources.py,sha256=guwCdKQDvT2XMYeR2O3nJ9kZ-wLCSDpOex4UH0luG5k,119966
 ingestr/src/table_definition.py,sha256=REbAbqdlmUMUuRh8nEQRreWjPVOQ5ZcfqGkScKdCrmk,390
 ingestr/src/time.py,sha256=H_Fk2J4ShXyUM-EMY7MqCLZQhlnZMZvO952bmZPc4yE,254
 ingestr/src/version.py,sha256=J_2xgZ0mKlvuHcjdKCx2nlioneLH0I47JiU_Slr_Nwc,189
@@ -41,9 +41,9 @@ ingestr/src/clickup/helpers.py,sha256=RzDKMUAHccuDhocIQ2ToBXfCERo8CBJqA3t-IPltBC
 ingestr/src/collector/spinner.py,sha256=_ZUqF5MI43hVIULdjF5s5mrAZbhEFXaiWirQmrv3Yk4,1201
 ingestr/src/dynamodb/__init__.py,sha256=swhxkeYBbJ35jn1IghCtvYWT2BM33KynVCh_oR4z28A,2264
 ingestr/src/elasticsearch/__init__.py,sha256=m-q93HgUmTwGDUwHOjHawstWL06TC3WIX3H05szybrY,2556
-ingestr/src/facebook_ads/__init__.py,sha256=Rchn-nH5mAOWW7OeYMCy_VS8dAoqfYY4t0YzWDSeN5k,9751
+ingestr/src/facebook_ads/__init__.py,sha256=bX6lnf0LxIcOyZHDVA9FL5iKhgnQ0f5Hfma4eXcQuIk,10094
 ingestr/src/facebook_ads/exceptions.py,sha256=4Nlbc0Mv3i5g-9AoyT-n1PIa8IDi3VCTfEAzholx4Wc,115
-ingestr/src/facebook_ads/helpers.py,sha256=NshS21can1xhRKQzg_o-c6qSxWoC3NnE3FwgJxUnygE,8239
+ingestr/src/facebook_ads/helpers.py,sha256=Oh9-LepxxBRnPXQZMbbNOSbxg9T8a4nmiLSt22GPt6E,8233
 ingestr/src/facebook_ads/settings.py,sha256=Bsic8RcmH-NfEZ7r_NGospTCmwISK9XaMT5y2NZirtg,4938
 ingestr/src/facebook_ads/utils.py,sha256=ES2ylPoW3j3fjp6OMUgp21n1cG1OktXsmWWMk5vBW_I,1590
 ingestr/src/filesystem/__init__.py,sha256=zkIwbRr0ir0EUdniI25p2zGiVc-7M9EmR351AjNb0eA,4163
@@ -51,11 +51,11 @@ ingestr/src/filesystem/helpers.py,sha256=bg0muSHZr3hMa8H4jN2-LGWzI-SUoKlQNiWJ74-
 ingestr/src/filesystem/readers.py,sha256=a0fKkaRpnAOGsXI3EBNYZa7x6tlmAOsgRzb883StY30,3987
 ingestr/src/frankfurter/__init__.py,sha256=oVi4BiOxPRyckEVrBNunyMAHulPyMgyGRwBbhn-Xz6M,4987
 ingestr/src/frankfurter/helpers.py,sha256=SyrkRTDqvKdQxRHTV5kcSeVG3FEnaK5zxHyNyqtumZ0,1445
-ingestr/src/freshdesk/__init__.py,sha256=uFQW_cJyymxtHQiYb_xjzZAklc487L0n9GkgHgC7yAI,2618
-ingestr/src/freshdesk/freshdesk_client.py,sha256=3z5Yc008ADzRcJWtNc00PwjkLzG-RMI8jVIOOyYA-Rw,4088
+ingestr/src/freshdesk/__init__.py,sha256=ukyorgCNsW_snzsYBDsr3Q0WB8f-to9Fk0enqHHFQlk,3087
+ingestr/src/freshdesk/freshdesk_client.py,sha256=1nFf0K4MQ0KZbWwk4xSbYHaykVqmPLfN39miOFDpWVc,4385
 ingestr/src/freshdesk/settings.py,sha256=0Wr_OMnUZcTlry7BmALssLxD2yh686JW4moLNv12Jnw,409
-ingestr/src/github/__init__.py,sha256=R71y33KqzxDTvCLSGj2H2EztfGqsWGR9ZgcaurC1-A4,7220
-ingestr/src/github/helpers.py,sha256=hge8orylwiScRcMftlv4oSZ6ORvVANwHCPAGkg95FtI,6758
+ingestr/src/github/__init__.py,sha256=C7b5j6CrxmTItS4tyDa3OYzdAw5c__xboOtoEJYe3wQ,7217
+ingestr/src/github/helpers.py,sha256=rpv_3HzuOl4PQ-FUeA66pev-pgze9SaE8RUHIPYfZ_A,6759
 ingestr/src/github/queries.py,sha256=W34C02jUEdjFmOE7f7u9xvYyBNDMfVZAu0JIRZI2mkU,2302
 ingestr/src/github/settings.py,sha256=N5ahWrDIQ_4IWV9i-hTXxyYduqY9Ym2BTwqsWxcDdJ8,258
 ingestr/src/google_ads/__init__.py,sha256=bH0TtnRWcOUESezpvoA7VEUHAq_0ITGQeX4GGVBfl1I,3725
@@ -75,7 +75,7 @@ ingestr/src/gorgias/helpers.py,sha256=DamuijnvhGY9hysQO4txrVMf4izkGbh5qfBKImdOIN
 ingestr/src/hubspot/__init__.py,sha256=wqHefhc_YRI5dNFCcpvH-UUilNThE49sbGouSBiHYsw,11776
 ingestr/src/hubspot/helpers.py,sha256=k2b-lhxqBNKHoOSHoHegFSsk8xxjjGA0I04V0XyX2b4,7883
 ingestr/src/hubspot/settings.py,sha256=i73MkSiJfRLMFLfiJgYdhp-rhymHTfoqFzZ4uOJdFJM,2456
-ingestr/src/influxdb/__init__.py,sha256=sj_K4ShXECp6cW4xVVv2kCwQCFtTYD0dC9LOAEqFoVI,1289
+ingestr/src/influxdb/__init__.py,sha256=cYsGnDPNHRTe9pp14ogDQgPTCI9TOdyJm1MaNuQLHdk,1290
 ingestr/src/influxdb/client.py,sha256=hCxSNREAWWEvvAV3RQbKaWp2-e_7EE8xmVRjTwLFEFo,1230
 ingestr/src/isoc_pulse/__init__.py,sha256=9b4eN4faatpiwTuRNPuYcEt1hEFDEjua9XhfakUigBk,4648
 ingestr/src/kafka/__init__.py,sha256=QUHsGmdv5_E-3z0GDHXvbk39puwuGDBsyYSDhvbA89E,3595
@@ -85,15 +85,15 @@ ingestr/src/kinesis/helpers.py,sha256=SO2cFmWNGcykUYmjHdfxWsOQSkLQXyhFtfWnkcUOM0
 ingestr/src/klaviyo/__init__.py,sha256=o_noUgbxLk36s4f9W56_ibPorF0n7kVapPUlV0p-jfA,7875
 ingestr/src/klaviyo/client.py,sha256=tPj79ia7AW0ZOJhzlKNPCliGbdojRNwUFp8HvB2ym5s,7434
 ingestr/src/klaviyo/helpers.py,sha256=_i-SHffhv25feLDcjy6Blj1UxYLISCwVCMgGtrlnYHk,496
-ingestr/src/linear/__init__.py,sha256=Qbf8EPHVh-8pVNe_fqLVinds7qQ3O4ymDuPPPD560Ng,5953
-ingestr/src/linear/helpers.py,sha256=Mb7oKpUTRnHl-CvO1fubjJlJFDkhTuA7PUldWglvagI,2044
+ingestr/src/linear/__init__.py,sha256=XWgWiDJi87OFHnsOjTq4ZSAdtMcPdplBYC8fJM-6dMA,6607
+ingestr/src/linear/helpers.py,sha256=y8XhEDBVnxMmSzzyrS0_RnPwtNJIRuKM4Kw3wW9p6UM,1796
 ingestr/src/linkedin_ads/__init__.py,sha256=CAPWFyV24loziiphbLmODxZUXZJwm4JxlFkr56q0jfo,1855
 ingestr/src/linkedin_ads/dimension_time_enum.py,sha256=EmHRdkFyTAfo4chGjThrwqffWJxmAadZMbpTvf0xkQc,198
 ingestr/src/linkedin_ads/helpers.py,sha256=eUWudRVlXl4kqIhfXQ1eVsUpZwJn7UFqKSpnbLfxzds,4498
 ingestr/src/mixpanel/__init__.py,sha256=s1QtqMP0BTGW6YtdCabJFWj7lEn7KujzELwGpBOQgfs,1796
 ingestr/src/mixpanel/client.py,sha256=c_reouegOVYBOwHLfgYFwpmkba0Sxro1Zkml07NCYf0,3602
-ingestr/src/mongodb/__init__.py,sha256=T-RYPS_skl_2gNVfYWWXan2bVQYmm0bFBcCCqG5ejvg,7275
-ingestr/src/mongodb/helpers.py,sha256=8pjNYZu4k2rkR9dItTMAnPaRdF1kroqLYX9FZ34RTqo,24491
+ingestr/src/mongodb/__init__.py,sha256=5KNdR2mxJoHSOU1pt-FIJNg9HT4aHPwl6mI31xPBQLA,7487
+ingestr/src/mongodb/helpers.py,sha256=VMGKkSN6FIQ4l-4TUqoc-Ou7r52_zPXuLF33ZN23B_I,30881
 ingestr/src/notion/__init__.py,sha256=36wUui8finbc85ObkRMq8boMraXMUehdABN_AMe_hzA,1834
 ingestr/src/notion/settings.py,sha256=MwQVZViJtnvOegfjXYc_pJ50oUYgSRPgwqu7TvpeMOA,82
 ingestr/src/notion/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -151,8 +151,8 @@ ingestr/testdata/merge_expected.csv,sha256=DReHqWGnQMsf2PBv_Q2pfjsgvikYFnf1zYcQZ
 ingestr/testdata/merge_part1.csv,sha256=Pw8Z9IDKcNU0qQHx1z6BUf4rF_-SxKGFOvymCt4OY9I,185
 ingestr/testdata/merge_part2.csv,sha256=T_GiWxA81SN63_tMOIuemcvboEFeAmbKc7xRXvL9esw,287
 ingestr/tests/unit/test_smartsheets.py,sha256=eiC2CCO4iNJcuN36ONvqmEDryCA1bA1REpayHpu42lk,5058
-ingestr-0.13.78.dist-info/METADATA,sha256=Q7ofO2TRuTOUb4fhZvyr_kejvaOM2OwCrq3FnCLEk6U,15093
-ingestr-0.13.78.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-ingestr-0.13.78.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
-ingestr-0.13.78.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
-ingestr-0.13.78.dist-info/RECORD,,
+ingestr-0.13.80.dist-info/METADATA,sha256=HF4xO0hXDV_gn389MIuX_-GhahKPaz8GOMIMt1If4x0,15182
+ingestr-0.13.80.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ingestr-0.13.80.dist-info/entry_points.txt,sha256=oPJy0KBnPWYjDtP1k8qwAihcTLHSZokSQvRAw_wtfJM,46
+ingestr-0.13.80.dist-info/licenses/LICENSE.md,sha256=cW8wIhn8HFE-KLStDF9jHQ1O_ARWP3kTpk_-eOccL24,1075
+ingestr-0.13.80.dist-info/RECORD,,