airbyte-source-google-search-console 1.5.16.dev202503120000__py3-none-any.whl → 1.5.16.dev202503142015__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: airbyte-source-google-search-console
-Version: 1.5.16.dev202503120000
+Version: 1.5.16.dev202503142015
 Summary: Source implementation for Google Search Console.
 License: Elv2
 Author: Airbyte
@@ -19,8 +19,8 @@ source_google_search_console/schemas/sites.json,sha256=WNiCRuStPL1YkJiFa8FEbNJmq
 source_google_search_console/service_account_authenticator.py,sha256=gjUxt0xFxj82uviCQNTsA1Jlee__UDhYNjE7bRO1G0U,1227
 source_google_search_console/source.py,sha256=8n10_agSa2rvPvEyBvOfOpmpEardbEhi3H0vlK2A4_g,10002
 source_google_search_console/spec.json,sha256=WYtFvaSqWYGm1Dt2yV9G92U78Q94rh9oarbxJe3H7xo,8470
-source_google_search_console/streams.py,sha256=OQJ8kQS_QZBkHZE3KNNpqNCDuixE9jeQ91PkJwv9o6E,19695
-airbyte_source_google_search_console-1.5.16.dev202503120000.dist-info/METADATA,sha256=1fH-_BgBDDe1MejjqIvGZPqTPy_m-LxmlfFr9uMgwAc,5646
-airbyte_source_google_search_console-1.5.16.dev202503120000.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
-airbyte_source_google_search_console-1.5.16.dev202503120000.dist-info/entry_points.txt,sha256=DMcgc9bCX-Vt6hm_68pa77qS3eGdeMhg-UdlFc-XKUM,85
-airbyte_source_google_search_console-1.5.16.dev202503120000.dist-info/RECORD,,
+source_google_search_console/streams.py,sha256=argw9ECCf7nInvrg_B5BgWa59gqL_ecvpFGbWf6tCsE,22275
+airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info/METADATA,sha256=VPW3sTOMZUXbxtWilCnJ1rGS59MEmx5_orqs4Mh1fGM,5646
+airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info/entry_points.txt,sha256=DMcgc9bCX-Vt6hm_68pa77qS3eGdeMhg-UdlFc-XKUM,85
+airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info/RECORD,,
@@ -378,12 +378,14 @@ class SearchAppearance(SearchAnalytics):
         next_page_token: Mapping[str, Any] = None,
     ) -> Optional[Union[Dict[str, Any], str]]:
         data = super().request_body_json(stream_state, stream_slice, next_page_token)
-
+
         fields_to_remove = ["aggregationType", "startRow", "rowLimit", "dataState"]
         for field in fields_to_remove:
             data.pop(field, None)
-
+
         return data
+
+
 class SearchByKeyword(SearchAnalytics):
     """
     Adds searchAppearance value to dimensionFilterGroups in json body
@@ -392,13 +394,26 @@ class SearchByKeyword(SearchAnalytics):
     groupType: "and" - Whether all filters in this group must return true ("and"), or one or more must return true (not yet supported).
     filters: {"dimension": "searchAppearance", "operator": "equals", "expression": keyword}
     """
+
     search_types = ["web", "news", "image", "video", "discover", "googleNews"]
 
     def stream_slices(
         self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
     ) -> Iterable[Optional[Mapping[str, Any]]]:
-        for stream_slice in super().stream_slices(sync_mode, cursor_field, stream_state):
-            yield stream_slice
+        search_appearance_stream = SearchAppearance(self._session.auth, self._site_urls, self._start_date, self._end_date)
+
+        for stream_slice in super().stream_slices(sync_mode, cursor_field, stream_state):
+            keywords_records = search_appearance_stream.read_records(
+                sync_mode=SyncMode.full_refresh, stream_state=stream_state, stream_slice=stream_slice
+            )
+            keywords = {record["searchAppearance"] for record in keywords_records}
+
+            for keyword in keywords:
+                filters = {"dimension": "searchAppearance", "operator": "equals", "expression": keyword}
+                stream_slice["dimensionFilterGroups"] = [{"groupType": "and", "filters": filters}]
+                stream_slice["dimensions"] = self.dimensions
+
+                yield stream_slice
 
     def request_body_json(
         self,
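
For context on the hunk above: the new SearchByKeyword.stream_slices reads searchAppearance values from a SearchAppearance stream and expands each base slice into one slice per keyword, attaching a dimensionFilterGroups entry to each. The sketch below is not part of the package; it only illustrates that expansion pattern, and the helper name and hard-coded keyword list are assumptions for illustration.

from typing import Any, Dict, Iterable, List, Mapping

# Hypothetical stand-in for the keywords the SearchAppearance stream would return.
def read_search_appearance_records(stream_slice: Mapping[str, Any]) -> List[Dict[str, Any]]:
    return [{"searchAppearance": "AMP_BLUE_LINK"}, {"searchAppearance": "RICH_SNIPPET"}]

def slices_per_keyword(base_slices: Iterable[Mapping[str, Any]], dimensions: List[str]) -> Iterable[Dict[str, Any]]:
    """Expand each base slice into one slice per searchAppearance keyword,
    mirroring the per-keyword filter the diff attaches to the slice."""
    for base in base_slices:
        keywords = {r["searchAppearance"] for r in read_search_appearance_records(base)}
        for keyword in keywords:
            filters = {"dimension": "searchAppearance", "operator": "equals", "expression": keyword}
            yield {
                **base,
                "dimensionFilterGroups": [{"groupType": "and", "filters": filters}],
                "dimensions": dimensions,
            }

if __name__ == "__main__":
    base = [{"site_url": "https://example.com", "start_date": "2025-03-01", "end_date": "2025-03-03"}]
    for s in slices_per_keyword(base, ["date", "country", "device", "query", "page"]):
        print(s)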
@@ -407,12 +422,25 @@ class SearchByKeyword(SearchAnalytics):
         next_page_token: Mapping[str, Any] = None,
     ) -> Optional[Union[Dict[str, Any], str]]:
         data = super().request_body_json(stream_state, stream_slice, next_page_token)
+        data["dimensionFilterGroups"] = stream_slice["dimensionFilterGroups"]
         return data
 
 
 class SearchAnalyticsKeywordPageReport(SearchByKeyword):
     primary_key = ["site_url", "date", "country", "device", "query", "page", "search_type"]
     dimensions = ["date", "country", "device", "query", "page"]
+    def stream_slices(
+        self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
+    ) -> Iterable[Optional[Mapping[str, Any]]]:
+        return super(SearchByKeyword, self).stream_slices(sync_mode, cursor_field, stream_state)
+
+    def request_body_json(
+        self,
+        stream_state: Mapping[str, Any] = None,
+        stream_slice: Mapping[str, Any] = None,
+        next_page_token: Mapping[str, Any] = None,
+    ) -> Optional[Union[Dict[str, Any], str]]:
+        return super(SearchByKeyword, self).request_body_json(stream_state, stream_slice, next_page_token)
 
 
 class SearchAnalyticsKeywordSiteReportByPage(SearchByKeyword):
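
The first change in the hunk above makes SearchByKeyword.request_body_json copy the per-keyword dimensionFilterGroups prepared by stream_slices into the outgoing query body. A minimal sketch of that merge follows; it is not part of the package, and the base-body fields and keyword value shown are illustrative placeholders, not taken from the connector.

from typing import Any, Dict, Mapping

def merge_filter_group(base_body: Dict[str, Any], stream_slice: Mapping[str, Any]) -> Dict[str, Any]:
    # Mirror the override in the diff: the slice carries the filter group,
    # and request_body_json copies it into the request payload.
    body = dict(base_body)
    body["dimensionFilterGroups"] = stream_slice["dimensionFilterGroups"]
    return body

per_keyword_slice = {
    "dimensionFilterGroups": [
        {
            "groupType": "and",
            "filters": {"dimension": "searchAppearance", "operator": "equals", "expression": "RICH_SNIPPET"},
        }
    ],
    "dimensions": ["date", "country", "device", "query", "page"],
}
# "startDate"/"endDate" are assumed placeholders for whatever the base body already contains.
base_body = {"startDate": "2025-03-01", "endDate": "2025-03-03", "dimensions": per_keyword_slice["dimensions"]}
print(merge_filter_group(base_body, per_keyword_slice))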
@@ -420,11 +448,35 @@ class SearchAnalyticsKeywordSiteReportByPage(SearchByKeyword):
     dimensions = ["date", "country", "device", "query"]
     aggregation_type = QueryAggregationType.by_page
 
+    def stream_slices(self, sync_mode, cursor_field=None, stream_state=None):
+        for stream_slice in super(SearchByKeyword, self).stream_slices(sync_mode, cursor_field, stream_state):
+            yield stream_slice
+
+    def request_body_json(
+        self,
+        stream_state: Mapping[str, Any] = None,
+        stream_slice: Mapping[str, Any] = None,
+        next_page_token: Mapping[str, Any] = None,
+    ) -> Optional[Union[Dict[str, Any], str]]:
+        return super(SearchByKeyword, self).request_body_json(stream_state, stream_slice, next_page_token)
 
 class SearchAnalyticsKeywordSiteReportBySite(SearchByKeyword):
     primary_key = ["site_url", "date", "country", "device", "query", "search_type"]
     dimensions = ["date", "country", "device", "query"]
     aggregation_type = QueryAggregationType.by_property
+    def stream_slices(
+        self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
+    ) -> Iterable[Optional[Mapping[str, Any]]]:
+        return super(SearchByKeyword, self).stream_slices(sync_mode, cursor_field, stream_state)
+
+    def request_body_json(
+        self,
+        stream_state: Mapping[str, Any] = None,
+        stream_slice: Mapping[str, Any] = None,
+        next_page_token: Mapping[str, Any] = None,
+    ) -> Optional[Union[Dict[str, Any], str]]:
+        return super(SearchByKeyword, self).request_body_json(stream_state, stream_slice, next_page_token)
+
 
 
 class SearchAnalyticsSiteReportBySite(SearchAnalytics):
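
A note on the repeated super(SearchByKeyword, self) calls added in the last two hunks: two-argument super() starts the method lookup above SearchByKeyword in the MRO, so these report streams delegate straight to the SearchAnalytics implementations and bypass the per-keyword slicing and filter injection. The self-contained illustration below shows that lookup behavior only; the class names are placeholders, not the connector's.

class Base:
    def slices(self):
        return ["base-slice"]

class ByKeyword(Base):
    def slices(self):
        return [s + "+keyword-filter" for s in super().slices()]

class KeywordPageReport(ByKeyword):
    def slices(self):
        # Two-argument super() starts the MRO lookup *after* ByKeyword,
        # so ByKeyword.slices() is skipped and Base.slices() runs directly.
        return super(ByKeyword, self).slices()

print(ByKeyword().slices())          # ['base-slice+keyword-filter']
print(KeywordPageReport().slices())  # ['base-slice']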