airbyte-source-google-search-console 1.5.16.dev202503142015__py3-none-any.whl → 1.5.16.dev202503252224__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info → airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info}/METADATA +1 -1
- {airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info → airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info}/RECORD +5 -5
- source_google_search_console/streams.py +16 -61
- {airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info → airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info}/WHEEL +0 -0
- {airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info → airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info}/entry_points.txt +0 -0
{airbyte_source_google_search_console-1.5.16.dev202503142015.dist-info → airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info}/RECORD

```diff
@@ -19,8 +19,8 @@ source_google_search_console/schemas/sites.json,sha256=WNiCRuStPL1YkJiFa8FEbNJmq
 source_google_search_console/service_account_authenticator.py,sha256=gjUxt0xFxj82uviCQNTsA1Jlee__UDhYNjE7bRO1G0U,1227
 source_google_search_console/source.py,sha256=8n10_agSa2rvPvEyBvOfOpmpEardbEhi3H0vlK2A4_g,10002
 source_google_search_console/spec.json,sha256=WYtFvaSqWYGm1Dt2yV9G92U78Q94rh9oarbxJe3H7xo,8470
-source_google_search_console/streams.py,sha256=
-airbyte_source_google_search_console-1.5.16.
-airbyte_source_google_search_console-1.5.16.
-airbyte_source_google_search_console-1.5.16.
-airbyte_source_google_search_console-1.5.16.
+source_google_search_console/streams.py,sha256=7C9E8gYpzU4VdQGsrS2AqjQTdMjeQ3mS3iAfvywADek,20436
+airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info/METADATA,sha256=llHA_zmljhs2_GK5_vKBp_TJj7mgizzaB_kFodADb_A,5646
+airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info/entry_points.txt,sha256=DMcgc9bCX-Vt6hm_68pa77qS3eGdeMhg-UdlFc-XKUM,85
+airbyte_source_google_search_console-1.5.16.dev202503252224.dist-info/RECORD,,
```
source_google_search_console/streams.py

```diff
@@ -371,20 +371,6 @@ class SearchAppearance(SearchAnalytics):
     primary_key = None
     dimensions = ["searchAppearance"]
 
-    def request_body_json(
-        self,
-        stream_state: Mapping[str, Any] = None,
-        stream_slice: Mapping[str, Any] = None,
-        next_page_token: Mapping[str, Any] = None,
-    ) -> Optional[Union[Dict[str, Any], str]]:
-        data = super().request_body_json(stream_state, stream_slice, next_page_token)
-
-        fields_to_remove = ["aggregationType", "startRow", "rowLimit", "dataState"]
-        for field in fields_to_remove:
-            data.pop(field, None)
-
-        return data
-
 
 class SearchByKeyword(SearchAnalytics):
     """
```
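The override removed above stripped pagination and aggregation fields from the SearchAppearance request body before sending it. A minimal sketch of that pop-with-default pattern, using a hypothetical base payload (only the field names come from the diff), shows what the stream no longer does:

```python
# Sketch only: the field-stripping behaviour the removed override performed.
# The payload values below are hypothetical illustrations.
payload = {
    "startDate": "2025-03-01",          # hypothetical
    "endDate": "2025-03-07",            # hypothetical
    "dimensions": ["searchAppearance"],
    "aggregationType": "auto",          # hypothetical
    "startRow": 0,
    "rowLimit": 25000,
    "dataState": "final",               # hypothetical
}

for field in ["aggregationType", "startRow", "rowLimit", "dataState"]:
    payload.pop(field, None)  # pop with a default never raises if the key is absent

print(payload)  # only startDate, endDate and dimensions remain
```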
```diff
@@ -395,24 +381,28 @@ class SearchByKeyword(SearchAnalytics):
     filters: {"dimension": "searchAppearance", "operator": "equals", "expression": keyword}
     """
 
-    search_types = ["web", "news", "image", "video", "discover", "googleNews"]
-
     def stream_slices(
         self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
     ) -> Iterable[Optional[Mapping[str, Any]]]:
         search_appearance_stream = SearchAppearance(self._session.auth, self._site_urls, self._start_date, self._end_date)
 
-        for stream_slice in super().stream_slices(sync_mode, cursor_field, stream_state):
+        for stream_slice in super().stream_slices(sync_mode, cursor_field, stream_state):
             keywords_records = search_appearance_stream.read_records(
                 sync_mode=SyncMode.full_refresh, stream_state=stream_state, stream_slice=stream_slice
             )
-            keywords
-
-
-
-
-
-
+            # Safely extract keywords, handling cases where "searchAppearance" might be missing
+            keywords = {record["searchAppearance"] for record in keywords_records if "searchAppearance" in record}
+
+            if keywords:
+                # If keywords exist, yield a slice for each keyword with filters
+                for keyword in keywords:
+                    filters = {"dimension": "searchAppearance", "operator": "equals", "expression": keyword}
+                    # Create a copy to avoid modifying the original stream_slice
+                    stream_slice_with_filter = stream_slice.copy()
+                    stream_slice_with_filter["dimensionFilterGroups"] = [{"groupType": "and", "filters": [filters]}]
+                    yield stream_slice_with_filter
+            else:
+                # If no keywords are found, yield the base slice without filters
                 yield stream_slice
 
     def request_body_json(
```
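The new slicing logic yields one slice per discovered searchAppearance keyword, each carrying a dimensionFilterGroups entry, and falls back to the unfiltered slice when no keywords are found. A minimal sketch of the slice shape a consumer would see (site URL, search type and dates are hypothetical; the filter structure matches the diff):

```python
# Sketch only: shape of a slice produced by the updated SearchByKeyword.stream_slices.
base_slice = {
    "site_url": "https://example.com/",  # hypothetical
    "search_type": "web",                # hypothetical
    "start_date": "2025-03-01",          # hypothetical
    "end_date": "2025-03-07",            # hypothetical
}

keyword = "AMP_BLUE_LINK"  # example searchAppearance value

filters = {"dimension": "searchAppearance", "operator": "equals", "expression": keyword}
slice_with_filter = base_slice.copy()  # copy so the base slice is not mutated
slice_with_filter["dimensionFilterGroups"] = [{"groupType": "and", "filters": [filters]}]

print(slice_with_filter)
```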
```diff
@@ -422,25 +412,14 @@ class SearchByKeyword(SearchAnalytics):
         next_page_token: Mapping[str, Any] = None,
     ) -> Optional[Union[Dict[str, Any], str]]:
         data = super().request_body_json(stream_state, stream_slice, next_page_token)
-
+        if "dimensionFilterGroups" in stream_slice:
+            data["dimensionFilterGroups"] = stream_slice["dimensionFilterGroups"]
         return data
 
 
 class SearchAnalyticsKeywordPageReport(SearchByKeyword):
     primary_key = ["site_url", "date", "country", "device", "query", "page", "search_type"]
     dimensions = ["date", "country", "device", "query", "page"]
-    def stream_slices(
-        self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
-    ) -> Iterable[Optional[Mapping[str, Any]]]:
-        return super(SearchByKeyword, self).stream_slices(sync_mode, cursor_field, stream_state)
-
-    def request_body_json(
-        self,
-        stream_state: Mapping[str, Any] = None,
-        stream_slice: Mapping[str, Any] = None,
-        next_page_token: Mapping[str, Any] = None,
-    ) -> Optional[Union[Dict[str, Any], str]]:
-        return super(SearchByKeyword, self).request_body_json(stream_state, stream_slice, next_page_token)
 
 
 class SearchAnalyticsKeywordSiteReportByPage(SearchByKeyword):
```
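With this change, request_body_json forwards the slice's filter group into the API payload only when the slice carries one. A small sketch of that conditional merge, using a hypothetical helper and illustrative payload fields (not copied from the connector):

```python
from typing import Any, Dict, Mapping


def build_body(base: Dict[str, Any], stream_slice: Mapping[str, Any]) -> Dict[str, Any]:
    """Hypothetical helper mirroring the new conditional in the diff."""
    data = dict(base)
    # Only add the filter group when the slice carries one
    # (i.e., a searchAppearance keyword was found).
    if "dimensionFilterGroups" in stream_slice:
        data["dimensionFilterGroups"] = stream_slice["dimensionFilterGroups"]
    return data


body = build_body(
    {"startDate": "2025-03-01", "endDate": "2025-03-07", "dimensions": ["date", "query"]},  # hypothetical
    {"dimensionFilterGroups": [{"groupType": "and", "filters": [
        {"dimension": "searchAppearance", "operator": "equals", "expression": "AMP_BLUE_LINK"}
    ]}]},
)
print(body)
```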
```diff
@@ -448,35 +427,11 @@ class SearchAnalyticsKeywordSiteReportByPage(SearchByKeyword):
     dimensions = ["date", "country", "device", "query"]
     aggregation_type = QueryAggregationType.by_page
 
-    def stream_slices(self, sync_mode, cursor_field=None, stream_state=None):
-        for stream_slice in super(SearchByKeyword, self).stream_slices(sync_mode, cursor_field, stream_state):
-            yield stream_slice
-
-    def request_body_json(
-        self,
-        stream_state: Mapping[str, Any] = None,
-        stream_slice: Mapping[str, Any] = None,
-        next_page_token: Mapping[str, Any] = None,
-    ) -> Optional[Union[Dict[str, Any], str]]:
-        return super(SearchByKeyword, self).request_body_json(stream_state, stream_slice, next_page_token)
 
 class SearchAnalyticsKeywordSiteReportBySite(SearchByKeyword):
     primary_key = ["site_url", "date", "country", "device", "query", "search_type"]
     dimensions = ["date", "country", "device", "query"]
     aggregation_type = QueryAggregationType.by_property
-    def stream_slices(
-        self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
-    ) -> Iterable[Optional[Mapping[str, Any]]]:
-        return super(SearchByKeyword, self).stream_slices(sync_mode, cursor_field, stream_state)
-
-    def request_body_json(
-        self,
-        stream_state: Mapping[str, Any] = None,
-        stream_slice: Mapping[str, Any] = None,
-        next_page_token: Mapping[str, Any] = None,
-    ) -> Optional[Union[Dict[str, Any], str]]:
-        return super(SearchByKeyword, self).request_body_json(stream_state, stream_slice, next_page_token)
-
 
 
 class SearchAnalyticsSiteReportBySite(SearchAnalytics):
```
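The deleted overrides in the keyword report classes called super(SearchByKeyword, self) to skip SearchByKeyword's slicing and request-body logic, so removing them means these streams now fall back to the inherited, keyword-filtered behaviour. A toy illustration of that MRO-skipping pattern (class names are hypothetical, not from the connector):

```python
# Toy example: super() with an explicit class starts the MRO lookup *above*
# that class, skipping its implementation.
class Base:
    def slices(self):
        return ["plain-slice"]


class KeywordMixin(Base):
    def slices(self):
        return ["keyword-filtered-slice"]


class Report(KeywordMixin):
    def slices(self):
        # Equivalent to the removed overrides: bypass KeywordMixin, call Base directly.
        return super(KeywordMixin, self).slices()


print(Report().slices())  # ['plain-slice'] -- keyword filtering is bypassed
```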