garf-executors 0.2.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of garf-executors might be problematic. Click here for more details.
- garf_executors-0.2.2/PKG-INFO +72 -0
- garf_executors-0.2.2/README.md +32 -0
- garf_executors-0.2.2/garf_executors/__init__.py +60 -0
- garf_executors-0.2.2/garf_executors/api_executor.py +137 -0
- garf_executors-0.2.2/garf_executors/bq_executor.py +177 -0
- garf_executors-0.2.2/garf_executors/config.py +53 -0
- garf_executors-0.2.2/garf_executors/entrypoints/__init__.py +0 -0
- garf_executors-0.2.2/garf_executors/entrypoints/cli.py +164 -0
- garf_executors-0.2.2/garf_executors/entrypoints/grpc_server.py +68 -0
- garf_executors-0.2.2/garf_executors/entrypoints/server.py +118 -0
- garf_executors-0.2.2/garf_executors/entrypoints/tracer.py +42 -0
- garf_executors-0.2.2/garf_executors/entrypoints/utils.py +140 -0
- garf_executors-0.2.2/garf_executors/exceptions.py +17 -0
- garf_executors-0.2.2/garf_executors/execution_context.py +116 -0
- garf_executors-0.2.2/garf_executors/executor.py +124 -0
- garf_executors-0.2.2/garf_executors/fetchers.py +79 -0
- garf_executors-0.2.2/garf_executors/garf_pb2.py +45 -0
- garf_executors-0.2.2/garf_executors/garf_pb2_grpc.py +97 -0
- garf_executors-0.2.2/garf_executors/sql_executor.py +141 -0
- garf_executors-0.2.2/garf_executors/telemetry.py +20 -0
- garf_executors-0.2.2/garf_executors/workflow.py +96 -0
- garf_executors-0.2.2/garf_executors.egg-info/PKG-INFO +72 -0
- garf_executors-0.2.2/garf_executors.egg-info/SOURCES.txt +27 -0
- garf_executors-0.2.2/garf_executors.egg-info/dependency_links.txt +1 -0
- garf_executors-0.2.2/garf_executors.egg-info/entry_points.txt +2 -0
- garf_executors-0.2.2/garf_executors.egg-info/requires.txt +24 -0
- garf_executors-0.2.2/garf_executors.egg-info/top_level.txt +1 -0
- garf_executors-0.2.2/pyproject.toml +61 -0
- garf_executors-0.2.2/setup.cfg +4 -0
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: garf-executors
|
|
3
|
+
Version: 0.2.2
|
|
4
|
+
Summary: Executes queries against API and writes data to local/remote storage.
|
|
5
|
+
Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
|
|
6
|
+
License: Apache 2.0
|
|
7
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
8
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
9
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
14
|
+
Classifier: Intended Audience :: Developers
|
|
15
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
16
|
+
Classifier: Operating System :: OS Independent
|
|
17
|
+
Classifier: License :: OSI Approved :: Apache Software License
|
|
18
|
+
Requires-Python: >=3.9
|
|
19
|
+
Description-Content-Type: text/markdown
|
|
20
|
+
Requires-Dist: garf-core
|
|
21
|
+
Requires-Dist: garf-io
|
|
22
|
+
Requires-Dist: pyyaml
|
|
23
|
+
Requires-Dist: pydantic
|
|
24
|
+
Requires-Dist: opentelemetry-api
|
|
25
|
+
Requires-Dist: opentelemetry-sdk
|
|
26
|
+
Requires-Dist: opentelemetry-exporter-otlp
|
|
27
|
+
Provides-Extra: bq
|
|
28
|
+
Requires-Dist: garf-io[bq]; extra == "bq"
|
|
29
|
+
Requires-Dist: pandas; extra == "bq"
|
|
30
|
+
Requires-Dist: google-cloud-logging; extra == "bq"
|
|
31
|
+
Provides-Extra: sql
|
|
32
|
+
Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
|
|
33
|
+
Requires-Dist: pandas; extra == "sql"
|
|
34
|
+
Provides-Extra: server
|
|
35
|
+
Requires-Dist: fastapi[standard]; extra == "server"
|
|
36
|
+
Requires-Dist: opentelemetry-instrumentation-fastapi; extra == "server"
|
|
37
|
+
Requires-Dist: typer; extra == "server"
|
|
38
|
+
Provides-Extra: all
|
|
39
|
+
Requires-Dist: garf-executors[bq,server,sql]; extra == "all"
|
|
40
|
+
|
|
41
|
+
# `garf-executors` - One stop-shop for interacting with Reporting APIs.
|
|
42
|
+
|
|
43
|
+
`garf-executors` is responsible for orchestrating process of fetching from API and storing data in a storage.
|
|
44
|
+
|
|
45
|
+
Currently the following executors are supported:
|
|
46
|
+
|
|
47
|
+
* `ApiExecutor` - fetches data from a reporting API and saves it to a requested destination.
|
|
48
|
+
* `BigQueryExecutor` - executes SQL code in BigQuery.
|
|
49
|
+
* `SqlExecutor` - executes SQL code in a SqlAlchemy supported DB.
|
|
50
|
+
|
|
51
|
+
## Installation
|
|
52
|
+
|
|
53
|
+
`pip install garf-executors`
|
|
54
|
+
|
|
55
|
+
## Usage
|
|
56
|
+
|
|
57
|
+
After `garf-executors` is installed you can use `garf` utility to perform fetching.
|
|
58
|
+
|
|
59
|
+
```
|
|
60
|
+
garf <QUERIES> --source <API_SOURCE> \
|
|
61
|
+
--output <OUTPUT_TYPE> \
|
|
62
|
+
--source.params1=<VALUE>
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
where
|
|
66
|
+
|
|
67
|
+
* `<QUERIES>`- local or remote path(s) to files with queries.
|
|
68
|
+
* `<API_SOURCE>`- type of API to use. Based on that the appropriate report fetcher will be initialized.
|
|
69
|
+
* `<OUTPUT_TYPE>` - output supported by [`garf-io` library](../garf_io/README.md).
|
|
70
|
+
|
|
71
|
+
If your report fetcher requires additional parameters you can pass them via key value pairs under `--source.` argument, i.e.`--source.regionCode='US'` - to get data only from *US*.
|
|
72
|
+
> Concrete `--source` parameters are dependent on a particular report fetcher and should be looked up in a documentation for this fetcher.
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# `garf-executors` - One stop-shop for interacting with Reporting APIs.
|
|
2
|
+
|
|
3
|
+
`garf-executors` is responsible for orchestrating process of fetching from API and storing data in a storage.
|
|
4
|
+
|
|
5
|
+
Currently the following executors are supported:
|
|
6
|
+
|
|
7
|
+
* `ApiExecutor` - fetches data from a reporting API and saves it to a requested destination.
|
|
8
|
+
* `BigQueryExecutor` - executes SQL code in BigQuery.
|
|
9
|
+
* `SqlExecutor` - executes SQL code in a SqlAlchemy supported DB.
|
|
10
|
+
|
|
11
|
+
## Installation
|
|
12
|
+
|
|
13
|
+
`pip install garf-executors`
|
|
14
|
+
|
|
15
|
+
## Usage
|
|
16
|
+
|
|
17
|
+
After `garf-executors` is installed you can use `garf` utility to perform fetching.
|
|
18
|
+
|
|
19
|
+
```
|
|
20
|
+
garf <QUERIES> --source <API_SOURCE> \
|
|
21
|
+
--output <OUTPUT_TYPE> \
|
|
22
|
+
--source.params1=<VALUE>
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
where
|
|
26
|
+
|
|
27
|
+
* `<QUERIES>`- local or remote path(s) to files with queries.
|
|
28
|
+
* `<API_SOURCE>`- type of API to use. Based on that the appropriate report fetcher will be initialized.
|
|
29
|
+
* `<OUTPUT_TYPE>` - output supported by [`garf-io` library](../garf_io/README.md).
|
|
30
|
+
|
|
31
|
+
If your report fetcher requires additional parameters you can pass them via key value pairs under `--source.` argument, i.e.`--source.regionCode='US'` - to get data only from *US*.
|
|
32
|
+
> Concrete `--source` parameters are dependent on a particular report fetcher and should be looked up in a documentation for this fetcher.
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
# Copyright 2025 Google LLC
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# https://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
"""Executors to fetch data from various APIs."""
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import importlib
|
|
19
|
+
|
|
20
|
+
from garf_executors import executor, fetchers
|
|
21
|
+
from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor
|
|
22
|
+
from garf_executors.telemetry import tracer
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@tracer.start_as_current_span('setup_executor')
def setup_executor(
  source: str,
  fetcher_parameters: dict[str, str | int | bool],
  enable_cache: bool = False,
  cache_ttl_seconds: int = 3600,
) -> executor.Executor:
  """Initializes an executor based on a source alias and parameters.

  Args:
    source: Alias of a source: 'bq', 'sqldb' or a registered fetcher alias.
    fetcher_parameters: Parameters forwarded to the concrete executor or
      report fetcher.
    enable_cache: Whether to cache fetched reports (API sources only).
    cache_ttl_seconds: Time-to-live of cached reports in seconds
      (API sources only).

  Returns:
    Instantiated executor for the requested source.
  """
  # BQ / SQL executors are imported lazily: their modules require optional
  # extras (`garf-executors[bq]` / `garf-executors[sql]`).
  if source == 'bq':
    bq_executor = importlib.import_module('garf_executors.bq_executor')
    query_executor = bq_executor.BigQueryExecutor(**fetcher_parameters)
  elif source == 'sqldb':
    sql_executor = importlib.import_module('garf_executors.sql_executor')
    query_executor = (
      sql_executor.SqlAlchemyQueryExecutor.from_connection_string(
        fetcher_parameters.get('connection_string')
      )
    )
  else:
    # Reuse the canonical constructor instead of duplicating its logic here.
    query_executor = ApiQueryExecutor.from_fetcher_alias(
      source=source,
      fetcher_parameters=fetcher_parameters,
      enable_cache=enable_cache,
      cache_ttl_seconds=cache_ttl_seconds,
    )
  return query_executor
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
__all__ = [
|
|
56
|
+
'ApiQueryExecutor',
|
|
57
|
+
'ApiExecutionContext',
|
|
58
|
+
]
|
|
59
|
+
|
|
60
|
+
__version__ = '0.2.2'
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
# Copyright 2024 Google LLC
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# https://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
"""Module for executing Garf queries and writing them to local/remote.
|
|
15
|
+
|
|
16
|
+
ApiQueryExecutor performs fetching data from API in a form of
|
|
17
|
+
GarfReport and saving it to local/remote storage.
|
|
18
|
+
"""
|
|
19
|
+
# pylint: disable=C0330, g-bad-import-order, g-multiple-import
|
|
20
|
+
|
|
21
|
+
from __future__ import annotations
|
|
22
|
+
|
|
23
|
+
import logging
|
|
24
|
+
|
|
25
|
+
from garf_core import report_fetcher
|
|
26
|
+
from opentelemetry import trace
|
|
27
|
+
|
|
28
|
+
from garf_executors import exceptions, execution_context, executor, fetchers
|
|
29
|
+
from garf_executors.telemetry import tracer
|
|
30
|
+
|
|
31
|
+
logger = logging.getLogger(__name__)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class ApiExecutionContext(execution_context.ExecutionContext):
  """Common context for executing one or more queries.

  Attributes:
    writer: Alias(es) of writers used to store fetched reports;
      presumably resolved by garf-io via `writer_clients` — defaults
      to 'console'.
  """

  # Single alias or a list of aliases; default writes to console.
  writer: str | list[str] = 'console'
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class ApiQueryExecutor(executor.Executor):
  """Gets data from API and writes them to local/remote storage.

  Attributes:
    fetcher: Report fetcher used for getting data from an API.
  """

  def __init__(self, fetcher: report_fetcher.ApiReportFetcher) -> None:
    """Initializes ApiQueryExecutor.

    Args:
      fetcher: Instantiated report fetcher.
    """
    self.fetcher = fetcher
    super().__init__(
      preprocessors=self.fetcher.preprocessors,
      postprocessors=self.fetcher.postprocessors,
    )

  @classmethod
  def from_fetcher_alias(
    cls,
    source: str,
    fetcher_parameters: dict[str, str] | None = None,
    enable_cache: bool = False,
    cache_ttl_seconds: int = 3600,
  ) -> ApiQueryExecutor:
    """Builds ApiQueryExecutor from a registered fetcher alias.

    Args:
      source: Alias of an API source registered in `fetchers`.
      fetcher_parameters: Optional parameters forwarded to the fetcher.
      enable_cache: Whether to cache fetched reports.
      cache_ttl_seconds: Time-to-live of cached reports in seconds.

    Returns:
      Initialized ApiQueryExecutor.
    """
    if not fetcher_parameters:
      fetcher_parameters = {}
    concrete_api_fetcher = fetchers.get_report_fetcher(source)
    return cls(
      fetcher=concrete_api_fetcher(
        **fetcher_parameters,
        enable_cache=enable_cache,
        cache_ttl_seconds=cache_ttl_seconds,
      )
    )

  @tracer.start_as_current_span('api.execute')
  def execute(
    self,
    query: str,
    title: str,
    context: ApiExecutionContext,
  ) -> str | None:
    """Reads query, extracts results and stores them in a specified location.

    Args:
      query: Location of the query.
      title: Name of the query.
      context: Query execution context.

    Returns:
      Result of writing the report via the last configured writer, or None
      when no writers are configured.

    Raises:
      GarfExecutorError: When failed to execute query.
    """
    span = trace.get_current_span()
    span.set_attribute('fetcher.class', self.fetcher.__class__.__name__)
    span.set_attribute(
      'api.client.class', self.fetcher.api_client.__class__.__name__
    )
    try:
      span.set_attribute('query.title', title)
      span.set_attribute('query.text', query)
      logger.debug('starting query %s', query)
      results = self.fetcher.fetch(
        query_specification=query,
        args=context.query_parameters,
        **context.fetcher_parameters,
      )
      writer_clients = context.writer_clients
      if not writer_clients:
        logger.warning('No writers configured, skipping write operation')
        return None
      writing_results = []
      for writer_client in writer_clients:
        logger.debug(
          'Start writing data for query %s via %s writer',
          title,
          type(writer_client),
        )
        result = writer_client.write(results, title)
        logger.debug(
          'Finish writing data for query %s via %s writer',
          title,
          type(writer_client),
        )
        writing_results.append(result)
      logger.info('%s executed successfully', title)
      # Return the last writer's result for backward compatibility
      return writing_results[-1] if writing_results else None
    except Exception as e:
      logger.error('%s generated an exception: %s', title, str(e))
      # f-string: exception messages are not lazily %-formatted the way
      # logging calls are; the original passed the args through unformatted.
      raise exceptions.GarfExecutorError(
        f'{title} generated an exception: {e}'
      ) from e
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
# Copyright 2024 Google LLC
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# https://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
"""Executes queries in BigQuery."""
|
|
15
|
+
|
|
16
|
+
from __future__ import annotations
|
|
17
|
+
|
|
18
|
+
import contextlib
|
|
19
|
+
import os
|
|
20
|
+
|
|
21
|
+
try:
|
|
22
|
+
from google.cloud import bigquery # type: ignore
|
|
23
|
+
except ImportError as e:
|
|
24
|
+
raise ImportError(
|
|
25
|
+
'Please install garf-executors with BigQuery support '
|
|
26
|
+
'- `pip install garf-executors[bq]`'
|
|
27
|
+
) from e
|
|
28
|
+
|
|
29
|
+
import logging
|
|
30
|
+
|
|
31
|
+
from garf_core import query_editor, report
|
|
32
|
+
from google.cloud import exceptions as google_cloud_exceptions
|
|
33
|
+
from opentelemetry import trace
|
|
34
|
+
|
|
35
|
+
from garf_executors import exceptions, execution_context, executor
|
|
36
|
+
from garf_executors.telemetry import tracer
|
|
37
|
+
|
|
38
|
+
logger = logging.getLogger(__name__)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class BigQueryExecutorError(exceptions.GarfExecutorError):
  """Error when BigQueryExecutor fails to run query.

  Raised for missing project id or failed BigQuery jobs.
  """
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
  """Handles query execution in BigQuery.

  Attributes:
    project_id: Google Cloud project id.
    location: BigQuery dataset location.
    client: BigQuery client.
  """

  def __init__(
    self,
    project_id: str | None = None,
    location: str | None = None,
  ) -> None:
    """Initializes BigQueryExecutor.

    Args:
      project_id: Google Cloud project id; falls back to the
        GOOGLE_CLOUD_PROJECT environment variable.
      location: BigQuery dataset location.

    Raises:
      BigQueryExecutorError: When no project id can be determined.
    """
    # Read the env variable at call time; as a parameter default it would be
    # evaluated only once at import time and ignore later environment changes.
    if not project_id:
      project_id = os.getenv('GOOGLE_CLOUD_PROJECT')
    if not project_id:
      raise BigQueryExecutorError(
        'project_id is required. Either provide it as project_id parameter '
        'or GOOGLE_CLOUD_PROJECT env variable.'
      )
    self.project_id = project_id
    self.location = location
    super().__init__()

  @property
  def client(self) -> bigquery.Client:
    """Instantiates bigquery client.

    NOTE(review): a new client is built on every access; heavy callers may
    want to keep a reference to one instance.
    """
    return bigquery.Client(self.project_id)

  @tracer.start_as_current_span('bq.execute')
  def execute(
    self,
    query: str,
    title: str,
    context: execution_context.ExecutionContext | None = None,
  ) -> report.GarfReport | str | None:
    """Executes query in BigQuery.

    Args:
      query: Location of the query.
      title: Name of the query.
      context: Query execution context; a fresh default context is created
        when omitted (avoids sharing one mutable default instance).

    Returns:
      Report with data (empty if query returned no rows) when no writers are
      configured; otherwise the last writer's result.

    Raises:
      BigQueryExecutorError: When query execution fails.
    """
    if context is None:
      context = execution_context.ExecutionContext()
    span = trace.get_current_span()
    logger.info('Executing script: %s', title)
    query_text = self.replace_params_template(query, context.query_parameters)
    self.create_datasets(context.query_parameters.macro)
    job = self.client.query(query_text)
    try:
      result = job.result()
    except google_cloud_exceptions.GoogleCloudError as e:
      raise BigQueryExecutorError(
        f'Failed to execute query {title}: Reason: {e}'
      ) from e
    logger.debug('%s launched successfully', title)
    if result.total_rows:
      results = report.GarfReport.from_pandas(result.to_dataframe())
    else:
      results = report.GarfReport()
    if context.writer and results:
      writer_clients = context.writer_clients
      if not writer_clients:
        logger.warning('No writers configured, skipping write operation')
      else:
        writing_results = []
        for writer_client in writer_clients:
          logger.debug(
            'Start writing data for query %s via %s writer',
            title,
            type(writer_client),
          )
          writing_result = writer_client.write(results, title)
          logger.debug(
            'Finish writing data for query %s via %s writer',
            title,
            type(writer_client),
          )
          writing_results.append(writing_result)
        # Return the last writer's result for backward compatibility
        logger.info('%s executed successfully', title)
        return writing_results[-1] if writing_results else None
    logger.info('%s executed successfully', title)
    span.set_attribute('execute.num_results', len(results))
    return results

  @tracer.start_as_current_span('bq.create_datasets')
  def create_datasets(self, macros: dict | None) -> None:
    """Creates datasets in BQ based on values in a dict.

    If dict contains keys with 'dataset' in them, then values for such keys
    are treated as dataset names.

    Args:
      macros: Mapping containing data for query execution.
    """
    if macros and (datasets := extract_datasets(macros)):
      for dataset in datasets:
        dataset_id = f'{self.project_id}.{dataset}'
        try:
          self.client.get_dataset(dataset_id)
        except google_cloud_exceptions.NotFound:
          bq_dataset = bigquery.Dataset(dataset_id)
          bq_dataset.location = self.location
          # Conflict means another process created it concurrently - skip.
          with contextlib.suppress(google_cloud_exceptions.Conflict):
            self.client.create_dataset(bq_dataset, timeout=30)
            logger.info('Created new dataset %s', dataset_id)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def extract_datasets(macros: dict | None) -> list[str]:
  """Finds dataset-related keys based on values in a dict.

  If dict contains keys with 'dataset' in them, then values for such keys
  are treated as dataset names.

  Args:
    macros: Mapping containing data for query execution.

  Returns:
    Possible names of datasets.
  """
  if not macros:
    return []
  dataset_names = []
  for macro_name, macro_value in macros.items():
    if 'dataset' in macro_name:
      dataset_names.append(macro_value)
  return dataset_names
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
# Copyright 2025 Google LLC
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# https://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
# pylint: disable=C0330, g-bad-import-order, g-multiple-import
|
|
16
|
+
|
|
17
|
+
"""Stores mapping between API aliases and their execution context."""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
import os
|
|
22
|
+
import pathlib
|
|
23
|
+
|
|
24
|
+
import pydantic
|
|
25
|
+
import smart_open
|
|
26
|
+
import yaml
|
|
27
|
+
|
|
28
|
+
from garf_executors.execution_context import ExecutionContext
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Config(pydantic.BaseModel):
  """Stores necessary parameters for one or multiple API sources.

  Attributes:
    sources: Mapping between API source alias and execution parameters.
  """

  sources: dict[str, ExecutionContext]

  @classmethod
  def from_file(cls, path: str | pathlib.Path | os.PathLike[str]) -> Config:
    """Builds config from local or remote yaml file."""
    with smart_open.open(path, 'r', encoding='utf-8') as config_file:
      parsed_sources = yaml.safe_load(config_file)
    return Config(sources=parsed_sources)

  def save(self, path: str | pathlib.Path | os.PathLike[str]) -> str:
    """Saves config to local or remote yaml file."""
    serializable_sources = self.model_dump(exclude_none=True).get('sources')
    with smart_open.open(path, 'w', encoding='utf-8') as config_file:
      yaml.dump(serializable_sources, config_file, encoding='utf-8')
    return f'Config is saved to {str(path)}'
|
|
File without changes
|