garf-executors 0.0.11-py3-none-any.whl → 0.1.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- garf_executors/__init__.py +13 -4
- garf_executors/api_executor.py +9 -18
- garf_executors/bq_executor.py +3 -0
- garf_executors/entrypoints/cli.py +30 -6
- garf_executors/entrypoints/server.py +19 -7
- garf_executors/entrypoints/tracer.py +42 -0
- garf_executors/entrypoints/utils.py +19 -0
- garf_executors/execution_context.py +2 -2
- garf_executors/executor.py +48 -15
- garf_executors/fetchers.py +10 -3
- garf_executors/sql_executor.py +5 -0
- garf_executors/telemetry.py +20 -0
- {garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/METADATA +8 -3
- garf_executors-0.1.4.dist-info/RECORD +20 -0
- garf_executors-0.0.11.dist-info/RECORD +0 -18
- {garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/WHEEL +0 -0
- {garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/entry_points.txt +0 -0
- {garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/top_level.txt +0 -0
garf_executors/__init__.py
CHANGED

@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Executors to fetch data from various
+"""Executors to fetch data from various APIs."""

 from __future__ import annotations

@@ -19,10 +19,15 @@ import importlib

 from garf_executors import executor, fetchers
 from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor
+from garf_executors.telemetry import tracer


+@tracer.start_as_current_span('setup_executor')
 def setup_executor(
-  source: str,
+  source: str,
+  fetcher_parameters: dict[str, str | int | bool],
+  enable_cache: bool = False,
+  cache_ttl_seconds: int = 3600,
 ) -> type[executor.Executor]:
   """Initializes executors based on a source and parameters."""
   if source == 'bq':
@@ -38,7 +43,11 @@ def setup_executor(
   else:
     concrete_api_fetcher = fetchers.get_report_fetcher(source)
     query_executor = ApiQueryExecutor(
-      concrete_api_fetcher(
+      concrete_api_fetcher(
+        **fetcher_parameters,
+        enable_cache=enable_cache,
+        cache_ttl_seconds=cache_ttl_seconds,
+      )
     )
     return query_executor

@@ -48,4 +57,4 @@ __all__ = [
   'ApiExecutionContext',
 ]

-__version__ = '0.
+__version__ = '0.1.4'
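For orientation, the reworked `setup_executor` now accepts fetcher parameters and cache settings directly and forwards them to the concrete fetcher. A minimal usage sketch, assuming garf-executors 0.1.4 is installed; the source name and fetcher parameters below are illustrative, not taken from the diff:

```python
import garf_executors

# Hypothetical source registered via the 'garf' entry-point group and
# hypothetical fetcher kwargs; only the keyword names come from the diff.
query_executor = garf_executors.setup_executor(
    source='some-api',
    fetcher_parameters={'account': '123'},
    enable_cache=True,        # new pass-through caching flag
    cache_ttl_seconds=600,    # new pass-through cache TTL
)
```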
garf_executors/api_executor.py
CHANGED

@@ -20,11 +20,14 @@ GarfReport and saving it to local/remote storage.

 from __future__ import annotations

+import asyncio
 import logging

 from garf_core import report_fetcher
+from opentelemetry import trace

 from garf_executors import exceptions, execution_context, executor, fetchers
+from garf_executors.telemetry import tracer

 logger = logging.getLogger(__name__)

@@ -59,24 +62,7 @@ class ApiQueryExecutor(executor.Executor):
     concrete_api_fetcher = fetchers.get_report_fetcher(source)
     return ApiQueryExecutor(concrete_api_fetcher(**fetcher_parameters))

-
-    self,
-    query: str,
-    title: str,
-    context: ApiExecutionContext,
-  ) -> str:
-    """Performs query execution asynchronously.
-
-    Args:
-      query: Location of the query.
-      title: Name of the query.
-      context: Query execution context.
-
-    Returns:
-      Result of writing the report.
-    """
-    return await self.execute(query, context, title, context)
-
+  @tracer.start_as_current_span('api.execute')
   def execute(
     self,
     query: str,
@@ -96,7 +82,12 @@ class ApiQueryExecutor(executor.Executor):
     Raises:
       GarfExecutorError: When failed to execute query.
     """
+    span = trace.get_current_span()
+    span.set_attribute('fetcher', self.fetcher.__class__.__name__)
+    span.set_attribute('api_client', self.fetcher.api_client.__class__.__name__)
     try:
+      span.set_attribute('query_title', title)
+      span.set_attribute('query_text', query)
       logger.debug('starting query %s', query)
       results = self.fetcher.fetch(
         query_specification=query,
garf_executors/bq_executor.py
CHANGED

@@ -31,6 +31,7 @@ from garf_core import query_editor, report
 from google.cloud import exceptions as google_cloud_exceptions

 from garf_executors import exceptions, execution_context, executor
+from garf_executors.telemetry import tracer

 logger = logging.getLogger(__name__)

@@ -72,6 +73,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
     """Instantiates bigquery client."""
     return bigquery.Client(self.project_id)

+  @tracer.start_as_current_span('bq.execute')
   def execute(
     self,
     query: str,
@@ -119,6 +121,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
     except google_cloud_exceptions.GoogleCloudError as e:
       raise BigQueryExecutorError(e) from e

+  @tracer.start_as_current_span('bq.create_datasets')
   def create_datasets(self, macros: dict | None) -> None:
     """Creates datasets in BQ based on values in a dict.

garf_executors/entrypoints/cli.py
CHANGED

@@ -20,6 +20,7 @@ storage.
 from __future__ import annotations

 import argparse
+import logging
 import sys

 from garf_io import reader
@@ -27,8 +28,13 @@ from garf_io import reader
 import garf_executors
 from garf_executors import config, exceptions
 from garf_executors.entrypoints import utils
+from garf_executors.entrypoints.tracer import initialize_tracer
+from garf_executors.telemetry import tracer

+initialize_tracer()

+
+@tracer.start_as_current_span('garf.entrypoints.cli')
 def main():
   parser = argparse.ArgumentParser()
   parser.add_argument('query', nargs='*')
@@ -38,6 +44,7 @@ def main():
   parser.add_argument('--input', dest='input', default='file')
   parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
   parser.add_argument('--logger', dest='logger', default='local')
+  parser.add_argument('--log-name', dest='log_name', default='garf')
   parser.add_argument(
     '--parallel-queries', dest='parallel_queries', action='store_true'
   )
@@ -47,9 +54,19 @@ def main():
   parser.add_argument('--dry-run', dest='dry_run', action='store_true')
   parser.add_argument('-v', '--version', dest='version', action='store_true')
   parser.add_argument(
-    '--parallel-threshold', dest='parallel_threshold', default=
+    '--parallel-threshold', dest='parallel_threshold', default=10, type=int
+  )
+  parser.add_argument(
+    '--enable-cache', dest='enable_cache', action='store_true'
+  )
+  parser.add_argument(
+    '--cache-ttl-seconds',
+    dest='cache_ttl_seconds',
+    default=3600,
+    type=int,
   )
   parser.set_defaults(parallel_queries=True)
+  parser.set_defaults(enable_cache=False)
   parser.set_defaults(dry_run=False)
   args, kwargs = parser.parse_known_args()

@@ -57,7 +74,7 @@ def main():
     print(garf_executors.__version__)
     sys.exit()
   logger = utils.init_logging(
-    loglevel=args.loglevel.upper(), logger_type=args.logger
+    loglevel=args.loglevel.upper(), logger_type=args.logger, name=args.log_name
   )
   if not args.query:
     logger.error('Please provide one or more queries to run')
@@ -72,10 +89,13 @@ def main():
       f'No execution context found for source {args.source} in {config_file}'
     )
    query_executor = garf_executors.setup_executor(
-      args.source,
+      source=args.source,
+      fetcher_parameters=context.fetcher_parameters,
+      enable_cache=args.enable_cache,
+      cache_ttl_seconds=args.cache_ttl_seconds,
    )
    batch = {query: reader_client.read(query) for query in args.query}
-    query_executor.execute_batch(batch, context, args.
+    query_executor.execute_batch(batch, context, args.parallel_threshold)
   else:
     extra_parameters = utils.ParamsParser(
       ['source', args.output, 'macro', 'template']
@@ -92,16 +112,20 @@ def main():
       fetcher_parameters=source_parameters,
     )
     query_executor = garf_executors.setup_executor(
-      args.source,
+      source=args.source,
+      fetcher_parameters=context.fetcher_parameters,
+      enable_cache=args.enable_cache,
+      cache_ttl_seconds=args.cache_ttl_seconds,
     )
     if args.parallel_queries:
       logger.info('Running queries in parallel')
       batch = {query: reader_client.read(query) for query in args.query}
-      query_executor.execute_batch(batch, context, args.
+      query_executor.execute_batch(batch, context, args.parallel_threshold)
     else:
       logger.info('Running queries sequentially')
       for query in args.query:
         query_executor.execute(reader_client.read(query), query, context)
+  logging.shutdown()


 if __name__ == '__main__':
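The CLI gains `--log-name`, `--enable-cache`, `--cache-ttl-seconds`, and a typed `--parallel-threshold`. A small sketch of how these flags parse, using a bare `argparse` parser limited to the options added in this release (defaults mirror the new cli.py):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--log-name', dest='log_name', default='garf')
parser.add_argument('--parallel-threshold', dest='parallel_threshold', default=10, type=int)
parser.add_argument('--enable-cache', dest='enable_cache', action='store_true')
parser.add_argument('--cache-ttl-seconds', dest='cache_ttl_seconds', default=3600, type=int)

args = parser.parse_args(['--enable-cache', '--cache-ttl-seconds', '900'])
print(args.enable_cache, args.cache_ttl_seconds, args.parallel_threshold, args.log_name)
# True 900 10 garf
```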
garf_executors/entrypoints/server.py
CHANGED

@@ -20,9 +20,15 @@ import fastapi
 import pydantic
 import uvicorn
 from garf_io import reader
+from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor

 import garf_executors
 from garf_executors import exceptions
+from garf_executors.entrypoints.tracer import initialize_tracer
+
+initialize_tracer()
+app = fastapi.FastAPI()
+FastAPIInstrumentor.instrument_app(app)


 class ApiExecutorRequest(pydantic.BaseModel):
@@ -40,7 +46,7 @@ class ApiExecutorRequest(pydantic.BaseModel):
   title: Optional[str] = None
   query: Optional[str] = None
   query_path: Optional[Union[str, list[str]]] = None
-  context: garf_executors.ApiExecutionContext
+  context: garf_executors.api_executor.ApiExecutionContext

   @pydantic.model_validator(mode='after')
   def check_query_specified(self):
@@ -67,10 +73,18 @@ class ApiExecutorResponse(pydantic.BaseModel):
   results: list[str]


-
+@app.get('/api/version')
+async def version() -> str:
+  return garf_executors.__version__
+
+
+@app.get('/api/fetchers')
+async def get_fetchers() -> list[str]:
+  """Shows all available API sources."""
+  return list(garf_executors.fetchers.find_fetchers())


-@
+@app.post('/api/execute')
 async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
   query_executor = garf_executors.setup_executor(
     request.source, request.context.fetcher_parameters
@@ -79,8 +93,8 @@ async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
   return ApiExecutorResponse(results=[result])


-@
-
+@app.post('/api/execute:batch')
+def execute_batch(request: ApiExecutorRequest) -> ApiExecutorResponse:
   query_executor = garf_executors.setup_executor(
     request.source, request.context.fetcher_parameters
   )
@@ -91,6 +105,4 @@ async def execute_batch(request: ApiExecutorRequest) -> ApiExecutorResponse:


 if __name__ == '__main__':
-  app = fastapi.FastAPI()
-  app.include_router(router)
   uvicorn.run(app)
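The server module now builds and instruments the FastAPI `app` at import time and exposes two new GET endpoints. A hedged sketch of exercising them with FastAPI's test client, assuming the `server` extra is installed; the printed values are illustrative:

```python
from fastapi.testclient import TestClient

from garf_executors.entrypoints import server

client = TestClient(server.app)  # app is created at module import time in 0.1.4

print(client.get('/api/version').json())   # e.g. '0.1.4'
print(client.get('/api/fetchers').json())  # names of fetchers registered via entry points
```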
garf_executors/entrypoints/tracer.py
ADDED

@@ -0,0 +1,42 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
+  OTLPSpanExporter,
+)
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import (
+  BatchSpanProcessor,
+)
+
+DEFAULT_SERVICE_NAME = 'garf'
+
+
+def initialize_tracer():
+  resource = Resource.create(
+    {'service.name': os.getenv('OTLP_SERVICE_NAME', DEFAULT_SERVICE_NAME)}
+  )
+
+  tracer_provider = TracerProvider(resource=resource)
+
+  if otel_endpoint := os.getenv('OTEL_EXPORTER_OTLP_ENDPOINT'):
+    otlp_processor = BatchSpanProcessor(
+      OTLPSpanExporter(endpoint=otel_endpoint, insecure=True)
+    )
+    tracer_provider.add_span_processor(otlp_processor)
+  trace.set_tracer_provider(tracer_provider)
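The new tracer setup only attaches an OTLP exporter when `OTEL_EXPORTER_OTLP_ENDPOINT` is set; otherwise it installs a provider with no span processor. A minimal sketch, assuming the OTLP exporter pulled in by the `server` extra is available; the endpoint value is illustrative:

```python
import os

# Hypothetical collector endpoint (4317 is the conventional OTLP gRPC port).
os.environ['OTEL_EXPORTER_OTLP_ENDPOINT'] = 'http://localhost:4317'
# Optional: overrides the 'garf' default service name.
os.environ['OTLP_SERVICE_NAME'] = 'garf-demo'

from garf_executors.entrypoints.tracer import initialize_tracer

initialize_tracer()  # sets the global TracerProvider; spans now export via OTLP
```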
garf_executors/entrypoints/utils.py
CHANGED

@@ -93,6 +93,7 @@ class GarfParamsException(Exception):
 class LoggerEnum(str, enum.Enum):
   local = 'local'
   rich = 'rich'
+  gcloud = 'gcloud'


 def init_logging(
@@ -100,6 +101,7 @@ def init_logging(
   logger_type: str | LoggerEnum = 'local',
   name: str = __name__,
 ) -> logging.Logger:
+  loglevel = getattr(logging, loglevel)
   if logger_type == 'rich':
     logging.basicConfig(
       format='%(message)s',
@@ -109,6 +111,23 @@
         rich_logging.RichHandler(rich_tracebacks=True),
       ],
     )
+  elif logger_type == 'gcloud':
+    try:
+      import google.cloud.logging as glogging
+    except ImportError as e:
+      raise ImportError(
+        'Please install garf-executors with Cloud logging support - '
+        '`pip install garf-executors[bq]`'
+      ) from e
+
+    client = glogging.Client()
+    handler = glogging.handlers.CloudLoggingHandler(client, name=name)
+    handler.close()
+    glogging.handlers.setup_logging(handler, log_level=loglevel)
+    logging.basicConfig(
+      level=loglevel,
+      handlers=[handler],
+    )
   else:
     logging.basicConfig(
       format='[%(asctime)s][%(name)s][%(levelname)s] %(message)s',
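The new `gcloud` logger type routes records through a Cloud Logging handler; the CLI selects it via `--logger gcloud` and names the log with `--log-name`. A hedged sketch of calling `init_logging` directly, assuming the `bq` extra (which now pulls in `google-cloud-logging`) and Google Cloud credentials are available:

```python
from garf_executors.entrypoints import utils

# Requires `pip install garf-executors[bq]` and application-default credentials;
# without the extra this raises the ImportError added in this release.
logger = utils.init_logging(
    loglevel='INFO',        # converted with getattr(logging, loglevel) in 0.1.x
    logger_type='gcloud',
    name='garf',            # Cloud Logging log name (the CLI's --log-name)
)
logger.info('garf run started')
```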
garf_executors/execution_context.py
CHANGED

@@ -42,8 +42,8 @@ class ExecutionContext(pydantic.BaseModel):
   query_parameters: query_editor.GarfQueryParameters | None = pydantic.Field(
     default_factory=dict
   )
-  fetcher_parameters: dict[str, str | list[str | int]] | None =
-    default_factory=dict
+  fetcher_parameters: dict[str, str | bool | int | list[str | int]] | None = (
+    pydantic.Field(default_factory=dict)
   )
   writer: str | None = None
   writer_parameters: dict[str, str] | None = pydantic.Field(
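The `fetcher_parameters` annotation is widened so boolean and integer values (such as the new cache flags) validate cleanly. A small sketch with illustrative field values, assuming the remaining `ExecutionContext` fields keep their defaults:

```python
from garf_executors import execution_context

# Illustrative values; bools and ints are now accepted alongside strings/lists.
context = execution_context.ExecutionContext(
    fetcher_parameters={'account': '123', 'use_sandbox': True, 'page_size': 500},
)
print(context.fetcher_parameters)
```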
garf_executors/executor.py
CHANGED

@@ -14,14 +14,18 @@

 """Defines common functionality between executors."""

-
+import asyncio
+
+from opentelemetry import trace

 from garf_executors import execution_context
+from garf_executors.telemetry import tracer


 class Executor:
   """Defines common functionality between executors."""

+  @tracer.start_as_current_span('api.execute_batch')
   def execute_batch(
     self,
     batch: dict[str, str],
@@ -38,17 +42,46 @@ class Executor:
     Returns:
       Results of execution.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    span = trace.get_current_span()
+    span.set_attribute('api.parallel_threshold', parallel_threshold)
+    return asyncio.run(
+      self._run(
+        batch=batch, context=context, parallel_threshold=parallel_threshold
+      )
+    )
+
+  async def aexecute(
+    self,
+    query: str,
+    title: str,
+    context: execution_context.ExecutionContext,
+  ) -> str:
+    """Performs query execution asynchronously.
+
+    Args:
+      query: Location of the query.
+      title: Name of the query.
+      context: Query execution context.
+
+    Returns:
+      Result of writing the report.
+    """
+    return await asyncio.to_thread(self.execute, query, title, context)
+
+  async def _run(
+    self,
+    batch: dict[str, str],
+    context: execution_context.ExecutionContext,
+    parallel_threshold: int,
+  ):
+    semaphore = asyncio.Semaphore(value=parallel_threshold)
+
+    async def run_with_semaphore(fn):
+      async with semaphore:
+        return await fn
+
+    tasks = [
+      self.aexecute(query=query, title=title, context=context)
+      for title, query in batch.items()
+    ]
+    return await asyncio.gather(*(run_with_semaphore(task) for task in tasks))
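The new batch path offloads each blocking `execute` call to a thread with `asyncio.to_thread` and caps concurrency with a semaphore before gathering results. A standalone sketch of the same pattern with generic names (not the package's API):

```python
import asyncio


async def run_all(jobs, parallel_threshold: int):
  # Cap how many jobs run concurrently, mirroring Executor._run above.
  semaphore = asyncio.Semaphore(parallel_threshold)

  async def gated(coro):
    async with semaphore:
      return await coro

  # Each blocking job runs in a worker thread, keeping the event loop free.
  tasks = [asyncio.to_thread(job) for job in jobs]
  return await asyncio.gather(*(gated(task) for task in tasks))


# Three dummy "queries", at most two in flight at a time.
print(asyncio.run(run_all([lambda: 1, lambda: 2, lambda: 3], parallel_threshold=2)))
```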
garf_executors/fetchers.py
CHANGED

@@ -16,9 +16,13 @@ import inspect
 import sys
 from importlib.metadata import entry_points

-from garf_core import
+from garf_core import report_fetcher
+from opentelemetry import trace

+from garf_executors.telemetry import tracer

+
+@tracer.start_as_current_span('find_fetchers')
 def find_fetchers() -> set[str]:
   """Identifiers all available report fetchers."""
   if entrypoints := _get_entrypoints('garf'):
@@ -26,6 +30,7 @@ def find_fetchers() -> set[str]:
   return set()


+@tracer.start_as_current_span('get_report_fetcher')
 def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
   """Loads report fetcher for a given source.

@@ -44,7 +49,9 @@ def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
   for fetcher in _get_entrypoints('garf'):
     if fetcher.name == source:
       try:
-
+        with tracer.start_as_current_span('load_fetcher_module') as span:
+          fetcher_module = fetcher.load()
+          span.set_attribute('loaded_module', fetcher_module.__name__)
         for name, obj in inspect.getmembers(fetcher_module):
           if inspect.isclass(obj) and issubclass(
             obj, report_fetcher.ApiReportFetcher
@@ -52,7 +59,7 @@ def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
             return getattr(fetcher_module, name)
       except ModuleNotFoundError:
         continue
-  raise
+  raise report_fetcher.ApiReportFetcherError(
     f'No fetcher available for the source "{source}"'
   )

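Fetcher discovery is driven by the `garf` entry-point group, and module loading is now wrapped in its own span. A quick sketch of listing what is installed locally (the result depends entirely on which garf fetcher packages are present):

```python
from garf_executors import fetchers

# Returns the set of source names registered under the 'garf' entry-point
# group, or an empty set if no fetcher packages are installed.
print(fetchers.find_fetchers())
```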
garf_executors/sql_executor.py
CHANGED

@@ -28,8 +28,10 @@ import re

 import pandas as pd
 from garf_core import query_editor, report
+from opentelemetry import trace

 from garf_executors import exceptions, execution_context, executor
+from garf_executors.telemetry import tracer

 logger = logging.getLogger(__name__)

@@ -66,6 +68,7 @@ class SqlAlchemyQueryExecutor(
     engine = sqlalchemy.create_engine(connection_string)
     return cls(engine)

+  @tracer.start_as_current_span('sql.execute')
   def execute(
     self,
     query: str,
@@ -84,6 +87,7 @@ class SqlAlchemyQueryExecutor(
     Returns:
       Report with data if query returns some data otherwise empty Report.
     """
+    span = trace.get_current_span()
     logging.info('Executing script: %s', title)
     query_text = self.replace_params_template(query, context.query_parameters)
     with self.engine.begin() as conn:
@@ -115,4 +119,5 @@ class SqlAlchemyQueryExecutor(
       )
       logger.info('%s executed successfully', title)
       return writing_result
+    span.set_attribute('execute.num_results', len(results))
     return results
garf_executors/telemetry.py
ADDED

@@ -0,0 +1,20 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=C0330, g-bad-import-order, g-multiple-import
+from opentelemetry import trace
+
+tracer = trace.get_tracer(
+  instrumenting_module_name='garf_executors',
+)
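The shared `tracer` defined above is what the rest of this release uses for its decorators and span attributes. A minimal sketch of the same pattern with hypothetical span names; without `initialize_tracer()` the OpenTelemetry API simply records no-op spans:

```python
from opentelemetry import trace

from garf_executors.telemetry import tracer


@tracer.start_as_current_span('demo.work')  # hypothetical span name
def do_work(query_title: str) -> str:
  # Same attribute style as api.execute in this release.
  trace.get_current_span().set_attribute('query_title', query_title)
  with tracer.start_as_current_span('demo.load') as span:
    span.set_attribute('step', 'load')
  return 'ok'


do_work('campaign_stats')
```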
{garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/METADATA
CHANGED

@@ -1,34 +1,39 @@
 Metadata-Version: 2.4
 Name: garf-executors
-Version: 0.
+Version: 0.1.4
 Summary: Executes queries against API and writes data to local/remote storage.
 Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
 License: Apache 2.0
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Classifier: Intended Audience :: Developers
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Operating System :: OS Independent
 Classifier: License :: OSI Approved :: Apache Software License
-Requires-Python: >=3.
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 Requires-Dist: garf-core
 Requires-Dist: garf-io
 Requires-Dist: pyyaml
 Requires-Dist: pydantic
+Requires-Dist: opentelemetry-api
+Requires-Dist: opentelemetry-sdk
 Provides-Extra: bq
 Requires-Dist: garf-io[bq]; extra == "bq"
 Requires-Dist: pandas; extra == "bq"
+Requires-Dist: google-cloud-logging; extra == "bq"
 Provides-Extra: sql
 Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
 Requires-Dist: pandas; extra == "sql"
 Provides-Extra: server
 Requires-Dist: fastapi[standard]; extra == "server"
+Requires-Dist: opentelemetry-instrumentation-fastapi; extra == "server"
+Requires-Dist: opentelemetry-exporter-otlp; extra == "server"
 Provides-Extra: all
 Requires-Dist: garf-executors[bq,server,sql]; extra == "all"

garf_executors-0.1.4.dist-info/RECORD
ADDED

@@ -0,0 +1,20 @@
+garf_executors/__init__.py,sha256=4BZv9zb3tjlpF4kQSdTj1L5IdR-BiNQwtejg5dPTTcY,1933
+garf_executors/api_executor.py,sha256=TxHtdnXjXjfBDU0z13yCulqF0XcEqAoOdVeGczdTSXs,3590
+garf_executors/bq_executor.py,sha256=LOKNitigaMk4U-UjBZTHy4vG092nw6suEbgo2rrHCTI,5002
+garf_executors/config.py,sha256=TqCzijm1PRvL4p-9Zl-kPkcC1SFKjhgTfKMJFmJW3fQ,1688
+garf_executors/exceptions.py,sha256=U_7Q2ZMOUf89gzZd2pw7y3g7i1NeByPPKfpZ3q7p3ZU,662
+garf_executors/execution_context.py,sha256=X4Wm_rE1mnnN2FuC_9bL05a8h8ko7qraeGY955ijNJc,2800
+garf_executors/executor.py,sha256=_Nj6CKgyhzwFOxneODDhV1bvLjrMEvIu93W8YF9-sXo,2481
+garf_executors/fetchers.py,sha256=HQqnMb0wlasVfXmAA7cnsd73POXPEGPxaC5mpEOnQk4,2443
+garf_executors/sql_executor.py,sha256=_4oVPZKTd3lrDE0SM6uQ_bl13Ay9uhQuD-PHO9247WM,3920
+garf_executors/telemetry.py,sha256=P75klGEoYgJ_-pR-izUIQ7B88ufskQ4vmW1rETg63Nc,747
+garf_executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+garf_executors/entrypoints/cli.py,sha256=Qbg10LLWHEMBjjsOfEMDZQtjWpUwh6WJKSnqiXOzF6A,4765
+garf_executors/entrypoints/server.py,sha256=b9blyBvN774RiTHUCZkfE5kNVnrTaANrETI4WMDHJeQ,3255
+garf_executors/entrypoints/tracer.py,sha256=A_nolmGuMT3wOZJsoPORjfdtPO2lXdbr6CZt5BW0RTY,1374
+garf_executors/entrypoints/utils.py,sha256=5XiGR2IOxdzAOY0lEWUeUV7tIpKBGRnQaIwBYvzQB7c,4337
+garf_executors-0.1.4.dist-info/METADATA,sha256=3Z0plyqxqwCKYOm2PlXIfvxGo0lAVkdIaLD0s0pgZzQ,2900
+garf_executors-0.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+garf_executors-0.1.4.dist-info/entry_points.txt,sha256=LskWNFIw8j0WJuI18-32OZrlASXAMg1XtrRYwsKBz2E,61
+garf_executors-0.1.4.dist-info/top_level.txt,sha256=sP4dCXOENPn1hDFAunjMV8Js4NND_KGeO_gQWuaT0EY,15
+garf_executors-0.1.4.dist-info/RECORD,,
garf_executors-0.0.11.dist-info/RECORD
REMOVED

@@ -1,18 +0,0 @@
-garf_executors/__init__.py,sha256=BqmHrQ1gcaI5qv9ZVhZ9CzFbZ8vEFCk3Qw2SFCy3QHI,1687
-garf_executors/api_executor.py,sha256=gifws1Kv-k_v3TtRQGn-WJiRQ1yWSyAdOJk38ab-nms,3573
-garf_executors/bq_executor.py,sha256=yVgncimVLST8_60JMrS5Ain21RFt7K4TR2ePOja5WNA,4858
-garf_executors/config.py,sha256=TqCzijm1PRvL4p-9Zl-kPkcC1SFKjhgTfKMJFmJW3fQ,1688
-garf_executors/exceptions.py,sha256=U_7Q2ZMOUf89gzZd2pw7y3g7i1NeByPPKfpZ3q7p3ZU,662
-garf_executors/execution_context.py,sha256=21u-Z5wRyqYFrFzph_ocqaKXypXyTyjBzutUGQbeBY4,2785
-garf_executors/executor.py,sha256=bGTGlWZT5B7I_WIjhuQ0CkL7Dij_ijFCBxuC1jGVkng,1626
-garf_executors/fetchers.py,sha256=Uoolh9L3Na2E6QsrnKV6Pwv5RrCKkcjds5gkDo0IxOw,2128
-garf_executors/sql_executor.py,sha256=OGUN1AaSi6jC1v4YW0ZmcYXPE5EYfNbBRXrpdf4QTk4,3699
-garf_executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-garf_executors/entrypoints/cli.py,sha256=eBg1QoJ9ipBIV06wVRuGJEY3a4QJC-k1S_tNB3E-2A4,3969
-garf_executors/entrypoints/server.py,sha256=rdxL8uLsdRsQ3tFC3gasCsd06Pm-nZgiwcaaG1q5lHY,2872
-garf_executors/entrypoints/utils.py,sha256=IjAu-Q1REQPuYd-gVGKhxai4LLXvlHTmE-FEID038NM,3741
-garf_executors-0.0.11.dist-info/METADATA,sha256=g4N7s2psxDh6eCxfHSc7VC2g5vwuvMfSjm4tCbtPOfQ,2649
-garf_executors-0.0.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-garf_executors-0.0.11.dist-info/entry_points.txt,sha256=LskWNFIw8j0WJuI18-32OZrlASXAMg1XtrRYwsKBz2E,61
-garf_executors-0.0.11.dist-info/top_level.txt,sha256=sP4dCXOENPn1hDFAunjMV8Js4NND_KGeO_gQWuaT0EY,15
-garf_executors-0.0.11.dist-info/RECORD,,
{garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/WHEEL
File without changes

{garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/entry_points.txt
File without changes

{garf_executors-0.0.11.dist-info → garf_executors-0.1.4.dist-info}/top_level.txt
File without changes