garf-executors 0.0.13__tar.gz → 0.1.0__tar.gz

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (25)
  1. {garf_executors-0.0.13 → garf_executors-0.1.0}/PKG-INFO +5 -1
  2. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/__init__.py +3 -1
  3. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/api_executor.py +2 -0
  4. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/bq_executor.py +3 -0
  5. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/entrypoints/server.py +18 -6
  6. garf_executors-0.1.0/garf_executors/entrypoints/tracer.py +44 -0
  7. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/fetchers.py +10 -3
  8. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/sql_executor.py +5 -0
  9. garf_executors-0.1.0/garf_executors/telemetry.py +20 -0
  10. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/PKG-INFO +5 -1
  11. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/SOURCES.txt +2 -0
  12. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/requires.txt +4 -0
  13. {garf_executors-0.0.13 → garf_executors-0.1.0}/pyproject.toml +4 -0
  14. {garf_executors-0.0.13 → garf_executors-0.1.0}/README.md +0 -0
  15. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/config.py +0 -0
  16. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/entrypoints/__init__.py +0 -0
  17. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/entrypoints/cli.py +0 -0
  18. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/entrypoints/utils.py +0 -0
  19. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/exceptions.py +0 -0
  20. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/execution_context.py +0 -0
  21. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/executor.py +0 -0
  22. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/dependency_links.txt +0 -0
  23. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/entry_points.txt +0 -0
  24. {garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/top_level.txt +0 -0
  25. {garf_executors-0.0.13 → garf_executors-0.1.0}/setup.cfg +0 -0

{garf_executors-0.0.13 → garf_executors-0.1.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: garf-executors
-Version: 0.0.13
+Version: 0.1.0
 Summary: Executes queries against API and writes data to local/remote storage.
 Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
 License: Apache 2.0
@@ -21,6 +21,8 @@ Requires-Dist: garf-core
 Requires-Dist: garf-io
 Requires-Dist: pyyaml
 Requires-Dist: pydantic
+Requires-Dist: opentelemetry-api
+Requires-Dist: opentelemetry-sdk
 Provides-Extra: bq
 Requires-Dist: garf-io[bq]; extra == "bq"
 Requires-Dist: pandas; extra == "bq"
@@ -30,6 +32,8 @@ Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
 Requires-Dist: pandas; extra == "sql"
 Provides-Extra: server
 Requires-Dist: fastapi[standard]; extra == "server"
+Requires-Dist: opentelemetry-instrumentation-fastapi; extra == "server"
+Requires-Dist: opentelemetry-exporter-otlp; extra == "server"
 Provides-Extra: all
 Requires-Dist: garf-executors[bq,server,sql]; extra == "all"
 

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/__init__.py
@@ -19,8 +19,10 @@ import importlib
 
 from garf_executors import executor, fetchers
 from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor
+from garf_executors.telemetry import tracer
 
 
+@tracer.start_as_current_span('setup_executor')
 def setup_executor(
   source: str, fetcher_parameters: dict[str, str]
 ) -> type[executor.Executor]:
@@ -48,4 +50,4 @@ __all__ = [
   'ApiExecutionContext',
 ]
 
-__version__ = '0.0.13'
+__version__ = '0.1.0'

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/api_executor.py
@@ -25,6 +25,7 @@ import logging
 from garf_core import report_fetcher
 
 from garf_executors import exceptions, execution_context, executor, fetchers
+from garf_executors.telemetry import tracer
 
 logger = logging.getLogger(__name__)
 
@@ -77,6 +78,7 @@ class ApiQueryExecutor(executor.Executor):
     """
     return await self.execute(query, context, title, context)
 
+  @tracer.start_as_current_span('api.execute')
   def execute(
     self,
     query: str,

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/bq_executor.py
@@ -31,6 +31,7 @@ from garf_core import query_editor, report
 from google.cloud import exceptions as google_cloud_exceptions
 
 from garf_executors import exceptions, execution_context, executor
+from garf_executors.telemetry import tracer
 
 logger = logging.getLogger(__name__)
 
@@ -72,6 +73,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
     """Instantiates bigquery client."""
     return bigquery.Client(self.project_id)
 
+  @tracer.start_as_current_span('bq.execute')
   def execute(
     self,
     query: str,
@@ -119,6 +121,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
     except google_cloud_exceptions.GoogleCloudError as e:
       raise BigQueryExecutorError(e) from e
 
+  @tracer.start_as_current_span('bq.create_datasets')
   def create_datasets(self, macros: dict | None) -> None:
     """Creates datasets in BQ based on values in a dict.
 

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/entrypoints/server.py
@@ -20,9 +20,15 @@ import fastapi
 import pydantic
 import uvicorn
 from garf_io import reader
+from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
 
 import garf_executors
 from garf_executors import exceptions
+from garf_executors.entrypoints.tracer import initialize_tracer
+
+initialize_tracer()
+app = fastapi.FastAPI()
+FastAPIInstrumentor.instrument_app(app)
 
 
 class ApiExecutorRequest(pydantic.BaseModel):
@@ -40,7 +46,7 @@ class ApiExecutorRequest(pydantic.BaseModel):
   title: Optional[str] = None
   query: Optional[str] = None
   query_path: Optional[Union[str, list[str]]] = None
-  context: garf_executors.ApiExecutionContext
+  context: garf_executors.api_executor.ApiExecutionContext
 
   @pydantic.model_validator(mode='after')
   def check_query_specified(self):
@@ -67,10 +73,18 @@ class ApiExecutorResponse(pydantic.BaseModel):
   results: list[str]
 
 
-router = fastapi.APIRouter(prefix='/api')
+@app.get('/api/version')
+async def version() -> str:
+  return garf_executors.__version__
+
+
+@app.get('/api/fetchers')
+async def get_fetchers() -> list[str]:
+  """Shows all available API sources."""
+  return list(garf_executors.fetchers.find_fetchers())
 
 
-@router.post('/execute')
+@app.post('/api/execute')
 async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
   query_executor = garf_executors.setup_executor(
     request.source, request.context.fetcher_parameters
@@ -79,7 +93,7 @@ async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
   return ApiExecutorResponse(results=[result])
 
 
-@router.post('/execute:batch')
+@app.post('/api/execute:batch')
 async def execute_batch(request: ApiExecutorRequest) -> ApiExecutorResponse:
   query_executor = garf_executors.setup_executor(
     request.source, request.context.fetcher_parameters
@@ -91,6 +105,4 @@ async def execute_batch(request: ApiExecutorRequest) -> ApiExecutorResponse:
 
 
 if __name__ == '__main__':
-  app = fastapi.FastAPI()
-  app.include_router(router)
   uvicorn.run(app)
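
The server module now builds the FastAPI app at import time, instruments it with FastAPIInstrumentor, and adds /api/version and /api/fetchers alongside the existing execute routes. A minimal client sketch, not part of the package, assuming the server is running locally on uvicorn's default port 8000 and that httpx (pulled in by fastapi[standard]) is available:

    # Hypothetical client snippet for the new endpoints; the base URL is an assumption.
    import httpx

    BASE_URL = 'http://localhost:8000'

    # Returns the package version string, e.g. "0.1.0".
    print(httpx.get(f'{BASE_URL}/api/version').json())

    # Returns the names of all installed report fetchers (API sources).
    print(httpx.get(f'{BASE_URL}/api/fetchers').json())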

garf_executors-0.1.0/garf_executors/entrypoints/tracer.py (new file)
@@ -0,0 +1,44 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
+  OTLPSpanExporter,
+)
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import (
+  BatchSpanProcessor,
+)
+
+DEFAULT_SERVICE_NAME = 'garf'
+
+
+def initialize_tracer():
+  resource = Resource.create(
+    {'service.name': os.getenv('OTLP_SERVICE_NAME', DEFAULT_SERVICE_NAME)}
+  )
+
+  tracer_provider = TracerProvider(resource=resource)
+
+  otlp_processor = BatchSpanProcessor(
+    OTLPSpanExporter(
+      endpoint=os.getenv('OTEL_EXPORTER_OTLP_ENDPOINT'), insecure=True
+    )
+  )
+  tracer_provider.add_span_processor(otlp_processor)
+
+  trace.set_tracer_provider(tracer_provider)
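
initialize_tracer() installs a TracerProvider with a batching OTLP/gRPC exporter; the service name comes from OTLP_SERVICE_NAME (default 'garf') and the collector address from OTEL_EXPORTER_OTLP_ENDPOINT. A minimal sketch of using it outside the bundled server, assuming the server extra is installed (for the exporter) and a local OpenTelemetry Collector; the endpoint value is an assumption, and the gRPC exporter falls back to localhost:4317 when it is unset:

    import os

    from garf_executors.entrypoints.tracer import initialize_tracer
    from garf_executors.telemetry import tracer

    # Assumed local collector address and service name; adjust to your environment.
    os.environ.setdefault('OTEL_EXPORTER_OTLP_ENDPOINT', 'http://localhost:4317')
    os.environ.setdefault('OTLP_SERVICE_NAME', 'garf-dev')

    initialize_tracer()  # installs the global TracerProvider

    # Spans created through the shared tracer are now exported over OTLP.
    with tracer.start_as_current_span('manual_check') as span:
      span.set_attribute('example.attr', 'value')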

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/fetchers.py
@@ -16,9 +16,13 @@ import inspect
 import sys
 from importlib.metadata import entry_points
 
-from garf_core import exceptions, report_fetcher
+from garf_core import report_fetcher
+from opentelemetry import trace
 
+from garf_executors.telemetry import tracer
 
+
+@tracer.start_as_current_span('find_fetchers')
 def find_fetchers() -> set[str]:
   """Identifiers all available report fetchers."""
   if entrypoints := _get_entrypoints('garf'):
@@ -26,6 +30,7 @@ def find_fetchers() -> set[str]:
   return set()
 
 
+@tracer.start_as_current_span('get_report_fetcher')
 def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
   """Loads report fetcher for a given source.
 
@@ -44,7 +49,9 @@ def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
   for fetcher in _get_entrypoints('garf'):
     if fetcher.name == source:
       try:
-        fetcher_module = fetcher.load()
+        with tracer.start_as_current_span('load_fetcher_module') as span:
+          fetcher_module = fetcher.load()
+          span.set_attribute('loaded_module', fetcher_module.__name__)
         for name, obj in inspect.getmembers(fetcher_module):
           if inspect.isclass(obj) and issubclass(
             obj, report_fetcher.ApiReportFetcher
@@ -52,7 +59,7 @@ def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
           return getattr(fetcher_module, name)
       except ModuleNotFoundError:
         continue
-  raise exceptions.ApiReportFetcherError(
+  raise report_fetcher.ApiReportFetcherError(
     f'No fetcher available for the source "{source}"'
   )
 

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors/sql_executor.py
@@ -28,8 +28,10 @@ import re
 
 import pandas as pd
 from garf_core import query_editor, report
+from opentelemetry import trace
 
 from garf_executors import exceptions, execution_context, executor
+from garf_executors.telemetry import tracer
 
 logger = logging.getLogger(__name__)
 
@@ -66,6 +68,7 @@ class SqlAlchemyQueryExecutor(
     engine = sqlalchemy.create_engine(connection_string)
     return cls(engine)
 
+  @tracer.start_as_current_span('sql.execute')
   def execute(
     self,
     query: str,
@@ -84,6 +87,7 @@ class SqlAlchemyQueryExecutor(
     Returns:
       Report with data if query returns some data otherwise empty Report.
     """
+    span = trace.get_current_span()
     logging.info('Executing script: %s', title)
     query_text = self.replace_params_template(query, context.query_parameters)
     with self.engine.begin() as conn:
@@ -115,4 +119,5 @@ class SqlAlchemyQueryExecutor(
       )
       logger.info('%s executed successfully', title)
       return writing_result
+    span.set_attribute('execute.num_results', len(results))
     return results

garf_executors-0.1.0/garf_executors/telemetry.py (new file)
@@ -0,0 +1,20 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=C0330, g-bad-import-order, g-multiple-import
+from opentelemetry import trace
+
+tracer = trace.get_tracer(
+  instrumenting_module_name='garf_executors',
+)

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: garf-executors
-Version: 0.0.13
+Version: 0.1.0
 Summary: Executes queries against API and writes data to local/remote storage.
 Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
 License: Apache 2.0
@@ -21,6 +21,8 @@ Requires-Dist: garf-core
 Requires-Dist: garf-io
 Requires-Dist: pyyaml
 Requires-Dist: pydantic
+Requires-Dist: opentelemetry-api
+Requires-Dist: opentelemetry-sdk
 Provides-Extra: bq
 Requires-Dist: garf-io[bq]; extra == "bq"
 Requires-Dist: pandas; extra == "bq"
@@ -30,6 +32,8 @@ Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
 Requires-Dist: pandas; extra == "sql"
 Provides-Extra: server
 Requires-Dist: fastapi[standard]; extra == "server"
+Requires-Dist: opentelemetry-instrumentation-fastapi; extra == "server"
+Requires-Dist: opentelemetry-exporter-otlp; extra == "server"
 Provides-Extra: all
 Requires-Dist: garf-executors[bq,server,sql]; extra == "all"
 

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/SOURCES.txt
@@ -9,6 +9,7 @@ garf_executors/execution_context.py
 garf_executors/executor.py
 garf_executors/fetchers.py
 garf_executors/sql_executor.py
+garf_executors/telemetry.py
 garf_executors.egg-info/PKG-INFO
 garf_executors.egg-info/SOURCES.txt
 garf_executors.egg-info/dependency_links.txt
@@ -18,4 +19,5 @@ garf_executors.egg-info/top_level.txt
 garf_executors/entrypoints/__init__.py
 garf_executors/entrypoints/cli.py
 garf_executors/entrypoints/server.py
+garf_executors/entrypoints/tracer.py
 garf_executors/entrypoints/utils.py

{garf_executors-0.0.13 → garf_executors-0.1.0}/garf_executors.egg-info/requires.txt
@@ -2,6 +2,8 @@ garf-core
 garf-io
 pyyaml
 pydantic
+opentelemetry-api
+opentelemetry-sdk
 
 [all]
 garf-executors[bq,server,sql]
@@ -13,6 +15,8 @@ google-cloud-logging
 
 [server]
 fastapi[standard]
+opentelemetry-instrumentation-fastapi
+opentelemetry-exporter-otlp
 
 [sql]
 garf-io[sqlalchemy]

{garf_executors-0.0.13 → garf_executors-0.1.0}/pyproject.toml
@@ -9,6 +9,8 @@ dependencies = [
   "garf-io",
   "pyyaml",
   "pydantic",
+  "opentelemetry-api",
+  "opentelemetry-sdk",
 ]
 authors = [
   {name = "Google Inc. (gTech gPS CSE team)", email = "no-reply@google.com"},
@@ -48,6 +50,8 @@ sql=[
 ]
 server=[
   "fastapi[standard]",
+  "opentelemetry-instrumentation-fastapi",
+  "opentelemetry-exporter-otlp",
 ]
 all = [
   "garf-executors[bq,sql,server]"