garf-executors 0.2.2__tar.gz → 1.0.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of garf-executors might be problematic. Click here for more details.

Files changed (48) hide show
  1. {garf_executors-0.2.2 → garf_executors-1.0.2}/PKG-INFO +15 -6
  2. {garf_executors-0.2.2 → garf_executors-1.0.2}/README.md +8 -2
  3. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/__init__.py +6 -6
  4. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/api_executor.py +10 -4
  5. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/bq_executor.py +4 -4
  6. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/config.py +1 -2
  7. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/entrypoints/cli.py +26 -13
  8. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/entrypoints/grpc_server.py +5 -6
  9. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/entrypoints/server.py +9 -10
  10. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/entrypoints/tracer.py +20 -5
  11. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/execution_context.py +6 -5
  12. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/executor.py +4 -4
  13. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/fetchers.py +3 -4
  14. garf_executors-1.0.2/garf/executors/query_processor.py +61 -0
  15. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/sql_executor.py +9 -8
  16. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/telemetry.py +1 -1
  17. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/workflow.py +17 -4
  18. garf_executors-1.0.2/garf_executors/__init__.py +25 -0
  19. garf_executors-1.0.2/garf_executors/api_executor.py +25 -0
  20. garf_executors-1.0.2/garf_executors/bq_executor.py +25 -0
  21. garf_executors-1.0.2/garf_executors/config.py +25 -0
  22. garf_executors-1.0.2/garf_executors/entrypoints/__init__.py +25 -0
  23. garf_executors-1.0.2/garf_executors/entrypoints/cli.py +25 -0
  24. garf_executors-1.0.2/garf_executors/entrypoints/grcp_server.py +25 -0
  25. garf_executors-1.0.2/garf_executors/entrypoints/server.py +25 -0
  26. garf_executors-1.0.2/garf_executors/entrypoints/tracer.py +25 -0
  27. garf_executors-1.0.2/garf_executors/entrypoints/utils.py +25 -0
  28. garf_executors-1.0.2/garf_executors/exceptions.py +25 -0
  29. garf_executors-1.0.2/garf_executors/execution_context.py +25 -0
  30. garf_executors-1.0.2/garf_executors/executor.py +25 -0
  31. garf_executors-1.0.2/garf_executors/fetchers.py +25 -0
  32. garf_executors-1.0.2/garf_executors/sql_executor.py +25 -0
  33. garf_executors-1.0.2/garf_executors/telemetry.py +25 -0
  34. garf_executors-1.0.2/garf_executors/workflow.py +25 -0
  35. {garf_executors-0.2.2 → garf_executors-1.0.2}/garf_executors.egg-info/PKG-INFO +15 -6
  36. {garf_executors-0.2.2 → garf_executors-1.0.2}/garf_executors.egg-info/SOURCES.txt +21 -3
  37. garf_executors-1.0.2/garf_executors.egg-info/entry_points.txt +2 -0
  38. {garf_executors-0.2.2 → garf_executors-1.0.2}/garf_executors.egg-info/requires.txt +7 -3
  39. {garf_executors-0.2.2 → garf_executors-1.0.2}/garf_executors.egg-info/top_level.txt +1 -0
  40. {garf_executors-0.2.2 → garf_executors-1.0.2}/pyproject.toml +13 -5
  41. garf_executors-0.2.2/garf_executors.egg-info/entry_points.txt +0 -2
  42. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/entrypoints/__init__.py +0 -0
  43. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/entrypoints/utils.py +0 -0
  44. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/exceptions.py +0 -0
  45. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/garf_pb2.py +0 -0
  46. {garf_executors-0.2.2/garf_executors → garf_executors-1.0.2/garf/executors}/garf_pb2_grpc.py +0 -0
  47. {garf_executors-0.2.2 → garf_executors-1.0.2}/garf_executors.egg-info/dependency_links.txt +0 -0
  48. {garf_executors-0.2.2 → garf_executors-1.0.2}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: garf-executors
3
- Version: 0.2.2
3
+ Version: 1.0.2
4
4
  Summary: Executes queries against API and writes data to local/remote storage.
5
5
  Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
6
6
  License: Apache 2.0
@@ -17,8 +17,8 @@ Classifier: Operating System :: OS Independent
17
17
  Classifier: License :: OSI Approved :: Apache Software License
18
18
  Requires-Python: >=3.9
19
19
  Description-Content-Type: text/markdown
20
- Requires-Dist: garf-core
21
- Requires-Dist: garf-io
20
+ Requires-Dist: garf-core>=1.0.0
21
+ Requires-Dist: garf-io>=1.0.0
22
22
  Requires-Dist: pyyaml
23
23
  Requires-Dist: pydantic
24
24
  Requires-Dist: opentelemetry-api
@@ -28,15 +28,18 @@ Provides-Extra: bq
28
28
  Requires-Dist: garf-io[bq]; extra == "bq"
29
29
  Requires-Dist: pandas; extra == "bq"
30
30
  Requires-Dist: google-cloud-logging; extra == "bq"
31
+ Requires-Dist: smart_open[gcs]; extra == "bq"
31
32
  Provides-Extra: sql
32
33
  Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
33
34
  Requires-Dist: pandas; extra == "sql"
35
+ Provides-Extra: gcp
36
+ Requires-Dist: opentelemetry-exporter-gcp-trace; extra == "gcp"
34
37
  Provides-Extra: server
35
38
  Requires-Dist: fastapi[standard]; extra == "server"
36
39
  Requires-Dist: opentelemetry-instrumentation-fastapi; extra == "server"
37
40
  Requires-Dist: typer; extra == "server"
38
41
  Provides-Extra: all
39
- Requires-Dist: garf-executors[bq,server,sql]; extra == "all"
42
+ Requires-Dist: garf-executors[bq,gcp,server,sql]; extra == "all"
40
43
 
41
44
  # `garf-executors` - One stop-shop for interacting with Reporting APIs.
42
45
 
@@ -65,8 +68,14 @@ garf <QUERIES> --source <API_SOURCE> \
65
68
  where
66
69
 
67
70
  * `<QUERIES>`- local or remote path(s) to files with queries.
68
- * `<API_SOURCE>`- type of API to use. Based on that the appropriate report fetcher will be initialized.
69
- * `<OUTPUT_TYPE>` - output supported by [`garf-io` library](../garf_io/README.md).
71
+ * `source` - type of API to use. Based on that, the appropriate report fetcher will be initialized. Explore supported APIs [here](https://google.github.io/garf/fetchers/overview/).
72
+ * `output` - output supported by [`garf-io` library](https://google.github.io/garf/usage/writers/).
70
73
 
71
74
  If your report fetcher requires additional parameters, you can pass them via key-value pairs under the `--source.` argument, e.g. `--source.regionCode='US'` - to get data only from *US*.
72
75
  > Concrete `--source` parameters are dependent on a particular report fetcher and should be looked up in a documentation for this fetcher.
76
+
77
+ ## Documentation
78
+
79
+ Explore the full documentation for working with `garf-executors`:
80
+
81
+ * [Documentation](https://google.github.io/garf/usage/executors/)
@@ -25,8 +25,14 @@ garf <QUERIES> --source <API_SOURCE> \
25
25
  where
26
26
 
27
27
  * `<QUERIES>`- local or remote path(s) to files with queries.
28
- * `<API_SOURCE>`- type of API to use. Based on that the appropriate report fetcher will be initialized.
29
- * `<OUTPUT_TYPE>` - output supported by [`garf-io` library](../garf_io/README.md).
28
+ * `source` - type of API to use. Based on that, the appropriate report fetcher will be initialized. Explore supported APIs [here](https://google.github.io/garf/fetchers/overview/).
29
+ * `output` - output supported by [`garf-io` library](https://google.github.io/garf/usage/writers/).
30
30
 
31
31
  If your report fetcher requires additional parameters, you can pass them via key-value pairs under the `--source.` argument, e.g. `--source.regionCode='US'` - to get data only from *US*.
32
32
  > Concrete `--source` parameters are dependent on a particular report fetcher and should be looked up in a documentation for this fetcher.
33
+
34
+ ## Documentation
35
+
36
+ Explore the full documentation for working with `garf-executors`:
37
+
38
+ * [Documentation](https://google.github.io/garf/usage/executors/)
@@ -17,9 +17,9 @@ from __future__ import annotations
17
17
 
18
18
  import importlib
19
19
 
20
- from garf_executors import executor, fetchers
21
- from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor
22
- from garf_executors.telemetry import tracer
20
+ from garf.executors import executor, fetchers
21
+ from garf.executors.api_executor import ApiExecutionContext, ApiQueryExecutor
22
+ from garf.executors.telemetry import tracer
23
23
 
24
24
 
25
25
  @tracer.start_as_current_span('setup_executor')
@@ -31,10 +31,10 @@ def setup_executor(
31
31
  ) -> type[executor.Executor]:
32
32
  """Initializes executors based on a source and parameters."""
33
33
  if source == 'bq':
34
- bq_executor = importlib.import_module('garf_executors.bq_executor')
34
+ bq_executor = importlib.import_module('garf.executors.bq_executor')
35
35
  query_executor = bq_executor.BigQueryExecutor(**fetcher_parameters)
36
36
  elif source == 'sqldb':
37
- sql_executor = importlib.import_module('garf_executors.sql_executor')
37
+ sql_executor = importlib.import_module('garf.executors.sql_executor')
38
38
  query_executor = (
39
39
  sql_executor.SqlAlchemyQueryExecutor.from_connection_string(
40
40
  fetcher_parameters.get('connection_string')
@@ -57,4 +57,4 @@ __all__ = [
57
57
  'ApiExecutionContext',
58
58
  ]
59
59
 
60
- __version__ = '0.2.2'
60
+ __version__ = '1.0.2'
@@ -22,12 +22,17 @@ from __future__ import annotations
22
22
 
23
23
  import logging
24
24
 
25
- from garf_core import report_fetcher
25
+ from garf.core import report_fetcher
26
+ from garf.executors import (
27
+ exceptions,
28
+ execution_context,
29
+ executor,
30
+ fetchers,
31
+ query_processor,
32
+ )
33
+ from garf.executors.telemetry import tracer
26
34
  from opentelemetry import trace
27
35
 
28
- from garf_executors import exceptions, execution_context, executor, fetchers
29
- from garf_executors.telemetry import tracer
30
-
31
36
  logger = logging.getLogger(__name__)
32
37
 
33
38
 
@@ -95,6 +100,7 @@ class ApiQueryExecutor(executor.Executor):
95
100
  Raises:
96
101
  GarfExecutorError: When failed to execute query.
97
102
  """
103
+ context = query_processor.process_gquery(context)
98
104
  span = trace.get_current_span()
99
105
  span.set_attribute('fetcher.class', self.fetcher.__class__.__name__)
100
106
  span.set_attribute(
@@ -28,13 +28,12 @@ except ImportError as e:
28
28
 
29
29
  import logging
30
30
 
31
- from garf_core import query_editor, report
31
+ from garf.core import query_editor, report
32
+ from garf.executors import exceptions, execution_context, executor
33
+ from garf.executors.telemetry import tracer
32
34
  from google.cloud import exceptions as google_cloud_exceptions
33
35
  from opentelemetry import trace
34
36
 
35
- from garf_executors import exceptions, execution_context, executor
36
- from garf_executors.telemetry import tracer
37
-
38
37
  logger = logging.getLogger(__name__)
39
38
 
40
39
 
@@ -55,6 +54,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
55
54
  self,
56
55
  project_id: str | None = os.getenv('GOOGLE_CLOUD_PROJECT'),
57
56
  location: str | None = None,
57
+ **kwargs: str,
58
58
  ) -> None:
59
59
  """Initializes BigQueryExecutor.
60
60
 
@@ -24,8 +24,7 @@ import pathlib
24
24
  import pydantic
25
25
  import smart_open
26
26
  import yaml
27
-
28
- from garf_executors.execution_context import ExecutionContext
27
+ from garf.executors.execution_context import ExecutionContext
29
28
 
30
29
 
31
30
  class Config(pydantic.BaseModel):
@@ -21,17 +21,17 @@ from __future__ import annotations
21
21
 
22
22
  import argparse
23
23
  import logging
24
+ import pathlib
24
25
  import sys
25
26
 
26
- from garf_io import reader
27
+ import garf.executors
28
+ from garf.executors import config, exceptions, workflow
29
+ from garf.executors.entrypoints import utils
30
+ from garf.executors.entrypoints.tracer import initialize_tracer
31
+ from garf.executors.telemetry import tracer
32
+ from garf.io import reader
27
33
  from opentelemetry import trace
28
34
 
29
- import garf_executors
30
- from garf_executors import config, exceptions, workflow
31
- from garf_executors.entrypoints import utils
32
- from garf_executors.entrypoints.tracer import initialize_tracer
33
- from garf_executors.telemetry import tracer
34
-
35
35
  initialize_tracer()
36
36
 
37
37
 
@@ -76,17 +76,18 @@ def main():
76
76
  command_args = ' '.join(sys.argv[1:])
77
77
  span.set_attribute('cli.command', f'garf {command_args}')
78
78
  if args.version:
79
- print(garf_executors.__version__)
79
+ print(garf.executors.__version__)
80
80
  sys.exit()
81
81
  logger = utils.init_logging(
82
82
  loglevel=args.loglevel.upper(), logger_type=args.logger, name=args.log_name
83
83
  )
84
84
  reader_client = reader.create_reader(args.input)
85
85
  if workflow_file := args.workflow:
86
+ wf_parent = pathlib.Path.cwd() / pathlib.Path(workflow_file).parent
86
87
  execution_workflow = workflow.Workflow.from_file(workflow_file)
87
88
  for i, step in enumerate(execution_workflow.steps, 1):
88
89
  with tracer.start_as_current_span(f'{i}-{step.fetcher}'):
89
- query_executor = garf_executors.setup_executor(
90
+ query_executor = garf.executors.setup_executor(
90
91
  source=step.fetcher,
91
92
  fetcher_parameters=step.fetcher_parameters,
92
93
  enable_cache=args.enable_cache,
@@ -99,8 +100,20 @@ def main():
99
100
  'Please provide one or more queries to run'
100
101
  )
101
102
  for query in queries:
102
- if isinstance(query, garf_executors.workflow.QueryPath):
103
- batch[query.path] = reader_client.read(query.path)
103
+ if isinstance(query, garf.executors.workflow.QueryPath):
104
+ query_path = wf_parent / pathlib.Path(query.path)
105
+ if not query_path.exists():
106
+ raise workflow.GarfWorkflowError(f'Query: {query_path} not found')
107
+ batch[query.path] = reader_client.read(query_path)
108
+ elif isinstance(query, garf.executors.workflow.QueryFolder):
109
+ query_path = wf_parent / pathlib.Path(query.folder)
110
+ if not query_path.exists():
111
+ raise workflow.GarfWorkflowError(
112
+ f'Folder: {query_path} not found'
113
+ )
114
+ for p in query_path.rglob('*'):
115
+ if p.suffix == '.sql':
116
+ batch[p.stem] = reader_client.read(p)
104
117
  else:
105
118
  batch[query.query.title] = query.query.text
106
119
  query_executor.execute_batch(
@@ -130,7 +143,7 @@ def main():
130
143
  for output in outputs:
131
144
  writer_parameters.update(extra_parameters.get(output))
132
145
 
133
- context = garf_executors.api_executor.ApiExecutionContext(
146
+ context = garf.executors.api_executor.ApiExecutionContext(
134
147
  query_parameters={
135
148
  'macro': extra_parameters.get('macro'),
136
149
  'template': extra_parameters.get('template'),
@@ -139,7 +152,7 @@ def main():
139
152
  writer_parameters=writer_parameters,
140
153
  fetcher_parameters=source_parameters,
141
154
  )
142
- query_executor = garf_executors.setup_executor(
155
+ query_executor = garf.executors.setup_executor(
143
156
  source=args.source,
144
157
  fetcher_parameters=context.fetcher_parameters,
145
158
  enable_cache=args.enable_cache,
@@ -18,21 +18,20 @@ import argparse
18
18
  import logging
19
19
  from concurrent import futures
20
20
 
21
+ import garf.executors
21
22
  import grpc
23
+ from garf.executors import garf_pb2, garf_pb2_grpc
24
+ from garf.executors.entrypoints.tracer import initialize_tracer
22
25
  from google.protobuf.json_format import MessageToDict
23
26
  from grpc_reflection.v1alpha import reflection
24
27
 
25
- import garf_executors
26
- from garf_executors import garf_pb2, garf_pb2_grpc
27
- from garf_executors.entrypoints.tracer import initialize_tracer
28
-
29
28
 
30
29
  class GarfService(garf_pb2_grpc.GarfService):
31
30
  def Execute(self, request, context):
32
- query_executor = garf_executors.setup_executor(
31
+ query_executor = garf.executors.setup_executor(
33
32
  request.source, request.context.fetcher_parameters
34
33
  )
35
- execution_context = garf_executors.execution_context.ExecutionContext(
34
+ execution_context = garf.executors.execution_context.ExecutionContext(
36
35
  **MessageToDict(request.context, preserving_proto_field_name=True)
37
36
  )
38
37
  result = query_executor.execute(
@@ -17,17 +17,16 @@
17
17
  from typing import Optional, Union
18
18
 
19
19
  import fastapi
20
+ import garf.executors
20
21
  import pydantic
21
22
  import typer
22
23
  import uvicorn
23
- from garf_io import reader
24
+ from garf.executors import exceptions
25
+ from garf.executors.entrypoints.tracer import initialize_tracer
26
+ from garf.io import reader
24
27
  from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
25
28
  from typing_extensions import Annotated
26
29
 
27
- import garf_executors
28
- from garf_executors import exceptions
29
- from garf_executors.entrypoints.tracer import initialize_tracer
30
-
31
30
  initialize_tracer()
32
31
  app = fastapi.FastAPI()
33
32
  FastAPIInstrumentor.instrument_app(app)
@@ -49,7 +48,7 @@ class ApiExecutorRequest(pydantic.BaseModel):
49
48
  title: Optional[str] = None
50
49
  query: Optional[str] = None
51
50
  query_path: Optional[Union[str, list[str]]] = None
52
- context: garf_executors.api_executor.ApiExecutionContext
51
+ context: garf.executors.api_executor.ApiExecutionContext
53
52
 
54
53
  @pydantic.model_validator(mode='after')
55
54
  def check_query_specified(self):
@@ -78,18 +77,18 @@ class ApiExecutorResponse(pydantic.BaseModel):
78
77
 
79
78
  @app.get('/api/version')
80
79
  async def version() -> str:
81
- return garf_executors.__version__
80
+ return garf.executors.__version__
82
81
 
83
82
 
84
83
  @app.get('/api/fetchers')
85
84
  async def get_fetchers() -> list[str]:
86
85
  """Shows all available API sources."""
87
- return list(garf_executors.fetchers.find_fetchers())
86
+ return list(garf.executors.fetchers.find_fetchers())
88
87
 
89
88
 
90
89
  @app.post('/api/execute')
91
90
  async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
92
- query_executor = garf_executors.setup_executor(
91
+ query_executor = garf.executors.setup_executor(
93
92
  request.source, request.context.fetcher_parameters
94
93
  )
95
94
  result = query_executor.execute(request.query, request.title, request.context)
@@ -98,7 +97,7 @@ async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
98
97
 
99
98
  @app.post('/api/execute:batch')
100
99
  def execute_batch(request: ApiExecutorRequest) -> ApiExecutorResponse:
101
- query_executor = garf_executors.setup_executor(
100
+ query_executor = garf.executors.setup_executor(
102
101
  request.source, request.context.fetcher_parameters
103
102
  )
104
103
  reader_client = reader.FileReader()
@@ -1,4 +1,4 @@
1
- # Copyright 2025 Google LLC
1
+ # Copyright 2026 Google LLC
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -35,8 +35,23 @@ def initialize_tracer():
35
35
  tracer_provider = TracerProvider(resource=resource)
36
36
 
37
37
  if otel_endpoint := os.getenv('OTEL_EXPORTER_OTLP_ENDPOINT'):
38
- otlp_processor = BatchSpanProcessor(
39
- OTLPSpanExporter(endpoint=otel_endpoint, insecure=True)
40
- )
41
- tracer_provider.add_span_processor(otlp_processor)
38
+ if gcp_project_id := os.getenv('OTEL_EXPORTER_GCP_PROJECT_ID'):
39
+ try:
40
+ from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter
41
+ except ImportError as e:
42
+ raise ImportError(
43
+ 'Please install garf-executors with GCP support '
44
+ '- `pip install garf-executors[gcp]`'
45
+ ) from e
46
+
47
+ cloud_span_processor = BatchSpanProcessor(
48
+ CloudTraceSpanExporter(project_id=gcp_project_id)
49
+ )
50
+ tracer_provider.add_span_processor(cloud_span_processor)
51
+ else:
52
+ otlp_processor = BatchSpanProcessor(
53
+ OTLPSpanExporter(endpoint=otel_endpoint, insecure=True)
54
+ )
55
+ tracer_provider.add_span_processor(otlp_processor)
56
+
42
57
  trace.set_tracer_provider(tracer_provider)
@@ -20,13 +20,14 @@ from __future__ import annotations
20
20
 
21
21
  import os
22
22
  import pathlib
23
+ from typing import Any
23
24
 
24
25
  import pydantic
25
26
  import smart_open
26
27
  import yaml
27
- from garf_core import query_editor
28
- from garf_io import writer
29
- from garf_io.writers import abs_writer
28
+ from garf.core import query_editor
29
+ from garf.io import writer
30
+ from garf.io.writers import abs_writer
30
31
 
31
32
 
32
33
  class ExecutionContext(pydantic.BaseModel):
@@ -42,8 +43,8 @@ class ExecutionContext(pydantic.BaseModel):
42
43
  query_parameters: query_editor.GarfQueryParameters | None = pydantic.Field(
43
44
  default_factory=dict
44
45
  )
45
- fetcher_parameters: dict[str, str | bool | int | list[str | int]] | None = (
46
- pydantic.Field(default_factory=dict)
46
+ fetcher_parameters: dict[str, Any] | None = pydantic.Field(
47
+ default_factory=dict
47
48
  )
48
49
  writer: str | list[str] | None = None
49
50
  writer_parameters: dict[str, str] | None = pydantic.Field(
@@ -18,12 +18,11 @@ import asyncio
18
18
  import inspect
19
19
  from typing import Optional
20
20
 
21
- from garf_core import report_fetcher
21
+ from garf.core import report_fetcher
22
+ from garf.executors import execution_context, query_processor
23
+ from garf.executors.telemetry import tracer
22
24
  from opentelemetry import trace
23
25
 
24
- from garf_executors import execution_context
25
- from garf_executors.telemetry import tracer
26
-
27
26
 
28
27
  class Executor:
29
28
  """Defines common functionality between executors."""
@@ -113,6 +112,7 @@ def _handle_processors(
113
112
  processors: dict[str, report_fetcher.Processor],
114
113
  context: execution_context.ExecutionContext,
115
114
  ) -> None:
115
+ context = query_processor.process_gquery(context)
116
116
  for k, processor in processors.items():
117
117
  processor_signature = list(inspect.signature(processor).parameters.keys())
118
118
  if k in context.fetcher_parameters:
@@ -17,11 +17,10 @@ import logging
17
17
  import sys
18
18
  from importlib.metadata import entry_points
19
19
 
20
- from garf_core import report_fetcher
20
+ from garf.core import report_fetcher
21
+ from garf.executors.telemetry import tracer
21
22
 
22
- from garf_executors.telemetry import tracer
23
-
24
- logger = logging.getLogger(name='garf_executors.fetchers')
23
+ logger = logging.getLogger(name='garf.executors.fetchers')
25
24
 
26
25
 
27
26
  @tracer.start_as_current_span('find_fetchers')
@@ -0,0 +1,61 @@
1
+ # Copyright 2026 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import contextlib
16
+
17
+ from garf.core import query_editor
18
+ from garf.executors import exceptions, execution_context
19
+
20
+
21
+ def process_gquery(
22
+ context: execution_context.ExecutionContext,
23
+ ) -> execution_context.ExecutionContext:
24
+ for k, v in context.fetcher_parameters.items():
25
+ if isinstance(v, str) and v.startswith('gquery'):
26
+ no_writer_context = context.model_copy(update={'writer': None})
27
+ try:
28
+ _, alias, query = v.split(':', maxsplit=3)
29
+ except ValueError:
30
+ raise exceptions.GarfExecutorError(
31
+ f'Incorrect gquery format, should be gquery:alias:query, got {v}'
32
+ )
33
+ if alias == 'sqldb':
34
+ from garf.executors import sql_executor
35
+
36
+ gquery_executor = sql_executor.SqlAlchemyQueryExecutor(
37
+ **context.fetcher_parameters
38
+ )
39
+ elif alias == 'bq':
40
+ from garf.executors import bq_executor
41
+
42
+ gquery_executor = bq_executor.BigQueryExecutor(
43
+ **context.fetcher_parameters
44
+ )
45
+ else:
46
+ raise exceptions.GarfExecutorError(
47
+ f'Unsupported alias for gquery: {alias}'
48
+ )
49
+ with contextlib.suppress(query_editor.GarfResourceError):
50
+ query_spec = query_editor.QuerySpecification(
51
+ text=query, args=context.query_parameters
52
+ ).generate()
53
+ if len(columns := [c for c in query_spec.column_names if c != '_']) > 1:
54
+ raise exceptions.GarfExecutorError(
55
+ f'Multiple columns in gquery: {columns}'
56
+ )
57
+ res = gquery_executor.execute(
58
+ query=query, title='gquery', context=no_writer_context
59
+ )
60
+ context.fetcher_parameters[k] = res.to_list(row_type='scalar')
61
+ return context
@@ -28,12 +28,11 @@ import re
28
28
  import uuid
29
29
 
30
30
  import pandas as pd
31
- from garf_core import query_editor, report
31
+ from garf.core import query_editor, report
32
+ from garf.executors import exceptions, execution_context, executor
33
+ from garf.executors.telemetry import tracer
32
34
  from opentelemetry import trace
33
35
 
34
- from garf_executors import exceptions, execution_context, executor
35
- from garf_executors.telemetry import tracer
36
-
37
36
  logger = logging.getLogger(__name__)
38
37
 
39
38
 
@@ -50,24 +49,26 @@ class SqlAlchemyQueryExecutor(
50
49
  engine: Initialized Engine object to operated on a given database.
51
50
  """
52
51
 
53
- def __init__(self, engine: sqlalchemy.engine.base.Engine) -> None:
52
+ def __init__(
53
+ self, engine: sqlalchemy.engine.base.Engine | None = None, **kwargs: str
54
+ ) -> None:
54
55
  """Initializes executor with a given engine.
55
56
 
56
57
  Args:
57
58
  engine: Initialized Engine object to operated on a given database.
58
59
  """
59
- self.engine = engine
60
+ self.engine = engine or sqlalchemy.create_engine('sqlite://')
60
61
  super().__init__()
61
62
 
62
63
  @classmethod
63
64
  def from_connection_string(
64
- cls, connection_string: str
65
+ cls, connection_string: str | None
65
66
  ) -> SqlAlchemyQueryExecutor:
66
67
  """Creates executor from SqlAlchemy connection string.
67
68
 
68
69
  https://docs.sqlalchemy.org/en/20/core/engines.html
69
70
  """
70
- engine = sqlalchemy.create_engine(connection_string)
71
+ engine = sqlalchemy.create_engine(connection_string or 'sqlite://')
71
72
  return cls(engine)
72
73
 
73
74
  @tracer.start_as_current_span('sql.execute')
@@ -16,5 +16,5 @@
16
16
  from opentelemetry import trace
17
17
 
18
18
  tracer = trace.get_tracer(
19
- instrumenting_module_name='garf_executors',
19
+ instrumenting_module_name='garf.executors',
20
20
  )
@@ -19,8 +19,18 @@ import pathlib
19
19
  import pydantic
20
20
  import smart_open
21
21
  import yaml
22
+ from garf.executors import exceptions
23
+ from garf.executors.execution_context import ExecutionContext
22
24
 
23
- from garf_executors.execution_context import ExecutionContext
25
+
26
+ class GarfWorkflowError(exceptions.GarfExecutorError):
27
+ """Workflow specific exception."""
28
+
29
+
30
+ class QueryFolder(pydantic.BaseModel):
31
+ """Path to folder with queries."""
32
+
33
+ folder: str
24
34
 
25
35
 
26
36
  class QueryPath(pydantic.BaseModel):
@@ -58,8 +68,8 @@ class ExecutionStep(ExecutionContext):
58
68
  """
59
69
 
60
70
  fetcher: str | None = None
61
- alias: str | None = None
62
- queries: list[QueryPath | QueryDefinition] | None = None
71
+ alias: str | None = pydantic.Field(default=None, pattern=r'^[a-zA-Z0-9_]+$')
72
+ queries: list[QueryPath | QueryDefinition | QueryFolder] | None = None
63
73
 
64
74
  @property
65
75
  def context(self) -> ExecutionContext:
@@ -85,7 +95,10 @@ class Workflow(pydantic.BaseModel):
85
95
  """Builds workflow from local or remote yaml file."""
86
96
  with smart_open.open(path, 'r', encoding='utf-8') as f:
87
97
  data = yaml.safe_load(f)
88
- return Workflow(steps=data.get('steps'))
98
+ try:
99
+ return Workflow(**data)
100
+ except pydantic.ValidationError as e:
101
+ raise GarfWorkflowError(f'Incorrect workflow:\n {e}') from e
89
102
 
90
103
  def save(self, path: str | pathlib.Path | os.PathLike[str]) -> str:
91
104
  """Saves workflow to local or remote yaml file."""
@@ -0,0 +1,25 @@
1
+ # Copyright 2026 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ import warnings
17
+
18
+ from garf.executors import *
19
+
20
+ warnings.warn(
21
+ "The 'garf_executors' namespace is deprecated. "
22
+ "Please use 'garf.executors' instead.",
23
+ DeprecationWarning,
24
+ stacklevel=2,
25
+ )
@@ -0,0 +1,25 @@
1
+ # Copyright 2026 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ import warnings
17
+
18
+ from garf.executors.api_executor import *
19
+
20
+ warnings.warn(
21
+ "The 'garf_executors' namespace is deprecated. "
22
+ "Please use 'garf.executors' instead.",
23
+ DeprecationWarning,
24
+ stacklevel=2,
25
+ )