garf-executors 0.0.6__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,20 +11,50 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Defines simplified import of executors.
-
-Instead of importing `garf_executors.api_executor.ApiQueryExecutor`
-import like this `garf_executors.ApiQueryExecutor`
-"""
+"""Executors to fetch data from various APIs."""
 
 from __future__ import annotations
 
-from garf_executors.api_executor import ApiQueryExecutor
-from garf_executors.fetchers import FETCHERS
+import importlib
+
+from garf_executors import executor, fetchers
+from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor
+from garf_executors.telemetry import tracer
+
+
+@tracer.start_as_current_span('setup_executor')
+def setup_executor(
+    source: str,
+    fetcher_parameters: dict[str, str | int | bool],
+    enable_cache: bool = False,
+    cache_ttl_seconds: int = 3600,
+) -> type[executor.Executor]:
+  """Initializes executors based on a source and parameters."""
+  if source == 'bq':
+    bq_executor = importlib.import_module('garf_executors.bq_executor')
+    query_executor = bq_executor.BigQueryExecutor(**fetcher_parameters)
+  elif source == 'sqldb':
+    sql_executor = importlib.import_module('garf_executors.sql_executor')
+    query_executor = (
+        sql_executor.SqlAlchemyQueryExecutor.from_connection_string(
+            fetcher_parameters.get('connection_string')
+        )
+    )
+  else:
+    concrete_api_fetcher = fetchers.get_report_fetcher(source)
+    query_executor = ApiQueryExecutor(
+        concrete_api_fetcher(
+            **fetcher_parameters,
+            enable_cache=enable_cache,
+            cache_ttl_seconds=cache_ttl_seconds,
+        )
+    )
+  return query_executor
+
 
 __all__ = [
-    'FETCHERS',
     'ApiQueryExecutor',
+    'ApiExecutionContext',
 ]
 
-__version__ = '0.0.6'
+__version__ = '0.1.4'
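
The removed `FETCHERS` registry gives way to a `setup_executor` factory that dispatches on a source alias: `bq` and `sqldb` load their executors lazily via `importlib`, and everything else resolves through `fetchers.get_report_fetcher`. A minimal usage sketch, assuming a hypothetical `youtube` fetcher alias is installed:

```python
import garf_executors

# Resolves the alias to a fetcher and wraps it in ApiQueryExecutor;
# 'youtube' is a hypothetical alias, not part of this package.
query_executor = garf_executors.setup_executor(
    source='youtube',
    fetcher_parameters={},
    enable_cache=True,          # forwarded to the concrete fetcher
    cache_ttl_seconds=3600,
)
```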
@@ -20,50 +20,25 @@ GarfReport and saving it to local/remote storage.
 
 from __future__ import annotations
 
+import asyncio
 import logging
 
-import pydantic
+from garf_core import report_fetcher
+from opentelemetry import trace
 
-from garf_core import query_editor, report_fetcher
-from garf_executors import exceptions
-from garf_io import writer
-from garf_io.writers import abs_writer
+from garf_executors import exceptions, execution_context, executor, fetchers
+from garf_executors.telemetry import tracer
 
 logger = logging.getLogger(__name__)
 
 
-class ApiExecutionContext(pydantic.BaseModel):
-  """Common context for executing one or more queries.
+class ApiExecutionContext(execution_context.ExecutionContext):
+  """Common context for executing one or more queries."""
 
-  Attributes:
-    query_parameters: Parameters to dynamically change query text.
-    fetcher_parameters: Parameters to specify fetching setup.
-    writer: Type of writer to use.
-    writer_parameters: Optional parameters to setup writer.
-  """
-
-  query_parameters: query_editor.GarfQueryParameters | None = None
-  fetcher_parameters: dict[str, str] | None = None
   writer: str = 'console'
-  writer_parameters: dict[str, str] | None = None
 
-  def model_post_init(self, __context__) -> None:
-    if self.fetcher_parameters is None:
-      self.fetcher_parameters = {}
-    if self.writer_parameters is None:
-      self.writer_parameters = {}
 
-  @property
-  def writer_client(self) -> abs_writer.AbsWriter:
-    writer_client = writer.create_writer(self.writer, **self.writer_parameters)
-    if self.writer == 'bq':
-      _ = writer_client.create_or_get_dataset()
-    if self.writer == 'sheet':
-      writer_client.init_client()
-    return writer_client
-
-
-class ApiQueryExecutor:
+class ApiQueryExecutor(executor.Executor):
   """Gets data from API and writes them to local/remote storage.
 
   Attributes:
@@ -78,23 +53,22 @@ class ApiQueryExecutor:
     """
     self.fetcher = fetcher
 
-  async def aexecute(
-      self, query: str, context: ApiExecutionContext, **kwargs: str
-  ) -> None:
-    """Reads query, extract results and stores them in a specified location.
-
-    Args:
-      query: Location of the query.
-      context: Query execution context.
-    """
-    self.execute(query, context, **kwargs)
+  @classmethod
+  def from_fetcher_alias(
+      cls, source: str, fetcher_parameters: dict[str, str] | None = None
+  ) -> ApiQueryExecutor:
+    if not fetcher_parameters:
+      fetcher_parameters = {}
+    concrete_api_fetcher = fetchers.get_report_fetcher(source)
+    return ApiQueryExecutor(concrete_api_fetcher(**fetcher_parameters))
 
+  @tracer.start_as_current_span('api.execute')
   def execute(
       self,
       query: str,
       title: str,
       context: ApiExecutionContext,
-  ) -> None:
+  ) -> str:
     """Reads query, extract results and stores them in a specified location.
 
     Args:
@@ -102,10 +76,18 @@ class ApiQueryExecutor:
       title: Name of the query.
       context: Query execution context.
 
+    Returns:
+      Result of writing the report.
+
     Raises:
       GarfExecutorError: When failed to execute query.
     """
+    span = trace.get_current_span()
+    span.set_attribute('fetcher', self.fetcher.__class__.__name__)
+    span.set_attribute('api_client', self.fetcher.api_client.__class__.__name__)
     try:
+      span.set_attribute('query_title', title)
+      span.set_attribute('query_text', query)
       logger.debug('starting query %s', query)
       results = self.fetcher.fetch(
           query_specification=query,
@@ -118,13 +100,14 @@
           title,
           type(writer_client),
       )
-      writer_client.write(results, title)
+      result = writer_client.write(results, title)
       logger.debug(
           'Finish writing data for query %s via %s writer',
           title,
           type(writer_client),
       )
       logger.info('%s executed successfully', title)
+      return result
     except Exception as e:
       logger.error('%s generated an exception: %s', title, str(e))
       raise exceptions.GarfExecutorError(
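
With the `aexecute` wrapper removed, `from_fetcher_alias` and the string-returning `execute` are the main entry points. A minimal sketch, assuming a hypothetical `youtube` alias whose API understands the query below:

```python
from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor

# 'youtube' and the query text are hypothetical; execute() now returns
# the writer's result instead of None.
query_executor = ApiQueryExecutor.from_fetcher_alias('youtube')
context = ApiExecutionContext(writer='console')
result = query_executor.execute('SELECT title FROM videos', 'videos', context)
```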
@@ -11,10 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Module for executing queries in BigQuery."""
+"""Executes queries in BigQuery."""
 
 from __future__ import annotations
 
+import os
+
 try:
   from google.cloud import bigquery  # type: ignore
 except ImportError as e:
@@ -25,19 +27,20 @@ except ImportError as e:
 
 import logging
 
-import pandas as pd
+from garf_core import query_editor, report
 from google.cloud import exceptions as google_cloud_exceptions
 
-from garf_core import query_editor
+from garf_executors import exceptions, execution_context, executor
+from garf_executors.telemetry import tracer
 
 logger = logging.getLogger(__name__)
 
 
-class BigQueryExecutorError(Exception):
-  """Error when executor fails to run query."""
+class BigQueryExecutorError(exceptions.GarfExecutorError):
+  """Error when BigQueryExecutor fails to run query."""
 
 
-class BigQueryExecutor(query_editor.TemplateProcessorMixin):
+class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
   """Handles query execution in BigQuery.
 
   Attributes:
@@ -46,13 +49,22 @@ class BigQueryExecutor(query_editor.TemplateProcessorMixin):
     client: BigQuery client.
   """
 
-  def __init__(self, project_id: str, location: str | None = None) -> None:
+  def __init__(
+      self,
+      project_id: str | None = os.getenv('GOOGLE_CLOUD_PROJECT'),
+      location: str | None = None,
+  ) -> None:
     """Initializes BigQueryExecutor.
 
     Args:
      project_id: Google Cloud project id.
      location: BigQuery dataset location.
    """
+    if not project_id:
+      raise BigQueryExecutorError(
+          'project_id is required. Either provide it as project_id parameter '
+          'or GOOGLE_CLOUD_PROJECT env variable.'
+      )
     self.project_id = project_id
     self.location = location
 
@@ -61,30 +73,55 @@ class BigQueryExecutor(query_editor.TemplateProcessorMixin):
     """Instantiates bigquery client."""
     return bigquery.Client(self.project_id)
 
+  @tracer.start_as_current_span('bq.execute')
   def execute(
-      self, script_name: str, query_text: str, params: dict | None = None
-  ) -> pd.DataFrame:
+      self,
+      query: str,
+      title: str,
+      context: execution_context.ExecutionContext = (
+          execution_context.ExecutionContext()
+      ),
+  ) -> report.GarfReport:
     """Executes query in BigQuery.
 
     Args:
-      script_name: Script identifier.
-      query_text: Query to be executed.
-      params: Optional parameters to be replaced in query text.
+      query: Location of the query.
+      title: Name of the query.
+      context: Query execution context.
 
     Returns:
-      DataFrame if query returns some data otherwise empty DataFrame.
+      Report with data if query returns some data otherwise empty Report.
     """
-    query_text = self.replace_params_template(query_text, params)
+    query_text = self.replace_params_template(query, context.query_parameters)
+    self.create_datasets(context.query_parameters.macro)
     job = self.client.query(query_text)
     try:
       result = job.result()
-      logger.debug('%s launched successfully', script_name)
+      logger.debug('%s launched successfully', title)
       if result.total_rows:
-        return result.to_dataframe()
-      return pd.DataFrame()
+        results = report.GarfReport.from_pandas(result.to_dataframe())
+      else:
+        results = report.GarfReport()
+      if context.writer and results:
+        writer_client = context.writer_client
+        logger.debug(
+            'Start writing data for query %s via %s writer',
+            title,
+            type(writer_client),
+        )
+        writing_result = writer_client.write(results, title)
+        logger.debug(
+            'Finish writing data for query %s via %s writer',
+            title,
+            type(writer_client),
+        )
+        logger.info('%s executed successfully', title)
+        return writing_result
+      return results
     except google_cloud_exceptions.GoogleCloudError as e:
       raise BigQueryExecutorError(e) from e
 
+  @tracer.start_as_current_span('bq.create_datasets')
   def create_datasets(self, macros: dict | None) -> None:
     """Creates datasets in BQ based on values in a dict.
 
@@ -92,7 +129,7 @@ class BigQueryExecutor(query_editor.TemplateProcessorMixin):
     are treated as dataset names.
 
     Args:
-      macros: Mapping containing data for query execution.
+      macros: Mapping containing data for query execution.
     """
     if macros and (datasets := extract_datasets(macros)):
       for dataset in datasets:
@@ -103,7 +140,7 @@ class BigQueryExecutor(query_editor.TemplateProcessorMixin):
         bq_dataset = bigquery.Dataset(dataset_id)
         bq_dataset.location = self.location
         self.client.create_dataset(bq_dataset, timeout=30)
-        logger.debug('Created new dataset %s', dataset_id)
+        logger.info('Created new dataset %s', dataset_id)
 
 
 def extract_datasets(macros: dict | None) -> list[str]:
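
`BigQueryExecutor.execute` now mirrors the `ApiQueryExecutor` signature (`query`, `title`, `context`) and returns a `GarfReport` (or the writing result when a writer is configured) instead of a DataFrame. A minimal sketch with a hypothetical project id, assuming the default `ExecutionContext` carries empty query parameters:

```python
from garf_executors import execution_context
from garf_executors.bq_executor import BigQueryExecutor

# 'my-project' is hypothetical; with no project_id argument the executor
# falls back to the GOOGLE_CLOUD_PROJECT environment variable.
bq_executor = BigQueryExecutor(project_id='my-project')
result = bq_executor.execute(
    'SELECT 1 AS one',
    'smoke_test',
    execution_context.ExecutionContext(),
)
```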
@@ -0,0 +1,51 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=C0330, g-bad-import-order, g-multiple-import
+
+"""Stores mapping between API aliases and their execution context."""
+
+from __future__ import annotations
+
+import os
+import pathlib
+
+import pydantic
+import smart_open
+import yaml
+
+from garf_executors.execution_context import ExecutionContext
+
+
+class Config(pydantic.BaseModel):
+  """Stores necessary parameters for one or multiple API sources.
+
+  Attributes:
+    sources: Mapping between API source alias and execution parameters.
+  """
+
+  sources: dict[str, ExecutionContext]
+
+  @classmethod
+  def from_file(cls, path: str | pathlib.Path | os.PathLike[str]) -> Config:
+    """Builds config from local or remote yaml file."""
+    with smart_open.open(path, 'r', encoding='utf-8') as f:
+      data = yaml.safe_load(f)
+    return Config(sources=data)
+
+  def save(self, path: str | pathlib.Path | os.PathLike[str]) -> str:
+    """Saves config to local or remote yaml file."""
+    with smart_open.open(path, 'w', encoding='utf-8') as f:
+      yaml.dump(self.model_dump().get('sources'), f, encoding='utf-8')
+    return f'Config is saved to {str(path)}'
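
The new `Config` model ties each source alias to an `ExecutionContext` and round-trips through YAML, locally or remotely via `smart_open`. A minimal sketch; the file contents and the `bq` block are hypothetical:

```python
from garf_executors.config import Config

# Hypothetical garf.yaml, mapping a source alias to its execution context:
#
# bq:
#   fetcher_parameters:
#     project_id: my-project
#
config = Config.from_file('garf.yaml')  # local path or remote URL
context = config.sources['bq']          # ExecutionContext for the alias
print(config.save('backup.yaml'))       # 'Config is saved to backup.yaml'
```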
@@ -20,24 +20,31 @@ storage.
 from __future__ import annotations
 
 import argparse
+import logging
 import sys
-from concurrent import futures
+
+from garf_io import reader
 
 import garf_executors
-from garf_executors import exceptions
+from garf_executors import config, exceptions
 from garf_executors.entrypoints import utils
-from garf_io import reader
+from garf_executors.entrypoints.tracer import initialize_tracer
+from garf_executors.telemetry import tracer
 
+initialize_tracer()
 
+
+@tracer.start_as_current_span('garf.entrypoints.cli')
 def main():
   parser = argparse.ArgumentParser()
   parser.add_argument('query', nargs='*')
-  parser.add_argument('-c', '--config', dest='garf_config', default=None)
+  parser.add_argument('-c', '--config', dest='config', default=None)
   parser.add_argument('--source', dest='source', default=None)
   parser.add_argument('--output', dest='output', default='console')
   parser.add_argument('--input', dest='input', default='file')
   parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
   parser.add_argument('--logger', dest='logger', default='local')
+  parser.add_argument('--log-name', dest='log_name', default='garf')
   parser.add_argument(
       '--parallel-queries', dest='parallel_queries', action='store_true'
   )
@@ -47,68 +54,78 @@ def main():
   parser.add_argument('--dry-run', dest='dry_run', action='store_true')
   parser.add_argument('-v', '--version', dest='version', action='store_true')
   parser.add_argument(
-      '--parallel-threshold', dest='parallel_threshold', default=None, type=int
+      '--parallel-threshold', dest='parallel_threshold', default=10, type=int
+  )
+  parser.add_argument(
+      '--enable-cache', dest='enable_cache', action='store_true'
+  )
+  parser.add_argument(
+      '--cache-ttl-seconds',
+      dest='cache_ttl_seconds',
+      default=3600,
+      type=int,
   )
   parser.set_defaults(parallel_queries=True)
+  parser.set_defaults(enable_cache=False)
   parser.set_defaults(dry_run=False)
   args, kwargs = parser.parse_known_args()
 
   if args.version:
     print(garf_executors.__version__)
     sys.exit()
-  if not (source := args.source):
-    raise exceptions.GarfExecutorError(
-        f'Select one of available sources: {list(garf_executors.FETCHERS.keys())}'
-    )
-  if not (concrete_api_fetcher := garf_executors.FETCHERS.get(source)):
-    raise exceptions.GarfExecutorError(f'Source {source} is not available.')
-
   logger = utils.init_logging(
-      loglevel=args.loglevel.upper(), logger_type=args.logger
+      loglevel=args.loglevel.upper(), logger_type=args.logger, name=args.log_name
   )
   if not args.query:
     logger.error('Please provide one or more queries to run')
     raise exceptions.GarfExecutorError(
         'Please provide one or more queries to run'
     )
-  config = utils.ConfigBuilder('garf').build(vars(args), kwargs)
-  logger.debug('config: %s', config)
-
-  if config.params:
-    config = utils.initialize_runtime_parameters(config)
-    logger.debug('initialized config: %s', config)
-
-  extra_parameters = utils.ParamsParser(['source']).parse(kwargs)
-  source_parameters = extra_parameters.get('source', {})
   reader_client = reader.create_reader(args.input)
-
-  context = garf_executors.api_executor.ApiExecutionContext(
-      query_parameters=config.params,
-      writer=args.output,
-      writer_parameters=config.writer_params,
-      fetcher_parameters=source_parameters,
-  )
-  query_executor = garf_executors.api_executor.ApiQueryExecutor(
-      concrete_api_fetcher(**source_parameters)
-  )
-  if args.parallel_queries:
-    logger.info('Running queries in parallel')
-    with futures.ThreadPoolExecutor(args.parallel_threshold) as executor:
-      future_to_query = {
-          executor.submit(
-              query_executor.execute,
-              reader_client.read(query),
-              query,
-              context,
-          ): query
-          for query in args.query
-      }
-      for future in futures.as_completed(future_to_query):
-        future.result()
+  if config_file := args.config:
+    execution_config = config.Config.from_file(config_file)
+    if not (context := execution_config.sources.get(args.source)):
+      raise exceptions.GarfExecutorError(
+          f'No execution context found for source {args.source} in {config_file}'
+      )
+    query_executor = garf_executors.setup_executor(
+        source=args.source,
+        fetcher_parameters=context.fetcher_parameters,
+        enable_cache=args.enable_cache,
+        cache_ttl_seconds=args.cache_ttl_seconds,
+    )
+    batch = {query: reader_client.read(query) for query in args.query}
+    query_executor.execute_batch(batch, context, args.parallel_threshold)
   else:
-    logger.info('Running queries sequentially')
-    for query in args.query:
-      query_executor.execute(reader_client.read(query), query, context)
+    extra_parameters = utils.ParamsParser(
+        ['source', args.output, 'macro', 'template']
+    ).parse(kwargs)
+    source_parameters = extra_parameters.get('source', {})
+
+    context = garf_executors.api_executor.ApiExecutionContext(
+        query_parameters={
+            'macro': extra_parameters.get('macro'),
+            'template': extra_parameters.get('template'),
+        },
+        writer=args.output,
+        writer_parameters=extra_parameters.get(args.output),
+        fetcher_parameters=source_parameters,
+    )
+    query_executor = garf_executors.setup_executor(
+        source=args.source,
+        fetcher_parameters=context.fetcher_parameters,
+        enable_cache=args.enable_cache,
+        cache_ttl_seconds=args.cache_ttl_seconds,
+    )
+    if args.parallel_queries:
+      logger.info('Running queries in parallel')
+      batch = {query: reader_client.read(query) for query in args.query}
+      query_executor.execute_batch(batch, context, args.parallel_threshold)
+    else:
+      logger.info('Running queries sequentially')
+      for query in args.query:
+        query_executor.execute(reader_client.read(query), query, context)
+  logging.shutdown()
 
 
 if __name__ == '__main__':
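
Net effect on the CLI: queries can now run either from explicit flags (for example `--source <alias> --output console --enable-cache`, with caching off by default and `--parallel-threshold` defaulting to 10) or from a YAML config passed with `-c`, whose per-source `ExecutionContext` feeds `setup_executor`; in both paths batches go through `execute_batch` rather than a hand-rolled thread pool.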
@@ -14,12 +14,21 @@
 
 """FastAPI endpoint for executing queries."""
 
+from typing import Optional, Union
+
 import fastapi
 import pydantic
 import uvicorn
+from garf_io import reader
+from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
 
 import garf_executors
 from garf_executors import exceptions
+from garf_executors.entrypoints.tracer import initialize_tracer
+
+initialize_tracer()
+app = fastapi.FastAPI()
+FastAPIInstrumentor.instrument_app(app)
 
 
 class ApiExecutorRequest(pydantic.BaseModel):
@@ -27,39 +36,73 @@ class ApiExecutorRequest(pydantic.BaseModel):
 
   Attributes:
     source: Type of API to interact with.
-    query: Query to execute.
     title: Name of the query used as an output for writing.
+    query: Query to execute.
+    query_path: Local or remote path to query.
     context: Execution context.
   """
 
   source: str
-  query: str
-  title: str
+  title: Optional[str] = None
+  query: Optional[str] = None
+  query_path: Optional[Union[str, list[str]]] = None
   context: garf_executors.api_executor.ApiExecutionContext
 
+  @pydantic.model_validator(mode='after')
+  def check_query_specified(self):
+    if not self.query_path and not self.query:
+      raise exceptions.GarfExecutorError(
+          'Missing one of required parameters: query, query_path'
+      )
+    return self
+
+  def model_post_init(self, __context__) -> None:
+    if self.query_path and isinstance(self.query_path, str):
+      self.query = reader.FileReader().read(self.query_path)
+    if not self.title:
+      self.title = str(self.query_path)
+
+
+class ApiExecutorResponse(pydantic.BaseModel):
+  """Response after executing a query.
+
+  Attributes:
+    results: Results of query execution.
+  """
+
+  results: list[str]
+
+
+@app.get('/api/version')
+async def version() -> str:
+  return garf_executors.__version__
 
-router = fastapi.APIRouter(prefix='/api')
 
+@app.get('/api/fetchers')
+async def get_fetchers() -> list[str]:
+  """Shows all available API sources."""
+  return list(garf_executors.fetchers.find_fetchers())
 
-@router.post('/execute')
-async def execute(request: ApiExecutorRequest) -> dict[str, str]:
-  if not (concrete_api_fetcher := garf_executors.FETCHERS.get(request.source)):
-    raise exceptions.GarfExecutorError(
-        f'Source {request.source} is not available.'
-    )
 
-  query_executor = garf_executors.api_executor.ApiQueryExecutor(
-      concrete_api_fetcher(**request.context.fetcher_parameters)
+@app.post('/api/execute')
+async def execute(request: ApiExecutorRequest) -> ApiExecutorResponse:
+  query_executor = garf_executors.setup_executor(
+      request.source, request.context.fetcher_parameters
   )
+  result = query_executor.execute(request.query, request.title, request.context)
+  return ApiExecutorResponse(results=[result])
 
-  query_executor.execute(request.query, request.title, request.context)
 
-  return fastapi.responses.JSONResponse(
-      content=fastapi.encoders.jsonable_encoder({'result': 'success'})
+@app.post('/api/execute:batch')
+def execute_batch(request: ApiExecutorRequest) -> ApiExecutorResponse:
+  query_executor = garf_executors.setup_executor(
+      request.source, request.context.fetcher_parameters
   )
+  reader_client = reader.FileReader()
+  batch = {query: reader_client.read(query) for query in request.query_path}
+  results = query_executor.execute_batch(batch, request.context)
+  return ApiExecutorResponse(results=results)
 
 
 if __name__ == '__main__':
-  app = fastapi.FastAPI()
-  app.include_router(router)
   uvicorn.run(app)
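
A minimal client sketch against the new endpoints, assuming the app runs locally via `uvicorn` on its default port, `requests` is installed, and a hypothetical `youtube` source is available:

```python
import requests

# POST a query inline; /api/execute validates that either 'query' or
# 'query_path' is present and returns an ApiExecutorResponse payload.
response = requests.post(
    'http://127.0.0.1:8000/api/execute',
    json={
        'source': 'youtube',                  # hypothetical alias
        'title': 'videos',
        'query': 'SELECT title FROM videos',  # hypothetical query text
        'context': {'writer': 'console'},
    },
)
print(response.json())  # e.g. {'results': ['...']}
```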