garf-executors 0.0.9-py3-none-any.whl → 0.0.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

garf_executors/__init__.py
@@ -48,4 +48,4 @@ __all__ = [
   'ApiExecutionContext',
 ]
 
-__version__ = '0.0.9'
+__version__ = '0.0.11'

garf_executors/bq_executor.py
@@ -91,6 +91,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
       Report with data if query returns some data otherwise empty Report.
     """
     query_text = self.replace_params_template(query, context.query_parameters)
+    self.create_datasets(context.query_parameters.macro)
     job = self.client.query(query_text)
     try:
       result = job.result()
@@ -136,7 +137,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
         bq_dataset = bigquery.Dataset(dataset_id)
         bq_dataset.location = self.location
         self.client.create_dataset(bq_dataset, timeout=30)
-        logger.debug('Created new dataset %s', dataset_id)
+        logger.info('Created new dataset %s', dataset_id)
 
 
 def extract_datasets(macros: dict | None) -> list[str]:
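
Note (illustrative, not part of the diff): the new create_datasets call above appears to ensure that datasets referenced in query macros exist before the query runs. A minimal standalone sketch of that step with google-cloud-bigquery, using a hypothetical project, dataset and location:

from google.cloud import bigquery

client = bigquery.Client(project='my-project')  # hypothetical project
for dataset in ['my_dataset']:  # in the executor these would come from extract_datasets(macros)
  bq_dataset = bigquery.Dataset(f'my-project.{dataset}')
  bq_dataset.location = 'US'  # hypothetical location
  client.create_dataset(bq_dataset, exists_ok=True, timeout=30)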

garf_executors/config.py (new file)
@@ -0,0 +1,51 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=C0330, g-bad-import-order, g-multiple-import
+
+"""Stores mapping between API aliases and their execution context."""
+
+from __future__ import annotations
+
+import os
+import pathlib
+
+import pydantic
+import smart_open
+import yaml
+
+from garf_executors.execution_context import ExecutionContext
+
+
+class Config(pydantic.BaseModel):
+  """Stores necessary parameters for one or multiple API sources.
+
+  Attributes:
+    source: Mapping between API source alias and execution parameters.
+  """
+
+  sources: dict[str, ExecutionContext]
+
+  @classmethod
+  def from_file(cls, path: str | pathlib.Path | os.PathLike[str]) -> Config:
+    """Builds config from local or remote yaml file."""
+    with smart_open.open(path, 'r', encoding='utf-8') as f:
+      data = yaml.safe_load(f)
+    return Config(sources=data)
+
+  def save(self, path: str | pathlib.Path | os.PathLike[str]) -> str:
+    """Saves config to local or remote yaml file."""
+    with smart_open.open(path, 'w', encoding='utf-8') as f:
+      yaml.dump(self.model_dump().get('sources'), f, encoding='utf-8')
+    return f'Config is saved to {str(path)}'
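
For illustration only (not part of the package sources): a minimal sketch of how the new Config class might be used, assuming a hypothetical source alias 'youtube' and file name 'garf.yaml'.

from garf_executors import config
from garf_executors.execution_context import ExecutionContext

cfg = config.Config(sources={'youtube': ExecutionContext(writer='console')})
print(cfg.save('garf.yaml'))  # 'Config is saved to garf.yaml'
restored = config.Config.from_file('garf.yaml')  # top-level YAML keys become source aliases
print(restored.sources['youtube'].writer)  # 'console'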

garf_executors/entrypoints/cli.py
@@ -25,14 +25,14 @@ import sys
 from garf_io import reader
 
 import garf_executors
-from garf_executors import exceptions
+from garf_executors import config, exceptions
 from garf_executors.entrypoints import utils
 
 
 def main():
   parser = argparse.ArgumentParser()
   parser.add_argument('query', nargs='*')
-  parser.add_argument('-c', '--config', dest='garf_config', default=None)
+  parser.add_argument('-c', '--config', dest='config', default=None)
   parser.add_argument('--source', dest='source', default=None)
   parser.add_argument('--output', dest='output', default='console')
   parser.add_argument('--input', dest='input', default='file')
@@ -64,34 +64,44 @@ def main():
     raise exceptions.GarfExecutorError(
       'Please provide one or more queries to run'
     )
-  config = utils.ConfigBuilder('garf').build(vars(args), kwargs)
-  logger.debug('config: %s', config)
-
-  if config.params:
-    config = utils.initialize_runtime_parameters(config)
-    logger.debug('initialized config: %s', config)
-
-  extra_parameters = utils.ParamsParser(['source']).parse(kwargs)
-  source_parameters = extra_parameters.get('source', {})
   reader_client = reader.create_reader(args.input)
-
-  context = garf_executors.api_executor.ApiExecutionContext(
-    query_parameters=config.params,
-    writer=args.output,
-    writer_parameters=config.writer_params,
-    fetcher_parameters=source_parameters,
-  )
-  query_executor = garf_executors.setup_executor(
-    args.source, context.fetcher_parameters
-  )
-  if args.parallel_queries:
-    logger.info('Running queries in parallel')
+  if config_file := args.config:
+    execution_config = config.Config.from_file(config_file)
+    if not (context := execution_config.sources.get(args.source)):
+      raise exceptions.GarfExecutorError(
+        f'No execution context found for source {args.source} in {config_file}'
+      )
+    query_executor = garf_executors.setup_executor(
+      args.source, context.fetcher_parameters
+    )
     batch = {query: reader_client.read(query) for query in args.query}
     query_executor.execute_batch(batch, context, args.parallel_queries)
   else:
-    logger.info('Running queries sequentially')
-    for query in args.query:
-      query_executor.execute(reader_client.read(query), query, context)
+    extra_parameters = utils.ParamsParser(
+      ['source', args.output, 'macro', 'template']
+    ).parse(kwargs)
+    source_parameters = extra_parameters.get('source', {})
+
+    context = garf_executors.api_executor.ApiExecutionContext(
+      query_parameters={
+        'macro': extra_parameters.get('macro'),
+        'template': extra_parameters.get('template'),
+      },
+      writer=args.output,
+      writer_parameters=extra_parameters.get(args.output),
+      fetcher_parameters=source_parameters,
+    )
+    query_executor = garf_executors.setup_executor(
+      args.source, context.fetcher_parameters
+    )
+    if args.parallel_queries:
+      logger.info('Running queries in parallel')
+      batch = {query: reader_client.read(query) for query in args.query}
+      query_executor.execute_batch(batch, context, args.parallel_queries)
+    else:
+      logger.info('Running queries sequentially')
+      for query in args.query:
+        query_executor.execute(reader_client.read(query), query, context)
 
 
 if __name__ == '__main__':
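
For illustration only: with the change above, `garf <query> -c <file> --source <alias>` reads the execution context from the config file instead of assembling it from CLI parameters. A rough programmatic equivalent, assuming hypothetical files query.sql and garf.yaml and the alias 'youtube':

import garf_executors
from garf_executors import config
from garf_io import reader

execution_config = config.Config.from_file('garf.yaml')
context = execution_config.sources['youtube']
query_executor = garf_executors.setup_executor('youtube', context.fetcher_parameters)
reader_client = reader.create_reader('file')
batch = {query: reader_client.read(query) for query in ['query.sql']}
query_executor.execute_batch(batch, context, False)  # False mirrors args.parallel_queries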

garf_executors/entrypoints/server.py
@@ -14,6 +14,8 @@
 
 """FastAPI endpoint for executing queries."""
 
+from typing import Optional, Union
+
 import fastapi
 import pydantic
 import uvicorn
@@ -35,9 +37,9 @@ class ApiExecutorRequest(pydantic.BaseModel):
   """
 
   source: str
-  title: str | None = None
-  query: str | None = None
-  query_path: str | list[str] | None = None
+  title: Optional[str] = None
+  query: Optional[str] = None
+  query_path: Optional[Union[str, list[str]]] = None
   context: garf_executors.ApiExecutionContext
 
   @pydantic.model_validator(mode='after')
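
Note (an assumption, not stated in the diff): switching from PEP 604 unions to typing.Optional/Union most likely keeps this pydantic model importable on Python 3.9, where expressions like `str | None` cannot be evaluated at runtime. A hypothetical model illustrating the difference:

from typing import Optional, Union

import pydantic

class Example(pydantic.BaseModel):  # hypothetical, not part of the package
  query_path: Optional[Union[str, list[str]]] = None  # evaluates on Python 3.9+
  # query_path: str | list[str] | None = None  # needs Python 3.10+ when evaluated at runtime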

garf_executors/entrypoints/utils.py
@@ -15,261 +15,15 @@
 from __future__ import annotations
 
-import dataclasses
-import datetime
+import enum
 import logging
-import os
 import sys
-from collections.abc import MutableSequence, Sequence
-from typing import Any, TypedDict
+from collections.abc import Sequence
+from typing import Any
 
-import smart_open
-import yaml
-from dateutil import relativedelta
-from garf_core import query_editor
 from rich import logging as rich_logging
 
 
-class GarfQueryParameters(TypedDict):
-  """Annotation for dictionary of query specific parameters passed via CLI.
-
-  Attributes:
-    macros: Mapping for elements that will be replaced in the queries.
-    template: Mapping for elements that will rendered via Jinja templates.
-  """
-
-  macros: dict[str, str]
-  template: dict[str, str]
-
-
-@dataclasses.dataclass
-class BaseConfig:
-  """Base config to inherit other configs from."""
-
-  def __add__(self, other: BaseConfig) -> BaseConfig:
-    """Creates new config of the same type from two configs.
-
-    Parameters from added config overwrite already present parameters.
-
-    Args:
-      other: Config that could be merged with the original one.
-
-    Returns:
-      New config with values from both configs.
-    """
-    right_dict = _remove_empty_values(self.__dict__)
-    left_dict = _remove_empty_values(other.__dict__)
-    new_dict = {**right_dict, **left_dict}
-    return self.__class__(**new_dict)
-
-  @classmethod
-  def from_dict(
-    cls, config_parameters: dict[str, str | GarfQueryParameters]
-  ) -> BaseConfig:
-    """Builds config from provided parameters ignoring empty ones."""
-    return cls(**_remove_empty_values(config_parameters))
-
-
-@dataclasses.dataclass
-class GarfConfig(BaseConfig):
-  """Stores values to run garf from command line.
-
-  Attributes:
-    account:
-      Account(s) to get data from.
-    output:
-      Specifies where to store fetched data (console, csv, BQ.)
-    api_version:
-      Google Ads API version.
-    params:
-      Any parameters passed to Garf query for substitution.
-    writer_params:
-      Any parameters that can be passed to writer for data saving.
-    customer_ids_query:
-      Query text to limit accounts fetched from Ads API.
-    customer_ids_query_file:
-      Path to query to limit accounts fetched from Ads API.
-  """
-
-  account: str | list[str] | None = None
-  output: str = 'console'
-  params: GarfQueryParameters = dataclasses.field(default_factory=dict)
-  writer_params: dict[str, str | int] = dataclasses.field(default_factory=dict)
-  customer_ids_query: str | None = None
-  customer_ids_query_file: str | None = None
-
-  def __post_init__(self) -> None:
-    """Ensures that values passed during __init__ correctly formatted."""
-    if isinstance(self.account, MutableSequence):
-      self.account = [
-        str(account).replace('-', '').strip() for account in self.account
-      ]
-    else:
-      self.account = (
-        str(self.account).replace('-', '').strip() if self.account else None
-      )
-    self.writer_params = {
-      key.replace('-', '_'): value for key, value in self.writer_params.items()
-    }
-
-
-class GarfConfigException(Exception):
-  """Exception for invalid GarfConfig."""
-
-
-@dataclasses.dataclass
-class GarfBqConfig(BaseConfig):
-  """Stores values to run garf-bq from command line.
-
-  Attributes:
-    project:
-      Google Cloud project name.
-    dataset_location:
-      Location of BigQuery dataset.
-    params:
-      Any parameters passed to BigQuery query for substitution.
-  """
-
-  project: str | None = None
-  dataset_location: str | None = None
-  params: GarfQueryParameters = dataclasses.field(default_factory=dict)
-
-
-@dataclasses.dataclass
-class GarfSqlConfig(BaseConfig):
-  """Stores values to run garf-sql from command line.
-
-  Attributes:
-    connection_string:
-      Connection string to SqlAlchemy database engine.
-    params:
-      Any parameters passed to SQL query for substitution.
-  """
-
-  connection_string: str | None = None
-  params: GarfQueryParameters = dataclasses.field(default_factory=dict)
-
-
-class ConfigBuilder:
-  """Builds config of provided type.
-
-  Config can be created from file, build from arguments or both.
-
-  Attributes:
-    config: Concrete config class that needs to be built.
-  """
-
-  _config_mapping: dict[str, BaseConfig] = {
-    'garf': GarfConfig,
-    'garf-bq': GarfBqConfig,
-    'garf-sql': GarfSqlConfig,
-  }
-
-  def __init__(self, config_type: str) -> None:
-    """Sets concrete config type.
-
-    Args:
-      config_type: Type of config that should be built.
-
-    Raises:
-      GarfConfigException: When incorrect config_type is supplied.
-    """
-    if config_type not in self._config_mapping:
-      raise GarfConfigException(f'Invalid config_type: {config_type}')
-    self._config_type = config_type
-    self.config = self._config_mapping.get(config_type)
-
-  def build(
-    self, parameters: dict[str, str], cli_named_args: Sequence[str]
-  ) -> BaseConfig | None:
-    """Builds config from file, build from arguments or both ways.
-
-    When there are both config_file and CLI arguments the latter have more
-    priority.
-
-    Args:
-      parameters: Parsed CLI arguments.
-      cli_named_args: Unparsed CLI args in a form `--key.subkey=value`.
-
-    Returns:
-      Concrete config with injected values.
-    """
-    if not (garf_config_path := parameters.get('garf_config')):
-      return self._build_config(parameters, cli_named_args)
-    config_file = self._load_config(garf_config_path)
-    config_cli = self._build_config(
-      parameters, cli_named_args, init_defaults=False
-    )
-    if config_file and config_cli:
-      config_file = config_file + config_cli
-    return config_file
-
-  def _build_config(
-    self,
-    parameters: dict[str, str],
-    cli_named_args: Sequence[str],
-    init_defaults: bool = True,
-  ) -> BaseConfig | None:
-    """Builds config from named and unnamed CLI parameters.
-
-    Args:
-      parameters: Parsed CLI arguments.
-      cli_named_args: Unparsed CLI args in a form `--key.subkey=value`.
-      init_defaults: Whether to provided default config values if
-        expected parameter is missing
-
-    Returns:
-      Concrete config with injected values.
-    """
-    output = parameters.get('output')
-    config_parameters = {
-      k: v for k, v in parameters.items() if k in self.config.__annotations__
-    }
-    cli_params = ParamsParser(['macro', 'template', output]).parse(
-      cli_named_args
-    )
-    cli_params = _remove_empty_values(cli_params)
-    if output and (writer_params := cli_params.get(output)):
-      _ = cli_params.pop(output)
-      config_parameters.update({'writer_params': writer_params})
-    if cli_params:
-      config_parameters.update({'params': cli_params})
-    if not config_parameters:
-      return None
-    if init_defaults:
-      return self.config.from_dict(config_parameters)
-    return self.config(**config_parameters)
-
-  def _load_config(self, garf_config_path: str) -> BaseConfig:
-    """Loads config from provided path.
-
-    Args:
-      garf_config_path: Path to local or remote storage.
-
-    Returns:
-      Concreate config with values taken from config file.
-
-    Raises:
-      GarfConfigException:
-        If config file missing `garf` section.
-    """
-    with smart_open.open(garf_config_path, encoding='utf-8') as f:
-      config = yaml.safe_load(f)
-    garf_section = config.get(self._config_type)
-    if not garf_section:
-      raise GarfConfigException(
-        f'Invalid config, must have `{self._config_type}` section!'
-      )
-    config_parameters = {
-      k: v for k, v in garf_section.items() if k in self.config.__annotations__
-    }
-    if params := garf_section.get('params', {}):
-      config_parameters.update({'params': params})
-    if writer_params := garf_section.get(garf_section.get('output', '')):
-      config_parameters.update({'writer_params': writer_params})
-    return self.config(**config_parameters)
-
-
 class ParamsParser:
   def __init__(self, identifiers: Sequence[str]) -> None:
     self.identifiers = identifiers
@@ -336,117 +90,15 @@ class GarfParamsException(Exception):
   """Defines exception for incorrect parameters."""
 
 
-def convert_date(date_string: str) -> str:
-  """Converts specific dates parameters to actual dates.
-
-  Returns:
-    Date string in YYYY-MM-DD format.
-
-  Raises:
-    ValueError:
-      If dynamic lookback value (:YYYYMMDD-N) is incorrect.
-  """
-  if isinstance(date_string, list) or date_string.find(':YYYY') == -1:
-    return date_string
-  current_date = datetime.date.today()
-  date_object = date_string.split('-')
-  base_date = date_object[0]
-  if len(date_object) == 2:
-    try:
-      days_ago = int(date_object[1])
-    except ValueError as e:
-      raise ValueError(
-        'Must provide numeric value for a number lookback period, '
-        'i.e. :YYYYMMDD-1'
-      ) from e
-  else:
-    days_ago = 0
-  if base_date == ':YYYY':
-    new_date = datetime.datetime(current_date.year, 1, 1)
-    delta = relativedelta.relativedelta(years=days_ago)
-  elif base_date == ':YYYYMM':
-    new_date = datetime.datetime(current_date.year, current_date.month, 1)
-    delta = relativedelta.relativedelta(months=days_ago)
-  elif base_date == ':YYYYMMDD':
-    new_date = current_date
-    delta = relativedelta.relativedelta(days=days_ago)
-  return (new_date - delta).strftime('%Y-%m-%d')
-
-
-class ConfigSaver:
-  def __init__(self, path: str) -> None:
-    self.path = path
-
-  def save(self, garf_config: BaseConfig):
-    if os.path.exists(self.path):
-      with smart_open.open(self.path, 'r', encoding='utf-8') as f:
-        config = yaml.safe_load(f)
-    else:
-      config = {}
-    config = self.prepare_config(config, garf_config)
-    with smart_open.open(self.path, 'w', encoding='utf-8') as f:
-      yaml.dump(
-        config, f, default_flow_style=False, sort_keys=False, encoding='utf-8'
-      )
-
-  def prepare_config(self, config: dict, garf_config: BaseConfig) -> dict:
-    garf = dataclasses.asdict(garf_config)
-    if isinstance(garf_config, GarfConfig):
-      garf[garf_config.output] = garf_config.writer_params
-      if not isinstance(garf_config.account, MutableSequence):
-        garf['account'] = garf_config.account.split(',')
-      del garf['writer_params']
-      garf = _remove_empty_values(garf)
-      config.update({'garf': garf})
-    if isinstance(garf_config, GarfBqConfig):
-      garf = _remove_empty_values(garf)
-      config.update({'garf-bq': garf})
-    if isinstance(garf_config, GarfSqlConfig):
-      garf = _remove_empty_values(garf)
-      config.update({'garf-sql': garf})
-    return config
-
-
-def initialize_runtime_parameters(config: BaseConfig) -> BaseConfig:
-  """Formats parameters and add common parameter in config.
-
-  Initialization identifies whether there are `date` parameters and performs
-  necessary date conversions.
-  Set of parameters that need to be generally available are injected into
-  every parameter section of the config.
-
-  Args:
-    config: Instantiated config.
-
-  Returns:
-    Config with formatted parameters.
-  """
-  common_params = query_editor.CommonParametersMixin().common_params
-  for key, param in config.params.items():
-    for key_param, value_param in param.items():
-      config.params[key][key_param] = convert_date(value_param)
-    for common_param_key, common_param_value in common_params.items():
-      if common_param_key not in config.params[key]:
-        config.params[key][common_param_key] = common_param_value
-  return config
-
-
-def _remove_empty_values(dict_object: dict[str, Any]) -> dict[str, Any]:
-  """Remove all empty elements: strings, dictionaries from a dictionary."""
-  if isinstance(dict_object, dict):
-    return {
-      key: value
-      for key, value in (
-        (key, _remove_empty_values(value)) for key, value in dict_object.items()
-      )
-      if value
-    }
-  if isinstance(dict_object, (int, str, MutableSequence)):
-    return dict_object
+class LoggerEnum(str, enum.Enum):
+  local = 'local'
+  rich = 'rich'
 
 
 def init_logging(
-  loglevel: str = 'INFO', logger_type: str = 'local', name: str = __name__
+  loglevel: str = 'INFO',
+  logger_type: str | LoggerEnum = 'local',
+  name: str = __name__,
 ) -> logging.Logger:
   if logger_type == 'rich':
     logging.basicConfig(

garf_executors/execution_context.py
@@ -14,9 +14,16 @@
 
 # pylint: disable=C0330, g-bad-import-order, g-multiple-import
 
+"""Captures parameters for fetching data from APIs."""
+
 from __future__ import annotations
 
+import os
+import pathlib
+
 import pydantic
+import smart_open
+import yaml
 from garf_core import query_editor
 from garf_io import writer
 from garf_io.writers import abs_writer
@@ -35,7 +42,7 @@ class ExecutionContext(pydantic.BaseModel):
   query_parameters: query_editor.GarfQueryParameters | None = pydantic.Field(
     default_factory=dict
   )
-  fetcher_parameters: dict[str, str] | None = pydantic.Field(
+  fetcher_parameters: dict[str, str | list[str | int]] | None = pydantic.Field(
     default_factory=dict
   )
   writer: str | None = None
@@ -48,6 +55,23 @@ class ExecutionContext(pydantic.BaseModel):
       self.fetcher_parameters = {}
     if self.writer_parameters is None:
       self.writer_parameters = {}
+    if not self.query_parameters:
+      self.query_parameters = query_editor.GarfQueryParameters()
+
+  @classmethod
+  def from_file(
+    cls, path: str | pathlib.Path | os.PathLike[str]
+  ) -> ExecutionContext:
+    """Builds context from local or remote yaml file."""
+    with smart_open.open(path, 'r', encoding='utf-8') as f:
+      data = yaml.safe_load(f)
+    return ExecutionContext(**data)
+
+  def save(self, path: str | pathlib.Path | os.PathLike[str]) -> str:
+    """Saves context to local or remote yaml file."""
+    with smart_open.open(path, 'w', encoding='utf-8') as f:
+      yaml.dump(self.model_dump(), f, encoding='utf-8')
+    return f'ExecutionContext is saved to {str(path)}'
 
   @property
   def writer_client(self) -> abs_writer.AbsWriter:
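
For illustration only: a round trip through the new from_file/save helpers, with a hypothetical path and hypothetical writer parameters.

from garf_executors.execution_context import ExecutionContext

context = ExecutionContext(writer='csv', writer_parameters={'destination_folder': '/tmp'})
print(context.save('context.yaml'))  # 'ExecutionContext is saved to context.yaml'
same_context = ExecutionContext.from_file('context.yaml')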

garf_executors/fetchers.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import inspect
+import sys
 from importlib.metadata import entry_points
 
 from garf_core import exceptions, report_fetcher
@@ -20,7 +21,9 @@ from garf_core import exceptions, report_fetcher
 
 def find_fetchers() -> set[str]:
   """Identifiers all available report fetchers."""
-  return {fetcher.name for fetcher in entry_points(group='garf')}
+  if entrypoints := _get_entrypoints('garf'):
+    return {fetcher.name for fetcher in entrypoints}
+  return set()
 
 
 def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
@@ -38,8 +41,7 @@ def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
   """
   if source not in find_fetchers():
     raise report_fetcher.MissingApiReportFetcherError(source)
-  fetchers = entry_points(group='garf')
-  for fetcher in fetchers:
+  for fetcher in _get_entrypoints('garf'):
     if fetcher.name == source:
       try:
         fetcher_module = fetcher.load()
@@ -53,3 +55,14 @@ def get_report_fetcher(source: str) -> type[report_fetcher.ApiReportFetcher]:
   raise exceptions.ApiReportFetcherError(
     f'No fetcher available for the source "{source}"'
   )
+
+
+def _get_entrypoints(group='garf'):
+  if sys.version_info.major == 3 and sys.version_info.minor == 9:
+    try:
+      fetchers = entry_points()[group]
+    except KeyError:
+      fetchers = []
+  else:
+    fetchers = entry_points(group=group)
+  return fetchers
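
Note (illustrative, not part of the diff): the _get_entrypoints shim above appears to exist because importlib.metadata changed shape across Python versions. A short sketch of the two call styles it reconciles:

from importlib.metadata import entry_points

# Python 3.10+: entry points can be selected by group directly.
garf_fetchers = entry_points(group='garf')
# Python 3.9: entry_points() returns a mapping keyed by group name, so a missing
# group raises KeyError, hence the try/except in _get_entrypoints above.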

garf_executors-0.0.11.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: garf-executors
-Version: 0.0.9
+Version: 0.0.11
 Summary: Executes queries against API and writes data to local/remote storage.
 Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
 License: Apache 2.0

garf_executors-0.0.11.dist-info/RECORD (new file)
@@ -0,0 +1,18 @@
+garf_executors/__init__.py,sha256=BqmHrQ1gcaI5qv9ZVhZ9CzFbZ8vEFCk3Qw2SFCy3QHI,1687
+garf_executors/api_executor.py,sha256=gifws1Kv-k_v3TtRQGn-WJiRQ1yWSyAdOJk38ab-nms,3573
+garf_executors/bq_executor.py,sha256=yVgncimVLST8_60JMrS5Ain21RFt7K4TR2ePOja5WNA,4858
+garf_executors/config.py,sha256=TqCzijm1PRvL4p-9Zl-kPkcC1SFKjhgTfKMJFmJW3fQ,1688
+garf_executors/exceptions.py,sha256=U_7Q2ZMOUf89gzZd2pw7y3g7i1NeByPPKfpZ3q7p3ZU,662
+garf_executors/execution_context.py,sha256=21u-Z5wRyqYFrFzph_ocqaKXypXyTyjBzutUGQbeBY4,2785
+garf_executors/executor.py,sha256=bGTGlWZT5B7I_WIjhuQ0CkL7Dij_ijFCBxuC1jGVkng,1626
+garf_executors/fetchers.py,sha256=Uoolh9L3Na2E6QsrnKV6Pwv5RrCKkcjds5gkDo0IxOw,2128
+garf_executors/sql_executor.py,sha256=OGUN1AaSi6jC1v4YW0ZmcYXPE5EYfNbBRXrpdf4QTk4,3699
+garf_executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+garf_executors/entrypoints/cli.py,sha256=eBg1QoJ9ipBIV06wVRuGJEY3a4QJC-k1S_tNB3E-2A4,3969
+garf_executors/entrypoints/server.py,sha256=rdxL8uLsdRsQ3tFC3gasCsd06Pm-nZgiwcaaG1q5lHY,2872
+garf_executors/entrypoints/utils.py,sha256=IjAu-Q1REQPuYd-gVGKhxai4LLXvlHTmE-FEID038NM,3741
+garf_executors-0.0.11.dist-info/METADATA,sha256=g4N7s2psxDh6eCxfHSc7VC2g5vwuvMfSjm4tCbtPOfQ,2649
+garf_executors-0.0.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+garf_executors-0.0.11.dist-info/entry_points.txt,sha256=LskWNFIw8j0WJuI18-32OZrlASXAMg1XtrRYwsKBz2E,61
+garf_executors-0.0.11.dist-info/top_level.txt,sha256=sP4dCXOENPn1hDFAunjMV8Js4NND_KGeO_gQWuaT0EY,15
+garf_executors-0.0.11.dist-info/RECORD,,

garf_executors-0.0.9.dist-info/RECORD (removed file)
@@ -1,17 +0,0 @@
-garf_executors/__init__.py,sha256=bydTJJBI8B4Xm9JMWwFqPWEXBUXSWEoal578Ur_QrcE,1686
-garf_executors/api_executor.py,sha256=gifws1Kv-k_v3TtRQGn-WJiRQ1yWSyAdOJk38ab-nms,3573
-garf_executors/bq_executor.py,sha256=yEEKajUs8sVfJUyVKc0j2TDEj9GN_lAWVGaWRk0QSpY,4802
-garf_executors/exceptions.py,sha256=U_7Q2ZMOUf89gzZd2pw7y3g7i1NeByPPKfpZ3q7p3ZU,662
-garf_executors/execution_context.py,sha256=maSg_lVd6HzVFhT2e0gsc1W8K_iarRupUfMGfuEcMvg,1965
-garf_executors/executor.py,sha256=bGTGlWZT5B7I_WIjhuQ0CkL7Dij_ijFCBxuC1jGVkng,1626
-garf_executors/fetchers.py,sha256=m2feJ6ByYq-oJXuQ3tmaNMx7soMcGsVC2hY4kOsPaNQ,1833
-garf_executors/sql_executor.py,sha256=OGUN1AaSi6jC1v4YW0ZmcYXPE5EYfNbBRXrpdf4QTk4,3699
-garf_executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-garf_executors/entrypoints/cli.py,sha256=rEjSFdRXu4-SUC8jZfOSaTW_WXYWiShtmrsnz-6I7do,3456
-garf_executors/entrypoints/server.py,sha256=1Te4x2kVlVrBjYGruCk3Qq8B_QKXePXjK7qMg4jJms0,2821
-garf_executors/entrypoints/utils.py,sha256=LjngjMs7Yj8wjwS8T-njq7xazku86FS24FsprwT6i1E,15056
-garf_executors-0.0.9.dist-info/METADATA,sha256=ydgsjxE4Fc4ytNe-B6ZtRgwYzC4ac69oMmMc09AcKG0,2648
-garf_executors-0.0.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-garf_executors-0.0.9.dist-info/entry_points.txt,sha256=LskWNFIw8j0WJuI18-32OZrlASXAMg1XtrRYwsKBz2E,61
-garf_executors-0.0.9.dist-info/top_level.txt,sha256=sP4dCXOENPn1hDFAunjMV8Js4NND_KGeO_gQWuaT0EY,15
-garf_executors-0.0.9.dist-info/RECORD,,