garf-executors 0.0.2__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -20,7 +20,11 @@ import like this `garf_executors.ApiQueryExecutor`
 from __future__ import annotations
 
 from garf_executors.api_executor import ApiQueryExecutor
+from garf_executors.fetchers import FETCHERS
 
 __all__ = [
+  'FETCHERS',
   'ApiQueryExecutor',
 ]
+
+__version__ = '0.0.5'
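With these additions the package's public surface exposes both the plugin registry and the package version. A minimal check, assuming the wheel is installed (with no plugins installed, `FETCHERS` is simply an empty mapping):

```
import garf_executors

# FETCHERS maps entry-point names of installed garf plugins to their
# ApiReportFetcher classes; it is empty when no plugins are installed.
print(garf_executors.__version__)  # '0.0.5'
print(list(garf_executors.FETCHERS))
```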
@@ -22,12 +22,47 @@ from __future__ import annotations
 
 import logging
 
-from garf_core import report_fetcher
-from garf_io.writers import abs_writer, console_writer
+import pydantic
+
+from garf_core import query_editor, report_fetcher
+from garf_executors import exceptions
+from garf_io import writer
+from garf_io.writers import abs_writer
 
 logger = logging.getLogger(__name__)
 
 
+class ApiExecutionContext(pydantic.BaseModel):
+  """Common context for executing one or more queries.
+
+  Attributes:
+    query_parameters: Parameters to dynamically change query text.
+    fetcher_parameters: Parameters to specify fetching setup.
+    writer: Type of writer to use.
+    writer_parameters: Optional parameters to setup writer.
+  """
+
+  query_parameters: query_editor.GarfQueryParameters | None = None
+  fetcher_parameters: dict[str, str] | None = None
+  writer: str = 'console'
+  writer_parameters: dict[str, str] | None = None
+
+  def model_post_init(self, __context__) -> None:
+    if self.fetcher_parameters is None:
+      self.fetcher_parameters = {}
+    if self.writer_parameters is None:
+      self.writer_parameters = {}
+
+  @property
+  def writer_client(self) -> abs_writer.AbsWriter:
+    writer_client = writer.create_writer(self.writer, **self.writer_parameters)
+    if self.writer == 'bq':
+      _ = writer_client.create_or_get_dataset()
+    if self.writer == 'sheet':
+      writer_client.init_client()
+    return writer_client
+
+
 class ApiQueryExecutor:
   """Gets data from API and writes them to local/remote storage.
 
@@ -36,7 +71,7 @@ class ApiQueryExecutor:
   """
 
   def __init__(self, fetcher: report_fetcher.ApiReportFetcher) -> None:
-    """Initializes QueryExecutor.
+    """Initializes ApiQueryExecutor.
 
     Args:
       fetcher: Instantiated report fetcher.
@@ -44,55 +79,54 @@ class ApiQueryExecutor:
     self.fetcher = fetcher
 
   async def aexecute(
-    self,
-    query_text: str,
-    query_name: str,
-    writer_client: abs_writer.AbsWriter = console_writer.ConsoleWriter(),
-    args: dict[str, str] | None = None,
-    **kwargs: str,
+    self, query: str, context: ApiExecutionContext, **kwargs: str
   ) -> None:
     """Reads query, extract results and stores them in a specified location.
 
     Args:
-      query_text: Text for the query.
-      query_name: Identifier of a query.
-      customer_ids: All accounts for which query will be executed.
-      writer_client: Client responsible for writing data to local/remote
-        location.
-      args: Arguments that need to be passed to the query.
-      optimize_performance: strategy for speeding up query execution
-        ("NONE", "PROTOBUF", "BATCH", "BATCH_PROTOBUF").
+      query: Location of the query.
+      context: Query execution context.
     """
-    self.execute(query_text, query_name, writer_client, args, **kwargs)
+    self.execute(query, context, **kwargs)
 
   def execute(
     self,
-    query_text: str,
-    query_name: str,
-    writer_client: abs_writer.AbsWriter = console_writer.ConsoleWriter(),
-    args: dict[str, str] | None = None,
-    **kwargs: str,
+    query: str,
+    title: str,
+    context: ApiExecutionContext,
   ) -> None:
     """Reads query, extract results and stores them in a specified location.
 
     Args:
-      query_text: Text for the query.
-      query_name: Identifier of a query.
-      writer_client: Client responsible for writing data to local/remote
-        location.
-      args: Arguments that need to be passed to the query.
+      query: Location of the query.
+      title: Name of the query.
+      context: Query execution context.
+
+    Raises:
+      GarfExecutorError: When failed to execute query.
     """
-    results = self.fetcher.fetch(
-      query_specification=query_text, args=args, **kwargs
-    )
-    logger.debug(
-      'Start writing data for query %s via %s writer',
-      query_name,
-      type(writer_client),
-    )
-    writer_client.write(results, query_name)
-    logger.debug(
-      'Finish writing data for query %s via %s writer',
-      query_name,
-      type(writer_client),
-    )
+    try:
+      logger.debug('starting query %s', query)
+      results = self.fetcher.fetch(
+        query_specification=query,
+        args=context.query_parameters,
+        **context.fetcher_parameters,
+      )
+      writer_client = context.writer_client
+      logger.debug(
+        'Start writing data for query %s via %s writer',
+        title,
+        type(writer_client),
+      )
+      writer_client.write(results, title)
+      logger.debug(
+        'Finish writing data for query %s via %s writer',
+        title,
+        type(writer_client),
+      )
+      logger.info('%s executed successfully', title)
+    except Exception as e:
+      logger.error('%s generated an exception: %s', title, str(e))
      raise exceptions.GarfExecutorError(
+        '%s generated an exception: %s', title, str(e)
+      ) from e
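The context object replaces the long per-call argument list of the 0.0.2 signatures. A minimal usage sketch based on the signatures above; `my_fetcher` is a hypothetical placeholder for any instantiated `ApiReportFetcher` subclass provided by an installed garf plugin:

```
from garf_executors import api_executor

# Context with the defaults declared above: console writer, no extra
# query or fetcher parameters.
context = api_executor.ApiExecutionContext(writer='console')

# `my_fetcher` is hypothetical: any ApiReportFetcher subclass instance.
executor = api_executor.ApiQueryExecutor(my_fetcher)
executor.execute(
  'SELECT campaign.id FROM campaign',  # query text (the CLI passes file contents here)
  'campaigns',  # title used as the output name by the writer
  context,
)
```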
@@ -0,0 +1,115 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for defining `garf` CLI utility.
+
+`garf` allows to execute queries and store results in local/remote
+storage.
+"""
+
+from __future__ import annotations
+
+import argparse
+import sys
+from concurrent import futures
+
+import garf_executors
+from garf_executors import exceptions
+from garf_executors.entrypoints import utils
+from garf_io import reader
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('query', nargs='*')
+  parser.add_argument('-c', '--config', dest='garf_config', default=None)
+  parser.add_argument('--source', dest='source', default=None)
+  parser.add_argument('--output', dest='output', default='console')
+  parser.add_argument('--input', dest='input', default='file')
+  parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
+  parser.add_argument('--logger', dest='logger', default='local')
+  parser.add_argument(
+    '--parallel-queries', dest='parallel_queries', action='store_true'
+  )
+  parser.add_argument(
+    '--no-parallel-queries', dest='parallel_queries', action='store_false'
+  )
+  parser.add_argument('--dry-run', dest='dry_run', action='store_true')
+  parser.add_argument('-v', '--version', dest='version', action='store_true')
+  parser.add_argument(
+    '--parallel-threshold', dest='parallel_threshold', default=None, type=int
+  )
+  parser.set_defaults(parallel_queries=True)
+  parser.set_defaults(dry_run=False)
+  args, kwargs = parser.parse_known_args()
+
+  if args.version:
+    print(garf_executors.__version__)
+    sys.exit()
+  if not (source := args.source):
+    raise exceptions.GarfExecutorError(
+      f'Select one of available sources: {list(garf_executors.FETCHERS.keys())}'
+    )
+  if not (concrete_api_fetcher := garf_executors.FETCHERS.get(source)):
+    raise exceptions.GarfExecutorError(f'Source {source} is not available.')
+
+  logger = utils.init_logging(
+    loglevel=args.loglevel.upper(), logger_type=args.logger
+  )
+  if not args.query:
+    logger.error('Please provide one or more queries to run')
+    raise exceptions.GarfExecutorError(
+      'Please provide one or more queries to run'
+    )
+  config = utils.ConfigBuilder('garf').build(vars(args), kwargs)
+  logger.debug('config: %s', config)
+
+  if config.params:
+    config = utils.initialize_runtime_parameters(config)
+    logger.debug('initialized config: %s', config)
+
+  extra_parameters = utils.ParamsParser(['source']).parse(kwargs)
+  source_parameters = extra_parameters.get('source', {})
+  reader_client = reader.create_reader(args.input)
+
+  context = garf_executors.api_executor.ApiExecutionContext(
+    query_parameters=config.params,
+    writer=args.output,
+    writer_parameters=config.writer_params,
+    fetcher_parameters=source_parameters,
+  )
+  query_executor = garf_executors.api_executor.ApiQueryExecutor(
+    concrete_api_fetcher(**source_parameters)
+  )
+  if args.parallel_queries:
+    logger.info('Running queries in parallel')
+    with futures.ThreadPoolExecutor(args.parallel_threshold) as executor:
+      future_to_query = {
+        executor.submit(
+          query_executor.execute,
+          reader_client.read(query),
+          query,
+          context,
+        ): query
+        for query in args.query
+      }
+      for future in futures.as_completed(future_to_query):
+        future.result()
+  else:
+    logger.info('Running queries sequentially')
+    for query in args.query:
+      query_executor.execute(reader_client.read(query), query, context)
+
+
+if __name__ == '__main__':
+  main()
@@ -0,0 +1,65 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""FastAPI endpoint for executing queries."""
+
+import fastapi
+import pydantic
+import uvicorn
+
+import garf_executors
+from garf_executors import exceptions
+
+
+class ApiExecutorRequest(pydantic.BaseModel):
+  """Request for executing a query.
+
+  Attributes:
+    source: Type of API to interact with.
+    query: Query to execute.
+    title: Name of the query used as an output for writing.
+    context: Execution context.
+  """
+
+  source: str
+  query: str
+  title: str
+  context: garf_executors.api_executor.ApiExecutionContext
+
+
+router = fastapi.APIRouter(prefix='/api')
+
+
+@router.post('/execute')
+async def execute(request: ApiExecutorRequest) -> dict[str, str]:
+  if not (concrete_api_fetcher := garf_executors.FETCHERS.get(request.source)):
+    raise exceptions.GarfExecutorError(
+      f'Source {request.source} is not available.'
+    )
+
+  query_executor = garf_executors.api_executor.ApiQueryExecutor(
+    concrete_api_fetcher(**request.context.fetcher_parameters)
+  )
+
+  query_executor.execute(request.query, request.title, request.context)
+
+  return fastapi.responses.JSONResponse(
+    content=fastapi.encoders.jsonable_encoder({'result': 'success'})
+  )
+
+
+if __name__ == '__main__':
+  app = fastapi.FastAPI()
+  app.include_router(router)
+  uvicorn.run(app)
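A minimal sketch of calling the endpoint with FastAPI's test client (assumes `fastapi[standard]` from the `server` extra is installed; `'hypothetical-source'` is a placeholder for the entry-point name of an installed fetcher plugin):

```
import fastapi
from fastapi import testclient

from garf_executors.entrypoints import server

app = fastapi.FastAPI()
app.include_router(server.router)
client = testclient.TestClient(app)

# The request body mirrors ApiExecutorRequest above; 'hypothetical-source'
# must match the name of an installed fetcher plugin.
response = client.post(
  '/api/execute',
  json={
    'source': 'hypothetical-source',
    'query': 'SELECT campaign.id FROM campaign',
    'title': 'campaigns',
    'context': {'writer': 'console'},
  },
)
print(response.json())  # {'result': 'success'} on success
```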
@@ -20,9 +20,8 @@ import datetime
 import logging
 import os
 import sys
-import traceback
 from collections.abc import MutableSequence, Sequence
-from typing import Any, Callable, TypedDict
+from typing import Any, TypedDict
 
 import smart_open
 import yaml
@@ -298,21 +297,36 @@ class ParamsParser:
     key = param[0]
     if not identifier or identifier not in key:
       return None
-    provided_identifier, key = key.split('.')
+    provided_identifier, *keys = key.split('.')
+    if len(keys) > 1:
+      raise GarfParamsException(
+        f'{key} is invalid format,'
+        f'`--{identifier}.key=value` or `--{identifier}.key` '
+        'are the correct formats'
+      )
     provided_identifier = provided_identifier.replace('--', '')
     if provided_identifier not in self.identifiers:
       raise GarfParamsException(
         f'CLI argument {provided_identifier} is not supported'
-        f", supported arguments {', '.join(self.identifiers)}"
+        f', supported arguments {", ".join(self.identifiers)}'
       )
     if provided_identifier != identifier:
       return None
-    key = key.replace('-', '_')
+    key = keys[0].replace('-', '_')
+    if not key:
+      raise GarfParamsException(
+        f'{identifier} {key} is invalid,'
+        f'`--{identifier}.key=value` or `--{identifier}.key` '
+        'are the correct formats'
+      )
     if len(param) == 2:
       return {key: param[1]}
+    if len(param) == 1:
+      return {key: True}
     raise GarfParamsException(
       f'{identifier} {key} is invalid,'
-      f'--{identifier}.key=value is the correct format'
+      f'`--{identifier}.key=value` or `--{identifier}.key` '
+      'are the correct formats'
     )
 
 
@@ -429,25 +443,6 @@ def _remove_empty_values(dict_object: dict[str, Any]) -> dict[str, Any]:
   return dict_object
 
 
-def garf_runner(query: str, callback: Callable, logger) -> None:
-  try:
-    logger.debug('starting query %s', query)
-    callback()
-    logger.info('%s executed successfully', query)
-  except Exception as e:
-    traceback.print_tb(e.__traceback__)
-    logger.error('%s generated an exception: %s', query, str(e))
-
-
-def postprocessor_runner(query: str, callback: Callable, logger) -> None:
-  try:
-    logger.debug('starting query %s', query)
-    callback()
-    logger.info('%s executed successfully', query)
-  except Exception as e:
-    logger.error('%s generated an exception: %s', query, str(e))
-
-
 def init_logging(
   loglevel: str = 'INFO', logger_type: str = 'local', name: str = __name__
 ) -> logging.Logger:
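The reworked `ParamsParser` branch above now accepts bare `--identifier.key` flags alongside `--identifier.key=value` pairs. A rough sketch of the expected behaviour, inferred from how `cli.py` consumes the result (the exact return shape is an assumption):

```
from garf_executors.entrypoints import utils

parser = utils.ParamsParser(['source'])
# Inferred result: {'source': {'region_code': 'US', 'dry_run': True}}
print(parser.parse(['--source.region-code=US', '--source.dry-run']))
```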
@@ -0,0 +1,17 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class GarfExecutorError(Exception):
+  """Base class for garf executor exceptions."""
@@ -0,0 +1,37 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+from importlib.metadata import entry_points
+
+from garf_core import report_fetcher
+
+
+def get_report_fetchers() -> dict[str, report_fetcher.ApiReportFetcher]:
+  fetchers = entry_points(group='garf')
+  found_fetchers = {}
+  for fetcher in fetchers:
+    try:
+      fetcher_module = fetcher.load()
+      for name, obj in inspect.getmembers(fetcher_module):
+        if inspect.isclass(obj) and issubclass(
+          obj, report_fetcher.ApiReportFetcher
+        ):
+          found_fetchers[fetcher.name] = getattr(fetcher_module, name)
+    except ModuleNotFoundError:
+      continue
+  return found_fetchers
+
+
+FETCHERS = get_report_fetchers()
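`FETCHERS` is populated from the `garf` entry-point group, so a plugin only needs to point an entry point at a module that defines an `ApiReportFetcher` subclass. A hypothetical packaging sketch (package and module names are illustrative, not an actual plugin):

```
# setup.py of a hypothetical plugin package.
from setuptools import setup

setup(
  name='garf-example-api',
  packages=['garf_example_api'],
  entry_points={
    # get_report_fetchers() loads this module and picks up any
    # ApiReportFetcher subclass defined in it.
    'garf': ['example-api = garf_example_api.report_fetcher'],
  },
)
```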
@@ -0,0 +1,66 @@
+Metadata-Version: 2.4
+Name: garf-executors
+Version: 0.0.5
+Summary: Executes queries against API and writes data to local/remote storage.
+Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
+License: Apache 2.0
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: Apache Software License
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+Requires-Dist: garf-core
+Requires-Dist: garf-io
+Requires-Dist: pyyaml
+Requires-Dist: pydantic
+Provides-Extra: bq
+Requires-Dist: garf-io[bq]; extra == "bq"
+Requires-Dist: pandas; extra == "bq"
+Provides-Extra: sql
+Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
+Requires-Dist: pandas; extra == "sql"
+Provides-Extra: server
+Requires-Dist: fastapi[standard]; extra == "server"
+Provides-Extra: all
+Requires-Dist: garf-executors[bq,server,sql]; extra == "all"
+
+# `garf-executors` - One-stop shop for interacting with Reporting APIs.
+
+`garf-executors` is responsible for orchestrating the process of fetching data from an API and storing it in local or remote storage.
+
+Currently the following executors are supported:
+
+* `ApiExecutor` - fetches data from a reporting API and saves it to a requested destination.
+* `BigQueryExecutor` - executes SQL code in BigQuery.
+* `SqlExecutor` - executes SQL code in a SqlAlchemy-supported DB.
+
+## Installation
+
+`pip install garf-executors`
+
+## Usage
+
+After `garf-executors` is installed you can use the `garf` utility to perform fetching.
+
+```
+garf <QUERIES> --source <API_SOURCE> \
+  --output <OUTPUT_TYPE> \
+  --source.params1=<VALUE>
+```
+
+where
+
+* `<QUERIES>` - local or remote path(s) to files with queries.
+* `<API_SOURCE>` - type of API to use; based on it the appropriate report fetcher will be initialized.
+* `<OUTPUT_TYPE>` - output supported by [`garf-io` library](../garf_io/README.md).
+
+If your report fetcher requires additional parameters, you can pass them as key-value pairs via the `--source.` argument, e.g. `--source.regionCode='US'` to get data only from the *US*.
+> Concrete `--source` parameters depend on a particular report fetcher and should be looked up in the documentation for that fetcher.
@@ -0,0 +1,15 @@
+garf_executors/__init__.py,sha256=Afg7pEjgXQlhiQ2hCTlef77ypbLRt6i_8gB1tIv7rUM,955
+garf_executors/api_executor.py,sha256=udrlMiYUmKh5NsIuJkNowqCenvtf5O925FPFawXSXbM,4021
+garf_executors/bq_executor.py,sha256=JBPxbDRYgUgpJv6SqYiFPypTFjZGIZ-SOOb6dS2sZQY,3822
+garf_executors/exceptions.py,sha256=U_7Q2ZMOUf89gzZd2pw7y3g7i1NeByPPKfpZ3q7p3ZU,662
+garf_executors/fetchers.py,sha256=gkAKHsDPzJySg4wYLZeCmNINtk6f17-jFzOP7tE82r8,1226
+garf_executors/sql_executor.py,sha256=vBNQ4HZZYxP_EYAh8Z4BerzLESfsNpXdhENzXIw-OIo,2532
+garf_executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+garf_executors/entrypoints/cli.py,sha256=mWvPQkaqarDj5byHRvNAweVbUQiHZLXrC-35zY7l4fs,4043
+garf_executors/entrypoints/server.py,sha256=rJ29VKWKaYJci1BLxZx-0LSILmUMf5BK8G1RRjRS2ts,1836
+garf_executors/entrypoints/utils.py,sha256=ZZJFe2N4KwgzPRvak9gW_B25qESnzOyuF-qYZ2wW2_M,14974
+garf_executors-0.0.5.dist-info/METADATA,sha256=a_7xXfg6sI-ugJAEvGuuXKym1nYSp4WvjuR4AypboEM,2648
+garf_executors-0.0.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+garf_executors-0.0.5.dist-info/entry_points.txt,sha256=LskWNFIw8j0WJuI18-32OZrlASXAMg1XtrRYwsKBz2E,61
+garf_executors-0.0.5.dist-info/top_level.txt,sha256=sP4dCXOENPn1hDFAunjMV8Js4NND_KGeO_gQWuaT0EY,15
+garf_executors-0.0.5.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.6.0)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
@@ -0,0 +1,2 @@
+[console_scripts]
+garf = garf_executors.entrypoints.cli:main
File without changes
@@ -1,213 +0,0 @@
1
- # Copyright 2022 Google LLC
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # https://www.apache.org/licenses/LICENSE-2.0
8
- # Unless required by applicable law or agreed to in writing, software
9
- # distributed under the License is distributed on an "AS IS" BASIS,
10
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
- # See the License for the specific language governing permissions and
12
- # limitations under the License.
13
- """Module for defing `garf` CLI utility.
14
-
15
- `garf` allows to execute GAQL queries and store results in local/remote
16
- storage.
17
- """
18
-
19
- from __future__ import annotations
20
-
21
- import argparse
22
- import functools
23
- import sys
24
- from collections.abc import MutableSequence
25
- from concurrent import futures
26
- from pathlib import Path
27
-
28
- import smart_open
29
- import yaml
30
- from garf import api_clients, exceptions, query_executor
31
- from garf.cli import utils
32
- from garf.io import reader, writer
33
-
34
-
35
- def main():
36
- parser = argparse.ArgumentParser()
37
- parser.add_argument('query', nargs='*')
38
- parser.add_argument('-c', '--config', dest='garf_config', default=None)
39
- parser.add_argument('--account', dest='account', default=None)
40
- parser.add_argument('--output', dest='output', default=None)
41
- parser.add_argument('--input', dest='input', default='file')
42
- parser.add_argument(
43
- '--ads-config', dest='config', default=str(Path.home() / 'google-ads.yaml')
44
- )
45
- parser.add_argument('--api-version', dest='api_version', default=None)
46
- parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
47
- parser.add_argument('--logger', dest='logger', default='local')
48
- parser.add_argument(
49
- '--customer-ids-query', dest='customer_ids_query', default=None
50
- )
51
- parser.add_argument(
52
- '--customer-ids-query-file', dest='customer_ids_query_file', default=None
53
- )
54
- parser.add_argument('--save-config', dest='save_config', action='store_true')
55
- parser.add_argument(
56
- '--no-save-config', dest='save_config', action='store_false'
57
- )
58
- parser.add_argument(
59
- '--config-destination', dest='save_config_dest', default='config.yaml'
60
- )
61
- parser.add_argument(
62
- '--parallel-queries', dest='parallel_queries', action='store_true'
63
- )
64
- parser.add_argument(
65
- '--no-parallel-queries', dest='parallel_queries', action='store_false'
66
- )
67
- parser.add_argument(
68
- '--optimize-performance', dest='optimize_performance', default='NONE'
69
- )
70
- parser.add_argument('--dry-run', dest='dry_run', action='store_true')
71
- parser.add_argument(
72
- '--disable-account-expansion',
73
- dest='disable_account_expansion',
74
- action='store_true',
75
- )
76
- parser.add_argument('-v', '--version', dest='version', action='store_true')
77
- parser.add_argument(
78
- '--parallel-threshold', dest='parallel_threshold', default=None, type=int
79
- )
80
- parser.set_defaults(save_config=False)
81
- parser.set_defaults(parallel_queries=True)
82
- parser.set_defaults(dry_run=False)
83
- parser.set_defaults(disable_account_expansion=False)
84
- args = parser.parse_known_args()
85
- main_args = args[0]
86
-
87
- if main_args.version:
88
- import pkg_resources
89
-
90
- version = pkg_resources.require('google-ads-api-report-fetcher')[0].version
91
- print(f'garf version {version}')
92
- sys.exit()
93
-
94
- logger = utils.init_logging(
95
- loglevel=main_args.loglevel.upper(), logger_type=main_args.logger
96
- )
97
- if not main_args.query:
98
- logger.error('Please provide one or more queries to run')
99
- raise exceptions.GarfMissingQueryException(
100
- 'Please provide one or more queries to run'
101
- )
102
-
103
- with smart_open.open(main_args.config, 'r', encoding='utf-8') as f:
104
- google_ads_config_dict = yaml.safe_load(f)
105
-
106
- config = utils.ConfigBuilder('garf').build(vars(main_args), args[1])
107
- if not config.account:
108
- if mcc := google_ads_config_dict.get('login_customer_id'):
109
- config.account = str(mcc)
110
- else:
111
- raise exceptions.GarfMissingAccountException(
112
- 'No account found, please specify via --account CLI flag'
113
- 'or add as login_customer_id in google-ads.yaml'
114
- )
115
- logger.debug('config: %s', config)
116
-
117
- if main_args.save_config and not main_args.garf_config:
118
- utils.ConfigSaver(main_args.save_config_dest).save(config)
119
- if main_args.dry_run:
120
- sys.exit()
121
-
122
- if config.params:
123
- config = utils.initialize_runtime_parameters(config)
124
- logger.debug('initialized config: %s', config)
125
-
126
- ads_client = api_clients.GoogleAdsApiClient(
127
- config_dict=google_ads_config_dict,
128
- version=config.api_version,
129
- use_proto_plus=main_args.optimize_performance
130
- not in ('PROTOBUF', 'BATCH_PROTOBUF'),
131
- )
132
- ads_query_executor = query_executor.AdsQueryExecutor(ads_client)
133
- reader_factory = reader.ReaderFactory()
134
- reader_client = reader_factory.create_reader(main_args.input)
135
-
136
- if config.customer_ids_query:
137
- customer_ids_query = config.customer_ids_query
138
- elif config.customer_ids_query_file:
139
- file_reader = reader_factory.create_reader('file')
140
- customer_ids_query = file_reader.read(config.customer_ids_query_file)
141
- else:
142
- customer_ids_query = None
143
-
144
- if main_args.disable_account_expansion:
145
- logger.info(
146
- 'Skipping account expansion because of ' 'disable_account_expansion flag'
147
- )
148
- customer_ids = (
149
- config.account
150
- if isinstance(config.account, MutableSequence)
151
- else [config.account]
152
- )
153
- else:
154
- customer_ids = ads_query_executor.expand_mcc(
155
- config.account, customer_ids_query
156
- )
157
- if not customer_ids:
158
- logger.warning(
159
- 'Not a single under MCC %s is found that satisfies '
160
- 'the following customer_id query: "%s"',
161
- config.account,
162
- customer_ids_query,
163
- )
164
- sys.exit()
165
- writer_client = writer.WriterFactory().create_writer(
166
- config.output, **config.writer_params
167
- )
168
- if config.output == 'bq':
169
- _ = writer_client.create_or_get_dataset()
170
- if config.output == 'sheet':
171
- writer_client.init_client()
172
-
173
- logger.info(
174
- 'Total number of customer_ids is %d, accounts=[%s]',
175
- len(customer_ids),
176
- ','.join(map(str, customer_ids)),
177
- )
178
-
179
- if main_args.parallel_queries:
180
- logger.info('Running queries in parallel')
181
- with futures.ThreadPoolExecutor(main_args.parallel_threshold) as executor:
182
- future_to_query = {
183
- executor.submit(
184
- ads_query_executor.execute,
185
- reader_client.read(query),
186
- query,
187
- customer_ids,
188
- writer_client,
189
- config.params,
190
- main_args.optimize_performance,
191
- ): query
192
- for query in main_args.query
193
- }
194
- for future in futures.as_completed(future_to_query):
195
- query = future_to_query[future]
196
- utils.garf_runner(query, future.result, logger)
197
- else:
198
- logger.info('Running queries sequentially')
199
- for query in main_args.query:
200
- callback = functools.partial(
201
- ads_query_executor.execute,
202
- reader_client.read(query),
203
- query,
204
- customer_ids,
205
- writer_client,
206
- config.params,
207
- main_args.optimize_performance,
208
- )
209
- utils.garf_runner(query, callback, logger)
210
-
211
-
212
- if __name__ == '__main__':
213
- main()
@@ -1,112 +0,0 @@
1
- # Copyright 2022 Google LLC
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # https://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- """Module for defing `garf-bq` CLI utility.
15
-
16
- `garf-bq` allows to execute BigQuery queries based on Garf config.
17
- """
18
-
19
- from __future__ import annotations
20
-
21
- import argparse
22
- import functools
23
- import sys
24
- from concurrent import futures
25
-
26
- from garf_writers import reader # type: ignore
27
-
28
- from garf_executors import bq_executor
29
- from garf_executors.entrypoints import utils
30
-
31
-
32
- def main():
33
- parser = argparse.ArgumentParser()
34
- parser.add_argument('query', nargs='+')
35
- parser.add_argument('-c', '--config', dest='garf_config', default=None)
36
- parser.add_argument('--project', dest='project')
37
- parser.add_argument(
38
- '--dataset-location', dest='dataset_location', default=None
39
- )
40
- parser.add_argument('--save-config', dest='save_config', action='store_true')
41
- parser.add_argument(
42
- '--no-save-config', dest='save_config', action='store_false'
43
- )
44
- parser.add_argument(
45
- '--config-destination', dest='save_config_dest', default='config.yaml'
46
- )
47
- parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
48
- parser.add_argument('--logger', dest='logger', default='local')
49
- parser.add_argument('--dry-run', dest='dry_run', action='store_true')
50
- parser.add_argument(
51
- '--parallel-queries', dest='parallel_queries', action='store_true'
52
- )
53
- parser.add_argument(
54
- '--no-parallel-queries', dest='parallel_queries', action='store_false'
55
- )
56
- parser.add_argument(
57
- '--parallel-threshold', dest='parallel_threshold', default=None, type=int
58
- )
59
- parser.set_defaults(save_config=False)
60
- parser.set_defaults(dry_run=False)
61
- parser.set_defaults(parallel_queries=True)
62
- args = parser.parse_known_args()
63
- main_args = args[0]
64
-
65
- logger = utils.init_logging(
66
- loglevel=main_args.loglevel.upper(), logger_type=main_args.logger
67
- )
68
- config = utils.ConfigBuilder('garf-bq').build(vars(main_args), args[1])
69
- logger.debug('config: %s', config)
70
- if main_args.save_config and not main_args.garf_config:
71
- utils.ConfigSaver(main_args.save_config_dest).save(config)
72
- if main_args.dry_run:
73
- sys.exit()
74
-
75
- config = utils.initialize_runtime_parameters(config)
76
- logger.debug('initialized config: %s', config)
77
-
78
- bigquery_executor = bq_executor.BigQueryExecutor(
79
- project_id=config.project, location=config.dataset_location
80
- )
81
- bigquery_executor.create_datasets(config.params.get('macro'))
82
-
83
- reader_client = reader.FileReader()
84
-
85
- if main_args.parallel_queries:
86
- logger.info('Running queries in parallel')
87
- with futures.ThreadPoolExecutor(
88
- max_workers=main_args.parallel_threshold
89
- ) as executor:
90
- future_to_query = {
91
- executor.submit(
92
- bigquery_executor.execute,
93
- query,
94
- reader_client.read(query),
95
- config.params,
96
- ): query
97
- for query in sorted(main_args.query)
98
- }
99
- for future in futures.as_completed(future_to_query):
100
- query = future_to_query[future]
101
- utils.postprocessor_runner(query, future.result, logger)
102
- else:
103
- logger.info('Running queries sequentially')
104
- for query in sorted(main_args.query):
105
- callback = functools.partial(
106
- executor.execute, query, reader_client.read(query), config.params
107
- )
108
- utils.postprocessor_runner(query, callback, logger)
109
-
110
-
111
- if __name__ == '__main__':
112
- main()
@@ -1,213 +0,0 @@
1
- # Copyright 2022 Google LLC
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # https://www.apache.org/licenses/LICENSE-2.0
8
- # Unless required by applicable law or agreed to in writing, software
9
- # distributed under the License is distributed on an "AS IS" BASIS,
10
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
- # See the License for the specific language governing permissions and
12
- # limitations under the License.
13
- """Module for defing `garf` CLI utility.
14
-
15
- `garf` allows to execute GAQL queries and store results in local/remote
16
- storage.
17
- """
18
-
19
- from __future__ import annotations
20
-
21
- import argparse
22
- import functools
23
- import sys
24
- from collections.abc import MutableSequence
25
- from concurrent import futures
26
- from pathlib import Path
27
-
28
- import smart_open
29
- import yaml
30
- from garf import api_clients, exceptions, query_executor
31
- from garf.cli import utils
32
- from garf.io import reader, writer
33
-
34
-
35
- def main():
36
- parser = argparse.ArgumentParser()
37
- parser.add_argument('query', nargs='*')
38
- parser.add_argument('-c', '--config', dest='garf_config', default=None)
39
- parser.add_argument('--account', dest='account', default=None)
40
- parser.add_argument('--output', dest='output', default=None)
41
- parser.add_argument('--input', dest='input', default='file')
42
- parser.add_argument(
43
- '--ads-config', dest='config', default=str(Path.home() / 'google-ads.yaml')
44
- )
45
- parser.add_argument('--api-version', dest='api_version', default=None)
46
- parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
47
- parser.add_argument('--logger', dest='logger', default='local')
48
- parser.add_argument(
49
- '--customer-ids-query', dest='customer_ids_query', default=None
50
- )
51
- parser.add_argument(
52
- '--customer-ids-query-file', dest='customer_ids_query_file', default=None
53
- )
54
- parser.add_argument('--save-config', dest='save_config', action='store_true')
55
- parser.add_argument(
56
- '--no-save-config', dest='save_config', action='store_false'
57
- )
58
- parser.add_argument(
59
- '--config-destination', dest='save_config_dest', default='config.yaml'
60
- )
61
- parser.add_argument(
62
- '--parallel-queries', dest='parallel_queries', action='store_true'
63
- )
64
- parser.add_argument(
65
- '--no-parallel-queries', dest='parallel_queries', action='store_false'
66
- )
67
- parser.add_argument(
68
- '--optimize-performance', dest='optimize_performance', default='NONE'
69
- )
70
- parser.add_argument('--dry-run', dest='dry_run', action='store_true')
71
- parser.add_argument(
72
- '--disable-account-expansion',
73
- dest='disable_account_expansion',
74
- action='store_true',
75
- )
76
- parser.add_argument('-v', '--version', dest='version', action='store_true')
77
- parser.add_argument(
78
- '--parallel-threshold', dest='parallel_threshold', default=None, type=int
79
- )
80
- parser.set_defaults(save_config=False)
81
- parser.set_defaults(parallel_queries=True)
82
- parser.set_defaults(dry_run=False)
83
- parser.set_defaults(disable_account_expansion=False)
84
- args = parser.parse_known_args()
85
- main_args = args[0]
86
-
87
- if main_args.version:
88
- import pkg_resources
89
-
90
- version = pkg_resources.require('google-ads-api-report-fetcher')[0].version
91
- print(f'garf version {version}')
92
- sys.exit()
93
-
94
- logger = utils.init_logging(
95
- loglevel=main_args.loglevel.upper(), logger_type=main_args.logger
96
- )
97
- if not main_args.query:
98
- logger.error('Please provide one or more queries to run')
99
- raise exceptions.GarfMissingQueryException(
100
- 'Please provide one or more queries to run'
101
- )
102
-
103
- with smart_open.open(main_args.config, 'r', encoding='utf-8') as f:
104
- google_ads_config_dict = yaml.safe_load(f)
105
-
106
- config = utils.ConfigBuilder('garf').build(vars(main_args), args[1])
107
- if not config.account:
108
- if mcc := google_ads_config_dict.get('login_customer_id'):
109
- config.account = str(mcc)
110
- else:
111
- raise exceptions.GarfMissingAccountException(
112
- 'No account found, please specify via --account CLI flag'
113
- 'or add as login_customer_id in google-ads.yaml'
114
- )
115
- logger.debug('config: %s', config)
116
-
117
- if main_args.save_config and not main_args.garf_config:
118
- utils.ConfigSaver(main_args.save_config_dest).save(config)
119
- if main_args.dry_run:
120
- sys.exit()
121
-
122
- if config.params:
123
- config = utils.initialize_runtime_parameters(config)
124
- logger.debug('initialized config: %s', config)
125
-
126
- ads_client = api_clients.GoogleAdsApiClient(
127
- config_dict=google_ads_config_dict,
128
- version=config.api_version,
129
- use_proto_plus=main_args.optimize_performance
130
- not in ('PROTOBUF', 'BATCH_PROTOBUF'),
131
- )
132
- ads_query_executor = query_executor.AdsQueryExecutor(ads_client)
133
- reader_factory = reader.ReaderFactory()
134
- reader_client = reader_factory.create_reader(main_args.input)
135
-
136
- if config.customer_ids_query:
137
- customer_ids_query = config.customer_ids_query
138
- elif config.customer_ids_query_file:
139
- file_reader = reader_factory.create_reader('file')
140
- customer_ids_query = file_reader.read(config.customer_ids_query_file)
141
- else:
142
- customer_ids_query = None
143
-
144
- if main_args.disable_account_expansion:
145
- logger.info(
146
- 'Skipping account expansion because of ' 'disable_account_expansion flag'
147
- )
148
- customer_ids = (
149
- config.account
150
- if isinstance(config.account, MutableSequence)
151
- else [config.account]
152
- )
153
- else:
154
- customer_ids = ads_query_executor.expand_mcc(
155
- config.account, customer_ids_query
156
- )
157
- if not customer_ids:
158
- logger.warning(
159
- 'Not a single under MCC %s is found that satisfies '
160
- 'the following customer_id query: "%s"',
161
- config.account,
162
- customer_ids_query,
163
- )
164
- sys.exit()
165
- writer_client = writer.WriterFactory().create_writer(
166
- config.output, **config.writer_params
167
- )
168
- if config.output == 'bq':
169
- _ = writer_client.create_or_get_dataset()
170
- if config.output == 'sheet':
171
- writer_client.init_client()
172
-
173
- logger.info(
174
- 'Total number of customer_ids is %d, accounts=[%s]',
175
- len(customer_ids),
176
- ','.join(map(str, customer_ids)),
177
- )
178
-
179
- if main_args.parallel_queries:
180
- logger.info('Running queries in parallel')
181
- with futures.ThreadPoolExecutor(main_args.parallel_threshold) as executor:
182
- future_to_query = {
183
- executor.submit(
184
- ads_query_executor.execute,
185
- reader_client.read(query),
186
- query,
187
- customer_ids,
188
- writer_client,
189
- config.params,
190
- main_args.optimize_performance,
191
- ): query
192
- for query in main_args.query
193
- }
194
- for future in futures.as_completed(future_to_query):
195
- query = future_to_query[future]
196
- utils.garf_runner(query, future.result, logger)
197
- else:
198
- logger.info('Running queries sequentially')
199
- for query in main_args.query:
200
- callback = functools.partial(
201
- ads_query_executor.execute,
202
- reader_client.read(query),
203
- query,
204
- customer_ids,
205
- writer_client,
206
- config.params,
207
- main_args.optimize_performance,
208
- )
209
- utils.garf_runner(query, callback, logger)
210
-
211
-
212
- if __name__ == '__main__':
213
- main()
@@ -1,109 +0,0 @@
1
- # Copyright 2023 Google LLC
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # https://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- """Module for defing `garf` CLI utility.
15
-
16
- `garf-sql` allows to execute SQL queries in various Databases via SqlAlchemy.
17
- """
18
-
19
- from __future__ import annotations
20
-
21
- import argparse
22
- import functools
23
- import sys
24
- from concurrent import futures
25
-
26
- import sqlalchemy
27
- from garf_writers import reader # type: ignore
28
-
29
- from garf_executors import sql_executor
30
- from garf_executors.entrypoints import utils
31
-
32
-
33
- def main():
34
- parser = argparse.ArgumentParser()
35
- parser.add_argument('query', nargs='+')
36
- parser.add_argument('-c', '--config', dest='garf_config', default=None)
37
- parser.add_argument('--conn', '--connection-string', dest='connection_string')
38
- parser.add_argument('--save-config', dest='save_config', action='store_true')
39
- parser.add_argument(
40
- '--no-save-config', dest='save_config', action='store_false'
41
- )
42
- parser.add_argument(
43
- '--config-destination', dest='save_config_dest', default='config.yaml'
44
- )
45
- parser.add_argument('--log', '--loglevel', dest='loglevel', default='info')
46
- parser.add_argument('--logger', dest='logger', default='local')
47
- parser.add_argument('--dry-run', dest='dry_run', action='store_true')
48
- parser.add_argument(
49
- '--parallel-queries', dest='parallel_queries', action='store_true'
50
- )
51
- parser.add_argument(
52
- '--no-parallel-queries', dest='parallel_queries', action='store_false'
53
- )
54
- parser.add_argument(
55
- '--parallel-threshold', dest='parallel_threshold', default=None, type=int
56
- )
57
- parser.set_defaults(save_config=False)
58
- parser.set_defaults(dry_run=False)
59
- parser.set_defaults(parallel_queries=True)
60
- args = parser.parse_known_args()
61
- main_args = args[0]
62
-
63
- logger = utils.init_logging(
64
- loglevel=main_args.loglevel.upper(), logger_type=main_args.logger
65
- )
66
-
67
- config = utils.ConfigBuilder('garf-sql').build(vars(main_args), args[1])
68
- logger.debug('config: %s', config)
69
- if main_args.save_config and not main_args.garf_config:
70
- utils.ConfigSaver(main_args.save_config_dest).save(config)
71
- if main_args.dry_run:
72
- sys.exit()
73
-
74
- config = utils.initialize_runtime_parameters(config)
75
- logger.debug('initialized config: %s', config)
76
-
77
- engine = sqlalchemy.create_engine(config.connection_string)
78
- sqlalchemy_query_executor = sql_executor.SqlAlchemyQueryExecutor(engine)
79
-
80
- reader_client = reader.FileReader()
81
-
82
- if main_args.parallel_queries:
83
- logger.info('Running queries in parallel')
84
- with futures.ThreadPoolExecutor(
85
- max_workers=main_args.parallel_threshold
86
- ) as executor:
87
- future_to_query = {
88
- executor.submit(
89
- sqlalchemy_query_executor.execute,
90
- query,
91
- reader_client.read(query),
92
- config.params,
93
- ): query
94
- for query in sorted(main_args.query)
95
- }
96
- for future in futures.as_completed(future_to_query):
97
- query = future_to_query[future]
98
- utils.postprocessor_runner(query, future.result, logger)
99
- else:
100
- logger.info('Running queries sequentially')
101
- for query in sorted(main_args.query):
102
- callback = functools.partial(
103
- executor.execute, query, reader_client.read(query), config.params
104
- )
105
- utils.postprocessor_runner(query, callback, logger)
106
-
107
-
108
- if __name__ == '__main__':
109
- main()
@@ -1,30 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: garf-executors
3
- Version: 0.0.2
4
- Summary: Executes queries against API and writes data to local/remote storage.
5
- Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>
6
- License: Apache 2.0
7
- Classifier: Programming Language :: Python :: 3 :: Only
8
- Classifier: Programming Language :: Python :: 3.8
9
- Classifier: Programming Language :: Python :: 3.9
10
- Classifier: Programming Language :: Python :: 3.10
11
- Classifier: Programming Language :: Python :: 3.11
12
- Classifier: Programming Language :: Python :: 3.12
13
- Classifier: Intended Audience :: Developers
14
- Classifier: Topic :: Software Development :: Libraries :: Python Modules
15
- Classifier: Operating System :: OS Independent
16
- Classifier: License :: OSI Approved :: Apache Software License
17
- Requires-Python: >=3.8
18
- Description-Content-Type: text/markdown
19
- Requires-Dist: garf-core
20
- Requires-Dist: garf-io
21
- Provides-Extra: bq
22
- Requires-Dist: garf-io[bq]; extra == "bq"
23
- Requires-Dist: pandas; extra == "bq"
24
- Provides-Extra: sql
25
- Requires-Dist: garf-io[sqlalchemy]; extra == "sql"
26
- Requires-Dist: pandas; extra == "sql"
27
- Provides-Extra: all
28
- Requires-Dist: garf-executors[bq,sql]; extra == "all"
29
-
30
- # Gaarf Executors
@@ -1,16 +0,0 @@
1
- garf_executors/__init__.py,sha256=EWaHzmbB0XnTnYoR0bNsknBBbCet7tpv7gUwahtBIC0,873
2
- garf_executors/api_executor.py,sha256=axB7msuZ7LXFk_f3MjIyGM2AhDILyu7g70zqNW0hG6Q,3149
3
- garf_executors/bq_executor.py,sha256=JBPxbDRYgUgpJv6SqYiFPypTFjZGIZ-SOOb6dS2sZQY,3822
4
- garf_executors/sql_executor.py,sha256=vBNQ4HZZYxP_EYAh8Z4BerzLESfsNpXdhENzXIw-OIo,2532
5
- garf_executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
- garf_executors/entrypoints/utils.py,sha256=C-XbXunnt7HN27eHcTAO7iytu6rM4eWNKApxL345Z6g,15116
7
- garf_executors/entrypoints/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
- garf_executors/entrypoints/cli/api.py,sha256=0QzB7KdXyZnHhaMEGsFhj2B8hyNxV-F5JW6zC90rTcQ,7118
9
- garf_executors/entrypoints/cli/bq.py,sha256=Rk3nTkcGhyp1hnSSFsLFIFPXo33l_B_O1MiCK2fdZM8,3824
10
- garf_executors/entrypoints/cli/gaarf.py,sha256=0QzB7KdXyZnHhaMEGsFhj2B8hyNxV-F5JW6zC90rTcQ,7118
11
- garf_executors/entrypoints/cli/sql.py,sha256=tXQwhrLNUvfORxVdaZHnIawAR06oWZGro2vcreJ22Kc,3753
12
- garf_executors-0.0.2.dist-info/METADATA,sha256=rxCpgyIWOUo0tQu3HL4kM3cki8eW__tBPgFm3Ee3xIA,1175
13
- garf_executors-0.0.2.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
14
- garf_executors-0.0.2.dist-info/entry_points.txt,sha256=ksbFBDblKlOYqNyYoL3uaZVYWEYM_KWb0sWrvUamhd4,136
15
- garf_executors-0.0.2.dist-info/top_level.txt,sha256=sP4dCXOENPn1hDFAunjMV8Js4NND_KGeO_gQWuaT0EY,15
16
- garf_executors-0.0.2.dist-info/RECORD,,
@@ -1,3 +0,0 @@
1
- [console_scripts]
2
- garf-bq-executor = garf_executors.entrypoints.cli.bq:main
3
- garf-sql-executor = garf_executors.entrypoints.cli.sql:main