garf-executors 0.2.3__py3-none-any.whl → 1.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. garf/executors/__init__.py +25 -0
  2. garf/executors/api_executor.py +228 -0
  3. garf/executors/bq_executor.py +179 -0
  4. garf/executors/config.py +52 -0
  5. garf/executors/entrypoints/__init__.py +0 -0
  6. garf/executors/entrypoints/cli.py +164 -0
  7. {garf_executors → garf/executors}/entrypoints/grpc_server.py +22 -9
  8. garf/executors/entrypoints/server.py +174 -0
  9. garf/executors/entrypoints/tracer.py +82 -0
  10. garf/executors/entrypoints/utils.py +140 -0
  11. garf/executors/exceptions.py +17 -0
  12. garf/executors/execution_context.py +117 -0
  13. garf/executors/executor.py +124 -0
  14. garf/executors/fetchers.py +128 -0
  15. garf/executors/garf_pb2.py +51 -0
  16. {garf_executors → garf/executors}/garf_pb2_grpc.py +45 -2
  17. garf/executors/query_processor.py +79 -0
  18. garf/executors/setup.py +58 -0
  19. garf/executors/sql_executor.py +144 -0
  20. garf/executors/telemetry.py +20 -0
  21. garf/executors/workflows/__init__.py +0 -0
  22. garf/executors/workflows/gcp_workflow.yaml +49 -0
  23. garf/executors/workflows/workflow.py +164 -0
  24. garf/executors/workflows/workflow_runner.py +172 -0
  25. garf_executors/__init__.py +9 -44
  26. garf_executors/api_executor.py +9 -121
  27. garf_executors/bq_executor.py +9 -161
  28. garf_executors/config.py +9 -37
  29. garf_executors/entrypoints/__init__.py +25 -0
  30. garf_executors/entrypoints/cli.py +9 -148
  31. garf_executors/entrypoints/grcp_server.py +25 -0
  32. garf_executors/entrypoints/server.py +9 -102
  33. garf_executors/entrypoints/tracer.py +8 -40
  34. garf_executors/entrypoints/utils.py +9 -124
  35. garf_executors/exceptions.py +11 -3
  36. garf_executors/execution_context.py +9 -100
  37. garf_executors/executor.py +9 -108
  38. garf_executors/fetchers.py +9 -63
  39. garf_executors/sql_executor.py +9 -125
  40. garf_executors/telemetry.py +10 -5
  41. garf_executors/workflow.py +8 -79
  42. {garf_executors-0.2.3.dist-info → garf_executors-1.1.3.dist-info}/METADATA +18 -5
  43. garf_executors-1.1.3.dist-info/RECORD +46 -0
  44. {garf_executors-0.2.3.dist-info → garf_executors-1.1.3.dist-info}/WHEEL +1 -1
  45. garf_executors-1.1.3.dist-info/entry_points.txt +2 -0
  46. {garf_executors-0.2.3.dist-info → garf_executors-1.1.3.dist-info}/top_level.txt +1 -0
  47. garf_executors/garf_pb2.py +0 -45
  48. garf_executors-0.2.3.dist-info/RECORD +0 -24
  49. garf_executors-0.2.3.dist-info/entry_points.txt +0 -2
garf/executors/workflows/gcp_workflow.yaml
@@ -0,0 +1,49 @@
+ run:
+   for:
+     value: pair
+     in: ${pairs}
+     steps:
+       - log_source:
+           call: sys.log
+           args:
+             data: ${pair.alias}
+       - execute_queries:
+           parallel:
+             for:
+               value: query
+               in: ${pair.queries}
+               steps:
+                 - log_query:
+                     call: sys.log
+                     args:
+                       data: ${pair}
+                 - execute_single_query:
+                     try:
+                       call: http.post
+                       args:
+                         url: ${sys.get_env("GARF_ENDPOINT") + "/api/execute"}
+                         auth:
+                           type: OIDC
+                         body:
+                           source: ${pair.fetcher}
+                           # query_path: ${query.path}
+                           title: ${query.query.title}
+                           query: ${query.query.text}
+                           context:
+                             fetcher_parameters: ${pair.fetcher_parameters}
+                             writer: ${pair.writer}
+                             writer_parameters: ${pair.writer_parameters}
+                             query_parameters:
+                               macro: ${pair.query_parameters.macro}
+                               template: ${pair.query_parameters.template}
+                       result: task_resp
+                     except:
+                       as: e
+                       assign:
+                         - task_resp:
+                             status: "failed"
+                             error: ${e.message}
+                 - log_result:
+                     call: sys.log
+                     args:
+                       data: ${task_resp}
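
This Cloud Workflows definition fans out over `pairs` and POSTs one request per query to the `${GARF_ENDPOINT}/api/execute` endpoint (the HTTP server added in garf/executors/entrypoints/server.py). A minimal sketch of the equivalent request from Python; the endpoint URL and payload values are hypothetical placeholders, and the OIDC auth step is omitted:

    import requests

    # Mirrors the `body` block of the workflow above; all values are
    # hypothetical examples, not defaults shipped with the package.
    payload = {
      'source': 'bq',              # pair.fetcher
      'title': 'my_query',         # query.query.title
      'query': 'SELECT 1 AS one',  # query.query.text
      'context': {
        'fetcher_parameters': {},
        'writer': 'console',
        'writer_parameters': {},
        'query_parameters': {'macro': {}, 'template': {}},
      },
    }
    resp = requests.post('http://localhost:8000/api/execute', json=payload)
    print(resp.status_code, resp.text)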
garf/executors/workflows/workflow.py
@@ -0,0 +1,164 @@
+ # Copyright 2026 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """Workflow specifies steps of end-to-end fetching and processing."""
+
+ from __future__ import annotations
+
+ import copy
+ import os
+ import pathlib
+ import re
+ from collections import defaultdict
+ from typing import Any
+
+ import pydantic
+ import smart_open
+ import yaml
+ from garf.executors import exceptions
+ from garf.executors.execution_context import ExecutionContext
+
+
+ class GarfWorkflowError(exceptions.GarfExecutorError):
+   """Workflow specific exception."""
+
+
+ class QueryFolder(pydantic.BaseModel):
+   """Path to folder with queries."""
+
+   folder: str
+
+
+ class QueryPath(pydantic.BaseModel):
+   """Path file with query."""
+
+   path: str
+   prefix: str | None = None
+
+   @property
+   def full_path(self) -> str:
+     if self.prefix:
+       return re.sub('/$', '', self.prefix) + '/' + self.path
+     return self.path
+
+
+ class QueryDefinition(pydantic.BaseModel):
+   """Definition of a query."""
+
+   query: Query
+
+
+ class Query(pydantic.BaseModel):
+   """Query elements.
+
+   Attributes:
+     text: Query text.
+     title: Name of the query.
+   """
+
+   text: str
+   title: str
+
+
+ class ExecutionStep(ExecutionContext):
+   """Common context for executing one or more queries.
+
+   Attributes:
+     fetcher: Name of a fetcher to get data from API.
+     alias: Optional alias to identify execution step.
+     queries: Queries to run for a particular fetcher.
+     context: Execution context for queries and fetcher.
+     parallel_threshold: Max allowed parallelism for the queries in the step.
+   """
+
+   fetcher: str | None = None
+   alias: str | None = pydantic.Field(default=None, pattern=r'^[a-zA-Z0-9_]+$')
+   queries: list[QueryPath | QueryDefinition | QueryFolder] | None = None
+   parallel_threshold: int | None = None
+
+   @property
+   def context(self) -> ExecutionContext:
+     return ExecutionContext(
+       writer=self.writer,
+       writer_parameters=self.writer_parameters,
+       query_parameters=self.query_parameters,
+       fetcher_parameters=self.fetcher_parameters,
+     )
+
+
+ class Workflow(pydantic.BaseModel):
+   """Orchestrates execution of queries for multiple fetchers.
+
+   Attributes:
+     steps: Contains one or several fetcher executions.
+     context: Query and fetcher parameters to overwrite in steps.
+   """
+
+   steps: list[ExecutionStep]
+   context: ExecutionContext | None = None
+
+   def model_post_init(self, __context__) -> None:
+     if context := self.context:
+       custom_parameters = defaultdict(dict)
+       if custom_macros := context.query_parameters.macro:
+         custom_parameters['query_parameters']['macro'] = custom_macros
+       if custom_templates := context.query_parameters.template:
+         custom_parameters['query_parameters']['template'] = custom_templates
+       if custom_fetcher_parameters := context.fetcher_parameters:
+         custom_parameters['fetcher_parameters'] = custom_fetcher_parameters
+
+       if custom_parameters:
+         steps = self.steps
+         for i, step in enumerate(steps):
+           res = _merge_dicts(
+             step.model_dump(exclude_none=True), dict(custom_parameters)
+           )
+           steps[i] = ExecutionStep(**res)
+
+   @classmethod
+   def from_file(
+     cls,
+     path: str | pathlib.Path | os.PathLike[str],
+     context: ExecutionContext | None = None,
+   ) -> Workflow:
+     """Builds workflow from local or remote yaml file."""
+     with smart_open.open(path, 'r', encoding='utf-8') as f:
+       data = yaml.safe_load(f)
+     try:
+       return Workflow(steps=data.get('steps'), context=context)
+     except pydantic.ValidationError as e:
+       raise GarfWorkflowError(f'Incorrect workflow:\n {e}') from e
+
+   def save(self, path: str | pathlib.Path | os.PathLike[str]) -> str:
+     """Saves workflow to local or remote yaml file."""
+     with smart_open.open(path, 'w', encoding='utf-8') as f:
+       yaml.dump(
+         self.model_dump(exclude_none=True).get('steps'), f, encoding='utf-8'
+       )
+     return f'Workflow is saved to {str(path)}'
+
+
+ def _merge_dicts(
+   dict1: dict[str, Any], dict2: dict[str, Any]
+ ) -> dict[str, Any]:
+   result = copy.deepcopy(dict1)
+   for key, value in dict2.items():
+     if (
+       key in result
+       and isinstance(result[key], dict)
+       and isinstance(value, dict)
+     ):
+       result[key] = _merge_dicts(result[key], value)
+     else:
+       result[key] = value
+   return result
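
The new workflows package models a workflow as pydantic objects, so a YAML file is validated on load. A minimal sketch of defining and loading one, assuming a hypothetical queries.yaml with a single inline query:

    # queries.yaml (hypothetical):
    # steps:
    #   - fetcher: bq
    #     alias: demo
    #     queries:
    #       - query:
    #           title: one
    #           text: SELECT 1 AS one
    from garf.executors.workflows import workflow

    wf = workflow.Workflow.from_file('queries.yaml')
    for step in wf.steps:
      # Each step exposes an ExecutionContext assembled from its writer,
      # query_parameters and fetcher_parameters fields.
      print(step.fetcher, step.alias, step.context)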
garf/executors/workflows/workflow_runner.py
@@ -0,0 +1,172 @@
+ # Copyright 2026 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """Runs garf workflow."""
+
+ from __future__ import annotations
+
+ import logging
+ import pathlib
+ import re
+ from typing import Final
+
+ import yaml
+ from garf.executors import exceptions, setup
+ from garf.executors.telemetry import tracer
+ from garf.executors.workflows import workflow
+ from garf.io import reader
+
+ logger = logging.getLogger(__name__)
+
+ _REMOTE_FILES_PATTERN: Final[str] = (
+   '^(http|gs|s3|aruze|hdfs|webhdfs|ssh|scp|sftp)'
+ )
+ _SCRIPT_PATH = pathlib.Path(__file__).parent
+
+
+ class WorkflowRunner:
+   """Runs garf workflow.
+
+   Attributes:
+     workflow: Workflow to execute.
+     wf_parent: Optional location of a workflow file.
+     parallel_threshold: Max allowed parallelism for the queries in the workflow.
+   """
+
+   def __init__(
+     self,
+     execution_workflow: workflow.Workflow,
+     wf_parent: pathlib.Path | str,
+     parallel_threshold: int = 10,
+   ) -> None:
+     """Initializes WorkflowRunner."""
+     self.workflow = execution_workflow
+     self.wf_parent = wf_parent
+     self.parallel_threshold = parallel_threshold
+
+   @classmethod
+   def from_file(
+     cls,
+     workflow_file: str | pathlib.Path,
+   ) -> WorkflowRunner:
+     """Initialized Workflow runner from a local or remote file."""
+     if isinstance(workflow_file, str):
+       workflow_file = pathlib.Path(workflow_file)
+     execution_workflow = workflow.Workflow.from_file(workflow_file)
+     return cls(
+       execution_workflow=execution_workflow, wf_parent=workflow_file.parent
+     )
+
+   def run(
+     self,
+     enable_cache: bool = False,
+     cache_ttl_seconds: int = 3600,
+     selected_aliases: list[str] | None = None,
+     skipped_aliases: list[str] | None = None,
+   ) -> list[str]:
+     skipped_aliases = skipped_aliases or []
+     selected_aliases = selected_aliases or []
+     reader_client = reader.create_reader('file')
+     execution_results = []
+     logger.info('Starting Garf Workflow...')
+     for i, step in enumerate(self.workflow.steps, 1):
+       step_name = f'{i}-{step.fetcher}'
+       if step.alias:
+         step_name = f'{step_name}-{step.alias}'
+       if step.alias in skipped_aliases:
+         logger.warning(
+           'Skipping step %d, fetcher: %s, alias: %s',
+           i,
+           step.fetcher,
+           step.alias,
+         )
+         continue
+       if selected_aliases and step.alias not in selected_aliases:
+         logger.warning(
+           'Skipping step %d, fetcher: %s, alias: %s',
+           i,
+           step.fetcher,
+           step.alias,
+         )
+         continue
+       with tracer.start_as_current_span(step_name):
+         logger.info(
+           'Running step %d, fetcher: %s, alias: %s', i, step.fetcher, step.alias
+         )
+         query_executor = setup.setup_executor(
+           source=step.fetcher,
+           fetcher_parameters=step.fetcher_parameters,
+           enable_cache=enable_cache,
+           cache_ttl_seconds=cache_ttl_seconds,
+         )
+         batch = {}
+         if not (queries := step.queries):
+           logger.error('Please provide one or more queries to run')
+           raise exceptions.GarfExecutorError(
+             'Please provide one or more queries to run'
+           )
+         for query in queries:
+           if isinstance(query, workflow.QueryPath):
+             query_path = query.full_path
+             if re.match(_REMOTE_FILES_PATTERN, query_path):
+               batch[query.path] = reader_client.read(query_path)
+             else:
+               if not query.prefix:
+                 query_path = self.wf_parent / pathlib.Path(query.path)
+                 if not query_path.exists():
+                   raise workflow.GarfWorkflowError(
+                     f'Query: {query_path} not found'
+                   )
+               batch[query.path] = reader_client.read(query_path)
+           elif isinstance(query, workflow.QueryFolder):
+             query_path = self.wf_parent / pathlib.Path(query.folder)
+             if not query_path.exists():
+               raise workflow.GarfWorkflowError(
+                 f'Folder: {query_path} not found'
+               )
+             for p in query_path.rglob('*'):
+               if p.suffix == '.sql':
+                 batch[p.stem] = reader_client.read(p)
+           else:
+             batch[query.query.title] = query.query.text
+         query_executor.execute_batch(
+           batch,
+           step.context,
+           step.parallel_threshold or self.parallel_threshold,
+         )
+         execution_results.append(step_name)
+     return execution_results
+
+   def compile(self, path: str | pathlib.Path) -> str:
+     """Saves workflow with expanded anchors."""
+     return self.workflow.save(path)
+
+   def deploy(self, path: str | pathlib.Path) -> str:
+     """Prepares workflow for deployment to Google Cloud Workflows."""
+     wf = self.workflow.model_dump(exclude_none=True).get('steps')
+     with open(_SCRIPT_PATH / 'gcp_workflow.yaml', 'r', encoding='utf-8') as f:
+       cloud_workflow_run_template = yaml.safe_load(f)
+     init = {
+       'init': {
+         'assign': [{'pairs': wf}],
+       },
+     }
+     cloud_workflow = {
+       'main': {
+         'params': [],
+         'steps': [init, cloud_workflow_run_template],
+       },
+     }
+     with open(path, 'w', encoding='utf-8') as f:
+       yaml.dump(cloud_workflow, f, sort_keys=False)
+     return f'Workflow is saved to {path}'
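
A usage sketch for the runner, assuming the hypothetical queries.yaml from the previous note and that the selected fetcher's dependencies are installed; step names returned by run() follow the f'{i}-{fetcher}-{alias}' pattern in the code above:

    from garf.executors.workflows import workflow_runner

    runner = workflow_runner.WorkflowRunner.from_file('queries.yaml')
    # Run only steps whose alias is 'demo', with fetcher-side caching enabled.
    results = runner.run(enable_cache=True, selected_aliases=['demo'])
    print(results)  # e.g. ['1-bq-demo']
    # Emit a Google Cloud Workflows definition wrapping the same steps.
    runner.deploy('cloud_workflow.yaml')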
garf_executors/__init__.py
@@ -1,4 +1,4 @@
- # Copyright 2025 Google LLC
+ # Copyright 2026 Google LLC
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
@@ -11,50 +11,15 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
- """Executors to fetch data from various APIs."""

- from __future__ import annotations

- import importlib
+ import warnings

- from garf_executors import executor, fetchers
- from garf_executors.api_executor import ApiExecutionContext, ApiQueryExecutor
- from garf_executors.telemetry import tracer
+ from garf.executors import *

-
- @tracer.start_as_current_span('setup_executor')
- def setup_executor(
-   source: str,
-   fetcher_parameters: dict[str, str | int | bool],
-   enable_cache: bool = False,
-   cache_ttl_seconds: int = 3600,
- ) -> type[executor.Executor]:
-   """Initializes executors based on a source and parameters."""
-   if source == 'bq':
-     bq_executor = importlib.import_module('garf_executors.bq_executor')
-     query_executor = bq_executor.BigQueryExecutor(**fetcher_parameters)
-   elif source == 'sqldb':
-     sql_executor = importlib.import_module('garf_executors.sql_executor')
-     query_executor = (
-       sql_executor.SqlAlchemyQueryExecutor.from_connection_string(
-         fetcher_parameters.get('connection_string')
-       )
-     )
-   else:
-     concrete_api_fetcher = fetchers.get_report_fetcher(source)
-     query_executor = ApiQueryExecutor(
-       fetcher=concrete_api_fetcher(
-         **fetcher_parameters,
-         enable_cache=enable_cache,
-         cache_ttl_seconds=cache_ttl_seconds,
-       )
-     )
-   return query_executor
-
-
- __all__ = [
-   'ApiQueryExecutor',
-   'ApiExecutionContext',
- ]
-
- __version__ = '0.2.3'
+ warnings.warn(
+   "The 'garf_executors' namespace is deprecated. "
+   "Please use 'garf.executors' instead.",
+   DeprecationWarning,
+   stacklevel=2,
+ )
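
setup_executor has moved out of the package __init__: per the new file list it now lives in garf/executors/setup.py, which workflow_runner.py calls as setup.setup_executor. A migration sketch; whether the old name is still re-exported through the wildcard import, and the exact BigQuery fetcher parameters, are assumptions:

    # Before (0.2.3):
    # from garf_executors import setup_executor
    # After (1.1.3):
    from garf.executors.setup import setup_executor

    # Same signature as the removed 0.2.3 helper; parameter values are
    # hypothetical placeholders.
    query_executor = setup_executor(
      source='bq', fetcher_parameters={'project_id': 'my-project'}
    )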
garf_executors/api_executor.py
@@ -1,4 +1,4 @@
- # Copyright 2024 Google LLC
+ # Copyright 2026 Google LLC
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
@@ -11,127 +11,15 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
- """Module for executing Garf queries and writing them to local/remote.

- ApiQueryExecutor performs fetching data from API in a form of
- GarfReport and saving it to local/remote storage.
- """
- # pylint: disable=C0330, g-bad-import-order, g-multiple-import

- from __future__ import annotations
+ import warnings

- import logging
+ from garf.executors.api_executor import *

- from garf_core import report_fetcher
- from opentelemetry import trace
-
- from garf_executors import exceptions, execution_context, executor, fetchers
- from garf_executors.telemetry import tracer
-
- logger = logging.getLogger(__name__)
-
-
- class ApiExecutionContext(execution_context.ExecutionContext):
-   """Common context for executing one or more queries."""
-
-   writer: str | list[str] = 'console'
-
-
- class ApiQueryExecutor(executor.Executor):
-   """Gets data from API and writes them to local/remote storage.
-
-   Attributes:
-     api_client: a client used for connecting to API.
-   """
-
-   def __init__(self, fetcher: report_fetcher.ApiReportFetcher) -> None:
-     """Initializes ApiQueryExecutor.
-
-     Args:
-       fetcher: Instantiated report fetcher.
-     """
-     self.fetcher = fetcher
-     super().__init__(
-       preprocessors=self.fetcher.preprocessors,
-       postprocessors=self.fetcher.postprocessors,
-     )
-
-   @classmethod
-   def from_fetcher_alias(
-     cls,
-     source: str,
-     fetcher_parameters: dict[str, str] | None = None,
-     enable_cache: bool = False,
-     cache_ttl_seconds: int = 3600,
-   ) -> ApiQueryExecutor:
-     if not fetcher_parameters:
-       fetcher_parameters = {}
-     concrete_api_fetcher = fetchers.get_report_fetcher(source)
-     return ApiQueryExecutor(
-       fetcher=concrete_api_fetcher(
-         **fetcher_parameters,
-         enable_cache=enable_cache,
-         cache_ttl_seconds=cache_ttl_seconds,
-       )
-     )
-
-   @tracer.start_as_current_span('api.execute')
-   def execute(
-     self,
-     query: str,
-     title: str,
-     context: ApiExecutionContext,
-   ) -> str:
-     """Reads query, extract results and stores them in a specified location.
-
-     Args:
-       query: Location of the query.
-       title: Name of the query.
-       context: Query execution context.
-
-     Returns:
-       Result of writing the report.
-
-     Raises:
-       GarfExecutorError: When failed to execute query.
-     """
-     span = trace.get_current_span()
-     span.set_attribute('fetcher.class', self.fetcher.__class__.__name__)
-     span.set_attribute(
-       'api.client.class', self.fetcher.api_client.__class__.__name__
-     )
-     try:
-       span.set_attribute('query.title', title)
-       span.set_attribute('query.text', query)
-       logger.debug('starting query %s', query)
-       results = self.fetcher.fetch(
-         query_specification=query,
-         args=context.query_parameters,
-         **context.fetcher_parameters,
-       )
-       writer_clients = context.writer_clients
-       if not writer_clients:
-         logger.warning('No writers configured, skipping write operation')
-         return None
-       writing_results = []
-       for writer_client in writer_clients:
-         logger.debug(
-           'Start writing data for query %s via %s writer',
-           title,
-           type(writer_client),
-         )
-         result = writer_client.write(results, title)
-         logger.debug(
-           'Finish writing data for query %s via %s writer',
-           title,
-           type(writer_client),
-         )
-         writing_results.append(result)
-       logger.info('%s executed successfully', title)
-       # Return the last writer's result for backward compatibility
-       return writing_results[-1] if writing_results else None
-     except Exception as e:
-       logger.error('%s generated an exception: %s', title, str(e))
-       raise exceptions.GarfExecutorError(
-         '%s generated an exception: %s', title, str(e)
-       ) from e
+ warnings.warn(
+   "The 'garf_executors' namespace is deprecated. "
+   "Please use 'garf.executors' instead.",
+   DeprecationWarning,
+   stacklevel=2,
+ )
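
Both shim modules keep the old import paths working while signalling the move. A quick way to observe the warning in a fresh interpreter, assuming garf-executors 1.1.3 is installed:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
      warnings.simplefilter('always')
      # Deprecated shim that re-exports garf.executors.api_executor.
      import garf_executors.api_executor  # noqa: F401
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
    print(caught[0].message)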