garf-executors 1.1.3-py3-none-any.whl → 1.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
garf/executors/__init__.py CHANGED
@@ -22,4 +22,4 @@ __all__ = [
   'ApiExecutionContext',
 ]
 
-__version__ = '1.1.3'
+__version__ = '1.2.0'
garf/executors/api_executor.py CHANGED
@@ -32,6 +32,7 @@ from garf.executors import (
   query_processor,
 )
 from garf.executors.telemetry import tracer
+from garf.io.writers import abs_writer
 from opentelemetry import metrics, trace
 
 logger = logging.getLogger(__name__)
@@ -61,6 +62,7 @@ class ApiQueryExecutor(executor.Executor):
     self,
     fetcher: report_fetcher.ApiReportFetcher,
     report_simulator: simulator.ApiReportSimulator | None = None,
+    writers: list[abs_writer.AbsWriter] | None = None,
   ) -> None:
     """Initializes ApiQueryExecutor.
 
@@ -70,6 +72,7 @@ class ApiQueryExecutor(executor.Executor):
     """
     self.fetcher = fetcher
     self.simulator = report_simulator
+    self.writers = writers
     super().__init__(
       preprocessors=self.fetcher.preprocessors,
       postprocessors=self.fetcher.postprocessors,
@@ -123,8 +126,6 @@ class ApiQueryExecutor(executor.Executor):
       'api.client.class', self.fetcher.api_client.__class__.__name__
     )
     try:
-      span.set_attribute('query.title', title)
-      span.set_attribute('query.text', query)
      logger.debug('starting query %s', query)
      title = pathlib.Path(title).name.split('.')[0]
      api_counter.add(
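Note: the title passed to execute() may be a query file path; the context line above strips the directory and the extension before use. A standalone sketch of that derivation (the path is a made-up example):

    import pathlib

    # 'queries/campaign_stats.sql' is illustrative, not a file from the package.
    title = pathlib.Path('queries/campaign_stats.sql').name.split('.')[0]
    print(title)  # -> campaign_stats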
@@ -136,7 +137,7 @@ class ApiQueryExecutor(executor.Executor):
        title=title,
        **context.fetcher_parameters,
      )
-      writer_clients = context.writer_clients
+      writer_clients = self.writers or context.writer_clients
      if not writer_clients:
        logger.warning('No writers configured, skipping write operation')
        return None
@@ -185,13 +186,11 @@ class ApiQueryExecutor(executor.Executor):
     """
     context = query_processor.process_gquery(context)
     span = trace.get_current_span()
-    span.set_attribute('fetcher.class', self.fetcher.__class__.__name__)
+    span.set_attribute('simulator.class', self.simulator.__class__.__name__)
     span.set_attribute(
-      'api.client.class', self.fetcher.api_client.__class__.__name__
+      'api.client.class', self.simulator.api_client.__class__.__name__
     )
     try:
-      span.set_attribute('query.title', title)
-      span.set_attribute('query.text', query)
      logger.debug('starting query %s', query)
      title = pathlib.Path(title).name.split('.')[0]
      results = self.simulator.simulate(
@@ -200,7 +199,7 @@ class ApiQueryExecutor(executor.Executor):
        title=title,
        **context.fetcher_parameters,
      )
-      writer_clients = context.writer_clients
+      writer_clients = self.writers or context.writer_clients
      if not writer_clients:
        logger.warning('No writers configured, skipping write operation')
        return None
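Note: both execution paths above now resolve writers with the same precedence rule: writers bound to the executor at construction time win over writers configured on the execution context. A standalone sketch of just that rule (function and argument names are illustrative):

    def pick_writers(instance_writers, context_writers):
      # Mirrors `self.writers or context.writer_clients` from the hunks above.
      return instance_writers or context_writers

    assert pick_writers(['csv'], ['bq']) == ['csv']  # executor-level wins
    assert pick_writers(None, ['bq']) == ['bq']      # falls back to context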
garf/executors/bq_executor.py CHANGED
@@ -17,6 +17,7 @@ from __future__ import annotations
 
 import contextlib
 import os
+import warnings
 
 try:
   from google.cloud import bigquery  # type: ignore
@@ -31,6 +32,7 @@ import logging
 from garf.core import query_editor, report
 from garf.executors import exceptions, execution_context, executor
 from garf.executors.telemetry import tracer
+from garf.io.writers import abs_writer
 from google.cloud import exceptions as google_cloud_exceptions
 from opentelemetry import trace
 
@@ -41,7 +43,7 @@ class BigQueryExecutorError(exceptions.GarfExecutorError):
   """Error when BigQueryExecutor fails to run query."""
 
 
-class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
+class BigQueryExecutor(executor.Executor):
   """Handles query execution in BigQuery.
 
   Attributes:
@@ -52,29 +54,42 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
 
   def __init__(
     self,
-    project_id: str | None = os.getenv('GOOGLE_CLOUD_PROJECT'),
+    project: str | None = os.getenv('GOOGLE_CLOUD_PROJECT'),
     location: str | None = None,
+    writers: list[abs_writer.AbsWriter] | None = None,
     **kwargs: str,
   ) -> None:
     """Initializes BigQueryExecutor.
 
     Args:
-      project_id: Google Cloud project id.
-      location: BigQuery dataset location.
+      project_id: Google Cloud project id.
+      location: BigQuery dataset location.
+      writers: Instantiated writers.
     """
-    if not project_id:
+    if not project and 'project_id' not in kwargs:
      raise BigQueryExecutorError(
-        'project_id is required. Either provide it as project_id parameter '
+        'project is required. Either provide it as project parameter '
        'or GOOGLE_CLOUD_PROJECT env variable.'
      )
-    self.project_id = project_id
+    if project_id := kwargs.get('project_id'):
+      warnings.warn(
+        "'project_id' parameter is deprecated. Please use 'project' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+      )
+    self.project = project or project_id
    self.location = location
+    self.writers = writers
+    self._client = None
    super().__init__()
 
  @property
  def client(self) -> bigquery.Client:
-    """Instantiates bigquery client."""
-    return bigquery.Client(self.project_id)
+    """Instantiated BigQuery client."""
+    if not self._client:
+      with tracer.start_as_current_span('bq.create_client'):
+        self._client = bigquery.Client(self.project)
+    return self._client
 
  @tracer.start_as_current_span('bq.execute')
  def execute(
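Note: the old keyword keeps working through **kwargs, so existing callers do not break, and stacklevel=2 attributes the warning to the caller rather than to the constructor. A standalone re-implementation of the same shim outside the class (resolve_project is a made-up name):

    import warnings

    def resolve_project(project=None, **kwargs):
      # Same pattern as the __init__ above: accept the old name, warn, prefer the new one.
      if project_id := kwargs.get('project_id'):
        warnings.warn(
          "'project_id' parameter is deprecated. Please use 'project' instead.",
          DeprecationWarning,
          stacklevel=2,
        )
      return project or kwargs.get('project_id')

    assert resolve_project(project='new-style') == 'new-style'
    assert resolve_project(project_id='old-style') == 'old-style'  # warns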
@@ -96,25 +111,23 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
      Report with data if query returns some data otherwise empty Report.
    """
    span = trace.get_current_span()
+    query_spec = (
+      query_editor.QuerySpecification(
+        text=query, title=title, args=context.query_parameters
+      )
+      .remove_comments()
+      .expand()
+    )
+    query_text = query_spec.query.text
+    title = query_spec.query.title
    span.set_attribute('query.title', title)
    span.set_attribute('query.text', query)
    logger.info('Executing script: %s', title)
-    query_text = self.replace_params_template(query, context.query_parameters)
+    # TODO: move to initialization
    self.create_datasets(context.query_parameters.macro)
-    job = self.client.query(query_text)
-    try:
-      result = job.result()
-    except google_cloud_exceptions.GoogleCloudError as e:
-      raise BigQueryExecutorError(
-        f'Failed to execute query {title}: Reason: {e}'
-      ) from e
-    logger.debug('%s launched successfully', title)
-    if result.total_rows:
-      results = report.GarfReport.from_pandas(result.to_dataframe())
-    else:
-      results = report.GarfReport()
-    if context.writer and results:
-      writer_clients = context.writer_clients
+    results = self._query(query_text, title)
+    if results and (self.writers or context.writer):
+      writer_clients = self.writers or context.writer_clients
      if not writer_clients:
        logger.warning('No writers configured, skipping write operation')
      else:
@@ -151,7 +164,7 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
    """
    if macros and (datasets := extract_datasets(macros)):
      for dataset in datasets:
-        dataset_id = f'{self.project_id}.{dataset}'
+        dataset_id = f'{self.project}.{dataset}'
        try:
          self.client.get_dataset(dataset_id)
        except google_cloud_exceptions.NotFound:
@@ -161,6 +174,19 @@ class BigQueryExecutor(executor.Executor, query_editor.TemplateProcessorMixin):
          self.client.create_dataset(bq_dataset, timeout=30)
          logger.info('Created new dataset %s', dataset_id)
 
+  def _query(self, query_text, title) -> report.GarfReport:
+    job = self.client.query(query_text)
+    try:
+      result = job.result()
+    except google_cloud_exceptions.GoogleCloudError as e:
+      raise BigQueryExecutorError(
+        f'Failed to execute query {title}: Reason: {e}'
+      ) from e
+    logger.debug('%s launched successfully', title)
+    if result.total_rows:
+      return report.GarfReport.from_pandas(result.to_dataframe())
+    return report.GarfReport()
+
 
 def extract_datasets(macros: dict | None) -> list[str]:
   """Finds dataset-related keys based on values in a dict.
garf/executors/entrypoints/cli.py CHANGED
@@ -122,6 +122,7 @@ def main():
      cache_ttl_seconds=args.cache_ttl_seconds,
      selected_aliases=workflow_include,
      skipped_aliases=workflow_skip,
+      simulate=args.simulate,
    )
    meter_provider.shutdown()
    sys.exit()
@@ -143,6 +144,8 @@ def main():
    enable_cache=args.enable_cache,
    cache_ttl_seconds=args.cache_ttl_seconds,
    simulate=args.simulate,
+    writers=context.writer,
+    writer_parameters=context.writer_parameters,
  )
  batch = {query: reader_client.read(query) for query in args.query}
  if args.parallel_queries and len(args.query) > 1:
garf/executors/execution_context.py CHANGED
@@ -36,8 +36,8 @@ class ExecutionContext(pydantic.BaseModel):
   Attributes:
     query_parameters: Parameters to dynamically change query text.
     fetcher_parameters: Parameters to specify fetching setup.
-    writer: Type of writer to use. Can be a single writer string or list of writers.
-    writer_parameters: Optional parameters to setup writer.
+    writer: Type of writer(s) to use.
+    writer_parameters: Optional parameters to setup writer(s).
   """
 
   query_parameters: query_editor.GarfQueryParameters | None = pydantic.Field(
@@ -77,41 +77,25 @@ class ExecutionContext(pydantic.BaseModel):
  @property
  def writer_client(self) -> abs_writer.AbsWriter:
    """Returns single writer client."""
+    if not self.writer:
+      raise writer.GarfIoWriterError('No available writer')
    if isinstance(self.writer, list) and len(self.writer) > 0:
      writer_type = self.writer[0]
    else:
      writer_type = self.writer
-
-    writer_params = self.writer_parameters or {}
-
-    if not writer_type:
-      raise ValueError('No writer specified')
-
-    writer_client = writer.create_writer(writer_type, **writer_params)
-    if writer_type == 'bq':
-      _ = writer_client.create_or_get_dataset()
-    if writer_type == 'sheet':
-      writer_client.init_client()
-    return writer_client
+    writer_clients = writer.setup_writers(
+      writers=[writer_type], writer_parameters=self.writer_parameters
+    )
+    return writer_clients[0]
 
  @property
  def writer_clients(self) -> list[abs_writer.AbsWriter]:
    """Returns list of writer clients."""
    if not self.writer:
-      return []
-
-    # Convert single writer to list for uniform processing
+      raise writer.GarfIoWriterError('No available writer')
    writers_to_use = (
      self.writer if isinstance(self.writer, list) else [self.writer]
    )
-    writer_params = self.writer_parameters or {}
-
-    clients = []
-    for writer_type in writers_to_use:
-      writer_client = writer.create_writer(writer_type, **writer_params)
-      if writer_type == 'bq':
-        _ = writer_client.create_or_get_dataset()
-      if writer_type == 'sheet':
-        writer_client.init_client()
-      clients.append(writer_client)
-    return clients
+    return writer.setup_writers(
+      writers=writers_to_use, writer_parameters=self.writer_parameters
+    )
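Note: writer construction is delegated to garf.io's writer.setup_writers, so the per-type initialization previously inlined here (dataset creation for 'bq', client init for 'sheet') presumably moves behind that helper, and a missing writer now raises GarfIoWriterError instead of returning an empty list. A usage sketch; 'csv' and its parameter are illustrative, not documented values:

    from garf.executors.execution_context import ExecutionContext

    context = ExecutionContext(
      writer='csv',
      writer_parameters={'destination_folder': 'reports'},
    )
    clients = context.writer_clients  # built via writer.setup_writers(...)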
garf/executors/setup.py CHANGED
@@ -16,10 +16,15 @@
 from __future__ import annotations
 
 import importlib
+import logging
+from typing import Any
 
 from garf.executors import executor, fetchers
 from garf.executors.api_executor import ApiQueryExecutor
 from garf.executors.telemetry import tracer
+from garf.io import writer
+
+logger = logging.getLogger('garf.executors.setup')
 
 
 @tracer.start_as_current_span('setup_executor')
@@ -29,16 +34,28 @@ def setup_executor(
   enable_cache: bool = False,
   cache_ttl_seconds: int = 3600,
   simulate: bool = False,
+  writers: str | list[str] | None = None,
+  writer_parameters: dict[str, Any] | None = None,
 ) -> type[executor.Executor]:
   """Initializes executors based on a source and parameters."""
+  if simulate and enable_cache:
+    logger.warning('Simulating API responses. Disabling cache.')
+    enable_cache = False
+  if writers:
+    writer_clients = writer.setup_writers(writers, writer_parameters)
+  else:
+    writer_clients = None
   if source == 'bq':
     bq_executor = importlib.import_module('garf.executors.bq_executor')
-    query_executor = bq_executor.BigQueryExecutor(**fetcher_parameters)
+    query_executor = bq_executor.BigQueryExecutor(
+      **fetcher_parameters, writers=writer_clients
+    )
   elif source == 'sqldb':
     sql_executor = importlib.import_module('garf.executors.sql_executor')
     query_executor = (
       sql_executor.SqlAlchemyQueryExecutor.from_connection_string(
-        fetcher_parameters.get('connection_string')
+        connection_string=fetcher_parameters.get('connection_string'),
+        writers=writer_clients,
       )
     )
   else:
@@ -54,5 +71,6 @@ def setup_executor(
      cache_ttl_seconds=cache_ttl_seconds,
    ),
    report_simulator=concrete_simulator,
+    writers=writer_clients,
  )
  return query_executor
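Note: writers are now built once here and handed to whichever executor the source selects. A call sketch, assuming the leading parameters are source and fetcher_parameters as the function body suggests; all values are illustrative:

    from garf.executors import setup

    query_executor = setup.setup_executor(
      source='sqldb',
      fetcher_parameters={'connection_string': 'sqlite://'},
      writers=['csv'],
      writer_parameters={'destination_folder': 'reports'},
    )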
garf/executors/sql_executor.py CHANGED
@@ -31,6 +31,7 @@ import pandas as pd
 from garf.core import query_editor, report
 from garf.executors import exceptions, execution_context, executor
 from garf.executors.telemetry import tracer
+from garf.io.writers import abs_writer
 from opentelemetry import trace
 
 logger = logging.getLogger(__name__)
@@ -40,9 +41,7 @@ class SqlAlchemyQueryExecutorError(exceptions.GarfExecutorError):
   """Error when SqlAlchemyQueryExecutor fails to run query."""
 
 
-class SqlAlchemyQueryExecutor(
-  executor.Executor, query_editor.TemplateProcessorMixin
-):
+class SqlAlchemyQueryExecutor(executor.Executor):
   """Handles query execution via SqlAlchemy.
 
   Attributes:
@@ -50,7 +49,10 @@ class SqlAlchemyQueryExecutor(
   """
 
   def __init__(
-    self, engine: sqlalchemy.engine.base.Engine | None = None, **kwargs: str
+    self,
+    engine: sqlalchemy.engine.base.Engine | None = None,
+    writers: list[abs_writer.AbsWriter] | None = None,
+    **kwargs: str,
   ) -> None:
     """Initializes executor with a given engine.
 
@@ -58,18 +60,19 @@ class SqlAlchemyQueryExecutor(
      engine: Initialized Engine object to operated on a given database.
    """
    self.engine = engine or sqlalchemy.create_engine('sqlite://')
+    self.writers = writers
    super().__init__()
 
  @classmethod
  def from_connection_string(
-    cls, connection_string: str | None
+    cls, connection_string: str | None, writers: list[str] | None = None
  ) -> SqlAlchemyQueryExecutor:
    """Creates executor from SqlAlchemy connection string.
 
    https://docs.sqlalchemy.org/en/20/core/engines.html
    """
    engine = sqlalchemy.create_engine(connection_string or 'sqlite://')
-    return cls(engine)
+    return cls(engine=engine, writers=writers)
 
  @tracer.start_as_current_span('sql.execute')
  def execute(
@@ -91,10 +94,18 @@ class SqlAlchemyQueryExecutor(
      Report with data if query returns some data otherwise empty Report.
    """
    span = trace.get_current_span()
+    query_spec = (
+      query_editor.QuerySpecification(
+        text=query, title=title, args=context.query_parameters
+      )
+      .remove_comments()
+      .expand()
+    )
+    query_text = query_spec.query.text
+    title = query_spec.query.title
    span.set_attribute('query.title', title)
-    span.set_attribute('query.text', query)
+    span.set_attribute('query.text', query_text)
    logger.info('Executing script: %s', title)
-    query_text = self.replace_params_template(query, context.query_parameters)
    with self.engine.begin() as conn:
      if re.findall(r'(create|update) ', query_text.lower()):
        try:
@@ -118,8 +129,8 @@ class SqlAlchemyQueryExecutor(
          ) from e
        finally:
          conn.connection.execute(f'DROP TABLE {temp_table_name}')
-    if context.writer and results:
-      writer_clients = context.writer_clients
+    if results and (self.writers or context.writer):
+      writer_clients = self.writers or context.writer_clients
      if not writer_clients:
        logger.warning('No writers configured, skipping write operation')
      else:
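Note: the classmethod now forwards writers alongside the engine. A minimal sketch using the in-memory SQLite fallback shown above:

    from garf.executors.sql_executor import SqlAlchemyQueryExecutor

    executor = SqlAlchemyQueryExecutor.from_connection_string(
      'sqlite://', writers=None
    )
    assert executor.writers is None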
garf/executors/workflows/workflow.py CHANGED
@@ -116,6 +116,8 @@ class Workflow(pydantic.BaseModel):
      custom_parameters['query_parameters']['template'] = custom_templates
    if custom_fetcher_parameters := context.fetcher_parameters:
      custom_parameters['fetcher_parameters'] = custom_fetcher_parameters
+    if custom_writer_parameters := context.writer_parameters:
+      custom_parameters['writer_parameters'] = custom_writer_parameters
 
    if custom_parameters:
      steps = self.steps
@@ -143,7 +145,7 @@ class Workflow(pydantic.BaseModel):
    """Saves workflow to local or remote yaml file."""
    with smart_open.open(path, 'w', encoding='utf-8') as f:
      yaml.dump(
-        self.model_dump(exclude_none=True).get('steps'), f, encoding='utf-8'
+        self.model_dump(exclude_none=True), f, encoding='utf-8', sort_keys=False
      )
    return f'Workflow is saved to {str(path)}'
 
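Note: save() now dumps the whole model rather than only its 'steps' key, and sort_keys=False makes yaml.dump preserve field order instead of alphabetizing keys. A standalone illustration with made-up workflow data:

    import yaml

    workflow = {'owner': 'team-x', 'steps': [{'alias': 'demo', 'queries': ['q.sql']}]}
    print(yaml.dump(workflow, sort_keys=False))
    # owner: team-x
    # steps:
    # - alias: demo
    #   queries:
    #   - q.sql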
garf/executors/workflows/workflow_runner.py CHANGED
@@ -73,6 +73,7 @@ class WorkflowRunner:
    cache_ttl_seconds: int = 3600,
    selected_aliases: list[str] | None = None,
    skipped_aliases: list[str] | None = None,
+    simulate: bool = False,
  ) -> list[str]:
    skipped_aliases = skipped_aliases or []
    selected_aliases = selected_aliases or []
@@ -108,6 +109,9 @@ class WorkflowRunner:
      fetcher_parameters=step.fetcher_parameters,
      enable_cache=enable_cache,
      cache_ttl_seconds=cache_ttl_seconds,
+      simulate=simulate,
+      writers=step.writer,
+      writer_parameters=step.writer_parameters,
    )
    batch = {}
    if not (queries := step.queries):
garf_executors-1.2.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: garf-executors
-Version: 1.1.3
+Version: 1.2.0
 Summary: Executes queries against API and writes data to local/remote storage.
 Author-email: "Google Inc. (gTech gPS CSE team)" <no-reply@google.com>, Andrei Markin <andrey.markin.ppc@gmail.com>
 License: Apache 2.0
garf_executors-1.2.0.dist-info/RECORD CHANGED
@@ -1,27 +1,27 @@
-garf/executors/__init__.py,sha256=BRk66duc0hxzgiE35Qx0dsvNX0_bliEv-vNJb8EiNg8,824
-garf/executors/api_executor.py,sha256=yZE6buZ7f4K31FLII3kurwpVGhQeK7OP8opiEHK9ZPs,7145
-garf/executors/bq_executor.py,sha256=pM5MKVVpWtCl-Av3xfFRcAfQhCouFoYvhViWyGWY_V4,5863
+garf/executors/__init__.py,sha256=i7udlIWHIzMtLJmErjJJGiuauAGlApUlyB2sdX8gR4I,824
+garf/executors/api_executor.py,sha256=96slSspngBQ2_R15gLxaUd565EqCg7syyC2yf2oK6ic,7118
+garf/executors/bq_executor.py,sha256=-LlDFNF-OvPy-9_QzY1d58emOs5eehsgIDPYUkm-H1A,6696
 garf/executors/config.py,sha256=w5g9EYabPtK-6CPl3G87owUvBiqi0_r_aeAwISpK-zw,1720
 garf/executors/exceptions.py,sha256=U_7Q2ZMOUf89gzZd2pw7y3g7i1NeByPPKfpZ3q7p3ZU,662
-garf/executors/execution_context.py,sha256=us_S-x2jsJOBo4Vm78DhCgcgnu_HPc-f9-q_XI_eowU,3840
+garf/executors/execution_context.py,sha256=_T9sEkkImTjtdwJcnxbCEZXA6cpXh5GVs9CCGcGdGcw,3380
 garf/executors/executor.py,sha256=SpPONsYHO49WbZ2HRjTKujrNH64BbrSfKcZSn4Dcd6o,3830
 garf/executors/fetchers.py,sha256=JemMM4FU4-Cpp2SxmMBtLgHgGy2gDsQSuuAozTh7Yjw,4477
 garf/executors/garf_pb2.py,sha256=OIKC7NGErbUckhB4pQ6HycB9My5X3FvaImARnvhPExM,3450
 garf/executors/garf_pb2_grpc.py,sha256=repGTh-ZDnNyAxMcJxAf0cLfr_JjX2AzZkY6PfZy0xM,4957
 garf/executors/query_processor.py,sha256=IM5qXBNYJSUlsY2djYx8WDeX367cMhI1ZrITT22TcvI,2932
-garf/executors/setup.py,sha256=S8eLYrW6XvITuwS88JeTq9XTE1MhMTX5UymHGDs0Ybw,2037
-garf/executors/sql_executor.py,sha256=cneniSHQJ0Mx5Xbp9ykspb2__D84Uk15o7CWhrLBG-o,4869
+garf/executors/setup.py,sha256=txQtkpMNQ5WlYF2MzJdvPop07F1IqNN_X97mzedghDc,2604
+garf/executors/sql_executor.py,sha256=TzDzVxpNjyniu9ZHubcLUfYg0igGc1cOUXH9ETt1WL8,5226
 garf/executors/telemetry.py,sha256=wLWAdJZmGinffIMv5FZNKaAUusgACTvokwhMFz2UCQ0,747
 garf/executors/entrypoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-garf/executors/entrypoints/cli.py,sha256=U8vd7Rak5Y5llfLZLtZ55LO6mduMw7xvIAVe5I43-SI,6016
+garf/executors/entrypoints/cli.py,sha256=x7RAQngeb3rr3rIhJ3d01q38G6DhTQYgt6p3nSPegxg,6123
 garf/executors/entrypoints/grpc_server.py,sha256=O7hinPjlVurSJjzudDFvVjkxX7xJHRcwFk4K4aJK0uo,2783
 garf/executors/entrypoints/server.py,sha256=rLnOgZqeWwZ9UsJZ8LXDvZ101JrUIwMTpDzC5t-6wsY,4988
 garf/executors/entrypoints/tracer.py,sha256=Oug3tePD8Yg5x6r1KNDx8RL_yAML0egy1DEFDobW8Uk,2792
 garf/executors/entrypoints/utils.py,sha256=5XiGR2IOxdzAOY0lEWUeUV7tIpKBGRnQaIwBYvzQB7c,4337
 garf/executors/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 garf/executors/workflows/gcp_workflow.yaml,sha256=KKfyFaBihFiLrsNhZct0cccxMV8CnVJUsxBsXQ8VP-g,1743
-garf/executors/workflows/workflow.py,sha256=mr5YktxH6oIMasnowMLbEm5RTUIsTqTOIpHNuKm0UGQ,4810
-garf/executors/workflows/workflow_runner.py,sha256=bFdFh2gdenKEPWyAqd_znaMwr5w0efQd3qEnxEvjD-k,5789
+garf/executors/workflows/workflow.py,sha256=xqPFHq6xJSWhpbw47U1dUx9AQpYJyMGqA6pRG-4CFQo,4952
+garf/executors/workflows/workflow_runner.py,sha256=A3NxfBq9TgXs9CCaK8MzJBHVLTCDtF1sSuy_BLWbQco,5929
 garf_executors/__init__.py,sha256=5Ol67ktUcC0q5d5pGblYfdlAsraJC-Gcr2U0uCN6rSs,772
 garf_executors/api_executor.py,sha256=lmrPn6aheryM2jLRL2enU8GuKVUod3kEVkMYgjXn5EM,785
 garf_executors/bq_executor.py,sha256=wQ8pd4d6dMByHtYl_i-FbebPKO5WRcvC_y9asG8H3Zk,784
@@ -39,8 +39,8 @@ garf_executors/entrypoints/grcp_server.py,sha256=HiYsfk31OgkPA4jcED3htJGidkRZH5N
 garf_executors/entrypoints/server.py,sha256=JZCklhqx74PIhc4GIoOr2nNZz9n7QCfIm_vGyd3Q3dQ,815
 garf_executors/entrypoints/tracer.py,sha256=JlDSgeDP0Q5Lk_pZLASDXPgZCPaKkWqZgaWOQ7JB-Bs,815
 garf_executors/entrypoints/utils.py,sha256=iF-LBfKjrAhEW6HShh69RCPWVPC4Rf8iV5JEYSMhsx0,814
-garf_executors-1.1.3.dist-info/METADATA,sha256=oDascP95RXOmlqUu5HuZ4fBzakYgeekmEBlI7nmH9CA,3605
-garf_executors-1.1.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-garf_executors-1.1.3.dist-info/entry_points.txt,sha256=0IBZun3_hC4HYU-1krlbjTArZym3phu4jxYXs809ilw,61
-garf_executors-1.1.3.dist-info/top_level.txt,sha256=UaHdWdgQhbiHyRzpYC-vW3Q7pdgbxXvTTBvDA655Jq4,20
-garf_executors-1.1.3.dist-info/RECORD,,
+garf_executors-1.2.0.dist-info/METADATA,sha256=s8o4jKBZBT6uAxL6NMoU5pgYGv2JgEHx2f7LVG_jDj0,3605
+garf_executors-1.2.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+garf_executors-1.2.0.dist-info/entry_points.txt,sha256=0IBZun3_hC4HYU-1krlbjTArZym3phu4jxYXs809ilw,61
+garf_executors-1.2.0.dist-info/top_level.txt,sha256=UaHdWdgQhbiHyRzpYC-vW3Q7pdgbxXvTTBvDA655Jq4,20
+garf_executors-1.2.0.dist-info/RECORD,,