apache-airflow-providers-google 10.13.0rc1__py3-none-any.whl → 10.13.0rc2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
@@ -259,3 +259,121 @@ class DataplexDataQualityJobStatusSensor(BaseSensorOperator):
                 raise AirflowSkipException(message)
             raise AirflowDataQualityScanException(message)
         return job_status == DataScanJob.State.SUCCEEDED
+
+
+class DataplexDataProfileJobStatusSensor(BaseSensorOperator):
+    """
+    Check the status of the Dataplex DataProfile job.
+
+    :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
+    :param region: Required. The ID of the Google Cloud region that the task belongs to.
+    :param data_scan_id: Required. Data Profile scan identifier.
+    :param job_id: Required. Job ID.
+    :param api_version: The version of the API that will be requested, for example 'v3'.
+    :param retry: A retry object used to retry requests. If `None` is specified, requests
+        will not be retried.
+    :param metadata: Additional metadata that is provided to the method.
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with the first
+        account from the list granting this role to the originating account (templated).
+    :param result_timeout: Value in seconds for which the operator will wait for the Data Profile scan result.
+        Throws an exception if no result is found after the specified amount of seconds.
+
+    :return: Boolean indicating if the job run has reached the ``DataScanJob.State.SUCCEEDED`` state.
+    """
+
+    template_fields = ["job_id"]
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        data_scan_id: str,
+        job_id: str,
+        api_version: str = "v1",
+        retry: Retry | _MethodDefault = DEFAULT,
+        metadata: Sequence[tuple[str, str]] = (),
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        result_timeout: float = 60.0 * 10,
+        start_sensor_time: float | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.data_scan_id = data_scan_id
+        self.job_id = job_id
+        self.api_version = api_version
+        self.retry = retry
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.result_timeout = result_timeout
+        self.start_sensor_time = start_sensor_time
+
+    def _duration(self):
+        if not self.start_sensor_time:
+            self.start_sensor_time = time.monotonic()
+        return time.monotonic() - self.start_sensor_time
+
+    def poke(self, context: Context) -> bool:
+        self.log.info("Waiting for job %s to be %s", self.job_id, DataScanJob.State.SUCCEEDED)
+        if self.result_timeout:
+            duration = self._duration()
+            if duration > self.result_timeout:
+                # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
+                message = (
+                    f"Timeout: Data Profile scan {self.job_id} is not ready after {self.result_timeout}s"
+                )
+                if self.soft_fail:
+                    raise AirflowSkipException(message)
+                raise AirflowDataQualityScanResultTimeoutException(message)
+
+        hook = DataplexHook(
+            gcp_conn_id=self.gcp_conn_id,
+            api_version=self.api_version,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+        try:
+            job = hook.get_data_scan_job(
+                project_id=self.project_id,
+                region=self.region,
+                data_scan_id=self.data_scan_id,
+                job_id=self.job_id,
+                timeout=self.timeout,
+                retry=self.retry,
+                metadata=self.metadata,
+            )
+        except GoogleAPICallError as e:
+            # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
+            message = f"Error occurred when trying to retrieve Data Profile scan job: {self.data_scan_id}"
+            if self.soft_fail:
+                raise AirflowSkipException(message, e)
+            raise AirflowException(message, e)
+
+        job_status = job.state
+        self.log.info(
+            "Current status of the Dataplex Data Profile scan job %s => %s", self.job_id, job_status
+        )
+        if job_status == DataScanJob.State.FAILED:
+            # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
+            message = f"Data Profile scan job failed: {self.job_id}"
+            if self.soft_fail:
+                raise AirflowSkipException(message)
+            raise AirflowException(message)
+        if job_status == DataScanJob.State.CANCELLED:
+            # TODO: remove this if check when min_airflow_version is set to higher than 2.7.1
+            message = f"Data Profile scan job cancelled: {self.job_id}"
+            if self.soft_fail:
+                raise AirflowSkipException(message)
+            raise AirflowException(message)
+        return job_status == DataScanJob.State.SUCCEEDED
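
The new sensor (added to sensors/dataplex.py, per the RECORD changes below) mirrors the existing DataplexDataQualityJobStatusSensor: poke() fetches the scan job and returns True only on SUCCEEDED, raising (or, with soft_fail, skipping) on FAILED, CANCELLED, or once result_timeout elapses. A minimal usage sketch, assuming a hypothetical DAG in which an upstream task pushes the scan job ID to XCom; all IDs below are placeholders:

    # Hypothetical wiring; project/region/scan IDs are placeholders, and the
    # upstream task "run_profile_scan" is assumed to push the job ID to XCom
    # (job_id is the sensor's only templated field).
    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.sensors.dataplex import DataplexDataProfileJobStatusSensor

    with DAG("dataplex_profile_example", start_date=datetime(2024, 1, 1), schedule=None):
        wait_for_profile_scan = DataplexDataProfileJobStatusSensor(
            task_id="wait_for_profile_scan",
            project_id="my-project",
            region="us-central1",
            data_scan_id="my-profile-scan",
            job_id="{{ ti.xcom_pull(task_ids='run_profile_scan') }}",
            poke_interval=30,    # inherited BaseSensorOperator knob
            result_timeout=600,  # timeout exception after ten minutes of poking
        )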
@@ -107,3 +107,85 @@ class DataplexDataQualityJobTrigger(BaseTrigger):
     def _convert_to_dict(self, job: DataScanJob) -> dict:
         """Returns a representation of a DataScanJob instance as a dict."""
         return DataScanJob.to_dict(job)
+
+
+class DataplexDataProfileJobTrigger(BaseTrigger):
+    """
+    DataplexDataProfileJobTrigger runs on the trigger worker and waits for the job to reach the `SUCCEEDED` state.
+
+    :param job_id: Optional. The ID of a Dataplex job.
+    :param data_scan_id: Required. DataScan identifier.
+    :param project_id: Google Cloud Project where the job is running.
+    :param region: The ID of the Google Cloud region that the job belongs to.
+    :param gcp_conn_id: Optional, the connection ID used to connect to Google Cloud Platform.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with the first
+        account from the list granting this role to the originating account (templated).
+    :param polling_interval_seconds: Polling period in seconds to check for the status.
+    """
+
+    def __init__(
+        self,
+        job_id: str | None,
+        data_scan_id: str,
+        project_id: str | None,
+        region: str,
+        gcp_conn_id: str = "google_cloud_default",
+        polling_interval_seconds: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.job_id = job_id
+        self.data_scan_id = data_scan_id
+        self.project_id = project_id
+        self.region = region
+        self.gcp_conn_id = gcp_conn_id
+        self.polling_interval_seconds = polling_interval_seconds
+        self.impersonation_chain = impersonation_chain
+
+    def serialize(self):
+        return (
+            "airflow.providers.google.cloud.triggers.dataplex.DataplexDataProfileJobTrigger",
+            {
+                "job_id": self.job_id,
+                "data_scan_id": self.data_scan_id,
+                "project_id": self.project_id,
+                "region": self.region,
+                "gcp_conn_id": self.gcp_conn_id,
+                "impersonation_chain": self.impersonation_chain,
+                "polling_interval_seconds": self.polling_interval_seconds,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        hook = DataplexAsyncHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+        while True:
+            job = await hook.get_data_scan_job(
+                project_id=self.project_id,
+                region=self.region,
+                job_id=self.job_id,
+                data_scan_id=self.data_scan_id,
+            )
+            state = job.state
+            if state in (DataScanJob.State.FAILED, DataScanJob.State.SUCCEEDED, DataScanJob.State.CANCELLED):
+                break
+            self.log.info(
+                "Current state is: %s, sleeping for %s seconds.",
+                DataScanJob.State(state).name,
+                self.polling_interval_seconds,
+            )
+            await asyncio.sleep(self.polling_interval_seconds)
+        yield TriggerEvent({"job_id": self.job_id, "job_state": state, "job": self._convert_to_dict(job)})
+
+    def _convert_to_dict(self, job: DataScanJob) -> dict:
+        """Returns a representation of a DataScanJob instance as a dict."""
+        return DataScanJob.to_dict(job)
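
This trigger (added to triggers/dataplex.py) is the deferrable counterpart: an operator hands control to the triggerer with defer() and resumes when the single TriggerEvent carrying a terminal state arrives. A rough sketch of that pattern under assumed names; the operator class and its _start_scan_job() helper are illustrative, not the released operator code:

    # Illustrative deferral pattern only.
    from airflow.exceptions import AirflowException
    from airflow.models import BaseOperator
    from airflow.providers.google.cloud.triggers.dataplex import DataplexDataProfileJobTrigger
    from google.cloud.dataplex_v1 import DataScanJob


    class RunProfileScanSketchOperator(BaseOperator):
        def __init__(self, *, project_id, region, data_scan_id,
                     gcp_conn_id="google_cloud_default", **kwargs):
            super().__init__(**kwargs)
            self.project_id = project_id
            self.region = region
            self.data_scan_id = data_scan_id
            self.gcp_conn_id = gcp_conn_id

        def _start_scan_job(self) -> str:
            # Hypothetical: would submit the scan (e.g. via DataplexHook) and
            # return the resulting job ID.
            raise NotImplementedError

        def execute(self, context):
            self.defer(
                trigger=DataplexDataProfileJobTrigger(
                    job_id=self._start_scan_job(),
                    data_scan_id=self.data_scan_id,
                    project_id=self.project_id,
                    region=self.region,
                    gcp_conn_id=self.gcp_conn_id,
                    polling_interval_seconds=10,
                ),
                method_name="execute_complete",
            )

        def execute_complete(self, context, event):
            # One event arrives with job_id, job_state, and the serialized job.
            if event["job_state"] != DataScanJob.State.SUCCEEDED:
                raise AirflowException(f"Scan job {event['job_id']} finished as {event['job_state']}")
            return event["job"]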
@@ -188,8 +188,8 @@ class GoogleBaseHook(BaseHook):
     conn_type = "google_cloud_platform"
     hook_name = "Google Cloud"
 
-    @staticmethod
-    def get_connection_form_widgets() -> dict[str, Any]:
+    @classmethod
+    def get_connection_form_widgets(cls) -> dict[str, Any]:
         """Returns connection widgets to add to connection form."""
         from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget
         from flask_babel import lazy_gettext
@@ -221,8 +221,8 @@ class GoogleBaseHook(BaseHook):
             ),
         }
 
-    @staticmethod
-    def get_ui_field_behaviour() -> dict[str, Any]:
+    @classmethod
+    def get_ui_field_behaviour(cls) -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
             "hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
@@ -94,17 +94,17 @@ def get_provider_info():
             "google-auth>=1.0.0",
             "google-auth-httplib2>=0.0.1",
             "google-cloud-aiplatform>=1.22.1",
-            "google-cloud-automl>=2.11.0",
-            "google-cloud-bigquery-datatransfer>=3.11.0",
+            "google-cloud-automl>=2.12.0",
+            "google-cloud-bigquery-datatransfer>=3.13.0",
             "google-cloud-bigtable>=2.17.0",
-            "google-cloud-build>=3.13.0",
+            "google-cloud-build>=3.22.0",
             "google-cloud-compute>=1.10.0",
             "google-cloud-container>=2.17.4",
             "google-cloud-datacatalog>=3.11.1",
-            "google-cloud-dataflow-client>=0.8.2",
+            "google-cloud-dataflow-client>=0.8.6",
             "google-cloud-dataform>=0.5.0",
-            "google-cloud-dataplex>=1.4.2",
-            "google-cloud-dataproc>=5.5.0",
+            "google-cloud-dataplex>=1.10.0",
+            "google-cloud-dataproc>=5.8.0",
             "google-cloud-dataproc-metastore>=1.12.0",
             "google-cloud-dlp>=3.12.0",
             "google-cloud-kms>=2.15.0",
@@ -112,7 +112,7 @@ def get_provider_info():
             "google-cloud-logging>=3.5.0",
             "google-cloud-memcache>=1.7.0",
             "google-cloud-monitoring>=2.14.1",
-            "google-cloud-orchestration-airflow>=1.7.0",
+            "google-cloud-orchestration-airflow>=1.10.0",
             "google-cloud-os-login>=2.9.1",
             "google-cloud-pubsub>=2.15.0",
             "google-cloud-redis>=2.12.0",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.13.0rc1
+Version: 10.13.0rc2
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -34,26 +34,26 @@ Requires-Dist: google-api-python-client>=1.6.0
 Requires-Dist: google-auth-httplib2>=0.0.1
 Requires-Dist: google-auth>=1.0.0
 Requires-Dist: google-cloud-aiplatform>=1.22.1
-Requires-Dist: google-cloud-automl>=2.11.0
+Requires-Dist: google-cloud-automl>=2.12.0
 Requires-Dist: google-cloud-batch>=0.13.0
-Requires-Dist: google-cloud-bigquery-datatransfer>=3.11.0
+Requires-Dist: google-cloud-bigquery-datatransfer>=3.13.0
 Requires-Dist: google-cloud-bigtable>=2.17.0
-Requires-Dist: google-cloud-build>=3.13.0
+Requires-Dist: google-cloud-build>=3.22.0
 Requires-Dist: google-cloud-compute>=1.10.0
 Requires-Dist: google-cloud-container>=2.17.4
 Requires-Dist: google-cloud-datacatalog>=3.11.1
-Requires-Dist: google-cloud-dataflow-client>=0.8.2
+Requires-Dist: google-cloud-dataflow-client>=0.8.6
 Requires-Dist: google-cloud-dataform>=0.5.0
-Requires-Dist: google-cloud-dataplex>=1.4.2
+Requires-Dist: google-cloud-dataplex>=1.10.0
 Requires-Dist: google-cloud-dataproc-metastore>=1.12.0
-Requires-Dist: google-cloud-dataproc>=5.5.0
+Requires-Dist: google-cloud-dataproc>=5.8.0
 Requires-Dist: google-cloud-dlp>=3.12.0
 Requires-Dist: google-cloud-kms>=2.15.0
 Requires-Dist: google-cloud-language>=2.9.0
 Requires-Dist: google-cloud-logging>=3.5.0
 Requires-Dist: google-cloud-memcache>=1.7.0
 Requires-Dist: google-cloud-monitoring>=2.14.1
-Requires-Dist: google-cloud-orchestration-airflow>=1.7.0
+Requires-Dist: google-cloud-orchestration-airflow>=1.10.0
 Requires-Dist: google-cloud-os-login>=2.9.1
 Requires-Dist: google-cloud-pubsub>=2.15.0
 Requires-Dist: google-cloud-redis>=2.12.0
@@ -167,7 +167,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``10.13.0.rc1``
+Release: ``10.13.0.rc2``
 
 
 Google services including:
@@ -217,17 +217,17 @@ PIP package Version required
 ``google-auth``                           ``>=1.0.0``
 ``google-auth-httplib2``                  ``>=0.0.1``
 ``google-cloud-aiplatform``               ``>=1.22.1``
-``google-cloud-automl``                   ``>=2.11.0``
-``google-cloud-bigquery-datatransfer``    ``>=3.11.0``
+``google-cloud-automl``                   ``>=2.12.0``
+``google-cloud-bigquery-datatransfer``    ``>=3.13.0``
 ``google-cloud-bigtable``                 ``>=2.17.0``
-``google-cloud-build``                    ``>=3.13.0``
+``google-cloud-build``                    ``>=3.22.0``
 ``google-cloud-compute``                  ``>=1.10.0``
 ``google-cloud-container``                ``>=2.17.4``
 ``google-cloud-datacatalog``              ``>=3.11.1``
-``google-cloud-dataflow-client``          ``>=0.8.2``
+``google-cloud-dataflow-client``          ``>=0.8.6``
 ``google-cloud-dataform``                 ``>=0.5.0``
-``google-cloud-dataplex``                 ``>=1.4.2``
-``google-cloud-dataproc``                 ``>=5.5.0``
+``google-cloud-dataplex``                 ``>=1.10.0``
+``google-cloud-dataproc``                 ``>=5.8.0``
 ``google-cloud-dataproc-metastore``       ``>=1.12.0``
 ``google-cloud-dlp``                      ``>=3.12.0``
 ``google-cloud-kms``                      ``>=2.15.0``
@@ -235,7 +235,7 @@ PIP package Version required
 ``google-cloud-logging``                  ``>=3.5.0``
 ``google-cloud-memcache``                 ``>=1.7.0``
 ``google-cloud-monitoring``               ``>=2.14.1``
-``google-cloud-orchestration-airflow``    ``>=1.7.0``
+``google-cloud-orchestration-airflow``    ``>=1.10.0``
 ``google-cloud-os-login``                 ``>=2.9.1``
 ``google-cloud-pubsub``                   ``>=2.15.0``
 ``google-cloud-redis``                    ``>=2.12.0``
@@ -1,6 +1,6 @@
 airflow/providers/google/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
 airflow/providers/google/__init__.py,sha256=zt7cqQtUdmjHKOKx5CUkUxWSJCPQPApG_t8_w5zEa48,1583
-airflow/providers/google/get_provider_info.py,sha256=WFUiZleU6OmR5Azc7L-IF8eTYqnSwUrrP3mcezlz-JI,79157
+airflow/providers/google/get_provider_info.py,sha256=-bLHdeQOdjeISfPfR0OKzUam4UzST1mI0G0Rq9F64hc,79159
 airflow/providers/google/go_module_utils.py,sha256=cPXfLr6Crk5FHGakGSWhR52HoqG1hFbfOi64pgm5MC4,1769
 airflow/providers/google/ads/.gitignore,sha256=z_qaKzblF2LuVvP-06iDord9JBeyzIlNeJ4bx3LbtGc,167
 airflow/providers/google/ads/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -21,29 +21,29 @@ airflow/providers/google/cloud/example_dags/example_looker.py,sha256=xwUk7qiLIc9
 airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py,sha256=S8FIJtTENPSMJgkA01Kh5q8iZXHxrqtYP3pfaTT2wYc,7363
 airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py,sha256=XquEuWAu3fhr030W3AZAkD0fMeN4ZB77QcQCip8GXS4,4892
 airflow/providers/google/cloud/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/fs/gcs.py,sha256=hbtBKT0yzoHu04eILnveReJvtCkZ4n82fH0cZdZDnHE,2333
+airflow/providers/google/cloud/fs/gcs.py,sha256=5mjLIaIVdbi828cuZiz_fgyWgvYZGDohI29kCVPsO7E,2479
 airflow/providers/google/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/hooks/automl.py,sha256=CDME_sTFOSUdMSBkK1vquDHQIlM3r1i3l-1rAJG0q-M,27567
 airflow/providers/google/cloud/hooks/bigquery.py,sha256=3kLj-nPNb6AI-N-ABWOXBC1DN16IKDIC_baHje94mgM,147753
-airflow/providers/google/cloud/hooks/bigquery_dts.py,sha256=rZIrK58s_W4-TLIR_5x0EYDwRZIAjyqCeWfKJ-WkGVQ,15173
+airflow/providers/google/cloud/hooks/bigquery_dts.py,sha256=1qJfCYjLNcZ9RCrIUAtLBpYnYlVPzh3mJz52kV6zcn0,15233
 airflow/providers/google/cloud/hooks/bigtable.py,sha256=CCzj-prJ94ZBOH9gRxmxpfpSEKeXFIF0i_VadZryTV0,12589
 airflow/providers/google/cloud/hooks/cloud_batch.py,sha256=e2-PUQJEWdy2LOpXExKXH_z_wGWsvvit8tXyJry1-R4,7810
-airflow/providers/google/cloud/hooks/cloud_build.py,sha256=YX9NWvsBzMcAOrt7ghreZ_eBSVemO8Q7qPd13GzV7Z4,28315
-airflow/providers/google/cloud/hooks/cloud_composer.py,sha256=zyOPjmT3voqDTVBpo8zQqks8G-v0tPt0t1onIT1oobo,18383
+airflow/providers/google/cloud/hooks/cloud_build.py,sha256=k_C6DSgXrB2jTv99y9XkUfb1d9ffta3G04_YmypcRLk,28375
+airflow/providers/google/cloud/hooks/cloud_composer.py,sha256=8lGjnoBuPJBWgZsmdItlhM46EeNeeds6Rzm54o98JhI,18453
 airflow/providers/google/cloud/hooks/cloud_memorystore.py,sha256=edDjQa69HDknbJT3UCmmsqkYQCI3emXPejlG9wPftQM,40451
 airflow/providers/google/cloud/hooks/cloud_run.py,sha256=LDl0z-mVMHhkKk8wO-2z6ZRnyaLpE_Zh2J4kuNI9f_g,7333
 airflow/providers/google/cloud/hooks/cloud_sql.py,sha256=CuvN9Pg5jYBUI8wWVtUg6izMWrYCAnOwK3-MrZ7KTbc,45976
 airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py,sha256=8I9RDR3tl8I7neSkVKQkkatJ-O1sF1G3UFzQ88YVmlc,21332
 airflow/providers/google/cloud/hooks/compute.py,sha256=vbI892tXq-56XVn8zMaG8wO7W76dthCamm4hqIgzsYs,40688
-airflow/providers/google/cloud/hooks/compute_ssh.py,sha256=iMxn2DmQjDVlI1vCAnQoHNZ3BDOK0ZipZCS8VcWa9Eg,15578
+airflow/providers/google/cloud/hooks/compute_ssh.py,sha256=WJjWcZ0HbRvxwqKrQFH7egIbbLsVlR7exjWYqwi5RqY,15594
 airflow/providers/google/cloud/hooks/datacatalog.py,sha256=J1iKUVM9JK2AE4mYvQrGXMewMYvD4aBYL1RpqyNb4ZA,54315
-airflow/providers/google/cloud/hooks/dataflow.py,sha256=3kOKbwdC9miOmrJx64Sk7QoHEkq8am2QL1fTlC-9-jI,51013
+airflow/providers/google/cloud/hooks/dataflow.py,sha256=9AloOtTDSFTCb2jWDi7GDbZFMv7OIzK1xLEkNLdLAJk,51014
 airflow/providers/google/cloud/hooks/dataform.py,sha256=jmDFP0EVDpywHW6I3PV7VMXJ3-0LbsHnuAa5akOwP3w,25399
 airflow/providers/google/cloud/hooks/datafusion.py,sha256=h55UxH6bcfwpN9KknsYiR0ilXarGQzVl998Q48XFT6o,26187
 airflow/providers/google/cloud/hooks/datapipeline.py,sha256=ocsJFoW6LGvisRDHW3mRrkfh1S8tsgZwMiU8AVwpHDM,4363
-airflow/providers/google/cloud/hooks/dataplex.py,sha256=mS3mdd3fHXQdQD689LWApw17ZxI_W_knM9ac0XPE4gA,38261
+airflow/providers/google/cloud/hooks/dataplex.py,sha256=PJJyN0gc_h3NA-C_Pfzl-iNbi9EbFLnRyNdP6UMgAm0,38321
 airflow/providers/google/cloud/hooks/dataprep.py,sha256=tGLfhtTDnGKKhke99LQv2moUllrP0Sv5UKJS4huUByw,12194
-airflow/providers/google/cloud/hooks/dataproc.py,sha256=dCLDqK4CzPKmBbnfE5ji5LaGJS38La8oZlhSNZvmedc,77416
+airflow/providers/google/cloud/hooks/dataproc.py,sha256=VgJ-CJ9ZkBsktlf7UueoVbHJ5NFKNrR1kVDQ20dzWwk,77561
 airflow/providers/google/cloud/hooks/dataproc_metastore.py,sha256=bj_bfoHwugvpdTUzR574YjDNnnMRhpeuM_MAdYb8pKw,32164
 airflow/providers/google/cloud/hooks/datastore.py,sha256=IZ_-MPuaDEtBcTxAPgw1BUKxgAlNinaHtWRp0H1C5_Y,12161
 airflow/providers/google/cloud/hooks/dlp.py,sha256=-TlI7TXxNEtLzWsC9luMXuaMn6VSTlzzjJPgA7UnlqY,67512
@@ -120,7 +120,7 @@ airflow/providers/google/cloud/operators/cloud_batch.py,sha256=TAfXtWgGC4M7Zzx7r
 airflow/providers/google/cloud/operators/cloud_build.py,sha256=OjfhJt2hJ7asVAXyypWQIAZ10qBLUQeFZRqPKdXWA4I,48324
 airflow/providers/google/cloud/operators/cloud_composer.py,sha256=pESBOSOpmXhWSYTAAR1CPRguF5zCA9onV4E6q-HsKAI,26695
 airflow/providers/google/cloud/operators/cloud_memorystore.py,sha256=ktY_Vq8F16YQAHkJUGfc_mf3zWWjBr0Ka5ztw1JcF6s,70795
-airflow/providers/google/cloud/operators/cloud_run.py,sha256=6X2w4O-De0P-GDrkAAQSyg5Xe-1FJdOnHK5ELQIOxzc,15537
+airflow/providers/google/cloud/operators/cloud_run.py,sha256=z8GcHajBO4ZziZZjF8ORLPiaMKuaYlD9tFwc8r_mSGM,15550
 airflow/providers/google/cloud/operators/cloud_sql.py,sha256=-nt3lDTjd5AIcniJIyVPedRm07R-I6k4xIjPZvPfRu0,51450
 airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py,sha256=5Hmvq_hpzTXIs0EhQe193LoQfo5MueAMN7mIJeHsbPo,44536
 airflow/providers/google/cloud/operators/compute.py,sha256=TJd300d0FENZ0pbLE8aPsWgx0-e1WIDRvWt1qtya3eU,74520
@@ -129,9 +129,9 @@ airflow/providers/google/cloud/operators/dataflow.py,sha256=dniTIQSRLMncke8R0CXt
 airflow/providers/google/cloud/operators/dataform.py,sha256=SQPALFU68WYndvyKlwabh_VnZyDG86FY8rBBIaH6hC8,45171
 airflow/providers/google/cloud/operators/datafusion.py,sha256=EZj30M96vIcb_mKFKwD44uuSh9k5yw7cJSGh_QxSBVw,41084
 airflow/providers/google/cloud/operators/datapipeline.py,sha256=iWRnkVK4McQ2NnA07VFNBh-ijRi80ndfvhuaury4Sgg,6382
-airflow/providers/google/cloud/operators/dataplex.py,sha256=XhhrJdH5afzldyU-m3vN3K0DvdRmvKjhVM4XBcgy-ZI,67868
+airflow/providers/google/cloud/operators/dataplex.py,sha256=f914eQGsQ_eXunkxYLjoMFEJSjhMpZZQya2FIzCzmo4,91149
 airflow/providers/google/cloud/operators/dataprep.py,sha256=qwGcpHE06Hgn70MO6LiMuaqVLSWjLAtHqD8Y0TTHGok,10367
-airflow/providers/google/cloud/operators/dataproc.py,sha256=i1BkjfdEOnsoj3SsAeOSTJQiTKMVGSbaSngC3tSB8q4,130903
+airflow/providers/google/cloud/operators/dataproc.py,sha256=VZ2FvEHOk_BbnsFI6s3klJ0kf5gcwaVdIbjzY_DUPWg,130983
 airflow/providers/google/cloud/operators/dataproc_metastore.py,sha256=1VH3xnipEJoZs14e39bv9Cv6JcQMyqfLC4mH7totxaY,49531
 airflow/providers/google/cloud/operators/datastore.py,sha256=2ioItQMOgxIFGE8tu07CdG-SK6BE6dUo9KpBQ-gnROs,24796
 airflow/providers/google/cloud/operators/dlp.py,sha256=4yVPfDnWaQCxs_dxM31thgvqh32__MmS3_-LB-6OGzk,120451
@@ -173,7 +173,7 @@ airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py,sha256=
 airflow/providers/google/cloud/sensors/dataflow.py,sha256=uDsflUJJyaEI0oxqanGSOQSZ2KYHonAUYj9LFcXAoXE,15157
 airflow/providers/google/cloud/sensors/dataform.py,sha256=iybEjgc4PDSt_ZrfpiXRJGXLwy5jF8zbantqZa6qP8Y,4963
 airflow/providers/google/cloud/sensors/datafusion.py,sha256=aQcu8IgVdChgjGMDNwLpwFLDa8UUw1izTs_TumO9r6Q,5831
-airflow/providers/google/cloud/sensors/dataplex.py,sha256=DZ8PTYGhcARAzMWd42CV3icQone7F5mIJbhfDsi8x18,11384
+airflow/providers/google/cloud/sensors/dataplex.py,sha256=8zRDj9yrd0wqa4to_bo37Of4NE9eUfpu9q7sevV7Qcg,16773
 airflow/providers/google/cloud/sensors/dataprep.py,sha256=I3rhUxtYF0bPs7AKjZI4W52cbsjpMntZnHUCOuXT2pI,1912
 airflow/providers/google/cloud/sensors/dataproc.py,sha256=45VUMqI7FCCQUsF-Q8JtCkWa8tBz-nKv8hAjxcgrJOY,8279
 airflow/providers/google/cloud/sensors/dataproc_metastore.py,sha256=KQtbllJSP4icZnhjFIm8lVOuwTTs0wy9C6aBrVBGkV8,5560
@@ -224,7 +224,7 @@ airflow/providers/google/cloud/triggers/cloud_sql.py,sha256=uKgl_J8pS-2rVTz9IxGm
 airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py,sha256=KeCMraYc3qpYInNv1pP5MRJkWr6Cf-3VqPIZnj0pQz4,5198
 airflow/providers/google/cloud/triggers/dataflow.py,sha256=fgY7bL56CduKRs0AJF13V5kV5vijz13ytmSsxgmYYFs,6098
 airflow/providers/google/cloud/triggers/datafusion.py,sha256=EkA6d71LJWvuqDX1USOPjRhsfj9Wj40c8PllBzv3WpI,5987
-airflow/providers/google/cloud/triggers/dataplex.py,sha256=kCqycaXzfKnDbZsnLbGhXbU3T4zcUB5_DhFiasi324U,4731
+airflow/providers/google/cloud/triggers/dataplex.py,sha256=IPrtmX7NGkcdsnzASwyWv2dVhJMeDJ9WhVkj6Of7Goo,8335
 airflow/providers/google/cloud/triggers/dataproc.py,sha256=o7jrkiqMSgPN45KqCB0tZBBjHpJUXkqMUlCfa17Xdbw,14985
 airflow/providers/google/cloud/triggers/gcs.py,sha256=Eh7zhHxXt3WVqlnur-FM1EMfz6s06QSx_pftqdwqThc,18486
 airflow/providers/google/cloud/triggers/kubernetes_engine.py,sha256=fyADod56R4TH7e4fHehh-F8u31BfIXEszOc2rrBZYxI,9470
@@ -247,7 +247,7 @@ airflow/providers/google/common/consts.py,sha256=KKjQX0FO6HP4MuzS8DDfWPoQNk0mhdO
 airflow/providers/google/common/auth_backend/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/common/auth_backend/google_openid.py,sha256=R5VyaAPy9SslX7R55ao2vMCsCgu42heKrpeQDJ6Wheo,4517
 airflow/providers/google/common/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/common/hooks/base_google.py,sha256=T2RAyS9vQ8TBXxQNWqjDgHx4f9v3WzsNrS5c96SlH7Y,27070
+airflow/providers/google/common/hooks/base_google.py,sha256=vVqU74BFpalNzt0Z4C2rAKmS1RbGvWa_UPU-gAAzMzA,27074
 airflow/providers/google/common/hooks/discovery_api.py,sha256=gGqkPkIuY8M9eodvEbGrhyjg_RgNT4hDeJP8UQb09UA,6768
 airflow/providers/google/common/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/common/links/storage.py,sha256=vhbSUwzXDdU6-eGJb031YW1DA6ocTUFeI5_F4iL3h9U,2264
@@ -294,7 +294,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=7DIZVzdkzYfb94s
 airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=KFEjSKWPCTQU2pJSCZ9EuX_9Nl4VcZ3_csc1F95ZUBA,4193
 airflow/providers/google/suite/transfers/local_to_drive.py,sha256=2QdO0DPErLfpV00Oeg_WBZZ0GSNFpnt3jBMK3FPD6lA,6099
 airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=sORkYSUDArRPnvi8WCiXP7YIXtpAgpEPhf8cqgpu644,5220
-apache_airflow_providers_google-10.13.0rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
-apache_airflow_providers_google-10.13.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_google-10.13.0rc1.dist-info/METADATA,sha256=qg41zvtvyZOBnIHPbaJIYdHsB28aop0Ba_LFhypVpV8,15546
-apache_airflow_providers_google-10.13.0rc1.dist-info/RECORD,,
+apache_airflow_providers_google-10.13.0rc2.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+apache_airflow_providers_google-10.13.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_google-10.13.0rc2.dist-info/METADATA,sha256=X2j4V1Q8MV2ZDZgzInTDl3mV8LETkazPiKQly_Cn02Q,15550
+apache_airflow_providers_google-10.13.0rc2.dist-info/RECORD,,
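
Each RECORD row has the form path,sha256=<digest>,size, where the digest is the unpadded urlsafe base64 of the file's SHA-256; that is why every touched module above shows a new hash and, usually, a new size. A small verification sketch, assuming it runs from the root of the unpacked rc2 wheel:

    # Verifies RECORD hashes and sizes for an unpacked wheel.
    import base64
    import csv
    import hashlib
    from pathlib import Path

    RECORD = "apache_airflow_providers_google-10.13.0rc2.dist-info/RECORD"

    with open(RECORD, newline="") as f:
        for path, digest, size in csv.reader(f):
            if not digest:  # the RECORD file itself carries no self-hash
                continue
            data = Path(path).read_bytes()
            expected = digest.removeprefix("sha256=")
            actual = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
            assert actual == expected, f"hash mismatch for {path}"
            assert len(data) == int(size), f"size mismatch for {path}"
    print("all RECORD entries verified")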