apache-airflow-providers-google 15.0.1rc1__py3-none-any.whl → 15.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/cloud/hooks/bigquery.py +3 -5
- airflow/providers/google/cloud/hooks/cloud_batch.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_sql.py +34 -41
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +1 -1
- airflow/providers/google/cloud/hooks/compute_ssh.py +2 -3
- airflow/providers/google/cloud/hooks/dataflow.py +11 -6
- airflow/providers/google/cloud/hooks/datafusion.py +3 -4
- airflow/providers/google/cloud/hooks/dataprep.py +1 -1
- airflow/providers/google/cloud/hooks/dlp.py +1 -1
- airflow/providers/google/cloud/hooks/gcs.py +5 -6
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +1 -2
- airflow/providers/google/cloud/hooks/managed_kafka.py +1 -1
- airflow/providers/google/cloud/hooks/mlengine.py +4 -6
- airflow/providers/google/cloud/hooks/stackdriver.py +4 -6
- airflow/providers/google/cloud/hooks/vision.py +1 -2
- airflow/providers/google/cloud/openlineage/mixins.py +2 -3
- airflow/providers/google/cloud/operators/alloy_db.py +1 -2
- airflow/providers/google/cloud/operators/automl.py +5 -5
- airflow/providers/google/cloud/operators/bigquery.py +24 -26
- airflow/providers/google/cloud/operators/cloud_batch.py +13 -15
- airflow/providers/google/cloud/operators/cloud_build.py +1 -2
- airflow/providers/google/cloud/operators/cloud_composer.py +24 -28
- airflow/providers/google/cloud/operators/cloud_run.py +12 -13
- airflow/providers/google/cloud/operators/cloud_sql.py +42 -49
- airflow/providers/google/cloud/operators/compute.py +9 -10
- airflow/providers/google/cloud/operators/dataproc.py +23 -26
- airflow/providers/google/cloud/operators/functions.py +5 -7
- airflow/providers/google/cloud/operators/kubernetes_engine.py +1 -2
- airflow/providers/google/cloud/operators/spanner.py +29 -33
- airflow/providers/google/cloud/sensors/cloud_composer.py +3 -5
- airflow/providers/google/cloud/sensors/dataflow.py +1 -1
- airflow/providers/google/cloud/sensors/dataproc.py +5 -5
- airflow/providers/google/cloud/sensors/gcs.py +15 -16
- airflow/providers/google/cloud/sensors/looker.py +3 -3
- airflow/providers/google/cloud/sensors/pubsub.py +13 -14
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +7 -8
- airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +16 -20
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +1 -2
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +14 -16
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +2 -3
- airflow/providers/google/cloud/utils/bigquery.py +5 -7
- airflow/providers/google/cloud/utils/dataform.py +1 -1
- airflow/providers/google/common/hooks/base_google.py +1 -1
- airflow/providers/google/common/hooks/operation_helpers.py +1 -2
- airflow/providers/google/common/utils/id_token_credentials.py +1 -1
- airflow/providers/google/leveldb/hooks/leveldb.py +4 -5
- {apache_airflow_providers_google-15.0.1rc1.dist-info → apache_airflow_providers_google-15.1.0.dist-info}/METADATA +9 -9
- {apache_airflow_providers_google-15.0.1rc1.dist-info → apache_airflow_providers_google-15.1.0.dist-info}/RECORD +51 -51
- {apache_airflow_providers_google-15.0.1rc1.dist-info → apache_airflow_providers_google-15.1.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-15.0.1rc1.dist-info → apache_airflow_providers_google-15.1.0.dist-info}/entry_points.txt +0 -0
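Nearly every hunk below is the same mechanical cleanup: else/elif branches that follow a return or raise are flattened and their bodies dedented (the pattern linters such as ruff report as RET505/RET506). A minimal before/after sketch, not taken from the provider code:

def job_message_before(state: str) -> str:
    if state == "DONE":
        return "succeeded"
    elif state == "ERROR":  # redundant branch keyword: the branch above already returned
        raise RuntimeError("job failed")
    else:
        return "still running"

def job_message_after(state: str) -> str:
    if state == "DONE":
        return "succeeded"
    if state == "ERROR":
        raise RuntimeError("job failed")
    return "still running"

Behavior is identical; the flattened form removes one indentation level and makes the early exits explicit.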
airflow/providers/google/cloud/operators/cloud_composer.py

@@ -186,18 +186,17 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
             if not self.deferrable:
                 environment = hook.wait_for_operation(timeout=self.timeout, operation=result)
                 return Environment.to_dict(environment)
-            else:
-                self.defer(
-                    trigger=CloudComposerExecutionTrigger(
-                        project_id=self.project_id,
-                        region=self.region,
-                        operation_name=result.operation.name,
-                        gcp_conn_id=self.gcp_conn_id,
-                        impersonation_chain=self.impersonation_chain,
-                        pooling_period_seconds=self.pooling_period_seconds,
-                    ),
-                    method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
-                )
+            self.defer(
+                trigger=CloudComposerExecutionTrigger(
+                    project_id=self.project_id,
+                    region=self.region,
+                    operation_name=result.operation.name,
+                    gcp_conn_id=self.gcp_conn_id,
+                    impersonation_chain=self.impersonation_chain,
+                    pooling_period_seconds=self.pooling_period_seconds,
+                ),
+                method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
+            )
         except AlreadyExists:
             environment = hook.get_environment(
                 project_id=self.project_id,
@@ -225,8 +224,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
                 metadata=self.metadata,
             )
             return Environment.to_dict(env)
-        else:
-            raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")
+        raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")


 class CloudComposerDeleteEnvironmentOperator(GoogleCloudBaseOperator):
@@ -555,18 +553,17 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
         if not self.deferrable:
             environment = hook.wait_for_operation(timeout=self.timeout, operation=result)
             return Environment.to_dict(environment)
-        else:
-            self.defer(
-                trigger=CloudComposerExecutionTrigger(
-                    project_id=self.project_id,
-                    region=self.region,
-                    operation_name=result.operation.name,
-                    gcp_conn_id=self.gcp_conn_id,
-                    impersonation_chain=self.impersonation_chain,
-                    pooling_period_seconds=self.pooling_period_seconds,
-                ),
-                method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
-            )
+        self.defer(
+            trigger=CloudComposerExecutionTrigger(
+                project_id=self.project_id,
+                region=self.region,
+                operation_name=result.operation.name,
+                gcp_conn_id=self.gcp_conn_id,
+                impersonation_chain=self.impersonation_chain,
+                pooling_period_seconds=self.pooling_period_seconds,
+            ),
+            method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
+        )

     def execute_complete(self, context: Context, event: dict):
         if event["operation_done"]:
@@ -584,8 +581,7 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
                 metadata=self.metadata,
             )
             return Environment.to_dict(env)
-        else:
-            raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")
+        raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")


 class CloudComposerListImageVersionsOperator(GoogleCloudBaseOperator):
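The Composer hunks above all route the deferrable path through self.defer(), which suspends the task and hands polling to a trigger running in the triggerer process; the worker later resumes in the method named by method_name. A schematic sketch of that handoff, assuming nothing beyond the public Airflow trigger API (BaseTrigger, TriggerEvent, BaseOperator.defer); the Example* names and the fixed operation name are hypothetical:

import asyncio

from airflow.exceptions import AirflowException
from airflow.models.baseoperator import BaseOperator
from airflow.triggers.base import BaseTrigger, TriggerEvent


class ExampleOperationTrigger(BaseTrigger):
    def __init__(self, operation_name: str, pooling_period_seconds: int = 30):
        super().__init__()
        self.operation_name = operation_name
        self.pooling_period_seconds = pooling_period_seconds

    def serialize(self):
        # The triggerer re-creates the trigger from this (classpath, kwargs) pair.
        return (
            f"{type(self).__module__}.{type(self).__name__}",
            {
                "operation_name": self.operation_name,
                "pooling_period_seconds": self.pooling_period_seconds,
            },
        )

    async def run(self):
        # Placeholder poll loop; a real trigger would call the cloud API here.
        await asyncio.sleep(self.pooling_period_seconds)
        yield TriggerEvent({"operation_done": True, "operation_name": self.operation_name})


class ExampleDeferringOperator(BaseOperator):
    def execute(self, context):
        operation_name = "example-operation"  # would come from the service call
        self.defer(
            trigger=ExampleOperationTrigger(operation_name=operation_name),
            method_name="execute_complete",
        )

    def execute_complete(self, context, event: dict):
        # Mirrors the pattern in the hunks: succeed on a done event, otherwise raise.
        if event["operation_done"]:
            return event["operation_name"]
        raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")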
airflow/providers/google/cloud/operators/cloud_run.py

@@ -308,19 +308,18 @@ class CloudRunExecuteJobOperator(GoogleCloudBaseOperator):
             self._fail_if_execution_failed(result)
             job = hook.get_job(job_name=result.job, region=self.region, project_id=self.project_id)
             return Job.to_dict(job)
-        else:
-            self.defer(
-                trigger=CloudRunJobFinishedTrigger(
-                    operation_name=self.operation.operation.name,
-                    job_name=self.job_name,
-                    project_id=self.project_id,
-                    location=self.region,
-                    gcp_conn_id=self.gcp_conn_id,
-                    impersonation_chain=self.impersonation_chain,
-                    polling_period_seconds=self.polling_period_seconds,
-                ),
-                method_name="execute_complete",
-            )
+        self.defer(
+            trigger=CloudRunJobFinishedTrigger(
+                operation_name=self.operation.operation.name,
+                job_name=self.job_name,
+                project_id=self.project_id,
+                location=self.region,
+                gcp_conn_id=self.gcp_conn_id,
+                impersonation_chain=self.impersonation_chain,
+                polling_period_seconds=self.polling_period_seconds,
+            ),
+            method_name="execute_complete",
+        )

     def execute_complete(self, context: Context, event: dict):
         status = event["status"]
airflow/providers/google/cloud/operators/cloud_sql.py

@@ -477,15 +477,14 @@ class CloudSQLInstancePatchOperator(CloudSQLBaseOperator):
                 f"Cloud SQL instance with ID {self.instance} does not exist. "
                 "Please specify another instance to patch."
             )
-        else:
-            CloudSQLInstanceLink.persist(
-                context=context,
-                task_instance=self,
-                cloud_sql_instance=self.instance,
-                project_id=self.project_id or hook.project_id,
-            )
+        CloudSQLInstanceLink.persist(
+            context=context,
+            task_instance=self,
+            cloud_sql_instance=self.instance,
+            project_id=self.project_id or hook.project_id,
+        )

-            return hook.patch_instance(project_id=self.project_id, body=self.body, instance=self.instance)
+        return hook.patch_instance(project_id=self.project_id, body=self.body, instance=self.instance)


 class CloudSQLDeleteInstanceOperator(CloudSQLBaseOperator):
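The project_id=self.project_id or hook.project_id argument seen here (and again in the Compute and Spanner hunks below) falls back to the project configured on the GCP connection when the operator was built without an explicit project. A hypothetical helper, not part of the provider, just to spell out the idiom:

def resolve_project_id(explicit_project_id: str | None, hook_project_id: str | None) -> str:
    """Prefer the task-level project_id, then the connection default."""
    project_id = explicit_project_id or hook_project_id
    if not project_id:
        raise ValueError("project_id must be set on the task or on the GCP connection")
    return project_id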
@@ -531,8 +530,7 @@ class CloudSQLDeleteInstanceOperator(CloudSQLBaseOperator):
         if not self._check_if_instance_exists(self.instance, hook):
             print(f"Cloud SQL instance with ID {self.instance} does not exist. Aborting delete.")
             return True
-        else:
-            return hook.delete_instance(project_id=self.project_id, instance=self.instance)
+        return hook.delete_instance(project_id=self.project_id, instance=self.instance)


 class CloudSQLCloneInstanceOperator(CloudSQLBaseOperator):
@@ -612,19 +610,18 @@ class CloudSQLCloneInstanceOperator(CloudSQLBaseOperator):
                 f"Cloud SQL instance with ID {self.instance} does not exist. "
                 "Please specify another instance to patch."
             )
-        else:
-            body = {
-                "cloneContext": {
-                    "kind": "sql#cloneContext",
-                    "destinationInstanceName": self.destination_instance_name,
-                    **self.clone_context,
-                }
+        body = {
+            "cloneContext": {
+                "kind": "sql#cloneContext",
+                "destinationInstanceName": self.destination_instance_name,
+                **self.clone_context,
             }
-            return hook.clone_instance(
-                project_id=self.project_id,
-                body=body,
-                instance=self.instance,
-            )
+        }
+        return hook.clone_instance(
+            project_id=self.project_id,
+            body=body,
+            instance=self.instance,
+        )


 class CloudSQLCreateInstanceDatabaseOperator(CloudSQLBaseOperator):
@@ -728,8 +725,7 @@ class CloudSQLCreateInstanceDatabaseOperator(CloudSQLBaseOperator):
                 database,
             )
             return True
-        else:
-            return hook.create_database(project_id=self.project_id, instance=self.instance, body=self.body)
+        return hook.create_database(project_id=self.project_id, instance=self.instance, body=self.body)


 class CloudSQLPatchInstanceDatabaseOperator(CloudSQLBaseOperator):
@@ -824,16 +820,15 @@ class CloudSQLPatchInstanceDatabaseOperator(CloudSQLBaseOperator):
                 f"Cloud SQL instance with ID {self.instance} does not contain database '{self.database}'. "
                 "Please specify another database to patch."
             )
-        else:
-            CloudSQLInstanceDatabaseLink.persist(
-                context=context,
-                task_instance=self,
-                cloud_sql_instance=self.instance,
-                project_id=self.project_id or hook.project_id,
-            )
-            return hook.patch_database(
-                project_id=self.project_id, instance=self.instance, database=self.database, body=self.body
-            )
+        CloudSQLInstanceDatabaseLink.persist(
+            context=context,
+            task_instance=self,
+            cloud_sql_instance=self.instance,
+            project_id=self.project_id or hook.project_id,
+        )
+        return hook.patch_database(
+            project_id=self.project_id, instance=self.instance, database=self.database, body=self.body
+        )


 class CloudSQLDeleteInstanceDatabaseOperator(CloudSQLBaseOperator):
@@ -910,10 +905,9 @@ class CloudSQLDeleteInstanceDatabaseOperator(CloudSQLBaseOperator):
                 f"Aborting database delete."
             )
             return True
-        else:
-            return hook.delete_database(
-                project_id=self.project_id, instance=self.instance, database=self.database
-            )
+        return hook.delete_database(
+            project_id=self.project_id, instance=self.instance, database=self.database
+        )


 class CloudSQLExportInstanceOperator(CloudSQLBaseOperator):
@@ -1029,17 +1023,16 @@ class CloudSQLExportInstanceOperator(CloudSQLBaseOperator):
             return hook._wait_for_operation_to_complete(
                 project_id=self.project_id, operation_name=operation_name
             )
-        else:
-            self.defer(
-                trigger=CloudSQLExportTrigger(
-                    operation_name=operation_name,
-                    project_id=self.project_id or hook.project_id,
-                    gcp_conn_id=self.gcp_conn_id,
-                    impersonation_chain=self.impersonation_chain,
-                    poke_interval=self.poke_interval,
-                ),
-                method_name="execute_complete",
-            )
+        self.defer(
+            trigger=CloudSQLExportTrigger(
+                operation_name=operation_name,
+                project_id=self.project_id or hook.project_id,
+                gcp_conn_id=self.gcp_conn_id,
+                impersonation_chain=self.impersonation_chain,
+                poke_interval=self.poke_interval,
+            ),
+            method_name="execute_complete",
+        )

     def execute_complete(self, context, event=None) -> None:
         """
airflow/providers/google/cloud/operators/compute.py

@@ -1402,16 +1402,15 @@ class ComputeEngineInstanceGroupUpdateManagerTemplateOperator(ComputeEngineBaseOperator):
                 request_id=self.request_id,
                 project_id=self.project_id,
             )
-        else:
-            # Idempotence achieved
-            ComputeInstanceGroupManagerDetailsLink.persist(
-                context=context,
-                task_instance=self,
-                location_id=self.zone,
-                resource_id=self.resource_id,
-                project_id=self.project_id or hook.project_id,
-            )
-            return True
+        # Idempotence achieved
+        ComputeInstanceGroupManagerDetailsLink.persist(
+            context=context,
+            task_instance=self,
+            location_id=self.zone,
+            resource_id=self.resource_id,
+            project_id=self.project_id or hook.project_id,
+        )
+        return True


 class ComputeEngineInsertInstanceGroupManagerOperator(ComputeEngineBaseOperator):
airflow/providers/google/cloud/operators/dataproc.py

@@ -340,7 +340,7 @@ class ClusterGenerator:
         unit = match.group(2)
         if unit == "s":
             return {"seconds": val}
-        elif unit == "m":
+        if unit == "m":
             return {"seconds": int(timedelta(minutes=val).total_seconds())}

         raise AirflowException(
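The ClusterGenerator hunk touches the helper that converts duration strings into the {"seconds": ...} mapping the Dataproc API expects. A self-contained sketch of that conversion, assuming a simple "<int>s"/"<int>m" format (the provider's actual regex and error text may differ):

import re
from datetime import timedelta

_DURATION_PATTERN = re.compile(r"^(\d+)([sm])$")


def duration_to_seconds_dict(value: str) -> dict[str, int]:
    match = _DURATION_PATTERN.match(value)
    if not match:
        raise ValueError(f"Unsupported duration format: {value!r}")
    val = int(match.group(1))
    unit = match.group(2)
    if unit == "s":
        return {"seconds": val}
    if unit == "m":
        return {"seconds": int(timedelta(minutes=val).total_seconds())}
    raise ValueError(f"Unsupported duration unit: {unit!r}")


# duration_to_seconds_dict("15m") -> {"seconds": 900}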
@@ -822,26 +822,24 @@ class DataprocCreateClusterOperator(GoogleCloudBaseOperator):
                 )
                 self.log.info("Cluster created.")
                 return Cluster.to_dict(cluster)
-            else:
-                cluster = hook.get_cluster(
-                    project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
-                )
-                if cluster.status.state == cluster.status.State.RUNNING:
-                    self.log.info("Cluster created.")
-                    return Cluster.to_dict(cluster)
-                else:
-                    self.defer(
-                        trigger=DataprocClusterTrigger(
-                            cluster_name=self.cluster_name,
-                            project_id=self.project_id,
-                            region=self.region,
-                            gcp_conn_id=self.gcp_conn_id,
-                            impersonation_chain=self.impersonation_chain,
-                            polling_interval_seconds=self.polling_interval_seconds,
-                            delete_on_error=self.delete_on_error,
-                        ),
-                        method_name="execute_complete",
-                    )
+            cluster = hook.get_cluster(
+                project_id=self.project_id, region=self.region, cluster_name=self.cluster_name
+            )
+            if cluster.status.state == cluster.status.State.RUNNING:
+                self.log.info("Cluster created.")
+                return Cluster.to_dict(cluster)
+            self.defer(
+                trigger=DataprocClusterTrigger(
+                    cluster_name=self.cluster_name,
+                    project_id=self.project_id,
+                    region=self.region,
+                    gcp_conn_id=self.gcp_conn_id,
+                    impersonation_chain=self.impersonation_chain,
+                    polling_interval_seconds=self.polling_interval_seconds,
+                    delete_on_error=self.delete_on_error,
+                ),
+                method_name="execute_complete",
+            )
         except AlreadyExists:
             if not self.use_if_exists:
                 raise
@@ -1022,7 +1020,7 @@ class DataprocDeleteClusterOperator(GoogleCloudBaseOperator):
         """
         if event and event["status"] == "error":
             raise AirflowException(event["message"])
-        elif event is None:
+        if event is None:
             raise AirflowException("No event received in trigger callback")
         self.log.info("Cluster deleted.")

@@ -1377,8 +1375,7 @@ class DataprocJobBaseOperator(GoogleCloudBaseOperator):
             self.hook.wait_for_job(job_id=job_id, region=self.region, project_id=self.project_id)
             self.log.info("Job %s completed successfully.", job_id)
             return job_id
-        else:
-            raise AirflowException("Create a job template before")
+        raise AirflowException("Create a job template before")

     def execute_complete(self, context, event=None) -> None:
         """
@@ -1916,9 +1913,9 @@ class DataprocSubmitJobOperator(GoogleCloudBaseOperator):
             state = job.status.state
             if state == JobStatus.State.DONE:
                 return self.job_id
-            elif state == JobStatus.State.ERROR:
+            if state == JobStatus.State.ERROR:
                 raise AirflowException(f"Job failed:\n{job}")
-            elif state == JobStatus.State.CANCELLED:
+            if state == JobStatus.State.CANCELLED:
                 raise AirflowException(f"Job was cancelled:\n{job}")
             self.defer(
                 trigger=DataprocSubmitTrigger(
airflow/providers/google/cloud/operators/functions.py

@@ -379,10 +379,9 @@ class CloudFunctionDeleteFunctionOperator(GoogleCloudBaseOperator):
     def _validate_inputs(self) -> None:
         if not self.name:
             raise AttributeError("Empty parameter: name")
-        else:
-            pattern = FUNCTION_NAME_COMPILED_PATTERN
-            if not pattern.match(self.name):
-                raise AttributeError(f"Parameter name must match pattern: {FUNCTION_NAME_PATTERN}")
+        pattern = FUNCTION_NAME_COMPILED_PATTERN
+        if not pattern.match(self.name):
+            raise AttributeError(f"Parameter name must match pattern: {FUNCTION_NAME_PATTERN}")

     def execute(self, context: Context):
         hook = CloudFunctionsHook(
@@ -404,9 +403,8 @@ class CloudFunctionDeleteFunctionOperator(GoogleCloudBaseOperator):
             if status == 404:
                 self.log.info("The function does not exist in this project")
                 return None
-            else:
-                self.log.error("An error occurred. Exiting.")
-                raise e
+            self.log.error("An error occurred. Exiting.")
+            raise e


 class CloudFunctionInvokeFunctionOperator(GoogleCloudBaseOperator):
airflow/providers/google/cloud/operators/kubernetes_engine.py

@@ -457,8 +457,7 @@ class GKECreateClusterOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
         """Extract the value of the given field name."""
         if isinstance(self.body, dict):
             return self.body.get(field_name, default_value)
-        else:
-            return getattr(self.body, field_name, default_value)
+        return getattr(self.body, field_name, default_value)

     def _alert_deprecated_body_fields(self) -> None:
         """Generate warning messages if deprecated fields were used in the body."""
airflow/providers/google/cloud/operators/spanner.py

@@ -188,13 +188,12 @@ class SpannerDeleteInstanceOperator(GoogleCloudBaseOperator):
         )
         if hook.get_instance(project_id=self.project_id, instance_id=self.instance_id):
             return hook.delete_instance(project_id=self.project_id, instance_id=self.instance_id)
-        else:
-            self.log.info(
-                "Instance '%s' does not exist in project '%s'. Aborting delete.",
-                self.instance_id,
-                self.project_id,
-            )
-            return True
+        self.log.info(
+            "Instance '%s' does not exist in project '%s'. Aborting delete.",
+            self.instance_id,
+            self.project_id,
+        )
+        return True


 class SpannerQueryDatabaseInstanceOperator(GoogleCloudBaseOperator):
@@ -401,13 +400,12 @@ class SpannerDeployDatabaseInstanceOperator(GoogleCloudBaseOperator):
                 database_id=self.database_id,
                 ddl_statements=self.ddl_statements,
             )
-        else:
-            self.log.info(
-                "The database '%s' in project '%s' and instance '%s' already exists. Nothing to do. Exiting.",
-                self.database_id,
-                self.project_id,
-                self.instance_id,
-            )
+        self.log.info(
+            "The database '%s' in project '%s' and instance '%s' already exists. Nothing to do. Exiting.",
+            self.database_id,
+            self.project_id,
+            self.instance_id,
+        )
         return True

@@ -496,21 +494,20 @@ class SpannerUpdateDatabaseInstanceOperator(GoogleCloudBaseOperator):
                 f"and instance '{self.instance_id}' is missing. "
                 f"Create the database first before you can update it."
             )
-        else:
-            SpannerDatabaseLink.persist(
-                context=context,
-                task_instance=self,
-                instance_id=self.instance_id,
-                database_id=self.database_id,
-                project_id=self.project_id or hook.project_id,
-            )
-            return hook.update_database(
-                project_id=self.project_id,
-                instance_id=self.instance_id,
-                database_id=self.database_id,
-                ddl_statements=self.ddl_statements,
-                operation_id=self.operation_id,
-            )
+        SpannerDatabaseLink.persist(
+            context=context,
+            task_instance=self,
+            instance_id=self.instance_id,
+            database_id=self.database_id,
+            project_id=self.project_id or hook.project_id,
+        )
+        return hook.update_database(
+            project_id=self.project_id,
+            instance_id=self.instance_id,
+            database_id=self.database_id,
+            ddl_statements=self.ddl_statements,
+            operation_id=self.operation_id,
+        )


 class SpannerDeleteDatabaseInstanceOperator(GoogleCloudBaseOperator):
@@ -589,7 +586,6 @@ class SpannerDeleteDatabaseInstanceOperator(GoogleCloudBaseOperator):
                 self.instance_id,
             )
             return True
-        else:
-            return hook.delete_database(
-                project_id=self.project_id, instance_id=self.instance_id, database_id=self.database_id
-            )
+        return hook.delete_database(
+            project_id=self.project_id, instance_id=self.instance_id, database_id=self.database_id
+        )
airflow/providers/google/cloud/sensors/cloud_composer.py

@@ -108,14 +108,12 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
         if isinstance(self.execution_range, timedelta):
             if self.execution_range < timedelta(0):
                 return context["logical_date"], context["logical_date"] - self.execution_range
-            else:
-                return context["logical_date"] - self.execution_range, context["logical_date"]
-        elif isinstance(self.execution_range, list) and len(self.execution_range) > 0:
+            return context["logical_date"] - self.execution_range, context["logical_date"]
+        if isinstance(self.execution_range, list) and len(self.execution_range) > 0:
             return self.execution_range[0], self.execution_range[1] if len(
                 self.execution_range
             ) > 1 else context["logical_date"]
-        else:
-            return context["logical_date"] - timedelta(1), context["logical_date"]
+        return context["logical_date"] - timedelta(1), context["logical_date"]

     def poke(self, context: Context) -> bool:
         start_date, end_date = self._get_logical_dates(context)
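The flattened _get_logical_dates above computes the (start, end) window the sensor searches for DAG runs: a positive timedelta looks back from logical_date, a negative one looks forward, an explicit list pins one or both bounds, and anything else defaults to the previous 24 hours. A standalone sketch of the same decision table (hypothetical helper, not the sensor's method):

from datetime import datetime, timedelta


def logical_window(logical_date: datetime, execution_range) -> tuple[datetime, datetime]:
    if isinstance(execution_range, timedelta):
        if execution_range < timedelta(0):
            return logical_date, logical_date - execution_range
        return logical_date - execution_range, logical_date
    if isinstance(execution_range, list) and len(execution_range) > 0:
        end = execution_range[1] if len(execution_range) > 1 else logical_date
        return execution_range[0], end
    return logical_date - timedelta(1), logical_date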
airflow/providers/google/cloud/sensors/dataflow.py

@@ -117,7 +117,7 @@ class DataflowJobStatusSensor(BaseSensorOperator):

         if job_status in self.expected_statuses:
             return True
-        elif job_status in DataflowJobStatus.TERMINAL_STATES:
+        if job_status in DataflowJobStatus.TERMINAL_STATES:
             message = f"Job with id '{self.job_id}' is already in terminal state: {job_status}"
             raise AirflowException(message)

airflow/providers/google/cloud/sensors/dataproc.py

@@ -100,17 +100,17 @@ class DataprocJobSensor(BaseSensorOperator):
         if state == JobStatus.State.ERROR:
             message = f"Job failed:\n{job}"
             raise AirflowException(message)
-        elif state in {
+        if state in {
             JobStatus.State.CANCELLED,
             JobStatus.State.CANCEL_PENDING,
             JobStatus.State.CANCEL_STARTED,
         }:
             message = f"Job was cancelled:\n{job}"
             raise AirflowException(message)
-        elif state == JobStatus.State.DONE:
+        if state == JobStatus.State.DONE:
             self.log.debug("Job %s completed successfully.", self.dataproc_job_id)
             return True
-        elif state == JobStatus.State.ATTEMPT_FAILURE:
+        if state == JobStatus.State.ATTEMPT_FAILURE:
             self.log.debug("Job %s attempt has failed.", self.dataproc_job_id)

         self.log.info("Waiting for job %s to complete.", self.dataproc_job_id)
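The sensor hunks follow the BaseSensorOperator contract: poke() returns True to finish successfully, returns False to be rescheduled for another attempt, and raises AirflowException when the watched resource reaches a terminal failure state so the task fails fast instead of polling until timeout. A schematic reduction of the Dataproc checks above (the state strings stand in for the real JobStatus enum):

from airflow.exceptions import AirflowException

TERMINAL_FAILURES = {"ERROR", "CANCELLED", "CANCEL_PENDING", "CANCEL_STARTED"}


def poke_like(state: str) -> bool:
    if state in TERMINAL_FAILURES:
        raise AirflowException(f"Job reached terminal state: {state}")
    if state == "DONE":
        return True
    return False  # not done yet; the sensor pokes again after poke_interval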
@@ -179,13 +179,13 @@ class DataprocBatchSensor(BaseSensorOperator):
         if state == Batch.State.FAILED:
             message = "Batch failed"
             raise AirflowException(message)
-        elif state in {
+        if state in {
             Batch.State.CANCELLED,
             Batch.State.CANCELLING,
         }:
             message = "Batch was cancelled."
             raise AirflowException(message)
-        elif state == Batch.State.SUCCEEDED:
+        if state == Batch.State.SUCCEEDED:
             self.log.debug("Batch %s completed successfully.", self.batch_id)
             return True

airflow/providers/google/cloud/sensors/gcs.py

@@ -306,23 +306,22 @@ class GCSObjectsWithPrefixExistenceSensor(BaseSensorOperator):
         if not self.deferrable:
             super().execute(context)
             return self._matches
+        if not self.poke(context=context):
+            self.defer(
+                timeout=timedelta(seconds=self.timeout),
+                trigger=GCSPrefixBlobTrigger(
+                    bucket=self.bucket,
+                    prefix=self.prefix,
+                    poke_interval=self.poke_interval,
+                    google_cloud_conn_id=self.google_cloud_conn_id,
+                    hook_params={
+                        "impersonation_chain": self.impersonation_chain,
+                    },
+                ),
+                method_name="execute_complete",
+            )
         else:
-            if not self.poke(context=context):
-                self.defer(
-                    timeout=timedelta(seconds=self.timeout),
-                    trigger=GCSPrefixBlobTrigger(
-                        bucket=self.bucket,
-                        prefix=self.prefix,
-                        poke_interval=self.poke_interval,
-                        google_cloud_conn_id=self.google_cloud_conn_id,
-                        hook_params={
-                            "impersonation_chain": self.impersonation_chain,
-                        },
-                    ),
-                    method_name="execute_complete",
-                )
-            else:
-                return self._matches
+            return self._matches

     def execute_complete(self, context: dict[str, Any], event: dict[str, str | list[str]]) -> str | list[str]:
         """Return immediately and rely on trigger to throw a success event. Callback for the trigger."""
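At the DAG level, nothing about using this sensor changes with the refactor; the deferrable path is still opted into with the constructor flag. A usage sketch (task id, bucket, and prefix are made up):

from airflow.providers.google.cloud.sensors.gcs import GCSObjectsWithPrefixExistenceSensor

wait_for_exports = GCSObjectsWithPrefixExistenceSensor(
    task_id="wait_for_exports",
    bucket="example-bucket",
    prefix="exports/2024-",
    deferrable=True,   # take the self.defer() branch shown above
    poke_interval=60,
)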
airflow/providers/google/cloud/sensors/looker.py

@@ -65,13 +65,13 @@ class LookerCheckPdtBuildSensor(BaseSensorOperator):
             msg = status_dict["message"]
             message = f'PDT materialization job failed. Job id: {self.materialization_id}. Message:\n"{msg}"'
             raise AirflowException(message)
-        elif status == JobStatus.CANCELLED.value:
+        if status == JobStatus.CANCELLED.value:
             message = f"PDT materialization job was cancelled. Job id: {self.materialization_id}."
             raise AirflowException(message)
-        elif status == JobStatus.UNKNOWN.value:
+        if status == JobStatus.UNKNOWN.value:
             message = f"PDT materialization job has unknown status. Job id: {self.materialization_id}."
             raise AirflowException(message)
-        elif status == JobStatus.DONE.value:
+        if status == JobStatus.DONE.value:
             self.log.debug(
                 "PDT materialization job completed successfully. Job id: %s.", self.materialization_id
             )