apache-airflow-providers-google 10.21.0rc1__py3-none-any.whl → 10.21.1rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/cloud/hooks/cloud_sql.py +9 -5
- airflow/providers/google/cloud/hooks/dataproc.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +1 -1
- airflow/providers/google/cloud/openlineage/mixins.py +68 -36
- airflow/providers/google/cloud/openlineage/utils.py +18 -18
- airflow/providers/google/cloud/operators/bigquery.py +13 -6
- airflow/providers/google/cloud/operators/dataflow.py +1 -1
- airflow/providers/google/cloud/operators/dataproc.py +1 -1
- airflow/providers/google/cloud/operators/dataproc_metastore.py +3 -7
- airflow/providers/google/cloud/operators/gcs.py +6 -9
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +34 -72
- airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +1 -2
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +4 -9
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +4 -9
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +1 -2
- airflow/providers/google/get_provider_info.py +3 -1
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +1 -0
- {apache_airflow_providers_google-10.21.0rc1.dist-info → apache_airflow_providers_google-10.21.1rc1.dist-info}/METADATA +79 -74
- {apache_airflow_providers_google-10.21.0rc1.dist-info → apache_airflow_providers_google-10.21.1rc1.dist-info}/RECORD +22 -22
- {apache_airflow_providers_google-10.21.0rc1.dist-info → apache_airflow_providers_google-10.21.1rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-10.21.0rc1.dist-info → apache_airflow_providers_google-10.21.1rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/google/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.21.0"
+__version__ = "10.21.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.7.0"
airflow/providers/google/cloud/hooks/cloud_sql.py
@@ -59,8 +59,6 @@ from airflow.providers.google.common.hooks.base_google import (
     GoogleBaseHook,
     get_field,
 )
-from airflow.providers.mysql.hooks.mysql import MySqlHook
-from airflow.providers.postgres.hooks.postgres import PostgresHook
 from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:
@@ -856,7 +854,7 @@ class CloudSQLDatabaseHook(BaseHook):
         # Port and socket path and db_hook are automatically generated
         self.sql_proxy_tcp_port = None
         self.sql_proxy_unique_path: str | None = None
-        self.db_hook: PostgresHook | MySqlHook | None = None
+        self.db_hook: BaseHook | None = None
         self.reserved_tcp_socket: socket.socket | None = None
         # Generated based on clock + clock sequence. Unique per host (!).
         # This is important as different hosts share the database
@@ -1140,7 +1138,7 @@ class CloudSQLDatabaseHook(BaseHook):
             gcp_conn_id=self.gcp_conn_id,
         )
 
-    def get_database_hook(self, connection: Connection) -> PostgresHook | MySqlHook:
+    def get_database_hook(self, connection: Connection) -> BaseHook:
         """
         Retrieve database hook.
 
@@ -1148,14 +1146,20 @@ class CloudSQLDatabaseHook(BaseHook):
         connects directly to the Google Cloud SQL database.
         """
         if self.database_type == "postgres":
-            db_hook: PostgresHook | MySqlHook = PostgresHook(connection=connection, database=self.database)
+            from airflow.providers.postgres.hooks.postgres import PostgresHook
+
+            db_hook: BaseHook = PostgresHook(connection=connection, database=self.database)
         else:
+            from airflow.providers.mysql.hooks.mysql import MySqlHook
+
             db_hook = MySqlHook(connection=connection, schema=self.database)
         self.db_hook = db_hook
         return db_hook
 
     def cleanup_database_hook(self) -> None:
         """Clean up database hook after it was used."""
+        from airflow.providers.postgres.hooks.postgres import PostgresHook
+
         if self.database_type == "postgres":
             if not self.db_hook:
                 raise ValueError("The db_hook should be set")
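
Note on the cloud_sql.py hunks above: moving the MySqlHook/PostgresHook imports from module
level into the method bodies means importing the google provider no longer requires both the
mysql and postgres providers to be installed. A minimal sketch of the same deferred-import
pattern (the helper below is illustrative, not part of the provider API):

    from airflow.hooks.base import BaseHook

    def make_db_hook(database_type: str, connection, database: str) -> BaseHook:
        # Import the optional dependency only on the code path that needs it,
        # so importing this module does not pull in both providers.
        if database_type == "postgres":
            from airflow.providers.postgres.hooks.postgres import PostgresHook

            return PostgresHook(connection=connection, database=database)
        from airflow.providers.mysql.hooks.mysql import MySqlHook

        return MySqlHook(connection=connection, schema=database)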
airflow/providers/google/cloud/hooks/dataproc.py
@@ -274,7 +274,7 @@ class DataprocHook(GoogleBaseHook):
         self,
         operation: Operation,
         timeout: float | None = None,
-        result_retry: AsyncRetry | _MethodDefault = DEFAULT,
+        result_retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
     ) -> Any:
         """Wait for a long-lasting operation to complete."""
         try:
airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
@@ -262,7 +262,7 @@ class CustomJobHook(GoogleBaseHook):
 
     @staticmethod
     def extract_model_id_from_training_pipeline(training_pipeline: dict[str, Any]) -> str:
-        """Return a unique Model id from a serialized TrainingPipeline proto."""
+        """Return a unique Model ID from a serialized TrainingPipeline proto."""
         return training_pipeline["model_to_upload"]["name"].rpartition("/")[-1]
 
     @staticmethod
airflow/providers/google/cloud/openlineage/mixins.py
@@ -20,19 +20,23 @@ from __future__ import annotations
 import copy
 import json
 import traceback
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, cast
 
 if TYPE_CHECKING:
-    from openlineage.client.facet import (
-        BaseFacet,
+    from airflow.providers.common.compat.openlineage.facet import (
+        Dataset,
+        InputDataset,
+        OutputDataset,
         OutputStatisticsOutputDatasetFacet,
+        RunFacet,
         SchemaDatasetFacet,
     )
-    from openlineage.client.run import Dataset
-
     from airflow.providers.google.cloud.openlineage.utils import BigQueryJobRunFacet
 
 
+BIGQUERY_NAMESPACE = "bigquery"
+
+
 class _BigQueryOpenLineageMixin:
     def get_openlineage_facets_on_complete(self, _):
         """
@@ -61,8 +65,7 @@ class _BigQueryOpenLineageMixin:
         - SchemaDatasetFacet
         - OutputStatisticsOutputDatasetFacet
         """
-        from openlineage.client.facet import ExternalQueryRunFacet, SqlJobFacet
-
+        from airflow.providers.common.compat.openlineage.facet import ExternalQueryRunFacet, SQLJobFacet
         from airflow.providers.openlineage.extractors import OperatorLineage
         from airflow.providers.openlineage.sqlparser import SQLParser
 
@@ -79,11 +82,11 @@ class _BigQueryOpenLineageMixin:
             impersonation_chain=self.impersonation_chain,
         )
 
-        run_facets: dict[str, BaseFacet] = {
+        run_facets: dict[str, RunFacet] = {
             "externalQuery": ExternalQueryRunFacet(externalQueryId=self.job_id, source="bigquery")
         }
 
-        job_facets = {"sql": SqlJobFacet(query=SQLParser.normalize_sql(self.sql))}
+        job_facets = {"sql": SQLJobFacet(query=SQLParser.normalize_sql(self.sql))}
 
         self.client = self.hook.get_client(project_id=self.hook.project_id)
         job_ids = self.job_id
@@ -104,8 +107,7 @@ class _BigQueryOpenLineageMixin:
         )
 
     def get_facets(self, job_id: str):
-        from openlineage.client.facet import ErrorMessageRunFacet
-
+        from airflow.providers.common.compat.openlineage.facet import ErrorMessageRunFacet
         from airflow.providers.google.cloud.openlineage.utils import (
             BigQueryErrorRunFacet,
             get_from_nullable_chain,
@@ -113,7 +115,7 @@ class _BigQueryOpenLineageMixin:
 
         inputs = []
         outputs = []
-        run_facets: dict[str, BaseFacet] = {}
+        run_facets: dict[str, RunFacet] = {}
         if hasattr(self, "log"):
             self.log.debug("Extracting data from bigquery job: `%s`", job_id)
         try:
@@ -158,7 +160,7 @@ class _BigQueryOpenLineageMixin:
         deduplicated_outputs = self._deduplicate_outputs(outputs)
         return inputs, deduplicated_outputs, run_facets
 
-    def _deduplicate_outputs(self, outputs: list[Dataset]) -> list[Dataset]:
+    def _deduplicate_outputs(self, outputs: list[OutputDataset | None]) -> list[OutputDataset]:
         # Sources are the same so we can compare only names
         final_outputs = {}
         for single_output in outputs:
@@ -171,22 +173,26 @@ class _BigQueryOpenLineageMixin:
 
             # No OutputStatisticsOutputDatasetFacet is added to duplicated outputs as we can not determine
             # if the rowCount or size can be summed together.
-            single_output.facets.pop("outputStatistics", None)
+            if single_output.outputFacets:
+                single_output.outputFacets.pop("outputStatistics", None)
             final_outputs[key] = single_output
 
         return list(final_outputs.values())
 
-    def _get_inputs_outputs_from_job(self, properties: dict) -> tuple[list[Dataset], Dataset | None]:
+    def _get_inputs_outputs_from_job(
+        self, properties: dict
+    ) -> tuple[list[InputDataset], OutputDataset | None]:
         from airflow.providers.google.cloud.openlineage.utils import get_from_nullable_chain
 
         input_tables = get_from_nullable_chain(properties, ["statistics", "query", "referencedTables"]) or []
         output_table = get_from_nullable_chain(properties, ["configuration", "query", "destinationTable"])
-        inputs = [self._get_dataset(input_table) for input_table in input_tables]
+        inputs = [(self._get_input_dataset(input_table)) for input_table in input_tables]
         if output_table:
-            output = self._get_dataset(output_table)
+            output = self._get_output_dataset(output_table)
             dataset_stat_facet = self._get_statistics_dataset_facet(properties)
+            output.outputFacets = output.outputFacets or {}
             if dataset_stat_facet:
-                output.facets["outputStatistics"] = dataset_stat_facet
+                output.outputFacets["outputStatistics"] = dataset_stat_facet
 
         return inputs, output
 
@@ -210,9 +216,10 @@ class _BigQueryOpenLineageMixin:
         )
 
     @staticmethod
-    def _get_statistics_dataset_facet(properties) -> OutputStatisticsOutputDatasetFacet | None:
-        from openlineage.client.facet import OutputStatisticsOutputDatasetFacet
-
+    def _get_statistics_dataset_facet(
+        properties,
+    ) -> OutputStatisticsOutputDatasetFacet | None:
+        from airflow.providers.common.compat.openlineage.facet import OutputStatisticsOutputDatasetFacet
         from airflow.providers.google.cloud.openlineage.utils import get_from_nullable_chain
 
         query_plan = get_from_nullable_chain(properties, chain=["statistics", "query", "queryPlan"])
@@ -226,10 +233,18 @@ class _BigQueryOpenLineageMixin:
             return OutputStatisticsOutputDatasetFacet(rowCount=int(out_rows), size=int(out_bytes))
         return None
 
-    def _get_dataset(self, table: dict) -> Dataset:
-        from openlineage.client.run import Dataset
+    def _get_input_dataset(self, table: dict) -> InputDataset:
+        from airflow.providers.common.compat.openlineage.facet import InputDataset
+
+        return cast(InputDataset, self._get_dataset(table, "input"))
+
+    def _get_output_dataset(self, table: dict) -> OutputDataset:
+        from airflow.providers.common.compat.openlineage.facet import OutputDataset
 
-        from airflow.providers.google.cloud.openlineage.utils import BIGQUERY_NAMESPACE
+        return cast(OutputDataset, self._get_dataset(table, "output"))
+
+    def _get_dataset(self, table: dict, dataset_type: str) -> Dataset:
+        from airflow.providers.common.compat.openlineage.facet import InputDataset, OutputDataset
 
         project = table.get("projectId")
         dataset = table.get("datasetId")
@@ -237,15 +252,30 @@ class _BigQueryOpenLineageMixin:
         dataset_name = f"{project}.{dataset}.{table_name}"
 
         dataset_schema = self._get_table_schema_safely(dataset_name)
-        return Dataset(
-            namespace=BIGQUERY_NAMESPACE,
-            name=dataset_name,
-            facets={
-                "schema": dataset_schema,
-            }
-            if dataset_schema
-            else {},
-        )
+        if dataset_type == "input":
+            # Logic specific to creating InputDataset (if needed)
+            return InputDataset(
+                namespace=BIGQUERY_NAMESPACE,
+                name=dataset_name,
+                facets={
+                    "schema": dataset_schema,
+                }
+                if dataset_schema
+                else {},
+            )
+        elif dataset_type == "output":
+            # Logic specific to creating OutputDataset (if needed)
+            return OutputDataset(
+                namespace=BIGQUERY_NAMESPACE,
+                name=dataset_name,
+                facets={
+                    "schema": dataset_schema,
+                }
+                if dataset_schema
+                else {},
+            )
+        else:
+            raise ValueError("Invalid dataset_type. Must be 'input' or 'output'")
 
     def _get_table_schema_safely(self, table_name: str) -> SchemaDatasetFacet | None:
         try:
@@ -256,8 +286,10 @@ class _BigQueryOpenLineageMixin:
             return None
 
     def _get_table_schema(self, table: str) -> SchemaDatasetFacet | None:
-        from openlineage.client.facet import SchemaDatasetFacet, SchemaField
-
+        from airflow.providers.common.compat.openlineage.facet import (
+            SchemaDatasetFacet,
+            SchemaDatasetFacetFields,
+        )
         from airflow.providers.google.cloud.openlineage.utils import get_from_nullable_chain
 
         bq_table = self.client.get_table(table)
@@ -271,7 +303,7 @@ class _BigQueryOpenLineageMixin:
 
         return SchemaDatasetFacet(
             fields=[
-                SchemaField(
+                SchemaDatasetFacetFields(
                     name=field.get("name"),
                     type=field.get("type"),
                     description=field.get("description"),
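
Note on the mixins.py hunks above: every direct openlineage.client import is replaced by
airflow.providers.common.compat.openlineage.facet, which absorbs the facet renames between
openlineage-python releases (SqlJobFacet -> SQLJobFacet, SchemaField -> SchemaDatasetFacetFields,
and so on). A rough sketch of how such a compat shim can look — an illustration of the idea,
not the actual contents of the common.compat provider:

    # illustrative compat shim, not the real module
    try:
        # newer openlineage-python exposes the v2 names
        from openlineage.client.event_v2 import Dataset, InputDataset, OutputDataset
        from openlineage.client.facet_v2 import RunFacet
    except ImportError:
        # fall back to the legacy module layout
        from openlineage.client.facet import BaseFacet as RunFacet
        from openlineage.client.run import Dataset, InputDataset, OutputDataset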
airflow/providers/google/cloud/openlineage/utils.py
@@ -20,23 +20,23 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any
 
 from attr import define, field
-from openlineage.client.facet import (
-    BaseFacet,
+
+if TYPE_CHECKING:
+    from google.cloud.bigquery.table import Table
+
+    from airflow.providers.common.compat.openlineage.facet import Dataset
+
+from airflow.providers.common.compat.openlineage.facet import (
     ColumnLineageDatasetFacet,
-    ColumnLineageDatasetFacetFieldsAdditional,
-    ColumnLineageDatasetFacetFieldsAdditionalInputFields,
     DocumentationDatasetFacet,
+    Fields,
+    InputField,
+    RunFacet,
     SchemaDatasetFacet,
-    SchemaField,
+    SchemaDatasetFacetFields,
 )
-
 from airflow.providers.google import __version__ as provider_version
 
-if TYPE_CHECKING:
-    from google.cloud.bigquery.table import Table
-    from openlineage.client.run import Dataset
-
-
 BIGQUERY_NAMESPACE = "bigquery"
 BIGQUERY_URI = "bigquery"
 
@@ -46,7 +46,9 @@ def get_facets_from_bq_table(table: Table) -> dict[Any, Any]:
     facets = {
         "schema": SchemaDatasetFacet(
             fields=[
-                SchemaField(name=field.name, type=field.field_type, description=field.description)
+                SchemaDatasetFacetFields(
+                    name=field.name, type=field.field_type, description=field.description
+                )
                 for field in table.schema
             ]
         ),
@@ -71,11 +73,9 @@ def get_identity_column_lineage_facet(
 
     column_lineage_facet = ColumnLineageDatasetFacet(
         fields={
-            field: ColumnLineageDatasetFacetFieldsAdditional(
+            field: Fields(
                 inputFields=[
-                    ColumnLineageDatasetFacetFieldsAdditionalInputFields(
-                        namespace=dataset.namespace, name=dataset.name, field=field
-                    )
+                    InputField(namespace=dataset.namespace, name=dataset.name, field=field)
                     for dataset in input_datasets
                 ],
                 transformationType="IDENTITY",
@@ -88,7 +88,7 @@ def get_identity_column_lineage_facet(
 
 
 @define
-class BigQueryJobRunFacet(BaseFacet):
+class BigQueryJobRunFacet(RunFacet):
     """
     Facet that represents relevant statistics of bigquery run.
 
@@ -114,7 +114,7 @@ class BigQueryJobRunFacet(BaseFacet):
 
 # TODO: remove BigQueryErrorRunFacet in next release
 @define
-class BigQueryErrorRunFacet(BaseFacet):
+class BigQueryErrorRunFacet(RunFacet):
     """
     Represents errors that can happen during execution of BigqueryExtractor.
 
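
For reference, with the renamed classes a facet built by get_identity_column_lineage_facet has
roughly this shape (hand-written equivalent; the table and column names are made up):

    from airflow.providers.common.compat.openlineage.facet import (
        ColumnLineageDatasetFacet,
        Dataset,
        Fields,
        InputField,
    )

    source = Dataset(namespace="bigquery", name="my-project.my_dataset.src_table")
    lineage = ColumnLineageDatasetFacet(
        fields={
            "user_id": Fields(
                inputFields=[
                    InputField(namespace=source.namespace, name=source.name, field="user_id")
                ],
                transformationType="IDENTITY",
                transformationDescription="identical",
            )
        }
    )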
airflow/providers/google/cloud/operators/bigquery.py
@@ -2955,6 +2955,7 @@ class BigQueryInsertJobOperator(GoogleCloudBaseOperator, _BigQueryOpenLineageMixin):
 
         try:
             self.log.info("Executing: %s'", self.configuration)
+            # Create a job
             job: BigQueryJob | UnknownJob = self._submit_job(hook, self.job_id)
         except Conflict:
             # If the job already exists retrieve it
@@ -2963,18 +2964,24 @@ class BigQueryInsertJobOperator(GoogleCloudBaseOperator, _BigQueryOpenLineageMixin):
                 location=self.location,
                 job_id=self.job_id,
             )
-            if job.state in self.reattach_states:
-                # We are reattaching to a job
-                job._begin()
-                self._handle_job_error(job)
-            else:
-                # Same job configuration so we need force_rerun
+
+            if job.state not in self.reattach_states:
+                # Same job configuration, so we need force_rerun
                 raise AirflowException(
                     f"Job with id: {self.job_id} already exists and is in {job.state} state. If you "
                     f"want to force rerun it consider setting `force_rerun=True`."
                     f"Or, if you want to reattach in this scenario add {job.state} to `reattach_states`"
                 )
 
+            else:
+                # Job already reached state DONE
+                if job.state == "DONE":
+                    raise AirflowException("Job is already in state DONE. Can not reattach to this job.")
+
+                # We are reattaching to a job
+                self.log.info("Reattaching to existing Job in state %s", job.state)
+                self._handle_job_error(job)
+
         job_types = {
             LoadJob._JOB_TYPE: ["sourceTable", "destinationTable"],
             CopyJob._JOB_TYPE: ["sourceTable", "destinationTable"],
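
Note on the bigquery.py hunks above: on a Conflict the operator now refuses to reattach to a
job that is already DONE, and logs the state it reattaches to otherwise. reattach_states is the
existing operator parameter that opts into reattaching; a typical (illustrative) use:

    from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

    insert_job = BigQueryInsertJobOperator(
        task_id="insert_job",
        configuration={"query": {"query": "SELECT 1", "useLegacySql": False}},
        job_id="my_static_job_id",  # a fixed id makes a Conflict possible on retry
        reattach_states={"PENDING", "RUNNING"},  # reattach instead of failing
    )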
airflow/providers/google/cloud/operators/dataflow.py
@@ -143,7 +143,7 @@ class DataflowConfiguration:
     def __init__(
         self,
         *,
-        job_name: str = "{{task.task_id}}",
+        job_name: str | None = None,
         append_job_name: bool = True,
         project_id: str = PROVIDE_PROJECT_ID,
         location: str | None = DEFAULT_DATAFLOW_LOCATION,
airflow/providers/google/cloud/operators/dataproc.py
@@ -2995,7 +2995,7 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
-        result_retry: AsyncRetry | _MethodDefault = DEFAULT,
+        result_retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
         asynchronous: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         polling_interval_seconds: int = 5,
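
Both this operator and DataprocHook.wait_for_operation (see the dataproc.py hook hunk earlier)
now accept a synchronous google.api_core Retry for result_retry, not only AsyncRetry. An
illustrative call (batch body elided):

    from google.api_core.retry import Retry

    from airflow.providers.google.cloud.operators.dataproc import DataprocCreateBatchOperator

    create_batch = DataprocCreateBatchOperator(
        task_id="create_batch",
        region="europe-west1",
        batch={},  # batch definition elided for brevity
        batch_id="example-batch",
        result_retry=Retry(initial=1.0, maximum=10.0, multiplier=2.0, timeout=600.0),
    )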
airflow/providers/google/cloud/operators/dataproc_metastore.py
@@ -22,12 +22,12 @@ from __future__ import annotations
 import time
 from typing import TYPE_CHECKING, Sequence
 
+from google.api_core.exceptions import AlreadyExists
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.api_core.retry import Retry, exponential_sleep_generator
 from google.cloud.metastore_v1 import MetadataExport, MetadataManagementActivity
 from google.cloud.metastore_v1.types import Backup, MetadataImport, Service
 from google.cloud.metastore_v1.types.metastore import DatabaseDumpSpec, Restore
-from googleapiclient.errors import HttpError
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator, BaseOperatorLink
@@ -242,9 +242,7 @@ class DataprocMetastoreCreateBackupOperator(GoogleCloudBaseOperator):
             )
             backup = hook.wait_for_operation(self.timeout, operation)
             self.log.info("Backup %s created successfully", self.backup_id)
-        except HttpError as err:
-            if err.resp.status not in (409, "409"):
-                raise
+        except AlreadyExists:
             self.log.info("Backup %s already exists", self.backup_id)
             backup = hook.get_backup(
                 project_id=self.project_id,
@@ -448,9 +446,7 @@ class DataprocMetastoreCreateServiceOperator(GoogleCloudBaseOperator):
             )
             service = hook.wait_for_operation(self.timeout, operation)
             self.log.info("Service %s created successfully", self.service_id)
-        except HttpError as err:
-            if err.resp.status not in (409, "409"):
-                raise
+        except AlreadyExists:
             self.log.info("Instance %s already exists", self.service_id)
             service = hook.get_service(
                 region=self.region,
airflow/providers/google/cloud/operators/gcs.py
@@ -336,13 +336,12 @@ class GCSDeleteObjectsOperator(GoogleCloudBaseOperator):
             hook.delete(bucket_name=self.bucket_name, object_name=object_name)
 
     def get_openlineage_facets_on_start(self):
-        from openlineage.client.facet import (
+        from airflow.providers.common.compat.openlineage.facet import (
+            Dataset,
             LifecycleStateChange,
             LifecycleStateChangeDatasetFacet,
-            LifecycleStateChangeDatasetFacetPreviousIdentifier,
+            PreviousIdentifier,
         )
-        from openlineage.client.run import Dataset
-
         from airflow.providers.openlineage.extractors import OperatorLineage
 
         objects = []
@@ -365,7 +364,7 @@ class GCSDeleteObjectsOperator(GoogleCloudBaseOperator):
             facets={
                 "lifecycleStateChange": LifecycleStateChangeDatasetFacet(
                     lifecycleStateChange=LifecycleStateChange.DROP.value,
-                    previousIdentifier=LifecycleStateChangeDatasetFacetPreviousIdentifier(
+                    previousIdentifier=PreviousIdentifier(
                         namespace=bucket_url,
                         name=object_name,
                     ),
@@ -645,8 +644,7 @@ class GCSFileTransformOperator(GoogleCloudBaseOperator):
         )
 
     def get_openlineage_facets_on_start(self):
-        from openlineage.client.run import Dataset
-
+        from airflow.providers.common.compat.openlineage.facet import Dataset
         from airflow.providers.openlineage.extractors import OperatorLineage
 
         input_dataset = Dataset(
@@ -921,8 +919,7 @@ class GCSTimeSpanFileTransformOperator(GoogleCloudBaseOperator):
 
     def get_openlineage_facets_on_complete(self, task_instance):
         """Implement on_complete as execute() resolves object prefixes."""
-        from openlineage.client.run import Dataset
-
+        from airflow.providers.common.compat.openlineage.facet import Dataset
         from airflow.providers.openlineage.extractors import OperatorLineage
 
         def _parse_prefix(pref):
airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
@@ -180,6 +180,40 @@ class CustomTrainingJobBaseOperator(GoogleCloudBaseOperator):
             stacklevel=2,
         )
 
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> dict[str, Any] | None:
+        if event["status"] == "error":
+            raise AirflowException(event["message"])
+        training_pipeline = event["job"]
+        custom_job_id = self.hook.extract_custom_job_id_from_training_pipeline(training_pipeline)
+        self.xcom_push(context, key="custom_job_id", value=custom_job_id)
+        try:
+            model = training_pipeline["model_to_upload"]
+            model_id = self.hook.extract_model_id(model)
+            self.xcom_push(context, key="model_id", value=model_id)
+            VertexAIModelLink.persist(context=context, task_instance=self, model_id=model_id)
+            return model
+        except KeyError:
+            self.log.warning(
+                "It is impossible to get the Model. "
+                "The Training Pipeline did not produce a Managed Model because it was not "
+                "configured to upload a Model. Please ensure that the 'model_serving_container_image_uri' "
+                "and 'model_display_name' parameters are passed in when creating a Training Pipeline, "
+                "and check that your training script saves the model to os.environ['AIP_MODEL_DIR']."
+            )
+            return None
+
+    @cached_property
+    def hook(self) -> CustomJobHook:
+        return CustomJobHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+    def on_kill(self) -> None:
+        """Act as a callback called when the operator is killed; cancel any running job."""
+        if self.hook:
+            self.hook.cancel_job()
+
 
 class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
     """
@@ -565,23 +599,6 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
         VertexAITrainingLink.persist(context=context, task_instance=self, training_id=training_id)
         return result
 
-    def on_kill(self) -> None:
-        """Act as a callback called when the operator is killed; cancel any running job."""
-        if self.hook:
-            self.hook.cancel_job()
-
-    def execute_complete(self, context: Context, event: dict[str, Any]) -> dict[str, Any] | None:
-        if event["status"] == "error":
-            raise AirflowException(event["message"])
-        result = event["job"]
-        model_id = self.hook.extract_model_id_from_training_pipeline(result)
-        custom_job_id = self.hook.extract_custom_job_id_from_training_pipeline(result)
-        self.xcom_push(context, key="model_id", value=model_id)
-        VertexAIModelLink.persist(context=context, task_instance=self, model_id=model_id)
-        # push custom_job_id to xcom so it could be pulled by other tasks
-        self.xcom_push(context, key="custom_job_id", value=custom_job_id)
-        return result
-
     def invoke_defer(self, context: Context) -> None:
         custom_container_training_job_obj: CustomContainerTrainingJob = self.hook.submit_custom_container_training_job(
             project_id=self.project_id,
@@ -651,13 +668,6 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
             method_name="execute_complete",
         )
 
-    @cached_property
-    def hook(self) -> CustomJobHook:
-        return CustomJobHook(
-            gcp_conn_id=self.gcp_conn_id,
-            impersonation_chain=self.impersonation_chain,
-        )
-
 
 class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
     """
@@ -1042,23 +1052,6 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
         VertexAITrainingLink.persist(context=context, task_instance=self, training_id=training_id)
         return result
 
-    def on_kill(self) -> None:
-        """Cancel any running job. Callback called when the operator is killed."""
-        if self.hook:
-            self.hook.cancel_job()
-
-    def execute_complete(self, context: Context, event: dict[str, Any]) -> dict[str, Any] | None:
-        if event["status"] == "error":
-            raise AirflowException(event["message"])
-        result = event["job"]
-        model_id = self.hook.extract_model_id_from_training_pipeline(result)
-        custom_job_id = self.hook.extract_custom_job_id_from_training_pipeline(result)
-        self.xcom_push(context, key="model_id", value=model_id)
-        VertexAIModelLink.persist(context=context, task_instance=self, model_id=model_id)
-        # push custom_job_id to xcom so it could be pulled by other tasks
-        self.xcom_push(context, key="custom_job_id", value=custom_job_id)
-        return result
-
     def invoke_defer(self, context: Context) -> None:
         custom_python_training_job_obj: CustomPythonPackageTrainingJob = self.hook.submit_custom_python_package_training_job(
             project_id=self.project_id,
@@ -1129,13 +1122,6 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
             method_name="execute_complete",
         )
 
-    @cached_property
-    def hook(self) -> CustomJobHook:
-        return CustomJobHook(
-            gcp_conn_id=self.gcp_conn_id,
-            impersonation_chain=self.impersonation_chain,
-        )
-
 
 class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
     """
@@ -1525,23 +1511,6 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
         VertexAITrainingLink.persist(context=context, task_instance=self, training_id=training_id)
         return result
 
-    def on_kill(self) -> None:
-        """Cancel any running job. Callback called when the operator is killed."""
-        if self.hook:
-            self.hook.cancel_job()
-
-    def execute_complete(self, context: Context, event: dict[str, Any]) -> dict[str, Any] | None:
-        if event["status"] == "error":
-            raise AirflowException(event["message"])
-        result = event["job"]
-        model_id = self.hook.extract_model_id_from_training_pipeline(result)
-        custom_job_id = self.hook.extract_custom_job_id_from_training_pipeline(result)
-        self.xcom_push(context, key="model_id", value=model_id)
-        VertexAIModelLink.persist(context=context, task_instance=self, model_id=model_id)
-        # push custom_job_id to xcom so it could be pulled by other tasks
-        self.xcom_push(context, key="custom_job_id", value=custom_job_id)
-        return result
-
     def invoke_defer(self, context: Context) -> None:
         custom_training_job_obj: CustomTrainingJob = self.hook.submit_custom_training_job(
             project_id=self.project_id,
@@ -1612,13 +1581,6 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
             method_name="execute_complete",
        )
 
-    @cached_property
-    def hook(self) -> CustomJobHook:
-        return CustomJobHook(
-            gcp_conn_id=self.gcp_conn_id,
-            impersonation_chain=self.impersonation_chain,
-        )
-
 
 class DeleteCustomTrainingJobOperator(GoogleCloudBaseOperator):
     """
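
Net effect of the custom_job.py hunks: the per-subclass copies of execute_complete, on_kill,
and the cached hook property move up to CustomTrainingJobBaseOperator, and execute_complete now
tolerates training pipelines that were not configured to upload a model (it warns and returns
None instead of raising KeyError). Downstream tasks keep reading the same XCom keys; for
example (the task id here is made up):

    # inside a downstream task with access to the task instance `ti`
    model_id = ti.xcom_pull(task_ids="create_custom_training_job", key="model_id")
    custom_job_id = ti.xcom_pull(task_ids="create_custom_training_job", key="custom_job_id")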
airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py
@@ -124,8 +124,7 @@ class AzureBlobStorageToGCSOperator(BaseOperator):
         return f"gs://{self.bucket_name}/{self.object_name}"
 
     def get_openlineage_facets_on_start(self):
-        from openlineage.client.run import Dataset
-
+        from airflow.providers.common.compat.openlineage.facet import Dataset
         from airflow.providers.openlineage.extractors import OperatorLineage
 
         wasb_hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
airflow/providers/google/cloud/transfers/bigquery_to_gcs.py
@@ -289,13 +289,12 @@ class BigQueryToGCSOperator(BaseOperator):
         """Implement on_complete as we will include final BQ job id."""
         from pathlib import Path
 
-        from openlineage.client.facet import (
+        from airflow.providers.common.compat.openlineage.facet import (
+            Dataset,
             ExternalQueryRunFacet,
+            Identifier,
             SymlinksDatasetFacet,
-            SymlinksDatasetFacetIdentifiers,
         )
-        from openlineage.client.run import Dataset
-
         from airflow.providers.google.cloud.hooks.gcs import _parse_gcs_url
         from airflow.providers.google.cloud.openlineage.utils import (
             get_facets_from_bq_table,
@@ -335,11 +334,7 @@ class BigQueryToGCSOperator(BaseOperator):
                 # but we create a symlink to the full object path with wildcard.
                 additional_facets = {
                     "symlink": SymlinksDatasetFacet(
-                        identifiers=[
-                            SymlinksDatasetFacetIdentifiers(
-                                namespace=f"gs://{bucket}", name=blob, type="file"
-                            )
-                        ]
+                        identifiers=[Identifier(namespace=f"gs://{bucket}", name=blob, type="file")]
                     ),
                 }
                 blob = Path(blob).parent.as_posix()
airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
@@ -746,13 +746,12 @@ class GCSToBigQueryOperator(BaseOperator):
         """Implement on_complete as we will include final BQ job id."""
         from pathlib import Path
 
-        from openlineage.client.facet import (
+        from airflow.providers.common.compat.openlineage.facet import (
+            Dataset,
             ExternalQueryRunFacet,
+            Identifier,
             SymlinksDatasetFacet,
-            SymlinksDatasetFacetIdentifiers,
         )
-        from openlineage.client.run import Dataset
-
         from airflow.providers.google.cloud.openlineage.utils import (
             get_facets_from_bq_table,
             get_identity_column_lineage_facet,
@@ -786,11 +785,7 @@ class GCSToBigQueryOperator(BaseOperator):
             # but we create a symlink to the full object path with wildcard.
             additional_facets = {
                 "symlink": SymlinksDatasetFacet(
-                    identifiers=[
-                        SymlinksDatasetFacetIdentifiers(
-                            namespace=f"gs://{self.bucket}", name=blob, type="file"
-                        )
-                    ]
+                    identifiers=[Identifier(namespace=f"gs://{self.bucket}", name=blob, type="file")]
                ),
            }
            blob = Path(blob).parent.as_posix()
airflow/providers/google/cloud/transfers/gcs_to_gcs.py
@@ -552,8 +552,7 @@ class GCSToGCSOperator(BaseOperator):
         """
         from pathlib import Path
 
-        from openlineage.client.run import Dataset
-
+        from airflow.providers.common.compat.openlineage.facet import Dataset
         from airflow.providers.openlineage.extractors import OperatorLineage
 
         def _process_prefix(pref):
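
All of the transfer-operator hunks above are the same mechanical migration: Dataset moves from
openlineage.client.run to the compat module, and SymlinksDatasetFacetIdentifiers becomes the
shorter Identifier. For operators of your own that build these facets, the new-style imports
look like this (assuming apache-airflow-providers-common-compat>=1.1.0 is installed; bucket and
object names are made up):

    from airflow.providers.common.compat.openlineage.facet import (
        Dataset,
        Identifier,
        SymlinksDatasetFacet,
    )

    dataset = Dataset(
        namespace="gs://my-bucket",
        name="data",
        facets={
            "symlink": SymlinksDatasetFacet(
                identifiers=[Identifier(namespace="gs://my-bucket", name="data/part-*.csv", type="file")]
            )
        },
    )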
airflow/providers/google/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n  - `Google Ads <https://ads.google.com/>`__\n  - `Google Cloud (GCP) <https://cloud.google.com/>`__\n  - `Google Firebase <https://firebase.google.com/>`__\n  - `Google LevelDB <https://github.com/google/leveldb/>`__\n  - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n  - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1722145669,
         "versions": [
+            "10.21.1",
             "10.21.0",
             "10.20.0",
             "10.19.0",
@@ -91,6 +92,7 @@ def get_provider_info():
         ],
         "dependencies": [
             "apache-airflow>=2.7.0",
+            "apache-airflow-providers-common-compat>=1.1.0",
             "apache-airflow-providers-common-sql>=1.7.2",
             "asgiref>=3.5.2",
             "dill>=0.2.3",
apache_airflow_providers_google-10.21.1rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.21.0rc1
+Version: 10.21.1rc1
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,6 +22,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyOpenSSL>=23.0.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
 Requires-Dist: apache-airflow-providers-common-sql>=1.7.2rc0
 Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: asgiref>=3.5.2
@@ -90,6 +91,7 @@ Requires-Dist: apache-airflow-providers-apache-beam ; extra == "apache.beam"
 Requires-Dist: apache-beam[gcp] ; extra == "apache.beam"
 Requires-Dist: apache-airflow-providers-apache-cassandra ; extra == "apache.cassandra"
 Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
+Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-facebook>=2.2.0rc0 ; extra == "facebook"
 Requires-Dist: plyvel ; extra == "leveldb"
@@ -105,8 +107,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.1
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -115,6 +117,7 @@ Provides-Extra: amazon
 Provides-Extra: apache.beam
 Provides-Extra: apache.cassandra
 Provides-Extra: cncf.kubernetes
+Provides-Extra: common.compat
 Provides-Extra: common.sql
 Provides-Extra: facebook
 Provides-Extra: leveldb
@@ -174,7 +177,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``10.21.0.rc1``
+Release: ``10.21.1.rc1``
 
 
 Google services including:
@@ -194,7 +197,7 @@ This is a provider package for ``google`` provider. All classes for this provider package
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.1/>`_.
 
 Installation
 ------------
@@ -208,74 +211,75 @@ The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 Requirements
 ------------
 
-=======================================  =========================================
-PIP package                              Version required
-=======================================  =========================================
-``apache-airflow``                       ``>=2.7.0``
-``apache-airflow-providers-common-sql``  ``>=1.7.2``
-``asgiref``                              ``>=3.5.2``
-``dill``                                 ``>=0.2.3``
-``gcloud-aio-auth``                      ``>=4.0.0,<5.0.0``
-``gcloud-aio-bigquery``                  ``>=6.1.2``
-``gcloud-aio-storage``                   ``>=9.0.0``
-``gcsfs``                                ``>=2023.10.0``
-``google-ads``                           ``>=24.1.0``
-``google-analytics-admin``               ``>=0.9.0``
-``google-api-core``                      ``>=2.11.0,!=2.16.0,!=2.18.0``
-``google-api-python-client``             ``>=2.0.2``
-``google-auth``                          ``>=2.29.0``
-``google-auth-httplib2``                 ``>=0.0.1``
-``google-cloud-aiplatform``              ``>=1.57.0``
-``google-cloud-automl``                  ``>=2.12.0``
-``google-cloud-bigquery``                ``<3.21.0,>=3.4.0``
-``google-cloud-bigquery-datatransfer``   ``>=3.13.0``
-``google-cloud-bigtable``                ``>=2.17.0``
-``google-cloud-build``                   ``>=3.22.0``
-``google-cloud-compute``                 ``>=1.10.0``
-``google-cloud-container``               ``>=2.17.4``
-``google-cloud-datacatalog``             ``>=3.11.1``
-``google-cloud-dataflow-client``         ``>=0.8.6``
-``google-cloud-dataform``                ``>=0.5.0``
-``google-cloud-dataplex``                ``>=1.10.0``
-``google-cloud-dataproc``                ``>=5.8.0``
-``google-cloud-dataproc-metastore``      ``>=1.12.0``
-``google-cloud-dlp``                     ``>=3.12.0``
-``google-cloud-kms``                     ``>=2.15.0``
-``google-cloud-language``                ``>=2.9.0``
-``google-cloud-logging``                 ``>=3.5.0``
-``google-cloud-memcache``                ``>=1.7.0``
-``google-cloud-monitoring``              ``>=2.18.0``
-``google-cloud-orchestration-airflow``   ``>=1.10.0``
-``google-cloud-os-login``                ``>=2.9.1``
-``google-cloud-pubsub``                  ``>=2.19.0``
-``google-cloud-redis``                   ``>=2.12.0``
-``google-cloud-secret-manager``          ``>=2.16.0``
-``google-cloud-spanner``                 ``>=3.11.1``
-``google-cloud-speech``                  ``>=2.18.0``
-``google-cloud-storage``                 ``>=2.7.0``
-``google-cloud-storage-transfer``        ``>=1.4.1``
-``google-cloud-tasks``                   ``>=2.13.0``
-``google-cloud-texttospeech``            ``>=2.14.1``
-``google-cloud-translate``               ``>=3.11.0``
-``google-cloud-videointelligence``       ``>=2.11.0``
-``google-cloud-vision``                  ``>=3.4.0``
-``google-cloud-workflows``               ``>=1.10.0``
-``google-cloud-run``                     ``>=0.10.0``
-``google-cloud-batch``                   ``>=0.13.0``
-``grpcio-gcp``                           ``>=0.2.2``
-``httpx``                                ``>=0.25.0``
-``json-merge-patch``                     ``>=0.2``
-``looker-sdk``                           ``>=22.4.0``
-``pandas-gbq``                           ``>=0.7.0``
-``pandas``                               ``>=2.1.2,<2.2; python_version >= "3.9"``
-``pandas``                               ``>=1.5.3,<2.2; python_version < "3.9"``
-``proto-plus``                           ``>=1.19.6``
-``python-slugify``                       ``>=7.0.0``
-``PyOpenSSL``                            ``>=23.0.0``
-``sqlalchemy-bigquery``                  ``>=1.2.1``
-``sqlalchemy-spanner``                   ``>=1.6.2``
-``tenacity``                             ``>=8.1.0``
-=======================================  =========================================
+========================================== =========================================
+PIP package                                Version required
+========================================== =========================================
+``apache-airflow``                         ``>=2.7.0``
+``apache-airflow-providers-common-compat`` ``>=1.1.0``
+``apache-airflow-providers-common-sql``    ``>=1.7.2``
+``asgiref``                                ``>=3.5.2``
+``dill``                                   ``>=0.2.3``
+``gcloud-aio-auth``                        ``>=4.0.0,<5.0.0``
+``gcloud-aio-bigquery``                    ``>=6.1.2``
+``gcloud-aio-storage``                     ``>=9.0.0``
+``gcsfs``                                  ``>=2023.10.0``
+``google-ads``                             ``>=24.1.0``
+``google-analytics-admin``                 ``>=0.9.0``
+``google-api-core``                        ``>=2.11.0,!=2.16.0,!=2.18.0``
+``google-api-python-client``               ``>=2.0.2``
+``google-auth``                            ``>=2.29.0``
+``google-auth-httplib2``                   ``>=0.0.1``
+``google-cloud-aiplatform``                ``>=1.57.0``
+``google-cloud-automl``                    ``>=2.12.0``
+``google-cloud-bigquery``                  ``<3.21.0,>=3.4.0``
+``google-cloud-bigquery-datatransfer``     ``>=3.13.0``
+``google-cloud-bigtable``                  ``>=2.17.0``
+``google-cloud-build``                     ``>=3.22.0``
+``google-cloud-compute``                   ``>=1.10.0``
+``google-cloud-container``                 ``>=2.17.4``
+``google-cloud-datacatalog``               ``>=3.11.1``
+``google-cloud-dataflow-client``           ``>=0.8.6``
+``google-cloud-dataform``                  ``>=0.5.0``
+``google-cloud-dataplex``                  ``>=1.10.0``
+``google-cloud-dataproc``                  ``>=5.8.0``
+``google-cloud-dataproc-metastore``        ``>=1.12.0``
+``google-cloud-dlp``                       ``>=3.12.0``
+``google-cloud-kms``                       ``>=2.15.0``
+``google-cloud-language``                  ``>=2.9.0``
+``google-cloud-logging``                   ``>=3.5.0``
+``google-cloud-memcache``                  ``>=1.7.0``
+``google-cloud-monitoring``                ``>=2.18.0``
+``google-cloud-orchestration-airflow``     ``>=1.10.0``
+``google-cloud-os-login``                  ``>=2.9.1``
+``google-cloud-pubsub``                    ``>=2.19.0``
+``google-cloud-redis``                     ``>=2.12.0``
+``google-cloud-secret-manager``            ``>=2.16.0``
+``google-cloud-spanner``                   ``>=3.11.1``
+``google-cloud-speech``                    ``>=2.18.0``
+``google-cloud-storage``                   ``>=2.7.0``
+``google-cloud-storage-transfer``          ``>=1.4.1``
+``google-cloud-tasks``                     ``>=2.13.0``
+``google-cloud-texttospeech``              ``>=2.14.1``
+``google-cloud-translate``                 ``>=3.11.0``
+``google-cloud-videointelligence``         ``>=2.11.0``
+``google-cloud-vision``                    ``>=3.4.0``
+``google-cloud-workflows``                 ``>=1.10.0``
+``google-cloud-run``                       ``>=0.10.0``
+``google-cloud-batch``                     ``>=0.13.0``
+``grpcio-gcp``                             ``>=0.2.2``
+``httpx``                                  ``>=0.25.0``
+``json-merge-patch``                       ``>=0.2``
+``looker-sdk``                             ``>=22.4.0``
+``pandas-gbq``                             ``>=0.7.0``
+``pandas``                                 ``>=2.1.2,<2.2; python_version >= "3.9"``
+``pandas``                                 ``>=1.5.3,<2.2; python_version < "3.9"``
+``proto-plus``                             ``>=1.19.6``
+``python-slugify``                         ``>=7.0.0``
+``PyOpenSSL``                              ``>=23.0.0``
+``sqlalchemy-bigquery``                    ``>=1.2.1``
+``sqlalchemy-spanner``                     ``>=1.6.2``
+``tenacity``                               ``>=8.1.0``
+========================================== =========================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -297,6 +301,7 @@ Dependent package
 `apache-airflow-providers-apache-beam <https://airflow.apache.org/docs/apache-airflow-providers-apache-beam>`_           ``apache.beam``
 `apache-airflow-providers-apache-cassandra <https://airflow.apache.org/docs/apache-airflow-providers-apache-cassandra>`_ ``apache.cassandra``
 `apache-airflow-providers-cncf-kubernetes <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes>`_   ``cncf.kubernetes``
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_       ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_             ``common.sql``
 `apache-airflow-providers-facebook <https://airflow.apache.org/docs/apache-airflow-providers-facebook>`_                 ``facebook``
 `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_   ``microsoft.azure``
@@ -313,4 +318,4 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.1/changelog.html>`_.
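
Packaging-wise, the new openlineage compat layer surfaces in three places in this release: a
runtime dependency on apache-airflow-providers-common-compat>=1.1.0 (in get_provider_info.py
and the Requires-Dist entries), a new ``common.compat`` extra, and the corresponding row in the
cross-provider dependency table above.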
apache_airflow_providers_google-10.21.1rc1.dist-info/RECORD
@@ -1,6 +1,6 @@
 airflow/providers/google/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/google/__init__.py,sha256=
-airflow/providers/google/get_provider_info.py,sha256=
+airflow/providers/google/__init__.py,sha256=GHoF0_Dab1MavJJls-ic8buuO7ZKBAUC98uTOEffKNU,1495
+airflow/providers/google/get_provider_info.py,sha256=nRFCOZSoLWDT95bnj9UFHtI8j071DErhIZUnZ4gHpdA,82075
 airflow/providers/google/go_module_utils.py,sha256=XVM-IGME6CPgJA8fgDgkusFc4fz3lEghZaZ4elBkv7s,1780
 airflow/providers/google/ads/.gitignore,sha256=z_qaKzblF2LuVvP-06iDord9JBeyzIlNeJ4bx3LbtGc,167
 airflow/providers/google/ads/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -31,7 +31,7 @@ airflow/providers/google/cloud/hooks/cloud_build.py,sha256=jqjvBxP1T4u-h7lDcmADg
 airflow/providers/google/cloud/hooks/cloud_composer.py,sha256=_GRmI4CHry6_J8-ABccsSy-xpQJ5hmHxzu9je4UGCSg,28716
 airflow/providers/google/cloud/hooks/cloud_memorystore.py,sha256=4hUbVhN-AphoOjQ5NB2Ge4Z2Ay6vd5gnyvlxE3ffzWQ,40442
 airflow/providers/google/cloud/hooks/cloud_run.py,sha256=VfcDtF1APloCiVn6Fu6vEiXrRvAFaPnkndq_I8bfIlk,7362
-airflow/providers/google/cloud/hooks/cloud_sql.py,sha256=
+airflow/providers/google/cloud/hooks/cloud_sql.py,sha256=4kEY5py-n-dvTKiQwgpFLYEDBe8MmBF6WqzrN4sfKcA,53344
 airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py,sha256=hn0M_lJ8l_FdSeax6UeLmynLXtYvTPDUIjDNQ5mARyk,22963
 airflow/providers/google/cloud/hooks/compute.py,sha256=sBpi7oZjJwV4JPkWm3iKw23l7cAjHDET2MU9S8FOc58,40673
 airflow/providers/google/cloud/hooks/compute_ssh.py,sha256=GTSiuhDFpeN-7n8ggrF-XBaPQ2hfk80tWChGImYGpTo,15689
@@ -42,7 +42,7 @@ airflow/providers/google/cloud/hooks/datafusion.py,sha256=un_0r0fwiLPffNG-9tWN05
 airflow/providers/google/cloud/hooks/datapipeline.py,sha256=PKbjlufsoT-rk0IbHBWzW8CjsLSJLYvttGSn5CTg3D0,2631
 airflow/providers/google/cloud/hooks/dataplex.py,sha256=hOAQ5gXBE0V6fw5Y_7Q8BymD6_GmFGsc8TPvd4SwJPM,38347
 airflow/providers/google/cloud/hooks/dataprep.py,sha256=GH46CoEMnc63RoMiJ7aKvsHlGFvsBl_QcRgbmWwJ5tU,12187
-airflow/providers/google/cloud/hooks/dataproc.py,sha256=
+airflow/providers/google/cloud/hooks/dataproc.py,sha256=L6NxhRIJpdAWUxrdwj9fi0vZ4MOkyMnF_BK90rwzumg,83954
 airflow/providers/google/cloud/hooks/dataproc_metastore.py,sha256=Oh6I6PbawdCb0hkfrFNU4BVbxWangCcjIJdOBAh7q2Q,32152
 airflow/providers/google/cloud/hooks/datastore.py,sha256=JuXTZqL-FAohbKBHQVYLCS3tY9pvMDjlg-dSphKiyPU,12158
 airflow/providers/google/cloud/hooks/dlp.py,sha256=U1mnUEIQBcvh0bxf9RgeKLK2gjWx09qGh-hvCDOL_8k,67586
@@ -70,7 +70,7 @@ airflow/providers/google/cloud/hooks/workflows.py,sha256=M9U_Sn_Dll7FBHZ2wKi0xaJ
 airflow/providers/google/cloud/hooks/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py,sha256=A0-dVLmM_UVuHDgGyyrjUN6kaGjeHfWbRX-HD-7sFJA,87298
 airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py,sha256=zXxDwApchANkaP3_6j6MsSiiy8rC9ra7pN_A_9AmPFw,34759
-airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py,sha256=
+airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py,sha256=zZVxLGblWCtr8nuxT_2aCNzuq4xd7tVvfADcqEji5Vk,196976
 airflow/providers/google/cloud/hooks/vertex_ai/dataset.py,sha256=hSkQL6d9j5tLCRR7LBDSAbGWWGrdwWBeIGnB21rjkCQ,18693
 airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py,sha256=_rO_E1EL75ZclFgm1JY78CtrOkB4XcLUkmEbk5qI-sQ,16109
 airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py,sha256=76dwagsiJmB4kYx6ISo7HEot56elu0817KWkAxDCc3w,15978
@@ -115,11 +115,11 @@ airflow/providers/google/cloud/log/stackdriver_task_handler.py,sha256=5yh-Zgh2Me
 airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json,sha256=3whXAY38LjxmQTpCnExiIU1Q1-8dZGtWjiK0A4JQWTA,688
 airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json,sha256=sWvE1o30bCqBBmB19n6wFZyL6BBcc22kCGWe0qcsYBc,850
 airflow/providers/google/cloud/openlineage/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/openlineage/mixins.py,sha256=
-airflow/providers/google/cloud/openlineage/utils.py,sha256=
+airflow/providers/google/cloud/openlineage/mixins.py,sha256=5blMJ1heK9rMYBWR5p-lBj1y_EA1qJyjfb-fwdBM31k,12908
+airflow/providers/google/cloud/openlineage/utils.py,sha256=iBeHh68qMumZ1pc-PWRBgQW8sycwmdtkJV-oCvYplPA,5781
 airflow/providers/google/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/operators/automl.py,sha256=uZ-PSDqfBBviH0YWxh8gefr4DU5xiEkucYGRtSyDOG8,63479
-airflow/providers/google/cloud/operators/bigquery.py,sha256=
+airflow/providers/google/cloud/operators/bigquery.py,sha256=ZzUjaqpc97Ez5Q6ONwKHzSD3lbkJZ_JcR-uZ-RvkjUQ,131001
 airflow/providers/google/cloud/operators/bigquery_dts.py,sha256=6VJISM4HoMBQ3EQ5nz3zxFk8tfluGA1d2vcUNUlYLPc,17695
 airflow/providers/google/cloud/operators/bigtable.py,sha256=BnWHnTEscyPbsKWFaSreLr62W68fmHu5loQVZex7LPs,26921
 airflow/providers/google/cloud/operators/cloud_base.py,sha256=Xysh4znvIQIxbQqmfKoaL6O09FikndHrQuKKUnEV7KU,1483
@@ -132,18 +132,18 @@ airflow/providers/google/cloud/operators/cloud_sql.py,sha256=VA_RRg_Zv3zo4cKmpEf
 airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py,sha256=YQsVg8pDegEDvsWsZCbGrSqCln3iQyLQErZS_XZTnBo,48066
 airflow/providers/google/cloud/operators/compute.py,sha256=lFGCacevkKJvAszJhfSLAOfetlsbYrCoImTeWXS5bqw,74607
 airflow/providers/google/cloud/operators/datacatalog.py,sha256=TY0KZtphd28mNmZF4f3pV-G4-K61nwlOzBok1ZHqG_E,92962
-airflow/providers/google/cloud/operators/dataflow.py,sha256=
+airflow/providers/google/cloud/operators/dataflow.py,sha256=oETS5PdE1RqXUPu82ej1Vsl5BgGHOOSubcu_jmS9yI4,71500
 airflow/providers/google/cloud/operators/dataform.py,sha256=MkRDkn12gm2PJHOIfCs61N8nTuF3bfrcbWT4zSCXOdI,48745
 airflow/providers/google/cloud/operators/datafusion.py,sha256=NZoR65aChdkPUG8bxEaXDvMLvyMOVBoLkkJO9_v-2u0,41495
 airflow/providers/google/cloud/operators/datapipeline.py,sha256=3Zk_v_wlCKRuqx5yeajNWGIJN09xGNWJAN0qwsWE56o,2403
 airflow/providers/google/cloud/operators/dataplex.py,sha256=IgGwt95uW72IeLi1oHpGk8V0fKyt9apsc4kUpBz_7YQ,91195
 airflow/providers/google/cloud/operators/dataprep.py,sha256=jTDDgRccd2zIUqGzJebZpbNTJsFdRi5RnMtldXHqiMs,10477
-airflow/providers/google/cloud/operators/dataproc.py,sha256=
-airflow/providers/google/cloud/operators/dataproc_metastore.py,sha256=
+airflow/providers/google/cloud/operators/dataproc.py,sha256=K_77TrB5P4Wr0R6KZ-yerO-pbF54rPGBZ1RfRVrkWDA,153070
+airflow/providers/google/cloud/operators/dataproc_metastore.py,sha256=mJOqDv4GEqQ7tx32ar-mMsPhIjYC_B1AZyiVDZBKOio,50402
 airflow/providers/google/cloud/operators/datastore.py,sha256=di00jFy3Z1v0GcmcQ0df8NJ32yxcseOqWuojC4TKdmY,24927
 airflow/providers/google/cloud/operators/dlp.py,sha256=SQCGml0RIKl0UrvXHIUiOskg5ayTj4F5_4k4rztClvM,120742
 airflow/providers/google/cloud/operators/functions.py,sha256=dL5uaYtAWujwvAID_kLsyEsQ-ThFXGrEsg5Tk277FMs,20155
-airflow/providers/google/cloud/operators/gcs.py,sha256=
+airflow/providers/google/cloud/operators/gcs.py,sha256=kNnjfYeXkQEkUimTg0CinQ4h9oSxx8YJpupb-IliJJs,46618
 airflow/providers/google/cloud/operators/kubernetes_engine.py,sha256=FuvYcibtKb94HqzvV-Pmq_rLP7xo_NpGii-KVmtEQI4,69865
 airflow/providers/google/cloud/operators/life_sciences.py,sha256=cQzFWGdwh4yr44j7nfMXdGnPVRkeXwkrj_qdzlchD-w,4816
 airflow/providers/google/cloud/operators/looker.py,sha256=LCbN0vv8y0exwvfHbRXmUtNUZIOlSfljInNZK1zcfrs,4063
@@ -163,7 +163,7 @@ airflow/providers/google/cloud/operators/workflows.py,sha256=fnyWLqRHz0UYu6AnQKK
 airflow/providers/google/cloud/operators/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py,sha256=t_gFLDOlg8ywWf5A985zcPFr9kzTLkYSgHApVD1qGhg,31763
 airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py,sha256=e0oFWmCRH3aQHNckjEf_YO5zP9LqiLVTzB1QTgv3iUo,28828
-airflow/providers/google/cloud/operators/vertex_ai/custom_job.py,sha256=
+airflow/providers/google/cloud/operators/vertex_ai/custom_job.py,sha256=USHusi4XUWqRlem0k5lyUXFtXxKCstrifLmeMZcQp7Q,98809
 airflow/providers/google/cloud/operators/vertex_ai/dataset.py,sha256=u_iEOIJEoZj10bbiymvoAT7aJP50HyZMt7_2KxUKbxM,23051
 airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py,sha256=mtbgC2NrolxIxyxOmqlnokc22NtCspa8oXwuHo140EU,26739
 airflow/providers/google/cloud/operators/vertex_ai/generative_model.py,sha256=WYkTRLI7_EbGtvh59hCj1I7y7rjGfp6Znzw5E52p0n0,25200
@@ -192,10 +192,10 @@ airflow/providers/google/cloud/sensors/tasks.py,sha256=Y2t5OYIH98tsIN7G2LwSKnYZs
 airflow/providers/google/cloud/sensors/workflows.py,sha256=iJdPt5J-9nTNFIWRcQqzXleyzbasJRsF5uPtvVfIxLA,5332
 airflow/providers/google/cloud/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/transfers/adls_to_gcs.py,sha256=ona1WA9oNhozj_JPI28sFUOukMJehkvQyjB558vfp4Q,6739
-airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py,sha256=
+airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py,sha256=dIkxK_DlFOMjMdnGYNHW5bhl5LV5CcT4oFKtBDOKAko,5380
 airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py,sha256=JlPRkiDsUBbu4WWHw-rTE2efmMw3uI84LR7pz4jHn1o,8182
 airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py,sha256=jQzd_QtgGDAcfWvLgKPSQuH70t-GFFbzrV7vS6Y-Zd4,8180
-airflow/providers/google/cloud/transfers/bigquery_to_gcs.py,sha256=
+airflow/providers/google/cloud/transfers/bigquery_to_gcs.py,sha256=Vyvr1R7Kc44ZkUGDHAgdyokWsoRwH6CY1QHZV-fQj-A,15071
 airflow/providers/google/cloud/transfers/bigquery_to_mssql.py,sha256=24MKm3FeZ1HLVnNhxVUre4nr5KztYJKFgc-yO55sdAk,4056
 airflow/providers/google/cloud/transfers/bigquery_to_mysql.py,sha256=zMEadzSW-lXrxvSVHimNX9NQN2_6Q55dzrqHehQJKlA,3092
 airflow/providers/google/cloud/transfers/bigquery_to_postgres.py,sha256=8A3gJq-ZxO0Vw3UTyAIZU1dZC31bgFQAq5_-UUjbpP4,4661
@@ -203,8 +203,8 @@ airflow/providers/google/cloud/transfers/bigquery_to_sql.py,sha256=n9MdtPYPLTXQx
 airflow/providers/google/cloud/transfers/calendar_to_gcs.py,sha256=Yba_iwBxCFXW-jHcpB9fTd8bCgq5J4yJr764N1Xe7Ss,8399
 airflow/providers/google/cloud/transfers/cassandra_to_gcs.py,sha256=3f2Q6seMBXHbLOXYa3m5GDh6N8fY-IOQSn1lu6Jmnv4,15814
 airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py,sha256=QQinZNJhK8iKv-w_eBbNVzeAqfHLABqyL_Z6Fe2-z6I,10327
-airflow/providers/google/cloud/transfers/gcs_to_bigquery.py,sha256=
-airflow/providers/google/cloud/transfers/gcs_to_gcs.py,sha256=
+airflow/providers/google/cloud/transfers/gcs_to_bigquery.py,sha256=PBmmKoniMPOkcJK0_-ltyfkV0uAww6uVqcVRxQopByU,37033
+airflow/providers/google/cloud/transfers/gcs_to_gcs.py,sha256=3vQoTt95moWeRN1KDD90bONcQqaQhRmhvFe8uf5h6SA,26341
 airflow/providers/google/cloud/transfers/gcs_to_local.py,sha256=qWUR9TZrj3Z5WzvO7gBAbr_V8LNhVHCAnjdlVgyuZBY,5274
 airflow/providers/google/cloud/transfers/gcs_to_sftp.py,sha256=km9StbMCeVZc3Nupzkp9CJk9B2c_8dLmrbhwF1lTukU,8409
 airflow/providers/google/cloud/transfers/gdrive_to_gcs.py,sha256=mkpIzRJEPnrYwncWWmnKwBdbhvNFoYVyMcNiKwlLrz4,4149
@@ -288,7 +288,7 @@ airflow/providers/google/marketing_platform/links/__init__.py,sha256=9hdXHABrVpk
 airflow/providers/google/marketing_platform/links/analytics_admin.py,sha256=goKCFxRbYA-lJuHlo7nJInOmWizA_3oG8ccT4sQ9cS8,2118
 airflow/providers/google/marketing_platform/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/marketing_platform/operators/analytics.py,sha256=DVb46WF_LNBrmz2vgoTkd4Y6DDT25GGAJjxbCSyLMCg,23511
-airflow/providers/google/marketing_platform/operators/analytics_admin.py,sha256=
+airflow/providers/google/marketing_platform/operators/analytics_admin.py,sha256=8IrQaAq2SEutNKCO-YqO92ZSLp-o7YjnvillPNyMrKE,23828
 airflow/providers/google/marketing_platform/operators/campaign_manager.py,sha256=BVKge_GPsiQEqhu5VJ7m8eyfiL_-q8OGJJ6iYpCs540,24935
 airflow/providers/google/marketing_platform/operators/display_video.py,sha256=nMSicrDzw4pbCHx6IYnGJwtI17kvsRkLPRreD0QmcUQ,28179
 airflow/providers/google/marketing_platform/operators/search_ads.py,sha256=LSVgUHvhX802lcDIAFwweijwGTAmN9NoAmyI-2UD-mM,9294
@@ -310,7 +310,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=CxtVhp3wlEOBtjR
 airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=4nwXWkTySeBXNuThPxzO7uww_hH6PthpppTeuShn27Q,4363
 airflow/providers/google/suite/transfers/local_to_drive.py,sha256=ZSK0b1Rd6x_xsP2DVcUzeYu3qoo9Bsp3VmnKyBsFRH8,6105
 airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=sORkYSUDArRPnvi8WCiXP7YIXtpAgpEPhf8cqgpu644,5220
-apache_airflow_providers_google-10.21.0rc1.dist-info/entry_points.txt,sha256=
-apache_airflow_providers_google-10.21.0rc1.dist-info/WHEEL,sha256=
-apache_airflow_providers_google-10.21.0rc1.dist-info/METADATA,sha256=
-apache_airflow_providers_google-10.21.0rc1.dist-info/RECORD,,
+apache_airflow_providers_google-10.21.1rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+apache_airflow_providers_google-10.21.1rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_google-10.21.1rc1.dist-info/METADATA,sha256=m0U3M3YFSxrrCHr6O0bIdpqhui2Pm54pax_RmoFyYz8,17137
+apache_airflow_providers_google-10.21.1rc1.dist-info/RECORD,,
Files without changes: WHEEL, entry_points.txt