apache-airflow-providers-google 12.0.0rc2__py3-none-any.whl → 14.0.0__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- airflow/providers/google/LICENSE +0 -52
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/ads/hooks/ads.py +27 -13
- airflow/providers/google/ads/transfers/ads_to_gcs.py +18 -4
- airflow/providers/google/assets/bigquery.py +17 -0
- airflow/providers/google/cloud/_internal_client/secret_manager_client.py +2 -3
- airflow/providers/google/cloud/hooks/alloy_db.py +736 -8
- airflow/providers/google/cloud/hooks/automl.py +10 -4
- airflow/providers/google/cloud/hooks/bigquery.py +125 -22
- airflow/providers/google/cloud/hooks/bigquery_dts.py +8 -8
- airflow/providers/google/cloud/hooks/bigtable.py +2 -3
- airflow/providers/google/cloud/hooks/cloud_batch.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_build.py +4 -5
- airflow/providers/google/cloud/hooks/cloud_composer.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_memorystore.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_run.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_sql.py +7 -3
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +119 -7
- airflow/providers/google/cloud/hooks/compute.py +3 -3
- airflow/providers/google/cloud/hooks/datacatalog.py +3 -4
- airflow/providers/google/cloud/hooks/dataflow.py +12 -12
- airflow/providers/google/cloud/hooks/dataform.py +2 -3
- airflow/providers/google/cloud/hooks/datafusion.py +2 -2
- airflow/providers/google/cloud/hooks/dataplex.py +1032 -11
- airflow/providers/google/cloud/hooks/dataproc.py +4 -5
- airflow/providers/google/cloud/hooks/dataproc_metastore.py +3 -4
- airflow/providers/google/cloud/hooks/dlp.py +3 -4
- airflow/providers/google/cloud/hooks/gcs.py +7 -6
- airflow/providers/google/cloud/hooks/kms.py +2 -3
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +8 -8
- airflow/providers/google/cloud/hooks/life_sciences.py +1 -1
- airflow/providers/google/cloud/hooks/managed_kafka.py +482 -0
- airflow/providers/google/cloud/hooks/natural_language.py +2 -3
- airflow/providers/google/cloud/hooks/os_login.py +2 -3
- airflow/providers/google/cloud/hooks/pubsub.py +6 -6
- airflow/providers/google/cloud/hooks/secret_manager.py +2 -3
- airflow/providers/google/cloud/hooks/spanner.py +2 -2
- airflow/providers/google/cloud/hooks/speech_to_text.py +2 -3
- airflow/providers/google/cloud/hooks/stackdriver.py +4 -4
- airflow/providers/google/cloud/hooks/tasks.py +3 -4
- airflow/providers/google/cloud/hooks/text_to_speech.py +2 -3
- airflow/providers/google/cloud/hooks/translate.py +236 -5
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +9 -4
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +4 -5
- airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +2 -3
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +1 -181
- airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +2 -3
- airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py +2 -3
- airflow/providers/google/cloud/hooks/video_intelligence.py +2 -3
- airflow/providers/google/cloud/hooks/vision.py +3 -4
- airflow/providers/google/cloud/hooks/workflows.py +2 -3
- airflow/providers/google/cloud/links/alloy_db.py +46 -0
- airflow/providers/google/cloud/links/bigquery.py +25 -0
- airflow/providers/google/cloud/links/dataplex.py +172 -2
- airflow/providers/google/cloud/links/kubernetes_engine.py +1 -2
- airflow/providers/google/cloud/links/managed_kafka.py +104 -0
- airflow/providers/google/cloud/links/translate.py +28 -0
- airflow/providers/google/cloud/log/gcs_task_handler.py +3 -3
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +11 -10
- airflow/providers/google/cloud/openlineage/facets.py +67 -0
- airflow/providers/google/cloud/openlineage/mixins.py +438 -173
- airflow/providers/google/cloud/openlineage/utils.py +394 -61
- airflow/providers/google/cloud/operators/alloy_db.py +980 -69
- airflow/providers/google/cloud/operators/automl.py +83 -245
- airflow/providers/google/cloud/operators/bigquery.py +377 -74
- airflow/providers/google/cloud/operators/bigquery_dts.py +126 -13
- airflow/providers/google/cloud/operators/bigtable.py +1 -3
- airflow/providers/google/cloud/operators/cloud_base.py +1 -2
- airflow/providers/google/cloud/operators/cloud_batch.py +2 -4
- airflow/providers/google/cloud/operators/cloud_build.py +3 -5
- airflow/providers/google/cloud/operators/cloud_composer.py +5 -7
- airflow/providers/google/cloud/operators/cloud_memorystore.py +4 -6
- airflow/providers/google/cloud/operators/cloud_run.py +6 -5
- airflow/providers/google/cloud/operators/cloud_sql.py +20 -8
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +62 -8
- airflow/providers/google/cloud/operators/compute.py +3 -4
- airflow/providers/google/cloud/operators/datacatalog.py +9 -11
- airflow/providers/google/cloud/operators/dataflow.py +1 -112
- airflow/providers/google/cloud/operators/dataform.py +3 -5
- airflow/providers/google/cloud/operators/datafusion.py +1 -1
- airflow/providers/google/cloud/operators/dataplex.py +2046 -7
- airflow/providers/google/cloud/operators/dataproc.py +102 -17
- airflow/providers/google/cloud/operators/dataproc_metastore.py +7 -9
- airflow/providers/google/cloud/operators/dlp.py +17 -19
- airflow/providers/google/cloud/operators/gcs.py +14 -17
- airflow/providers/google/cloud/operators/kubernetes_engine.py +2 -2
- airflow/providers/google/cloud/operators/managed_kafka.py +788 -0
- airflow/providers/google/cloud/operators/natural_language.py +3 -5
- airflow/providers/google/cloud/operators/pubsub.py +39 -7
- airflow/providers/google/cloud/operators/speech_to_text.py +3 -5
- airflow/providers/google/cloud/operators/stackdriver.py +3 -5
- airflow/providers/google/cloud/operators/tasks.py +4 -6
- airflow/providers/google/cloud/operators/text_to_speech.py +2 -4
- airflow/providers/google/cloud/operators/translate.py +414 -5
- airflow/providers/google/cloud/operators/translate_speech.py +2 -4
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +9 -8
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +6 -8
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +0 -322
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +4 -6
- airflow/providers/google/cloud/operators/video_intelligence.py +3 -5
- airflow/providers/google/cloud/operators/vision.py +4 -6
- airflow/providers/google/cloud/operators/workflows.py +5 -7
- airflow/providers/google/cloud/secrets/secret_manager.py +1 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +3 -5
- airflow/providers/google/cloud/sensors/bigtable.py +2 -3
- airflow/providers/google/cloud/sensors/cloud_composer.py +32 -8
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +39 -1
- airflow/providers/google/cloud/sensors/dataplex.py +4 -6
- airflow/providers/google/cloud/sensors/dataproc.py +2 -3
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -2
- airflow/providers/google/cloud/sensors/gcs.py +2 -4
- airflow/providers/google/cloud/sensors/pubsub.py +2 -3
- airflow/providers/google/cloud/sensors/workflows.py +3 -5
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +5 -5
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +10 -12
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +36 -4
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +27 -2
- airflow/providers/google/cloud/transfers/mysql_to_gcs.py +27 -2
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +27 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +34 -5
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +15 -0
- airflow/providers/google/cloud/transfers/trino_to_gcs.py +25 -2
- airflow/providers/google/cloud/triggers/bigquery_dts.py +1 -2
- airflow/providers/google/cloud/triggers/cloud_batch.py +1 -2
- airflow/providers/google/cloud/triggers/cloud_build.py +1 -2
- airflow/providers/google/cloud/triggers/cloud_composer.py +13 -3
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +102 -4
- airflow/providers/google/cloud/triggers/dataflow.py +2 -3
- airflow/providers/google/cloud/triggers/dataplex.py +1 -2
- airflow/providers/google/cloud/triggers/dataproc.py +2 -3
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +1 -2
- airflow/providers/google/cloud/triggers/vertex_ai.py +7 -8
- airflow/providers/google/cloud/utils/credentials_provider.py +15 -8
- airflow/providers/google/cloud/utils/external_token_supplier.py +1 -0
- airflow/providers/google/common/auth_backend/google_openid.py +4 -4
- airflow/providers/google/common/consts.py +1 -2
- airflow/providers/google/common/hooks/base_google.py +8 -7
- airflow/providers/google/get_provider_info.py +186 -134
- airflow/providers/google/marketing_platform/hooks/analytics_admin.py +2 -3
- airflow/providers/google/marketing_platform/hooks/search_ads.py +1 -1
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +5 -7
- {apache_airflow_providers_google-12.0.0rc2.dist-info → apache_airflow_providers_google-14.0.0.dist-info}/METADATA +41 -58
- {apache_airflow_providers_google-12.0.0rc2.dist-info → apache_airflow_providers_google-14.0.0.dist-info}/RECORD +157 -159
- airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +0 -141
- airflow/providers/google/cloud/example_dags/example_looker.py +0 -64
- airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +0 -194
- airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py +0 -129
- airflow/providers/google/marketing_platform/example_dags/__init__.py +0 -16
- airflow/providers/google/marketing_platform/example_dags/example_display_video.py +0 -213
- {apache_airflow_providers_google-12.0.0rc2.dist-info → apache_airflow_providers_google-14.0.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-12.0.0rc2.dist-info → apache_airflow_providers_google-14.0.0.dist-info}/entry_points.txt +0 -0
--- a/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
+++ b/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
@@ -24,7 +24,7 @@ from datetime import timedelta
 from typing import TYPE_CHECKING
 
 import vertexai
-from vertexai.generative_models import GenerativeModel, Part
+from vertexai.generative_models import GenerativeModel
 from vertexai.language_models import TextEmbeddingModel, TextGenerationModel
 from vertexai.preview.caching import CachedContent
 from vertexai.preview.evaluation import EvalResult, EvalTask
@@ -100,186 +100,6 @@ class GenerativeModelHook(GoogleBaseHook):
         cached_context_model = preview_generative_model.from_cached_content(cached_content)
         return cached_context_model
 
-    @deprecated(
-        planned_removal_date="January 01, 2025",
-        use_instead="Part objects included in contents parameter of "
-        "airflow.providers.google.cloud.hooks.generative_model."
-        "GenerativeModelHook.generative_model_generate_content",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def get_generative_model_part(self, content_gcs_path: str, content_mime_type: str | None = None) -> Part:
-        """Return a Generative Model Part object."""
-        part = Part.from_uri(content_gcs_path, mime_type=content_mime_type)
-        return part
-
-    @deprecated(
-        planned_removal_date="January 01, 2025",
-        use_instead="airflow.providers.google.cloud.hooks.generative_model."
-        "GenerativeModelHook.text_generation_model_predict",
-        category=AirflowProviderDeprecationWarning,
-    )
-    @GoogleBaseHook.fallback_to_default_project_id
-    def prompt_language_model(
-        self,
-        prompt: str,
-        pretrained_model: str,
-        temperature: float,
-        max_output_tokens: int,
-        top_p: float,
-        top_k: int,
-        location: str,
-        project_id: str = PROVIDE_PROJECT_ID,
-    ) -> str:
-        """
-        Use the Vertex AI PaLM API to generate natural language text.
-
-        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
-        :param location: Required. The ID of the Google Cloud location that the service belongs to.
-        :param prompt: Required. Inputs or queries that a user or a program gives
-            to the Vertex AI PaLM API, in order to elicit a specific response.
-        :param pretrained_model: A pre-trained model optimized for performing natural
-            language tasks such as classification, summarization, extraction, content
-            creation, and ideation.
-        :param temperature: Temperature controls the degree of randomness in token
-            selection.
-        :param max_output_tokens: Token limit determines the maximum amount of text
-            output.
-        :param top_p: Tokens are selected from most probable to least until the sum
-            of their probabilities equals the top_p value. Defaults to 0.8.
-        :param top_k: A top_k of 1 means the selected token is the most probable
-            among all tokens.
-        """
-        vertexai.init(project=project_id, location=location, credentials=self.get_credentials())
-
-        parameters = {
-            "temperature": temperature,
-            "max_output_tokens": max_output_tokens,
-            "top_p": top_p,
-            "top_k": top_k,
-        }
-
-        model = self.get_text_generation_model(pretrained_model)
-
-        response = model.predict(
-            prompt=prompt,
-            **parameters,
-        )
-        return response.text
-
-    @deprecated(
-        planned_removal_date="January 01, 2025",
-        use_instead="airflow.providers.google.cloud.hooks.generative_model."
-        "GenerativeModelHook.text_embedding_model_get_embeddings",
-        category=AirflowProviderDeprecationWarning,
-    )
-    @GoogleBaseHook.fallback_to_default_project_id
-    def generate_text_embeddings(
-        self,
-        prompt: str,
-        pretrained_model: str,
-        location: str,
-        project_id: str = PROVIDE_PROJECT_ID,
-    ) -> list:
-        """
-        Use the Vertex AI PaLM API to generate text embeddings.
-
-        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
-        :param location: Required. The ID of the Google Cloud location that the service belongs to.
-        :param prompt: Required. Inputs or queries that a user or a program gives
-            to the Vertex AI PaLM API, in order to elicit a specific response.
-        :param pretrained_model: A pre-trained model optimized for generating text embeddings.
-        """
-        vertexai.init(project=project_id, location=location, credentials=self.get_credentials())
-        model = self.get_text_embedding_model(pretrained_model)
-
-        response = model.get_embeddings([prompt])[0]  # single prompt
-
-        return response.values
-
-    @deprecated(
-        planned_removal_date="January 01, 2025",
-        use_instead="airflow.providers.google.cloud.hooks.generative_model."
-        "GenerativeModelHook.generative_model_generate_content",
-        category=AirflowProviderDeprecationWarning,
-    )
-    @GoogleBaseHook.fallback_to_default_project_id
-    def prompt_multimodal_model(
-        self,
-        prompt: str,
-        location: str,
-        generation_config: dict | None = None,
-        safety_settings: dict | None = None,
-        pretrained_model: str = "gemini-pro",
-        project_id: str = PROVIDE_PROJECT_ID,
-    ) -> str:
-        """
-        Use the Vertex AI Gemini Pro foundation model to generate natural language text.
-
-        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
-        :param location: Required. The ID of the Google Cloud location that the service belongs to.
-        :param prompt: Required. Inputs or queries that a user or a program gives
-            to the Multi-modal model, in order to elicit a specific response.
-        :param generation_config: Optional. Generation configuration settings.
-        :param safety_settings: Optional. Per request settings for blocking unsafe content.
-        :param pretrained_model: By default uses the pre-trained model `gemini-pro`,
-            supporting prompts with text-only input, including natural language
-            tasks, multi-turn text and code chat, and code generation. It can
-            output text and code.
-        """
-        vertexai.init(project=project_id, location=location, credentials=self.get_credentials())
-
-        model = self.get_generative_model(pretrained_model)
-        response = model.generate_content(
-            contents=[prompt], generation_config=generation_config, safety_settings=safety_settings
-        )
-
-        return response.text
-
-    @deprecated(
-        planned_removal_date="January 01, 2025",
-        use_instead="airflow.providers.google.cloud.hooks.generative_model."
-        "GenerativeModelHook.generative_model_generate_content",
-        category=AirflowProviderDeprecationWarning,
-    )
-    @GoogleBaseHook.fallback_to_default_project_id
-    def prompt_multimodal_model_with_media(
-        self,
-        prompt: str,
-        location: str,
-        media_gcs_path: str,
-        mime_type: str,
-        generation_config: dict | None = None,
-        safety_settings: dict | None = None,
-        pretrained_model: str = "gemini-pro-vision",
-        project_id: str = PROVIDE_PROJECT_ID,
-    ) -> str:
-        """
-        Use the Vertex AI Gemini Pro foundation model to generate natural language text.
-
-        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
-        :param location: Required. The ID of the Google Cloud location that the service belongs to.
-        :param prompt: Required. Inputs or queries that a user or a program gives
-            to the Multi-modal model, in order to elicit a specific response.
-        :param generation_config: Optional. Generation configuration settings.
-        :param safety_settings: Optional. Per request settings for blocking unsafe content.
-        :param pretrained_model: By default uses the pre-trained model `gemini-pro-vision`,
-            supporting prompts with text-only input, including natural language
-            tasks, multi-turn text and code chat, and code generation. It can
-            output text and code.
-        :param media_gcs_path: A GCS path to a content file such as an image or a video.
-            Can be passed to the multi-modal model as part of the prompt. Used with vision models.
-        :param mime_type: Validates the media type presented by the file in the media_gcs_path.
-        """
-        vertexai.init(project=project_id, location=location, credentials=self.get_credentials())
-
-        model = self.get_generative_model(pretrained_model)
-        part = self.get_generative_model_part(media_gcs_path, mime_type)
-        response = model.generate_content(
-            contents=[prompt, part], generation_config=generation_config, safety_settings=safety_settings
-        )
-
-        return response.text
-
     @deprecated(
         planned_removal_date="April 09, 2025",
         use_instead="GenerativeModelHook.generative_model_generate_content",
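The removed helpers were thin wrappers around the `vertexai` SDK, and their deprecation notices point to `GenerativeModelHook.generative_model_generate_content` as the replacement. As orientation, here is a minimal sketch of what `prompt_multimodal_model` used to do, written against the `vertexai` calls visible in the deleted code; credential wiring and hook plumbing are omitted, so treat it as illustrative rather than as the provider's API:

```python
# Minimal sketch of the behaviour of the removed prompt_multimodal_model helper,
# reconstructed from the deleted hook code above. Assumes Application Default
# Credentials; the real hook passed its own credentials to vertexai.init().
import vertexai
from vertexai.generative_models import GenerativeModel


def prompt_gemini(prompt: str, project_id: str, location: str,
                  pretrained_model: str = "gemini-pro") -> str:
    vertexai.init(project=project_id, location=location)
    model = GenerativeModel(pretrained_model)
    # The removed helper wrapped the single prompt in a list and returned response.text.
    response = model.generate_content(contents=[prompt])
    return response.text
```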
--- a/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
+++ b/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
@@ -29,15 +29,14 @@ import asyncio
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.exceptions import AirflowException
+from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
 from google.api_core.client_options import ClientOptions
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.aiplatform import CustomJob, HyperparameterTuningJob, gapic, hyperparameter_tuning
 from google.cloud.aiplatform_v1 import JobServiceAsyncClient, JobServiceClient, JobState, types
 
-from airflow.exceptions import AirflowException
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import AsyncRetry, Retry
--- a/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
+++ b/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
@@ -23,13 +23,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.exceptions import AirflowException
+from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 from google.api_core.client_options import ClientOptions
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.aiplatform_v1 import ModelServiceClient
 
-from airflow.exceptions import AirflowException
-from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import Retry
--- a/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
+++ b/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
@@ -29,6 +29,9 @@ import asyncio
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
+from airflow.exceptions import AirflowException
+from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
 from google.api_core.client_options import ClientOptions
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.aiplatform import PipelineJob
@@ -39,10 +42,6 @@ from google.cloud.aiplatform_v1 import (
     types,
 )
 
-from airflow.exceptions import AirflowException
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import AsyncRetry, Retry
--- a/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
+++ b/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
@@ -20,13 +20,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
 from google.api_core.client_options import ClientOptions
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.aiplatform_v1 import PredictionServiceClient
 
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
     from google.cloud.aiplatform_v1.types import PredictResponse
--- a/airflow/providers/google/cloud/hooks/video_intelligence.py
+++ b/airflow/providers/google/cloud/hooks/video_intelligence.py
@@ -22,6 +22,8 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.videointelligence_v1 import (
     Feature,
@@ -29,9 +31,6 @@ from google.cloud.videointelligence_v1 import (
     VideoIntelligenceServiceClient,
 )
 
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import Retry
--- a/airflow/providers/google/cloud/hooks/vision.py
+++ b/airflow/providers/google/cloud/hooks/vision.py
@@ -24,6 +24,9 @@ from copy import deepcopy
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable
 
+from airflow.exceptions import AirflowException
+from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.vision_v1 import (
     AnnotateImageRequest,
@@ -36,10 +39,6 @@ from google.cloud.vision_v1 import (
 )
 from google.protobuf.json_format import MessageToDict
 
-from airflow.exceptions import AirflowException
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
     from google.protobuf import field_mask_pb2
--- a/airflow/providers/google/cloud/hooks/workflows.py
+++ b/airflow/providers/google/cloud/hooks/workflows.py
@@ -19,13 +19,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.workflows.executions_v1beta import Execution, ExecutionsClient
 from google.cloud.workflows_v1beta import Workflow, WorkflowsClient
 
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
-
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import Retry
--- a/airflow/providers/google/cloud/links/alloy_db.py
+++ b/airflow/providers/google/cloud/links/alloy_db.py
@@ -31,6 +31,10 @@ ALLOY_DB_BASE_LINK = "/alloydb"
 ALLOY_DB_CLUSTER_LINK = (
     ALLOY_DB_BASE_LINK + "/locations/{location_id}/clusters/{cluster_id}?project={project_id}"
 )
+ALLOY_DB_USERS_LINK = (
+    ALLOY_DB_BASE_LINK + "/locations/{location_id}/clusters/{cluster_id}/users?project={project_id}"
+)
+ALLOY_DB_BACKUPS_LINK = ALLOY_DB_BASE_LINK + "/backups?project={project_id}"
 
 
 class AlloyDBClusterLink(BaseGoogleLink):
@@ -53,3 +57,45 @@ class AlloyDBClusterLink(BaseGoogleLink):
             key=AlloyDBClusterLink.key,
             value={"location_id": location_id, "cluster_id": cluster_id, "project_id": project_id},
         )
+
+
+class AlloyDBUsersLink(BaseGoogleLink):
+    """Helper class for constructing AlloyDB users Link."""
+
+    name = "AlloyDB Users"
+    key = "alloy_db_users"
+    format_str = ALLOY_DB_USERS_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance: BaseOperator,
+        location_id: str,
+        cluster_id: str,
+        project_id: str | None,
+    ):
+        task_instance.xcom_push(
+            context,
+            key=AlloyDBUsersLink.key,
+            value={"location_id": location_id, "cluster_id": cluster_id, "project_id": project_id},
+        )
+
+
+class AlloyDBBackupsLink(BaseGoogleLink):
+    """Helper class for constructing AlloyDB backups Link."""
+
+    name = "AlloyDB Backups"
+    key = "alloy_db_backups"
+    format_str = ALLOY_DB_BACKUPS_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance: BaseOperator,
+        project_id: str | None,
+    ):
+        task_instance.xcom_push(
+            context,
+            key=AlloyDBBackupsLink.key,
+            value={"project_id": project_id},
+        )
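The new `AlloyDBUsersLink` and `AlloyDBBackupsLink` classes follow the same pattern as the existing `AlloyDBClusterLink`: an operator pushes the URL parameters to XCom via `persist()`, and the link class renders them into the console URL shown in the Airflow UI. A hedged sketch of how an operator might wire up the users link (the operator class below is hypothetical, for illustration only):

```python
from airflow.models.baseoperator import BaseOperator
from airflow.providers.google.cloud.links.alloy_db import AlloyDBUsersLink


class ListAlloyDBUsersOperator(BaseOperator):  # hypothetical operator, not part of the provider
    operator_extra_links = (AlloyDBUsersLink(),)

    def __init__(self, *, location_id: str, cluster_id: str, project_id: str, **kwargs):
        super().__init__(**kwargs)
        self.location_id = location_id
        self.cluster_id = cluster_id
        self.project_id = project_id

    def execute(self, context):
        # Pushes {"location_id", "cluster_id", "project_id"} to XCom under
        # AlloyDBUsersLink.key, which the link class formats into ALLOY_DB_USERS_LINK.
        AlloyDBUsersLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location_id,
            cluster_id=self.cluster_id,
            project_id=self.project_id,
        )
```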
--- a/airflow/providers/google/cloud/links/bigquery.py
+++ b/airflow/providers/google/cloud/links/bigquery.py
@@ -35,6 +35,9 @@ BIGQUERY_TABLE_LINK = (
     BIGQUERY_BASE_LINK
     + "?referrer=search&project={project_id}&d={dataset_id}&p={project_id}&page=table&t={table_id}"
 )
+BIGQUERY_JOB_DETAIL_LINK = (
+    BIGQUERY_BASE_LINK + "?project={project_id}&ws=!1m5!1m4!1m3!1s{project_id}!2s{job_id}!3s{location}"
+)
 
 
 class BigQueryDatasetLink(BaseGoogleLink):
@@ -78,3 +81,25 @@ class BigQueryTableLink(BaseGoogleLink):
             key=BigQueryTableLink.key,
             value={"dataset_id": dataset_id, "project_id": project_id, "table_id": table_id},
         )
+
+
+class BigQueryJobDetailLink(BaseGoogleLink):
+    """Helper class for constructing BigQuery Job Detail Link."""
+
+    name = "BigQuery Job Detail"
+    key = "bigquery_job_detail"
+    format_str = BIGQUERY_JOB_DETAIL_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance: BaseOperator,
+        project_id: str,
+        location: str,
+        job_id: str,
+    ):
+        task_instance.xcom_push(
+            context,
+            key=BigQueryJobDetailLink.key,
+            value={"project_id": project_id, "location": location, "job_id": job_id},
+        )
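`BigQueryJobDetailLink` deep-links to a specific job in the BigQuery console. The values stored by `persist()` are substituted straight into `BIGQUERY_JOB_DETAIL_LINK`; a quick sketch of that substitution (the console host prefix is added by the link base class and is omitted here, and the parameter values are made up for the example):

```python
from airflow.providers.google.cloud.links.bigquery import BIGQUERY_JOB_DETAIL_LINK

# Example of the values BigQueryJobDetailLink.persist() would store in XCom.
params = {"project_id": "my-project", "location": "US", "job_id": "airflow_1234"}
print(BIGQUERY_JOB_DETAIL_LINK.format(**params))
# -> ...?project=my-project&ws=!1m5!1m4!1m3!1smy-project!2sairflow_1234!3sUS
```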
--- a/airflow/providers/google/cloud/links/dataplex.py
+++ b/airflow/providers/google/cloud/links/dataplex.py
@@ -30,9 +30,20 @@ DATAPLEX_BASE_LINK = "/dataplex/process/tasks"
 DATAPLEX_TASK_LINK = DATAPLEX_BASE_LINK + "/{lake_id}.{task_id};location={region}/jobs?project={project_id}"
 DATAPLEX_TASKS_LINK = DATAPLEX_BASE_LINK + "?project={project_id}&qLake={lake_id}.{region}"
 
-DATAPLEX_LAKE_LINK = (
-
+DATAPLEX_LAKE_LINK = "/dataplex/lakes/{lake_id};location={region}?project={project_id}"
+DATAPLEX_CATALOG_ENTRY_GROUPS_LINK = "/dataplex/catalog/entry-groups?project={project_id}"
+DATAPLEX_CATALOG_ENTRY_GROUP_LINK = (
+    "/dataplex/projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}?project={project_id}"
 )
+DATAPLEX_CATALOG_ENTRY_TYPE_LINK = (
+    "/dataplex/projects/{project_id}/locations/{location}/entryTypes/{entry_type_id}?project={project_id}"
+)
+DATAPLEX_CATALOG_ENTRY_TYPES_LINK = "/dataplex/catalog/entry-types?project={project_id}"
+DATAPLEX_CATALOG_ASPECT_TYPE_LINK = (
+    "/dataplex/projects/{project_id}/locations/{location}/aspectTypes/{aspect_type_id}?project={project_id}"
+)
+DATAPLEX_CATALOG_ASPECT_TYPES_LINK = "/dataplex/catalog/aspect-types?project={project_id}"
+DATAPLEX_CATALOG_ENTRY_LINK = "/dataplex/dp-entries/projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}?project={project_id}"
 
 
 class DataplexTaskLink(BaseGoogleLink):
@@ -103,3 +114,162 @@ class DataplexLakeLink(BaseGoogleLink):
                 "project_id": task_instance.project_id,
             },
         )
+
+
+class DataplexCatalogEntryGroupLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog EntryGroup link."""
+
+    name = "Dataplex Catalog EntryGroup"
+    key = "dataplex_catalog_entry_group_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_GROUP_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryGroupLink.key,
+            value={
+                "entry_group_id": task_instance.entry_group_id,
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogEntryGroupsLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog EntryGroups link."""
+
+    name = "Dataplex Catalog EntryGroups"
+    key = "dataplex_catalog_entry_groups_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_GROUPS_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryGroupsLink.key,
+            value={
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogEntryTypeLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog EntryType link."""
+
+    name = "Dataplex Catalog EntryType"
+    key = "dataplex_catalog_entry_type_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_TYPE_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryTypeLink.key,
+            value={
+                "entry_type_id": task_instance.entry_type_id,
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogEntryTypesLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog EntryTypes link."""
+
+    name = "Dataplex Catalog EntryTypes"
+    key = "dataplex_catalog_entry_types_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_TYPES_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryTypesLink.key,
+            value={
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogAspectTypeLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog AspectType link."""
+
+    name = "Dataplex Catalog AspectType"
+    key = "dataplex_catalog_aspect_type_key"
+    format_str = DATAPLEX_CATALOG_ASPECT_TYPE_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogAspectTypeLink.key,
+            value={
+                "aspect_type_id": task_instance.aspect_type_id,
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogAspectTypesLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog AspectTypes link."""
+
+    name = "Dataplex Catalog AspectTypes"
+    key = "dataplex_catalog_aspect_types_key"
+    format_str = DATAPLEX_CATALOG_ASPECT_TYPES_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogAspectTypesLink.key,
+            value={
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
+
+
+class DataplexCatalogEntryLink(BaseGoogleLink):
+    """Helper class for constructing Dataplex Catalog Entry link."""
+
+    name = "Dataplex Catalog Entry"
+    key = "dataplex_catalog_entry_key"
+    format_str = DATAPLEX_CATALOG_ENTRY_LINK
+
+    @staticmethod
+    def persist(
+        context: Context,
+        task_instance,
+    ):
+        task_instance.xcom_push(
+            context=context,
+            key=DataplexCatalogEntryLink.key,
+            value={
+                "entry_id": task_instance.entry_id,
+                "entry_group_id": task_instance.entry_group_id,
+                "location": task_instance.location,
+                "project_id": task_instance.project_id,
+            },
+        )
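Unlike the AlloyDB and BigQuery links above, the new Dataplex Catalog link classes read their URL parameters directly from attributes of the calling operator, so `persist()` only takes the context and the task instance. A hedged sketch of that contract (the operator below is hypothetical, for illustration only):

```python
from airflow.models.baseoperator import BaseOperator
from airflow.providers.google.cloud.links.dataplex import DataplexCatalogEntryGroupLink


class GetEntryGroupOperator(BaseOperator):  # hypothetical operator, not part of the provider
    operator_extra_links = (DataplexCatalogEntryGroupLink(),)

    def __init__(self, *, entry_group_id: str, location: str, project_id: str, **kwargs):
        super().__init__(**kwargs)
        self.entry_group_id = entry_group_id  # read by DataplexCatalogEntryGroupLink.persist
        self.location = location
        self.project_id = project_id

    def execute(self, context):
        # persist() pulls entry_group_id / location / project_id off this instance and
        # pushes them to XCom under "dataplex_catalog_entry_group_key".
        DataplexCatalogEntryGroupLink.persist(context=context, task_instance=self)
```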
--- a/airflow/providers/google/cloud/links/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/links/kubernetes_engine.py
@@ -19,9 +19,8 @@ from __future__ import annotations
 import json
 from typing import TYPE_CHECKING
 
-from google.cloud.container_v1.types import Cluster
-
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
+from google.cloud.container_v1.types import Cluster
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context