apache-airflow-providers-microsoft-azure 6.2.4rc1__py3-none-any.whl → 6.3.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/microsoft/azure/__init__.py +1 -1
- airflow/providers/microsoft/azure/get_provider_info.py +3 -6
- airflow/providers/microsoft/azure/hooks/adx.py +8 -2
- airflow/providers/microsoft/azure/hooks/base_azure.py +12 -4
- airflow/providers/microsoft/azure/hooks/batch.py +10 -3
- airflow/providers/microsoft/azure/hooks/cosmos.py +16 -9
- airflow/providers/microsoft/azure/hooks/data_lake.py +1 -1
- airflow/providers/microsoft/azure/hooks/synapse.py +1 -1
- airflow/providers/microsoft/azure/hooks/wasb.py +18 -13
- airflow/providers/microsoft/azure/operators/batch.py +6 -2
- airflow/providers/microsoft/azure/operators/data_factory.py +1 -1
- airflow/providers/microsoft/azure/secrets/key_vault.py +9 -0
- airflow/providers/microsoft/azure/triggers/data_factory.py +1 -1
- airflow/providers/microsoft/azure/utils.py +50 -0
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/METADATA +8 -8
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/RECORD +21 -23
- airflow/providers/microsoft/azure/example_dag/__init__.py +0 -16
- airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py +0 -60
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/LICENSE +0 -0
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/NOTICE +0 -0
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/top_level.txt +0 -0
airflow/providers/microsoft/azure/get_provider_info.py

@@ -29,6 +29,7 @@ def get_provider_info():
         "description": "`Microsoft Azure <https://azure.microsoft.com/>`__\n",
         "suspended": False,
         "versions": [
+            "6.3.0",
             "6.2.4",
             "6.2.3",
             "6.2.2",
@@ -85,14 +86,15 @@ def get_provider_info():
             "azure-synapse-spark",
             "adal>=1.2.7",
             "azure-storage-file-datalake>=12.9.1",
+            "azure-kusto-data>=4.1.0",
             "azure-mgmt-containerinstance>=1.5.0,<2.0",
             "azure-mgmt-datafactory>=1.0.0,<2.0",
-            "azure-kusto-data>=0.0.43,<0.1",
         ],
         "integrations": [
             {
                 "integration-name": "Microsoft Azure Batch",
                 "external-doc-url": "https://azure.microsoft.com/en-us/services/batch/",
+                "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst"],
                 "logo": "/integration-logos/azure/Microsoft-Azure-Batch.png",
                 "tags": ["azure"],
             },
@@ -310,11 +312,6 @@ def get_provider_info():
                 "how-to-guide": "/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst",
                 "python-module": "airflow.providers.microsoft.azure.transfers.local_to_wasb",
             },
-            {
-                "source-integration-name": "Microsoft Azure Blob Storage",
-                "target-integration-name": "Google Cloud Storage (GCS)",
-                "python-module": "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs",
-            },
             {
                 "source-integration-name": "SSH File Transfer Protocol (SFTP)",
                 "target-integration-name": "Microsoft Azure Blob Storage",
airflow/providers/microsoft/azure/hooks/adx.py

@@ -28,8 +28,9 @@ from __future__ import annotations
 import warnings
 from typing import Any
 
+from azure.identity import DefaultAzureCredential
+from azure.kusto.data import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
 from azure.kusto.data.exceptions import KustoServiceError
-from azure.kusto.data.request import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
 from azure.kusto.data.response import KustoResponseDataSetV2
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
@@ -105,7 +106,7 @@ class AzureDataExplorerHook(BaseHook):
             "placeholders": {
                 "login": "Varies with authentication method",
                 "password": "Varies with authentication method",
-                "auth_method": "AAD_APP/AAD_APP_CERT/AAD_CREDS/AAD_DEVICE",
+                "auth_method": "AAD_APP/AAD_APP_CERT/AAD_CREDS/AAD_DEVICE/AZURE_TOKEN_CRED",
                 "tenant": "Used with AAD_APP/AAD_APP_CERT/AAD_CREDS",
                 "certificate": "Used with AAD_APP_CERT",
                 "thumbprint": "Used with AAD_APP_CERT",
@@ -183,6 +184,11 @@ class AzureDataExplorerHook(BaseHook):
             )
         elif auth_method == "AAD_DEVICE":
             kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(cluster)
+        elif auth_method == "AZURE_TOKEN_CRED":
+            kcsb = KustoConnectionStringBuilder.with_azure_token_credential(
+                connection_string=cluster,
+                credential=DefaultAzureCredential(),
+            )
         else:
             raise AirflowException(f"Unknown authentication method: {auth_method}")
 
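The new AZURE_TOKEN_CRED method authenticates through azure-identity's DefaultAzureCredential (environment variables, managed identity, Azure CLI login, ...) instead of an explicit AAD app secret. A minimal sketch of a connection that opts in; the connection id and cluster URL below are assumptions, not values from this diff:

    import json

    from airflow.models.connection import Connection

    # Hypothetical connection: only the cluster URL and auth method are set;
    # DefaultAzureCredential resolves the actual identity from the environment.
    conn = Connection(
        conn_id="adx_token_cred",  # assumed id
        conn_type="azure_data_explorer",
        host="https://mycluster.westeurope.kusto.windows.net",  # assumed cluster URL
        extra=json.dumps({"auth_method": "AZURE_TOKEN_CRED"}),
    )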
airflow/providers/microsoft/azure/hooks/base_azure.py

@@ -24,6 +24,7 @@ from azure.common.credentials import ServicePrincipalCredentials
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
+from airflow.providers.microsoft.azure.utils import AzureIdentityCredentialAdapter
 
 
 class AzureBaseHook(BaseHook):
@@ -124,10 +125,17 @@ class AzureBaseHook(BaseHook):
             self.log.info("Getting connection using a JSON config.")
             return get_client_from_json_dict(client_class=self.sdk_client, config_dict=key_json)
 
-        self.log.info("Getting connection using specific credentials and subscription_id.")
-        return self.sdk_client(
-            credentials=ServicePrincipalCredentials(
+        credentials: ServicePrincipalCredentials | AzureIdentityCredentialAdapter
+        if all([conn.login, conn.password, tenant]):
+            self.log.info("Getting connection using specific credentials and subscription_id.")
+            credentials = ServicePrincipalCredentials(
                 client_id=conn.login, secret=conn.password, tenant=tenant
-            ),
+            )
+        else:
+            self.log.info("Using DefaultAzureCredential as credential")
+            credentials = AzureIdentityCredentialAdapter()
+
+        return self.sdk_client(
+            credentials=credentials,
             subscription_id=subscription_id,
         )
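With this hunk, an azure connection that lacks login/password/tenant no longer fails outright: the hook falls back to DefaultAzureCredential wrapped in the new AzureIdentityCredentialAdapter (see utils.py below). A hedged sketch; the SDK client and connection id are assumptions:

    from azure.mgmt.containerinstance import ContainerInstanceManagementClient

    from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook

    # Assumed connection "azure_default" carrying only a subscriptionId in extras;
    # with no login/password/tenant, get_conn() takes the DefaultAzureCredential branch.
    hook = AzureBaseHook(sdk_client=ContainerInstanceManagementClient, conn_id="azure_default")
    client = hook.get_conn()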
airflow/providers/microsoft/azure/hooks/batch.py

@@ -27,7 +27,7 @@ from azure.batch.models import JobAddParameter, PoolAddParameter, TaskAddParameter
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection
-from airflow.providers.microsoft.azure.utils import get_field
+from airflow.providers.microsoft.azure.utils import AzureIdentityCredentialAdapter, get_field
 from airflow.utils import timezone
 
 
@@ -96,7 +96,15 @@ class AzureBatchHook(BaseHook):
         if not batch_account_url:
             raise AirflowException("Batch Account URL parameter is missing.")
 
-        credentials = batch_auth.SharedKeyCredentials(conn.login, conn.password)
+        credentials: batch_auth.SharedKeyCredentials | AzureIdentityCredentialAdapter
+        if all([conn.login, conn.password]):
+            credentials = batch_auth.SharedKeyCredentials(conn.login, conn.password)
+        else:
+            credentials = AzureIdentityCredentialAdapter(
+                None, resource_id="https://batch.core.windows.net/.default"
+            )
+        # credentials = AzureIdentityCredentialAdapter()
+
         batch_client = BatchServiceClient(credentials, batch_url=batch_account_url)
         return batch_client
 
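Shared Key auth is kept when login/password are present; otherwise the adapter scopes DefaultAzureCredential to the Batch service via the https://batch.core.windows.net/.default resource id. A sketch of a secretless Batch connection; the connection id and account URL are assumptions (the extra key mirrors the hook's account_url field):

    import json

    from airflow.models.connection import Connection

    conn = Connection(
        conn_id="azure_batch_identity",  # assumed id
        conn_type="azure_batch",
        extra=json.dumps({"account_url": "https://mybatch.westeurope.batch.azure.com"}),  # assumed URL
    )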
@@ -344,7 +352,6 @@ class AzureBatchHook(BaseHook):
         :param task: The task to add
         """
         try:
-
             self.connection.task.add(job_id=job_id, task=task)
         except batch_models.BatchErrorException as err:
             if not err.error or err.error.code != "TaskExists":
airflow/providers/microsoft/azure/hooks/cosmos.py

@@ -25,7 +25,6 @@ the default database and collection to use (see connection `azure_cosmos_default…
 """
 from __future__ import annotations
 
-import json
 import uuid
 from typing import Any
 
@@ -145,15 +144,17 @@ class AzureCosmosDBHook(BaseHook):
         if collection_name is None:
             raise AirflowBadRequest("Collection name cannot be None.")
 
+        # The ignores below is due to typing bug in azure-cosmos 9.2.0
+        # https://github.com/Azure/azure-sdk-for-python/issues/31811
         existing_container = list(
             self.get_conn()
             .get_database_client(self.__get_database_name(database_name))
             .query_containers(
                 "SELECT * FROM r WHERE r.id=@id",
-                parameters=[{"name": "@id", "value": collection_name}],
+                parameters=[{"name": "@id", "value": collection_name}],  # type: ignore[list-item]
             )
         )
-        if len(existing_container) == 0:
+        if not existing_container:
             return False
 
         return True
@@ -170,17 +171,19 @@ class AzureCosmosDBHook(BaseHook):
 
         # We need to check to see if this container already exists so we don't try
         # to create it twice
+        # The ignores below is due to typing bug in azure-cosmos 9.2.0
+        # https://github.com/Azure/azure-sdk-for-python/issues/31811
         existing_container = list(
             self.get_conn()
             .get_database_client(self.__get_database_name(database_name))
             .query_containers(
                 "SELECT * FROM r WHERE r.id=@id",
-                parameters=[{"name": "@id", "value": collection_name}],
+                parameters=[{"name": "@id", "value": collection_name}],  # type: ignore[list-item]
             )
         )
 
         # Only create if we did not find it already existing
-        if len(existing_container) == 0:
+        if not existing_container:
             self.get_conn().get_database_client(self.__get_database_name(database_name)).create_container(
                 collection_name, partition_key=partition_key
             )
@@ -190,13 +193,15 @@ class AzureCosmosDBHook(BaseHook):
         if database_name is None:
             raise AirflowBadRequest("Database name cannot be None.")
 
+        # The ignores below is due to typing bug in azure-cosmos 9.2.0
+        # https://github.com/Azure/azure-sdk-for-python/issues/31811
         existing_database = list(
             self.get_conn().query_databases(
                 "SELECT * FROM r WHERE r.id=@id",
-                parameters=[{"name": "@id", "value": database_name}],
+                parameters=[{"name": "@id", "value": database_name}],  # type: ignore[list-item]
             )
         )
-        if len(existing_database) == 0:
+        if not existing_database:
             return False
 
         return True
@@ -208,15 +213,17 @@ class AzureCosmosDBHook(BaseHook):
 
         # We need to check to see if this database already exists so we don't try
         # to create it twice
+        # The ignores below is due to typing bug in azure-cosmos 9.2.0
+        # https://github.com/Azure/azure-sdk-for-python/issues/31811
         existing_database = list(
             self.get_conn().query_databases(
                 "SELECT * FROM r WHERE r.id=@id",
-                parameters=[{"name": "@id", "value": database_name}],
+                parameters=[{"name": "@id", "value": database_name}],  # type: ignore[list-item]
             )
         )
 
         # Only create if we did not find it already existing
-        if len(existing_database) == 0:
+        if not existing_database:
             self.get_conn().create_database(database_name)
 
     def delete_database(self, database_name: str) -> None:
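All four Cosmos hunks silence the same mypy false positive on the parameters argument (see the linked azure-sdk-for-python issue) and replace len(...) == 0 checks with plain truthiness. The underlying query pattern, sketched directly against the azure-cosmos 4.x client; endpoint, key, and names are placeholders:

    from azure.cosmos import CosmosClient

    client = CosmosClient("https://myaccount.documents.azure.com:443/", credential="<account-key>")
    database = client.get_database_client("airflow")
    containers = list(
        database.query_containers(
            "SELECT * FROM r WHERE r.id=@id",
            parameters=[{"name": "@id", "value": "my_collection"}],
        )
    )
    exists = bool(containers)  # same truthiness test as the new `if not existing_container`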
airflow/providers/microsoft/azure/hooks/data_lake.py

@@ -241,7 +241,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
     accounts that have a hierarchical namespace. Using Adls_v2 connection
     details create DataLakeServiceClient object.
 
-    Due to Wasb is marked as legacy and …
+    Due to Wasb is marked as legacy and retirement of the (ADLS1), it would
     be nice to implement ADLS gen2 hook for interacting with the storage account.
 
     .. seealso::
airflow/providers/microsoft/azure/hooks/synapse.py

@@ -183,7 +183,7 @@ class AzureSynapseHook(BaseHook):
             )
 
         # Wait to check the status of the job run based on the ``check_interval`` configured.
-        self.log.info("Sleeping for %s seconds", …
+        self.log.info("Sleeping for %s seconds", check_interval)
         time.sleep(check_interval)
 
         job_run_status = self.get_job_run_status()
airflow/providers/microsoft/azure/hooks/wasb.py

@@ -28,6 +28,7 @@ from __future__ import annotations
 import logging
 import os
 from typing import Any, Union
+from urllib.parse import urlparse
 
 from asgiref.sync import sync_to_async
 from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError
@@ -152,11 +153,13 @@ class WasbHook(BaseHook):
             # connection_string auth takes priority
             return BlobServiceClient.from_connection_string(connection_string, **extra)
 
-        account_url = …
+        account_url = conn.host if conn.host else f"https://{conn.login}.blob.core.windows.net/"
+        parsed_url = urlparse(account_url)
+
+        if not parsed_url.netloc and "." not in parsed_url.path:
+            # if there's no netloc and no dots in the path, then user only
+            # provided the Active Directory ID, not the full URL or DNS name
+            account_url = f"https://{conn.login}.blob.core.windows.net/"
 
         tenant = self._get_field(extra, "tenant_id")
         if tenant:
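Previously conn.host was used as the account URL unconditionally; the new urlparse guard detects values that are really just an Active Directory ID. A standalone illustration of the heuristic (sample values are made up):

    from urllib.parse import urlparse

    samples = [
        "https://myaccount.blob.core.windows.net",  # full URL: netloc set -> kept
        "myaccount.blob.core.windows.net",          # bare DNS name: dot in path -> kept
        "0aa7d2de-0000-0000-0000-000000000000",     # client id: no netloc, no dots -> rebuilt
    ]
    for host in samples:
        parsed = urlparse(host)
        keep = bool(parsed.netloc) or "." in parsed.path
        print(f"{host!r}: {'use as account_url' if keep else 'rebuild from conn.login'}")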
@@ -238,7 +241,7 @@ class WasbHook(BaseHook):
         :return: True if blobs matching the prefix exist, False otherwise.
         """
         blobs = self.get_blobs_list(container_name=container_name, prefix=prefix, **kwargs)
-        return len(blobs) > 0
+        return bool(blobs)
 
     def get_blobs_list(
         self,
@@ -499,7 +502,7 @@ class WasbHook(BaseHook):
             blobs_to_delete = [blob_name]
         else:
             blobs_to_delete = []
-        if not ignore_if_missing and len(blobs_to_delete) == 0:
+        if not ignore_if_missing and not blobs_to_delete:
             raise AirflowException(f"Blob(s) not found: {blob_name}")
 
         # The maximum number of blobs that can be deleted in a single request is 256 using the underlying
@@ -555,11 +558,13 @@ class WasbAsyncHook(WasbHook):
             )
             return self.blob_service_client
 
-        account_url = …
+        account_url = conn.host if conn.host else f"https://{conn.login}.blob.core.windows.net/"
+        parsed_url = urlparse(account_url)
+
+        if not parsed_url.netloc and "." not in parsed_url.path:
+            # if there's no netloc and no dots in the path, then user only
+            # provided the Active Directory ID, not the full URL or DNS name
+            account_url = f"https://{conn.login}.blob.core.windows.net/"
 
         tenant = self._get_field(extra, "tenant_id")
         if tenant:
@@ -678,4 +683,4 @@ class WasbAsyncHook(WasbHook):
         :param kwargs: Optional keyword arguments for ``ContainerClient.walk_blobs``
         """
         blobs = await self.get_blobs_list_async(container_name=container_name, prefix=prefix, **kwargs)
-        return len(blobs) > 0
+        return bool(blobs)
airflow/providers/microsoft/azure/operators/batch.py

@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations
 
+from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
 from azure.batch import models as batch_models
@@ -176,7 +177,10 @@ class AzureBatchOperator(BaseOperator):
         self.timeout = timeout
         self.should_delete_job = should_delete_job
         self.should_delete_pool = should_delete_pool
-        self.hook = self.get_hook()
+
+    @cached_property
+    def hook(self):
+        return self.get_hook()
 
     def _check_inputs(self) -> Any:
         if not self.os_family and not self.vm_publisher:
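Moving hook construction from __init__ to a functools.cached_property means DAG parsing no longer builds an Azure client; it is created once, on first access. The pattern in isolation (class and names are illustrative only):

    from functools import cached_property

    class LazyClientHolder:
        @cached_property
        def hook(self):
            print("building client (runs once)")
            return object()  # stands in for AzureBatchHook(...)

    holder = LazyClientHolder()
    holder.hook  # builds and caches
    holder.hook  # served from the cache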
@@ -189,7 +193,7 @@ class AzureBatchOperator(BaseOperator):
             )
 
         if self.use_latest_image:
-            if not …
+            if not self.vm_publisher or not self.vm_offer:
                 raise AirflowException(
                     f"If use_latest_image_and_sku is set to True then the parameters vm_publisher, "
                     f"vm_offer, must all be set. "
airflow/providers/microsoft/azure/operators/data_factory.py

@@ -92,7 +92,7 @@ class AzureDataFactoryRunPipelineOperator(BaseOperator):
         ``AzureDataFactoryHook`` will attempt to use the resource group name provided in the corresponding
         connection.
     :param factory_name: The data factory name. If a value is not passed in to the operator, the
-        ``AzureDataFactoryHook`` will attempt to use the factory name
+        ``AzureDataFactoryHook`` will attempt to use the factory name provided in the corresponding
         connection.
     :param reference_pipeline_run_id: The pipeline run identifier. If this run ID is specified the parameters
         of the specified run will be used to create a new run.
airflow/providers/microsoft/azure/secrets/key_vault.py

@@ -16,6 +16,8 @@
 # under the License.
 from __future__ import annotations
 
+import logging
+import os
 import re
 import warnings
 from functools import cached_property
@@ -95,6 +97,13 @@ class AzureKeyVaultBackend(BaseSecretsBackend, LoggingMixin):
             self.config_prefix = config_prefix.rstrip(sep)
         else:
             self.config_prefix = config_prefix
+
+        logger = logging.getLogger("azure.core.pipeline.policies.http_logging_policy")
+        try:
+            logger.setLevel(os.environ.get("AZURE_HTTP_LOGGING_LEVEL", logging.WARNING))
+        except ValueError:
+            logger.setLevel(logging.WARNING)
+
         self.sep = sep
         self.kwargs = kwargs
 
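This mutes azure.core's per-request HTTP logging at backend construction unless explicitly re-enabled. A hedged opt-in sketch (the variable name comes from the hunk; the value is an example, and invalid values fall back to WARNING):

    import os

    # Any name accepted by logging.Logger.setLevel works, e.g. "DEBUG" or "INFO".
    os.environ["AZURE_HTTP_LOGGING_LEVEL"] = "DEBUG"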
airflow/providers/microsoft/azure/triggers/data_factory.py

@@ -233,7 +233,7 @@ class AzureDataFactoryTrigger(BaseTrigger):
                     resource_group_name=self.resource_group_name,
                     factory_name=self.factory_name,
                 )
-                self.log.info("Unexpected error %s caught. Cancel pipeline run %s", …
+                self.log.info("Unexpected error %s caught. Cancel pipeline run %s", e, self.run_id)
             except Exception as err:
                 yield TriggerEvent({"status": "error", "message": str(err), "run_id": self.run_id})
             yield TriggerEvent({"status": "error", "message": str(e), "run_id": self.run_id})
airflow/providers/microsoft/azure/utils.py

@@ -19,6 +19,12 @@ from __future__ import annotations
 
 import warnings
 
+from azure.core.pipeline import PipelineContext, PipelineRequest
+from azure.core.pipeline.policies import BearerTokenCredentialPolicy
+from azure.core.pipeline.transport import HttpRequest
+from azure.identity import DefaultAzureCredential
+from msrest.authentication import BasicTokenAuthentication
+
 
 def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str):
     """Get field from extra, first checking short name, then for backcompat we check for prefixed name."""
@@ -43,3 +49,47 @@ def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str):
     if ret == "":
         return None
     return ret
+
+
+class AzureIdentityCredentialAdapter(BasicTokenAuthentication):
+    """Adapt azure-identity credentials for backward compatibility.
+
+    Adapt credentials from azure-identity to be compatible with SDK
+    that needs msrestazure or azure.common.credentials
+
+    Check https://stackoverflow.com/questions/63384092/exception-attributeerror-defaultazurecredential-object-has-no-attribute-signed-session
+    """
+
+    def __init__(self, credential=None, resource_id="https://management.azure.com/.default", **kwargs):
+        """Adapt azure-identity credentials for backward compatibility.
+
+        :param credential: Any azure-identity credential (DefaultAzureCredential by default)
+        :param str resource_id: The scope to use to get the token (default ARM)
+        """
+        super().__init__(None)
+        if credential is None:
+            credential = DefaultAzureCredential()
+        self._policy = BearerTokenCredentialPolicy(credential, resource_id, **kwargs)
+
+    def _make_request(self):
+        return PipelineRequest(
+            HttpRequest("AzureIdentityCredentialAdapter", "https://fakeurl"), PipelineContext(None)
+        )
+
+    def set_token(self):
+        """Ask the azure-core BearerTokenCredentialPolicy policy to get a token.
+
+        Using the policy gives us for free the caching system of azure-core.
+        We could make this code simpler by using private method, but by definition
+        I can't assure they will be there forever, so mocking a fake call to the policy
+        to extract the token, using 100% public API.
+        """
+        request = self._make_request()
+        self._policy.on_request(request)
+        # Read Authorization, and get the second part after Bearer
+        token = request.http_request.headers["Authorization"].split(" ", 1)[1]
+        self.token = {"access_token": token}
+
+    def signed_session(self, azure_session=None):
+        self.set_token()
+        return super().signed_session(azure_session)
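The adapter lets SDK clients that still expect msrest-style credentials (signed_session) run on modern azure-identity credentials. A hedged usage sketch, assuming an environment where DefaultAzureCredential can resolve an identity:

    from airflow.providers.microsoft.azure.utils import AzureIdentityCredentialAdapter

    adapter = AzureIdentityCredentialAdapter()  # wraps DefaultAzureCredential, ARM scope by default
    adapter.set_token()  # fetches a bearer token through the azure-core policy
    print("token acquired:", "access_token" in adapter.token)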
{apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/METADATA

@@ -1,14 +1,14 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-microsoft-azure
-Version: 6.2.4rc1
+Version: 6.3.0rc1
 Summary: Provider for Apache Airflow. Implements apache-airflow-providers-microsoft-azure package
 Home-page: https://airflow.apache.org/
 Download-URL: https://archive.apache.org/dist/airflow/providers
 Author: Apache Software Foundation
 Author-email: dev@airflow.apache.org
 License: Apache License 2.0
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -38,7 +38,7 @@ Requires-Dist: azure-cosmos >=4.0.0
 Requires-Dist: azure-datalake-store >=0.0.45
 Requires-Dist: azure-identity >=1.3.1
 Requires-Dist: azure-keyvault-secrets >=4.1.0
-Requires-Dist: azure-kusto-data <0.1,>=0.0.43
+Requires-Dist: azure-kusto-data >=4.1.0
 Requires-Dist: azure-mgmt-containerinstance <2.0,>=1.5.0
 Requires-Dist: azure-mgmt-datafactory <2.0,>=1.0.0
 Requires-Dist: azure-mgmt-datalake-store >=0.5.0
@@ -94,7 +94,7 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == 'sftp'
 
 Package ``apache-airflow-providers-microsoft-azure``
 
-Release: ``6.2.4rc1``
+Release: ``6.3.0rc1``
 
 
 `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -107,7 +107,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for this
 are in ``airflow.providers.microsoft.azure`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/>`_.
 
 
 Installation
@@ -140,9 +140,9 @@ PIP package Version required
 ``azure-synapse-spark``
 ``adal``                         ``>=1.2.7``
 ``azure-storage-file-datalake``  ``>=12.9.1``
+``azure-kusto-data``             ``>=4.1.0``
 ``azure-mgmt-containerinstance`` ``>=1.5.0,<2.0``
 ``azure-mgmt-datafactory``       ``>=1.0.0,<2.0``
-``azure-kusto-data``             ``>=0.0.43,<0.1``
 ================================ ==================
 
 Cross provider package dependencies
@@ -167,4 +167,4 @@ Dependent package
 ==================================================================================================== ==========
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html>`_.
{apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info → apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info}/RECORD

@@ -1,36 +1,34 @@
-airflow/providers/microsoft/azure/__init__.py,sha256=…
-airflow/providers/microsoft/azure/get_provider_info.py,sha256=…
-airflow/providers/microsoft/azure/utils.py,sha256=…
-airflow/providers/microsoft/azure/example_dag/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py,sha256=XeMBYcGvRChPoim7_Vx3VtSqn2pTRachjO-RhHcnmp4,2032
+airflow/providers/microsoft/azure/__init__.py,sha256=SOXr_n3z0jdi3Iem4-ekMo3pvAdhTI09oJ3YAiX10eA,1584
+airflow/providers/microsoft/azure/get_provider_info.py,sha256=QCvf8iHufAsRMezGD769pp_zQDsbxw4cOPR9ixQk-P8,17443
+airflow/providers/microsoft/azure/utils.py,sha256=HWf2jbEKo78W9qWErjJ6e1V1GE7OZnkbbRFA7NjB3G8,4148
 airflow/providers/microsoft/azure/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/microsoft/azure/hooks/adx.py,sha256=…
+airflow/providers/microsoft/azure/hooks/adx.py,sha256=zhCOAUQ6HZqU-au7lW0777cGQhkTLZVCWDt_oYzjDoQ,9269
 airflow/providers/microsoft/azure/hooks/asb.py,sha256=qMSmqKPZu_bcuaKHTD_Dn0TZ0ZNURLiawv2V1hblZgg,10984
-airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=…
-airflow/providers/microsoft/azure/hooks/batch.py,sha256=…
+airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=zwXLtzraBh9oJqUYVM7Cp1d78RiPEc5unVT6LX1pX98,5838
+airflow/providers/microsoft/azure/hooks/batch.py,sha256=nN-Y0_jGCRMe3n6LpFPR1oZKJUtoGsNO9DcZOSQ_SzM,15692
 airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=euwS6SlIFH4z7h_4B0E0S4lXxQDlKm4jgmd8sPhmCxA,6183
 airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=Js9baC9lnE_NbwTFpOH6yXANLpr-JTEd4NT4LapDNqU,2460
 airflow/providers/microsoft/azure/hooks/container_volume.py,sha256=t6UUrPfTleFwVuuAX4vB7MUViCUkrNSAOxaOZktiH8U,3986
-airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=…
+airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=6HN4d0T-HLP9e2886S9YEHtQqD0eE-j7c50WjHc8X_I,14647
 airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=19CCp0eyX2-OH8mTQBypfSWu5qvNYcouyy2S9ocSI68,45368
-airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=…
+airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=H6Dz1c-JiI4T5YAoPCsQSbJe80MqX4NqzyLjxW-wBWA,22060
 airflow/providers/microsoft/azure/hooks/fileshare.py,sha256=FIg6gbDoAtS1fL3n9Il-evCIFA7so5C3yAkPzzXF6Co,12340
-airflow/providers/microsoft/azure/hooks/synapse.py,sha256=…
-airflow/providers/microsoft/azure/hooks/wasb.py,sha256=…
+airflow/providers/microsoft/azure/hooks/synapse.py,sha256=cr1ToTfRPaoLlrHMBJk5qlgYc7-lFKTkiUN_pmTry_I,7315
+airflow/providers/microsoft/azure/hooks/wasb.py,sha256=i-990JZbiD9iLj06xW7weJ0K_uqZU5Bg6mCCTQzqkTA,28832
 airflow/providers/microsoft/azure/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/log/wasb_task_handler.py,sha256=X9RCtFimKcEdMOLYiCJZ3J8TE0spN6spCXhtGBcqNzY,10109
 airflow/providers/microsoft/azure/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/operators/adls.py,sha256=msx7uZNlSshiZsNASESk6c0BQr3QNvsByxWh0Nr2TP0,3789
 airflow/providers/microsoft/azure/operators/adx.py,sha256=rK5ErQqYJFFDaSirE4G2QV6V3Oj2QtKxUk6fIsJGluA,3032
 airflow/providers/microsoft/azure/operators/asb.py,sha256=cqLLkmF9SGLAwNRJEt1Cm6RibuckueY0_1avEtGEeU8,29255
-airflow/providers/microsoft/azure/operators/batch.py,sha256=…
+airflow/providers/microsoft/azure/operators/batch.py,sha256=vssNXlzgt5ajmu5Pz9gxrZWfBrrQ_fPl0Ii-MR6YyKc,16311
 airflow/providers/microsoft/azure/operators/container_instances.py,sha256=ZukRCHU0n87W4XRTcpcw1Q5mpfCFPkKaFT9Mu1uaYvg,15968
 airflow/providers/microsoft/azure/operators/cosmos.py,sha256=Y1Hj4p6W8soVFaq1rx8LFgchNISjkq8vjdaQ0j8Tnqs,2782
-airflow/providers/microsoft/azure/operators/data_factory.py,sha256=…
+airflow/providers/microsoft/azure/operators/data_factory.py,sha256=SjU5KrzmHKPEcbbsNBiABs0qruVdj2GdtxN9P_s5vTk,12520
 airflow/providers/microsoft/azure/operators/synapse.py,sha256=KKnwRVLM--Kd9Acaw5GcUydX5IzR_GM2-PHw7mdq-BI,4422
 airflow/providers/microsoft/azure/operators/wasb_delete_blob.py,sha256=GUfV9DLU1bGYvn2TE54iTYBTbxn3Jm_e985pXp_0IsE,2687
 airflow/providers/microsoft/azure/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/microsoft/azure/secrets/key_vault.py,sha256=…
+airflow/providers/microsoft/azure/secrets/key_vault.py,sha256=ns5ZtRaFVO1_zSXpoO8OsESkFloTJNlUOc1Dgp3jFqU,8179
 airflow/providers/microsoft/azure/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/sensors/cosmos.py,sha256=vRrJ8zJnApvuKxHia53tNZUZ7wILWFT3_5cEyMA2M1I,2637
 airflow/providers/microsoft/azure/sensors/data_factory.py,sha256=xHZg-00V7Y6DGg07obZrhi-Ek1XYgy5V67RRcme7Heg,6145
@@ -42,12 +40,12 @@ airflow/providers/microsoft/azure/transfers/local_to_wasb.py,sha256=4CK9OKBW07BH…
 airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py,sha256=JW4o6TLipJUAPsto_oNEAmY1lYbAmOsdwGzJfJSkhDc,4475
 airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=-VSa-7Pz4Uf8RYCeXSDaZCyBB9dT2J37PJyuxd2STAk,8202
 airflow/providers/microsoft/azure/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/microsoft/azure/triggers/data_factory.py,sha256=…
+airflow/providers/microsoft/azure/triggers/data_factory.py,sha256=K78szKk3IAbx5mA9mKMmPvhJNhUA0EgqzXcJhOT4QK8,11192
 airflow/providers/microsoft/azure/triggers/wasb.py,sha256=PizofSGAwdgCuswenlbzlH0Jr7GHEzT89BluucDt8dE,7382
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/LICENSE,sha256=…
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/METADATA,sha256=…
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/NOTICE,sha256=…
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/WHEEL,sha256=…
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/entry_points.txt,sha256=…
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/top_level.txt,sha256=…
-apache_airflow_providers_microsoft_azure-6.2.4rc1.dist-info/RECORD,,
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/METADATA,sha256=1bLx3mXTyWlFTmF-1AgeFyzTFQtZp_cO9qizbQTAb5Q,7554
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/entry_points.txt,sha256=mAQpo-U_MYTG3pa7xmaY5vG849FnAziWsdGpfV-16NM,112
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
+apache_airflow_providers_microsoft_azure-6.3.0rc1.dist-info/RECORD,,
@@ -1,16 +0,0 @@
|
|
1
|
-
# Licensed to the Apache Software Foundation (ASF) under one
|
2
|
-
# or more contributor license agreements. See the NOTICE file
|
3
|
-
# distributed with this work for additional information
|
4
|
-
# regarding copyright ownership. The ASF licenses this file
|
5
|
-
# to you under the Apache License, Version 2.0 (the
|
6
|
-
# "License"); you may not use this file except in compliance
|
7
|
-
# with the License. You may obtain a copy of the License at
|
8
|
-
#
|
9
|
-
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
-
#
|
11
|
-
# Unless required by applicable law or agreed to in writing,
|
12
|
-
# software distributed under the License is distributed on an
|
13
|
-
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
14
|
-
# KIND, either express or implied. See the License for the
|
15
|
-
# specific language governing permissions and limitations
|
16
|
-
# under the License.
|
airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py (deleted)

@@ -1,60 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-Example Airflow DAG that senses blob(s) in Azure Blob Storage.
-
-This DAG relies on the following OS environment variables
-
-* CONTAINER_NAME - The container under which to look for the blob.
-* BLOB_NAME - The name of the blob to match.
-* PREFIX - The blob with the specified prefix to match.
-"""
-from __future__ import annotations
-
-import os
-from datetime import datetime
-
-from airflow.models import DAG
-from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor
-
-CONTAINER_NAME = os.environ.get("CONTAINER_NAME", "example-container-name")
-BLOB_NAME = os.environ.get("BLOB_NAME", "example-blob-name")
-PREFIX = os.environ.get("PREFIX", "example-prefix")
-
-
-with DAG(
-    "example_wasb_sensors",
-    start_date=datetime(2022, 8, 8),
-    catchup=False,
-    tags=["example"],
-) as dag:
-    # [START wasb_blob_sensor]
-    azure_wasb_sensor = WasbBlobSensor(
-        container_name=CONTAINER_NAME,
-        blob_name=BLOB_NAME,
-        task_id="wasb_sense_blob",
-    )
-    # [END wasb_blob_sensor]
-
-    # [START wasb_prefix_sensor]
-    azure_wasb_prefix_sensor = WasbPrefixSensor(
-        container_name=CONTAINER_NAME,
-        prefix=PREFIX,
-        task_id="wasb_sense_prefix",
-    )
-    # [END wasb_prefix_sensor]
The remaining files (LICENSE, NOTICE, WHEEL, entry_points.txt, top_level.txt) are renamed with the dist-info directory but are otherwise unchanged.