apache-airflow-providers-microsoft-azure 6.2.4rc1__tar.gz → 6.3.0rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. {apache-airflow-providers-microsoft-azure-6.2.4rc1/apache_airflow_providers_microsoft_azure.egg-info → apache-airflow-providers-microsoft-azure-6.3.0rc1}/PKG-INFO +7 -7
  2. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/README.rst +4 -4
  3. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/__init__.py +1 -1
  4. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/get_provider_info.py +3 -6
  5. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/adx.py +8 -2
  6. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/base_azure.py +12 -4
  7. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/batch.py +10 -3
  8. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/cosmos.py +16 -9
  9. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/data_lake.py +1 -1
  10. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/synapse.py +1 -1
  11. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/wasb.py +18 -13
  12. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/batch.py +6 -2
  13. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/data_factory.py +1 -1
  14. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/secrets/key_vault.py +9 -0
  15. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/triggers/data_factory.py +1 -1
  16. apache-airflow-providers-microsoft-azure-6.3.0rc1/airflow/providers/microsoft/azure/utils.py +95 -0
  17. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1/apache_airflow_providers_microsoft_azure.egg-info}/PKG-INFO +7 -7
  18. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/apache_airflow_providers_microsoft_azure.egg-info/SOURCES.txt +0 -2
  19. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/apache_airflow_providers_microsoft_azure.egg-info/requires.txt +1 -1
  20. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/pyproject.toml +26 -1
  21. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/setup.cfg +3 -3
  22. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/setup.py +1 -1
  23. apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py +0 -60
  24. apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/triggers/__init__.py +0 -16
  25. apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/utils.py +0 -45
  26. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/LICENSE +0 -0
  27. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/MANIFEST.in +0 -0
  28. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/NOTICE +0 -0
  29. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/__init__.py +0 -0
  30. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/asb.py +0 -0
  31. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/container_instance.py +0 -0
  32. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/container_registry.py +0 -0
  33. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/container_volume.py +0 -0
  34. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/data_factory.py +0 -0
  35. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/hooks/fileshare.py +0 -0
  36. {apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/example_dag → apache-airflow-providers-microsoft-azure-6.3.0rc1/airflow/providers/microsoft/azure/log}/__init__.py +0 -0
  37. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/log/wasb_task_handler.py +0 -0
  38. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/__init__.py +0 -0
  39. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/adls.py +0 -0
  40. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/adx.py +0 -0
  41. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/asb.py +0 -0
  42. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/container_instances.py +0 -0
  43. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/cosmos.py +0 -0
  44. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/synapse.py +0 -0
  45. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py +0 -0
  46. {apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/log → apache-airflow-providers-microsoft-azure-6.3.0rc1/airflow/providers/microsoft/azure/secrets}/__init__.py +0 -0
  47. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/sensors/__init__.py +0 -0
  48. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/sensors/cosmos.py +0 -0
  49. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/sensors/data_factory.py +0 -0
  50. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/sensors/wasb.py +0 -0
  51. {apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/secrets → apache-airflow-providers-microsoft-azure-6.3.0rc1/airflow/providers/microsoft/azure/transfers}/__init__.py +0 -0
  52. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py +0 -0
  53. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/transfers/local_to_adls.py +0 -0
  54. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/transfers/local_to_wasb.py +0 -0
  55. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +0 -0
  56. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +0 -0
  57. {apache-airflow-providers-microsoft-azure-6.2.4rc1/airflow/providers/microsoft/azure/transfers → apache-airflow-providers-microsoft-azure-6.3.0rc1/airflow/providers/microsoft/azure/triggers}/__init__.py +0 -0
  58. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/airflow/providers/microsoft/azure/triggers/wasb.py +0 -0
  59. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/apache_airflow_providers_microsoft_azure.egg-info/dependency_links.txt +0 -0
  60. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/apache_airflow_providers_microsoft_azure.egg-info/entry_points.txt +0 -0
  61. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/apache_airflow_providers_microsoft_azure.egg-info/not-zip-safe +0 -0
  62. {apache-airflow-providers-microsoft-azure-6.2.4rc1 → apache-airflow-providers-microsoft-azure-6.3.0rc1}/apache_airflow_providers_microsoft_azure.egg-info/top_level.txt +0 -0

PKG-INFO
@@ -1,14 +1,14 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-microsoft-azure
- Version: 6.2.4rc1
+ Version: 6.3.0rc1
  Summary: Provider for Apache Airflow. Implements apache-airflow-providers-microsoft-azure package
  Home-page: https://airflow.apache.org/
  Download-URL: https://archive.apache.org/dist/airflow/providers
  Author: Apache Software Foundation
  Author-email: dev@airflow.apache.org
  License: Apache License 2.0
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -73,7 +73,7 @@ License-File: NOTICE

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``6.2.4rc1``
+ Release: ``6.3.0rc1``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -86,7 +86,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/>`_.


  Installation
@@ -119,9 +119,9 @@ PIP package Version required
  ``azure-synapse-spark``
  ``adal``                         ``>=1.2.7``
  ``azure-storage-file-datalake``  ``>=12.9.1``
+ ``azure-kusto-data``             ``>=4.1.0``
  ``azure-mgmt-containerinstance`` ``>=1.5.0,<2.0``
  ``azure-mgmt-datafactory``       ``>=1.0.0,<2.0``
- ``azure-kusto-data``             ``>=0.0.43,<0.1``
  ================================ ==================

  Cross provider package dependencies
@@ -146,4 +146,4 @@ Dependent package
  ==================================================================================================== ==========

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html>`_.

README.rst
@@ -36,7 +36,7 @@

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``6.2.4rc1``
+ Release: ``6.3.0rc1``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -49,7 +49,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/>`_.


  Installation
@@ -82,9 +82,9 @@ PIP package Version required
  ``azure-synapse-spark``
  ``adal``                         ``>=1.2.7``
  ``azure-storage-file-datalake``  ``>=12.9.1``
+ ``azure-kusto-data``             ``>=4.1.0``
  ``azure-mgmt-containerinstance`` ``>=1.5.0,<2.0``
  ``azure-mgmt-datafactory``       ``>=1.0.0,<2.0``
- ``azure-kusto-data``             ``>=0.0.43,<0.1``
  ================================ ==================

  Cross provider package dependencies
@@ -109,4 +109,4 @@ Dependent package
  ==================================================================================================== ==========

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html>`_.

airflow/providers/microsoft/azure/__init__.py
@@ -28,7 +28,7 @@ import packaging.version

  __all__ = ["__version__"]

- __version__ = "6.2.4"
+ __version__ = "6.3.0"

  try:
      from airflow import __version__ as airflow_version

airflow/providers/microsoft/azure/get_provider_info.py
@@ -29,6 +29,7 @@ def get_provider_info():
          "description": "`Microsoft Azure <https://azure.microsoft.com/>`__\n",
          "suspended": False,
          "versions": [
+             "6.3.0",
              "6.2.4",
              "6.2.3",
              "6.2.2",
@@ -85,14 +86,15 @@ def get_provider_info():
              "azure-synapse-spark",
              "adal>=1.2.7",
              "azure-storage-file-datalake>=12.9.1",
+             "azure-kusto-data>=4.1.0",
              "azure-mgmt-containerinstance>=1.5.0,<2.0",
              "azure-mgmt-datafactory>=1.0.0,<2.0",
-             "azure-kusto-data>=0.0.43,<0.1",
          ],
          "integrations": [
              {
                  "integration-name": "Microsoft Azure Batch",
                  "external-doc-url": "https://azure.microsoft.com/en-us/services/batch/",
+                 "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst"],
                  "logo": "/integration-logos/azure/Microsoft-Azure-Batch.png",
                  "tags": ["azure"],
              },
@@ -310,11 +312,6 @@ def get_provider_info():
                  "how-to-guide": "/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst",
                  "python-module": "airflow.providers.microsoft.azure.transfers.local_to_wasb",
              },
-             {
-                 "source-integration-name": "Microsoft Azure Blob Storage",
-                 "target-integration-name": "Google Cloud Storage (GCS)",
-                 "python-module": "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs",
-             },
              {
                  "source-integration-name": "SSH File Transfer Protocol (SFTP)",
                  "target-integration-name": "Microsoft Azure Blob Storage",

airflow/providers/microsoft/azure/hooks/adx.py
@@ -28,8 +28,9 @@ from __future__ import annotations
  import warnings
  from typing import Any

+ from azure.identity import DefaultAzureCredential
+ from azure.kusto.data import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
  from azure.kusto.data.exceptions import KustoServiceError
- from azure.kusto.data.request import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
  from azure.kusto.data.response import KustoResponseDataSetV2

  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
@@ -105,7 +106,7 @@ class AzureDataExplorerHook(BaseHook):
              "placeholders": {
                  "login": "Varies with authentication method",
                  "password": "Varies with authentication method",
-                 "auth_method": "AAD_APP/AAD_APP_CERT/AAD_CREDS/AAD_DEVICE",
+                 "auth_method": "AAD_APP/AAD_APP_CERT/AAD_CREDS/AAD_DEVICE/AZURE_TOKEN_CRED",
                  "tenant": "Used with AAD_APP/AAD_APP_CERT/AAD_CREDS",
                  "certificate": "Used with AAD_APP_CERT",
                  "thumbprint": "Used with AAD_APP_CERT",
@@ -183,6 +184,11 @@ class AzureDataExplorerHook(BaseHook):
              )
          elif auth_method == "AAD_DEVICE":
              kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(cluster)
+         elif auth_method == "AZURE_TOKEN_CRED":
+             kcsb = KustoConnectionStringBuilder.with_azure_token_credential(
+                 connection_string=cluster,
+                 credential=DefaultAzureCredential(),
+             )
          else:
              raise AirflowException(f"Unknown authentication method: {auth_method}")

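
The new ``AZURE_TOKEN_CRED`` method lets the hook authenticate through ``DefaultAzureCredential`` (environment variables, managed identity, Azure CLI login, and so on) instead of explicit app credentials. A minimal sketch of an Airflow connection that opts in; the connection id and cluster URL below are placeholders, not values from this release:

    import json

    from airflow.models import Connection

    conn = Connection(
        conn_id="adx_token_cred",  # placeholder id
        conn_type="azure_data_explorer",
        host="https://mycluster.kusto.windows.net",  # placeholder cluster URL
        extra=json.dumps({"auth_method": "AZURE_TOKEN_CRED"}),
    )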

airflow/providers/microsoft/azure/hooks/base_azure.py
@@ -24,6 +24,7 @@ from azure.common.credentials import ServicePrincipalCredentials

  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.hooks.base import BaseHook
+ from airflow.providers.microsoft.azure.utils import AzureIdentityCredentialAdapter


  class AzureBaseHook(BaseHook):
@@ -124,10 +125,17 @@ class AzureBaseHook(BaseHook):
              self.log.info("Getting connection using a JSON config.")
              return get_client_from_json_dict(client_class=self.sdk_client, config_dict=key_json)

-         self.log.info("Getting connection using specific credentials and subscription_id.")
-         return self.sdk_client(
-             credentials=ServicePrincipalCredentials(
+         credentials: ServicePrincipalCredentials | AzureIdentityCredentialAdapter
+         if all([conn.login, conn.password, tenant]):
+             self.log.info("Getting connection using specific credentials and subscription_id.")
+             credentials = ServicePrincipalCredentials(
                  client_id=conn.login, secret=conn.password, tenant=tenant
-             ),
+             )
+         else:
+             self.log.info("Using DefaultAzureCredential as credential")
+             credentials = AzureIdentityCredentialAdapter()
+
+         return self.sdk_client(
+             credentials=credentials,
              subscription_id=subscription_id,
          )
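
The practical effect of this hunk: when the connection carries no login, password, or tenant, the hook now builds the SDK client from ambient Azure identity instead of failing. A rough sketch under the assumption of a track-1 management client; the client class and default ``azure_default`` connection id are illustrative:

    from azure.mgmt.containerinstance import ContainerInstanceManagementClient

    from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook

    hook = AzureBaseHook(sdk_client=ContainerInstanceManagementClient, conn_id="azure_default")
    # With no login/password/tenant on the connection, this now resolves
    # credentials via DefaultAzureCredential (managed identity, CLI login, ...).
    client = hook.get_conn()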

airflow/providers/microsoft/azure/hooks/batch.py
@@ -27,7 +27,7 @@ from azure.batch.models import JobAddParameter, PoolAddParameter, TaskAddParamet
  from airflow.exceptions import AirflowException
  from airflow.hooks.base import BaseHook
  from airflow.models import Connection
- from airflow.providers.microsoft.azure.utils import get_field
+ from airflow.providers.microsoft.azure.utils import AzureIdentityCredentialAdapter, get_field
  from airflow.utils import timezone


@@ -96,7 +96,15 @@ class AzureBatchHook(BaseHook):
          if not batch_account_url:
              raise AirflowException("Batch Account URL parameter is missing.")

-         credentials = batch_auth.SharedKeyCredentials(conn.login, conn.password)
+         credentials: batch_auth.SharedKeyCredentials | AzureIdentityCredentialAdapter
+         if all([conn.login, conn.password]):
+             credentials = batch_auth.SharedKeyCredentials(conn.login, conn.password)
+         else:
+             credentials = AzureIdentityCredentialAdapter(
+                 None, resource_id="https://batch.core.windows.net/.default"
+             )
+             # credentials = AzureIdentityCredentialAdapter()
+
          batch_client = BatchServiceClient(credentials, batch_url=batch_account_url)
          return batch_client

@@ -344,7 +352,6 @@ class AzureBatchHook(BaseHook):
          :param task: The task to add
          """
          try:
-
              self.connection.task.add(job_id=job_id, task=task)
          except batch_models.BatchErrorException as err:
              if not err.error or err.error.code != "TaskExists":
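
In effect the Batch hook now has two credential paths: shared key when both ``login`` and ``password`` are present, otherwise the azure-identity adapter scoped to the Batch resource. A sketch using Airflow's default ``azure_batch_default`` connection id:

    from airflow.providers.microsoft.azure.hooks.batch import AzureBatchHook

    hook = AzureBatchHook(azure_batch_conn_id="azure_batch_default")
    # login + password set -> SharedKeyCredentials (previous behavior)
    # both empty           -> AzureIdentityCredentialAdapter scoped to
    #                         https://batch.core.windows.net/.default
    client = hook.get_conn()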

airflow/providers/microsoft/azure/hooks/cosmos.py
@@ -25,7 +25,6 @@ the default database and collection to use (see connection `azure_cosmos_default
  """
  from __future__ import annotations

- import json
  import uuid
  from typing import Any

@@ -145,15 +144,17 @@ class AzureCosmosDBHook(BaseHook):
          if collection_name is None:
              raise AirflowBadRequest("Collection name cannot be None.")

+         # The ignores below is due to typing bug in azure-cosmos 9.2.0
+         # https://github.com/Azure/azure-sdk-for-python/issues/31811
          existing_container = list(
              self.get_conn()
              .get_database_client(self.__get_database_name(database_name))
              .query_containers(
                  "SELECT * FROM r WHERE r.id=@id",
-                 parameters=[json.dumps({"name": "@id", "value": collection_name})],
+                 parameters=[{"name": "@id", "value": collection_name}],  # type: ignore[list-item]
              )
          )
-         if len(existing_container) == 0:
+         if not existing_container:
              return False

          return True
@@ -170,17 +171,19 @@ class AzureCosmosDBHook(BaseHook):

          # We need to check to see if this container already exists so we don't try
          # to create it twice
+         # The ignores below is due to typing bug in azure-cosmos 9.2.0
+         # https://github.com/Azure/azure-sdk-for-python/issues/31811
          existing_container = list(
              self.get_conn()
              .get_database_client(self.__get_database_name(database_name))
              .query_containers(
                  "SELECT * FROM r WHERE r.id=@id",
-                 parameters=[json.dumps({"name": "@id", "value": collection_name})],
+                 parameters=[{"name": "@id", "value": collection_name}],  # type: ignore[list-item]
              )
          )

          # Only create if we did not find it already existing
-         if len(existing_container) == 0:
+         if not existing_container:
              self.get_conn().get_database_client(self.__get_database_name(database_name)).create_container(
                  collection_name, partition_key=partition_key
              )
@@ -190,13 +193,15 @@ class AzureCosmosDBHook(BaseHook):
          if database_name is None:
              raise AirflowBadRequest("Database name cannot be None.")

+         # The ignores below is due to typing bug in azure-cosmos 9.2.0
+         # https://github.com/Azure/azure-sdk-for-python/issues/31811
          existing_database = list(
              self.get_conn().query_databases(
                  "SELECT * FROM r WHERE r.id=@id",
-                 parameters=[json.dumps({"name": "@id", "value": database_name})],
+                 parameters=[{"name": "@id", "value": database_name}],  # type: ignore[list-item]
              )
          )
-         if len(existing_database) == 0:
+         if not existing_database:
              return False

          return True
@@ -208,15 +213,17 @@ class AzureCosmosDBHook(BaseHook):

          # We need to check to see if this database already exists so we don't try
          # to create it twice
+         # The ignores below is due to typing bug in azure-cosmos 9.2.0
+         # https://github.com/Azure/azure-sdk-for-python/issues/31811
          existing_database = list(
              self.get_conn().query_databases(
                  "SELECT * FROM r WHERE r.id=@id",
-                 parameters=[json.dumps({"name": "@id", "value": database_name})],
+                 parameters=[{"name": "@id", "value": database_name}],  # type: ignore[list-item]
              )
          )

          # Only create if we did not find it already existing
-         if len(existing_database) == 0:
+         if not existing_database:
              self.get_conn().create_database(database_name)

      def delete_database(self, database_name: str) -> None:
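
The ``parameters`` change tracks the azure-cosmos 4.x API, which takes a list of name/value dicts rather than JSON-encoded strings. A short sketch of the new call shape; account URL, key, database, and collection names are placeholders:

    from azure.cosmos import CosmosClient

    cosmos_client = CosmosClient("https://myaccount.documents.azure.com:443/", credential="<master-key>")
    containers = list(
        cosmos_client.get_database_client("my-database").query_containers(
            "SELECT * FROM r WHERE r.id=@id",
            parameters=[{"name": "@id", "value": "my-collection"}],
        )
    )
    print(bool(containers))  # mirrors the new `if not existing_container:` check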

airflow/providers/microsoft/azure/hooks/data_lake.py
@@ -241,7 +241,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
      accounts that have a hierarchical namespace. Using Adls_v2 connection
      details create DataLakeServiceClient object.

-     Due to Wasb is marked as legacy and and retirement of the (ADLS1), it would
+     Due to Wasb is marked as legacy and retirement of the (ADLS1), it would
      be nice to implement ADLS gen2 hook for interacting with the storage account.

      .. seealso::

airflow/providers/microsoft/azure/hooks/synapse.py
@@ -183,7 +183,7 @@ class AzureSynapseHook(BaseHook):
              )

              # Wait to check the status of the job run based on the ``check_interval`` configured.
-             self.log.info("Sleeping for %s seconds", str(check_interval))
+             self.log.info("Sleeping for %s seconds", check_interval)
              time.sleep(check_interval)

              job_run_status = self.get_job_run_status()

airflow/providers/microsoft/azure/hooks/wasb.py
@@ -28,6 +28,7 @@ from __future__ import annotations
  import logging
  import os
  from typing import Any, Union
+ from urllib.parse import urlparse

  from asgiref.sync import sync_to_async
  from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError
@@ -152,11 +153,13 @@ class WasbHook(BaseHook):
          # connection_string auth takes priority
          return BlobServiceClient.from_connection_string(connection_string, **extra)

-         account_url = (
-             conn.host
-             if conn.host and conn.host.startswith("https://")
-             else f"https://{conn.login}.blob.core.windows.net/"
-         )
+         account_url = conn.host if conn.host else f"https://{conn.login}.blob.core.windows.net/"
+         parsed_url = urlparse(account_url)
+
+         if not parsed_url.netloc and "." not in parsed_url.path:
+             # if there's no netloc and no dots in the path, then user only
+             # provided the Active Directory ID, not the full URL or DNS name
+             account_url = f"https://{conn.login}.blob.core.windows.net/"

          tenant = self._get_field(extra, "tenant_id")
          if tenant:
@@ -238,7 +241,7 @@ class WasbHook(BaseHook):
          :return: True if blobs matching the prefix exist, False otherwise.
          """
          blobs = self.get_blobs_list(container_name=container_name, prefix=prefix, **kwargs)
-         return len(blobs) > 0
+         return bool(blobs)

      def get_blobs_list(
          self,
@@ -499,7 +502,7 @@ class WasbHook(BaseHook):
              blobs_to_delete = [blob_name]
          else:
              blobs_to_delete = []
-         if not ignore_if_missing and len(blobs_to_delete) == 0:
+         if not ignore_if_missing and not blobs_to_delete:
              raise AirflowException(f"Blob(s) not found: {blob_name}")

          # The maximum number of blobs that can be deleted in a single request is 256 using the underlying
@@ -555,11 +558,13 @@ class WasbAsyncHook(WasbHook):
              )
              return self.blob_service_client

-         account_url = (
-             conn.host
-             if conn.host and conn.host.startswith("https://")
-             else f"https://{conn.login}.blob.core.windows.net/"
-         )
+         account_url = conn.host if conn.host else f"https://{conn.login}.blob.core.windows.net/"
+         parsed_url = urlparse(account_url)
+
+         if not parsed_url.netloc and "." not in parsed_url.path:
+             # if there's no netloc and no dots in the path, then user only
+             # provided the Active Directory ID, not the full URL or DNS name
+             account_url = f"https://{conn.login}.blob.core.windows.net/"

          tenant = self._get_field(extra, "tenant_id")
          if tenant:
@@ -678,4 +683,4 @@ class WasbAsyncHook(WasbHook):
          :param kwargs: Optional keyword arguments for ``ContainerClient.walk_blobs``
          """
          blobs = await self.get_blobs_list_async(container_name=container_name, prefix=prefix, **kwargs)
-         return len(blobs) > 0
+         return bool(blobs)
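
The replacement logic accepts three host shapes instead of only ``https://`` URLs. A small self-contained illustration of how the ``urlparse`` test classifies each; the account names are placeholders:

    from urllib.parse import urlparse

    for host in (
        "https://myaccount.blob.core.windows.net",  # full URL: netloc set, kept as-is
        "myaccount.blob.core.windows.net",          # bare DNS name: no netloc, but dots in path
        "myaccount",                                # bare account/AD ID: rewritten to default URL
    ):
        parsed = urlparse(host)
        is_bare_id = not parsed.netloc and "." not in parsed.path
        print(host, "->", "rewrite" if is_bare_id else "keep")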

airflow/providers/microsoft/azure/operators/batch.py
@@ -17,6 +17,7 @@
  # under the License.
  from __future__ import annotations

+ from functools import cached_property
  from typing import TYPE_CHECKING, Any, Sequence

  from azure.batch import models as batch_models
@@ -176,7 +177,10 @@ class AzureBatchOperator(BaseOperator):
          self.timeout = timeout
          self.should_delete_job = should_delete_job
          self.should_delete_pool = should_delete_pool
-         self.hook = self.get_hook()
+
+     @cached_property
+     def hook(self):
+         return self.get_hook()

      def _check_inputs(self) -> Any:
          if not self.os_family and not self.vm_publisher:
@@ -189,7 +193,7 @@ class AzureBatchOperator(BaseOperator):
              )

          if self.use_latest_image:
-             if not all(elem for elem in [self.vm_publisher, self.vm_offer]):
+             if not self.vm_publisher or not self.vm_offer:
                  raise AirflowException(
                      f"If use_latest_image_and_sku is set to True then the parameters vm_publisher, "
                      f"vm_offer, must all be set. "
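
Moving ``hook`` from ``__init__`` to a ``cached_property`` defers the connection lookup until the operator is actually used, rather than at instantiation (i.e. DAG parse) time. A generic sketch of the behavior:

    from functools import cached_property

    class Demo:
        @cached_property
        def hook(self):
            print("building hook once")
            return object()

    d = Demo()
    d.hook  # prints "building hook once"
    d.hook  # cached: no second build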

airflow/providers/microsoft/azure/operators/data_factory.py
@@ -92,7 +92,7 @@ class AzureDataFactoryRunPipelineOperator(BaseOperator):
          ``AzureDataFactoryHook`` will attempt to use the resource group name provided in the corresponding
          connection.
      :param factory_name: The data factory name. If a value is not passed in to the operator, the
-         ``AzureDataFactoryHook`` will attempt to use the factory name name provided in the corresponding
+         ``AzureDataFactoryHook`` will attempt to use the factory name provided in the corresponding
          connection.
      :param reference_pipeline_run_id: The pipeline run identifier. If this run ID is specified the parameters
          of the specified run will be used to create a new run.

airflow/providers/microsoft/azure/secrets/key_vault.py
@@ -16,6 +16,8 @@
  # under the License.
  from __future__ import annotations

+ import logging
+ import os
  import re
  import warnings
  from functools import cached_property
@@ -95,6 +97,13 @@ class AzureKeyVaultBackend(BaseSecretsBackend, LoggingMixin):
              self.config_prefix = config_prefix.rstrip(sep)
          else:
              self.config_prefix = config_prefix
+
+         logger = logging.getLogger("azure.core.pipeline.policies.http_logging_policy")
+         try:
+             logger.setLevel(os.environ.get("AZURE_HTTP_LOGGING_LEVEL", logging.WARNING))
+         except ValueError:
+             logger.setLevel(logging.WARNING)
+
          self.sep = sep
          self.kwargs = kwargs

airflow/providers/microsoft/azure/triggers/data_factory.py
@@ -233,7 +233,7 @@ class AzureDataFactoryTrigger(BaseTrigger):
                          resource_group_name=self.resource_group_name,
                          factory_name=self.factory_name,
                      )
-                     self.log.info("Unexpected error %s caught. Cancel pipeline run %s", str(e), self.run_id)
+                     self.log.info("Unexpected error %s caught. Cancel pipeline run %s", e, self.run_id)
                  except Exception as err:
                      yield TriggerEvent({"status": "error", "message": str(err), "run_id": self.run_id})
              yield TriggerEvent({"status": "error", "message": str(e), "run_id": self.run_id})

airflow/providers/microsoft/azure/utils.py (new file)
@@ -0,0 +1,95 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ import warnings
+
+ from azure.core.pipeline import PipelineContext, PipelineRequest
+ from azure.core.pipeline.policies import BearerTokenCredentialPolicy
+ from azure.core.pipeline.transport import HttpRequest
+ from azure.identity import DefaultAzureCredential
+ from msrest.authentication import BasicTokenAuthentication
+
+
+ def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str):
+     """Get field from extra, first checking short name, then for backcompat we check for prefixed name."""
+     backcompat_prefix = f"extra__{conn_type}__"
+     backcompat_key = f"{backcompat_prefix}{field_name}"
+     ret = None
+     if field_name.startswith("extra__"):
+         raise ValueError(
+             f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix "
+             "when using this method."
+         )
+     if field_name in extras:
+         if backcompat_key in extras:
+             warnings.warn(
+                 f"Conflicting params `{field_name}` and `{backcompat_key}` found in extras for conn "
+                 f"{conn_id}. Using value for `{field_name}`. Please ensure this is the correct "
+                 f"value and remove the backcompat key `{backcompat_key}`."
+             )
+         ret = extras[field_name]
+     elif backcompat_key in extras:
+         ret = extras.get(backcompat_key)
+     if ret == "":
+         return None
+     return ret
+
+
+ class AzureIdentityCredentialAdapter(BasicTokenAuthentication):
+     """Adapt azure-identity credentials for backward compatibility.
+
+     Adapt credentials from azure-identity to be compatible with SD
+     that needs msrestazure or azure.common.credentials
+
+     Check https://stackoverflow.com/questions/63384092/exception-attributeerror-defaultazurecredential-object-has-no-attribute-sig
+     """
+
+     def __init__(self, credential=None, resource_id="https://management.azure.com/.default", **kwargs):
+         """Adapt azure-identity credentials for backward compatibility.
+
+         :param credential: Any azure-identity credential (DefaultAzureCredential by default)
+         :param str resource_id: The scope to use to get the token (default ARM)
+         """
+         super().__init__(None)
+         if credential is None:
+             credential = DefaultAzureCredential()
+         self._policy = BearerTokenCredentialPolicy(credential, resource_id, **kwargs)
+
+     def _make_request(self):
+         return PipelineRequest(
+             HttpRequest("AzureIdentityCredentialAdapter", "https://fakeurl"), PipelineContext(None)
+         )
+
+     def set_token(self):
+         """Ask the azure-core BearerTokenCredentialPolicy policy to get a token.
+
+         Using the policy gives us for free the caching system of azure-core.
+         We could make this code simpler by using private method, but by definition
+         I can't assure they will be there forever, so mocking a fake call to the policy
+         to extract the token, using 100% public API.
+         """
+         request = self._make_request()
+         self._policy.on_request(request)
+         # Read Authorization, and get the second part after Bearer
+         token = request.http_request.headers["Authorization"].split(" ", 1)[1]
+         self.token = {"access_token": token}
+
+     def signed_session(self, azure_session=None):
+         self.set_token()
+         return super().signed_session(azure_session)
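
How the adapter is meant to be consumed, as a sketch: it wraps any azure-identity credential so msrest-based track-1 clients can use it (``set_token`` is normally invoked for you via ``signed_session``):

    from airflow.providers.microsoft.azure.utils import AzureIdentityCredentialAdapter

    credentials = AzureIdentityCredentialAdapter()  # wraps DefaultAzureCredential
    credentials.set_token()  # populates credentials.token["access_token"]
    # Pass `credentials` anywhere a msrest BasicTokenAuthentication is accepted,
    # e.g. the track-1 clients used by AzureBaseHook and AzureBatchHook above.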

apache_airflow_providers_microsoft_azure.egg-info/PKG-INFO
@@ -1,14 +1,14 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-microsoft-azure
- Version: 6.2.4rc1
+ Version: 6.3.0rc1
  Summary: Provider for Apache Airflow. Implements apache-airflow-providers-microsoft-azure package
  Home-page: https://airflow.apache.org/
  Download-URL: https://archive.apache.org/dist/airflow/providers
  Author: Apache Software Foundation
  Author-email: dev@airflow.apache.org
  License: Apache License 2.0
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -73,7 +73,7 @@ License-File: NOTICE

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``6.2.4rc1``
+ Release: ``6.3.0rc1``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -86,7 +86,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/>`_.


  Installation
@@ -119,9 +119,9 @@ PIP package Version required
  ``azure-synapse-spark``
  ``adal``                         ``>=1.2.7``
  ``azure-storage-file-datalake``  ``>=12.9.1``
+ ``azure-kusto-data``             ``>=4.1.0``
  ``azure-mgmt-containerinstance`` ``>=1.5.0,<2.0``
  ``azure-mgmt-datafactory``       ``>=1.0.0,<2.0``
- ``azure-kusto-data``             ``>=0.0.43,<0.1``
  ================================ ==================

  Cross provider package dependencies
@@ -146,4 +146,4 @@ Dependent package
  ==================================================================================================== ==========

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html>`_.

apache_airflow_providers_microsoft_azure.egg-info/SOURCES.txt
@@ -8,8 +8,6 @@ setup.py
  airflow/providers/microsoft/azure/__init__.py
  airflow/providers/microsoft/azure/get_provider_info.py
  airflow/providers/microsoft/azure/utils.py
- airflow/providers/microsoft/azure/example_dag/__init__.py
- airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py
  airflow/providers/microsoft/azure/hooks/__init__.py
  airflow/providers/microsoft/azure/hooks/adx.py
  airflow/providers/microsoft/azure/hooks/asb.py

apache_airflow_providers_microsoft_azure.egg-info/requires.txt
@@ -5,7 +5,7 @@ azure-cosmos>=4.0.0
  azure-datalake-store>=0.0.45
  azure-identity>=1.3.1
  azure-keyvault-secrets>=4.1.0
- azure-kusto-data<0.1,>=0.0.43
+ azure-kusto-data>=4.1.0
  azure-mgmt-containerinstance<2.0,>=1.5.0
  azure-mgmt-datafactory<2.0,>=1.0.0
  azure-mgmt-datalake-store>=0.5.0

pyproject.toml
@@ -66,7 +66,6 @@ extend-select = [
  ]
  extend-ignore = [
      "D203",
-     "D205",
      "D212",
      "D213",
      "D214",
@@ -144,3 +143,29 @@ combine-as-imports = true
  "airflow/security/utils.py" = ["E402"]
  "tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"]
  "tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"]
+
+ [tool.coverage.run]
+ branch = true
+ relative_files = true
+ source = ["airflow"]
+ omit = [
+     "airflow/_vendor/**",
+     "airflow/contrib/**",
+     "airflow/example_dags/**",
+     "airflow/migrations/**",
+     "airflow/providers/**/example_dags/**",
+     "airflow/www/node_modules/**",
+     "airflow/providers/google/ads/_vendor/**",
+ ]
+
+ [tool.coverage.report]
+ skip_empty = true
+ exclude_also = [
+     "def __repr__",
+     "raise AssertionError",
+     "raise NotImplementedError",
+     "if __name__ == .__main__.:",
+     "@(abc\\.)?abstractmethod",
+     "@(typing(_extensions)?\\.)?overload",
+     "if TYPE_CHECKING:"
+ ]

setup.cfg
@@ -27,8 +27,8 @@ classifiers =
      Programming Language :: Python :: 3.11
      Topic :: System :: Monitoring
  project_urls =
-     Documentation=https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/
-     Changelog=https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.2.4/changelog.html
+     Documentation=https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/
+     Changelog=https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/6.3.0/changelog.html
      Bug Tracker=https://github.com/apache/airflow/issues
      Source Code=https://github.com/apache/airflow
      Slack Chat=https://s.apache.org/airflow-slack
@@ -54,7 +54,7 @@ install_requires =
      azure-datalake-store>=0.0.45
      azure-identity>=1.3.1
      azure-keyvault-secrets>=4.1.0
-     azure-kusto-data>=0.0.43,<0.1
+     azure-kusto-data>=4.1.0
      azure-mgmt-containerinstance>=1.5.0,<2.0
      azure-mgmt-datafactory>=1.0.0,<2.0
      azure-mgmt-datalake-store>=0.5.0

setup.py
@@ -26,7 +26,7 @@

  from setuptools import find_namespace_packages, setup

- version = "6.2.4"
+ version = "6.3.0"


  def do_setup():

airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py (deleted)
@@ -1,60 +0,0 @@
- #
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- """
- Example Airflow DAG that senses blob(s) in Azure Blob Storage.
-
- This DAG relies on the following OS environment variables
-
- * CONTAINER_NAME - The container under which to look for the blob.
- * BLOB_NAME - The name of the blob to match.
- * PREFIX - The blob with the specified prefix to match.
- """
- from __future__ import annotations
-
- import os
- from datetime import datetime
-
- from airflow.models import DAG
- from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor
-
- CONTAINER_NAME = os.environ.get("CONTAINER_NAME", "example-container-name")
- BLOB_NAME = os.environ.get("BLOB_NAME", "example-blob-name")
- PREFIX = os.environ.get("PREFIX", "example-prefix")
-
-
- with DAG(
-     "example_wasb_sensors",
-     start_date=datetime(2022, 8, 8),
-     catchup=False,
-     tags=["example"],
- ) as dag:
-     # [START wasb_blob_sensor]
-     azure_wasb_sensor = WasbBlobSensor(
-         container_name=CONTAINER_NAME,
-         blob_name=BLOB_NAME,
-         task_id="wasb_sense_blob",
-     )
-     # [END wasb_blob_sensor]
-
-     # [START wasb_prefix_sensor]
-     azure_wasb_prefix_sensor = WasbPrefixSensor(
-         container_name=CONTAINER_NAME,
-         prefix=PREFIX,
-         task_id="wasb_sense_prefix",
-     )
-     # [END wasb_prefix_sensor]

airflow/providers/microsoft/azure/triggers/__init__.py (deleted)
@@ -1,16 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.

airflow/providers/microsoft/azure/utils.py (old version, deleted)
@@ -1,45 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
-
- from __future__ import annotations
-
- import warnings
-
-
- def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str):
-     """Get field from extra, first checking short name, then for backcompat we check for prefixed name."""
-     backcompat_prefix = f"extra__{conn_type}__"
-     backcompat_key = f"{backcompat_prefix}{field_name}"
-     ret = None
-     if field_name.startswith("extra__"):
-         raise ValueError(
-             f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix "
-             "when using this method."
-         )
-     if field_name in extras:
-         if backcompat_key in extras:
-             warnings.warn(
-                 f"Conflicting params `{field_name}` and `{backcompat_key}` found in extras for conn "
-                 f"{conn_id}. Using value for `{field_name}`. Please ensure this is the correct "
-                 f"value and remove the backcompat key `{backcompat_key}`."
-             )
-         ret = extras[field_name]
-     elif backcompat_key in extras:
-         ret = extras.get(backcompat_key)
-     if ret == "":
-         return None
-     return ret