apache-airflow-providers-microsoft-azure 9.0.1rc1__tar.gz → 10.0.0rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/PKG-INFO +13 -12
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/README.rst +6 -6
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/__init__.py +1 -1
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/get_provider_info.py +4 -7
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/adx.py +1 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/container_registry.py +1 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/cosmos.py +29 -9
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/data_factory.py +1 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/synapse.py +60 -16
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/wasb.py +1 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/adls.py +64 -19
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/adx.py +1 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/synapse.py +1 -1
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +1 -0
- apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/triggers/__init__.py +16 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/triggers/wasb.py +2 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/pyproject.toml +7 -6
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/LICENSE +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/fs/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/fs/adls.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/asb.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/base_azure.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/batch.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/container_instance.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/container_volume.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/data_lake.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/hooks/fileshare.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/log/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/log/wasb_task_handler.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/asb.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/batch.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/container_instances.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/cosmos.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/data_factory.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/secrets/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/secrets/key_vault.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/sensors/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/sensors/cosmos.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/sensors/data_factory.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/sensors/wasb.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/transfers → apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/serialization}/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/triggers → apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/transfers}/__init__.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/transfers/local_to_adls.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/transfers/local_to_wasb.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/triggers/data_factory.py +0 -0
- {apache_airflow_providers_microsoft_azure-9.0.1rc1 → apache_airflow_providers_microsoft_azure-10.0.0rc1}/airflow/providers/microsoft/azure/utils.py +0 -0
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/PKG-INFO
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-microsoft-azure
-Version: 9.0.1rc1
+Version: 10.0.0rc1
 Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
 Keywords: airflow-provider,microsoft.azure,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -19,12 +19,13 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: adal>=1.2.7
 Requires-Dist: adlfs>=2023.10.0
-Requires-Dist: apache-airflow>=2.6.0
+Requires-Dist: apache-airflow>=2.6.0rc0
 Requires-Dist: azure-batch>=8.0.0
-Requires-Dist: azure-cosmos>=4.
+Requires-Dist: azure-cosmos>=4.6.0
 Requires-Dist: azure-datalake-store>=0.0.45
 Requires-Dist: azure-identity>=1.3.1
 Requires-Dist: azure-keyvault-secrets>=4.1.0
@@ -36,7 +37,7 @@ Requires-Dist: azure-mgmt-datafactory>=2.0.0
 Requires-Dist: azure-mgmt-datalake-store>=0.5.0
 Requires-Dist: azure-mgmt-resource>=2.2.0
 Requires-Dist: azure-mgmt-storage>=16.0.0
-Requires-Dist: azure-servicebus>=7.
+Requires-Dist: azure-servicebus>=7.12.1
 Requires-Dist: azure-storage-blob>=12.14.0
 Requires-Dist: azure-storage-file-datalake>=12.9.1
 Requires-Dist: azure-storage-file-share
@@ -46,8 +47,8 @@ Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -100,7 +101,7 @@ Provides-Extra: sftp
 
 Package ``apache-airflow-providers-microsoft-azure``
 
-Release: ``
+Release: ``10.0.0.rc1``
 
 
 `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -113,7 +114,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
 are in ``airflow.providers.microsoft.azure`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/>`_.
 
 Installation
 ------------
@@ -122,7 +123,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-microsoft-azure``
 
-The package supports the following python versions: 3.8,3.9,3.10,3.11
+The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 
 Requirements
 ------------
@@ -133,7 +134,7 @@ PIP package Version required
 ``apache-airflow`` ``>=2.6.0``
 ``adlfs`` ``>=2023.10.0``
 ``azure-batch`` ``>=8.0.0``
-``azure-cosmos`` ``>=4.
+``azure-cosmos`` ``>=4.6.0``
 ``azure-mgmt-cosmosdb``
 ``azure-datalake-store`` ``>=0.0.45``
 ``azure-identity`` ``>=1.3.1``
@@ -143,7 +144,7 @@ PIP package Version required
 ``azure-storage-blob`` ``>=12.14.0``
 ``azure-mgmt-storage`` ``>=16.0.0``
 ``azure-storage-file-share``
-``azure-servicebus`` ``>=7.
+``azure-servicebus`` ``>=7.12.1``
 ``azure-synapse-spark``
 ``azure-synapse-artifacts`` ``>=0.17.0``
 ``adal`` ``>=1.2.7``
@@ -176,4 +177,4 @@ Dependent package
 ==================================================================================================== ==========
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/changelog.html>`_.
```
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/README.rst
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/README.rst
@@ -42,7 +42,7 @@
 
 Package ``apache-airflow-providers-microsoft-azure``
 
-Release: ``
+Release: ``10.0.0.rc1``
 
 
 `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
 are in ``airflow.providers.microsoft.azure`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/>`_.
 
 Installation
 ------------
@@ -64,7 +64,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-microsoft-azure``
 
-The package supports the following python versions: 3.8,3.9,3.10,3.11
+The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 
 Requirements
 ------------
@@ -75,7 +75,7 @@ PIP package Version required
 ``apache-airflow`` ``>=2.6.0``
 ``adlfs`` ``>=2023.10.0``
 ``azure-batch`` ``>=8.0.0``
-``azure-cosmos`` ``>=4.
+``azure-cosmos`` ``>=4.6.0``
 ``azure-mgmt-cosmosdb``
 ``azure-datalake-store`` ``>=0.0.45``
 ``azure-identity`` ``>=1.3.1``
@@ -85,7 +85,7 @@ PIP package Version required
 ``azure-storage-blob`` ``>=12.14.0``
 ``azure-mgmt-storage`` ``>=16.0.0``
 ``azure-storage-file-share``
-``azure-servicebus`` ``>=7.
+``azure-servicebus`` ``>=7.12.1``
 ``azure-synapse-spark``
 ``azure-synapse-artifacts`` ``>=0.17.0``
 ``adal`` ``>=1.2.7``
@@ -118,4 +118,4 @@ Dependent package
 ==================================================================================================== ==========
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/changelog.html>`_.
```
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/get_provider_info.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/get_provider_info.py
@@ -30,6 +30,7 @@ def get_provider_info():
         "state": "ready",
         "source-date-epoch": 1709555852,
         "versions": [
+            "10.0.0",
             "9.0.1",
             "9.0.0",
             "8.5.1",
@@ -85,7 +86,7 @@ def get_provider_info():
             "apache-airflow>=2.6.0",
             "adlfs>=2023.10.0",
             "azure-batch>=8.0.0",
-            "azure-cosmos>=4.
+            "azure-cosmos>=4.6.0",
             "azure-mgmt-cosmosdb",
             "azure-datalake-store>=0.0.45",
             "azure-identity>=1.3.1",
@@ -95,7 +96,7 @@ def get_provider_info():
             "azure-storage-blob>=12.14.0",
             "azure-mgmt-storage>=16.0.0",
             "azure-storage-file-share",
-            "azure-servicebus>=7.
+            "azure-servicebus>=7.12.1",
             "azure-synapse-spark",
             "azure-synapse-artifacts>=0.17.0",
             "adal>=1.2.7",
@@ -392,17 +393,13 @@ def get_provider_info():
                 "connection-type": "azure_service_bus",
             },
             {
-                "hook-class-name": "airflow.providers.microsoft.azure.hooks.synapse.
+                "hook-class-name": "airflow.providers.microsoft.azure.hooks.synapse.BaseAzureSynapseHook",
                 "connection-type": "azure_synapse",
             },
             {
                 "hook-class-name": "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeStorageV2Hook",
                 "connection-type": "adls",
             },
-            {
-                "hook-class-name": "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapsePipelineHook",
-                "connection-type": "azure_synapse_pipeline",
-            },
         ],
         "secrets-backends": ["airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend"],
         "logging": ["airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler"],
```
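
The hunk above repoints the `azure_synapse` connection type at the new `BaseAzureSynapseHook` and drops the separate `azure_synapse_pipeline` entry. A quick way to confirm which mappings an installed provider version advertises is to read them back from `get_provider_info()` (a minimal sketch; the printed pairs follow the dict edited above):

```python
# Minimal sketch: list the connection-type -> hook-class mappings that the
# installed provider advertises.
from airflow.providers.microsoft.azure.get_provider_info import get_provider_info

info = get_provider_info()
for entry in info["connection-types"]:
    print(f'{entry["connection-type"]:20} -> {entry["hook-class-name"]}')
# With 10.0.0 installed, "azure_synapse" maps to BaseAzureSynapseHook and
# there is no "azure_synapse_pipeline" entry any more.
```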
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/hooks/cosmos.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/hooks/cosmos.py
@@ -23,12 +23,14 @@ Airflow connection of type `azure_cosmos` exists. Authorization can be done by s
 login (=Endpoint uri), password (=secret key) and extra fields database_name and collection_name to specify
 the default database and collection to use (see connection `azure_cosmos_default` for an example).
 """
+
 from __future__ import annotations
 
 import uuid
-from typing import Any
+from typing import TYPE_CHECKING, Any, List, Union
 from urllib.parse import urlparse
 
+from azure.cosmos import PartitionKey
 from azure.cosmos.cosmos_client import CosmosClient
 from azure.cosmos.exceptions import CosmosHttpResponseError
 from azure.mgmt.cosmosdb import CosmosDBManagementClient
@@ -41,6 +43,9 @@ from airflow.providers.microsoft.azure.utils import (
     get_sync_default_azure_credential,
 )
 
+if TYPE_CHECKING:
+    PartitionKeyType = Union[str, List[str]]
+
 
 class AzureCosmosDBHook(BaseHook):
     """
@@ -110,6 +115,7 @@ class AzureCosmosDBHook(BaseHook):
 
         self.default_database_name = None
         self.default_collection_name = None
+        self.default_partition_key = None
 
     def _get_field(self, extras, name):
         return get_field(
@@ -152,6 +158,7 @@ class AzureCosmosDBHook(BaseHook):
 
         self.default_database_name = self._get_field(extras, "database_name")
         self.default_collection_name = self._get_field(extras, "collection_name")
+        self.default_partition_key = self._get_field(extras, "partition_key")
 
         # Initialize the Python Azure Cosmos DB client
         self._conn = CosmosClient(endpoint_uri, {"masterKey": master_key})
@@ -179,6 +186,18 @@ class AzureCosmosDBHook(BaseHook):
 
         return coll_name
 
+    def __get_partition_key(self, partition_key: PartitionKeyType | None = None) -> PartitionKeyType:
+        self.get_conn()
+        if partition_key is None:
+            part_key = self.default_partition_key
+        else:
+            part_key = partition_key
+
+        if part_key is None:
+            raise AirflowBadRequest("Partition key must be specified")
+
+        return part_key
+
     def does_collection_exist(self, collection_name: str, database_name: str) -> bool:
         """Check if a collection exists in CosmosDB."""
         if collection_name is None:
@@ -203,7 +222,7 @@ class AzureCosmosDBHook(BaseHook):
         self,
         collection_name: str,
         database_name: str | None = None,
-        partition_key: 
+        partition_key: PartitionKeyType | None = None,
     ) -> None:
         """Create a new collection in the CosmosDB database."""
         if collection_name is None:
@@ -225,7 +244,8 @@ class AzureCosmosDBHook(BaseHook):
         # Only create if we did not find it already existing
         if not existing_container:
             self.get_conn().get_database_client(self.__get_database_name(database_name)).create_container(
-                collection_name,
+                collection_name,
+                partition_key=PartitionKey(path=self.__get_partition_key(partition_key)),
             )
 
     def does_database_exist(self, database_name: str) -> bool:
@@ -327,7 +347,7 @@ class AzureCosmosDBHook(BaseHook):
         document_id: str,
         database_name: str | None = None,
         collection_name: str | None = None,
-        partition_key: 
+        partition_key: PartitionKeyType | None = None,
    ) -> None:
         """Delete an existing document out of a collection in the CosmosDB database."""
         if document_id is None:
@@ -336,7 +356,7 @@ class AzureCosmosDBHook(BaseHook):
             self.get_conn()
             .get_database_client(self.__get_database_name(database_name))
             .get_container_client(self.__get_collection_name(collection_name))
-            .delete_item(document_id, partition_key=partition_key)
+            .delete_item(document_id, partition_key=self.__get_partition_key(partition_key))
         )
 
     def get_document(
@@ -344,7 +364,7 @@ class AzureCosmosDBHook(BaseHook):
         document_id: str,
         database_name: str | None = None,
         collection_name: str | None = None,
-        partition_key: 
+        partition_key: PartitionKeyType | None = None,
     ):
         """Get a document from an existing collection in the CosmosDB database."""
         if document_id is None:
@@ -355,7 +375,7 @@ class AzureCosmosDBHook(BaseHook):
                 self.get_conn()
                 .get_database_client(self.__get_database_name(database_name))
                 .get_container_client(self.__get_collection_name(collection_name))
-                .read_item(document_id, partition_key=partition_key)
+                .read_item(document_id, partition_key=self.__get_partition_key(partition_key))
             )
         except CosmosHttpResponseError:
             return None
@@ -365,7 +385,7 @@ class AzureCosmosDBHook(BaseHook):
         sql_string: str,
         database_name: str | None = None,
         collection_name: str | None = None,
-        partition_key: 
+        partition_key: PartitionKeyType | None = None,
     ) -> list | None:
         """Get a list of documents from an existing collection in the CosmosDB database via SQL query."""
         if sql_string is None:
@@ -376,7 +396,7 @@ class AzureCosmosDBHook(BaseHook):
                 self.get_conn()
                 .get_database_client(self.__get_database_name(database_name))
                 .get_container_client(self.__get_collection_name(collection_name))
-                .query_items(sql_string, partition_key=partition_key)
+                .query_items(sql_string, partition_key=self.__get_partition_key(partition_key))
             )
             return list(result_iterable)
         except CosmosHttpResponseError:
```
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/hooks/synapse.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/hooks/synapse.py
@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 import time
+import warnings
 from typing import TYPE_CHECKING, Any, Union
 
 from azure.core.exceptions import ServiceRequestError
@@ -24,7 +25,7 @@ from azure.identity import ClientSecretCredential, DefaultAzureCredential
 from azure.synapse.artifacts import ArtifactsClient
 from azure.synapse.spark import SparkClient
 
-from airflow.exceptions import AirflowException, AirflowTaskTimeout
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowTaskTimeout
 from airflow.hooks.base import BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
@@ -240,20 +241,20 @@ class AzureSynapsePipelineRunException(AirflowException):
     """An exception that indicates a pipeline run failed to complete."""
 
 
-class AzureSynapsePipelineHook(BaseHook):
+class BaseAzureSynapseHook(BaseHook):
     """
-    A hook to interact with Azure Synapse Pipeline.
+    A base hook class to create session and connection to Azure Synapse using connection id.
 
     :param azure_synapse_conn_id: The :ref:`Azure Synapse connection id<howto/connection:synapse>`.
-    :param azure_synapse_workspace_dev_endpoint: The Azure Synapse Workspace development endpoint.
     """
 
-    conn_type: str = "azure_synapse_pipeline"
+    conn_type: str = "azure_synapse"
     conn_name_attr: str = "azure_synapse_conn_id"
-    default_conn_name: str = "azure_synapse_connection"
-    hook_name: str = "Azure Synapse
+    default_conn_name: str = "azure_synapse_default"
+    hook_name: str = "Azure Synapse"
 
     @classmethod
+    @add_managed_identity_connection_widgets
     def get_connection_form_widgets(cls) -> dict[str, Any]:
         """Return connection widgets to add to connection form."""
         from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
@@ -262,6 +263,7 @@ class AzureSynapsePipelineHook(BaseHook):
 
         return {
             "tenantId": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()),
+            "subscriptionId": StringField(lazy_gettext("Subscription ID"), widget=BS3TextFieldWidget()),
         }
 
     @classmethod
@@ -269,16 +271,51 @@ class AzureSynapsePipelineHook(BaseHook):
         """Return custom field behaviour."""
         return {
             "hidden_fields": ["schema", "port", "extra"],
-            "relabeling": {
+            "relabeling": {
+                "login": "Client ID",
+                "password": "Secret",
+                "host": "Synapse Workspace URL",
+            },
         }
 
+    def __init__(self, azure_synapse_conn_id: str = default_conn_name, **kwargs) -> None:
+        super().__init__(**kwargs)
+        self.conn_id = azure_synapse_conn_id
+
+    def _get_field(self, extras: dict, field_name: str) -> str:
+        return get_field(
+            conn_id=self.conn_id,
+            conn_type=self.conn_type,
+            extras=extras,
+            field_name=field_name,
+        )
+
+
+class AzureSynapsePipelineHook(BaseAzureSynapseHook):
+    """
+    A hook to interact with Azure Synapse Pipeline.
+
+    :param azure_synapse_conn_id: The :ref:`Azure Synapse connection id<howto/connection:synapse>`.
+    :param azure_synapse_workspace_dev_endpoint: The Azure Synapse Workspace development endpoint.
+    """
+
+    default_conn_name: str = "azure_synapse_connection"
+
     def __init__(
-        self, 
+        self,
+        azure_synapse_workspace_dev_endpoint: str,
+        azure_synapse_conn_id: str = default_conn_name,
+        **kwargs,
     ):
-
-        self.
+        # Handling deprecation of "default_conn_name"
+        if azure_synapse_conn_id == self.default_conn_name:
+            warnings.warn(
+                "The usage of `default_conn_name=azure_synapse_connection` is deprecated and will be removed in future. Please update your code to use the new default connection name: `default_conn_name=azure_synapse_default`. ",
+                AirflowProviderDeprecationWarning,
+            )
+        self._conn: ArtifactsClient | None = None
         self.azure_synapse_workspace_dev_endpoint = azure_synapse_workspace_dev_endpoint
-        super().__init__()
+        super().__init__(azure_synapse_conn_id=azure_synapse_conn_id, **kwargs)
 
     def _get_field(self, extras, name):
         return get_field(
@@ -297,15 +334,22 @@ class AzureSynapsePipelineHook(BaseHook):
         tenant = self._get_field(extras, "tenantId")
 
         credential: Credentials
-        if conn.login
+        if not conn.login or not conn.password:
+            managed_identity_client_id = self._get_field(extras, "managed_identity_client_id")
+            workload_identity_tenant_id = self._get_field(extras, "workload_identity_tenant_id")
+
+            credential = get_sync_default_azure_credential(
+                managed_identity_client_id=managed_identity_client_id,
+                workload_identity_tenant_id=workload_identity_tenant_id,
+            )
+        else:
             if not tenant:
                 raise ValueError("A Tenant ID is required when authenticating with Client ID and Secret.")
 
             credential = ClientSecretCredential(
                 client_id=conn.login, client_secret=conn.password, tenant_id=tenant
             )
-
-        credential = DefaultAzureCredential()
+
         self._conn = self._create_client(credential, self.azure_synapse_workspace_dev_endpoint)
 
         if self._conn is not None:
@@ -314,7 +358,7 @@ class AzureSynapsePipelineHook(BaseHook):
             raise ValueError("Failed to create ArtifactsClient")
 
     @staticmethod
-    def _create_client(credential: Credentials, endpoint: str):
+    def _create_client(credential: Credentials, endpoint: str) -> ArtifactsClient:
         return ArtifactsClient(credential=credential, endpoint=endpoint)
 
     def run_pipeline(self, pipeline_name: str, **config: Any) -> CreateRunResponse:
```
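
The refactor above splits connection handling into `BaseAzureSynapseHook` (registered for the `azure_synapse` connection type, default connection id `azure_synapse_default`), while `AzureSynapsePipelineHook` becomes a subclass that still defaults to the old `azure_synapse_connection` id but now emits an `AirflowProviderDeprecationWarning` for it. Passing a connection id explicitly avoids the warning (a minimal sketch; the workspace endpoint, connection id, and pipeline name are illustrative):

```python
# Minimal sketch: instantiate the pipeline hook with an explicit connection id
# so the deprecated default "azure_synapse_connection" is never used.
from airflow.providers.microsoft.azure.hooks.synapse import AzureSynapsePipelineHook

hook = AzureSynapsePipelineHook(
    azure_synapse_workspace_dev_endpoint="https://my-workspace.dev.azuresynapse.net",
    azure_synapse_conn_id="azure_synapse_default",
)
run = hook.run_pipeline("my_pipeline")  # returns the SDK's CreateRunResponse
print(run.run_id)
```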
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/hooks/wasb.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/hooks/wasb.py
@@ -23,6 +23,7 @@ Airflow connection of type `wasb` exists. Authorization can be done by supplying
 login (=Storage account name) and password (=KEY), or login and SAS token in the extra
 field (see connection `wasb_default` for an example).
 """
+
 from __future__ import annotations
 
 import logging
```
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/operators/adls.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/operators/adls.py
@@ -16,22 +16,74 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import IO, TYPE_CHECKING, Any, AnyStr, Iterable, Sequence
 
 from airflow.models import BaseOperator
-from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook, AzureDataLakeStorageV2Hook
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
+DEFAULT_AZURE_DATA_LAKE_CONN_ID = "azure_data_lake_default"
+
+
+class ADLSCreateObjectOperator(BaseOperator):
+    """
+    Creates a new object from passed data to Azure Data Lake on specified file.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ADLSCreateObjectOperator`
+
+    :param file_system_name: Name of the file system or instance of FileSystemProperties.
+    :param file_name: Name of the file which needs to be created in the file system.
+    :param data: The data that will be uploaded.
+    :param length: Size of the data in bytes (optional).
+    :param replace: Whether to forcibly overwrite existing files/directories.
+        If False and remote path is a directory, will quit regardless if any files
+        would be overwritten or not. If True, only matching filenames are actually
+        overwritten.
+    :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`.
+    """
+
+    template_fields: Sequence[str] = ("file_system_name", "file_name", "data")
+    ui_color = "#e4f0e8"
+
+    def __init__(
+        self,
+        *,
+        file_system_name: str,
+        file_name: str,
+        data: bytes | str | Iterable[AnyStr] | IO[AnyStr],
+        length: int | None = None,
+        replace: bool = False,
+        azure_data_lake_conn_id: str = DEFAULT_AZURE_DATA_LAKE_CONN_ID,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.file_system_name = file_system_name
+        self.file_name = file_name
+        self.replace = replace
+        self.data = data  # type: ignore[var-annotated]
+        self.length = length
+        self.azure_data_lake_conn_id = azure_data_lake_conn_id
+
+    def execute(self, context: Context) -> dict[str, Any]:
+        self.log.debug("Uploading %s to %s", self.data, self.file_name)
+        hook = AzureDataLakeStorageV2Hook(adls_conn_id=self.azure_data_lake_conn_id)
+        return hook.create_file(file_system_name=self.file_system_name, file_name=self.file_name).upload_data(
+            data=self.data, length=self.length, overwrite=self.replace
+        )
+
 
 class ADLSDeleteOperator(BaseOperator):
     """
     Delete files in the specified path.
 
-
-
-
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ADLSDeleteOperator`
 
     :param path: A directory or file to remove
     :param recursive: Whether to loop into directories in the location and remove the files
@@ -48,7 +100,7 @@ class ADLSDeleteOperator(BaseOperator):
         path: str,
         recursive: bool = False,
         ignore_not_found: bool = True,
-        azure_data_lake_conn_id: str = "azure_data_lake_default",
+        azure_data_lake_conn_id: str = DEFAULT_AZURE_DATA_LAKE_CONN_ID,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -69,26 +121,19 @@ class ADLSListOperator(BaseOperator):
     This operator returns a python list with the names of files which can be used by
     `xcom` in the downstream tasks.
 
-
-
-
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ADLSListOperator`
 
-
-
-    folder in the specified ADLS account ::
-
-        adls_files = ADLSListOperator(
-            task_id="adls_files",
-            path="folder/output/*.parquet",
-            azure_data_lake_conn_id="azure_data_lake_default",
-        )
+    :param path: The Azure Data Lake path to find the objects. Supports glob strings (templated)
+    :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`.
     """
 
     template_fields: Sequence[str] = ("path",)
     ui_color = "#901dd2"
 
     def __init__(
-        self, *, path: str, azure_data_lake_conn_id: str = "azure_data_lake_default", **kwargs
+        self, *, path: str, azure_data_lake_conn_id: str = DEFAULT_AZURE_DATA_LAKE_CONN_ID, **kwargs
     ) -> None:
         super().__init__(**kwargs)
         self.path = path
```
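
The new `ADLSCreateObjectOperator` above wraps `AzureDataLakeStorageV2Hook.create_file(...).upload_data(...)`. A minimal DAG sketch (connection id, file system, path, and payload are illustrative):

```python
# Minimal DAG sketch for the new ADLSCreateObjectOperator (all names are
# illustrative; assumes an "azure_data_lake_default" connection exists).
from datetime import datetime

from airflow import DAG
from airflow.providers.microsoft.azure.operators.adls import ADLSCreateObjectOperator

with DAG("adls_create_object_demo", start_date=datetime(2024, 1, 1), schedule=None):
    ADLSCreateObjectOperator(
        task_id="upload_report",
        file_system_name="reports",
        file_name="daily/report.json",
        data='{"status": "ok"}',
        replace=True,  # overwrite the file if it already exists
    )
```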
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/operators/synapse.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/operators/synapse.py
@@ -110,7 +110,7 @@ class AzureSynapseRunSparkBatchOperator(BaseOperator):
         ):
             self.log.info("Job run %s has completed successfully.", self.job_id)
         else:
-            raise 
+            raise AirflowException(f"Job run {self.job_id} has failed or has been cancelled.")
 
     def on_kill(self) -> None:
         if self.job_id:
```
```diff
--- /dev/null
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/triggers/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
```
```diff
--- apache_airflow_providers_microsoft_azure-9.0.1rc1/airflow/providers/microsoft/azure/triggers/wasb.py
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1/airflow/providers/microsoft/azure/triggers/wasb.py
@@ -78,6 +78,7 @@ class WasbBlobSensorTrigger(BaseTrigger):
                 if blob_exists:
                     message = f"Blob {self.blob_name} found in container {self.container_name}."
                     yield TriggerEvent({"status": "success", "message": message})
+                    return
                 else:
                     message = (
                         f"Blob {self.blob_name} not available yet in container {self.container_name}."
@@ -154,6 +155,7 @@ class WasbPrefixSensorTrigger(BaseTrigger):
                 if prefix_exists:
                     message = f"Prefix {self.prefix} found in container {self.container_name}."
                     yield TriggerEvent({"status": "success", "message": message})
+                    return
                 else:
                     message = (
                         f"Prefix {self.prefix} not available yet in container {self.container_name}."
```
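
The added `return` matters because both triggers are async generators that poll in a loop: after yielding the success `TriggerEvent`, the generator must end, otherwise control would fall back into the loop and keep polling. A stripped-down sketch of the control flow (not the provider's actual classes, just the pattern):

```python
# Stripped-down sketch of the pattern fixed above: the `return` after the
# success event ends the async generator instead of resuming the poll loop.
import asyncio

async def poll(check, interval: float = 0.1):
    while True:
        if await check():
            yield {"status": "success"}
            return  # without this, polling would continue after success
        await asyncio.sleep(interval)

async def main():
    attempts = iter([False, False, True])

    async def check():
        return next(attempts)

    async for event in poll(check):
        print(event)  # fires exactly once, then the generator finishes

asyncio.run(main())
```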
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"
|
|
28
28
|
|
29
29
|
[project]
|
30
30
|
name = "apache-airflow-providers-microsoft-azure"
|
31
|
-
version = "
|
31
|
+
version = "10.0.0.rc1"
|
32
32
|
description = "Provider package apache-airflow-providers-microsoft-azure for Apache Airflow"
|
33
33
|
readme = "README.rst"
|
34
34
|
authors = [
|
@@ -51,15 +51,16 @@ classifiers = [
|
|
51
51
|
"Programming Language :: Python :: 3.9",
|
52
52
|
"Programming Language :: Python :: 3.10",
|
53
53
|
"Programming Language :: Python :: 3.11",
|
54
|
+
"Programming Language :: Python :: 3.12",
|
54
55
|
"Topic :: System :: Monitoring",
|
55
56
|
]
|
56
57
|
requires-python = "~=3.8"
|
57
58
|
dependencies = [
|
58
59
|
"adal>=1.2.7",
|
59
60
|
"adlfs>=2023.10.0",
|
60
|
-
"apache-airflow>=2.6.
|
61
|
+
"apache-airflow>=2.6.0rc0",
|
61
62
|
"azure-batch>=8.0.0",
|
62
|
-
"azure-cosmos>=4.
|
63
|
+
"azure-cosmos>=4.6.0",
|
63
64
|
"azure-datalake-store>=0.0.45",
|
64
65
|
"azure-identity>=1.3.1",
|
65
66
|
"azure-keyvault-secrets>=4.1.0",
|
@@ -71,7 +72,7 @@ dependencies = [
|
|
71
72
|
"azure-mgmt-datalake-store>=0.5.0",
|
72
73
|
"azure-mgmt-resource>=2.2.0",
|
73
74
|
"azure-mgmt-storage>=16.0.0",
|
74
|
-
"azure-servicebus>=7.
|
75
|
+
"azure-servicebus>=7.12.1",
|
75
76
|
"azure-storage-blob>=12.14.0",
|
76
77
|
"azure-storage-file-datalake>=12.9.1",
|
77
78
|
"azure-storage-file-share",
|
@@ -80,8 +81,8 @@ dependencies = [
|
|
80
81
|
]
|
81
82
|
|
82
83
|
[project.urls]
|
83
|
-
"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
|
84
|
-
"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/
|
84
|
+
"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0"
|
85
|
+
"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/changelog.html"
|
85
86
|
"Bug Tracker" = "https://github.com/apache/airflow/issues"
|
86
87
|
"Source Code" = "https://github.com/apache/airflow"
|
87
88
|
"Slack Chat" = "https://s.apache.org/airflow-slack"
|