apache-airflow-providers-microsoft-azure 12.0.0rc1__py3-none-any.whl → 12.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. airflow/providers/microsoft/azure/LICENSE +0 -52
  2. airflow/providers/microsoft/azure/__init__.py +1 -1
  3. airflow/providers/microsoft/azure/fs/adls.py +1 -2
  4. airflow/providers/microsoft/azure/get_provider_info.py +52 -46
  5. airflow/providers/microsoft/azure/hooks/adx.py +6 -7
  6. airflow/providers/microsoft/azure/hooks/asb.py +237 -8
  7. airflow/providers/microsoft/azure/hooks/base_azure.py +2 -3
  8. airflow/providers/microsoft/azure/hooks/batch.py +1 -2
  9. airflow/providers/microsoft/azure/hooks/container_instance.py +3 -4
  10. airflow/providers/microsoft/azure/hooks/container_registry.py +2 -3
  11. airflow/providers/microsoft/azure/hooks/container_volume.py +2 -3
  12. airflow/providers/microsoft/azure/hooks/cosmos.py +4 -5
  13. airflow/providers/microsoft/azure/hooks/data_factory.py +7 -7
  14. airflow/providers/microsoft/azure/hooks/data_lake.py +8 -9
  15. airflow/providers/microsoft/azure/hooks/fileshare.py +1 -2
  16. airflow/providers/microsoft/azure/hooks/msgraph.py +102 -35
  17. airflow/providers/microsoft/azure/hooks/synapse.py +4 -5
  18. airflow/providers/microsoft/azure/hooks/wasb.py +9 -9
  19. airflow/providers/microsoft/azure/log/wasb_task_handler.py +1 -2
  20. airflow/providers/microsoft/azure/operators/adx.py +1 -2
  21. airflow/providers/microsoft/azure/operators/asb.py +50 -62
  22. airflow/providers/microsoft/azure/operators/batch.py +1 -2
  23. airflow/providers/microsoft/azure/operators/container_instances.py +7 -7
  24. airflow/providers/microsoft/azure/operators/msgraph.py +44 -12
  25. airflow/providers/microsoft/azure/operators/powerbi.py +34 -5
  26. airflow/providers/microsoft/azure/operators/synapse.py +1 -2
  27. airflow/providers/microsoft/azure/secrets/key_vault.py +3 -4
  28. airflow/providers/microsoft/azure/sensors/msgraph.py +21 -5
  29. airflow/providers/microsoft/azure/triggers/data_factory.py +1 -2
  30. airflow/providers/microsoft/azure/triggers/msgraph.py +4 -0
  31. airflow/providers/microsoft/azure/triggers/powerbi.py +55 -11
  32. airflow/providers/microsoft/azure/utils.py +2 -1
  33. {apache_airflow_providers_microsoft_azure-12.0.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.1.0.dist-info}/METADATA +21 -38
  34. apache_airflow_providers_microsoft_azure-12.1.0.dist-info/RECORD +58 -0
  35. apache_airflow_providers_microsoft_azure-12.0.0rc1.dist-info/RECORD +0 -58
  36. {apache_airflow_providers_microsoft_azure-12.0.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.1.0.dist-info}/WHEEL +0 -0
  37. {apache_airflow_providers_microsoft_azure-12.0.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.1.0.dist-info}/entry_points.txt +0 -0
@@ -199,55 +199,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
199
199
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
200
  See the License for the specific language governing permissions and
201
201
  limitations under the License.
202
-
203
- ============================================================================
204
- APACHE AIRFLOW SUBCOMPONENTS:
205
-
206
- The Apache Airflow project contains subcomponents with separate copyright
207
- notices and license terms. Your use of the source code for the these
208
- subcomponents is subject to the terms and conditions of the following
209
- licenses.
210
-
211
-
212
- ========================================================================
213
- Third party Apache 2.0 licenses
214
- ========================================================================
215
-
216
- The following components are provided under the Apache 2.0 License.
217
- See project link for details. The text of each license is also included
218
- at 3rd-party-licenses/LICENSE-[project].txt.
219
-
220
- (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
221
- (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
222
- (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
223
- (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
224
-
225
- ========================================================================
226
- MIT licenses
227
- ========================================================================
228
-
229
- The following components are provided under the MIT License. See project link for details.
230
- The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
231
-
232
- (MIT License) jquery v3.5.1 (https://jquery.org/license/)
233
- (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
234
- (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
235
- (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
236
- (MIT License) dataTables v1.10.25 (https://datatables.net)
237
- (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
238
- (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
239
- (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
240
- (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
241
-
242
- ========================================================================
243
- BSD 3-Clause licenses
244
- ========================================================================
245
- The following components are provided under the BSD 3-Clause license. See project links for details.
246
- The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
247
-
248
- (BSD 3 License) d3 v5.16.0 (https://d3js.org)
249
- (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
250
- (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
251
-
252
- ========================================================================
253
- See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "12.0.0"
32
+ __version__ = "12.1.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.9.0"
@@ -18,10 +18,9 @@ from __future__ import annotations
18
18
 
19
19
  from typing import TYPE_CHECKING, Any
20
20
 
21
- from azure.identity import ClientSecretCredential
22
-
23
21
  from airflow.hooks.base import BaseHook
24
22
  from airflow.providers.microsoft.azure.utils import get_field, parse_blob_account_url
23
+ from azure.identity import ClientSecretCredential
25
24
 
26
25
  if TYPE_CHECKING:
27
26
  from fsspec import AbstractFileSystem
@@ -15,8 +15,7 @@
15
15
  # specific language governing permissions and limitations
16
16
  # under the License.
17
17
 
18
- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
19
- # OVERWRITTEN WHEN PREPARING PACKAGES.
18
+ # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
20
19
  #
21
20
  # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
22
21
  # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -28,8 +27,9 @@ def get_provider_info():
28
27
  "name": "Microsoft Azure",
29
28
  "description": "`Microsoft Azure <https://azure.microsoft.com/>`__\n",
30
29
  "state": "ready",
31
- "source-date-epoch": 1734535249,
30
+ "source-date-epoch": 1739963698,
32
31
  "versions": [
32
+ "12.1.0",
33
33
  "12.0.0",
34
34
  "11.1.0",
35
35
  "11.0.0",
@@ -93,85 +93,55 @@ def get_provider_info():
93
93
  "1.1.0",
94
94
  "1.0.0",
95
95
  ],
96
- "dependencies": [
97
- "apache-airflow>=2.9.0",
98
- "adlfs>=2023.10.0",
99
- "azure-batch>=8.0.0",
100
- "azure-cosmos>=4.6.0",
101
- "azure-mgmt-cosmosdb>=3.0.0",
102
- "azure-datalake-store>=0.0.45",
103
- "azure-identity>=1.3.1",
104
- "azure-keyvault-secrets>=4.1.0",
105
- "azure-mgmt-datalake-store>=0.5.0",
106
- "azure-mgmt-resource>=2.2.0",
107
- "azure-storage-blob>=12.14.0",
108
- "azure-mgmt-storage>=16.0.0",
109
- "azure-storage-file-share>=12.7.0",
110
- "azure-servicebus>=7.12.1",
111
- "azure-synapse-spark>=0.2.0",
112
- "azure-synapse-artifacts>=0.17.0",
113
- "adal>=1.2.7",
114
- "azure-storage-file-datalake>=12.9.1",
115
- "azure-kusto-data>=4.1.0,!=4.6.0",
116
- "azure-mgmt-datafactory>=2.0.0",
117
- "azure-mgmt-containerregistry>=8.0.0",
118
- "azure-mgmt-containerinstance>=10.1.0",
119
- "msgraph-core>=1.0.0,!=1.1.8",
120
- "microsoft-kiota-http>=1.3.0,!=1.3.4",
121
- "microsoft-kiota-serialization-json==1.0.0",
122
- "microsoft-kiota-serialization-text==1.0.0",
123
- "microsoft-kiota-abstractions<1.4.0",
124
- ],
125
- "devel-dependencies": ["pywinrm>=0.4"],
126
96
  "integrations": [
127
97
  {
128
98
  "integration-name": "Microsoft Azure Batch",
129
99
  "external-doc-url": "https://azure.microsoft.com/en-us/services/batch/",
130
100
  "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst"],
131
- "logo": "/integration-logos/azure/Microsoft-Azure-Batch.png",
101
+ "logo": "/docs/integration-logos/Microsoft-Azure-Batch.png",
132
102
  "tags": ["azure"],
133
103
  },
134
104
  {
135
105
  "integration-name": "Microsoft Azure Blob Storage",
136
106
  "external-doc-url": "https://azure.microsoft.com/en-us/services/storage/blobs/",
137
- "logo": "/integration-logos/azure/Blob Storage.svg",
107
+ "logo": "/docs/integration-logos/Blob-Storage.svg",
138
108
  "tags": ["azure"],
139
109
  },
140
110
  {
141
111
  "integration-name": "Microsoft Azure Container Instances",
142
112
  "external-doc-url": "https://azure.microsoft.com/en-us/services/container-instances/",
143
- "logo": "/integration-logos/azure/Container Instances.svg",
113
+ "logo": "/docs/integration-logos/Container-Instances.svg",
144
114
  "tags": ["azure"],
145
115
  },
146
116
  {
147
117
  "integration-name": "Microsoft Azure Cosmos DB",
148
118
  "external-doc-url": "https://azure.microsoft.com/en-us/services/cosmos-db/",
149
- "logo": "/integration-logos/azure/Azure Cosmos DB.svg",
119
+ "logo": "/docs/integration-logos/Azure-Cosmos-DB.svg",
150
120
  "tags": ["azure"],
151
121
  },
152
122
  {
153
123
  "integration-name": "Microsoft Azure Data Explorer",
154
124
  "external-doc-url": "https://azure.microsoft.com/en-us/services/data-explorer/",
155
- "logo": "/integration-logos/azure/Microsoft-Azure-Data-Explorer.png",
125
+ "logo": "/docs/integration-logos/Microsoft-Azure-Data-Explorer.png",
156
126
  "tags": ["azure"],
157
127
  },
158
128
  {
159
129
  "integration-name": "Microsoft Azure Data Lake Storage",
160
130
  "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst"],
161
131
  "external-doc-url": "https://azure.microsoft.com/en-us/services/storage/data-lake-storage/",
162
- "logo": "/integration-logos/azure/Data Lake Storage.svg",
132
+ "logo": "/docs/integration-logos/Data-Lake-Storage.svg",
163
133
  "tags": ["azure"],
164
134
  },
165
135
  {
166
136
  "integration-name": "Microsoft Azure Files",
167
137
  "external-doc-url": "https://azure.microsoft.com/en-us/services/storage/files/",
168
- "logo": "/integration-logos/azure/Azure Files.svg",
138
+ "logo": "/docs/integration-logos/Azure-Files.svg",
169
139
  "tags": ["azure"],
170
140
  },
171
141
  {
172
142
  "integration-name": "Microsoft Azure FileShare",
173
143
  "external-doc-url": "https://cloud.google.com/storage/",
174
- "logo": "/integration-logos/azure/Microsoft-Azure-Fileshare.png",
144
+ "logo": "/docs/integration-logos/Microsoft-Azure-Fileshare.png",
175
145
  "tags": ["azure"],
176
146
  },
177
147
  {
@@ -180,19 +150,19 @@ def get_provider_info():
180
150
  "/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst"
181
151
  ],
182
152
  "external-doc-url": "https://azure.microsoft.com/en-us/services/data-factory/",
183
- "logo": "/integration-logos/azure/Azure Data Factory.svg",
153
+ "logo": "/docs/integration-logos/Azure-Data-Factory.svg",
184
154
  "tags": ["azure"],
185
155
  },
186
156
  {
187
157
  "integration-name": "Microsoft Azure",
188
158
  "external-doc-url": "https://azure.microsoft.com/",
189
- "logo": "/integration-logos/azure/Microsoft-Azure.png",
159
+ "logo": "/docs/integration-logos/Microsoft-Azure.png",
190
160
  "tags": ["azure"],
191
161
  },
192
162
  {
193
163
  "integration-name": "Microsoft Azure Service Bus",
194
164
  "external-doc-url": "https://azure.microsoft.com/en-us/services/service-bus/",
195
- "logo": "/integration-logos/azure/Service-Bus.svg",
165
+ "logo": "/docs/integration-logos/Service-Bus.svg",
196
166
  "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst"],
197
167
  "tags": ["azure"],
198
168
  },
@@ -207,13 +177,13 @@ def get_provider_info():
207
177
  {
208
178
  "integration-name": "Microsoft Azure Data Lake Storage Client Gen2",
209
179
  "external-doc-url": "https://azure.microsoft.com/en-us/products/storage/data-lake-storage/",
210
- "logo": "/integration-logos/azure/Data Lake Storage.svg",
180
+ "logo": "/docs/integration-logos/Data-Lake-Storage.svg",
211
181
  "tags": ["azure"],
212
182
  },
213
183
  {
214
184
  "integration-name": "Microsoft Graph API",
215
185
  "external-doc-url": "https://learn.microsoft.com/en-us/graph/use-the-api/",
216
- "logo": "/integration-logos/azure/Microsoft-Graph-API.png",
186
+ "logo": "/docs/integration-logos/Microsoft-Graph-API.png",
217
187
  "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst"],
218
188
  "tags": ["azure"],
219
189
  },
@@ -487,4 +457,40 @@ def get_provider_info():
487
457
  },
488
458
  }
489
459
  },
460
+ "dependencies": [
461
+ "apache-airflow>=2.9.0",
462
+ "adlfs>=2023.10.0",
463
+ "azure-batch>=8.0.0",
464
+ "azure-cosmos>=4.6.0",
465
+ "azure-mgmt-cosmosdb>=3.0.0",
466
+ "azure-datalake-store>=0.0.45",
467
+ "azure-identity>=1.3.1",
468
+ "azure-keyvault-secrets>=4.1.0",
469
+ "azure-mgmt-datalake-store>=0.5.0",
470
+ "azure-mgmt-resource>=2.2.0",
471
+ "azure-storage-blob>=12.14.0",
472
+ "azure-mgmt-storage>=16.0.0",
473
+ "azure-storage-file-share>=12.7.0",
474
+ "azure-servicebus>=7.12.1",
475
+ "azure-synapse-spark>=0.2.0",
476
+ "azure-synapse-artifacts>=0.17.0",
477
+ "adal>=1.2.7",
478
+ "azure-storage-file-datalake>=12.9.1",
479
+ "azure-kusto-data>=4.1.0,!=4.6.0",
480
+ "azure-mgmt-datafactory>=2.0.0",
481
+ "azure-mgmt-containerregistry>=8.0.0",
482
+ "azure-mgmt-containerinstance>=10.1.0",
483
+ "msgraph-core>=1.0.0,!=1.1.8",
484
+ "microsoft-kiota-http>=1.3.0,!=1.3.4",
485
+ "microsoft-kiota-serialization-json==1.0.0",
486
+ "microsoft-kiota-serialization-text==1.0.0",
487
+ "microsoft-kiota-abstractions<1.4.0",
488
+ ],
489
+ "optional-dependencies": {
490
+ "amazon": ["apache-airflow-providers-amazon"],
491
+ "common.compat": ["apache-airflow-providers-common-compat"],
492
+ "oracle": ["apache-airflow-providers-oracle"],
493
+ "sftp": ["apache-airflow-providers-sftp"],
494
+ },
495
+ "devel-dependencies": ["pywinrm>=0.4"],
490
496
  }
@@ -20,7 +20,7 @@ This module contains Azure Data Explorer hook.
20
20
 
21
21
  .. spelling:word-list::
22
22
 
23
- KustoResponseDataSetV
23
+ KustoResponseDataSet
24
24
  kusto
25
25
  """
26
26
 
@@ -30,18 +30,17 @@ import warnings
30
30
  from functools import cached_property
31
31
  from typing import TYPE_CHECKING, Any
32
32
 
33
- from azure.kusto.data import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
34
- from azure.kusto.data.exceptions import KustoServiceError
35
-
36
33
  from airflow.exceptions import AirflowException
37
34
  from airflow.hooks.base import BaseHook
38
35
  from airflow.providers.microsoft.azure.utils import (
39
36
  add_managed_identity_connection_widgets,
40
37
  get_sync_default_azure_credential,
41
38
  )
39
+ from azure.kusto.data import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
40
+ from azure.kusto.data.exceptions import KustoServiceError
42
41
 
43
42
  if TYPE_CHECKING:
44
- from azure.kusto.data.response import KustoResponseDataSetV2
43
+ from azure.kusto.data.response import KustoResponseDataSet
45
44
 
46
45
 
47
46
  class AzureDataExplorerHook(BaseHook):
@@ -206,11 +205,11 @@ class AzureDataExplorerHook(BaseHook):
206
205
 
207
206
  return KustoClient(kcsb)
208
207
 
209
- def run_query(self, query: str, database: str, options: dict | None = None) -> KustoResponseDataSetV2:
208
+ def run_query(self, query: str, database: str, options: dict | None = None) -> KustoResponseDataSet:
210
209
  """
211
210
  Run KQL query using provided configuration, and return KustoResponseDataSet instance.
212
211
 
213
- See: `azure.kusto.data.response.KustoResponseDataSet`
212
+ See: azure.kusto.data.response.KustoResponseDataSet
214
213
  If query is unsuccessful AirflowException is raised.
215
214
 
216
215
  :param query: KQL query to run
@@ -17,7 +17,15 @@
17
17
  from __future__ import annotations
18
18
 
19
19
  from typing import TYPE_CHECKING, Any, Callable
20
+ from uuid import uuid4
20
21
 
22
+ from airflow.hooks.base import BaseHook
23
+ from airflow.providers.microsoft.azure.utils import (
24
+ add_managed_identity_connection_widgets,
25
+ get_field,
26
+ get_sync_default_azure_credential,
27
+ )
28
+ from azure.core.exceptions import ResourceNotFoundError
21
29
  from azure.servicebus import (
22
30
  ServiceBusClient,
23
31
  ServiceBusMessage,
@@ -25,19 +33,20 @@ from azure.servicebus import (
25
33
  ServiceBusReceiver,
26
34
  ServiceBusSender,
27
35
  )
28
- from azure.servicebus.management import QueueProperties, ServiceBusAdministrationClient
29
-
30
- from airflow.hooks.base import BaseHook
31
- from airflow.providers.microsoft.azure.utils import (
32
- add_managed_identity_connection_widgets,
33
- get_field,
34
- get_sync_default_azure_credential,
36
+ from azure.servicebus.management import (
37
+ AuthorizationRule,
38
+ CorrelationRuleFilter,
39
+ QueueProperties,
40
+ ServiceBusAdministrationClient,
41
+ SqlRuleFilter,
42
+ SubscriptionProperties,
35
43
  )
36
44
 
37
45
  if TYPE_CHECKING:
38
- from azure.identity import DefaultAzureCredential
46
+ import datetime
39
47
 
40
48
  from airflow.utils.context import Context
49
+ from azure.identity import DefaultAzureCredential
41
50
 
42
51
  MessageCallback = Callable[[ServiceBusMessage, Context], None]
43
52
 
@@ -184,6 +193,226 @@ class AdminClientHook(BaseAzureServiceBusHook):
184
193
  with self.get_conn() as service_mgmt_conn:
185
194
  service_mgmt_conn.delete_queue(queue_name)
186
195
 
196
+ def create_topic(
197
+ self,
198
+ topic_name: str,
199
+ azure_service_bus_conn_id: str = "azure_service_bus_default",
200
+ default_message_time_to_live: datetime.timedelta | str | None = None,
201
+ max_size_in_megabytes: int | None = None,
202
+ requires_duplicate_detection: bool | None = None,
203
+ duplicate_detection_history_time_window: datetime.timedelta | str | None = None,
204
+ enable_batched_operations: bool | None = None,
205
+ size_in_bytes: int | None = None,
206
+ filtering_messages_before_publishing: bool | None = None,
207
+ authorization_rules: list[AuthorizationRule] | None = None,
208
+ support_ordering: bool | None = None,
209
+ auto_delete_on_idle: datetime.timedelta | str | None = None,
210
+ enable_partitioning: bool | None = None,
211
+ enable_express: bool | None = None,
212
+ user_metadata: str | None = None,
213
+ max_message_size_in_kilobytes: int | None = None,
214
+ ) -> str:
215
+ """
216
+ Create a topic by connecting to service Bus Admin client.
217
+
218
+ :param topic_name: Name of the topic.
219
+ :param default_message_time_to_live: ISO 8601 default message time span to live value. This is
220
+ the duration after which the message expires, starting from when the message is sent to Service
221
+ Bus. This is the default value used when TimeToLive is not set on a message itself.
222
+ Input value of either type ~datetime.timedelta or string in ISO 8601 duration format
223
+ like "PT300S" is accepted.
224
+ :param max_size_in_megabytes: The maximum size of the topic in megabytes, which is the size of
225
+ memory allocated for the topic.
226
+ :param requires_duplicate_detection: A value indicating if this topic requires duplicate
227
+ detection.
228
+ :param duplicate_detection_history_time_window: ISO 8601 time span structure that defines the
229
+ duration of the duplicate detection history. The default value is 10 minutes.
230
+ Input value of either type ~datetime.timedelta or string in ISO 8601 duration format
231
+ like "PT300S" is accepted.
232
+ :param enable_batched_operations: Value that indicates whether server-side batched operations
233
+ are enabled.
234
+ :param size_in_bytes: The size of the topic, in bytes.
235
+ :param filtering_messages_before_publishing: Filter messages before publishing.
236
+ :param authorization_rules: List of Authorization rules for resource.
237
+ :param support_ordering: A value that indicates whether the topic supports ordering.
238
+ :param auto_delete_on_idle: ISO 8601 time span idle interval after which the topic is
239
+ automatically deleted. The minimum duration is 5 minutes.
240
+ Input value of either type ~datetime.timedelta or string in ISO 8601 duration format
241
+ like "PT300S" is accepted.
242
+ :param enable_partitioning: A value that indicates whether the topic is to be partitioned
243
+ across multiple message brokers.
244
+ :param enable_express: A value that indicates whether Express Entities are enabled. An express
245
+ queue holds a message in memory temporarily before writing it to persistent storage.
246
+ :param user_metadata: Metadata associated with the topic.
247
+ :param max_message_size_in_kilobytes: The maximum size in kilobytes of message payload that
248
+ can be accepted by the queue. This feature is only available when using a Premium namespace
249
+ and Service Bus API version "2021-05" or higher.
250
+ The minimum allowed value is 1024 while the maximum allowed value is 102400. Default value is 1024.
251
+ """
252
+ if topic_name is None:
253
+ raise TypeError("Topic name cannot be None.")
254
+
255
+ with self.get_conn() as service_mgmt_conn:
256
+ try:
257
+ topic_properties = service_mgmt_conn.get_topic(topic_name)
258
+ except ResourceNotFoundError:
259
+ topic_properties = None
260
+ if topic_properties and topic_properties.name == topic_name:
261
+ self.log.info("Topic name already exists")
262
+ return topic_properties.name
263
+ topic = service_mgmt_conn.create_topic(
264
+ topic_name=topic_name,
265
+ default_message_time_to_live=default_message_time_to_live,
266
+ max_size_in_megabytes=max_size_in_megabytes,
267
+ requires_duplicate_detection=requires_duplicate_detection,
268
+ duplicate_detection_history_time_window=duplicate_detection_history_time_window,
269
+ enable_batched_operations=enable_batched_operations,
270
+ size_in_bytes=size_in_bytes,
271
+ filtering_messages_before_publishing=filtering_messages_before_publishing,
272
+ authorization_rules=authorization_rules,
273
+ support_ordering=support_ordering,
274
+ auto_delete_on_idle=auto_delete_on_idle,
275
+ enable_partitioning=enable_partitioning,
276
+ enable_express=enable_express,
277
+ user_metadata=user_metadata,
278
+ max_message_size_in_kilobytes=max_message_size_in_kilobytes,
279
+ )
280
+ self.log.info("Created Topic %s", topic.name)
281
+ return topic.name
282
+
283
+ def create_subscription(
284
+ self,
285
+ topic_name: str,
286
+ subscription_name: str,
287
+ lock_duration: datetime.timedelta | str | None = None,
288
+ requires_session: bool | None = None,
289
+ default_message_time_to_live: datetime.timedelta | str | None = None,
290
+ dead_lettering_on_message_expiration: bool | None = True,
291
+ dead_lettering_on_filter_evaluation_exceptions: bool | None = None,
292
+ max_delivery_count: int | None = 10,
293
+ enable_batched_operations: bool | None = True,
294
+ forward_to: str | None = None,
295
+ user_metadata: str | None = None,
296
+ forward_dead_lettered_messages_to: str | None = None,
297
+ auto_delete_on_idle: datetime.timedelta | str | None = None,
298
+ filter_rule: CorrelationRuleFilter | SqlRuleFilter | None = None,
299
+ filter_rule_name: str | None = None,
300
+ ) -> SubscriptionProperties:
301
+ """
302
+ Create a subscription with specified name on a topic and return the SubscriptionProperties for it.
303
+
304
+ An optional filter_rule can be provided to filter messages based on their properties. In particular,
305
+ the correlation ID filter can be used to pair up replies to requests.
306
+
307
+ :param topic_name: The topic that will own the to-be-created subscription.
308
+ :param subscription_name: Name of the subscription that need to be created
309
+ :param lock_duration: ISO 8601 time span duration of a peek-lock; that is, the amount of time that
310
+ the message is locked for other receivers. The maximum value for LockDuration is 5 minutes; the
311
+ default value is 1 minute. Input value of either type ~datetime.timedelta or string in ISO 8601
312
+ duration format like "PT300S" is accepted.
313
+ :param requires_session: A value that indicates whether the queue supports the concept of sessions.
314
+ :param default_message_time_to_live: ISO 8601 default message time span to live value. This is the
315
+ duration after which the message expires, starting from when the message is sent to
316
+ Service Bus. This is the default value used when TimeToLive is not set on a message itself.
317
+ Input value of either type ~datetime.timedelta or string in ISO 8601 duration
318
+ format like "PT300S" is accepted.
319
+ :param dead_lettering_on_message_expiration: A value that indicates whether this subscription has
320
+ dead letter support when a message expires.
321
+ :param dead_lettering_on_filter_evaluation_exceptions: A value that indicates whether this
322
+ subscription has dead letter support when a message expires.
323
+ :param max_delivery_count: The maximum delivery count. A message is automatically dead lettered
324
+ after this number of deliveries. Default value is 10.
325
+ :param enable_batched_operations: Value that indicates whether server-side batched
326
+ operations are enabled.
327
+ :param forward_to: The name of the recipient entity to which all the messages sent to the
328
+ subscription are forwarded to.
329
+ :param user_metadata: Metadata associated with the subscription. Maximum number of characters is 1024.
330
+ :param forward_dead_lettered_messages_to: The name of the recipient entity to which all the
331
+ messages sent to the subscription are forwarded to.
332
+ :param auto_delete_on_idle: ISO 8601 time Span idle interval after which the subscription is
333
+ automatically deleted. The minimum duration is 5 minutes. Input value of either
334
+ type ~datetime.timedelta or string in ISO 8601 duration format like "PT300S" is accepted.
335
+ :param filter_rule: Optional correlation or SQL rule filter to apply on the messages.
336
+ :param filter_rule_name: Optional rule name to use applying the rule filter to the subscription
337
+ :param azure_service_bus_conn_id: Reference to the
338
+ :ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`.
339
+ """
340
+ if subscription_name is None:
341
+ raise TypeError("Subscription name cannot be None.")
342
+ if topic_name is None:
343
+ raise TypeError("Topic name cannot be None.")
344
+
345
+ with self.get_conn() as connection:
346
+ # create subscription with name
347
+ subscription = connection.create_subscription(
348
+ topic_name=topic_name,
349
+ subscription_name=subscription_name,
350
+ lock_duration=lock_duration,
351
+ requires_session=requires_session,
352
+ default_message_time_to_live=default_message_time_to_live,
353
+ dead_lettering_on_message_expiration=dead_lettering_on_message_expiration,
354
+ dead_lettering_on_filter_evaluation_exceptions=dead_lettering_on_filter_evaluation_exceptions,
355
+ max_delivery_count=max_delivery_count,
356
+ enable_batched_operations=enable_batched_operations,
357
+ forward_to=forward_to,
358
+ user_metadata=user_metadata,
359
+ forward_dead_lettered_messages_to=forward_dead_lettered_messages_to,
360
+ auto_delete_on_idle=auto_delete_on_idle,
361
+ )
362
+
363
+ if filter_rule:
364
+ # remove default rule (which accepts all messages)
365
+ try:
366
+ connection.delete_rule(topic_name, subscription_name, "$Default")
367
+ except ResourceNotFoundError:
368
+ # as long as it is gone :)
369
+ self.log.debug("Could not find default rule '$Default' to delete; ignoring error.")
370
+
371
+ # add a rule to filter with the filter rule passed in
372
+ rule_name = filter_rule_name if filter_rule_name else "rule" + str(uuid4())
373
+ connection.create_rule(topic_name, subscription_name, rule_name, filter=filter_rule)
374
+ self.log.debug(
375
+ "Created rule %s for subscription %s on topic %s",
376
+ rule_name,
377
+ subscription_name,
378
+ topic_name,
379
+ )
380
+
381
+ return subscription
382
+
383
+ def update_subscription(
384
+ self,
385
+ topic_name: str,
386
+ subscription_name: str,
387
+ max_delivery_count: int | None = None,
388
+ dead_lettering_on_message_expiration: bool | None = None,
389
+ enable_batched_operations: bool | None = None,
390
+ ) -> None:
391
+ """
392
+ Update an Azure ServiceBus Topic Subscription under a ServiceBus Namespace.
393
+
394
+ :param topic_name: The topic that will own the to-be-created subscription.
395
+ :param subscription_name: Name of the subscription that need to be created.
396
+ :param max_delivery_count: The maximum delivery count. A message is automatically dead lettered
397
+ after this number of deliveries. Default value is 10.
398
+ :param dead_lettering_on_message_expiration: A value that indicates whether this subscription
399
+ has dead letter support when a message expires.
400
+ :param enable_batched_operations: Value that indicates whether server-side batched
401
+ operations are enabled.
402
+ """
403
+ with self.get_conn() as service_mgmt_conn:
404
+ subscription_prop = service_mgmt_conn.get_subscription(topic_name, subscription_name)
405
+ if max_delivery_count:
406
+ subscription_prop.max_delivery_count = max_delivery_count
407
+ if dead_lettering_on_message_expiration is not None:
408
+ subscription_prop.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration
409
+ if enable_batched_operations is not None:
410
+ subscription_prop.enable_batched_operations = enable_batched_operations
411
+ # update by updating the properties in the model
412
+ service_mgmt_conn.update_subscription(topic_name, subscription_prop)
413
+ updated_subscription = service_mgmt_conn.get_subscription(topic_name, subscription_name)
414
+ self.log.info("Subscription Updated successfully %s", updated_subscription.name)
415
+
187
416
  def delete_subscription(self, subscription_name: str, topic_name: str) -> None:
188
417
  """
189
418
  Delete a topic subscription entities under a ServiceBus Namespace.
@@ -18,15 +18,14 @@ from __future__ import annotations
18
18
 
19
19
  from typing import Any
20
20
 
21
- from azure.common.client_factory import get_client_from_auth_file, get_client_from_json_dict
22
- from azure.common.credentials import ServicePrincipalCredentials
23
-
24
21
  from airflow.exceptions import AirflowException
25
22
  from airflow.hooks.base import BaseHook
26
23
  from airflow.providers.microsoft.azure.utils import (
27
24
  AzureIdentityCredentialAdapter,
28
25
  add_managed_identity_connection_widgets,
29
26
  )
27
+ from azure.common.client_factory import get_client_from_auth_file, get_client_from_json_dict
28
+ from azure.common.credentials import ServicePrincipalCredentials
30
29
 
31
30
 
32
31
  class AzureBaseHook(BaseHook):
@@ -22,8 +22,6 @@ from datetime import timedelta
22
22
  from functools import cached_property
23
23
  from typing import TYPE_CHECKING, Any
24
24
 
25
- from azure.batch import BatchServiceClient, batch_auth, models as batch_models
26
-
27
25
  from airflow.exceptions import AirflowException
28
26
  from airflow.hooks.base import BaseHook
29
27
  from airflow.providers.microsoft.azure.utils import (
@@ -32,6 +30,7 @@ from airflow.providers.microsoft.azure.utils import (
32
30
  get_field,
33
31
  )
34
32
  from airflow.utils import timezone
33
+ from azure.batch import BatchServiceClient, batch_auth, models as batch_models
35
34
 
36
35
  if TYPE_CHECKING:
37
36
  from azure.batch.models import JobAddParameter, PoolAddParameter, TaskAddParameter
@@ -20,13 +20,12 @@ from __future__ import annotations
20
20
  from functools import cached_property
21
21
  from typing import TYPE_CHECKING, Any, cast
22
22
 
23
- from azure.common.client_factory import get_client_from_auth_file, get_client_from_json_dict
24
- from azure.identity import ClientSecretCredential, DefaultAzureCredential
25
- from azure.mgmt.containerinstance import ContainerInstanceManagementClient
26
-
27
23
  from airflow.exceptions import AirflowException
28
24
  from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
29
25
  from airflow.providers.microsoft.azure.utils import get_sync_default_azure_credential
26
+ from azure.common.client_factory import get_client_from_auth_file, get_client_from_json_dict
27
+ from azure.identity import ClientSecretCredential, DefaultAzureCredential
28
+ from azure.mgmt.containerinstance import ContainerInstanceManagementClient
30
29
 
31
30
  if TYPE_CHECKING:
32
31
  from azure.mgmt.containerinstance.models import (