apache-airflow-providers-microsoft-azure 9.0.1rc1__py3-none-any.whl → 10.0.0rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in those public registries.
--- airflow/providers/microsoft/azure/__init__.py
+++ airflow/providers/microsoft/azure/__init__.py
@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "9.0.1"
+__version__ = "10.0.0"
 
 try:
     from airflow import __version__ as airflow_version
--- airflow/providers/microsoft/azure/get_provider_info.py
+++ airflow/providers/microsoft/azure/get_provider_info.py
@@ -30,6 +30,7 @@ def get_provider_info():
         "state": "ready",
         "source-date-epoch": 1709555852,
         "versions": [
+            "10.0.0",
             "9.0.1",
             "9.0.0",
             "8.5.1",
@@ -85,7 +86,7 @@ def get_provider_info():
             "apache-airflow>=2.6.0",
             "adlfs>=2023.10.0",
             "azure-batch>=8.0.0",
-            "azure-cosmos>=4.0.0",
+            "azure-cosmos>=4.6.0",
             "azure-mgmt-cosmosdb",
             "azure-datalake-store>=0.0.45",
             "azure-identity>=1.3.1",
@@ -95,7 +96,7 @@ def get_provider_info():
             "azure-storage-blob>=12.14.0",
             "azure-mgmt-storage>=16.0.0",
             "azure-storage-file-share",
-            "azure-servicebus>=7.6.1",
+            "azure-servicebus>=7.12.1",
             "azure-synapse-spark",
             "azure-synapse-artifacts>=0.17.0",
             "adal>=1.2.7",
@@ -392,17 +393,13 @@ def get_provider_info():
                 "connection-type": "azure_service_bus",
             },
             {
-                "hook-class-name": "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook",
+                "hook-class-name": "airflow.providers.microsoft.azure.hooks.synapse.BaseAzureSynapseHook",
                 "connection-type": "azure_synapse",
             },
             {
                 "hook-class-name": "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeStorageV2Hook",
                 "connection-type": "adls",
             },
-            {
-                "hook-class-name": "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapsePipelineHook",
-                "connection-type": "azure_synapse_pipeline",
-            },
         ],
         "secrets-backends": ["airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend"],
         "logging": ["airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler"],
--- airflow/providers/microsoft/azure/hooks/adx.py
+++ airflow/providers/microsoft/azure/hooks/adx.py
@@ -23,6 +23,7 @@ This module contains Azure Data Explorer hook.
     KustoResponseDataSetV
     kusto
 """
+
 from __future__ import annotations
 
 import warnings
--- airflow/providers/microsoft/azure/hooks/container_registry.py
+++ airflow/providers/microsoft/azure/hooks/container_registry.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """Hook for Azure Container Registry."""
+
 from __future__ import annotations
 
 from functools import cached_property
--- airflow/providers/microsoft/azure/hooks/cosmos.py
+++ airflow/providers/microsoft/azure/hooks/cosmos.py
@@ -23,12 +23,14 @@ Airflow connection of type `azure_cosmos` exists. Authorization can be done by s
 login (=Endpoint uri), password (=secret key) and extra fields database_name and collection_name to specify
 the default database and collection to use (see connection `azure_cosmos_default` for an example).
 """
+
 from __future__ import annotations
 
 import uuid
-from typing import Any
+from typing import TYPE_CHECKING, Any, List, Union
 from urllib.parse import urlparse
 
+from azure.cosmos import PartitionKey
 from azure.cosmos.cosmos_client import CosmosClient
 from azure.cosmos.exceptions import CosmosHttpResponseError
 from azure.mgmt.cosmosdb import CosmosDBManagementClient
@@ -41,6 +43,9 @@ from airflow.providers.microsoft.azure.utils import (
     get_sync_default_azure_credential,
 )
 
+if TYPE_CHECKING:
+    PartitionKeyType = Union[str, List[str]]
+
 
 class AzureCosmosDBHook(BaseHook):
     """
@@ -110,6 +115,7 @@ class AzureCosmosDBHook(BaseHook):
 
         self.default_database_name = None
         self.default_collection_name = None
+        self.default_partition_key = None
 
     def _get_field(self, extras, name):
         return get_field(
@@ -152,6 +158,7 @@ class AzureCosmosDBHook(BaseHook):
 
         self.default_database_name = self._get_field(extras, "database_name")
         self.default_collection_name = self._get_field(extras, "collection_name")
+        self.default_partition_key = self._get_field(extras, "partition_key")
 
         # Initialize the Python Azure Cosmos DB client
         self._conn = CosmosClient(endpoint_uri, {"masterKey": master_key})
@@ -179,6 +186,18 @@ class AzureCosmosDBHook(BaseHook):
 
         return coll_name
 
+    def __get_partition_key(self, partition_key: PartitionKeyType | None = None) -> PartitionKeyType:
+        self.get_conn()
+        if partition_key is None:
+            part_key = self.default_partition_key
+        else:
+            part_key = partition_key
+
+        if part_key is None:
+            raise AirflowBadRequest("Partition key must be specified")
+
+        return part_key
+
     def does_collection_exist(self, collection_name: str, database_name: str) -> bool:
         """Check if a collection exists in CosmosDB."""
         if collection_name is None:
@@ -203,7 +222,7 @@ class AzureCosmosDBHook(BaseHook):
         self,
         collection_name: str,
         database_name: str | None = None,
-        partition_key: str | None = None,
+        partition_key: PartitionKeyType | None = None,
     ) -> None:
         """Create a new collection in the CosmosDB database."""
         if collection_name is None:
@@ -225,7 +244,8 @@ class AzureCosmosDBHook(BaseHook):
         # Only create if we did not find it already existing
         if not existing_container:
             self.get_conn().get_database_client(self.__get_database_name(database_name)).create_container(
-                collection_name, partition_key=partition_key
+                collection_name,
+                partition_key=PartitionKey(path=self.__get_partition_key(partition_key)),
             )
 
     def does_database_exist(self, database_name: str) -> bool:
@@ -327,7 +347,7 @@ class AzureCosmosDBHook(BaseHook):
         document_id: str,
         database_name: str | None = None,
         collection_name: str | None = None,
-        partition_key: str | None = None,
+        partition_key: PartitionKeyType | None = None,
     ) -> None:
         """Delete an existing document out of a collection in the CosmosDB database."""
         if document_id is None:
@@ -336,7 +356,7 @@ class AzureCosmosDBHook(BaseHook):
             self.get_conn()
            .get_database_client(self.__get_database_name(database_name))
            .get_container_client(self.__get_collection_name(collection_name))
-           .delete_item(document_id, partition_key=partition_key)
+           .delete_item(document_id, partition_key=self.__get_partition_key(partition_key))
         )
 
     def get_document(
@@ -344,7 +364,7 @@ class AzureCosmosDBHook(BaseHook):
         document_id: str,
         database_name: str | None = None,
         collection_name: str | None = None,
-        partition_key: str | None = None,
+        partition_key: PartitionKeyType | None = None,
     ):
         """Get a document from an existing collection in the CosmosDB database."""
         if document_id is None:
@@ -355,7 +375,7 @@ class AzureCosmosDBHook(BaseHook):
                 self.get_conn()
                .get_database_client(self.__get_database_name(database_name))
                .get_container_client(self.__get_collection_name(collection_name))
-               .read_item(document_id, partition_key=partition_key)
+               .read_item(document_id, partition_key=self.__get_partition_key(partition_key))
             )
         except CosmosHttpResponseError:
             return None
@@ -365,7 +385,7 @@ class AzureCosmosDBHook(BaseHook):
         sql_string: str,
         database_name: str | None = None,
         collection_name: str | None = None,
-        partition_key: str | None = None,
+        partition_key: PartitionKeyType | None = None,
     ) -> list | None:
         """Get a list of documents from an existing collection in the CosmosDB database via SQL query."""
         if sql_string is None:
@@ -376,7 +396,7 @@ class AzureCosmosDBHook(BaseHook):
                 self.get_conn()
                .get_database_client(self.__get_database_name(database_name))
                .get_container_client(self.__get_collection_name(collection_name))
-               .query_items(sql_string, partition_key=partition_key)
+               .query_items(sql_string, partition_key=self.__get_partition_key(partition_key))
             )
             return list(result_iterable)
         except CosmosHttpResponseError:
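
Editor's note: taken together, the ``hooks/cosmos.py`` changes make the partition key a first-class setting: ``AzureCosmosDBHook`` now reads a ``partition_key`` extra from the connection, accepts a ``str`` or list of paths (``PartitionKeyType``), and wraps the path in ``azure.cosmos.PartitionKey`` when creating containers. A minimal usage sketch, assuming an existing ``azure_cosmos_default`` connection; database, collection, and key names below are illustrative and not part of the package::

    from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook

    hook = AzureCosmosDBHook(azure_cosmos_conn_id="azure_cosmos_default")

    # create_collection now wraps the key path in azure.cosmos.PartitionKey;
    # either a single path or a list of paths is accepted.
    hook.create_collection("orders", database_name="sales", partition_key="/customer_id")

    # The document-level methods route the key through __get_partition_key: an
    # explicit argument wins, otherwise the connection extra "partition_key" is
    # used, otherwise AirflowBadRequest is raised.
    hook.delete_document(
        "order-1", database_name="sales", collection_name="orders", partition_key="customer-42"
    )
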
--- airflow/providers/microsoft/azure/hooks/data_factory.py
+++ airflow/providers/microsoft/azure/hooks/data_factory.py
@@ -29,6 +29,7 @@
     DataFlowResource
     mgmt
 """
+
 from __future__ import annotations
 
 import inspect
--- airflow/providers/microsoft/azure/hooks/synapse.py
+++ airflow/providers/microsoft/azure/hooks/synapse.py
@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 import time
+import warnings
 from typing import TYPE_CHECKING, Any, Union
 
 from azure.core.exceptions import ServiceRequestError
@@ -24,7 +25,7 @@ from azure.identity import ClientSecretCredential, DefaultAzureCredential
 from azure.synapse.artifacts import ArtifactsClient
 from azure.synapse.spark import SparkClient
 
-from airflow.exceptions import AirflowException, AirflowTaskTimeout
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowTaskTimeout
 from airflow.hooks.base import BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
@@ -240,20 +241,20 @@ class AzureSynapsePipelineRunException(AirflowException):
     """An exception that indicates a pipeline run failed to complete."""
 
 
-class AzureSynapsePipelineHook(BaseHook):
+class BaseAzureSynapseHook(BaseHook):
     """
-    A hook to interact with Azure Synapse Pipeline.
+    A base hook class to create session and connection to Azure Synapse using connection id.
 
     :param azure_synapse_conn_id: The :ref:`Azure Synapse connection id<howto/connection:synapse>`.
-    :param azure_synapse_workspace_dev_endpoint: The Azure Synapse Workspace development endpoint.
     """
 
-    conn_type: str = "azure_synapse_pipeline"
+    conn_type: str = "azure_synapse"
     conn_name_attr: str = "azure_synapse_conn_id"
-    default_conn_name: str = "azure_synapse_connection"
-    hook_name: str = "Azure Synapse Pipeline"
+    default_conn_name: str = "azure_synapse_default"
+    hook_name: str = "Azure Synapse"
 
     @classmethod
+    @add_managed_identity_connection_widgets
     def get_connection_form_widgets(cls) -> dict[str, Any]:
         """Return connection widgets to add to connection form."""
         from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
@@ -262,6 +263,7 @@ class AzureSynapsePipelineHook(BaseHook):
 
         return {
             "tenantId": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()),
+            "subscriptionId": StringField(lazy_gettext("Subscription ID"), widget=BS3TextFieldWidget()),
         }
 
     @classmethod
@@ -269,16 +271,51 @@ class AzureSynapsePipelineHook(BaseHook):
         """Return custom field behaviour."""
         return {
             "hidden_fields": ["schema", "port", "extra"],
-            "relabeling": {"login": "Client ID", "password": "Secret", "host": "Synapse Workspace URL"},
+            "relabeling": {
+                "login": "Client ID",
+                "password": "Secret",
+                "host": "Synapse Workspace URL",
+            },
         }
 
+    def __init__(self, azure_synapse_conn_id: str = default_conn_name, **kwargs) -> None:
+        super().__init__(**kwargs)
+        self.conn_id = azure_synapse_conn_id
+
+    def _get_field(self, extras: dict, field_name: str) -> str:
+        return get_field(
+            conn_id=self.conn_id,
+            conn_type=self.conn_type,
+            extras=extras,
+            field_name=field_name,
+        )
+
+
+class AzureSynapsePipelineHook(BaseAzureSynapseHook):
+    """
+    A hook to interact with Azure Synapse Pipeline.
+
+    :param azure_synapse_conn_id: The :ref:`Azure Synapse connection id<howto/connection:synapse>`.
+    :param azure_synapse_workspace_dev_endpoint: The Azure Synapse Workspace development endpoint.
+    """
+
+    default_conn_name: str = "azure_synapse_connection"
+
     def __init__(
-        self, azure_synapse_workspace_dev_endpoint: str, azure_synapse_conn_id: str = default_conn_name
+        self,
+        azure_synapse_workspace_dev_endpoint: str,
+        azure_synapse_conn_id: str = default_conn_name,
+        **kwargs,
     ):
-        self._conn = None
-        self.conn_id = azure_synapse_conn_id
+        # Handling deprecation of "default_conn_name"
+        if azure_synapse_conn_id == self.default_conn_name:
+            warnings.warn(
+                "The usage of `default_conn_name=azure_synapse_connection` is deprecated and will be removed in future. Please update your code to use the new default connection name: `default_conn_name=azure_synapse_default`. ",
+                AirflowProviderDeprecationWarning,
+            )
+        self._conn: ArtifactsClient | None = None
         self.azure_synapse_workspace_dev_endpoint = azure_synapse_workspace_dev_endpoint
-        super().__init__()
+        super().__init__(azure_synapse_conn_id=azure_synapse_conn_id, **kwargs)
 
     def _get_field(self, extras, name):
         return get_field(
@@ -297,15 +334,22 @@ class AzureSynapsePipelineHook(BaseHook):
         tenant = self._get_field(extras, "tenantId")
 
         credential: Credentials
-        if conn.login is not None and conn.password is not None:
+        if not conn.login or not conn.password:
+            managed_identity_client_id = self._get_field(extras, "managed_identity_client_id")
+            workload_identity_tenant_id = self._get_field(extras, "workload_identity_tenant_id")
+
+            credential = get_sync_default_azure_credential(
+                managed_identity_client_id=managed_identity_client_id,
+                workload_identity_tenant_id=workload_identity_tenant_id,
+            )
+        else:
             if not tenant:
                 raise ValueError("A Tenant ID is required when authenticating with Client ID and Secret.")
 
             credential = ClientSecretCredential(
                 client_id=conn.login, client_secret=conn.password, tenant_id=tenant
             )
-        else:
-            credential = DefaultAzureCredential()
+
         self._conn = self._create_client(credential, self.azure_synapse_workspace_dev_endpoint)
 
         if self._conn is not None:
@@ -314,7 +358,7 @@ class AzureSynapsePipelineHook(BaseHook):
             raise ValueError("Failed to create ArtifactsClient")
 
     @staticmethod
-    def _create_client(credential: Credentials, endpoint: str):
+    def _create_client(credential: Credentials, endpoint: str) -> ArtifactsClient:
        return ArtifactsClient(credential=credential, endpoint=endpoint)
 
     def run_pipeline(self, pipeline_name: str, **config: Any) -> CreateRunResponse:
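
Editor's note: the ``hooks/synapse.py`` refactor introduces ``BaseAzureSynapseHook`` (registered for the ``azure_synapse`` connection type), makes ``AzureSynapsePipelineHook`` a subclass of it, removes the separate ``azure_synapse_pipeline`` connection type, and emits an ``AirflowProviderDeprecationWarning`` when the old default connection name ``azure_synapse_connection`` is used. A short sketch of the updated call pattern; the endpoint, connection id, and pipeline name are illustrative::

    from airflow.providers.microsoft.azure.hooks.synapse import AzureSynapsePipelineHook

    # Passing an explicit connection id other than "azure_synapse_connection"
    # avoids the deprecation warning added in this release.
    hook = AzureSynapsePipelineHook(
        azure_synapse_workspace_dev_endpoint="https://myworkspace.dev.azuresynapse.net",
        azure_synapse_conn_id="azure_synapse_default",
    )

    run = hook.run_pipeline("my_pipeline")
    print(run.run_id)
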
--- airflow/providers/microsoft/azure/hooks/wasb.py
+++ airflow/providers/microsoft/azure/hooks/wasb.py
@@ -23,6 +23,7 @@ Airflow connection of type `wasb` exists. Authorization can be done by supplying
 login (=Storage account name) and password (=KEY), or login and SAS token in the extra
 field (see connection `wasb_default` for an example).
 """
+
 from __future__ import annotations
 
 import logging
--- airflow/providers/microsoft/azure/operators/adls.py
+++ airflow/providers/microsoft/azure/operators/adls.py
@@ -16,22 +16,74 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import IO, TYPE_CHECKING, Any, AnyStr, Iterable, Sequence
 
 from airflow.models import BaseOperator
-from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook, AzureDataLakeStorageV2Hook
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
+DEFAULT_AZURE_DATA_LAKE_CONN_ID = "azure_data_lake_default"
+
+
+class ADLSCreateObjectOperator(BaseOperator):
+    """
+    Creates a new object from passed data to Azure Data Lake on specified file.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ADLSCreateObjectOperator`
+
+    :param file_system_name: Name of the file system or instance of FileSystemProperties.
+    :param file_name: Name of the file which needs to be created in the file system.
+    :param data: The data that will be uploaded.
+    :param length: Size of the data in bytes (optional).
+    :param replace: Whether to forcibly overwrite existing files/directories.
+        If False and remote path is a directory, will quit regardless if any files
+        would be overwritten or not. If True, only matching filenames are actually
+        overwritten.
+    :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`.
+    """
+
+    template_fields: Sequence[str] = ("file_system_name", "file_name", "data")
+    ui_color = "#e4f0e8"
+
+    def __init__(
+        self,
+        *,
+        file_system_name: str,
+        file_name: str,
+        data: bytes | str | Iterable[AnyStr] | IO[AnyStr],
+        length: int | None = None,
+        replace: bool = False,
+        azure_data_lake_conn_id: str = DEFAULT_AZURE_DATA_LAKE_CONN_ID,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.file_system_name = file_system_name
+        self.file_name = file_name
+        self.replace = replace
+        self.data = data  # type: ignore[var-annotated]
+        self.length = length
+        self.azure_data_lake_conn_id = azure_data_lake_conn_id
+
+    def execute(self, context: Context) -> dict[str, Any]:
+        self.log.debug("Uploading %s to %s", self.data, self.file_name)
+        hook = AzureDataLakeStorageV2Hook(adls_conn_id=self.azure_data_lake_conn_id)
+        return hook.create_file(file_system_name=self.file_system_name, file_name=self.file_name).upload_data(
+            data=self.data, length=self.length, overwrite=self.replace
+        )
+
 
 
 class ADLSDeleteOperator(BaseOperator):
     """
     Delete files in the specified path.
-        .. seealso::
-            For more information on how to use this operator, take a look at the guide:
-            :ref:`howto/operator:ADLSDeleteOperator`
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ADLSDeleteOperator`
 
     :param path: A directory or file to remove
     :param recursive: Whether to loop into directories in the location and remove the files
@@ -48,7 +100,7 @@ class ADLSDeleteOperator(BaseOperator):
         path: str,
         recursive: bool = False,
         ignore_not_found: bool = True,
-        azure_data_lake_conn_id: str = "azure_data_lake_default",
+        azure_data_lake_conn_id: str = DEFAULT_AZURE_DATA_LAKE_CONN_ID,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -69,26 +121,19 @@ class ADLSListOperator(BaseOperator):
     This operator returns a python list with the names of files which can be used by
     `xcom` in the downstream tasks.
 
-    :param path: The Azure Data Lake path to find the objects. Supports glob
-        strings (templated)
-    :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`.
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ADLSListOperator`
 
-    **Example**:
-        The following Operator would list all the Parquet files from ``folder/output/``
-        folder in the specified ADLS account ::
-
-            adls_files = ADLSListOperator(
-                task_id="adls_files",
-                path="folder/output/*.parquet",
-                azure_data_lake_conn_id="azure_data_lake_default",
-            )
+    :param path: The Azure Data Lake path to find the objects. Supports glob strings (templated)
+    :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`.
     """
 
     template_fields: Sequence[str] = ("path",)
     ui_color = "#901dd2"
 
     def __init__(
-        self, *, path: str, azure_data_lake_conn_id: str = "azure_data_lake_default", **kwargs
+        self, *, path: str, azure_data_lake_conn_id: str = DEFAULT_AZURE_DATA_LAKE_CONN_ID, **kwargs
     ) -> None:
         super().__init__(**kwargs)
         self.path = path
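
Editor's note: the new ``ADLSCreateObjectOperator`` writes arbitrary data to a file in an ADLS Gen2 file system via ``AzureDataLakeStorageV2Hook``. A minimal task-definition sketch (to be placed inside a DAG), assuming an ``adls``-type connection named ``azure_data_lake_default``; the file system and file names are illustrative::

    from airflow.providers.microsoft.azure.operators.adls import ADLSCreateObjectOperator

    upload_report = ADLSCreateObjectOperator(
        task_id="upload_report",
        file_system_name="reports",            # illustrative container / file system
        file_name="daily/2024-03-04.json",     # illustrative path within the file system
        data='{"status": "ok"}',
        replace=True,
        azure_data_lake_conn_id="azure_data_lake_default",
    )
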
--- airflow/providers/microsoft/azure/operators/adx.py
+++ airflow/providers/microsoft/azure/operators/adx.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains Azure Data Explorer operators."""
+
 from __future__ import annotations
 
 from functools import cached_property
--- airflow/providers/microsoft/azure/operators/synapse.py
+++ airflow/providers/microsoft/azure/operators/synapse.py
@@ -110,7 +110,7 @@ class AzureSynapseRunSparkBatchOperator(BaseOperator):
         ):
             self.log.info("Job run %s has completed successfully.", self.job_id)
         else:
-            raise Exception(f"Job run {self.job_id} has failed or has been cancelled.")
+            raise AirflowException(f"Job run {self.job_id} has failed or has been cancelled.")
 
     def on_kill(self) -> None:
         if self.job_id:
--- /dev/null
+++ airflow/providers/microsoft/azure/serialization/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
--- airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
+++ airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains SFTP to Azure Blob Storage operator."""
+
 from __future__ import annotations
 
 import os
--- airflow/providers/microsoft/azure/triggers/wasb.py
+++ airflow/providers/microsoft/azure/triggers/wasb.py
@@ -78,6 +78,7 @@ class WasbBlobSensorTrigger(BaseTrigger):
             if blob_exists:
                 message = f"Blob {self.blob_name} found in container {self.container_name}."
                 yield TriggerEvent({"status": "success", "message": message})
+                return
             else:
                 message = (
                     f"Blob {self.blob_name} not available yet in container {self.container_name}."
@@ -154,6 +155,7 @@ class WasbPrefixSensorTrigger(BaseTrigger):
             if prefix_exists:
                 message = f"Prefix {self.prefix} found in container {self.container_name}."
                 yield TriggerEvent({"status": "success", "message": message})
+                return
             else:
                 message = (
                     f"Prefix {self.prefix} not available yet in container {self.container_name}."
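
Editor's note: the added ``return`` statements in ``triggers/wasb.py`` end the trigger's polling loop as soon as the success ``TriggerEvent`` has been emitted; without them the async generator would sleep and poll again after the sensor had already succeeded. A stripped-down sketch of the pattern, with names and the event payload simplified from the actual ``WasbBlobSensorTrigger``/``WasbPrefixSensorTrigger`` implementations::

    from __future__ import annotations

    import asyncio
    from collections.abc import AsyncIterator, Awaitable, Callable
    from typing import Any


    async def poll_until_found(
        check: Callable[[], Awaitable[bool]],
        interval: float = 5.0,
    ) -> AsyncIterator[dict[str, Any]]:
        """Simplified shape of the provider's WASB trigger loop."""
        while True:
            if await check():
                # Emitting the event and then returning finishes the generator;
                # this mirrors the fix applied to both WASB triggers above.
                yield {"status": "success"}
                return
            await asyncio.sleep(interval)
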
--- apache_airflow_providers_microsoft_azure-9.0.1rc1.dist-info/METADATA
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-microsoft-azure
-Version: 9.0.1rc1
+Version: 10.0.0rc1
 Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
 Keywords: airflow-provider,microsoft.azure,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -19,12 +19,13 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: adal>=1.2.7
 Requires-Dist: adlfs>=2023.10.0
-Requires-Dist: apache-airflow>=2.6.0.dev0
+Requires-Dist: apache-airflow>=2.6.0rc0
 Requires-Dist: azure-batch>=8.0.0
-Requires-Dist: azure-cosmos>=4.0.0
+Requires-Dist: azure-cosmos>=4.6.0
 Requires-Dist: azure-datalake-store>=0.0.45
 Requires-Dist: azure-identity>=1.3.1
 Requires-Dist: azure-keyvault-secrets>=4.1.0
@@ -36,7 +37,7 @@ Requires-Dist: azure-mgmt-datafactory>=2.0.0
 Requires-Dist: azure-mgmt-datalake-store>=0.5.0
 Requires-Dist: azure-mgmt-resource>=2.2.0
 Requires-Dist: azure-mgmt-storage>=16.0.0
-Requires-Dist: azure-servicebus>=7.6.1
+Requires-Dist: azure-servicebus>=7.12.1
 Requires-Dist: azure-storage-blob>=12.14.0
 Requires-Dist: azure-storage-file-datalake>=12.9.1
 Requires-Dist: azure-storage-file-share
@@ -46,8 +47,8 @@ Requires-Dist: apache-airflow-providers-google ; extra == "google"
 Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/9.0.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/9.0.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -100,7 +101,7 @@ Provides-Extra: sftp
 
 Package ``apache-airflow-providers-microsoft-azure``
 
-Release: ``9.0.1.rc1``
+Release: ``10.0.0.rc1``
 
 
 `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -113,7 +114,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
 are in ``airflow.providers.microsoft.azure`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/9.0.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/>`_.
 
 Installation
 ------------
@@ -122,7 +123,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-microsoft-azure``
 
-The package supports the following python versions: 3.8,3.9,3.10,3.11
+The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 
 Requirements
 ------------
@@ -133,7 +134,7 @@ PIP package Version required
 ``apache-airflow``           ``>=2.6.0``
 ``adlfs``                    ``>=2023.10.0``
 ``azure-batch``              ``>=8.0.0``
-``azure-cosmos``             ``>=4.0.0``
+``azure-cosmos``             ``>=4.6.0``
 ``azure-mgmt-cosmosdb``
 ``azure-datalake-store``     ``>=0.0.45``
 ``azure-identity``           ``>=1.3.1``
@@ -143,7 +144,7 @@ PIP package Version required
 ``azure-storage-blob``       ``>=12.14.0``
 ``azure-mgmt-storage``       ``>=16.0.0``
 ``azure-storage-file-share``
-``azure-servicebus``         ``>=7.6.1``
+``azure-servicebus``         ``>=7.12.1``
 ``azure-synapse-spark``
 ``azure-synapse-artifacts``  ``>=0.17.0``
 ``adal``                     ``>=1.2.7``
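
Editor's note: the raised dependency floors in this release (``azure-cosmos`` 4.0.0 → 4.6.0, ``azure-servicebus`` 7.6.1 → 7.12.1) mean that environments pinning the older libraries must upgrade them alongside the provider. An illustrative upgrade command (not part of the package metadata)::

    pip install --upgrade "apache-airflow-providers-microsoft-azure==10.0.0rc1" "azure-cosmos>=4.6.0" "azure-servicebus>=7.12.1"
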
@@ -176,4 +177,4 @@ Dependent package
 ==================================================================================================== ==========
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/9.0.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.0.0/changelog.html>`_.
--- apache_airflow_providers_microsoft_azure-9.0.1rc1.dist-info/RECORD
+++ apache_airflow_providers_microsoft_azure-10.0.0rc1.dist-info/RECORD
@@ -1,34 +1,34 @@
 airflow/providers/microsoft/azure/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
-airflow/providers/microsoft/azure/__init__.py,sha256=w1Jguqy2H6_sHGiPTuCxMlHT0ZwQghO9B3rsCxujm-c,1590
-airflow/providers/microsoft/azure/get_provider_info.py,sha256=sCOK_AUSlkQlJXDIqSURwDFCGy8ZeNZyH10YGqUINFY,18925
+airflow/providers/microsoft/azure/__init__.py,sha256=4wim7VWC7Ike16uzbgZ7A2_k8bY3CnBmX2hbnjnJYfM,1591
+airflow/providers/microsoft/azure/get_provider_info.py,sha256=tRRll2qDstnLsgcLQLNvlC95fSZvOIvy7NBRsUIia9M,18751
 airflow/providers/microsoft/azure/utils.py,sha256=Cq88MWL4SZ9vv8PxLM86kNRYa80XDJ_CVgKgdNu9tp4,7187
 airflow/providers/microsoft/azure/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/fs/adls.py,sha256=FOxPfMhQ-BNg8C7AYmNQO9C8YBAOW4vLN8TAcAXPdg8,1697
 airflow/providers/microsoft/azure/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/microsoft/azure/hooks/adx.py,sha256=T9EOkTXEgp8LapTev1z18NWsrZGvg3XMA_SybGk6uOo,9999
+airflow/providers/microsoft/azure/hooks/adx.py,sha256=ufxzcrLFHu-sr_0EySxEQRjEA3bHSZSl7D_U0bP-Ig4,10000
 airflow/providers/microsoft/azure/hooks/asb.py,sha256=YTvSEXCbJCG93fgS0TDCWujXB2Ze-1f9P9NkOefK8bA,14429
 airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=DzqBDInE0vSPvWRK3AYBC9q74E4dLQRSkwjhY2nVyC0,6283
 airflow/providers/microsoft/azure/hooks/batch.py,sha256=BK2ciseUyGNFxOC0mEnPohZRfBlHf_UTyE0mB0tvn0g,16114
 airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=KAzkf1veBJ4AtCvOY_WFa5HqZUoQJOMHekeAJUnmg1I,9015
-airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=Rb4rfmI8zehSbSIIDr8CvHw6oCX_9ct7PyEK_XY40Aw,4854
+airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=ii6GiOvl3p0lSGEyerHuhpvJC1bh18Asz4QmatXipYY,4855
 airflow/providers/microsoft/azure/hooks/container_volume.py,sha256=ZSe2C3y5xttxp8Ir9dgihZnQzcArckhPazxHg1iknYk,5758
-airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=8uTTQPECfOBukSL1NvIff5AbX9kxYZFbFsOuxi4Bki4,16637
-airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=YRxPxtfROWYQ0kiKreB6aF4T5tEr-h-shI_tWBvpOAQ,45554
+airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=UCEM1xFj5wOGdNgy45fjbXgaB1QQWYaw12UD9TZVS1c,17472
+airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=OZj2mzMmZxTq0Rkz2XnqfDoSRyUBo2PqEWTKWV_rZk8,45555
 airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=OoSWCphn5JBIkjRuvLl-gyTIUZnOMIx0CGyYlYYKGu4,23580
 airflow/providers/microsoft/azure/hooks/fileshare.py,sha256=jaHSD_xZxposSD5FbdlpZ7JK_CugFHNrgejZkZbHJXM,10884
-airflow/providers/microsoft/azure/hooks/synapse.py,sha256=yUOBz4LphPkJfVm6R1os0uKeI1VQa43E1rmE-QdU_nM,14730
-airflow/providers/microsoft/azure/hooks/wasb.py,sha256=y4ROoGLnpqijJ1D2r3sqyPaqepubFwGpWCLca9fDfKc,31121
+airflow/providers/microsoft/azure/hooks/synapse.py,sha256=9vipfj81dF5eehanajTYSzMBrpRv0ua0lPVa89Z-MAw,16565
+airflow/providers/microsoft/azure/hooks/wasb.py,sha256=EqXMsd8uaDc091wIXtbieesQy2gOjHVVEWfJPOy0GRA,31122
 airflow/providers/microsoft/azure/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/log/wasb_task_handler.py,sha256=lP57KoLjWvG9MQZvLOMaXvl7c_8krtK9E1jq89UGX_Y,9910
 airflow/providers/microsoft/azure/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/microsoft/azure/operators/adls.py,sha256=M_maZfDZQ8B5b5kVhoDBkTBsN7Zb6-LZZEGbAXcCQ5U,3790
-airflow/providers/microsoft/azure/operators/adx.py,sha256=y6rOJK2CMl2INZu55o64guyEyctTSbGldL7wLjhjh0Y,3406
+airflow/providers/microsoft/azure/operators/adls.py,sha256=CSTn9Ukpcs8e5AUEGUM7nmn2vlzRQQ_mRTQ4TsB30pU,5789
+airflow/providers/microsoft/azure/operators/adx.py,sha256=2dWB1SUHtjn7WmDIKoOONt0Zll9F5n3JJSY2GmNUgRw,3407
 airflow/providers/microsoft/azure/operators/asb.py,sha256=xoh8i2rvyKYLKoLLcx-2DTWTQsAr_zR_jZ7bTlsX_YU,29415
 airflow/providers/microsoft/azure/operators/batch.py,sha256=BxhI8H9vzCtvPZo7bjQtNirRAlRCOyo8UK2D7GZCJkk,16538
 airflow/providers/microsoft/azure/operators/container_instances.py,sha256=TWeRMLCp2Ku_PJ5D1gtEOOtaQ0Eg5tNzfSp5Xxunzyg,15731
 airflow/providers/microsoft/azure/operators/cosmos.py,sha256=Y1Hj4p6W8soVFaq1rx8LFgchNISjkq8vjdaQ0j8Tnqs,2782
 airflow/providers/microsoft/azure/operators/data_factory.py,sha256=TrPMXlOAY3VHVeoysqJnq-ugBC_H53cKKSGfJfu9Dno,12441
-airflow/providers/microsoft/azure/operators/synapse.py,sha256=yb3y_N1dHF5ZDhD7GQfv_EnIAWmME55eaZrjrmDmVgk,12379
+airflow/providers/microsoft/azure/operators/synapse.py,sha256=gDniGvmCUooT8_jvIAJlc8zgPS57d0N_esL4e9xz7bo,12386
 airflow/providers/microsoft/azure/operators/wasb_delete_blob.py,sha256=GUfV9DLU1bGYvn2TE54iTYBTbxn3Jm_e985pXp_0IsE,2687
 airflow/providers/microsoft/azure/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/secrets/key_vault.py,sha256=m8IwaMpXb9Zr0IPO_Tw9RTmM3xx_RlAHl5rNSqNFt28,9666
@@ -36,16 +36,17 @@ airflow/providers/microsoft/azure/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD
 airflow/providers/microsoft/azure/sensors/cosmos.py,sha256=vRrJ8zJnApvuKxHia53tNZUZ7wILWFT3_5cEyMA2M1I,2637
 airflow/providers/microsoft/azure/sensors/data_factory.py,sha256=KGHbz7j_8gl87Munmk42uQo3IVgY3hXJ0mhzF7dLkxM,5552
 airflow/providers/microsoft/azure/sensors/wasb.py,sha256=rcUgAyQMUZKQiSf8sr05K0AE-8MV8Czwwc79hLXVWKw,9043
+airflow/providers/microsoft/azure/serialization/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py,sha256=nJgDlO6Q5saW-ZbRntTZyurwsRLHVM2zwD7YkrdJF_s,1628
 airflow/providers/microsoft/azure/transfers/local_to_adls.py,sha256=8sJhFGTLPapZzFWog2wc09xdOvwxP9Az-fKNSNj4MTM,4176
 airflow/providers/microsoft/azure/transfers/local_to_wasb.py,sha256=4CK9OKBW07BH4oCADES8O0_CMAXRM9sbe_qHsmLvvPA,2936
 airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py,sha256=vLJUqbnri2pAp2G6HTcMuWEmsXC0PcspA4O-otN_eJM,4468
-airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=yVYp4HloUMJ-8GGZrPGGhuuXp0qR7tSkRZBricAEZ4M,8194
+airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=bg_r8vRUgffIvkHf7x4-Ekgu0uzx-YzO2p7F_WZIbaI,8195
 airflow/providers/microsoft/azure/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/triggers/data_factory.py,sha256=W0OsDL4YeJ2zRuBf-HzXXxQ-J7ZtgTUmSHHpC1_CPz0,11128
-airflow/providers/microsoft/azure/triggers/wasb.py,sha256=a5nD0YKWAm7K8mrTNKg6T3hN4wG11-qASMz9DkIrul8,7367
-apache_airflow_providers_microsoft_azure-9.0.1rc1.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
-apache_airflow_providers_microsoft_azure-9.0.1rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_microsoft_azure-9.0.1rc1.dist-info/METADATA,sha256=N08dwUUXPaqqwvczcbGTmC7BPLqkVuTQ_Xu8e6gN4TI,8055
-apache_airflow_providers_microsoft_azure-9.0.1rc1.dist-info/RECORD,,
+airflow/providers/microsoft/azure/triggers/wasb.py,sha256=PkCoOGGNpqOukIeHtgBGrCRPWn4aAOXA6eOOnobzVNw,7429
+apache_airflow_providers_microsoft_azure-10.0.0rc1.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
+apache_airflow_providers_microsoft_azure-10.0.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_microsoft_azure-10.0.0rc1.dist-info/METADATA,sha256=Ce15J2RwHxJtXnp4DwrgYY_cZdKtsXZHHBktB01-iNQ,8117
+apache_airflow_providers_microsoft_azure-10.0.0rc1.dist-info/RECORD,,