apache-airflow-providers-microsoft-azure 12.5.0rc1__py3-none-any.whl → 12.6.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. airflow/providers/microsoft/azure/__init__.py +1 -1
  2. airflow/providers/microsoft/azure/fs/adls.py +7 -7
  3. airflow/providers/microsoft/azure/hooks/adx.py +1 -5
  4. airflow/providers/microsoft/azure/hooks/asb.py +2 -6
  5. airflow/providers/microsoft/azure/hooks/base_azure.py +1 -5
  6. airflow/providers/microsoft/azure/hooks/batch.py +1 -5
  7. airflow/providers/microsoft/azure/hooks/container_registry.py +1 -5
  8. airflow/providers/microsoft/azure/hooks/container_volume.py +1 -5
  9. airflow/providers/microsoft/azure/hooks/cosmos.py +5 -9
  10. airflow/providers/microsoft/azure/hooks/data_factory.py +1 -5
  11. airflow/providers/microsoft/azure/hooks/data_lake.py +2 -6
  12. airflow/providers/microsoft/azure/hooks/fileshare.py +1 -5
  13. airflow/providers/microsoft/azure/hooks/msgraph.py +6 -10
  14. airflow/providers/microsoft/azure/hooks/synapse.py +1 -5
  15. airflow/providers/microsoft/azure/hooks/wasb.py +6 -9
  16. airflow/providers/microsoft/azure/operators/data_factory.py +5 -7
  17. airflow/providers/microsoft/azure/operators/msgraph.py +2 -3
  18. airflow/providers/microsoft/azure/operators/powerbi.py +1 -1
  19. airflow/providers/microsoft/azure/operators/synapse.py +5 -7
  20. airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +5 -4
  21. airflow/providers/microsoft/azure/utils.py +25 -11
  22. airflow/providers/microsoft/azure/version_compat.py +14 -1
  23. {apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info}/METADATA +12 -10
  24. {apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info}/RECORD +26 -26
  25. {apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info}/WHEEL +0 -0
  26. {apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info → apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info}/entry_points.txt +0 -0
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "12.5.0"
32
+ __version__ = "12.6.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -21,11 +21,7 @@ from typing import TYPE_CHECKING, Any
21
21
  from azure.identity import ClientSecretCredential
22
22
 
23
23
  from airflow.providers.microsoft.azure.utils import get_field, parse_blob_account_url
24
-
25
- try:
26
- from airflow.sdk import BaseHook
27
- except ImportError:
28
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
24
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
29
25
 
30
26
  if TYPE_CHECKING:
31
27
  from fsspec import AbstractFileSystem
@@ -77,8 +73,11 @@ def get_fs(conn_id: str | None, storage_options: dict[str, Any] | None = None) -
77
73
  if tenant_id is None and password:
78
74
  options["account_key"] = password
79
75
 
80
- # now take any fields from extras and overlay on these
81
- # add empty field to remove defaults
76
+ # Now take any fields from extras and overlay them on top of existing options.
77
+ # Add empty field to remove defaults.
78
+ # 'account_host' is included to allow overriding the default Azure Blob endpoint domain
79
+ # (e.g., to use a private endpoint or custom domain instead of core.windows.net).
80
+
82
81
  fields = [
83
82
  "account_name",
84
83
  "account_key",
@@ -88,6 +87,7 @@ def get_fs(conn_id: str | None, storage_options: dict[str, Any] | None = None) -
88
87
  "workload_identity_client_id",
89
88
  "workload_identity_tenant_id",
90
89
  "anon",
90
+ "account_host",
91
91
  ]
92
92
  for field in fields:
93
93
  value = get_field(conn_id=conn_id, conn_type=conn_type, extras=extras, field_name=field)
@@ -38,11 +38,7 @@ from airflow.providers.microsoft.azure.utils import (
38
38
  add_managed_identity_connection_widgets,
39
39
  get_sync_default_azure_credential,
40
40
  )
41
-
42
- try:
43
- from airflow.sdk import BaseHook
44
- except ImportError:
45
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
41
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
46
42
 
47
43
  if TYPE_CHECKING:
48
44
  from azure.kusto.data.response import KustoResponseDataSet
@@ -42,11 +42,7 @@ from airflow.providers.microsoft.azure.utils import (
42
42
  get_field,
43
43
  get_sync_default_azure_credential,
44
44
  )
45
-
46
- try:
47
- from airflow.sdk import BaseHook
48
- except ImportError:
49
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
45
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
50
46
 
51
47
  if TYPE_CHECKING:
52
48
  import datetime
@@ -523,7 +519,7 @@ class MessageHook(BaseAzureServiceBusHook):
523
519
  message_creator: Callable[[str], ServiceBusMessage],
524
520
  ):
525
521
  list_messages = [message_creator(body) for body in messages]
526
- sender.send_messages(list_messages) # type: ignore[arg-type]
522
+ sender.send_messages(list_messages)
527
523
 
528
524
  @staticmethod
529
525
  def send_batch_message(
@@ -26,11 +26,7 @@ from airflow.providers.microsoft.azure.utils import (
26
26
  AzureIdentityCredentialAdapter,
27
27
  add_managed_identity_connection_widgets,
28
28
  )
29
-
30
- try:
31
- from airflow.sdk import BaseHook
32
- except ImportError:
33
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
29
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
34
30
 
35
31
 
36
32
  class AzureBaseHook(BaseHook):
@@ -30,11 +30,7 @@ from airflow.providers.microsoft.azure.utils import (
30
30
  add_managed_identity_connection_widgets,
31
31
  get_field,
32
32
  )
33
-
34
- try:
35
- from airflow.sdk import BaseHook
36
- except ImportError:
37
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
33
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
38
34
  from airflow.utils import timezone
39
35
 
40
36
  if TYPE_CHECKING:
@@ -30,11 +30,7 @@ from airflow.providers.microsoft.azure.utils import (
30
30
  get_field,
31
31
  get_sync_default_azure_credential,
32
32
  )
33
-
34
- try:
35
- from airflow.sdk import BaseHook
36
- except ImportError:
37
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
33
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
38
34
 
39
35
 
40
36
  class AzureContainerRegistryHook(BaseHook):
@@ -26,11 +26,7 @@ from airflow.providers.microsoft.azure.utils import (
26
26
  get_field,
27
27
  get_sync_default_azure_credential,
28
28
  )
29
-
30
- try:
31
- from airflow.sdk import BaseHook
32
- except ImportError:
33
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
29
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
34
30
 
35
31
 
36
32
  class AzureContainerVolumeHook(BaseHook):
@@ -41,11 +41,7 @@ from airflow.providers.microsoft.azure.utils import (
41
41
  get_field,
42
42
  get_sync_default_azure_credential,
43
43
  )
44
-
45
- try:
46
- from airflow.sdk import BaseHook
47
- except ImportError:
48
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
44
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
49
45
 
50
46
  if TYPE_CHECKING:
51
47
  PartitionKeyType = str | list[str]
@@ -215,7 +211,7 @@ class AzureCosmosDBHook(BaseHook):
215
211
  .get_database_client(self.__get_database_name(database_name))
216
212
  .query_containers(
217
213
  "SELECT * FROM r WHERE r.id=@id",
218
- parameters=[{"name": "@id", "value": collection_name}], # type: ignore[list-item]
214
+ parameters=[{"name": "@id", "value": collection_name}],
219
215
  )
220
216
  )
221
217
  if not existing_container:
@@ -242,7 +238,7 @@ class AzureCosmosDBHook(BaseHook):
242
238
  .get_database_client(self.__get_database_name(database_name))
243
239
  .query_containers(
244
240
  "SELECT * FROM r WHERE r.id=@id",
245
- parameters=[{"name": "@id", "value": collection_name}], # type: ignore[list-item]
241
+ parameters=[{"name": "@id", "value": collection_name}],
246
242
  )
247
243
  )
248
244
 
@@ -263,7 +259,7 @@ class AzureCosmosDBHook(BaseHook):
263
259
  existing_database = list(
264
260
  self.get_conn().query_databases(
265
261
  "SELECT * FROM r WHERE r.id=@id",
266
- parameters=[{"name": "@id", "value": database_name}], # type: ignore[list-item]
262
+ parameters=[{"name": "@id", "value": database_name}],
267
263
  )
268
264
  )
269
265
  if not existing_database:
@@ -283,7 +279,7 @@ class AzureCosmosDBHook(BaseHook):
283
279
  existing_database = list(
284
280
  self.get_conn().query_databases(
285
281
  "SELECT * FROM r WHERE r.id=@id",
286
- parameters=[{"name": "@id", "value": database_name}], # type: ignore[list-item]
282
+ parameters=[{"name": "@id", "value": database_name}],
287
283
  )
288
284
  )
289
285
 
@@ -54,11 +54,7 @@ from airflow.providers.microsoft.azure.utils import (
54
54
  get_async_default_azure_credential,
55
55
  get_sync_default_azure_credential,
56
56
  )
57
-
58
- try:
59
- from airflow.sdk import BaseHook
60
- except ImportError:
61
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
57
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
62
58
 
63
59
  if TYPE_CHECKING:
64
60
  from azure.core.polling import LROPoller
@@ -39,11 +39,7 @@ from airflow.providers.microsoft.azure.utils import (
39
39
  get_field,
40
40
  get_sync_default_azure_credential,
41
41
  )
42
-
43
- try:
44
- from airflow.sdk import BaseHook
45
- except ImportError:
46
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
42
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
47
43
 
48
44
  Credentials = ClientSecretCredential | AzureIdentityCredentialAdapter | DefaultAzureCredential
49
45
 
@@ -342,7 +338,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
342
338
  """Return the DataLakeServiceClient object (cached)."""
343
339
  return self.get_conn()
344
340
 
345
- def get_conn(self) -> DataLakeServiceClient: # type: ignore[override]
341
+ def get_conn(self) -> DataLakeServiceClient:
346
342
  """Return the DataLakeServiceClient object."""
347
343
  conn = self.get_connection(self.conn_id)
348
344
  extra = conn.extra_dejson or {}
@@ -25,11 +25,7 @@ from airflow.providers.microsoft.azure.utils import (
25
25
  add_managed_identity_connection_widgets,
26
26
  get_sync_default_azure_credential,
27
27
  )
28
-
29
- try:
30
- from airflow.sdk import BaseHook
31
- except ImportError:
32
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
28
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
33
29
 
34
30
 
35
31
  class AzureFileShareHook(BaseHook):
@@ -50,11 +50,7 @@ from airflow.exceptions import (
50
50
  AirflowException,
51
51
  AirflowNotFoundException,
52
52
  )
53
-
54
- try:
55
- from airflow.sdk import BaseHook
56
- except ImportError:
57
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
53
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
58
54
 
59
55
  if TYPE_CHECKING:
60
56
  from azure.identity._internal.client_credential_base import ClientCredentialBase
@@ -297,7 +293,7 @@ class KiotaRequestAdapterHook(BaseHook):
297
293
  proxies=proxies,
298
294
  )
299
295
  http_client = GraphClientFactory.create_with_default_middleware(
300
- api_version=api_version, # type: ignore
296
+ api_version=api_version,
301
297
  client=httpx.AsyncClient(
302
298
  mounts=httpx_proxies,
303
299
  timeout=Timeout(timeout=self.timeout),
@@ -305,10 +301,10 @@ class KiotaRequestAdapterHook(BaseHook):
305
301
  trust_env=trust_env,
306
302
  base_url=base_url,
307
303
  ),
308
- host=host, # type: ignore
304
+ host=host,
309
305
  )
310
306
  auth_provider = AzureIdentityAuthenticationProvider(
311
- credentials=credentials, # type: ignore
307
+ credentials=credentials,
312
308
  scopes=scopes,
313
309
  allowed_hosts=allowed_hosts,
314
310
  )
@@ -364,7 +360,7 @@ class KiotaRequestAdapterHook(BaseHook):
364
360
  self.log.info("MSAL Proxies: %s", msal_proxies)
365
361
  if certificate_path or certificate_data:
366
362
  return CertificateCredential(
367
- tenant_id=tenant_id, # type: ignore
363
+ tenant_id=tenant_id,
368
364
  client_id=login, # type: ignore
369
365
  password=password,
370
366
  certificate_path=certificate_path,
@@ -375,7 +371,7 @@ class KiotaRequestAdapterHook(BaseHook):
375
371
  connection_verify=verify,
376
372
  )
377
373
  return ClientSecretCredential(
378
- tenant_id=tenant_id, # type: ignore
374
+ tenant_id=tenant_id,
379
375
  client_id=login, # type: ignore
380
376
  client_secret=password, # type: ignore
381
377
  authority=authority,
@@ -30,11 +30,7 @@ from airflow.providers.microsoft.azure.utils import (
30
30
  get_field,
31
31
  get_sync_default_azure_credential,
32
32
  )
33
-
34
- try:
35
- from airflow.sdk import BaseHook
36
- except ImportError:
37
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
33
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
38
34
 
39
35
  if TYPE_CHECKING:
40
36
  from azure.synapse.artifacts.models import CreateRunResponse, PipelineRun
@@ -52,15 +52,12 @@ from airflow.providers.microsoft.azure.utils import (
52
52
  get_sync_default_azure_credential,
53
53
  parse_blob_account_url,
54
54
  )
55
-
56
- try:
57
- from airflow.sdk import BaseHook
58
- except ImportError:
59
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
55
+ from airflow.providers.microsoft.azure.version_compat import BaseHook
60
56
 
61
57
  if TYPE_CHECKING:
62
58
  from azure.core.credentials import TokenCredential
63
59
  from azure.storage.blob._models import BlobProperties
60
+ from azure.storage.blob.aio._list_blobs_helper import BlobPrefix
64
61
 
65
62
  AsyncCredentials = AsyncClientSecretCredential | AsyncDefaultAzureCredential
66
63
 
@@ -223,7 +220,7 @@ class WasbHook(BaseHook):
223
220
  )
224
221
 
225
222
  # TODO: rework the interface as it might also return AsyncContainerClient
226
- def _get_container_client(self, container_name: str) -> ContainerClient: # type: ignore[override]
223
+ def _get_container_client(self, container_name: str) -> ContainerClient:
227
224
  """
228
225
  Instantiate a container client.
229
226
 
@@ -625,7 +622,7 @@ class WasbAsyncHook(WasbHook):
625
622
  self.blob_service_client = AsyncBlobServiceClient(
626
623
  account_url=account_url,
627
624
  credential=token_credential,
628
- **extra, # type:ignore[arg-type]
625
+ **extra,
629
626
  )
630
627
  return self.blob_service_client
631
628
 
@@ -716,7 +713,7 @@ class WasbAsyncHook(WasbHook):
716
713
  include: list[str] | None = None,
717
714
  delimiter: str = "/",
718
715
  **kwargs: Any,
719
- ) -> list[BlobProperties]:
716
+ ) -> list[BlobProperties | BlobPrefix]:
720
717
  """
721
718
  List blobs in a given container.
722
719
 
@@ -729,7 +726,7 @@ class WasbAsyncHook(WasbHook):
729
726
  :param delimiter: filters objects based on the delimiter (for e.g '.csv')
730
727
  """
731
728
  container = self._get_container_client(container_name)
732
- blob_list: list[BlobProperties] = []
729
+ blob_list: list[BlobProperties | BlobPrefix] = []
733
730
  blobs = container.walk_blobs(name_starts_with=prefix, include=include, delimiter=delimiter, **kwargs)
734
731
  async for blob in blobs:
735
732
  blob_list.append(blob)
@@ -31,12 +31,10 @@ from airflow.providers.microsoft.azure.hooks.data_factory import (
31
31
  get_field,
32
32
  )
33
33
  from airflow.providers.microsoft.azure.triggers.data_factory import AzureDataFactoryTrigger
34
-
35
- try:
36
- from airflow.sdk import BaseHook
37
- except ImportError:
38
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
39
- from airflow.providers.microsoft.azure.version_compat import BaseOperator
34
+ from airflow.providers.microsoft.azure.version_compat import (
35
+ BaseHook,
36
+ BaseOperator,
37
+ )
40
38
  from airflow.utils.log.logging_mixin import LoggingMixin
41
39
 
42
40
  if TYPE_CHECKING:
@@ -49,7 +47,7 @@ if AIRFLOW_V_3_0_PLUS:
49
47
  from airflow.sdk import BaseOperatorLink
50
48
  from airflow.sdk.execution_time.xcom import XCom
51
49
  else:
52
- from airflow.models import XCom # type: ignore[no-redef]
50
+ from airflow.models import XCom
53
51
  from airflow.models.baseoperatorlink import BaseOperatorLink # type: ignore[no-redef]
54
52
 
55
53
 
@@ -32,8 +32,7 @@ from airflow.providers.microsoft.azure.triggers.msgraph import (
32
32
  MSGraphTrigger,
33
33
  ResponseSerializer,
34
34
  )
35
- from airflow.providers.microsoft.azure.version_compat import BaseOperator
36
- from airflow.utils.xcom import XCOM_RETURN_KEY
35
+ from airflow.providers.microsoft.azure.version_compat import XCOM_RETURN_KEY, BaseOperator
37
36
 
38
37
  if TYPE_CHECKING:
39
38
  from io import BytesIO
@@ -321,7 +320,7 @@ class MSGraphAsyncOperator(BaseOperator):
321
320
  if top and odata_count:
322
321
  if len(response.get("value", [])) == top and context:
323
322
  results = operator.pull_xcom(context)
324
- skip = sum([len(result["value"]) for result in results]) + top if results else top # type: ignore
323
+ skip = sum([len(result["value"]) for result in results]) + top if results else top
325
324
  query_parameters["$skip"] = skip
326
325
  return operator.url, query_parameters
327
326
  return response.get("@odata.nextLink"), operator.query_parameters
@@ -50,7 +50,7 @@ class PowerBILink(BaseOperatorLink):
50
50
 
51
51
  def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey):
52
52
  url = (
53
- "https://app.powerbi.com" # type: ignore[attr-defined]
53
+ "https://app.powerbi.com"
54
54
  f"/groups/{operator.group_id}/datasets/{operator.dataset_id}" # type: ignore[attr-defined]
55
55
  "/details?experience=power-bi"
56
56
  )
@@ -29,12 +29,10 @@ from airflow.providers.microsoft.azure.hooks.synapse import (
29
29
  AzureSynapsePipelineRunStatus,
30
30
  AzureSynapseSparkBatchRunStatus,
31
31
  )
32
- from airflow.providers.microsoft.azure.version_compat import BaseOperator
33
-
34
- try:
35
- from airflow.sdk import BaseHook
36
- except ImportError:
37
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
32
+ from airflow.providers.microsoft.azure.version_compat import (
33
+ BaseHook,
34
+ BaseOperator,
35
+ )
38
36
 
39
37
  if TYPE_CHECKING:
40
38
  from azure.synapse.spark.models import SparkBatchJobOptions
@@ -48,7 +46,7 @@ if AIRFLOW_V_3_0_PLUS:
48
46
  from airflow.sdk import BaseOperatorLink
49
47
  from airflow.sdk.execution_time.xcom import XCom
50
48
  else:
51
- from airflow.models import XCom # type: ignore[no-redef]
49
+ from airflow.models import XCom
52
50
  from airflow.models.baseoperatorlink import BaseOperatorLink # type: ignore[no-redef]
53
51
 
54
52
 
@@ -21,7 +21,7 @@ import csv
21
21
  import os
22
22
  from collections.abc import Sequence
23
23
  from tempfile import TemporaryDirectory
24
- from typing import TYPE_CHECKING, Any
24
+ from typing import TYPE_CHECKING, Any, Literal
25
25
 
26
26
  from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
27
27
  from airflow.providers.microsoft.azure.version_compat import BaseOperator
@@ -45,6 +45,7 @@ class OracleToAzureDataLakeOperator(BaseOperator):
45
45
  :param encoding: encoding type for the file.
46
46
  :param quotechar: Character to use in quoting.
47
47
  :param quoting: Quoting strategy. See csv library for more information.
48
+ It can take on any of the csv.QUOTE_* constants.
48
49
  """
49
50
 
50
51
  template_fields: Sequence[str] = ("filename", "sql", "sql_params")
@@ -63,7 +64,7 @@ class OracleToAzureDataLakeOperator(BaseOperator):
63
64
  delimiter: str = ",",
64
65
  encoding: str = "utf-8",
65
66
  quotechar: str = '"',
66
- quoting: int = csv.QUOTE_MINIMAL,
67
+ quoting: Literal[0, 1, 2, 3] = csv.QUOTE_MINIMAL,
67
68
  **kwargs,
68
69
  ) -> None:
69
70
  super().__init__(**kwargs)
@@ -98,7 +99,7 @@ class OracleToAzureDataLakeOperator(BaseOperator):
98
99
 
99
100
  self.log.info("Dumping Oracle query results to local file")
100
101
  conn = oracle_hook.get_conn()
101
- cursor = conn.cursor() # type: ignore[attr-defined]
102
+ cursor = conn.cursor()
102
103
  cursor.execute(self.sql, self.sql_params)
103
104
 
104
105
  with TemporaryDirectory(prefix="airflow_oracle_to_azure_op_") as temp:
@@ -108,4 +109,4 @@ class OracleToAzureDataLakeOperator(BaseOperator):
108
109
  os.path.join(temp, self.filename), os.path.join(self.azure_data_lake_path, self.filename)
109
110
  )
110
111
  cursor.close()
111
- conn.close() # type: ignore[attr-defined]
112
+ conn.close()
@@ -60,17 +60,14 @@ def _get_default_azure_credential(
60
60
  *,
61
61
  managed_identity_client_id: str | None = None,
62
62
  workload_identity_tenant_id: str | None = None,
63
- use_async: bool = False,
64
- ) -> DefaultAzureCredential | AsyncDefaultAzureCredential:
63
+ ) -> DefaultAzureCredential:
65
64
  """
66
65
  Get DefaultAzureCredential based on provided arguments.
67
66
 
68
67
  If managed_identity_client_id and workload_identity_tenant_id are provided, this function returns
69
68
  DefaultAzureCredential with managed identity.
70
69
  """
71
- credential_cls: type[AsyncDefaultAzureCredential] | type[DefaultAzureCredential] = (
72
- AsyncDefaultAzureCredential if use_async else DefaultAzureCredential
73
- )
70
+ credential_cls: type[DefaultAzureCredential] = DefaultAzureCredential
74
71
  if managed_identity_client_id and workload_identity_tenant_id:
75
72
  return credential_cls(
76
73
  managed_identity_client_id=managed_identity_client_id,
@@ -80,14 +77,31 @@ def _get_default_azure_credential(
80
77
  return credential_cls()
81
78
 
82
79
 
83
- get_sync_default_azure_credential: partial[DefaultAzureCredential] = partial(
84
- _get_default_azure_credential, # type: ignore[arg-type]
85
- use_async=False,
86
- )
80
+ def _get_async_default_azure_credential(
81
+ *,
82
+ managed_identity_client_id: str | None = None,
83
+ workload_identity_tenant_id: str | None = None,
84
+ ) -> AsyncDefaultAzureCredential:
85
+ """
86
+ Get AsyncDefaultAzureCredential based on provided arguments.
87
+
88
+ If managed_identity_client_id and workload_identity_tenant_id are provided, this function returns
89
+ AsyncDefaultAzureCredential with managed identity.
90
+ """
91
+ credential_cls: type[AsyncDefaultAzureCredential] = AsyncDefaultAzureCredential
92
+ if managed_identity_client_id and workload_identity_tenant_id:
93
+ return credential_cls(
94
+ managed_identity_client_id=managed_identity_client_id,
95
+ workload_identity_tenant_id=workload_identity_tenant_id,
96
+ additionally_allowed_tenants=[workload_identity_tenant_id],
97
+ )
98
+ return credential_cls()
99
+
100
+
101
+ get_sync_default_azure_credential: partial[DefaultAzureCredential] = partial(_get_default_azure_credential)
87
102
 
88
103
  get_async_default_azure_credential: partial[AsyncDefaultAzureCredential] = partial(
89
- _get_default_azure_credential, # type: ignore[arg-type]
90
- use_async=True,
104
+ _get_async_default_azure_credential
91
105
  )
92
106
 
93
107
 
@@ -33,6 +33,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
33
33
 
34
34
 
35
35
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
36
+ AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
36
37
 
37
38
  if AIRFLOW_V_3_0_PLUS:
38
39
  from airflow.sdk import (
@@ -41,12 +42,24 @@ if AIRFLOW_V_3_0_PLUS:
41
42
  BaseSensorOperator,
42
43
  )
43
44
  else:
44
- from airflow.models import BaseOperator, BaseOperatorLink # type: ignore[no-redef]
45
+ from airflow.models import BaseOperator, BaseOperatorLink
45
46
  from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
46
47
 
48
+ if AIRFLOW_V_3_1_PLUS:
49
+ from airflow.sdk.bases.xcom import BaseXCom
50
+
51
+ XCOM_RETURN_KEY = BaseXCom.XCOM_RETURN_KEY
52
+ from airflow.sdk import BaseHook
53
+ else:
54
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
55
+ from airflow.utils.xcom import XCOM_RETURN_KEY # type: ignore[no-redef]
56
+
47
57
  __all__ = [
48
58
  "AIRFLOW_V_3_0_PLUS",
59
+ "AIRFLOW_V_3_1_PLUS",
60
+ "BaseHook",
49
61
  "BaseOperator",
50
62
  "BaseOperatorLink",
51
63
  "BaseSensorOperator",
64
+ "XCOM_RETURN_KEY",
52
65
  ]
@@ -1,11 +1,11 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-microsoft-azure
3
- Version: 12.5.0rc1
3
+ Version: 12.6.0rc1
4
4
  Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
5
5
  Keywords: airflow-provider,microsoft.azure,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
- Requires-Python: ~=3.10
8
+ Requires-Python: >=3.10
9
9
  Description-Content-Type: text/x-rst
10
10
  Classifier: Development Status :: 5 - Production/Stable
11
11
  Classifier: Environment :: Console
@@ -18,6 +18,7 @@ Classifier: License :: OSI Approved :: Apache Software License
18
18
  Classifier: Programming Language :: Python :: 3.10
19
19
  Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
21
22
  Classifier: Topic :: System :: Monitoring
22
23
  Requires-Dist: apache-airflow>=2.10.0rc1
23
24
  Requires-Dist: adlfs>=2023.10.0
@@ -29,7 +30,7 @@ Requires-Dist: azure-identity>=1.3.1
29
30
  Requires-Dist: azure-keyvault-secrets>=4.1.0
30
31
  Requires-Dist: azure-mgmt-datalake-store>=0.5.0
31
32
  Requires-Dist: azure-mgmt-resource>=2.2.0
32
- Requires-Dist: azure-storage-blob>=12.14.0
33
+ Requires-Dist: azure-storage-blob>=12.26.0
33
34
  Requires-Dist: azure-mgmt-storage>=16.0.0
34
35
  Requires-Dist: azure-storage-file-share>=12.7.0
35
36
  Requires-Dist: azure-servicebus>=7.12.1
@@ -55,8 +56,8 @@ Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
55
56
  Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
56
57
  Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
57
58
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
58
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-microsoft-azure/12.5.0/changelog.html
59
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-microsoft-azure/12.5.0
59
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-microsoft-azure/12.6.0/changelog.html
60
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-microsoft-azure/12.6.0
60
61
  Project-URL: Mastodon, https://fosstodon.org/@airflow
61
62
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
62
63
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -91,8 +92,9 @@ Provides-Extra: sftp
91
92
 
92
93
  Package ``apache-airflow-providers-microsoft-azure``
93
94
 
94
- Release: ``12.5.0``
95
+ Release: ``12.6.0``
95
96
 
97
+ Release Date: ``|PypiReleaseDate|``
96
98
 
97
99
  `Microsoft Azure <https://azure.microsoft.com/>`__
98
100
 
@@ -104,7 +106,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
104
106
  are in ``airflow.providers.microsoft.azure`` python package.
105
107
 
106
108
  You can find package information and changelog for the provider
107
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.5.0/>`_.
109
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.6.0/>`_.
108
110
 
109
111
  Installation
110
112
  ------------
@@ -113,7 +115,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
113
115
  for the minimum Airflow version supported) via
114
116
  ``pip install apache-airflow-providers-microsoft-azure``
115
117
 
116
- The package supports the following python versions: 3.10,3.11,3.12
118
+ The package supports the following python versions: 3.10,3.11,3.12,3.13
117
119
 
118
120
  Requirements
119
121
  ------------
@@ -131,7 +133,7 @@ PIP package Version required
131
133
  ``azure-keyvault-secrets`` ``>=4.1.0``
132
134
  ``azure-mgmt-datalake-store`` ``>=0.5.0``
133
135
  ``azure-mgmt-resource`` ``>=2.2.0``
134
- ``azure-storage-blob`` ``>=12.14.0``
136
+ ``azure-storage-blob`` ``>=12.26.0``
135
137
  ``azure-mgmt-storage`` ``>=16.0.0``
136
138
  ``azure-storage-file-share`` ``>=12.7.0``
137
139
  ``azure-servicebus`` ``>=7.12.1``
@@ -177,5 +179,5 @@ Dependent package
177
179
  ================================================================================================================== =================
178
180
 
179
181
  The changelog for the provider package can be found in the
180
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.5.0/changelog.html>`_.
182
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.6.0/changelog.html>`_.
181
183
 
@@ -1,26 +1,26 @@
1
1
  airflow/providers/microsoft/azure/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/microsoft/azure/__init__.py,sha256=PtlPRXB-MkStXtoAfrr0Pg3uWVGBlah-235qeDz6Tfg,1505
2
+ airflow/providers/microsoft/azure/__init__.py,sha256=geDknD-DKADPmpVqCNJttFFp5iNupi2beaKacdWyfns,1505
3
3
  airflow/providers/microsoft/azure/get_provider_info.py,sha256=AeIwe8L-5p1z7IVF9Yc_mmQyAe149r6YVCyRpjfc3u0,18946
4
- airflow/providers/microsoft/azure/utils.py,sha256=0iDdxz-TujH181MoI2gRf6ppOVQfj_fKGzrHAcnx7uU,8415
5
- airflow/providers/microsoft/azure/version_compat.py,sha256=K8FMTtIkYUvO9DoDLSw6_5t3KxB5qy3WD8eTEB5L0hE,1959
4
+ airflow/providers/microsoft/azure/utils.py,sha256=KU9vHQRUhqTbC30GvmuZbL8rPBAziemM1oaT4rZp6K8,9015
5
+ airflow/providers/microsoft/azure/version_compat.py,sha256=pBBF2QxTvpmUMxTE4MfbLfZmydT8FgSxiMMYLAsTTIo,2388
6
6
  airflow/providers/microsoft/azure/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
- airflow/providers/microsoft/azure/fs/adls.py,sha256=DDfaWiJPwXyz301Fky-Rqf6GfGt_zj3GXQIvdZjY2Lo,3785
7
+ airflow/providers/microsoft/azure/fs/adls.py,sha256=1La15shfckTX6tem3O34Gdneyd0TXFlFwABqIRHUd1A,3937
8
8
  airflow/providers/microsoft/azure/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
9
- airflow/providers/microsoft/azure/hooks/adx.py,sha256=Gnisga6k9x6p6nBacoktGbx_Oeq8gQq1WRJmaQD3OkE,9753
10
- airflow/providers/microsoft/azure/hooks/asb.py,sha256=O0jqrVrM0O8Z2unsXHmZwd6Hc1u0eJgyK3sDmmngPDU,30835
11
- airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=POqjsJD4yyzFxXsSy5I-pImamnxuxtdjZ5L9ENBT4FM,5489
12
- airflow/providers/microsoft/azure/hooks/batch.py,sha256=r3YeSpSVJlaUbahQQ_qFvhTIss5Aeef4ORVaukAncoY,16154
9
+ airflow/providers/microsoft/azure/hooks/adx.py,sha256=NoPbs-CWbNH-AxvqtF3cKdV0-YFr9AlqCh2qg-DuSKw,9677
10
+ airflow/providers/microsoft/azure/hooks/asb.py,sha256=OsCvRP7uMEi7xl_meS09ts7q9E7yjF_Unq5ylki96as,30733
11
+ airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=urqIC8fWdmo8fPoD1MuVZLRyb9ynHGi7sOY1iBc-25g,5413
12
+ airflow/providers/microsoft/azure/hooks/batch.py,sha256=VZT5cXQOCEN64O5vWqlLAFc3hx4lK2OZ9nPpfER4nPc,16078
13
13
  airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=0MrPLk06mg3Jm18VlteCAse_agK8ysK9dUDjbdRL66Q,7201
14
- airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=hzgjUSSNKp9PGY-VGhEnwQI8mqiDNvLa8lB9o4BriT8,5015
15
- airflow/providers/microsoft/azure/hooks/container_volume.py,sha256=RPRxp1XxASU-g_QiH10uKkaI3XTzubkZu7GY-hYD_1Q,5883
16
- airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=xskA1CIwOjZk3rPefSZVZZMu4fEfZWvFHwhF7Uq3ueM,17683
17
- airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=VllLo3O84MLdG_fc_oWpZcmGX4skvwvnXCtsFAEyCLo,45104
18
- airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=t9LpVJ4uVF_Sb_-lTuEO9dsTAbGpvuITLhD1dMvqhwA,23990
19
- airflow/providers/microsoft/azure/hooks/fileshare.py,sha256=7vfZzX0Ln2bdjiNBbDwOa4jIhXpMvQ1Qzoxwd976hK0,10866
20
- airflow/providers/microsoft/azure/hooks/msgraph.py,sha256=CKO0qoU3l57gbXVoxuzQGHlsqrVmKPp-i4NLUx1qkHw,21315
14
+ airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=YAo-wSTVnEAzfoNz-w5bIcmcHe5KSvrk4nRzKdpcbE8,4939
15
+ airflow/providers/microsoft/azure/hooks/container_volume.py,sha256=m6Jg8RnKwzGQQt3UDxLkU3sL7_bGZv-136InbQpGKw0,5807
16
+ airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=gw90ZaoaPzTpE27eVM5zbJDeWW38LdzrQO3YwWcaNqI,17499
17
+ airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=oo1qcyTDDhJcclq0gE3eVJsFZ_DbuT1PXhTz0NXawds,45028
18
+ airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=1O6eRxe-4Hm5wkWH2zeS4nxc3CYUSFXOu8ySz-UtZJ8,23888
19
+ airflow/providers/microsoft/azure/hooks/fileshare.py,sha256=x8hXYHE6qUjJDTa1qp47DS34eZBeBrwE4cZkk1i0u48,10790
20
+ airflow/providers/microsoft/azure/hooks/msgraph.py,sha256=W7tASxPQnBQciIOYZi2hQo0-QtemMO_YTUCkkfH2pDk,21159
21
21
  airflow/providers/microsoft/azure/hooks/powerbi.py,sha256=_Z-PWDcZxwSmyP9uXBEEYNRrSIlGPFVUtDrtVk3KKxE,9778
22
- airflow/providers/microsoft/azure/hooks/synapse.py,sha256=95sd08YzcCTwxqxBeVp8BPTrfeDq_xJSQLSMXRKbFJs,16153
23
- airflow/providers/microsoft/azure/hooks/wasb.py,sha256=A4QEh-qC9T6eqs0exFg5QpGGz67yJQ5395FfZ-NO0nc,31652
22
+ airflow/providers/microsoft/azure/hooks/synapse.py,sha256=PC7whM9SOuc29hu9mU1DOvi5xmevGLS9Gadql2u0KYM,16077
23
+ airflow/providers/microsoft/azure/hooks/wasb.py,sha256=4rVsIbu6t782G-_lzkM3Ddig5iE5pP_72eZEqNCZC-A,31620
24
24
  airflow/providers/microsoft/azure/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
25
25
  airflow/providers/microsoft/azure/log/wasb_task_handler.py,sha256=s0BCDf7fritIvuwznFJTUEE03GqfecKy-stf1niWr3o,9926
26
26
  airflow/providers/microsoft/azure/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
@@ -30,10 +30,10 @@ airflow/providers/microsoft/azure/operators/asb.py,sha256=RzipreLn5DOA8oaE_-ifx4
30
30
  airflow/providers/microsoft/azure/operators/batch.py,sha256=NYjaxYcFwficmJb7l3Vjdm2JeW9MFxQ3I0trIIwSb1E,16304
31
31
  airflow/providers/microsoft/azure/operators/container_instances.py,sha256=8ti1L50pQQcRVL_7noKo6IXoobi8hO0-A4SlgcbIhX0,18672
32
32
  airflow/providers/microsoft/azure/operators/cosmos.py,sha256=t7XWU4L5W7tr33J6lC3_mIrFANW_JE1QOYpSFydkBxs,2848
33
- airflow/providers/microsoft/azure/operators/data_factory.py,sha256=mwHcuWObAQmtFG3pjVW3l7w_xuQddz_arQHXPdrHOq4,12946
34
- airflow/providers/microsoft/azure/operators/msgraph.py,sha256=obqWPwN0DA6HLemLWSolrEhATGwa0zHAO3l83VxoxvU,14209
35
- airflow/providers/microsoft/azure/operators/powerbi.py,sha256=d0GhzWxpBAM_Yq5mNDtdVpsi5kt6yQfDDJme21hCG-U,11893
36
- airflow/providers/microsoft/azure/operators/synapse.py,sha256=8IO-uQLNztVfuevkXuRYhFAXUqnW9QnE66MK_XGliZc,12891
33
+ airflow/providers/microsoft/azure/operators/data_factory.py,sha256=TQz7zQ3zBKOHhackZxMTdBFo66RenoPwcO8gXaLVXIs,12797
34
+ airflow/providers/microsoft/azure/operators/msgraph.py,sha256=Mz0r3Gz3Pv_mg_myXEVcuZEnJ-GgjIwJmLqk2_yt8lI,14163
35
+ airflow/providers/microsoft/azure/operators/powerbi.py,sha256=qcIMFKemQLgiJKQBxciBl1cT8nOwYP-7IPzPosUAAo8,11863
36
+ airflow/providers/microsoft/azure/operators/synapse.py,sha256=l-I72OIkiwpTJefP478Sxak_FnwcRSj_wIi4eDplGxQ,12742
37
37
  airflow/providers/microsoft/azure/operators/wasb_delete_blob.py,sha256=Rigi5xFXkHFNcX4-VnA4fFxJlKHlevdsCExX6VJWCts,2748
38
38
  airflow/providers/microsoft/azure/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
39
39
  airflow/providers/microsoft/azure/secrets/key_vault.py,sha256=EmtGfyBtfefGu1ZTtZ5WswIleOx-nx8wst7xfcua2rI,8962
@@ -45,7 +45,7 @@ airflow/providers/microsoft/azure/sensors/wasb.py,sha256=SXhgU2L1iTULybQzIw0059B
45
45
  airflow/providers/microsoft/azure/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
46
46
  airflow/providers/microsoft/azure/transfers/local_to_adls.py,sha256=GZl16M2V9xIDYmUyERq6-RxvbP_0VVcMmeG9Z2jLzkU,4237
47
47
  airflow/providers/microsoft/azure/transfers/local_to_wasb.py,sha256=IqV18vzBwVbivfgOtsqR7q1pOs3uZBNDgoF_wrK1yAE,2997
48
- airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py,sha256=oS2WOKdiCVA-qz4PKTq-r2AoewZ4yaw2fEJ-QpmcFdg,4529
48
+ airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py,sha256=hjmjs00O4cbn2tgB9_UbnZ8Dn2DAUM6C2aamzMfyYb4,4550
49
49
  airflow/providers/microsoft/azure/transfers/s3_to_wasb.py,sha256=jQgicPw4uGyRSss9PA05TKqHdch2CNO8bMdNVEvVir0,12523
50
50
  airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=l6kdtxq3SvCUo_Hx60t1YiX4bP8lF3WmbglICHk24vo,8409
51
51
  airflow/providers/microsoft/azure/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -53,7 +53,7 @@ airflow/providers/microsoft/azure/triggers/data_factory.py,sha256=U3vY_pj4yORxE7
53
53
  airflow/providers/microsoft/azure/triggers/msgraph.py,sha256=l7A50JoBebiKhhsxILtLvuoulyIn59BVdjTvdAezdpk,8704
54
54
  airflow/providers/microsoft/azure/triggers/powerbi.py,sha256=TD2VYR3yj8JwRMR6QpqWM_KuBsS9qbghqV_2aBKjCus,15798
55
55
  airflow/providers/microsoft/azure/triggers/wasb.py,sha256=RF-C6iqDEs6_pWireCWZXqxcqWK-sFJ695Okdd_EJOA,7456
56
- apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
57
- apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
58
- apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info/METADATA,sha256=fiATPGy6wjN_pSwzpMY7dxdGQq12v_NlHMkczeMKyaI,8791
59
- apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info/RECORD,,
56
+ apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
57
+ apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
58
+ apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info/METADATA,sha256=kAr9nQo0IytHJj7FXnxECPdp4fAyK5q0OKfmfngPamM,8883
59
+ apache_airflow_providers_microsoft_azure-12.6.0rc1.dist-info/RECORD,,