apache-airflow-providers-microsoft-azure 12.4.1rc1__py3-none-any.whl → 12.5.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. airflow/providers/microsoft/azure/__init__.py +1 -1
  2. airflow/providers/microsoft/azure/fs/adls.py +5 -1
  3. airflow/providers/microsoft/azure/hooks/adx.py +9 -5
  4. airflow/providers/microsoft/azure/hooks/asb.py +7 -2
  5. airflow/providers/microsoft/azure/hooks/base_azure.py +6 -2
  6. airflow/providers/microsoft/azure/hooks/batch.py +5 -1
  7. airflow/providers/microsoft/azure/hooks/container_instance.py +3 -1
  8. airflow/providers/microsoft/azure/hooks/container_registry.py +9 -3
  9. airflow/providers/microsoft/azure/hooks/container_volume.py +7 -3
  10. airflow/providers/microsoft/azure/hooks/cosmos.py +10 -5
  11. airflow/providers/microsoft/azure/hooks/data_factory.py +9 -4
  12. airflow/providers/microsoft/azure/hooks/data_lake.py +14 -9
  13. airflow/providers/microsoft/azure/hooks/fileshare.py +5 -1
  14. airflow/providers/microsoft/azure/hooks/msgraph.py +10 -4
  15. airflow/providers/microsoft/azure/hooks/powerbi.py +5 -1
  16. airflow/providers/microsoft/azure/hooks/synapse.py +7 -3
  17. airflow/providers/microsoft/azure/hooks/wasb.py +39 -23
  18. airflow/providers/microsoft/azure/operators/adls.py +1 -1
  19. airflow/providers/microsoft/azure/operators/adx.py +1 -1
  20. airflow/providers/microsoft/azure/operators/asb.py +3 -3
  21. airflow/providers/microsoft/azure/operators/batch.py +1 -1
  22. airflow/providers/microsoft/azure/operators/container_instances.py +1 -1
  23. airflow/providers/microsoft/azure/operators/cosmos.py +1 -1
  24. airflow/providers/microsoft/azure/operators/data_factory.py +6 -2
  25. airflow/providers/microsoft/azure/operators/msgraph.py +3 -4
  26. airflow/providers/microsoft/azure/operators/powerbi.py +9 -9
  27. airflow/providers/microsoft/azure/operators/synapse.py +6 -2
  28. airflow/providers/microsoft/azure/operators/wasb_delete_blob.py +1 -1
  29. airflow/providers/microsoft/azure/sensors/cosmos.py +6 -1
  30. airflow/providers/microsoft/azure/sensors/data_factory.py +6 -1
  31. airflow/providers/microsoft/azure/sensors/msgraph.py +8 -3
  32. airflow/providers/microsoft/azure/sensors/wasb.py +6 -1
  33. airflow/providers/microsoft/azure/transfers/local_to_adls.py +1 -1
  34. airflow/providers/microsoft/azure/transfers/local_to_wasb.py +1 -1
  35. airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +1 -1
  36. airflow/providers/microsoft/azure/transfers/s3_to_wasb.py +1 -1
  37. airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +3 -0
  38. airflow/providers/microsoft/azure/triggers/powerbi.py +6 -2
  39. airflow/providers/microsoft/azure/version_compat.py +17 -0
  40. {apache_airflow_providers_microsoft_azure-12.4.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info}/METADATA +18 -19
  41. apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info/RECORD +59 -0
  42. apache_airflow_providers_microsoft_azure-12.4.1rc1.dist-info/RECORD +0 -59
  43. {apache_airflow_providers_microsoft_azure-12.4.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info}/WHEEL +0 -0
  44. {apache_airflow_providers_microsoft_azure-12.4.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.5.0rc1.dist-info}/entry_points.txt +0 -0
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "12.4.1"
32
+ __version__ = "12.5.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -20,9 +20,13 @@ from typing import TYPE_CHECKING, Any
20
20
 
21
21
  from azure.identity import ClientSecretCredential
22
22
 
23
- from airflow.hooks.base import BaseHook
24
23
  from airflow.providers.microsoft.azure.utils import get_field, parse_blob_account_url
25
24
 
25
+ try:
26
+ from airflow.sdk import BaseHook
27
+ except ImportError:
28
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
29
+
26
30
  if TYPE_CHECKING:
27
31
  from fsspec import AbstractFileSystem
28
32
 
@@ -28,18 +28,22 @@ from __future__ import annotations
28
28
 
29
29
  import warnings
30
30
  from functools import cached_property
31
- from typing import TYPE_CHECKING, Any
31
+ from typing import TYPE_CHECKING, Any, cast
32
32
 
33
33
  from azure.kusto.data import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
34
34
  from azure.kusto.data.exceptions import KustoServiceError
35
35
 
36
36
  from airflow.exceptions import AirflowException
37
- from airflow.hooks.base import BaseHook
38
37
  from airflow.providers.microsoft.azure.utils import (
39
38
  add_managed_identity_connection_widgets,
40
39
  get_sync_default_azure_credential,
41
40
  )
42
41
 
42
+ try:
43
+ from airflow.sdk import BaseHook
44
+ except ImportError:
45
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
46
+
43
47
  if TYPE_CHECKING:
44
48
  from azure.kusto.data.response import KustoResponseDataSet
45
49
 
@@ -170,7 +174,7 @@ class AzureDataExplorerHook(BaseHook):
170
174
  if auth_method == "AAD_APP":
171
175
  tenant = get_required_param("tenant")
172
176
  kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
173
- cluster, conn.login, conn.password, tenant
177
+ cluster, cast("str", conn.login), cast("str", conn.password), tenant
174
178
  )
175
179
  elif auth_method == "AAD_APP_CERT":
176
180
  certificate = get_required_param("certificate")
@@ -178,7 +182,7 @@ class AzureDataExplorerHook(BaseHook):
178
182
  tenant = get_required_param("tenant")
179
183
  kcsb = KustoConnectionStringBuilder.with_aad_application_certificate_authentication(
180
184
  cluster,
181
- conn.login,
185
+ cast("str", conn.login),
182
186
  certificate,
183
187
  thumbprint,
184
188
  tenant,
@@ -186,7 +190,7 @@ class AzureDataExplorerHook(BaseHook):
186
190
  elif auth_method == "AAD_CREDS":
187
191
  tenant = get_required_param("tenant")
188
192
  kcsb = KustoConnectionStringBuilder.with_aad_user_password_authentication(
189
- cluster, conn.login, conn.password, tenant
193
+ cluster, cast("str", conn.login), cast("str", conn.password), tenant
190
194
  )
191
195
  elif auth_method == "AAD_DEVICE":
192
196
  kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(cluster)
@@ -16,7 +16,8 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
- from typing import TYPE_CHECKING, Any, Callable
19
+ from collections.abc import Callable
20
+ from typing import TYPE_CHECKING, Any
20
21
  from uuid import UUID, uuid4
21
22
 
22
23
  from azure.core.exceptions import ResourceNotFoundError
@@ -36,13 +37,17 @@ from azure.servicebus.management import (
36
37
  SubscriptionProperties,
37
38
  )
38
39
 
39
- from airflow.hooks.base import BaseHook
40
40
  from airflow.providers.microsoft.azure.utils import (
41
41
  add_managed_identity_connection_widgets,
42
42
  get_field,
43
43
  get_sync_default_azure_credential,
44
44
  )
45
45
 
46
+ try:
47
+ from airflow.sdk import BaseHook
48
+ except ImportError:
49
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
50
+
46
51
  if TYPE_CHECKING:
47
52
  import datetime
48
53
 
@@ -22,12 +22,16 @@ from azure.common.client_factory import get_client_from_auth_file, get_client_fr
22
22
  from azure.common.credentials import ServicePrincipalCredentials
23
23
 
24
24
  from airflow.exceptions import AirflowException
25
- from airflow.hooks.base import BaseHook
26
25
  from airflow.providers.microsoft.azure.utils import (
27
26
  AzureIdentityCredentialAdapter,
28
27
  add_managed_identity_connection_widgets,
29
28
  )
30
29
 
30
+ try:
31
+ from airflow.sdk import BaseHook
32
+ except ImportError:
33
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
34
+
31
35
 
32
36
  class AzureBaseHook(BaseHook):
33
37
  """
@@ -41,7 +45,7 @@ class AzureBaseHook(BaseHook):
41
45
  which refers to the information to connect to the service.
42
46
  """
43
47
 
44
- conn_name_attr = "azure_conn_id"
48
+ conn_name_attr = "conn_id"
45
49
  default_conn_name = "azure_default"
46
50
  conn_type = "azure"
47
51
  hook_name = "Azure"
@@ -25,12 +25,16 @@ from typing import TYPE_CHECKING, Any
25
25
  from azure.batch import BatchServiceClient, batch_auth, models as batch_models
26
26
 
27
27
  from airflow.exceptions import AirflowException
28
- from airflow.hooks.base import BaseHook
29
28
  from airflow.providers.microsoft.azure.utils import (
30
29
  AzureIdentityCredentialAdapter,
31
30
  add_managed_identity_connection_widgets,
32
31
  get_field,
33
32
  )
33
+
34
+ try:
35
+ from airflow.sdk import BaseHook
36
+ except ImportError:
37
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
34
38
  from airflow.utils import timezone
35
39
 
36
40
  if TYPE_CHECKING:
@@ -85,7 +85,9 @@ class AzureContainerInstanceHook(AzureBaseHook):
85
85
  if all([conn.login, conn.password, tenant]):
86
86
  self.log.info("Getting connection using specific credentials and subscription_id.")
87
87
  credential = ClientSecretCredential(
88
- client_id=conn.login, client_secret=conn.password, tenant_id=cast("str", tenant)
88
+ client_id=cast("str", conn.login),
89
+ client_secret=cast("str", conn.password),
90
+ tenant_id=cast("str", tenant),
89
91
  )
90
92
  else:
91
93
  self.log.info("Using DefaultAzureCredential as credential")
@@ -20,18 +20,22 @@
20
20
  from __future__ import annotations
21
21
 
22
22
  from functools import cached_property
23
- from typing import Any
23
+ from typing import Any, cast
24
24
 
25
25
  from azure.mgmt.containerinstance.models import ImageRegistryCredential
26
26
  from azure.mgmt.containerregistry import ContainerRegistryManagementClient
27
27
 
28
- from airflow.hooks.base import BaseHook
29
28
  from airflow.providers.microsoft.azure.utils import (
30
29
  add_managed_identity_connection_widgets,
31
30
  get_field,
32
31
  get_sync_default_azure_credential,
33
32
  )
34
33
 
34
+ try:
35
+ from airflow.sdk import BaseHook
36
+ except ImportError:
37
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
38
+
35
39
 
36
40
  class AzureContainerRegistryHook(BaseHook):
37
41
  """
@@ -121,4 +125,6 @@ class AzureContainerRegistryHook(BaseHook):
121
125
  credentials = client.registries.list_credentials(resource_group, conn.login).as_dict()
122
126
  password = credentials["passwords"][0]["value"]
123
127
 
124
- return ImageRegistryCredential(server=conn.host, username=conn.login, password=password)
128
+ return ImageRegistryCredential(
129
+ server=cast("str", conn.host), username=cast("str", conn.login), password=password
130
+ )
@@ -16,18 +16,22 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
- from typing import Any
19
+ from typing import Any, cast
20
20
 
21
21
  from azure.mgmt.containerinstance.models import AzureFileVolume, Volume
22
22
  from azure.mgmt.storage import StorageManagementClient
23
23
 
24
- from airflow.hooks.base import BaseHook
25
24
  from airflow.providers.microsoft.azure.utils import (
26
25
  add_managed_identity_connection_widgets,
27
26
  get_field,
28
27
  get_sync_default_azure_credential,
29
28
  )
30
29
 
30
+ try:
31
+ from airflow.sdk import BaseHook
32
+ except ImportError:
33
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
34
+
31
35
 
32
36
  class AzureContainerVolumeHook(BaseHook):
33
37
  """
@@ -121,7 +125,7 @@ class AzureContainerVolumeHook(BaseHook):
121
125
  )
122
126
  return storage_account_list_keys_result.as_dict()["keys"][0]["value"]
123
127
 
124
- return conn.password
128
+ return cast("str", conn.password)
125
129
 
126
130
  def get_file_volume(
127
131
  self, mount_name: str, share_name: str, storage_account_name: str, read_only: bool = False
@@ -27,7 +27,7 @@ the default database and collection to use (see connection `azure_cosmos_default
27
27
  from __future__ import annotations
28
28
 
29
29
  import uuid
30
- from typing import TYPE_CHECKING, Any, Union
30
+ from typing import TYPE_CHECKING, Any, cast
31
31
  from urllib.parse import urlparse
32
32
 
33
33
  from azure.cosmos import PartitionKey
@@ -36,15 +36,19 @@ from azure.cosmos.exceptions import CosmosHttpResponseError
36
36
  from azure.mgmt.cosmosdb import CosmosDBManagementClient
37
37
 
38
38
  from airflow.exceptions import AirflowBadRequest, AirflowException
39
- from airflow.hooks.base import BaseHook
40
39
  from airflow.providers.microsoft.azure.utils import (
41
40
  add_managed_identity_connection_widgets,
42
41
  get_field,
43
42
  get_sync_default_azure_credential,
44
43
  )
45
44
 
45
+ try:
46
+ from airflow.sdk import BaseHook
47
+ except ImportError:
48
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
49
+
46
50
  if TYPE_CHECKING:
47
- PartitionKeyType = Union[str, list[str]]
51
+ PartitionKeyType = str | list[str]
48
52
 
49
53
 
50
54
  class AzureCosmosDBHook(BaseHook):
@@ -131,6 +135,7 @@ class AzureCosmosDBHook(BaseHook):
131
135
  conn = self.get_connection(self.conn_id)
132
136
  extras = conn.extra_dejson
133
137
  endpoint_uri = conn.login
138
+ endpoint_uri = cast("str", endpoint_uri)
134
139
  resource_group_name = self._get_field(extras, "resource_group_name")
135
140
 
136
141
  if conn.password:
@@ -147,12 +152,12 @@ class AzureCosmosDBHook(BaseHook):
147
152
  credential=credential,
148
153
  subscription_id=subscritption_id,
149
154
  )
150
-
155
+ conn.login = cast("str", conn.login)
151
156
  database_account = urlparse(conn.login).netloc.split(".")[0]
152
157
  database_account_keys = management_client.database_accounts.list_keys(
153
158
  resource_group_name, database_account
154
159
  )
155
- master_key = database_account_keys.primary_master_key
160
+ master_key = cast("str", database_account_keys.primary_master_key)
156
161
  else:
157
162
  raise AirflowException("Either password or resource_group_name is required")
158
163
 
@@ -35,8 +35,9 @@ from __future__ import annotations
35
35
 
36
36
  import inspect
37
37
  import time
38
+ from collections.abc import Callable
38
39
  from functools import wraps
39
- from typing import IO, TYPE_CHECKING, Any, Callable, TypeVar, Union, cast
40
+ from typing import IO, TYPE_CHECKING, Any, TypeVar, cast
40
41
 
41
42
  from asgiref.sync import sync_to_async
42
43
  from azure.identity import ClientSecretCredential, DefaultAzureCredential
@@ -48,13 +49,17 @@ from azure.mgmt.datafactory import DataFactoryManagementClient
48
49
  from azure.mgmt.datafactory.aio import DataFactoryManagementClient as AsyncDataFactoryManagementClient
49
50
 
50
51
  from airflow.exceptions import AirflowException
51
- from airflow.hooks.base import BaseHook
52
52
  from airflow.providers.microsoft.azure.utils import (
53
53
  add_managed_identity_connection_widgets,
54
54
  get_async_default_azure_credential,
55
55
  get_sync_default_azure_credential,
56
56
  )
57
57
 
58
+ try:
59
+ from airflow.sdk import BaseHook
60
+ except ImportError:
61
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
62
+
58
63
  if TYPE_CHECKING:
59
64
  from azure.core.polling import LROPoller
60
65
  from azure.mgmt.datafactory.models import (
@@ -68,8 +73,8 @@ if TYPE_CHECKING:
68
73
  TriggerResource,
69
74
  )
70
75
 
71
- Credentials = Union[ClientSecretCredential, DefaultAzureCredential]
72
- AsyncCredentials = Union[AsyncClientSecretCredential, AsyncDefaultAzureCredential]
76
+ Credentials = ClientSecretCredential | DefaultAzureCredential
77
+ AsyncCredentials = AsyncClientSecretCredential | AsyncDefaultAzureCredential
73
78
 
74
79
  T = TypeVar("T", bound=Any)
75
80
 
@@ -18,7 +18,7 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  from functools import cached_property
21
- from typing import Any, Union
21
+ from typing import Any, cast
22
22
 
23
23
  from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError
24
24
  from azure.datalake.store import core, lib, multithread
@@ -33,7 +33,6 @@ from azure.storage.filedatalake import (
33
33
  )
34
34
 
35
35
  from airflow.exceptions import AirflowException
36
- from airflow.hooks.base import BaseHook
37
36
  from airflow.providers.microsoft.azure.utils import (
38
37
  AzureIdentityCredentialAdapter,
39
38
  add_managed_identity_connection_widgets,
@@ -41,7 +40,12 @@ from airflow.providers.microsoft.azure.utils import (
41
40
  get_sync_default_azure_credential,
42
41
  )
43
42
 
44
- Credentials = Union[ClientSecretCredential, AzureIdentityCredentialAdapter, DefaultAzureCredential]
43
+ try:
44
+ from airflow.sdk import BaseHook
45
+ except ImportError:
46
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
47
+
48
+ Credentials = ClientSecretCredential | AzureIdentityCredentialAdapter | DefaultAzureCredential
45
49
 
46
50
 
47
51
  class AzureDataLakeHook(BaseHook):
@@ -355,12 +359,13 @@ class AzureDataLakeStorageV2Hook(BaseHook):
355
359
  app_id = conn.login
356
360
  app_secret = conn.password
357
361
  proxies = extra.get("proxies", {})
358
-
362
+ app_id = cast("str", app_id)
363
+ app_secret = cast("str", app_secret)
359
364
  credential = ClientSecretCredential(
360
365
  tenant_id=tenant, client_id=app_id, client_secret=app_secret, proxies=proxies
361
366
  )
362
367
  elif conn.password:
363
- credential = conn.password
368
+ credential = conn.password # type: ignore[assignment]
364
369
  else:
365
370
  managed_identity_client_id = self._get_field(extra, "managed_identity_client_id")
366
371
  workload_identity_tenant_id = self._get_field(extra, "workload_identity_tenant_id")
@@ -424,7 +429,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
424
429
  :param file_system_name: Name of the file system or instance of FileSystemProperties.
425
430
  :param directory_name: Name of the directory which needs to be created in the file system.
426
431
  """
427
- result = self.get_file_system(file_system_name).create_directory(directory_name, kwargs)
432
+ result = self.get_file_system(file_system_name).create_directory(directory_name, **kwargs)
428
433
  return result
429
434
 
430
435
  def get_directory_client(
@@ -479,7 +484,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
479
484
  """
480
485
  file_client = self.create_file(file_system_name, file_name)
481
486
  with open(file_path, "rb") as data:
482
- file_client.upload_data(data, overwrite=overwrite, kwargs=kwargs)
487
+ file_client.upload_data(data, overwrite=overwrite, **kwargs)
483
488
 
484
489
  def upload_file_to_directory(
485
490
  self,
@@ -500,9 +505,9 @@ class AzureDataLakeStorageV2Hook(BaseHook):
500
505
  :param overwrite: Boolean flag to overwrite an existing file or not.
501
506
  """
502
507
  directory_client = self.get_directory_client(file_system_name, directory_name=directory_name)
503
- file_client = directory_client.create_file(file_name, kwargs=kwargs)
508
+ file_client = directory_client.create_file(file_name, **kwargs)
504
509
  with open(file_path, "rb") as data:
505
- file_client.upload_data(data, overwrite=overwrite, kwargs=kwargs)
510
+ file_client.upload_data(data, overwrite=overwrite, **kwargs)
506
511
 
507
512
  def list_files_directory(
508
513
  self, file_system_name: FileSystemProperties | str, directory_name: str
@@ -21,12 +21,16 @@ from typing import IO, Any
21
21
 
22
22
  from azure.storage.fileshare import FileProperties, ShareDirectoryClient, ShareFileClient, ShareServiceClient
23
23
 
24
- from airflow.hooks.base import BaseHook
25
24
  from airflow.providers.microsoft.azure.utils import (
26
25
  add_managed_identity_connection_widgets,
27
26
  get_sync_default_azure_credential,
28
27
  )
29
28
 
29
+ try:
30
+ from airflow.sdk import BaseHook
31
+ except ImportError:
32
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
33
+
30
34
 
31
35
  class AzureFileShareHook(BaseHook):
32
36
  """
@@ -50,7 +50,11 @@ from airflow.exceptions import (
50
50
  AirflowException,
51
51
  AirflowNotFoundException,
52
52
  )
53
- from airflow.hooks.base import BaseHook
53
+
54
+ try:
55
+ from airflow.sdk import BaseHook
56
+ except ImportError:
57
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
54
58
 
55
59
  if TYPE_CHECKING:
56
60
  from azure.identity._internal.client_credential_base import ClientCredentialBase
@@ -254,7 +258,7 @@ class KiotaRequestAdapterHook(BaseHook):
254
258
  client_secret = connection.password
255
259
  config = connection.extra_dejson if connection.extra else {}
256
260
  api_version = self.get_api_version(config)
257
- host = self.get_host(connection)
261
+ host = self.get_host(connection) # type: ignore[arg-type]
258
262
  base_url = config.get("base_url", urljoin(host, api_version))
259
263
  authority = config.get("authority")
260
264
  proxies = self.get_proxies(config)
@@ -299,6 +303,7 @@ class KiotaRequestAdapterHook(BaseHook):
299
303
  timeout=Timeout(timeout=self.timeout),
300
304
  verify=verify,
301
305
  trust_env=trust_env,
306
+ base_url=base_url,
302
307
  ),
303
308
  host=host, # type: ignore
304
309
  )
@@ -446,9 +451,9 @@ class KiotaRequestAdapterHook(BaseHook):
446
451
  request_information.url = url
447
452
  elif request_information.query_parameters.keys():
448
453
  query = ",".join(request_information.query_parameters.keys())
449
- request_information.url_template = f"{{+baseurl}}/{self.normalize_url(url)}{{?{query}}}"
454
+ request_information.url_template = f"{{+baseurl}}{self.normalize_url(url)}{{?{query}}}"
450
455
  else:
451
- request_information.url_template = f"{{+baseurl}}/{self.normalize_url(url)}"
456
+ request_information.url_template = f"{{+baseurl}}{self.normalize_url(url)}"
452
457
  if not response_type:
453
458
  request_information.request_options[ResponseHandlerOption.get_key()] = ResponseHandlerOption(
454
459
  response_handler=DefaultResponseHandler()
@@ -467,6 +472,7 @@ class KiotaRequestAdapterHook(BaseHook):
467
472
  header_name=RequestInformation.CONTENT_TYPE_HEADER, header_value="application/json"
468
473
  )
469
474
  request_information.content = json.dumps(data).encode("utf-8")
475
+ print("Request Information:", request_information.url)
470
476
  return request_information
471
477
 
472
478
  @staticmethod
@@ -189,12 +189,15 @@ class PowerBIHook(KiotaRequestAdapterHook):
189
189
 
190
190
  return refresh_details
191
191
 
192
- async def trigger_dataset_refresh(self, *, dataset_id: str, group_id: str) -> str:
192
+ async def trigger_dataset_refresh(
193
+ self, *, dataset_id: str, group_id: str, request_body: dict[str, Any] | None = None
194
+ ) -> str:
193
195
  """
194
196
  Triggers a refresh for the specified dataset from the given group id.
195
197
 
196
198
  :param dataset_id: The dataset id.
197
199
  :param group_id: The workspace id.
200
+ :param request_body: Additional arguments to pass to the request body, as described in https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/refresh-dataset-in-group#request-body.
198
201
 
199
202
  :return: Request id of the dataset refresh request.
200
203
  """
@@ -207,6 +210,7 @@ class PowerBIHook(KiotaRequestAdapterHook):
207
210
  "group_id": group_id,
208
211
  "dataset_id": dataset_id,
209
212
  },
213
+ data=request_body,
210
214
  )
211
215
 
212
216
  request_id = response.get("requestid")
@@ -17,7 +17,7 @@
17
17
  from __future__ import annotations
18
18
 
19
19
  import time
20
- from typing import TYPE_CHECKING, Any, Union
20
+ from typing import TYPE_CHECKING, Any
21
21
 
22
22
  from azure.core.exceptions import ServiceRequestError
23
23
  from azure.identity import ClientSecretCredential, DefaultAzureCredential
@@ -25,18 +25,22 @@ from azure.synapse.artifacts import ArtifactsClient
25
25
  from azure.synapse.spark import SparkClient
26
26
 
27
27
  from airflow.exceptions import AirflowException, AirflowTaskTimeout
28
- from airflow.hooks.base import BaseHook
29
28
  from airflow.providers.microsoft.azure.utils import (
30
29
  add_managed_identity_connection_widgets,
31
30
  get_field,
32
31
  get_sync_default_azure_credential,
33
32
  )
34
33
 
34
+ try:
35
+ from airflow.sdk import BaseHook
36
+ except ImportError:
37
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
38
+
35
39
  if TYPE_CHECKING:
36
40
  from azure.synapse.artifacts.models import CreateRunResponse, PipelineRun
37
41
  from azure.synapse.spark.models import SparkBatchJobOptions
38
42
 
39
- Credentials = Union[ClientSecretCredential, DefaultAzureCredential]
43
+ Credentials = ClientSecretCredential | DefaultAzureCredential
40
44
 
41
45
 
42
46
  class AzureSynapseSparkBatchRunStatus:
@@ -29,7 +29,7 @@ from __future__ import annotations
29
29
  import logging
30
30
  import os
31
31
  from functools import cached_property
32
- from typing import TYPE_CHECKING, Any, Union
32
+ from typing import TYPE_CHECKING, Any, cast
33
33
 
34
34
  from asgiref.sync import sync_to_async
35
35
  from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError
@@ -46,7 +46,6 @@ from azure.storage.blob.aio import (
46
46
  )
47
47
 
48
48
  from airflow.exceptions import AirflowException
49
- from airflow.hooks.base import BaseHook
50
49
  from airflow.providers.microsoft.azure.utils import (
51
50
  add_managed_identity_connection_widgets,
52
51
  get_async_default_azure_credential,
@@ -54,10 +53,16 @@ from airflow.providers.microsoft.azure.utils import (
54
53
  parse_blob_account_url,
55
54
  )
56
55
 
56
+ try:
57
+ from airflow.sdk import BaseHook
58
+ except ImportError:
59
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
60
+
57
61
  if TYPE_CHECKING:
62
+ from azure.core.credentials import TokenCredential
58
63
  from azure.storage.blob._models import BlobProperties
59
64
 
60
- AsyncCredentials = Union[AsyncClientSecretCredential, AsyncDefaultAzureCredential]
65
+ AsyncCredentials = AsyncClientSecretCredential | AsyncDefaultAzureCredential
61
66
 
62
67
 
63
68
  class WasbHook(BaseHook):
@@ -67,8 +72,9 @@ class WasbHook(BaseHook):
67
72
  These parameters have to be passed in Airflow Data Base: account_name and account_key.
68
73
 
69
74
  Additional options passed in the 'extra' field of the connection will be
70
- passed to the `BlockBlockService()` constructor. For example, authenticate
71
- using a SAS token by adding {"sas_token": "YOUR_TOKEN"}.
75
+ passed to the `BlobServiceClient()` constructor. For example, authenticate
76
+ using a SAS token by adding {"sas_token": "YOUR_TOKEN"} or using an account key
77
+ by adding {"account_key": "YOUR_ACCOUNT_KEY"}.
72
78
 
73
79
  If no authentication configuration is provided, DefaultAzureCredential will be used (applicable
74
80
  when using Azure compute infrastructure).
@@ -121,7 +127,7 @@ class WasbHook(BaseHook):
121
127
  "tenant_id": "tenant",
122
128
  "shared_access_key": "shared access key",
123
129
  "sas_token": "account url or token",
124
- "extra": "additional options for use with ClientSecretCredential or DefaultAzureCredential",
130
+ "extra": "additional options for use with ClientSecretCredential, DefaultAzureCredential, or account_key authentication",
125
131
  },
126
132
  }
127
133
 
@@ -171,8 +177,8 @@ class WasbHook(BaseHook):
171
177
  tenant = self._get_field(extra, "tenant_id")
172
178
  if tenant:
173
179
  # use Active Directory auth
174
- app_id = conn.login
175
- app_secret = conn.password
180
+ app_id = cast("str", conn.login)
181
+ app_secret = cast("str", conn.password)
176
182
  token_credential = ClientSecretCredential(
177
183
  tenant_id=tenant, client_id=app_id, client_secret=app_secret, **client_secret_auth_config
178
184
  )
@@ -196,15 +202,20 @@ class WasbHook(BaseHook):
196
202
  return BlobServiceClient(account_url=f"{account_url.rstrip('/')}/{sas_token}", **extra)
197
203
 
198
204
  # Fall back to old auth (password) or use managed identity if not provided.
199
- credential = conn.password
205
+ credential: str | TokenCredential | None = conn.password
200
206
  if not credential:
201
- managed_identity_client_id = self._get_field(extra, "managed_identity_client_id")
202
- workload_identity_tenant_id = self._get_field(extra, "workload_identity_tenant_id")
203
- credential = get_sync_default_azure_credential(
204
- managed_identity_client_id=managed_identity_client_id,
205
- workload_identity_tenant_id=workload_identity_tenant_id,
206
- )
207
- self.log.info("Using DefaultAzureCredential as credential")
207
+ # Check for account_key in extra fields before falling back to DefaultAzureCredential
208
+ account_key = self._get_field(extra, "account_key")
209
+ if account_key:
210
+ credential = account_key
211
+ else:
212
+ managed_identity_client_id = self._get_field(extra, "managed_identity_client_id")
213
+ workload_identity_tenant_id = self._get_field(extra, "workload_identity_tenant_id")
214
+ credential = get_sync_default_azure_credential(
215
+ managed_identity_client_id=managed_identity_client_id,
216
+ workload_identity_tenant_id=workload_identity_tenant_id,
217
+ )
218
+ self.log.info("Using DefaultAzureCredential as credential")
208
219
  return BlobServiceClient(
209
220
  account_url=account_url,
210
221
  credential=credential,
@@ -646,13 +657,18 @@ class WasbAsyncHook(WasbHook):
646
657
  # Fall back to old auth (password) or use managed identity if not provided.
647
658
  credential = conn.password
648
659
  if not credential:
649
- managed_identity_client_id = self._get_field(extra, "managed_identity_client_id")
650
- workload_identity_tenant_id = self._get_field(extra, "workload_identity_tenant_id")
651
- credential = get_async_default_azure_credential(
652
- managed_identity_client_id=managed_identity_client_id,
653
- workload_identity_tenant_id=workload_identity_tenant_id,
654
- )
655
- self.log.info("Using DefaultAzureCredential as credential")
660
+ # Check for account_key in extra fields before falling back to DefaultAzureCredential
661
+ account_key = self._get_field(extra, "account_key")
662
+ if account_key:
663
+ credential = account_key
664
+ else:
665
+ managed_identity_client_id = self._get_field(extra, "managed_identity_client_id")
666
+ workload_identity_tenant_id = self._get_field(extra, "workload_identity_tenant_id")
667
+ credential = get_async_default_azure_credential(
668
+ managed_identity_client_id=managed_identity_client_id,
669
+ workload_identity_tenant_id=workload_identity_tenant_id,
670
+ )
671
+ self.log.info("Using DefaultAzureCredential as credential")
656
672
  self.blob_service_client = AsyncBlobServiceClient(
657
673
  account_url=account_url,
658
674
  credential=credential,
@@ -19,8 +19,8 @@ from __future__ import annotations
19
19
  from collections.abc import Iterable, Sequence
20
20
  from typing import IO, TYPE_CHECKING, Any, AnyStr
21
21
 
22
- from airflow.models import BaseOperator
23
22
  from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook, AzureDataLakeStorageV2Hook
23
+ from airflow.providers.microsoft.azure.version_compat import BaseOperator
24
24
 
25
25
  if TYPE_CHECKING:
26
26
  from airflow.utils.context import Context