apache-airflow-providers-microsoft-azure 12.8.1rc1__py3-none-any.whl → 12.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/microsoft/azure/__init__.py +3 -3
- airflow/providers/microsoft/azure/get_provider_info.py +5 -1
- airflow/providers/microsoft/azure/hooks/adx.py +1 -2
- airflow/providers/microsoft/azure/hooks/asb.py +54 -0
- airflow/providers/microsoft/azure/hooks/base_azure.py +1 -2
- airflow/providers/microsoft/azure/hooks/batch.py +1 -2
- airflow/providers/microsoft/azure/hooks/container_instance.py +1 -1
- airflow/providers/microsoft/azure/hooks/cosmos.py +2 -2
- airflow/providers/microsoft/azure/hooks/data_factory.py +1 -2
- airflow/providers/microsoft/azure/hooks/data_lake.py +1 -2
- airflow/providers/microsoft/azure/hooks/msgraph.py +137 -32
- airflow/providers/microsoft/azure/hooks/powerbi.py +1 -1
- airflow/providers/microsoft/azure/hooks/synapse.py +1 -2
- airflow/providers/microsoft/azure/hooks/wasb.py +1 -2
- airflow/providers/microsoft/azure/operators/batch.py +1 -2
- airflow/providers/microsoft/azure/operators/container_instances.py +78 -3
- airflow/providers/microsoft/azure/operators/data_factory.py +7 -2
- airflow/providers/microsoft/azure/operators/msgraph.py +9 -16
- airflow/providers/microsoft/azure/operators/powerbi.py +1 -2
- airflow/providers/microsoft/azure/operators/synapse.py +7 -2
- airflow/providers/microsoft/azure/sensors/data_factory.py +1 -2
- airflow/providers/microsoft/azure/sensors/msgraph.py +1 -2
- airflow/providers/microsoft/azure/sensors/wasb.py +1 -2
- airflow/providers/microsoft/azure/transfers/local_to_adls.py +1 -2
- airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +1 -2
- airflow/providers/microsoft/azure/triggers/message_bus.py +223 -0
- {apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/METADATA +10 -10
- {apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/RECORD +32 -31
- {apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/licenses/LICENSE +0 -0
- {apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/licenses/NOTICE +0 -0

airflow/providers/microsoft/azure/__init__.py

@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "12.
+__version__ = "12.10.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-microsoft-azure:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-microsoft-azure:{__version__}` needs Apache Airflow 2.11.0+"
     )

airflow/providers/microsoft/azure/get_provider_info.py

@@ -200,7 +200,7 @@ def get_provider_info():
         ],
         "filesystems": [
             "airflow.providers.microsoft.azure.fs.adls",
-            "airflow.providers.microsoft.azure.fs.
+            "airflow.providers.microsoft.azure.fs.msgraph",
         ],
         "hooks": [
             {
@@ -281,6 +281,10 @@ def get_provider_info():
                 "integration-name": "Microsoft Power BI",
                 "python-modules": ["airflow.providers.microsoft.azure.triggers.powerbi"],
             },
+            {
+                "integration-name": "Microsoft Azure Service Bus",
+                "python-modules": ["airflow.providers.microsoft.azure.triggers.message_bus"],
+            },
         ],
         "transfers": [
             {

airflow/providers/microsoft/azure/hooks/adx.py

@@ -33,8 +33,7 @@ from typing import TYPE_CHECKING, Any, cast
 from azure.kusto.data import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
 from azure.kusto.data.exceptions import KustoServiceError
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
     get_sync_default_azure_credential,

airflow/providers/microsoft/azure/hooks/asb.py

@@ -606,6 +606,60 @@ class MessageHook(BaseAzureServiceBusHook):
             for msg in received_msgs:
                 self._process_message(msg, context, message_callback, subscription_receiver)
 
+    def read_message(
+        self,
+        queue_name: str,
+        max_wait_time: float | None = None,
+    ) -> ServiceBusReceivedMessage | None:
+        """
+        Read a single message from a Service Bus queue without callback processing.
+
+        :param queue_name: The name of the queue to read from.
+        :param max_wait_time: Maximum time to wait for messages (seconds).
+        :return: The received message or None if no message is available.
+        """
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_queue_receiver(queue_name=queue_name) as receiver,
+            receiver,
+        ):
+            received_msgs = receiver.receive_messages(max_message_count=1, max_wait_time=max_wait_time)
+            if received_msgs:
+                msg = received_msgs[0]
+                receiver.complete_message(msg)
+                return msg
+            return None
+
+    def read_subscription_message(
+        self,
+        topic_name: str,
+        subscription_name: str,
+        max_wait_time: float | None = None,
+    ) -> ServiceBusReceivedMessage | None:
+        """
+        Read a single message from a Service Bus topic subscription without callback processing.
+
+        :param topic_name: The name of the topic.
+        :param subscription_name: The name of the subscription.
+        :param max_wait_time: Maximum time to wait for messages (seconds).
+        :return: The received message or None if no message is available.
+        """
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_subscription_receiver(
+                topic_name, subscription_name
+            ) as subscription_receiver,
+            subscription_receiver,
+        ):
+            received_msgs = subscription_receiver.receive_messages(
+                max_message_count=1, max_wait_time=max_wait_time
+            )
+            if received_msgs:
+                msg = received_msgs[0]
+                subscription_receiver.complete_message(msg)
+                return msg
+            return None
+
     def _process_message(
         self,
         msg: ServiceBusReceivedMessage,
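The new read_message / read_subscription_message helpers are plain synchronous calls, so they can be used directly from a task or wrapped for async use (as the new triggers further below do). A minimal sketch, assuming a connection id of azure_service_bus_default and queue/topic/subscription names that are purely illustrative:

    from airflow.providers.microsoft.azure.hooks.asb import MessageHook

    # Hypothetical connection id and entity names, for illustration only.
    hook = MessageHook(azure_service_bus_conn_id="azure_service_bus_default")

    # Receive (and complete) at most one message, waiting up to 5 seconds.
    msg = hook.read_message(queue_name="my-queue", max_wait_time=5)
    if msg is not None:
        print(msg.body)

    # The topic/subscription variant works the same way.
    sub_msg = hook.read_subscription_message(
        topic_name="my-topic",
        subscription_name="my-subscription",
        max_wait_time=5,
    )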

airflow/providers/microsoft/azure/hooks/base_azure.py

@@ -22,8 +22,7 @@ from azure.common.client_factory import get_client_from_auth_file, get_client_fr
 from azure.common.credentials import ServicePrincipalCredentials
 from azure.identity import ClientSecretCredential, DefaultAzureCredential
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     AzureIdentityCredentialAdapter,
     add_managed_identity_connection_widgets,

airflow/providers/microsoft/azure/hooks/batch.py

@@ -24,8 +24,7 @@ from typing import TYPE_CHECKING, Any
 
 from azure.batch import BatchServiceClient, batch_auth, models as batch_models
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     AzureIdentityCredentialAdapter,
     add_managed_identity_connection_widgets,

airflow/providers/microsoft/azure/hooks/container_instance.py

@@ -24,7 +24,7 @@ from azure.common.client_factory import get_client_from_auth_file, get_client_fr
 from azure.identity import ClientSecretCredential, DefaultAzureCredential
 from azure.mgmt.containerinstance import ContainerInstanceManagementClient
 
-from airflow.
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
 from airflow.providers.microsoft.azure.utils import get_sync_default_azure_credential
 

airflow/providers/microsoft/azure/hooks/cosmos.py

@@ -35,8 +35,8 @@ from azure.cosmos.cosmos_client import CosmosClient
 from azure.cosmos.exceptions import CosmosHttpResponseError
 from azure.mgmt.cosmosdb import CosmosDBManagementClient
 
-from airflow.exceptions import AirflowBadRequest
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.exceptions import AirflowBadRequest
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
     get_field,

airflow/providers/microsoft/azure/hooks/data_factory.py

@@ -48,8 +48,7 @@ from azure.identity.aio import (
 from azure.mgmt.datafactory import DataFactoryManagementClient
 from azure.mgmt.datafactory.aio import DataFactoryManagementClient as AsyncDataFactoryManagementClient
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
     get_async_default_azure_credential,

airflow/providers/microsoft/azure/hooks/data_lake.py

@@ -32,8 +32,7 @@ from azure.storage.filedatalake import (
     FileSystemProperties,
 )
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     AzureIdentityCredentialAdapter,
     add_managed_identity_connection_widgets,

airflow/providers/microsoft/azure/hooks/msgraph.py

@@ -18,9 +18,11 @@
 from __future__ import annotations
 
 import asyncio
+import inspect
 import json
 import warnings
 from ast import literal_eval
+from collections.abc import Callable
 from contextlib import suppress
 from http import HTTPStatus
 from io import BytesIO
@@ -46,14 +48,8 @@ from kiota_serialization_text.text_parse_node_factory import TextParseNodeFactor
 from msgraph_core import APIVersion, GraphClientFactory
 from msgraph_core._enums import NationalClouds
 
-from airflow.exceptions import
-
-    AirflowConfigException,
-    AirflowException,
-    AirflowNotFoundException,
-    AirflowProviderDeprecationWarning,
-)
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.exceptions import AirflowBadRequest, AirflowConfigException, AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import AirflowException, AirflowNotFoundException, BaseHook
 
 if TYPE_CHECKING:
     from azure.identity._internal.client_credential_base import ClientCredentialBase
@@ -64,6 +60,32 @@ if TYPE_CHECKING:
     from airflow.providers.common.compat.sdk import Connection
 
 
+PaginationCallable = Callable[..., tuple[str, dict[str, Any] | None]]
+
+
+def execute_callable(func: Callable, *args: Any, **kwargs: Any) -> Any:
+    """Dynamically call a function by matching its signature to provided args/kwargs."""
+    sig = inspect.signature(func)
+    accepts_kwargs = any(p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values())
+
+    if not accepts_kwargs:
+        # Only pass arguments the function explicitly declares
+        filtered_kwargs = {k: v for k, v in kwargs.items() if k in sig.parameters}
+    else:
+        filtered_kwargs = kwargs
+
+    try:
+        sig.bind(*args, **filtered_kwargs)
+    except TypeError as err:
+        raise TypeError(
+            f"Failed to bind arguments to function {func.__name__}: {err}\n"
+            f"Expected parameters: {list(sig.parameters.keys())}\n"
+            f"Provided kwargs: {list(kwargs.keys())}"
+        ) from err
+
+    return func(*args, **filtered_kwargs)
+
+
 class DefaultResponseHandler(ResponseHandler):
     """DefaultResponseHandler returns JSON payload or content in bytes or response headers."""
 
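execute_callable is what lets paginated_run (further down in this file) pass a rich set of keyword arguments to any user-supplied pagination callback while only forwarding the ones the callback actually declares. A small illustration with a made-up callback:

    def next_link(response, query_parameters=None):
        # Only cares about two of the many kwargs paginated_run supplies.
        return response.get("@odata.nextLink"), query_parameters

    url, params = execute_callable(
        next_link,
        response={"@odata.nextLink": "https://graph.microsoft.com/v1.0/users?$skip=100"},
        query_parameters={"$top": 100},
        headers={"Accept": "application/json"},  # silently dropped: not in next_link's signature
    )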
@@ -128,7 +150,7 @@ class KiotaRequestAdapterHook(BaseHook):
         conn_id: str = default_conn_name,
         timeout: float | None = None,
         proxies: dict | None = None,
-        host: str =
+        host: str | None = None,
         scopes: str | list[str] | None = None,
         api_version: APIVersion | str | None = None,
     ):
@@ -204,8 +226,12 @@ class KiotaRequestAdapterHook(BaseHook):
         ) # type: ignore
 
     def get_host(self, connection: Connection) -> str:
-        if
-
+        if not self.host:
+            if connection.schema and connection.host:
+                return f"{connection.schema}://{connection.host}"
+            return NationalClouds.Global.value
+        if not self.host.startswith("http://") or not self.host.startswith("https://"):
+            return f"{connection.schema}://{self.host}"
         return self.host
 
     def get_base_url(self, host: str, api_version: str, config: dict) -> str:
@@ -222,7 +248,7 @@ class KiotaRequestAdapterHook(BaseHook):
         return url
 
     @classmethod
-    def to_httpx_proxies(cls, proxies: dict) -> dict:
+    def to_httpx_proxies(cls, proxies: dict | None) -> dict | None:
         if proxies:
             proxies = proxies.copy()
             if proxies.get("http"):
@@ -232,9 +258,10 @@ class KiotaRequestAdapterHook(BaseHook):
             if proxies.get("no"):
                 for url in proxies.pop("no", "").split(","):
                     proxies[cls.format_no_proxy_url(url.strip())] = None
-
+            return proxies
+        return None
 
-    def to_msal_proxies(self, authority: str | None, proxies: dict) -> dict | None:
+    def to_msal_proxies(self, authority: str | None, proxies: dict | None) -> dict | None:
         self.log.debug("authority: %s", authority)
         if authority and proxies:
             no_proxies = proxies.get("no")
@@ -246,7 +273,8 @@ class KiotaRequestAdapterHook(BaseHook):
                 self.log.debug("domain_name: %s", domain_name)
                 if authority.endswith(domain_name):
                     return None
-
+            return proxies
+        return None
 
     def _build_request_adapter(self, connection) -> tuple[str, RequestAdapter]:
         client_id = connection.login
@@ -367,22 +395,24 @@ class KiotaRequestAdapterHook(BaseHook):
         self.api_version = api_version
         return request_adapter
 
-    def get_proxies(self, config: dict) -> dict:
-        proxies = self.proxies
-        if
-
-
-
-
-
-
-
-
-
-
-
-
+    def get_proxies(self, config: dict) -> dict | None:
+        proxies = self.proxies if self.proxies is not None else config.get("proxies", {})
+        if proxies:
+            if isinstance(proxies, str):
+                # TODO: Once provider depends on Airflow 2.10 or higher code below won't be needed anymore as
+                # we could then use the get_extra_dejson method on the connection which deserializes
+                # nested json. Make sure to use connection.get_extra_dejson(nested=True) instead of
+                # connection.extra_dejson.
+                with suppress(JSONDecodeError):
+                    proxies = json.loads(proxies)
+                with suppress(Exception):
+                    proxies = literal_eval(proxies)
+            if not isinstance(proxies, dict):
+                raise AirflowConfigException(
+                    f"Proxies must be of type dict, got {type(proxies).__name__} instead!"
+                )
+            return proxies
+        return None
 
     def get_credentials(
         self,
@@ -391,7 +421,7 @@ class KiotaRequestAdapterHook(BaseHook):
         config,
         authority: str | None,
         verify: bool,
-        proxies: dict,
+        proxies: dict | None,
     ) -> ClientCredentialBase:
         tenant_id = config.get("tenant_id") or config.get("tenantId")
         certificate_path = config.get("certificate_path")
@@ -434,6 +464,27 @@ class KiotaRequestAdapterHook(BaseHook):
         except Exception as e:
             return False, str(e)
 
+    @staticmethod
+    def default_pagination(
+        response: dict,
+        url: str | None = None,
+        query_parameters: dict[str, Any] | None = None,
+        responses: Callable[[], list[dict[str, Any]] | None] = lambda: [],
+    ) -> tuple[Any, dict[str, Any] | None]:
+        if isinstance(response, dict):
+            odata_count = response.get("@odata.count")
+            if odata_count and query_parameters:
+                top = query_parameters.get("$top")
+
+                if top and odata_count:
+                    if len(response.get("value", [])) == top:
+                        results = responses()
+                        skip = sum([len(result["value"]) for result in results]) + top if results else top  # type: ignore
+                        query_parameters["$skip"] = skip
+                        return url, query_parameters
+            return response.get("@odata.nextLink"), query_parameters
+        return None, query_parameters
+
     async def run(
         self,
         url: str = "",
@@ -463,6 +514,60 @@ class KiotaRequestAdapterHook(BaseHook):
 
         return response
 
+    async def paginated_run(
+        self,
+        url: str = "",
+        response_type: str | None = None,
+        path_parameters: dict[str, Any] | None = None,
+        method: str = "GET",
+        query_parameters: dict[str, Any] | None = None,
+        headers: dict[str, str] | None = None,
+        data: dict[str, Any] | str | BytesIO | None = None,
+        pagination_function: PaginationCallable | None = None,
+    ):
+        if pagination_function is None:
+            pagination_function = self.default_pagination
+
+        responses: list[dict] = []
+
+        async def run(
+            url: str = "",
+            query_parameters: dict[str, Any] | None = None,
+        ):
+            while url:
+                response = await self.run(
+                    url=url,
+                    response_type=response_type,
+                    path_parameters=path_parameters,
+                    method=method,
+                    query_parameters=query_parameters,
+                    headers=headers,
+                    data=data,
+                )
+
+                if response:
+                    responses.append(response)
+
+                    if pagination_function:
+                        url, query_parameters = execute_callable(
+                            pagination_function,
+                            response=response,
+                            url=url,
+                            response_type=response_type,
+                            path_parameters=path_parameters,
+                            method=method,
+                            query_parameters=query_parameters,
+                            headers=headers,
+                            data=data,
+                            responses=lambda: responses,
+                        )
+                    else:
+                        break
+
+        await run(url=url, query_parameters=query_parameters)
+
+        return responses
+
     async def send_request(self, request_info: RequestInformation, response_type: str | None = None):
         conn = await self.get_async_conn()
 
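Taken together, default_pagination and paginated_run let the hook follow @odata.nextLink links (or $top/$skip windows when @odata.count is present) until no next page is returned. A hedged usage sketch; the connection id and endpoint are illustrative and assume the hook's default Graph host and API version:

    import asyncio

    from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook

    async def fetch_all_users():
        hook = KiotaRequestAdapterHook(conn_id="msgraph_default")
        # Collects one dict per page; default_pagination drives the paging.
        pages = await hook.paginated_run(url="users", query_parameters={"$top": 100})
        return [user for page in pages for user in page.get("value", [])]

    users = asyncio.run(fetch_all_users())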

airflow/providers/microsoft/azure/hooks/powerbi.py

@@ -20,7 +20,7 @@ from __future__ import annotations
 from enum import Enum
 from typing import TYPE_CHECKING, Any
 
-from airflow.
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook
 
 if TYPE_CHECKING:

airflow/providers/microsoft/azure/hooks/synapse.py

@@ -24,8 +24,7 @@ from azure.identity import ClientSecretCredential, DefaultAzureCredential
 from azure.synapse.artifacts import ArtifactsClient
 from azure.synapse.spark import SparkClient
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, AirflowTaskTimeout, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
     get_field,

airflow/providers/microsoft/azure/hooks/wasb.py

@@ -44,8 +44,7 @@ from azure.storage.blob.aio import (
     ContainerClient as AsyncContainerClient,
 )
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.microsoft.azure.utils import (
     add_managed_identity_connection_widgets,
     get_async_default_azure_credential,

airflow/providers/microsoft/azure/operators/batch.py

@@ -23,8 +23,7 @@ from typing import TYPE_CHECKING, Any
 
 from azure.batch import models as batch_models
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
 from airflow.providers.microsoft.azure.hooks.batch import AzureBatchHook
 
 if TYPE_CHECKING:

airflow/providers/microsoft/azure/operators/container_instances.py

@@ -21,26 +21,28 @@ import re
 import time
 from collections import namedtuple
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
 
 from azure.mgmt.containerinstance.models import (
     Container,
     ContainerGroup,
     ContainerGroupDiagnostics,
+    ContainerGroupIdentity,
     ContainerGroupSubnetId,
     ContainerPort,
     DnsConfiguration,
     EnvironmentVariable,
     IpAddress,
+    ResourceIdentityType,
     ResourceRequests,
     ResourceRequirements,
+    UserAssignedIdentities,
     Volume as _AzureVolume,
     VolumeMount,
 )
 from msrestazure.azure_exceptions import CloudError
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, AirflowTaskTimeout, BaseOperator
 from airflow.providers.microsoft.azure.hooks.container_instance import AzureContainerInstanceHook
 from airflow.providers.microsoft.azure.hooks.container_registry import AzureContainerRegistryHook
 from airflow.providers.microsoft.azure.hooks.container_volume import AzureContainerVolumeHook
@@ -102,6 +104,7 @@ class AzureContainerInstancesOperator(BaseOperator):
     :param dns_config: The DNS configuration for a container group.
     :param diagnostics: Container group diagnostic information (Log Analytics).
     :param priority: Container group priority, Possible values include: 'Regular', 'Spot'
+    :param identity: List of User/System assigned identities for the container group.
 
     **Example**::
 
@@ -144,6 +147,15 @@ class AzureContainerInstancesOperator(BaseOperator):
                 }
             },
             priority="Regular",
+            identity = {
+                "type": "UserAssigned" | "SystemAssigned" | "SystemAssigned,UserAssigned",
+                "resource_ids": [
+                    "/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.ManagedIdentity/userAssignedIdentities/<id>"
+                ]
+                "user_assigned_identities": {
+                    "/subscriptions/.../userAssignedIdentities/<id>": {}
+                }
+            }
             command=["/bin/echo", "world"],
             task_id="start_container",
         )
@@ -180,6 +192,7 @@ class AzureContainerInstancesOperator(BaseOperator):
         dns_config: DnsConfiguration | None = None,
         diagnostics: ContainerGroupDiagnostics | None = None,
         priority: str | None = "Regular",
+        identity: ContainerGroupIdentity | dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -222,6 +235,7 @@ class AzureContainerInstancesOperator(BaseOperator):
         self.dns_config = dns_config
         self.diagnostics = diagnostics
         self.priority = priority
+        self.identity = self._ensure_identity(identity)
         if self.priority not in ["Regular", "Spot"]:
             raise AirflowException(
                 "Invalid value for the priority argument. "
@@ -229,6 +243,66 @@ class AzureContainerInstancesOperator(BaseOperator):
             f"Found `{self.priority}`."
         )
 
+    # helper to accept dict (user-friendly) or ContainerGroupIdentity (SDK object)
+    @staticmethod
+    def _ensure_identity(identity: ContainerGroupIdentity | dict | None) -> ContainerGroupIdentity | None:
+        """
+        Normalize identity input into a ContainerGroupIdentity instance.
+
+        Accepts:
+        - None -> returns None
+        - ContainerGroupIdentity -> returned as-is
+        - dict -> converted to ContainerGroupIdentity
+        - any other object -> returned as-is (pass-through) to preserve backwards compatibility
+
+        Expected dict shapes:
+            {"type": "UserAssigned", "resource_ids": ["/.../userAssignedIdentities/id1", ...]}
+        or
+            {"type": "SystemAssigned"}
+        or
+            {"type": "SystemAssigned,UserAssigned", "resource_ids": [...]}
+        """
+        if identity is None:
+            return None
+
+        if isinstance(identity, ContainerGroupIdentity):
+            return identity
+
+        if isinstance(identity, dict):
+            # require type
+            id_type = identity.get("type")
+            if not id_type:
+                raise AirflowException(
+                    "identity dict must include 'type' key with value 'UserAssigned' or 'SystemAssigned'"
+                )
+
+            # map common string type names to ResourceIdentityType enum values if available
+            type_map = {
+                "SystemAssigned": ResourceIdentityType.system_assigned,
+                "UserAssigned": ResourceIdentityType.user_assigned,
+                "SystemAssigned,UserAssigned": ResourceIdentityType.system_assigned_user_assigned,
+                "SystemAssigned, UserAssigned": ResourceIdentityType.system_assigned_user_assigned,
+            }
+            cg_type = type_map.get(id_type, id_type)
+
+            # build user_assigned_identities mapping if resource_ids provided
+            resource_ids = identity.get("resource_ids")
+            if resource_ids:
+                if not isinstance(resource_ids, (list, tuple)):
+                    raise AirflowException("identity['resource_ids'] must be a list of resource id strings")
+                user_assigned_identities: dict[str, Any] = {rid: {} for rid in resource_ids}
+            else:
+                # accept a pre-built mapping if given
+                user_assigned_identities = identity.get("user_assigned_identities") or {}
+
+            return ContainerGroupIdentity(
+                type=cg_type,
+                user_assigned_identities=cast(
+                    "dict[str, UserAssignedIdentities] | None", user_assigned_identities
+                ),
+            )
+        return identity
+
     def execute(self, context: Context) -> int:
         # Check name again in case it was templated.
         self._check_name(self.name)
@@ -304,6 +378,7 @@ class AzureContainerInstancesOperator(BaseOperator):
             dns_config=self.dns_config,
             diagnostics=self.diagnostics,
             priority=self.priority,
+            identity=self.identity,
         )
 
         self._ci_hook.create_or_update(self.resource_group, self.name, container_group)
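With the hunks above, the operator accepts either an SDK ContainerGroupIdentity or the plain-dict shape documented in the docstring; _ensure_identity normalizes the dict at construction time. A hedged DAG-level sketch, with placeholder resource group, image and resource ids:

    from airflow.providers.microsoft.azure.operators.container_instances import (
        AzureContainerInstancesOperator,
    )

    run_container = AzureContainerInstancesOperator(
        task_id="start_container",
        ci_conn_id="azure_default",
        registry_conn_id=None,
        resource_group="my-resource-group",
        name="my-container-group",
        image="mcr.microsoft.com/azuredocs/aci-helloworld",
        region="westeurope",
        # Dict form is normalized by _ensure_identity into a ContainerGroupIdentity.
        identity={
            "type": "UserAssigned",
            "resource_ids": [
                "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                "Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>"
            ],
        },
    )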

airflow/providers/microsoft/azure/operators/data_factory.py

@@ -23,8 +23,13 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
-from airflow.
-
+from airflow.providers.common.compat.sdk import (
+    AirflowException,
+    BaseHook,
+    BaseOperator,
+    BaseOperatorLink,
+    XCom,
+)
 from airflow.providers.microsoft.azure.hooks.data_factory import (
     AzureDataFactoryHook,
     AzureDataFactoryPipelineRunException,

airflow/providers/microsoft/azure/operators/msgraph.py

@@ -20,14 +20,13 @@ from __future__ import annotations
 import warnings
 from collections.abc import Callable, Sequence
 from contextlib import suppress
-from copy import deepcopy
 from typing import (
     TYPE_CHECKING,
     Any,
 )
 
-from airflow.exceptions import
-from airflow.providers.common.compat.sdk import XCOM_RETURN_KEY, BaseOperator
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import XCOM_RETURN_KEY, AirflowException, BaseOperator, TaskDeferred
 from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook
 from airflow.providers.microsoft.azure.triggers.msgraph import (
     MSGraphTrigger,
@@ -247,7 +246,7 @@ class MSGraphAsyncOperator(BaseOperator):
     @classmethod
     def append_result(
         cls,
-        results: Any,
+        results: list[Any],
         result: Any,
         append_result_as_list_if_absent: bool = False,
     ) -> list[Any]:
@@ -312,18 +311,12 @@ class MSGraphAsyncOperator(BaseOperator):
         def paginate(
             operator: MSGraphAsyncOperator, response: dict, **context
         ) -> tuple[Any, dict[str, Any] | None]:
-
-
-
-
-
-
-            if len(response.get("value", [])) == top and context:
-                results = operator.pull_xcom(context)
-                skip = sum([len(result["value"]) for result in results]) + top if results else top
-                query_parameters["$skip"] = skip
-                return operator.url, query_parameters
-            return response.get("@odata.nextLink"), operator.query_parameters
+            return KiotaRequestAdapterHook.default_pagination(
+                response=response,
+                url=operator.url,
+                query_parameters=operator.query_parameters,
+                responses=lambda: operator.pull_xcom(context),
+            )
 
     def trigger_next_link(self, response, method_name: str, context: Context) -> None:
         if isinstance(response, dict):

airflow/providers/microsoft/azure/operators/powerbi.py

@@ -20,8 +20,7 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseOperator, BaseOperatorLink
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, BaseOperatorLink
 from airflow.providers.microsoft.azure.hooks.powerbi import PowerBIHook
 from airflow.providers.microsoft.azure.triggers.powerbi import (
     PowerBIDatasetListTrigger,

airflow/providers/microsoft/azure/operators/synapse.py

@@ -21,8 +21,13 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any
 from urllib.parse import urlencode
 
-from airflow.
-
+from airflow.providers.common.compat.sdk import (
+    AirflowException,
+    BaseHook,
+    BaseOperator,
+    BaseOperatorLink,
+    XCom,
+)
 from airflow.providers.microsoft.azure.hooks.synapse import (
     AzureSynapseHook,
     AzureSynapsePipelineHook,

airflow/providers/microsoft/azure/sensors/data_factory.py

@@ -22,8 +22,7 @@ from functools import cached_property
 from typing import TYPE_CHECKING
 
 from airflow.configuration import conf
-from airflow.
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
 from airflow.providers.microsoft.azure.hooks.data_factory import (
     AzureDataFactoryHook,
     AzureDataFactoryPipelineRunException,

airflow/providers/microsoft/azure/sensors/msgraph.py

@@ -20,8 +20,7 @@ from __future__ import annotations
 from collections.abc import Callable, Sequence
 from typing import TYPE_CHECKING, Any
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
 from airflow.providers.common.compat.standard.triggers import TimeDeltaTrigger
 from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook
 from airflow.providers.microsoft.azure.operators.msgraph import execute_callable

airflow/providers/microsoft/azure/sensors/wasb.py

@@ -22,8 +22,7 @@ from datetime import timedelta
 from typing import TYPE_CHECKING
 
 from airflow.configuration import conf
-from airflow.
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
 from airflow.providers.microsoft.azure.triggers.wasb import WasbBlobSensorTrigger, WasbPrefixSensorTrigger
 

airflow/providers/microsoft/azure/transfers/local_to_adls.py

@@ -19,8 +19,7 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
 from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
 
 if TYPE_CHECKING:

airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py

@@ -29,8 +29,7 @@ from typing import TYPE_CHECKING
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-from airflow.
-from airflow.providers.common.compat.sdk import BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
 from airflow.providers.microsoft.azure.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.sftp.hooks.sftp import SFTPHook

airflow/providers/microsoft/azure/triggers/message_bus.py (new file)

@@ -0,0 +1,223 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import asyncio
+from abc import abstractmethod
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
+
+from asgiref.sync import sync_to_async
+
+from airflow.providers.microsoft.azure.hooks.asb import MessageHook
+
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.triggers.base import BaseEventTrigger, TriggerEvent
+else:
+    from airflow.triggers.base import (  # type: ignore
+        BaseTrigger as BaseEventTrigger,
+        TriggerEvent,
+    )
+
+if TYPE_CHECKING:
+    from azure.servicebus import ServiceBusReceivedMessage
+
+
+class BaseAzureServiceBusTrigger(BaseEventTrigger):
+    """
+    Base trigger for Azure Service Bus message processing.
+
+    This trigger provides common functionality for listening to Azure Service Bus
+    queues and topics/subscriptions. It handles connection management and
+    async message processing.
+
+    :param poll_interval: Time interval between polling operations (seconds)
+    :param azure_service_bus_conn_id: Connection ID for Azure Service Bus
+    :param max_wait_time: Maximum time to wait for messages (seconds)
+    """
+
+    default_conn_name = "azure_service_bus_default"
+    default_max_wait_time = None
+    default_poll_interval = 60
+
+    def __init__(
+        self,
+        poll_interval: float | None = None,
+        azure_service_bus_conn_id: str | None = None,
+        max_wait_time: float | None = None,
+    ) -> None:
+        self.connection_id = (
+            azure_service_bus_conn_id
+            if azure_service_bus_conn_id
+            else BaseAzureServiceBusTrigger.default_conn_name
+        )
+        self.max_wait_time = (
+            max_wait_time if max_wait_time else BaseAzureServiceBusTrigger.default_max_wait_time
+        )
+        self.poll_interval = (
+            poll_interval if poll_interval else BaseAzureServiceBusTrigger.default_poll_interval
+        )
+        self.message_hook = MessageHook(azure_service_bus_conn_id=self.connection_id)
+
+    @abstractmethod
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize the trigger instance."""
+
+    @abstractmethod
+    def run(self) -> AsyncIterator[TriggerEvent]:
+        """Run the trigger logic."""
+
+    @classmethod
+    def _get_message_body(cls, message: ServiceBusReceivedMessage) -> str:
+        message_body = message.body
+        if isinstance(message_body, bytes):
+            return message_body.decode("utf-8")
+        try:
+            return "".join(chunk.decode("utf-8") for chunk in message_body)
+        except Exception:
+            raise TypeError(f"Expected bytes or an iterator of bytes, but got {type(message_body).__name__}")
+
+
+class AzureServiceBusQueueTrigger(BaseAzureServiceBusTrigger):
+    """
+    Trigger for Azure Service Bus Queue message processing.
+
+    This trigger monitors one or more Azure Service Bus queues for incoming messages.
+    When messages arrive, they are processed and yielded as trigger events that can
+    be consumed by downstream tasks.
+
+    Example:
+        >>> trigger = AzureServiceBusQueueTrigger(
+        ...     queues=["queue1", "queue2"],
+        ...     azure_service_bus_conn_id="my_asb_conn",
+        ...     poll_interval=30,
+        ... )
+
+    :param queues: List of queue names to monitor
+    :param poll_interval: Time interval between polling operations (seconds)
+    :param azure_service_bus_conn_id: Connection ID for Azure Service Bus
+    :param max_wait_time: Maximum time to wait for messages (seconds)
+    """
+
+    def __init__(
+        self,
+        queues: list[str],
+        poll_interval: float | None = None,
+        azure_service_bus_conn_id: str | None = None,
+        max_wait_time: float | None = None,
+    ) -> None:
+        super().__init__(poll_interval, azure_service_bus_conn_id, max_wait_time)
+        self.queues = queues
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        return (
+            self.__class__.__module__ + "." + self.__class__.__qualname__,
+            {
+                "azure_service_bus_conn_id": self.connection_id,
+                "queues": self.queues,
+                "poll_interval": self.poll_interval,
+                "max_wait_time": self.max_wait_time,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        read_queue_message_async = sync_to_async(self.message_hook.read_message)
+
+        while True:
+            for queue_name in self.queues:
+                message = await read_queue_message_async(
+                    queue_name=queue_name, max_wait_time=self.max_wait_time
+                )
+                if message:
+                    yield TriggerEvent(
+                        {
+                            "message": BaseAzureServiceBusTrigger._get_message_body(message),
+                            "queue": queue_name,
+                        }
+                    )
+                    break
+            await asyncio.sleep(self.poll_interval)
+
+
+class AzureServiceBusSubscriptionTrigger(BaseAzureServiceBusTrigger):
+    """
+    Trigger for Azure Service Bus Topic Subscription message processing.
+
+    This trigger monitors topic subscriptions for incoming messages. It can handle
+    multiple topics with a single subscription name, processing messages as they
+    arrive and yielding them as trigger events.
+
+    Example:
+        >>> trigger = AzureServiceBusSubscriptionTrigger(
+        ...     topics=["topic1", "topic2"],
+        ...     subscription_name="my-subscription",
+        ...     azure_service_bus_conn_id="my_asb_conn",
+        ... )
+
+    :param topics: List of topic names to monitor
+    :param subscription_name: Name of the subscription to use
+    :param poll_interval: Time interval between polling operations (seconds)
+    :param azure_service_bus_conn_id: Connection ID for Azure Service Bus
+    :param max_wait_time: Maximum time to wait for messages (seconds)
+    """
+
+    def __init__(
+        self,
+        topics: list[str],
+        subscription_name: str,
+        poll_interval: float | None = None,
+        azure_service_bus_conn_id: str | None = None,
+        max_wait_time: float | None = None,
+    ) -> None:
+        super().__init__(poll_interval, azure_service_bus_conn_id, max_wait_time)
+        self.topics = topics
+        self.subscription_name = subscription_name
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        return (
+            self.__class__.__module__ + "." + self.__class__.__qualname__,
+            {
+                "azure_service_bus_conn_id": self.connection_id,
+                "topics": self.topics,
+                "subscription_name": self.subscription_name,
+                "poll_interval": self.poll_interval,
+                "max_wait_time": self.max_wait_time,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        read_subscription_message_async = sync_to_async(self.message_hook.read_subscription_message)
+
+        while True:
+            for topic_name in self.topics:
+                message = await read_subscription_message_async(
+                    topic_name=topic_name,
+                    subscription_name=self.subscription_name,
+                    max_wait_time=self.max_wait_time,
+                )
+                if message:
+                    yield TriggerEvent(
+                        {
+                            "message": BaseAzureServiceBusTrigger._get_message_body(message),
+                            "topic": topic_name,
+                            "subscription": self.subscription_name,
+                        }
+                    )
+                    break
+            await asyncio.sleep(self.poll_interval)
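These triggers are meant to be handed to the triggerer from a deferrable operator or sensor. A minimal, hypothetical deferral sketch (the operator class and connection id below are illustrative, not part of the provider):

    from airflow.providers.common.compat.sdk import BaseOperator
    from airflow.providers.microsoft.azure.triggers.message_bus import AzureServiceBusQueueTrigger

    class WaitForServiceBusMessage(BaseOperator):
        """Hypothetical operator that defers until a message lands on one of the queues."""

        def __init__(self, queues, azure_service_bus_conn_id="azure_service_bus_default", **kwargs):
            super().__init__(**kwargs)
            self.queues = queues
            self.azure_service_bus_conn_id = azure_service_bus_conn_id

        def execute(self, context):
            # Hand control to the triggerer; the trigger polls the queues asynchronously.
            self.defer(
                trigger=AzureServiceBusQueueTrigger(
                    queues=self.queues,
                    azure_service_bus_conn_id=self.azure_service_bus_conn_id,
                    poll_interval=30,
                ),
                method_name="execute_complete",
            )

        def execute_complete(self, context, event):
            # event carries the decoded message body and the queue it came from.
            return event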

{apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-microsoft-azure
-Version: 12.
+Version: 12.10.0
 Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
 Keywords: airflow-provider,microsoft.azure,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
-Requires-Dist: apache-airflow>=2.
-Requires-Dist: apache-airflow-providers-common-compat>=1.
+Requires-Dist: apache-airflow>=2.11.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1
 Requires-Dist: adlfs>=2023.10.0
 Requires-Dist: azure-batch>=8.0.0
 Requires-Dist: azure-cosmos>=4.6.0
@@ -57,8 +57,8 @@ Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
 Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.10.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.10.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -93,7 +93,7 @@ Provides-Extra: sftp
 
 Package ``apache-airflow-providers-microsoft-azure``
 
-Release: ``12.
+Release: ``12.10.0``
 
 
 `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -106,7 +106,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
 are in ``airflow.providers.microsoft.azure`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.10.0/>`_.
 
 Installation
 ------------
@@ -123,8 +123,8 @@ Requirements
 ==========================================  ===========================
 PIP package                                 Version required
 ==========================================  ===========================
-``apache-airflow``                          ``>=2.
-``apache-airflow-providers-common-compat``  ``>=1.
+``apache-airflow``                          ``>=2.11.0``
+``apache-airflow-providers-common-compat``  ``>=1.10.1``
 ``adlfs``                                   ``>=2023.10.0``
 ``azure-batch``                             ``>=8.0.0``
 ``azure-cosmos``                            ``>=4.6.0``
@@ -190,5 +190,5 @@ Extra Dependencies
 =================  ==========================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.10.0/changelog.html>`_.
 

{apache_airflow_providers_microsoft_azure-12.8.1rc1.dist-info → apache_airflow_providers_microsoft_azure-12.10.0.dist-info}/RECORD

@@ -1,61 +1,62 @@
-airflow/providers/microsoft/azure/__init__.py,sha256=
-airflow/providers/microsoft/azure/get_provider_info.py,sha256=
+airflow/providers/microsoft/azure/__init__.py,sha256=JkV2NMHj7mJBzhARBL7n8ppjH5ibzcEZ4KbKAMIG3Ww,1506
+airflow/providers/microsoft/azure/get_provider_info.py,sha256=dqA6iXe7BjeB42zxc_j1vHpOdqIjhMIFn24sr-ImgLs,19614
 airflow/providers/microsoft/azure/utils.py,sha256=KU9vHQRUhqTbC30GvmuZbL8rPBAziemM1oaT4rZp6K8,9015
 airflow/providers/microsoft/azure/version_compat.py,sha256=JRhqYf4UklO3xwmtCRPSXLQqq0yD1pCts3bhwxgVC0s,1670
 airflow/providers/microsoft/azure/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/fs/adls.py,sha256=-lxaL4xYduRL8hQ27ERTCWugI29WiWwLvSTdxfrhzB8,3924
 airflow/providers/microsoft/azure/fs/msgraph.py,sha256=6bjBjo1J04mksMe4-s1OCSt6hJ6eW4uaUcm0RnDeGv4,3779
 airflow/providers/microsoft/azure/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/microsoft/azure/hooks/adx.py,sha256=
-airflow/providers/microsoft/azure/hooks/asb.py,sha256=
-airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=
-airflow/providers/microsoft/azure/hooks/batch.py,sha256=
-airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=
+airflow/providers/microsoft/azure/hooks/adx.py,sha256=NADc_tBMIYM4t6RF7ecchlmX1a6XVeN8uEMBQ3XoGAo,9634
+airflow/providers/microsoft/azure/hooks/asb.py,sha256=7Dd0BLy5HqYoyVuNo4bxc6XGLmdC-0m95gDqc82K7sI,32806
+airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=fmwJrnTet0RcX7ZPyYa55eBonCI3srVcQ398dZ-rk00,8722
+airflow/providers/microsoft/azure/hooks/batch.py,sha256=iaSwX40ANbqNZDq1qIFQ3ceUcNVpnNj4Lm6wVOLrc_w,16035
+airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=ncz7F7XcJe5ENLtmktvk4Z01t8C0u8lCmvZAW1iFBEE,7218
 airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=8tPVX_fsiIV3Y2dTSry1pkXyWhpu1OrEwaFRuNCZI5Q,4926
 airflow/providers/microsoft/azure/hooks/container_volume.py,sha256=FIe3V0Y9z8yD-vbvzStY8w2QRenrYifh3msyhl3R8FU,5794
-airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=
-airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=
-airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=
+airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=cb84MBKJ7uA96_KFXvteAIjPaBerwsYyVXOdjlJ2BA4,17486
+airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=pYAhRFMcgiuCRynMon3NJNdQlBbPGhlxHsjIXZRVWxI,44899
+airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=CjkshIUiVTMnMpGEUbuKa-AdJghmq8RXoX4ukTnh3-I,23845
 airflow/providers/microsoft/azure/hooks/fileshare.py,sha256=NL-ZqhsENEQar66S-QaVAytqs7ZEImzf3YDUMnKzJZ0,10777
-airflow/providers/microsoft/azure/hooks/msgraph.py,sha256=
-airflow/providers/microsoft/azure/hooks/powerbi.py,sha256=
-airflow/providers/microsoft/azure/hooks/synapse.py,sha256=
-airflow/providers/microsoft/azure/hooks/wasb.py,sha256=
+airflow/providers/microsoft/azure/hooks/msgraph.py,sha256=cN1orxYofDzSwKY27ejeXm2dSAi4uX9zCurjs24JkgY,27130
+airflow/providers/microsoft/azure/hooks/powerbi.py,sha256=2kBqwenmnzz8h05ZCqOZh0sztQIcz78S6jetN4-VnJ8,9795
+airflow/providers/microsoft/azure/hooks/synapse.py,sha256=91HdO528UHDrZ2GqNZockeOjqbdRl_uL22NoDBWV7NA,16034
+airflow/providers/microsoft/azure/hooks/wasb.py,sha256=at2RFKV6RpY6Qn5E2snkXXN3FSXM6CGdzMDT1oScGLI,32839
 airflow/providers/microsoft/azure/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/log/wasb_task_handler.py,sha256=Kgh-_GJ15GHlcCdPZdCAshKe92cqgHFvMpOae_RVaHs,10173
 airflow/providers/microsoft/azure/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/operators/adls.py,sha256=4nryg1p7bMi2B41YrmDhHE873_miu1kIf0wd1a3bu2k,5837
 airflow/providers/microsoft/azure/operators/adx.py,sha256=FFWvsXdtvzSy4JW3mTJv94Ia87Jkm10gCjoIq8_NttM,3176
 airflow/providers/microsoft/azure/operators/asb.py,sha256=xvmXtd42ktycmSz7Hx-V5wKl9Ca8msaZ68HWtWzKM4Q,30498
-airflow/providers/microsoft/azure/operators/batch.py,sha256=
-airflow/providers/microsoft/azure/operators/container_instances.py,sha256=
+airflow/providers/microsoft/azure/operators/batch.py,sha256=s3f_26P8H4FyCip4WCkYGebi0seoe0_MtmByFGjKrX8,16261
+airflow/providers/microsoft/azure/operators/container_instances.py,sha256=TLHSwjwUqSUjclRAIdx26CeHPOnE1sSAlSh2OM2V3vk,22044
 airflow/providers/microsoft/azure/operators/cosmos.py,sha256=paMVck5RYiVQFd0N59HJWDS2nRQUuhK2rfNXBl8heQw,2835
-airflow/providers/microsoft/azure/operators/data_factory.py,sha256=
-airflow/providers/microsoft/azure/operators/msgraph.py,sha256=
-airflow/providers/microsoft/azure/operators/powerbi.py,sha256=
-airflow/providers/microsoft/azure/operators/synapse.py,sha256=
+airflow/providers/microsoft/azure/operators/data_factory.py,sha256=0k2ZOu8ILlCUVt_dk0Qk9A_2viK4AII8gZV4lL6fjwE,12454
+airflow/providers/microsoft/azure/operators/msgraph.py,sha256=VRu6kA9jZcrBiB0IVcP37g7nmF3lg4tZ-lstkajal3M,13741
+airflow/providers/microsoft/azure/operators/powerbi.py,sha256=Zq299v0L0ZPHNwcL6Us_EI_e4Zn_dEpBFFQIgo89Fg4,11591
+airflow/providers/microsoft/azure/operators/synapse.py,sha256=ioaDwiu1a4d-KTMukfLaSGRjgO84O-jNsd1xBUwnyP0,12404
 airflow/providers/microsoft/azure/operators/wasb_delete_blob.py,sha256=mnwK_lFeuTp2SP2RExc0fiCzXAA9WXnGDBsmoKwNoH8,2735
 airflow/providers/microsoft/azure/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/secrets/key_vault.py,sha256=zbgjqiQo-cWu1N6QD7gFXRGSbmHw4xgAN-TiCovKYk4,9051
 airflow/providers/microsoft/azure/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/sensors/cosmos.py,sha256=cd7bNdjtNrtEKFZpglnrjfwzU_iLBuc7IaXtFzRdFTo,2679
-airflow/providers/microsoft/azure/sensors/data_factory.py,sha256=
-airflow/providers/microsoft/azure/sensors/msgraph.py,sha256=
-airflow/providers/microsoft/azure/sensors/wasb.py,sha256=
+airflow/providers/microsoft/azure/sensors/data_factory.py,sha256=eDRaJx2Kl6BWIGP37LNej9MfcwAahu4dE68cB_sct08,4999
+airflow/providers/microsoft/azure/sensors/msgraph.py,sha256=adZBWXhGj_MpbV25adoBW0e-vd8ZMJYtKMw9quvepHA,7716
+airflow/providers/microsoft/azure/sensors/wasb.py,sha256=h0me65vuxSppKHoKP4_t93mf-CiawMDjzmfS0cy7C1s,7367
 airflow/providers/microsoft/azure/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/microsoft/azure/transfers/local_to_adls.py,sha256=
+airflow/providers/microsoft/azure/transfers/local_to_adls.py,sha256=frMrPQ2YIbi5q7lkX-91_HG2YNIDEmvc-8Oe_30UaC0,4194
 airflow/providers/microsoft/azure/transfers/local_to_wasb.py,sha256=LTagjkM3mx5-Grc73--VONw9fOuga71TT9w1DHUopGk,2984
 airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py,sha256=pjvHXwdZA7DwWNae7tAj_FSbvhYg1JYjSr4AZTRcEw4,4537
 airflow/providers/microsoft/azure/transfers/s3_to_wasb.py,sha256=PHGAGtQLTfKkXfkxvKRoa7CPZZaPP_z_p4Cvhzmcdpw,12510
-airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=
+airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=uJ7c81GMfuc2nA45zufNbRN9pGebbulgHuqSvB4IFIc,8400
 airflow/providers/microsoft/azure/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/triggers/data_factory.py,sha256=aj4PWk-hSBe1-EWBzkIH3WvC5vve9lci1OgKYjKQ3FU,11149
+airflow/providers/microsoft/azure/triggers/message_bus.py,sha256=jNoswC4R4LrQ2I1hN5JnYSbJvmv9j0rmrenu8wfw-OE,8485
 airflow/providers/microsoft/azure/triggers/msgraph.py,sha256=8Ke7jzmvld8uzBN1sxgs4xjzMXvxPYXWtLemyZ8lP9U,8542
 airflow/providers/microsoft/azure/triggers/powerbi.py,sha256=z9LJ-PfgWstt7c2eIJX5LOGUicfdMv2EELhCsrCJ6Nk,16371
 airflow/providers/microsoft/azure/triggers/wasb.py,sha256=RF-C6iqDEs6_pWireCWZXqxcqWK-sFJ695Okdd_EJOA,7456
-apache_airflow_providers_microsoft_azure-12.
-apache_airflow_providers_microsoft_azure-12.
-apache_airflow_providers_microsoft_azure-12.
-apache_airflow_providers_microsoft_azure-12.
-apache_airflow_providers_microsoft_azure-12.
-apache_airflow_providers_microsoft_azure-12.
+apache_airflow_providers_microsoft_azure-12.10.0.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
+apache_airflow_providers_microsoft_azure-12.10.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_microsoft_azure-12.10.0.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_microsoft_azure-12.10.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_microsoft_azure-12.10.0.dist-info/METADATA,sha256=fCUql10zUpc_BlZNRc4-85bqwAJE1bOhyR1s4cXOiSU,9381
+apache_airflow_providers_microsoft_azure-12.10.0.dist-info/RECORD,,

The remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, licenses/NOTICE) are unchanged; only their dist-info directory name changed with the version.