apache-airflow-providers-microsoft-azure 12.2.2rc1__py3-none-any.whl → 12.3.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/microsoft/azure/__init__.py +1 -1
- airflow/providers/microsoft/azure/get_provider_info.py +8 -7
- airflow/providers/microsoft/azure/hooks/asb.py +35 -16
- airflow/providers/microsoft/azure/hooks/container_instance.py +2 -2
- airflow/providers/microsoft/azure/hooks/data_factory.py +2 -2
- airflow/providers/microsoft/azure/hooks/data_lake.py +1 -2
- airflow/providers/microsoft/azure/hooks/msgraph.py +34 -20
- airflow/providers/microsoft/azure/hooks/powerbi.py +1 -0
- airflow/providers/microsoft/azure/hooks/wasb.py +1 -2
- airflow/providers/microsoft/azure/log/wasb_task_handler.py +109 -68
- airflow/providers/microsoft/azure/operators/asb.py +15 -1
- airflow/providers/microsoft/azure/operators/container_instances.py +7 -1
- airflow/providers/microsoft/azure/operators/msgraph.py +2 -4
- airflow/providers/microsoft/azure/sensors/msgraph.py +2 -4
- airflow/providers/microsoft/azure/triggers/msgraph.py +2 -4
- {apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info → apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info}/METADATA +16 -16
- {apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info → apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info}/RECORD +19 -19
- {apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info → apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info → apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/microsoft/azure/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "12.2.2"
+__version__ = "12.3.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
airflow/providers/microsoft/azure/get_provider_info.py

@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Microsoft Azure",
         "description": "`Microsoft Azure <https://azure.microsoft.com/>`__\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1743836331,
         "versions": [
+            "12.3.0",
             "12.2.2",
             "12.2.1",
             "12.2.0",
@@ -482,11 +483,11 @@ def get_provider_info():
            "azure-mgmt-datafactory>=2.0.0",
            "azure-mgmt-containerregistry>=8.0.0",
            "azure-mgmt-containerinstance>=10.1.0",
-           "msgraph-core>=1.
-           "microsoft-kiota-http>=1.
-           "microsoft-kiota-serialization-json
-           "microsoft-kiota-serialization-text
-           "microsoft-kiota-abstractions
+           "msgraph-core>=1.3.3",
+           "microsoft-kiota-http>=1.8.0,<2.0.0",
+           "microsoft-kiota-serialization-json>=1.8.0",
+           "microsoft-kiota-serialization-text>=1.8.0",
+           "microsoft-kiota-abstractions>=1.8.0,<2.0.0",
            "msal-extensions>=1.1.0",
        ],
        "optional-dependencies": {
@@ -495,5 +496,5 @@ def get_provider_info():
            "oracle": ["apache-airflow-providers-oracle"],
            "sftp": ["apache-airflow-providers-sftp"],
        },
-       "devel-dependencies": ["pywinrm>=0.5.0"],
+       "devel-dependencies": ["pywinrm>=0.5.0", "moto>=5.1.2"],
    }
airflow/providers/microsoft/azure/hooks/asb.py

@@ -17,7 +17,7 @@
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any, Callable
-from uuid import uuid4
+from uuid import UUID, uuid4
 
 from azure.core.exceptions import ResourceNotFoundError
 from azure.servicebus import (
@@ -468,7 +468,15 @@ class MessageHook(BaseAzureServiceBusHook):
         self.log.info("Create and returns ServiceBusClient")
         return client
 
-    def send_message(self, queue_name: str, messages: str | list[str], batch_message_flag: bool = False):
+    def send_message(
+        self,
+        queue_name: str,
+        messages: str | list[str],
+        batch_message_flag: bool = False,
+        message_id: str | None = None,
+        reply_to: str | None = None,
+        message_headers: dict[str | bytes, int | float | bytes | bool | str | UUID] | None = None,
+    ):
         """
         Use ServiceBusClient Send to send message(s) to a Service Bus Queue.
 
@@ -478,38 +486,49 @@ class MessageHook(BaseAzureServiceBusHook):
         :param messages: Message which needs to be sent to the queue. It can be string or list of string.
         :param batch_message_flag: bool flag, can be set to True if message needs to be
             sent as batch message.
+        :param message_id: Message ID to set on message being sent to the queue. Please note, message_id may only be
+            set when a single message is sent.
+        :param reply_to: Reply to which needs to be sent to the queue.
+        :param message_headers: Headers to add to the message's application_properties field for Azure Service Bus.
         """
         if queue_name is None:
             raise TypeError("Queue name cannot be None.")
         if not messages:
             raise ValueError("Messages list cannot be empty.")
+        if message_id and isinstance(messages, list) and len(messages) != 1:
+            raise TypeError("Message ID can only be set if a single message is sent.")
         with (
             self.get_conn() as service_bus_client,
             service_bus_client.get_queue_sender(queue_name=queue_name) as sender,
             sender,
         ):
-            if isinstance(messages, str):
-                if not batch_message_flag:
-                    msg = ServiceBusMessage(messages)
-                    sender.send_messages(msg)
-                else:
-                    self.send_batch_message(sender, [messages])
+            message_creator = lambda msg_body: ServiceBusMessage(
+                msg_body, message_id=message_id, reply_to=reply_to, application_properties=message_headers
+            )
+            message_list = [messages] if isinstance(messages, str) else messages
+            if not batch_message_flag:
+                self.send_list_messages(sender, message_list, message_creator)
             else:
-                if not batch_message_flag:
-                    self.send_list_messages(sender, messages)
-                else:
-                    self.send_batch_message(sender, messages)
+                self.send_batch_message(sender, message_list, message_creator)
 
     @staticmethod
-    def send_list_messages(sender: ServiceBusSender, messages: list[str]):
-        list_messages = [ServiceBusMessage(message) for message in messages]
+    def send_list_messages(
+        sender: ServiceBusSender,
+        messages: list[str],
+        message_creator: Callable[[str], ServiceBusMessage],
+    ):
+        list_messages = [message_creator(body) for body in messages]
         sender.send_messages(list_messages)  # type: ignore[arg-type]
 
     @staticmethod
-    def send_batch_message(sender: ServiceBusSender, messages: list[str]):
+    def send_batch_message(
+        sender: ServiceBusSender,
+        messages: list[str],
+        message_creator: Callable[[str], ServiceBusMessage],
+    ):
         batch_message = sender.create_message_batch()
         for message in messages:
-            batch_message.add_message(ServiceBusMessage(message))
+            batch_message.add_message(message_creator(message))
         sender.send_messages(batch_message)
 
     def receive_message(
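For context, a minimal sketch of the extended `send_message` signature, assuming an Airflow connection named `azure_service_bus_default` and an existing queue (the queue name, message ID, reply-to target and header values below are illustrative, not taken from the diff):

```python
from airflow.providers.microsoft.azure.hooks.asb import MessageHook

hook = MessageHook(azure_service_bus_conn_id="azure_service_bus_default")
hook.send_message(
    queue_name="my-queue",            # illustrative queue name
    messages="hello world",           # a single message, so message_id is allowed
    batch_message_flag=False,
    message_id="order-42",            # raises TypeError when set with a multi-message list
    reply_to="replies-queue",
    message_headers={"priority": 5},  # ends up in ServiceBusMessage.application_properties
)
```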
airflow/providers/microsoft/azure/hooks/container_instance.py

@@ -85,7 +85,7 @@ class AzureContainerInstanceHook(AzureBaseHook):
         if all([conn.login, conn.password, tenant]):
             self.log.info("Getting connection using specific credentials and subscription_id.")
             credential = ClientSecretCredential(
-                client_id=conn.login, client_secret=conn.password, tenant_id=cast(str, tenant)
+                client_id=conn.login, client_secret=conn.password, tenant_id=cast("str", tenant)
             )
         else:
             self.log.info("Using DefaultAzureCredential as credential")
@@ -96,7 +96,7 @@ class AzureContainerInstanceHook(AzureBaseHook):
                 workload_identity_tenant_id=workload_identity_tenant_id,
             )
 
-        subscription_id = cast(str, conn.extra_dejson.get("subscriptionId"))
+        subscription_id = cast("str", conn.extra_dejson.get("subscriptionId"))
         return ContainerInstanceManagementClient(
             credential=credential,
             subscription_id=subscription_id,
airflow/providers/microsoft/azure/hooks/data_factory.py

@@ -1104,7 +1104,7 @@ def provide_targeted_factory_async(func: T) -> T:
 
         return await func(*bound_args.args, **bound_args.kwargs)
 
-    return cast(T, wrapper)
+    return cast("T", wrapper)
 
 
 class AzureDataFactoryAsyncHook(AzureDataFactoryHook):
@@ -1193,7 +1193,7 @@ class AzureDataFactoryAsyncHook(AzureDataFactoryHook):
         :param factory_name: The factory name.
         """
         pipeline_run = await self.get_pipeline_run(run_id, resource_group_name, factory_name)
-        status: str = cast(str, pipeline_run.status)
+        status: str = cast("str", pipeline_run.status)
         return status
 
     @provide_targeted_factory_async
airflow/providers/microsoft/azure/hooks/data_lake.py

@@ -328,8 +328,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
         prefix = "extra__adls__"
         if field_name.startswith("extra__"):
             raise ValueError(
-                f"Got prefixed name {field_name}; please remove the '{prefix}' prefix "
-                f"when using this method."
+                f"Got prefixed name {field_name}; please remove the '{prefix}' prefix when using this method."
             )
         if field_name in extra_dict:
             return extra_dict[field_name] or None
airflow/providers/microsoft/azure/hooks/msgraph.py

@@ -23,12 +23,12 @@ from contextlib import suppress
 from http import HTTPStatus
 from io import BytesIO
 from json import JSONDecodeError
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
 from urllib.parse import quote, urljoin, urlparse
 
 import httpx
 from azure.identity import CertificateCredential, ClientSecretCredential
-from httpx import AsyncHTTPTransport, Timeout
+from httpx import AsyncHTTPTransport, Response, Timeout
 from kiota_abstractions.api_error import APIError
 from kiota_abstractions.method import Method
 from kiota_abstractions.request_information import RequestInformation
@@ -55,10 +55,8 @@ from airflow.hooks.base import BaseHook
 if TYPE_CHECKING:
     from azure.identity._internal.client_credential_base import ClientCredentialBase
     from kiota_abstractions.request_adapter import RequestAdapter
-    from kiota_abstractions.request_information import QueryParams
     from kiota_abstractions.response_handler import NativeResponseType
     from kiota_abstractions.serialization import ParsableFactory
-    from kiota_http.httpx_request_adapter import ResponseType
 
     from airflow.models import Connection
 
@@ -67,7 +65,7 @@ class DefaultResponseHandler(ResponseHandler):
     """DefaultResponseHandler returns JSON payload or content in bytes or response headers."""
 
     @staticmethod
-    def get_value(response:
+    def get_value(response: Response) -> Any:
         with suppress(JSONDecodeError):
             return response.json()
         content = response.content
@@ -76,7 +74,7 @@ class DefaultResponseHandler(ResponseHandler):
         return content
 
     async def handle_response_async(
-        self, response: NativeResponseType, error_map: dict[str, ParsableFactory
+        self, response: NativeResponseType, error_map: dict[str, ParsableFactory] | None
     ) -> Any:
         """
         Invoke this callback method when a response is received.
@@ -84,10 +82,11 @@ class DefaultResponseHandler(ResponseHandler):
         param response: The type of the native response object.
         param error_map: The error dict to use in case of a failed request.
         """
-        value = self.get_value(response)
-        if response.status_code not in {200, 201, 202, 204, 302}:
-            message = value or response.reason_phrase
-            status_code = HTTPStatus(response.status_code)
+        resp: Response = cast("Response", response)
+        value = self.get_value(resp)
+        if resp.status_code not in {200, 201, 202, 204, 302}:
+            message = value or resp.reason_phrase
+            status_code = HTTPStatus(resp.status_code)
         if status_code == HTTPStatus.BAD_REQUEST:
             raise AirflowBadRequest(message)
         elif status_code == HTTPStatus.NOT_FOUND:
@@ -391,16 +390,16 @@ class KiotaRequestAdapterHook(BaseHook):
     async def run(
         self,
         url: str = "",
-        response_type: ResponseType | None = None,
+        response_type: str | None = None,
         path_parameters: dict[str, Any] | None = None,
         method: str = "GET",
-        query_parameters: dict[str, QueryParams] | None = None,
+        query_parameters: dict[str, Any] | None = None,
         headers: dict[str, str] | None = None,
         data: dict[str, Any] | str | BytesIO | None = None,
     ):
         self.log.info("Executing url '%s' as '%s'", url, method)
 
-        response = await self.get_conn().send_primitive_async(
+        response = await self.send_request(
             request_info=self.request_information(
                 url=url,
                 response_type=response_type,
@@ -411,20 +410,31 @@ class KiotaRequestAdapterHook(BaseHook):
                 data=data,
             ),
             response_type=response_type,
-            error_map=self.error_mapping(),
         )
 
         self.log.debug("response: %s", response)
 
         return response
 
+    async def send_request(self, request_info: RequestInformation, response_type: str | None = None):
+        if response_type:
+            return await self.get_conn().send_primitive_async(
+                request_info=request_info,
+                response_type=response_type,
+                error_map=self.error_mapping(),
+            )
+        return await self.get_conn().send_no_response_content_async(
+            request_info=request_info,
+            error_map=self.error_mapping(),
+        )
+
     def request_information(
         self,
         url: str,
-        response_type: ResponseType | None = None,
+        response_type: str | None = None,
         path_parameters: dict[str, Any] | None = None,
         method: str = "GET",
-        query_parameters: dict[str, QueryParams] | None = None,
+        query_parameters: dict[str, Any] | None = None,
         headers: dict[str, str] | None = None,
         data: dict[str, Any] | str | BytesIO | None = None,
     ) -> RequestInformation:
@@ -446,8 +456,12 @@ class KiotaRequestAdapterHook(BaseHook):
         headers = {**self.DEFAULT_HEADERS, **headers} if headers else self.DEFAULT_HEADERS
         for header_name, header_value in headers.items():
             request_information.headers.try_add(header_name=header_name, header_value=header_value)
-        if isinstance(data, BytesIO)
+        if isinstance(data, BytesIO):
+            request_information.content = data.read()
+        elif isinstance(data, bytes):
             request_information.content = data
+        elif isinstance(data, str):
+            request_information.content = data.encode("utf-8")
         elif data:
             request_information.headers.try_add(
                 header_name=RequestInformation.CONTENT_TYPE_HEADER, header_value="application/json"
@@ -468,8 +482,8 @@ class KiotaRequestAdapterHook(BaseHook):
         return {}
 
     @staticmethod
-    def error_mapping() -> dict[str, ParsableFactory
+    def error_mapping() -> dict[str, type[ParsableFactory]]:
         return {
-            "4XX": APIError,
-            "5XX": APIError,
+            "4XX": APIError,  # type: ignore
+            "5XX": APIError,  # type: ignore
         }
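A short sketch of the reworked request path: `run()` now delegates to the new `send_request()` helper, which calls `send_primitive_async` when a `response_type` string is given and `send_no_response_content_async` otherwise. The connection id, URL and event-loop handling below are illustrative assumptions:

```python
import asyncio

from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook


async def fetch_users():
    hook = KiotaRequestAdapterHook(conn_id="msgraph_default")  # assumed connection name
    # response_type is now a plain string (or None), no longer a kiota ResponseType
    return await hook.run(url="users", method="GET")


users = asyncio.run(fetch_users())
```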
airflow/providers/microsoft/azure/hooks/wasb.py

@@ -144,8 +144,7 @@ class WasbHook(BaseHook):
         prefix = "extra__wasb__"
         if field_name.startswith("extra__"):
             raise ValueError(
-                f"Got prefixed name {field_name}; please remove the '{prefix}' prefix "
-                f"when using this method."
+                f"Got prefixed name {field_name}; please remove the '{prefix}' prefix when using this method."
             )
         if field_name in extra_dict:
             return extra_dict[field_name] or None
airflow/providers/microsoft/azure/log/wasb_task_handler.py

@@ -23,6 +23,7 @@ from functools import cached_property
 from pathlib import Path
 from typing import TYPE_CHECKING
 
+import attrs
 from azure.core.exceptions import HttpResponseError
 
 from airflow.configuration import conf
@@ -34,34 +35,36 @@ if TYPE_CHECKING:
     import logging
 
     from airflow.models.taskinstance import TaskInstance
+    from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
+    from airflow.utils.log.file_task_handler import LogMessages, LogSourceInfo
 
 
-class WasbTaskHandler(FileTaskHandler, LoggingMixin):
-    """
-    WasbTaskHandler is a python log handler that handles and reads task instance logs.
+@attrs.define
+class WasbRemoteLogIO(LoggingMixin):  # noqa: D101
+    remote_base: str
+    base_log_folder: Path = attrs.field(converter=Path)
+    delete_local_copy: bool
 
-    It extends airflow FileTaskHandler and uploads to and reads from Wasb remote storage.
-    """
+    wasb_container: str
 
-    trigger_should_wrap = True
+    processors = ()
 
-    def __init__(
-        self,
-        base_log_folder: str,
-        wasb_log_folder: str,
-        wasb_container: str,
-        **kwargs,
-    ) -> None:
-        super().__init__(base_log_folder)
-        self.handler: logging.FileHandler | None = None
-        self.wasb_container = wasb_container
-        self.remote_base = wasb_log_folder
-        self.log_relative_path = ""
-        self.closed = False
-        self.upload_on_close = True
-        self.delete_local_copy = kwargs.get(
-            "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
-        )
+    def upload(self, path: str | os.PathLike, ti: RuntimeTI):
+        """Upload the given log path to the remote storage."""
+        path = Path(path)
+        if path.is_absolute():
+            local_loc = path
+            remote_loc = os.path.join(self.remote_base, path.relative_to(self.base_log_folder))
+        else:
+            local_loc = self.base_log_folder.joinpath(path)
+            remote_loc = os.path.join(self.remote_base, path)
+
+        if local_loc.is_file():
+            # read log and remove old logs to get just the latest additions
+            log = local_loc.read_text()
+            has_uploaded = self.write(log, remote_loc)
+            if has_uploaded and self.delete_local_copy:
+                shutil.rmtree(os.path.dirname(local_loc))
 
     @cached_property
     def hook(self):
@@ -81,53 +84,13 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
         )
         return None
 
-    def set_context(self, ti: TaskInstance, *, identifier: str | None = None) -> None:
-        super().set_context(ti, identifier=identifier)
-        # Local location and remote location is needed to open and
-        # upload local log file to Wasb remote storage.
-        if TYPE_CHECKING:
-            assert self.handler is not None
-
-        full_path = self.handler.baseFilename
-        self.log_relative_path = Path(full_path).relative_to(self.local_base).as_posix()
-        is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
-        self.upload_on_close = is_trigger_log_context or not getattr(ti, "raw", None)
-
-    def close(self) -> None:
-        """Close and upload local log file to remote storage Wasb."""
-        # When application exit, system shuts down all handlers by
-        # calling close method. Here we check if logger is already
-        # closed to prevent uploading the log to remote storage multiple
-        # times when `logging.shutdown` is called.
-        if self.closed:
-            return
-
-        super().close()
-
-        if not self.upload_on_close:
-            return
-
-        local_loc = os.path.join(self.local_base, self.log_relative_path)
-        remote_loc = os.path.join(self.remote_base, self.log_relative_path)
-        if os.path.exists(local_loc):
-            # read log and remove old logs to get just the latest additions
-            with open(local_loc) as logfile:
-                log = logfile.read()
-            wasb_write = self.wasb_write(log, remote_loc, append=True)
-
-            if wasb_write and self.delete_local_copy:
-                shutil.rmtree(os.path.dirname(local_loc))
-            # Mark closed so we don't double write if close is called twice
-            self.closed = True
-
-    def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[list[str], list[str]]:
+    def read(self, relative_path, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages | None]:
         messages = []
         logs = []
-        worker_log_relative_path = self._render_filename(ti, try_number)
         # TODO: fix this - "relative path" i.e currently REMOTE_BASE_LOG_FOLDER should start with "wasb"
         # unlike others with shceme in URL itself to identify the correct handler.
        # This puts limitations on ways users can name the base_path.
-        prefix = os.path.join(self.remote_base, worker_log_relative_path)
+        prefix = os.path.join(self.remote_base, relative_path)
         blob_names = []
         try:
             blob_names = self.hook.get_blobs_list(container_name=self.wasb_container, prefix=prefix)
@@ -143,8 +106,7 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
             else:
                 messages.extend(["Found remote logs:", *[f" * {x}" for x in sorted(uris)]])
         else:
-            if not AIRFLOW_V_3_0_PLUS:
-                messages.append(f"No logs found in WASB; ti={ti}")
+            return messages, None
 
         for name in sorted(blob_names):
             remote_log = ""
@@ -191,7 +153,7 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
                 return msg
         return ""
 
-    def wasb_write(self, log: str, remote_log_location: str, append: bool = True) -> bool:
+    def write(self, log: str, remote_log_location: str, append: bool = True) -> bool:
         """
         Write the log to the remote_log_location. Fails silently if no hook was created.
 
@@ -210,3 +172,82 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
             self.log.exception("Could not write logs to %s", remote_log_location)
             return False
         return True
+
+
+class WasbTaskHandler(FileTaskHandler, LoggingMixin):
+    """
+    WasbTaskHandler is a python log handler that handles and reads task instance logs.
+
+    It extends airflow FileTaskHandler and uploads to and reads from Wasb remote storage.
+    """
+
+    trigger_should_wrap = True
+
+    def __init__(
+        self,
+        base_log_folder: str,
+        wasb_log_folder: str,
+        wasb_container: str,
+        **kwargs,
+    ) -> None:
+        super().__init__(base_log_folder)
+        self.handler: logging.FileHandler | None = None
+        self.log_relative_path = ""
+        self.closed = False
+        self.upload_on_close = True
+        self.io = WasbRemoteLogIO(
+            base_log_folder=base_log_folder,
+            remote_base=wasb_log_folder,
+            wasb_container=wasb_container,
+            delete_local_copy=kwargs.get(
+                "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
+            ),
+        )
+
+    def set_context(self, ti: TaskInstance, *, identifier: str | None = None) -> None:
+        super().set_context(ti, identifier=identifier)
+        # Local location and remote location is needed to open and
+        # upload local log file to Wasb remote storage.
+        if TYPE_CHECKING:
+            assert self.handler is not None
+
+        self.ti = ti
+        full_path = self.handler.baseFilename
+        self.log_relative_path = Path(full_path).relative_to(self.local_base).as_posix()
+        is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
+        self.upload_on_close = is_trigger_log_context or not getattr(ti, "raw", None)
+
+    def close(self) -> None:
+        """Close and upload local log file to remote storage Wasb."""
+        # When application exit, system shuts down all handlers by
+        # calling close method. Here we check if logger is already
+        # closed to prevent uploading the log to remote storage multiple
+        # times when `logging.shutdown` is called.
+        if self.closed:
+            return
+
+        super().close()
+
+        if not self.upload_on_close:
+            return
+
+        if hasattr(self, "ti"):
+            self.io.upload(self.log_relative_path, self.ti)
+
+        # Mark closed so we don't double write if close is called twice
+        self.closed = True
+
+    def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[LogSourceInfo, LogMessages]:
+        # Explicitly getting log relative path is necessary as the given
+        # task instance might be different than task instance passed in
+        # in set_context method.
+        worker_log_rel_path = self._render_filename(ti, try_number)
+
+        messages, logs = self.io.read(worker_log_rel_path, ti)
+
+        if logs is None:
+            logs = []
+            if not AIRFLOW_V_3_0_PLUS:
+                messages.append(f"No logs found in WASB; ti={ti}")
+
+        return messages, logs
airflow/providers/microsoft/azure/operators/asb.py

@@ -18,6 +18,7 @@ from __future__ import annotations
 
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, Callable
+from uuid import UUID
 
 from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook, MessageHook
@@ -100,6 +101,11 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
         as batch message it can be set to True.
     :param azure_service_bus_conn_id: Reference to the
         :ref:`Azure Service Bus connection <howto/connection:azure_service_bus>`.
+    :param message_id: Message ID to set on message being sent to the queue. Please note, message_id may only be
+        set when a single message is sent.
+    :param reply_to: Name of queue or topic the receiver should reply to. Determination of if the reply will be sent to
+        a queue or a topic should be made out-of-band.
+    :param message_headers: Headers to add to the message's application_properties field for Azure Service Bus.
     """
 
     template_fields: Sequence[str] = ("queue_name",)
@@ -112,6 +118,9 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
         message: str | list[str],
         batch: bool = False,
         azure_service_bus_conn_id: str = "azure_service_bus_default",
+        message_id: str | None = None,
+        reply_to: str | None = None,
+        message_headers: dict[str | bytes, int | float | bytes | bool | str | UUID] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -119,6 +128,9 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
         self.batch = batch
         self.message = message
         self.azure_service_bus_conn_id = azure_service_bus_conn_id
+        self.message_id = message_id
+        self.reply_to = reply_to
+        self.message_headers = message_headers
 
     def execute(self, context: Context) -> None:
         """Send Message to the specific queue in Service Bus namespace."""
@@ -126,7 +138,9 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
         hook = MessageHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id)
 
         # send message
-        hook.send_message(self.queue_name, self.message, self.batch)
+        hook.send_message(
+            self.queue_name, self.message, self.batch, self.message_id, self.reply_to, self.message_headers
+        )
 
 
 class AzureServiceBusReceiveMessageOperator(BaseOperator):
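The same three arguments are plumbed through the operator unchanged; a minimal sketch (the task id, queue name and values are illustrative):

```python
from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusSendMessageOperator

send_single = AzureServiceBusSendMessageOperator(
    task_id="send_with_metadata",
    queue_name="my-queue",
    message="hello world",  # a single message, so message_id is valid
    batch=False,
    message_id="order-42",
    reply_to="replies-queue",
    message_headers={"priority": 5},
)
```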
airflow/providers/microsoft/azure/operators/container_instances.py

@@ -120,7 +120,13 @@ class AzureContainerInstancesOperator(BaseOperator):
             },
             secured_variables=["POSTGRES_PASSWORD"],
             volumes=[
-                ("azure_container_instance_conn_id", "my_storage_container", "my_fileshare", "/input-data", True),
+                (
+                    "azure_container_instance_conn_id",
+                    "my_storage_container",
+                    "my_fileshare",
+                    "/input-data",
+                    True,
+                ),
             ],
             memory_in_gb=14.0,
             cpu=4.0,
airflow/providers/microsoft/azure/operators/msgraph.py

@@ -38,8 +38,6 @@ from airflow.utils.xcom import XCOM_RETURN_KEY
 if TYPE_CHECKING:
     from io import BytesIO
 
-    from kiota_abstractions.request_adapter import ResponseType
-    from kiota_abstractions.request_information import QueryParams
     from msgraph_core import APIVersion
 
     from airflow.utils.context import Context
@@ -118,11 +116,11 @@ class MSGraphAsyncOperator(BaseOperator):
         self,
         *,
         url: str,
-        response_type: ResponseType | None = None,
+        response_type: str | None = None,
         path_parameters: dict[str, Any] | None = None,
         url_template: str | None = None,
         method: str = "GET",
-        query_parameters: dict[str, QueryParams] | None = None,
+        query_parameters: dict[str, Any] | None = None,
         headers: dict[str, str] | None = None,
         data: dict[str, Any] | str | BytesIO | None = None,
         conn_id: str = KiotaRequestAdapterHook.default_conn_name,
airflow/providers/microsoft/azure/sensors/msgraph.py

@@ -31,8 +31,6 @@ if TYPE_CHECKING:
     from datetime import timedelta
     from io import BytesIO
 
-    from kiota_abstractions.request_information import QueryParams
-    from kiota_http.httpx_request_adapter import ResponseType
     from msgraph_core import APIVersion
 
     from airflow.utils.context import Context
@@ -76,11 +74,11 @@ class MSGraphSensor(BaseSensorOperator):
     def __init__(
         self,
         url: str,
-        response_type: ResponseType | None = None,
+        response_type: str | None = None,
         path_parameters: dict[str, Any] | None = None,
         url_template: str | None = None,
         method: str = "GET",
-        query_parameters: dict[str, QueryParams] | None = None,
+        query_parameters: dict[str, Any] | None = None,
         headers: dict[str, str] | None = None,
         data: dict[str, Any] | str | BytesIO | None = None,
         conn_id: str = KiotaRequestAdapterHook.default_conn_name,
airflow/providers/microsoft/azure/triggers/msgraph.py

@@ -40,8 +40,6 @@ if TYPE_CHECKING:
     from io import BytesIO
 
     from kiota_abstractions.request_adapter import RequestAdapter
-    from kiota_abstractions.request_information import QueryParams
-    from kiota_http.httpx_request_adapter import ResponseType
     from msgraph_core import APIVersion
 
 
@@ -112,11 +110,11 @@ class MSGraphTrigger(BaseTrigger):
     def __init__(
         self,
         url: str,
-        response_type: ResponseType | None = None,
+        response_type: str | None = None,
         path_parameters: dict[str, Any] | None = None,
         url_template: str | None = None,
         method: str = "GET",
-        query_parameters: dict[str, QueryParams] | None = None,
+        query_parameters: dict[str, Any] | None = None,
         headers: dict[str, str] | None = None,
         data: dict[str, Any] | str | BytesIO | None = None,
         conn_id: str = KiotaRequestAdapterHook.default_conn_name,
apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-microsoft-azure
-Version: 12.2.2rc1
+Version: 12.3.0rc1
 Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
 Keywords: airflow-provider,microsoft.azure,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -42,19 +42,19 @@ Requires-Dist: azure-kusto-data>=4.1.0,!=4.6.0
 Requires-Dist: azure-mgmt-datafactory>=2.0.0
 Requires-Dist: azure-mgmt-containerregistry>=8.0.0
 Requires-Dist: azure-mgmt-containerinstance>=10.1.0
-Requires-Dist: msgraph-core>=1.
-Requires-Dist: microsoft-kiota-http>=1.
-Requires-Dist: microsoft-kiota-serialization-json
-Requires-Dist: microsoft-kiota-serialization-text
-Requires-Dist: microsoft-kiota-abstractions
+Requires-Dist: msgraph-core>=1.3.3
+Requires-Dist: microsoft-kiota-http>=1.8.0,<2.0.0
+Requires-Dist: microsoft-kiota-serialization-json>=1.8.0
+Requires-Dist: microsoft-kiota-serialization-text>=1.8.0
+Requires-Dist: microsoft-kiota-abstractions>=1.8.0,<2.0.0
 Requires-Dist: msal-extensions>=1.1.0
 Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
 Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
 Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -89,7 +89,7 @@ Provides-Extra: sftp
 
 Package ``apache-airflow-providers-microsoft-azure``
 
-Release: ``12.2.2``
+Release: ``12.3.0``
 
 
 `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -102,7 +102,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
 are in ``airflow.providers.microsoft.azure`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/>`_.
 
 Installation
 ------------
@@ -141,11 +141,11 @@ PIP package Version required
 ``azure-mgmt-datafactory``               ``>=2.0.0``
 ``azure-mgmt-containerregistry``         ``>=8.0.0``
 ``azure-mgmt-containerinstance``         ``>=10.1.0``
-``msgraph-core``                         ``>=1.
-``microsoft-kiota-http``                 ``>=1.
-``microsoft-kiota-serialization-json``
-``microsoft-kiota-serialization-text``
-``microsoft-kiota-abstractions``
+``msgraph-core``                         ``>=1.3.3``
+``microsoft-kiota-http``                 ``>=1.8.0,<2.0.0``
+``microsoft-kiota-serialization-json``   ``>=1.8.0``
+``microsoft-kiota-serialization-text``   ``>=1.8.0``
+``microsoft-kiota-abstractions``         ``>=1.8.0,<2.0.0``
 ``msal-extensions``                      ``>=1.1.0``
 ====================================== ===================
 
@@ -172,5 +172,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/changelog.html>`_.
 
apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info/RECORD

@@ -1,37 +1,37 @@
 airflow/providers/microsoft/azure/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/microsoft/azure/__init__.py,sha256=
-airflow/providers/microsoft/azure/get_provider_info.py,sha256=
+airflow/providers/microsoft/azure/__init__.py,sha256=ZMYdVXMm50jfKz1hbXsXcawuUxWdvleyHlsZ2TOBsZs,1503
+airflow/providers/microsoft/azure/get_provider_info.py,sha256=eRH-2B4nI_8R0xTqvCD0rrFI-hxcK6yMzrDGERM9cEg,22068
 airflow/providers/microsoft/azure/utils.py,sha256=pvBiwBKRYMtEnqkQWLTqvhrsCOkMTRiPH97GkIYHUQc,8429
 airflow/providers/microsoft/azure/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
 airflow/providers/microsoft/azure/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/fs/adls.py,sha256=kXZOVulLNfqwJNR0X9MBN23kcYr3dyv8K_0KqAcWvnk,3679
 airflow/providers/microsoft/azure/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/hooks/adx.py,sha256=FR7EC6ygDhcMK9clw-IDuoJqYtsrq-6WJOZF8ixl8Rw,9576
-airflow/providers/microsoft/azure/hooks/asb.py,sha256=
+airflow/providers/microsoft/azure/hooks/asb.py,sha256=Ui9uvt7-42G60KxlYu0IC9FUB8LpuDyBpjrJbdIeL6k,30702
 airflow/providers/microsoft/azure/hooks/base_azure.py,sha256=ZNTjfKiHM91dQ8imc2VPxorj3fOlzkv2SgTxy008Zr0,5389
 airflow/providers/microsoft/azure/hooks/batch.py,sha256=BK2ciseUyGNFxOC0mEnPohZRfBlHf_UTyE0mB0tvn0g,16114
-airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=
+airflow/providers/microsoft/azure/hooks/container_instance.py,sha256=O2sl9gELFkgeAKIc76s34IfRRvlc-XJKjRqd4hekrYU,7142
 airflow/providers/microsoft/azure/hooks/container_registry.py,sha256=ii6GiOvl3p0lSGEyerHuhpvJC1bh18Asz4QmatXipYY,4855
 airflow/providers/microsoft/azure/hooks/container_volume.py,sha256=ZSe2C3y5xttxp8Ir9dgihZnQzcArckhPazxHg1iknYk,5758
 airflow/providers/microsoft/azure/hooks/cosmos.py,sha256=pE7MyNJx-u6pr_NlK5W2hv--wGBuDjSjPfeneZhky70,17466
-airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=
-airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=
+airflow/providers/microsoft/azure/hooks/data_factory.py,sha256=zlkvYVVakmQGBpk5HQ0Ij_U59dQxiZdfVl8Z0JMuFus,44990
+airflow/providers/microsoft/azure/hooks/data_lake.py,sha256=6ap2vEds-nTJZaIUBfv0lKoPJal8xPAMvhD06LdFwDQ,23804
 airflow/providers/microsoft/azure/hooks/fileshare.py,sha256=jaHSD_xZxposSD5FbdlpZ7JK_CugFHNrgejZkZbHJXM,10884
-airflow/providers/microsoft/azure/hooks/msgraph.py,sha256=
-airflow/providers/microsoft/azure/hooks/powerbi.py,sha256=
+airflow/providers/microsoft/azure/hooks/msgraph.py,sha256=F3qkZXSrnxSn2IAAXYNUT0RNRLYhHB00wbnc74TPcNw,21085
+airflow/providers/microsoft/azure/hooks/powerbi.py,sha256=jTV7Ott10hzIcKzL88P0M9UHFl5syUN79f6viM_uYX8,7537
 airflow/providers/microsoft/azure/hooks/synapse.py,sha256=ya4bByYKgfvUyqs3H_jCUyu2HfI0lmhNSuoKguJrPHU,16078
-airflow/providers/microsoft/azure/hooks/wasb.py,sha256=
+airflow/providers/microsoft/azure/hooks/wasb.py,sha256=MghGk7pGxRJ0hNSNLkTkWY7E4IL7Qyx6Yrv0I7m1nZE,30807
 airflow/providers/microsoft/azure/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/microsoft/azure/log/wasb_task_handler.py,sha256=
+airflow/providers/microsoft/azure/log/wasb_task_handler.py,sha256=s0BCDf7fritIvuwznFJTUEE03GqfecKy-stf1niWr3o,9926
 airflow/providers/microsoft/azure/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/operators/adls.py,sha256=xLcaYKMFkygveTSGzFY1Y1oEzzliu3b77fumxYzYss8,5816
 airflow/providers/microsoft/azure/operators/adx.py,sha256=eUHEhJ18gL3kdCqNaQ-3BL5rSoorOxtMLBjIAx7xcxY,3173
-airflow/providers/microsoft/azure/operators/asb.py,sha256=
+airflow/providers/microsoft/azure/operators/asb.py,sha256=7yj35zwQRr2EE0zIaKTbEjikhJ0v7KrQUyNCnJosqac,30477
 airflow/providers/microsoft/azure/operators/batch.py,sha256=QgtmaUYgejLtER68o0dDhLRJSGM4WqmstW5Guy7I30U,16270
-airflow/providers/microsoft/azure/operators/container_instances.py,sha256=
+airflow/providers/microsoft/azure/operators/container_instances.py,sha256=SjuHOksDWOvknurFKgh12TBwM4-BZ7jCZd4D1RWN4-M,18529
 airflow/providers/microsoft/azure/operators/cosmos.py,sha256=apq0wVYslRaO-ThXGnVyf7v_DovK951vaE7LxcVYU4E,2814
 airflow/providers/microsoft/azure/operators/data_factory.py,sha256=kiVoQ1hYPG725rVhCfpE0MuxiufIQYwXNurCkwtKfY8,12806
-airflow/providers/microsoft/azure/operators/msgraph.py,sha256=
+airflow/providers/microsoft/azure/operators/msgraph.py,sha256=PuRwkwkpss3J6U38UwvgmGOUFQ8nEGK-B4--2mxP_9E,13410
 airflow/providers/microsoft/azure/operators/powerbi.py,sha256=Lp6uzuLw_hNNrQJVy6XhCMCS2wNqhA4OEyTuDHUxMNo,6284
 airflow/providers/microsoft/azure/operators/synapse.py,sha256=fqd3W9uHQYidJY-LAJZaQv1okr-pImXokyMd7iLeY-Q,12751
 airflow/providers/microsoft/azure/operators/wasb_delete_blob.py,sha256=RQQz_xvcjyfY3AuIVkK-J-rkPF3rT-z4SJNPY-fpyZQ,2714
@@ -40,7 +40,7 @@ airflow/providers/microsoft/azure/secrets/key_vault.py,sha256=EmtGfyBtfefGu1ZTtZ
 airflow/providers/microsoft/azure/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/microsoft/azure/sensors/cosmos.py,sha256=MI1MTpWavmMkb4-vG56I6B8dKISj4xTjx8yWPT_Fbpc,2664
 airflow/providers/microsoft/azure/sensors/data_factory.py,sha256=EEkXMZvB5vZyq0UPHyifRzcwSHgHqdwjtsrT49VsKU4,5014
-airflow/providers/microsoft/azure/sensors/msgraph.py,sha256=
+airflow/providers/microsoft/azure/sensors/msgraph.py,sha256=BBoNNGDjzeKAMR58XUVBxqpsO6lddMW3oDKBFlHSAjE,7731
 airflow/providers/microsoft/azure/sensors/wasb.py,sha256=XA8prd03a5iYEguu3OraI1zUAecR9Lst16aNz8BBCAA,7382
 airflow/providers/microsoft/azure/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/transfers/local_to_adls.py,sha256=4IbeumUCIFQaATfbCqPoOenN9ZfvXnWLl1uwBtLH6ps,4203
@@ -50,10 +50,10 @@ airflow/providers/microsoft/azure/transfers/s3_to_wasb.py,sha256=53If9gvBbMnCnjd
 airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py,sha256=UU_PFJlmsd8xz3rM7qZdndIUmWYfM7E4QYUN293hHD4,8222
 airflow/providers/microsoft/azure/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/microsoft/azure/triggers/data_factory.py,sha256=U3vY_pj4yORxE7X6YR7CP3Jl73K9euCdUczy3dLmikU,11165
-airflow/providers/microsoft/azure/triggers/msgraph.py,sha256=
+airflow/providers/microsoft/azure/triggers/msgraph.py,sha256=l7A50JoBebiKhhsxILtLvuoulyIn59BVdjTvdAezdpk,8704
 airflow/providers/microsoft/azure/triggers/powerbi.py,sha256=14RFR5HfuoX4qKG_FkFc72GhEf34ISg3TxQkBAD9TpA,9758
 airflow/providers/microsoft/azure/triggers/wasb.py,sha256=RF-C6iqDEs6_pWireCWZXqxcqWK-sFJ695Okdd_EJOA,7456
-apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
-apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
-apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info/METADATA,sha256=
-apache_airflow_providers_microsoft_azure-12.2.2rc1.dist-info/RECORD,,
+apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info/entry_points.txt,sha256=6iWHenOoUC3YZBb3OKn6g0HlJsV58Ba56i8USmQrcJI,111
+apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info/METADATA,sha256=Hn5s_JDFZ3ONdC-cNf6WQALBm512L7V6ufYumwr5Ihc,8456
+apache_airflow_providers_microsoft_azure-12.3.0rc1.dist-info/RECORD,,