apache-airflow-providers-microsoft-azure 12.2.2rc1__tar.gz → 12.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
Files changed (58)
  1. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/PKG-INFO +17 -17
  2. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/README.rst +8 -8
  3. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/pyproject.toml +10 -9
  4. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/__init__.py +1 -1
  5. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/get_provider_info.py +8 -7
  6. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/asb.py +35 -16
  7. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/container_instance.py +2 -2
  8. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/data_factory.py +2 -2
  9. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/data_lake.py +1 -2
  10. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/msgraph.py +34 -20
  11. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/powerbi.py +1 -0
  12. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/wasb.py +1 -2
  13. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/log/wasb_task_handler.py +109 -68
  14. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/asb.py +15 -1
  15. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/container_instances.py +7 -1
  16. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/msgraph.py +2 -4
  17. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/sensors/msgraph.py +2 -4
  18. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/triggers/msgraph.py +2 -4
  19. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/LICENSE +0 -0
  20. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/fs/__init__.py +0 -0
  21. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/fs/adls.py +0 -0
  22. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/__init__.py +0 -0
  23. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/adx.py +0 -0
  24. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/base_azure.py +0 -0
  25. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/batch.py +0 -0
  26. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/container_registry.py +0 -0
  27. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/container_volume.py +0 -0
  28. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/cosmos.py +0 -0
  29. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/fileshare.py +0 -0
  30. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/hooks/synapse.py +0 -0
  31. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/log/__init__.py +0 -0
  32. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/__init__.py +0 -0
  33. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/adls.py +0 -0
  34. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/adx.py +0 -0
  35. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/batch.py +0 -0
  36. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/cosmos.py +0 -0
  37. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/data_factory.py +0 -0
  38. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/powerbi.py +0 -0
  39. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/synapse.py +0 -0
  40. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py +0 -0
  41. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/secrets/__init__.py +0 -0
  42. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/secrets/key_vault.py +0 -0
  43. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/sensors/__init__.py +0 -0
  44. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/sensors/cosmos.py +0 -0
  45. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/sensors/data_factory.py +0 -0
  46. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/sensors/wasb.py +0 -0
  47. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/transfers/__init__.py +0 -0
  48. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/transfers/local_to_adls.py +0 -0
  49. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/transfers/local_to_wasb.py +0 -0
  50. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +0 -0
  51. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py +0 -0
  52. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +0 -0
  53. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/triggers/__init__.py +0 -0
  54. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/triggers/data_factory.py +0 -0
  55. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/triggers/powerbi.py +0 -0
  56. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/triggers/wasb.py +0 -0
  57. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/utils.py +0 -0
  58. {apache_airflow_providers_microsoft_azure-12.2.2rc1 → apache_airflow_providers_microsoft_azure-12.3.0}/src/airflow/providers/microsoft/azure/version_compat.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-microsoft-azure
- Version: 12.2.2rc1
+ Version: 12.3.0
  Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
  Keywords: airflow-provider,microsoft.azure,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,7 +20,7 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.9.0rc0
+ Requires-Dist: apache-airflow>=2.9.0
  Requires-Dist: adlfs>=2023.10.0
  Requires-Dist: azure-batch>=8.0.0
  Requires-Dist: azure-cosmos>=4.6.0
@@ -42,19 +42,19 @@ Requires-Dist: azure-kusto-data>=4.1.0,!=4.6.0
  Requires-Dist: azure-mgmt-datafactory>=2.0.0
  Requires-Dist: azure-mgmt-containerregistry>=8.0.0
  Requires-Dist: azure-mgmt-containerinstance>=10.1.0
- Requires-Dist: msgraph-core>=1.0.0,!=1.1.8
- Requires-Dist: microsoft-kiota-http>=1.3.0,!=1.3.4
- Requires-Dist: microsoft-kiota-serialization-json==1.0.0
- Requires-Dist: microsoft-kiota-serialization-text==1.0.0
- Requires-Dist: microsoft-kiota-abstractions<1.4.0
+ Requires-Dist: msgraph-core>=1.3.3
+ Requires-Dist: microsoft-kiota-http>=1.8.0,<2.0.0
+ Requires-Dist: microsoft-kiota-serialization-json>=1.8.0
+ Requires-Dist: microsoft-kiota-serialization-text>=1.8.0
+ Requires-Dist: microsoft-kiota-abstractions>=1.8.0,<2.0.0
  Requires-Dist: msal-extensions>=1.1.0
  Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
  Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
  Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
  Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -89,7 +89,7 @@ Provides-Extra: sftp

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``12.2.2``
+ Release: ``12.3.0``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -102,7 +102,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/>`_.

  Installation
  ------------
@@ -141,11 +141,11 @@ PIP package Version required
  ``azure-mgmt-datafactory``             ``>=2.0.0``
  ``azure-mgmt-containerregistry``       ``>=8.0.0``
  ``azure-mgmt-containerinstance``       ``>=10.1.0``
- ``msgraph-core``                       ``>=1.0.0,!=1.1.8``
- ``microsoft-kiota-http``               ``>=1.3.0,!=1.3.4``
- ``microsoft-kiota-serialization-json`` ``==1.0.0``
- ``microsoft-kiota-serialization-text`` ``==1.0.0``
- ``microsoft-kiota-abstractions``       ``<1.4.0``
+ ``msgraph-core``                       ``>=1.3.3``
+ ``microsoft-kiota-http``               ``>=1.8.0,<2.0.0``
+ ``microsoft-kiota-serialization-json`` ``>=1.8.0``
+ ``microsoft-kiota-serialization-text`` ``>=1.8.0``
+ ``microsoft-kiota-abstractions``       ``>=1.8.0,<2.0.0``
  ``msal-extensions``                    ``>=1.1.0``
  ====================================== ===================

@@ -172,5 +172,5 @@ Dependent package
  ================================================================================================================== =================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/changelog.html>`_.

README.rst
@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``12.2.2``
+ Release: ``12.3.0``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/>`_.

  Installation
  ------------
@@ -75,11 +75,11 @@ PIP package Version required
  ``azure-mgmt-datafactory``             ``>=2.0.0``
  ``azure-mgmt-containerregistry``       ``>=8.0.0``
  ``azure-mgmt-containerinstance``       ``>=10.1.0``
- ``msgraph-core``                       ``>=1.0.0,!=1.1.8``
- ``microsoft-kiota-http``               ``>=1.3.0,!=1.3.4``
- ``microsoft-kiota-serialization-json`` ``==1.0.0``
- ``microsoft-kiota-serialization-text`` ``==1.0.0``
- ``microsoft-kiota-abstractions``       ``<1.4.0``
+ ``msgraph-core``                       ``>=1.3.3``
+ ``microsoft-kiota-http``               ``>=1.8.0,<2.0.0``
+ ``microsoft-kiota-serialization-json`` ``>=1.8.0``
+ ``microsoft-kiota-serialization-text`` ``>=1.8.0``
+ ``microsoft-kiota-abstractions``       ``>=1.8.0,<2.0.0``
  ``msal-extensions``                    ``>=1.1.0``
  ====================================== ===================

@@ -106,4 +106,4 @@ Dependent package
  ================================================================================================================== =================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/changelog.html>`_.

pyproject.toml
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-microsoft-azure"
- version = "12.2.2.rc1"
+ version = "12.3.0"
  description = "Provider package apache-airflow-providers-microsoft-azure for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -57,7 +57,7 @@ requires-python = "~=3.9"
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
-     "apache-airflow>=2.9.0rc0",
+     "apache-airflow>=2.9.0",
      "adlfs>=2023.10.0",
      "azure-batch>=8.0.0",
      "azure-cosmos>=4.6.0",
@@ -82,15 +82,15 @@ dependencies = [
      "azure-mgmt-containerinstance>=10.1.0",
      # msgraph-core 1.1.8 has a bug which causes ABCMeta object is not subscriptable error
      # See https://github.com/microsoftgraph/msgraph-sdk-python-core/issues/781
-     "msgraph-core>=1.0.0,!=1.1.8",
+     "msgraph-core>=1.3.3",
      # msgraph-core has transient import failures with microsoft-kiota-http==1.3.4
      # See https://github.com/microsoftgraph/msgraph-sdk-python-core/issues/706
-     "microsoft-kiota-http>=1.3.0,!=1.3.4",
-     "microsoft-kiota-serialization-json==1.0.0",
-     "microsoft-kiota-serialization-text==1.0.0",
+     "microsoft-kiota-http>=1.8.0,<2.0.0",
+     "microsoft-kiota-serialization-json>=1.8.0",
+     "microsoft-kiota-serialization-text>=1.8.0",
      # microsoft-kiota-abstractions 1.4.0 breaks MyPy static checks on main
      # see https://github.com/apache/airflow/issues/43036
-     "microsoft-kiota-abstractions<1.4.0",
+     "microsoft-kiota-abstractions>=1.8.0,<2.0.0",
      "msal-extensions>=1.1.0",
  ]

@@ -121,6 +121,7 @@ dev = [
      "apache-airflow-providers-sftp",
      # Additional devel dependencies (do not remove this line and add extra development dependencies)
      "pywinrm>=0.5.0",
+     "moto>=5.1.2",
  ]

  [tool.uv.sources]
@@ -134,8 +135,8 @@ apache-airflow-providers-fab = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.2.2/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/12.3.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"

src/airflow/providers/microsoft/azure/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "12.2.2"
+ __version__ = "12.3.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.9.0"

src/airflow/providers/microsoft/azure/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
          "name": "Microsoft Azure",
          "description": "`Microsoft Azure <https://azure.microsoft.com/>`__\n",
          "state": "ready",
-         "source-date-epoch": 1742980868,
+         "source-date-epoch": 1743836331,
          "versions": [
+             "12.3.0",
              "12.2.2",
              "12.2.1",
              "12.2.0",
@@ -482,11 +483,11 @@ def get_provider_info():
              "azure-mgmt-datafactory>=2.0.0",
              "azure-mgmt-containerregistry>=8.0.0",
              "azure-mgmt-containerinstance>=10.1.0",
-             "msgraph-core>=1.0.0,!=1.1.8",
-             "microsoft-kiota-http>=1.3.0,!=1.3.4",
-             "microsoft-kiota-serialization-json==1.0.0",
-             "microsoft-kiota-serialization-text==1.0.0",
-             "microsoft-kiota-abstractions<1.4.0",
+             "msgraph-core>=1.3.3",
+             "microsoft-kiota-http>=1.8.0,<2.0.0",
+             "microsoft-kiota-serialization-json>=1.8.0",
+             "microsoft-kiota-serialization-text>=1.8.0",
+             "microsoft-kiota-abstractions>=1.8.0,<2.0.0",
              "msal-extensions>=1.1.0",
          ],
          "optional-dependencies": {
@@ -495,5 +496,5 @@ def get_provider_info():
              "oracle": ["apache-airflow-providers-oracle"],
              "sftp": ["apache-airflow-providers-sftp"],
          },
-         "devel-dependencies": ["pywinrm>=0.5.0"],
+         "devel-dependencies": ["pywinrm>=0.5.0", "moto>=5.1.2"],
      }

src/airflow/providers/microsoft/azure/hooks/asb.py
@@ -17,7 +17,7 @@
  from __future__ import annotations

  from typing import TYPE_CHECKING, Any, Callable
- from uuid import uuid4
+ from uuid import UUID, uuid4

  from azure.core.exceptions import ResourceNotFoundError
  from azure.servicebus import (
@@ -468,7 +468,15 @@ class MessageHook(BaseAzureServiceBusHook):
          self.log.info("Create and returns ServiceBusClient")
          return client

-     def send_message(self, queue_name: str, messages: str | list[str], batch_message_flag: bool = False):
+     def send_message(
+         self,
+         queue_name: str,
+         messages: str | list[str],
+         batch_message_flag: bool = False,
+         message_id: str | None = None,
+         reply_to: str | None = None,
+         message_headers: dict[str | bytes, int | float | bytes | bool | str | UUID] | None = None,
+     ):
          """
          Use ServiceBusClient Send to send message(s) to a Service Bus Queue.

@@ -478,38 +486,49 @@ class MessageHook(BaseAzureServiceBusHook):
          :param messages: Message which needs to be sent to the queue. It can be string or list of string.
          :param batch_message_flag: bool flag, can be set to True if message needs to be
              sent as batch message.
+         :param message_id: Message ID to set on message being sent to the queue. Please note, message_id may only be
+             set when a single message is sent.
+         :param reply_to: Reply to which needs to be sent to the queue.
+         :param message_headers: Headers to add to the message's application_properties field for Azure Service Bus.
          """
          if queue_name is None:
              raise TypeError("Queue name cannot be None.")
          if not messages:
              raise ValueError("Messages list cannot be empty.")
+         if message_id and isinstance(messages, list) and len(messages) != 1:
+             raise TypeError("Message ID can only be set if a single message is sent.")
          with (
              self.get_conn() as service_bus_client,
              service_bus_client.get_queue_sender(queue_name=queue_name) as sender,
              sender,
          ):
-             if isinstance(messages, str):
-                 if not batch_message_flag:
-                     msg = ServiceBusMessage(messages)
-                     sender.send_messages(msg)
-                 else:
-                     self.send_batch_message(sender, [messages])
+             message_creator = lambda msg_body: ServiceBusMessage(
+                 msg_body, message_id=message_id, reply_to=reply_to, application_properties=message_headers
+             )
+             message_list = [messages] if isinstance(messages, str) else messages
+             if not batch_message_flag:
+                 self.send_list_messages(sender, message_list, message_creator)
              else:
-                 if not batch_message_flag:
-                     self.send_list_messages(sender, messages)
-                 else:
-                     self.send_batch_message(sender, messages)
+                 self.send_batch_message(sender, message_list, message_creator)

      @staticmethod
-     def send_list_messages(sender: ServiceBusSender, messages: list[str]):
-         list_messages = [ServiceBusMessage(message) for message in messages]
+     def send_list_messages(
+         sender: ServiceBusSender,
+         messages: list[str],
+         message_creator: Callable[[str], ServiceBusMessage],
+     ):
+         list_messages = [message_creator(body) for body in messages]
          sender.send_messages(list_messages)  # type: ignore[arg-type]

      @staticmethod
-     def send_batch_message(sender: ServiceBusSender, messages: list[str]):
+     def send_batch_message(
+         sender: ServiceBusSender,
+         messages: list[str],
+         message_creator: Callable[[str], ServiceBusMessage],
+     ):
          batch_message = sender.create_message_batch()
          for message in messages:
-             batch_message.add_message(ServiceBusMessage(message))
+             batch_message.add_message(message_creator(message))
          sender.send_messages(batch_message)

      def receive_message(
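
The new message_id, reply_to and message_headers arguments flow into every
ServiceBusMessage the hook builds through the shared message_creator callable.
A minimal sketch of calling the updated hook; the connection id, queue name
and header values below are illustrative placeholders, not part of this
package::

    from uuid import uuid4

    from airflow.providers.microsoft.azure.hooks.asb import MessageHook

    hook = MessageHook(azure_service_bus_conn_id="azure_service_bus_default")
    hook.send_message(
        queue_name="my-queue",                  # placeholder queue name
        messages="hello",                       # a single message, so message_id is allowed
        batch_message_flag=False,
        message_id=str(uuid4()),                # rejected if a multi-element list is passed
        reply_to="my-reply-queue",
        message_headers={"source": "airflow"},  # stored in application_properties
    )
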
src/airflow/providers/microsoft/azure/hooks/container_instance.py
@@ -85,7 +85,7 @@ class AzureContainerInstanceHook(AzureBaseHook):
          if all([conn.login, conn.password, tenant]):
              self.log.info("Getting connection using specific credentials and subscription_id.")
              credential = ClientSecretCredential(
-                 client_id=conn.login, client_secret=conn.password, tenant_id=cast(str, tenant)
+                 client_id=conn.login, client_secret=conn.password, tenant_id=cast("str", tenant)
              )
          else:
              self.log.info("Using DefaultAzureCredential as credential")
@@ -96,7 +96,7 @@ class AzureContainerInstanceHook(AzureBaseHook):
                  workload_identity_tenant_id=workload_identity_tenant_id,
              )

-         subscription_id = cast(str, conn.extra_dejson.get("subscriptionId"))
+         subscription_id = cast("str", conn.extra_dejson.get("subscriptionId"))
          return ContainerInstanceManagementClient(
              credential=credential,
              subscription_id=subscription_id,

src/airflow/providers/microsoft/azure/hooks/data_factory.py
@@ -1104,7 +1104,7 @@ def provide_targeted_factory_async(func: T) -> T:

          return await func(*bound_args.args, **bound_args.kwargs)

-     return cast(T, wrapper)
+     return cast("T", wrapper)


  class AzureDataFactoryAsyncHook(AzureDataFactoryHook):
@@ -1193,7 +1193,7 @@ class AzureDataFactoryAsyncHook(AzureDataFactoryHook):
          :param factory_name: The factory name.
          """
          pipeline_run = await self.get_pipeline_run(run_id, resource_group_name, factory_name)
-         status: str = cast(str, pipeline_run.status)
+         status: str = cast("str", pipeline_run.status)
          return status

      @provide_targeted_factory_async

src/airflow/providers/microsoft/azure/hooks/data_lake.py
@@ -328,8 +328,7 @@ class AzureDataLakeStorageV2Hook(BaseHook):
          prefix = "extra__adls__"
          if field_name.startswith("extra__"):
              raise ValueError(
-                 f"Got prefixed name {field_name}; please remove the '{prefix}' prefix "
-                 f"when using this method."
+                 f"Got prefixed name {field_name}; please remove the '{prefix}' prefix when using this method."
              )
          if field_name in extra_dict:
              return extra_dict[field_name] or None

src/airflow/providers/microsoft/azure/hooks/msgraph.py
@@ -23,12 +23,12 @@ from contextlib import suppress
  from http import HTTPStatus
  from io import BytesIO
  from json import JSONDecodeError
- from typing import TYPE_CHECKING, Any
+ from typing import TYPE_CHECKING, Any, cast
  from urllib.parse import quote, urljoin, urlparse

  import httpx
  from azure.identity import CertificateCredential, ClientSecretCredential
- from httpx import AsyncHTTPTransport, Timeout
+ from httpx import AsyncHTTPTransport, Response, Timeout
  from kiota_abstractions.api_error import APIError
  from kiota_abstractions.method import Method
  from kiota_abstractions.request_information import RequestInformation
@@ -55,10 +55,8 @@ from airflow.hooks.base import BaseHook
  if TYPE_CHECKING:
      from azure.identity._internal.client_credential_base import ClientCredentialBase
      from kiota_abstractions.request_adapter import RequestAdapter
-     from kiota_abstractions.request_information import QueryParams
      from kiota_abstractions.response_handler import NativeResponseType
      from kiota_abstractions.serialization import ParsableFactory
-     from kiota_http.httpx_request_adapter import ResponseType

      from airflow.models import Connection

@@ -67,7 +65,7 @@ class DefaultResponseHandler(ResponseHandler):
      """DefaultResponseHandler returns JSON payload or content in bytes or response headers."""

      @staticmethod
-     def get_value(response: NativeResponseType) -> Any:
+     def get_value(response: Response) -> Any:
          with suppress(JSONDecodeError):
              return response.json()
          content = response.content
@@ -76,7 +74,7 @@ class DefaultResponseHandler(ResponseHandler):
          return content

      async def handle_response_async(
-         self, response: NativeResponseType, error_map: dict[str, ParsableFactory | None] | None = None
+         self, response: NativeResponseType, error_map: dict[str, ParsableFactory] | None
      ) -> Any:
          """
          Invoke this callback method when a response is received.
@@ -84,10 +82,11 @@ class DefaultResponseHandler(ResponseHandler):
          param response: The type of the native response object.
          param error_map: The error dict to use in case of a failed request.
          """
-         value = self.get_value(response)
-         if response.status_code not in {200, 201, 202, 204, 302}:
-             message = value or response.reason_phrase
-             status_code = HTTPStatus(response.status_code)
+         resp: Response = cast("Response", response)
+         value = self.get_value(resp)
+         if resp.status_code not in {200, 201, 202, 204, 302}:
+             message = value or resp.reason_phrase
+             status_code = HTTPStatus(resp.status_code)
              if status_code == HTTPStatus.BAD_REQUEST:
                  raise AirflowBadRequest(message)
              elif status_code == HTTPStatus.NOT_FOUND:
@@ -391,16 +390,16 @@ class KiotaRequestAdapterHook(BaseHook):
      async def run(
          self,
          url: str = "",
-         response_type: ResponseType | None = None,
+         response_type: str | None = None,
          path_parameters: dict[str, Any] | None = None,
          method: str = "GET",
-         query_parameters: dict[str, QueryParams] | None = None,
+         query_parameters: dict[str, Any] | None = None,
          headers: dict[str, str] | None = None,
          data: dict[str, Any] | str | BytesIO | None = None,
      ):
          self.log.info("Executing url '%s' as '%s'", url, method)

-         response = await self.get_conn().send_primitive_async(
+         response = await self.send_request(
              request_info=self.request_information(
                  url=url,
                  response_type=response_type,
@@ -411,20 +410,31 @@ class KiotaRequestAdapterHook(BaseHook):
                  data=data,
              ),
              response_type=response_type,
-             error_map=self.error_mapping(),
          )

          self.log.debug("response: %s", response)

          return response

+     async def send_request(self, request_info: RequestInformation, response_type: str | None = None):
+         if response_type:
+             return await self.get_conn().send_primitive_async(
+                 request_info=request_info,
+                 response_type=response_type,
+                 error_map=self.error_mapping(),
+             )
+         return await self.get_conn().send_no_response_content_async(
+             request_info=request_info,
+             error_map=self.error_mapping(),
+         )
+
      def request_information(
          self,
          url: str,
-         response_type: ResponseType | None = None,
+         response_type: str | None = None,
          path_parameters: dict[str, Any] | None = None,
          method: str = "GET",
-         query_parameters: dict[str, QueryParams] | None = None,
+         query_parameters: dict[str, Any] | None = None,
          headers: dict[str, str] | None = None,
          data: dict[str, Any] | str | BytesIO | None = None,
      ) -> RequestInformation:
@@ -446,8 +456,12 @@ class KiotaRequestAdapterHook(BaseHook):
          headers = {**self.DEFAULT_HEADERS, **headers} if headers else self.DEFAULT_HEADERS
          for header_name, header_value in headers.items():
              request_information.headers.try_add(header_name=header_name, header_value=header_value)
-         if isinstance(data, BytesIO) or isinstance(data, bytes) or isinstance(data, str):
+         if isinstance(data, BytesIO):
+             request_information.content = data.read()
+         elif isinstance(data, bytes):
              request_information.content = data
+         elif isinstance(data, str):
+             request_information.content = data.encode("utf-8")
          elif data:
              request_information.headers.try_add(
                  header_name=RequestInformation.CONTENT_TYPE_HEADER, header_value="application/json"
@@ -468,8 +482,8 @@ class KiotaRequestAdapterHook(BaseHook):
          return {}

      @staticmethod
-     def error_mapping() -> dict[str, ParsableFactory | None]:
+     def error_mapping() -> dict[str, type[ParsableFactory]]:
          return {
-             "4XX": APIError,
-             "5XX": APIError,
+             "4XX": APIError,  # type: ignore
+             "5XX": APIError,  # type: ignore
          }
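
With the new send_request helper, response_type is a plain string naming a
Kiota primitive type (e.g. "bytes"); when it is None the request goes through
send_no_response_content_async and the configured DefaultResponseHandler
decodes the JSON or byte payload. A rough sketch of driving the hook from
async code; the connection id and URL are illustrative::

    import asyncio

    from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook


    async def fetch_users():
        # Hypothetical connection id; tenant and credentials come from the connection.
        hook = KiotaRequestAdapterHook(conn_id="msgraph_default")
        # response_type=None -> send_no_response_content_async + DefaultResponseHandler
        return await hook.run(url="users", method="GET", query_parameters={"$top": 5})


    users = asyncio.run(fetch_users())
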
src/airflow/providers/microsoft/azure/hooks/powerbi.py
@@ -187,6 +187,7 @@ class PowerBIHook(KiotaRequestAdapterHook):
          try:
              response = await self.run(
                  url="myorg/groups/{group_id}/datasets/{dataset_id}/refreshes",
+                 response_type=None,
                  method="POST",
                  path_parameters={
                      "group_id": group_id,

src/airflow/providers/microsoft/azure/hooks/wasb.py
@@ -144,8 +144,7 @@ class WasbHook(BaseHook):
          prefix = "extra__wasb__"
          if field_name.startswith("extra__"):
              raise ValueError(
-                 f"Got prefixed name {field_name}; please remove the '{prefix}' prefix "
-                 f"when using this method."
+                 f"Got prefixed name {field_name}; please remove the '{prefix}' prefix when using this method."
              )
          if field_name in extra_dict:
              return extra_dict[field_name] or None

src/airflow/providers/microsoft/azure/log/wasb_task_handler.py
@@ -23,6 +23,7 @@ from functools import cached_property
  from pathlib import Path
  from typing import TYPE_CHECKING

+ import attrs
  from azure.core.exceptions import HttpResponseError

  from airflow.configuration import conf
@@ -34,34 +35,36 @@ if TYPE_CHECKING:
      import logging

      from airflow.models.taskinstance import TaskInstance
+     from airflow.sdk.types import RuntimeTaskInstanceProtocol as RuntimeTI
+     from airflow.utils.log.file_task_handler import LogMessages, LogSourceInfo


- class WasbTaskHandler(FileTaskHandler, LoggingMixin):
-     """
-     WasbTaskHandler is a python log handler that handles and reads task instance logs.
+ @attrs.define
+ class WasbRemoteLogIO(LoggingMixin):  # noqa: D101
+     remote_base: str
+     base_log_folder: Path = attrs.field(converter=Path)
+     delete_local_copy: bool

-     It extends airflow FileTaskHandler and uploads to and reads from Wasb remote storage.
-     """
+     wasb_container: str

-     trigger_should_wrap = True
+     processors = ()

-     def __init__(
-         self,
-         base_log_folder: str,
-         wasb_log_folder: str,
-         wasb_container: str,
-         **kwargs,
-     ) -> None:
-         super().__init__(base_log_folder)
-         self.handler: logging.FileHandler | None = None
-         self.wasb_container = wasb_container
-         self.remote_base = wasb_log_folder
-         self.log_relative_path = ""
-         self.closed = False
-         self.upload_on_close = True
-         self.delete_local_copy = kwargs.get(
-             "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
-         )
+     def upload(self, path: str | os.PathLike, ti: RuntimeTI):
+         """Upload the given log path to the remote storage."""
+         path = Path(path)
+         if path.is_absolute():
+             local_loc = path
+             remote_loc = os.path.join(self.remote_base, path.relative_to(self.base_log_folder))
+         else:
+             local_loc = self.base_log_folder.joinpath(path)
+             remote_loc = os.path.join(self.remote_base, path)
+
+         if local_loc.is_file():
+             # read log and remove old logs to get just the latest additions
+             log = local_loc.read_text()
+             has_uploaded = self.write(log, remote_loc)
+             if has_uploaded and self.delete_local_copy:
+                 shutil.rmtree(os.path.dirname(local_loc))

      @cached_property
      def hook(self):
@@ -81,53 +84,13 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
          )
          return None

-     def set_context(self, ti: TaskInstance, *, identifier: str | None = None) -> None:
-         super().set_context(ti, identifier=identifier)
-         # Local location and remote location is needed to open and
-         # upload local log file to Wasb remote storage.
-         if TYPE_CHECKING:
-             assert self.handler is not None
-
-         full_path = self.handler.baseFilename
-         self.log_relative_path = Path(full_path).relative_to(self.local_base).as_posix()
-         is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
-         self.upload_on_close = is_trigger_log_context or not getattr(ti, "raw", None)
-
-     def close(self) -> None:
-         """Close and upload local log file to remote storage Wasb."""
-         # When application exit, system shuts down all handlers by
-         # calling close method. Here we check if logger is already
-         # closed to prevent uploading the log to remote storage multiple
-         # times when `logging.shutdown` is called.
-         if self.closed:
-             return
-
-         super().close()
-
-         if not self.upload_on_close:
-             return
-
-         local_loc = os.path.join(self.local_base, self.log_relative_path)
-         remote_loc = os.path.join(self.remote_base, self.log_relative_path)
-         if os.path.exists(local_loc):
-             # read log and remove old logs to get just the latest additions
-             with open(local_loc) as logfile:
-                 log = logfile.read()
-             wasb_write = self.wasb_write(log, remote_loc, append=True)
-
-             if wasb_write and self.delete_local_copy:
-                 shutil.rmtree(os.path.dirname(local_loc))
-         # Mark closed so we don't double write if close is called twice
-         self.closed = True
-
-     def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[list[str], list[str]]:
+     def read(self, relative_path, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages | None]:
          messages = []
          logs = []
-         worker_log_relative_path = self._render_filename(ti, try_number)
          # TODO: fix this - "relative path" i.e currently REMOTE_BASE_LOG_FOLDER should start with "wasb"
          # unlike others with shceme in URL itself to identify the correct handler.
          # This puts limitations on ways users can name the base_path.
-         prefix = os.path.join(self.remote_base, worker_log_relative_path)
+         prefix = os.path.join(self.remote_base, relative_path)
          blob_names = []
          try:
              blob_names = self.hook.get_blobs_list(container_name=self.wasb_container, prefix=prefix)
@@ -143,8 +106,7 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
              else:
                  messages.extend(["Found remote logs:", *[f"  * {x}" for x in sorted(uris)]])
          else:
-             if not AIRFLOW_V_3_0_PLUS:
-                 messages.append(f"No logs found in WASB; ti={ti}")
+             return messages, None

          for name in sorted(blob_names):
              remote_log = ""
@@ -191,7 +153,7 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
              return msg
          return ""

-     def wasb_write(self, log: str, remote_log_location: str, append: bool = True) -> bool:
+     def write(self, log: str, remote_log_location: str, append: bool = True) -> bool:
          """
          Write the log to the remote_log_location. Fails silently if no hook was created.

@@ -210,3 +172,82 @@ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
              self.log.exception("Could not write logs to %s", remote_log_location)
              return False
          return True
+
+
+ class WasbTaskHandler(FileTaskHandler, LoggingMixin):
+     """
+     WasbTaskHandler is a python log handler that handles and reads task instance logs.
+
+     It extends airflow FileTaskHandler and uploads to and reads from Wasb remote storage.
+     """
+
+     trigger_should_wrap = True
+
+     def __init__(
+         self,
+         base_log_folder: str,
+         wasb_log_folder: str,
+         wasb_container: str,
+         **kwargs,
+     ) -> None:
+         super().__init__(base_log_folder)
+         self.handler: logging.FileHandler | None = None
+         self.log_relative_path = ""
+         self.closed = False
+         self.upload_on_close = True
+         self.io = WasbRemoteLogIO(
+             base_log_folder=base_log_folder,
+             remote_base=wasb_log_folder,
+             wasb_container=wasb_container,
+             delete_local_copy=kwargs.get(
+                 "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
+             ),
+         )
+
+     def set_context(self, ti: TaskInstance, *, identifier: str | None = None) -> None:
+         super().set_context(ti, identifier=identifier)
+         # Local location and remote location is needed to open and
+         # upload local log file to Wasb remote storage.
+         if TYPE_CHECKING:
+             assert self.handler is not None
+
+         self.ti = ti
+         full_path = self.handler.baseFilename
+         self.log_relative_path = Path(full_path).relative_to(self.local_base).as_posix()
+         is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
+         self.upload_on_close = is_trigger_log_context or not getattr(ti, "raw", None)
+
+     def close(self) -> None:
+         """Close and upload local log file to remote storage Wasb."""
+         # When application exit, system shuts down all handlers by
+         # calling close method. Here we check if logger is already
+         # closed to prevent uploading the log to remote storage multiple
+         # times when `logging.shutdown` is called.
+         if self.closed:
+             return
+
+         super().close()
+
+         if not self.upload_on_close:
+             return
+
+         if hasattr(self, "ti"):
+             self.io.upload(self.log_relative_path, self.ti)
+
+         # Mark closed so we don't double write if close is called twice
+         self.closed = True
+
+     def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[LogSourceInfo, LogMessages]:
+         # Explicitly getting log relative path is necessary as the given
+         # task instance might be different than task instance passed in
+         # in set_context method.
+         worker_log_rel_path = self._render_filename(ti, try_number)
+
+         messages, logs = self.io.read(worker_log_rel_path, ti)
+
+         if logs is None:
+             logs = []
+             if not AIRFLOW_V_3_0_PLUS:
+                 messages.append(f"No logs found in WASB; ti={ti}")
+
+         return messages, logs
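
The handler's constructor signature is unchanged by this refactor, so existing
remote-logging setups keep working; uploads and reads are merely delegated to
WasbRemoteLogIO. A sketch of the usual wiring in a custom logging config,
assuming the default Airflow config template; the folder paths and container
name are placeholders, and the remote base should start with "wasb" per the
TODO above::

    from copy import deepcopy

    from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

    LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
    LOGGING_CONFIG["handlers"]["task"] = {
        "class": "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler",
        "formatter": "airflow",
        "base_log_folder": "/opt/airflow/logs",  # local staging directory (placeholder)
        "wasb_log_folder": "wasb-airflow-logs",  # remote base; should start with "wasb"
        "wasb_container": "airflow-logs",        # storage container name (placeholder)
    }
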
src/airflow/providers/microsoft/azure/operators/asb.py
@@ -18,6 +18,7 @@ from __future__ import annotations

  from collections.abc import Sequence
  from typing import TYPE_CHECKING, Any, Callable
+ from uuid import UUID

  from airflow.models import BaseOperator
  from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook, MessageHook
@@ -100,6 +101,11 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
          as batch message it can be set to True.
      :param azure_service_bus_conn_id: Reference to the
          :ref: `Azure Service Bus connection<howto/connection:azure_service_bus>`.
+     :param message_id: Message ID to set on message being sent to the queue. Please note, message_id may only be
+         set when a single message is sent.
+     :param reply_to: Name of queue or topic the receiver should reply to. Determination of if the reply will be sent to
+         a queue or a topic should be made out-of-band.
+     :param message_headers: Headers to add to the message's application_properties field for Azure Service Bus.
      """

      template_fields: Sequence[str] = ("queue_name",)
@@ -112,6 +118,9 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
          message: str | list[str],
          batch: bool = False,
          azure_service_bus_conn_id: str = "azure_service_bus_default",
+         message_id: str | None = None,
+         reply_to: str | None = None,
+         message_headers: dict[str | bytes, int | float | bytes | bool | str | UUID] | None = None,
          **kwargs,
      ) -> None:
          super().__init__(**kwargs)
@@ -119,6 +128,9 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
          self.batch = batch
          self.message = message
          self.azure_service_bus_conn_id = azure_service_bus_conn_id
+         self.message_id = message_id
+         self.reply_to = reply_to
+         self.message_headers = message_headers

      def execute(self, context: Context) -> None:
          """Send Message to the specific queue in Service Bus namespace."""
@@ -126,7 +138,9 @@ class AzureServiceBusSendMessageOperator(BaseOperator):
          hook = MessageHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id)

          # send message
-         hook.send_message(self.queue_name, self.message, self.batch)
+         hook.send_message(
+             self.queue_name, self.message, self.batch, self.message_id, self.reply_to, self.message_headers
+         )


  class AzureServiceBusReceiveMessageOperator(BaseOperator):
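
A sketch of the new operator parameters in a DAG; the task id, queue name and
header values are illustrative::

    from uuid import uuid4

    from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusSendMessageOperator

    send_message = AzureServiceBusSendMessageOperator(
        task_id="send_message_with_id",
        queue_name="my-queue",
        message="hello",
        batch=False,
        message_id=str(uuid4()),    # permitted because a single message is sent
        reply_to="my-reply-queue",  # queue vs. topic is decided out-of-band
        message_headers={"source": "airflow"},
    )
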
src/airflow/providers/microsoft/azure/operators/container_instances.py
@@ -120,7 +120,13 @@ class AzureContainerInstancesOperator(BaseOperator):
              },
              secured_variables=["POSTGRES_PASSWORD"],
              volumes=[
-                 ("azure_container_instance_conn_id", "my_storage_container", "my_fileshare", "/input-data", True),
+                 (
+                     "azure_container_instance_conn_id",
+                     "my_storage_container",
+                     "my_fileshare",
+                     "/input-data",
+                     True,
+                 ),
              ],
              memory_in_gb=14.0,
              cpu=4.0,

src/airflow/providers/microsoft/azure/operators/msgraph.py
@@ -38,8 +38,6 @@ from airflow.utils.xcom import XCOM_RETURN_KEY
  if TYPE_CHECKING:
      from io import BytesIO

-     from kiota_abstractions.request_adapter import ResponseType
-     from kiota_abstractions.request_information import QueryParams
      from msgraph_core import APIVersion

      from airflow.utils.context import Context
@@ -118,11 +116,11 @@ class MSGraphAsyncOperator(BaseOperator):
          self,
          *,
          url: str,
-         response_type: ResponseType | None = None,
+         response_type: str | None = None,
          path_parameters: dict[str, Any] | None = None,
          url_template: str | None = None,
          method: str = "GET",
-         query_parameters: dict[str, QueryParams] | None = None,
+         query_parameters: dict[str, Any] | None = None,
          headers: dict[str, str] | None = None,
          data: dict[str, Any] | str | BytesIO | None = None,
          conn_id: str = KiotaRequestAdapterHook.default_conn_name,
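
Since response_type is now a plain string, a caller passes the primitive type
name directly; leaving it None lets the default response handler decode JSON.
An illustrative task; the connection id and user id are placeholders::

    from airflow.providers.microsoft.azure.operators.msgraph import MSGraphAsyncOperator

    download_photo = MSGraphAsyncOperator(
        task_id="download_user_photo",
        conn_id="msgraph_default",  # placeholder connection id
        url="users/{user_id}/photo/$value",
        path_parameters={"user_id": "some-user-id"},
        response_type="bytes",      # primitive type name, not a Kiota type
    )
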
src/airflow/providers/microsoft/azure/sensors/msgraph.py
@@ -31,8 +31,6 @@ if TYPE_CHECKING:
      from datetime import timedelta
      from io import BytesIO

-     from kiota_abstractions.request_information import QueryParams
-     from kiota_http.httpx_request_adapter import ResponseType
      from msgraph_core import APIVersion

      from airflow.utils.context import Context
@@ -76,11 +74,11 @@ class MSGraphSensor(BaseSensorOperator):
      def __init__(
          self,
          url: str,
-         response_type: ResponseType | None = None,
+         response_type: str | None = None,
          path_parameters: dict[str, Any] | None = None,
          url_template: str | None = None,
          method: str = "GET",
-         query_parameters: dict[str, QueryParams] | None = None,
+         query_parameters: dict[str, Any] | None = None,
          headers: dict[str, str] | None = None,
          data: dict[str, Any] | str | BytesIO | None = None,
          conn_id: str = KiotaRequestAdapterHook.default_conn_name,

src/airflow/providers/microsoft/azure/triggers/msgraph.py
@@ -40,8 +40,6 @@ if TYPE_CHECKING:
      from io import BytesIO

      from kiota_abstractions.request_adapter import RequestAdapter
-     from kiota_abstractions.request_information import QueryParams
-     from kiota_http.httpx_request_adapter import ResponseType
      from msgraph_core import APIVersion


@@ -112,11 +110,11 @@ class MSGraphTrigger(BaseTrigger):
      def __init__(
          self,
          url: str,
-         response_type: ResponseType | None = None,
+         response_type: str | None = None,
          path_parameters: dict[str, Any] | None = None,
          url_template: str | None = None,
          method: str = "GET",
-         query_parameters: dict[str, QueryParams] | None = None,
+         query_parameters: dict[str, Any] | None = None,
          headers: dict[str, str] | None = None,
          data: dict[str, Any] | str | BytesIO | None = None,
          conn_id: str = KiotaRequestAdapterHook.default_conn_name,