apache-airflow-providers-microsoft-azure 10.3.0__tar.gz → 10.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/PKG-INFO +8 -8
  2. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/README.rst +4 -4
  3. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/__init__.py +3 -3
  4. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/get_provider_info.py +25 -2
  5. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/msgraph.py +7 -4
  6. apache_airflow_providers_microsoft_azure-10.4.0/airflow/providers/microsoft/azure/hooks/powerbi.py +218 -0
  7. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/wasb.py +9 -5
  8. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/container_instances.py +18 -0
  9. apache_airflow_providers_microsoft_azure-10.4.0/airflow/providers/microsoft/azure/operators/powerbi.py +120 -0
  10. apache_airflow_providers_microsoft_azure-10.4.0/airflow/providers/microsoft/azure/triggers/powerbi.py +181 -0
  11. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/pyproject.toml +4 -4
  12. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/LICENSE +0 -0
  13. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/fs/__init__.py +0 -0
  14. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/fs/adls.py +0 -0
  15. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/__init__.py +0 -0
  16. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/adx.py +0 -0
  17. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/asb.py +0 -0
  18. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/base_azure.py +0 -0
  19. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/batch.py +0 -0
  20. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/container_instance.py +0 -0
  21. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/container_registry.py +0 -0
  22. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/container_volume.py +0 -0
  23. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/cosmos.py +0 -0
  24. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/data_factory.py +0 -0
  25. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/data_lake.py +0 -0
  26. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/fileshare.py +0 -0
  27. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/hooks/synapse.py +0 -0
  28. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/log/__init__.py +0 -0
  29. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/log/wasb_task_handler.py +0 -0
  30. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/__init__.py +0 -0
  31. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/adls.py +0 -0
  32. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/adx.py +0 -0
  33. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/asb.py +0 -0
  34. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/batch.py +0 -0
  35. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/cosmos.py +0 -0
  36. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/data_factory.py +0 -0
  37. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/msgraph.py +0 -0
  38. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/synapse.py +0 -0
  39. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py +0 -0
  40. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/secrets/__init__.py +0 -0
  41. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/secrets/key_vault.py +0 -0
  42. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/sensors/__init__.py +0 -0
  43. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/sensors/cosmos.py +0 -0
  44. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/sensors/data_factory.py +0 -0
  45. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/sensors/msgraph.py +0 -0
  46. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/sensors/wasb.py +0 -0
  47. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/__init__.py +0 -0
  48. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py +0 -0
  49. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/local_to_adls.py +0 -0
  50. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/local_to_wasb.py +0 -0
  51. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +0 -0
  52. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py +0 -0
  53. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +0 -0
  54. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/triggers/__init__.py +0 -0
  55. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/triggers/data_factory.py +0 -0
  56. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/triggers/msgraph.py +0 -0
  57. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/triggers/wasb.py +0 -0
  58. {apache_airflow_providers_microsoft_azure-10.3.0 → apache_airflow_providers_microsoft_azure-10.4.0}/airflow/providers/microsoft/azure/utils.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-microsoft-azure
- Version: 10.3.0
+ Version: 10.4.0
  Summary: Provider package apache-airflow-providers-microsoft-azure for Apache Airflow
  Keywords: airflow-provider,microsoft.azure,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,7 +23,7 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
  Requires-Dist: adal>=1.2.7
  Requires-Dist: adlfs>=2023.10.0
- Requires-Dist: apache-airflow>=2.7.0
+ Requires-Dist: apache-airflow>=2.8.0
  Requires-Dist: azure-batch>=8.0.0
  Requires-Dist: azure-cosmos>=4.6.0
  Requires-Dist: azure-datalake-store>=0.0.45
@@ -49,8 +49,8 @@ Requires-Dist: apache-airflow-providers-google ; extra == "google"
  Requires-Dist: apache-airflow-providers-oracle ; extra == "oracle"
  Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -104,7 +104,7 @@ Provides-Extra: sftp

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``10.3.0``
+ Release: ``10.4.0``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -117,7 +117,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0/>`_.

  Installation
  ------------
@@ -134,7 +134,7 @@ Requirements
  ================================ ==================
  PIP package                      Version required
  ================================ ==================
- ``apache-airflow``               ``>=2.7.0``
+ ``apache-airflow``               ``>=2.8.0``
  ``adlfs``                        ``>=2023.10.0``
  ``azure-batch``                  ``>=8.0.0``
  ``azure-cosmos``                 ``>=4.6.0``
@@ -182,4 +182,4 @@ Dependent package
  ==================================================================================================== ==========

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0/changelog.html>`_.
@@ -42,7 +42,7 @@

  Package ``apache-airflow-providers-microsoft-azure``

- Release: ``10.3.0``
+ Release: ``10.4.0``


  `Microsoft Azure <https://azure.microsoft.com/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``microsoft.azure`` provider. All classes for thi
  are in ``airflow.providers.microsoft.azure`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0/>`_.

  Installation
  ------------
@@ -72,7 +72,7 @@ Requirements
  ================================ ==================
  PIP package                      Version required
  ================================ ==================
- ``apache-airflow``               ``>=2.7.0``
+ ``apache-airflow``               ``>=2.8.0``
  ``adlfs``                        ``>=2023.10.0``
  ``azure-batch``                  ``>=8.0.0``
  ``azure-cosmos``                 ``>=4.6.0``
@@ -120,4 +120,4 @@ Dependent package
  ==================================================================================================== ==========

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0/changelog.html>`_.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "10.3.0"
+ __version__ = "10.4.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-     "2.7.0"
+     "2.8.0"
  ):
      raise RuntimeError(
-         f"The package `apache-airflow-providers-microsoft-azure:{__version__}` needs Apache Airflow 2.7.0+"
+         f"The package `apache-airflow-providers-microsoft-azure:{__version__}` needs Apache Airflow 2.8.0+"
      )
@@ -28,8 +28,9 @@ def get_provider_info():
      "name": "Microsoft Azure",
      "description": "`Microsoft Azure <https://azure.microsoft.com/>`__\n",
      "state": "ready",
-     "source-date-epoch": 1722664136,
+     "source-date-epoch": 1723970344,
      "versions": [
+         "10.4.0",
          "10.3.0",
          "10.2.0",
          "10.1.2",
@@ -88,7 +89,7 @@ def get_provider_info():
          "1.0.0",
      ],
      "dependencies": [
-         "apache-airflow>=2.7.0",
+         "apache-airflow>=2.8.0",
          "adlfs>=2023.10.0",
          "azure-batch>=8.0.0",
          "azure-cosmos>=4.6.0",
@@ -207,6 +208,11 @@ def get_provider_info():
              "how-to-guide": ["/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst"],
              "tags": ["azure"],
          },
+         {
+             "integration-name": "Microsoft Power BI",
+             "external-doc-url": "https://learn.microsoft.com/en-us/rest/api/power-bi/",
+             "tags": ["azure"],
+         },
      ],
      "operators": [
          {
@@ -249,6 +255,10 @@ def get_provider_info():
              "integration-name": "Microsoft Graph API",
              "python-modules": ["airflow.providers.microsoft.azure.operators.msgraph"],
          },
+         {
+             "integration-name": "Microsoft Power BI",
+             "python-modules": ["airflow.providers.microsoft.azure.operators.powerbi"],
+         },
      ],
      "sensors": [
          {
@@ -326,6 +336,10 @@ def get_provider_info():
              "integration-name": "Microsoft Graph API",
              "python-modules": ["airflow.providers.microsoft.azure.hooks.msgraph"],
          },
+         {
+             "integration-name": "Microsoft Power BI",
+             "python-modules": ["airflow.providers.microsoft.azure.hooks.powerbi"],
+         },
      ],
      "triggers": [
          {
@@ -340,6 +354,10 @@ def get_provider_info():
              "integration-name": "Microsoft Graph API",
              "python-modules": ["airflow.providers.microsoft.azure.triggers.msgraph"],
          },
+         {
+             "integration-name": "Microsoft Power BI",
+             "python-modules": ["airflow.providers.microsoft.azure.triggers.powerbi"],
+         },
      ],
      "transfers": [
          {
@@ -435,12 +453,17 @@ def get_provider_info():
              "hook-class-name": "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeStorageV2Hook",
              "connection-type": "adls",
          },
+         {
+             "hook-class-name": "airflow.providers.microsoft.azure.hooks.powerbi.PowerBIHook",
+             "connection-type": "powerbi",
+         },
      ],
      "secrets-backends": ["airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend"],
      "logging": ["airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler"],
      "extra-links": [
          "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink",
          "airflow.providers.microsoft.azure.operators.synapse.AzureSynapsePipelineRunLink",
+         "airflow.providers.microsoft.azure.operators.powerbi.PowerBILink",
      ],
      "config": {
          "azure_remote_logging": {
@@ -110,12 +110,16 @@ class KiotaRequestAdapterHook(BaseHook):
          conn_id: str = default_conn_name,
          timeout: float | None = None,
          proxies: dict | None = None,
+         host: str = NationalClouds.Global.value,
+         scopes: list[str] | None = None,
          api_version: APIVersion | str | None = None,
      ):
          super().__init__()
          self.conn_id = conn_id
          self.timeout = timeout
          self.proxies = proxies
+         self.host = host
+         self.scopes = scopes or ["https://graph.microsoft.com/.default"]
          self._api_version = self.resolve_api_version_from_value(api_version)

      @property
@@ -141,11 +145,10 @@ class KiotaRequestAdapterHook(BaseHook):
          )
          return self._api_version

-     @staticmethod
-     def get_host(connection: Connection) -> str:
+     def get_host(self, connection: Connection) -> str:
          if connection.schema and connection.host:
              return f"{connection.schema}://{connection.host}"
-         return NationalClouds.Global.value
+         return self.host

      @staticmethod
      def format_no_proxy_url(url: str) -> str:
@@ -198,7 +201,7 @@ class KiotaRequestAdapterHook(BaseHook):
          proxies = self.proxies or config.get("proxies", {})
          msal_proxies = self.to_msal_proxies(authority=authority, proxies=proxies)
          httpx_proxies = self.to_httpx_proxies(proxies=proxies)
-         scopes = config.get("scopes", ["https://graph.microsoft.com/.default"])
+         scopes = config.get("scopes", self.scopes)
          verify = config.get("verify", True)
          trust_env = config.get("trust_env", False)
          disable_instance_discovery = config.get("disable_instance_discovery", False)
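The practical effect of the new ``host`` and ``scopes`` parameters is that subclasses such as ``PowerBIHook`` can point the same Kiota machinery at a different API. A minimal sketch, where the values shown are the defaults that previously were hard-coded and the conn_id is a placeholder:

```python
# Sketch of the widened constructor; no behaviour changes when defaults are used.
from msgraph_core import NationalClouds

from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook

hook = KiotaRequestAdapterHook(
    conn_id="msgraph_default",                        # placeholder connection id
    host=NationalClouds.Global.value,                 # default host, now overridable
    scopes=["https://graph.microsoft.com/.default"],  # default scopes, now overridable
)
```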
@@ -0,0 +1,218 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ from enum import Enum
+ from typing import TYPE_CHECKING, Any
+
+ from airflow.exceptions import AirflowException
+ from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook
+
+ if TYPE_CHECKING:
+     from msgraph_core import APIVersion
+
+
+ class PowerBIDatasetRefreshFields(Enum):
+     """Power BI refresh dataset details."""
+
+     REQUEST_ID = "request_id"
+     STATUS = "status"
+     ERROR = "error"
+
+
+ class PowerBIDatasetRefreshStatus:
+     """Power BI refresh dataset statuses."""
+
+     IN_PROGRESS = "In Progress"
+     FAILED = "Failed"
+     COMPLETED = "Completed"
+     DISABLED = "Disabled"
+
+     TERMINAL_STATUSES = {FAILED, COMPLETED}
+
+
+ class PowerBIDatasetRefreshException(AirflowException):
+     """An exception that indicates a dataset refresh failed to complete."""
+
+
+ class PowerBIHook(KiotaRequestAdapterHook):
+     """
+     An async hook to interact with Power BI.
+
+     :param conn_id: The Power BI connection id.
+     """
+
+     conn_type: str = "powerbi"
+     conn_name_attr: str = "conn_id"
+     default_conn_name: str = "powerbi_default"
+     hook_name: str = "Power BI"
+
+     def __init__(
+         self,
+         conn_id: str = default_conn_name,
+         proxies: dict | None = None,
+         timeout: float = 60 * 60 * 24 * 7,
+         api_version: APIVersion | str | None = None,
+     ):
+         super().__init__(
+             conn_id=conn_id,
+             proxies=proxies,
+             timeout=timeout,
+             host="https://api.powerbi.com",
+             scopes=["https://analysis.windows.net/powerbi/api/.default"],
+             api_version=api_version,
+         )
+
+     @classmethod
+     def get_connection_form_widgets(cls) -> dict[str, Any]:
+         """Return connection widgets to add to connection form."""
+         from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
+         from flask_babel import lazy_gettext
+         from wtforms import StringField
+
+         return {
+             "tenant_id": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()),
+         }
+
+     @classmethod
+     def get_ui_field_behaviour(cls) -> dict[str, Any]:
+         """Return custom field behaviour."""
+         return {
+             "hidden_fields": ["schema", "port", "host", "extra"],
+             "relabeling": {
+                 "login": "Client ID",
+                 "password": "Client Secret",
+             },
+         }
+
+     async def get_refresh_history(
+         self,
+         dataset_id: str,
+         group_id: str,
+     ) -> list[dict[str, str]]:
+         """
+         Retrieve the refresh history of the specified dataset from the given group ID.
+
+         :param dataset_id: The dataset ID.
+         :param group_id: The workspace ID.
+
+         :return: A list of dictionaries, one per refresh history entry of the dataset.
+         """
+         try:
+             response = await self.run(
+                 url="myorg/groups/{group_id}/datasets/{dataset_id}/refreshes",
+                 path_parameters={
+                     "group_id": group_id,
+                     "dataset_id": dataset_id,
+                 },
+             )
+
+             refresh_histories = response.get("value")
+             return [self.raw_to_refresh_details(refresh_history) for refresh_history in refresh_histories]
+
+         except AirflowException:
+             raise PowerBIDatasetRefreshException("Failed to retrieve refresh history")
+
+     @classmethod
+     def raw_to_refresh_details(cls, refresh_details: dict) -> dict[str, str]:
+         """
+         Convert raw refresh details into a dictionary containing required fields.
+
+         :param refresh_details: Raw object of refresh details.
+         """
+         return {
+             PowerBIDatasetRefreshFields.REQUEST_ID.value: str(refresh_details.get("requestId")),
+             PowerBIDatasetRefreshFields.STATUS.value: (
+                 "In Progress"
+                 if str(refresh_details.get("status")) == "Unknown"
+                 else str(refresh_details.get("status"))
+             ),
+             PowerBIDatasetRefreshFields.ERROR.value: str(refresh_details.get("serviceExceptionJson")),
+         }
+
+     async def get_refresh_details_by_refresh_id(
+         self, dataset_id: str, group_id: str, refresh_id: str
+     ) -> dict[str, str]:
+         """
+         Get the refresh details of the given request Id.
+
+         :param refresh_id: Request Id of the Dataset refresh.
+         """
+         refresh_histories = await self.get_refresh_history(dataset_id=dataset_id, group_id=group_id)
+
+         if len(refresh_histories) == 0:
+             raise PowerBIDatasetRefreshException(
+                 f"Unable to fetch the details of dataset refresh with Request Id: {refresh_id}"
+             )
+
+         refresh_ids = [
+             refresh_history.get(PowerBIDatasetRefreshFields.REQUEST_ID.value)
+             for refresh_history in refresh_histories
+         ]
+
+         if refresh_id not in refresh_ids:
+             raise PowerBIDatasetRefreshException(
+                 f"Unable to fetch the details of dataset refresh with Request Id: {refresh_id}"
+             )
+
+         refresh_details = refresh_histories[refresh_ids.index(refresh_id)]
+
+         return refresh_details
+
+     async def trigger_dataset_refresh(self, *, dataset_id: str, group_id: str) -> str:
+         """
+         Trigger a refresh for the specified dataset from the given group id.
+
+         :param dataset_id: The dataset id.
+         :param group_id: The workspace id.
+
+         :return: Request id of the dataset refresh request.
+         """
+         try:
+             response = await self.run(
+                 url="myorg/groups/{group_id}/datasets/{dataset_id}/refreshes",
+                 method="POST",
+                 path_parameters={
+                     "group_id": group_id,
+                     "dataset_id": dataset_id,
+                 },
+             )
+
+             request_id = response.get("requestid")
+             return request_id
+         except AirflowException:
+             raise PowerBIDatasetRefreshException("Failed to trigger dataset refresh.")
+
+     async def cancel_dataset_refresh(self, dataset_id: str, group_id: str, dataset_refresh_id: str) -> None:
+         """
+         Cancel the dataset refresh.
+
+         :param dataset_id: The dataset Id.
+         :param group_id: The workspace Id.
+         :param dataset_refresh_id: The dataset refresh Id.
+         """
+         await self.run(
+             url="myorg/groups/{group_id}/datasets/{dataset_id}/refreshes/{dataset_refresh_id}",
+             response_type=None,
+             path_parameters={
+                 "group_id": group_id,
+                 "dataset_id": dataset_id,
+                 "dataset_refresh_id": dataset_refresh_id,
+             },
+             method="DELETE",
+         )
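Since the hook methods are coroutines, they can also be exercised directly from an event loop outside of a trigger. A minimal sketch, assuming a working ``powerbi_default`` connection; the dataset and workspace ids are placeholders:

```python
# Hedged sketch (not part of the diff): trigger a refresh, then look it up again.
import asyncio

from airflow.providers.microsoft.azure.hooks.powerbi import PowerBIHook


async def refresh_once() -> str:
    hook = PowerBIHook(conn_id="powerbi_default")
    # POST .../refreshes returns the request id of the new refresh
    refresh_id = await hook.trigger_dataset_refresh(
        dataset_id="<dataset-id>", group_id="<workspace-id>"
    )
    # Find that refresh again in the dataset's refresh history
    details = await hook.get_refresh_details_by_refresh_id(
        dataset_id="<dataset-id>", group_id="<workspace-id>", refresh_id=refresh_id
    )
    return details["status"]


print(asyncio.run(refresh_once()))
```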
@@ -213,7 +213,8 @@ class WasbHook(BaseHook):
              **extra,
          )

-     def _get_container_client(self, container_name: str) -> ContainerClient:
+     # TODO: rework the interface as it might also return AsyncContainerClient
+     def _get_container_client(self, container_name: str) -> ContainerClient:  # type: ignore[override]
          """
          Instantiate a container client.

@@ -222,7 +223,7 @@
          """
          return self.blob_service_client.get_container_client(container_name)

-     def _get_blob_client(self, container_name: str, blob_name: str) -> BlobClient:
+     def _get_blob_client(self, container_name: str, blob_name: str) -> BlobClient | AsyncBlobClient:
          """
          Instantiate a blob client.

@@ -415,7 +416,8 @@
              self.create_container(container_name)

          blob_client = self._get_blob_client(container_name, blob_name)
-         return blob_client.upload_blob(data, blob_type, length=length, **kwargs)
+         # TODO: rework the interface as it might also return Awaitable
+         return blob_client.upload_blob(data, blob_type, length=length, **kwargs)  # type: ignore[return-value]

      def download(
          self, container_name, blob_name, offset: int | None = None, length: int | None = None, **kwargs
@@ -430,7 +432,8 @@
          :param length: Number of bytes to read from the stream.
          """
          blob_client = self._get_blob_client(container_name, blob_name)
-         return blob_client.download_blob(offset=offset, length=length, **kwargs)
+         # TODO: rework the interface as it might also return Awaitable
+         return blob_client.download_blob(offset=offset, length=length, **kwargs)  # type: ignore[return-value]

      def create_container(self, container_name: str) -> None:
          """
@@ -656,7 +659,8 @@ class WasbAsyncHook(WasbHook):
                  return False
          return True

-     def _get_container_client(self, container_name: str) -> AsyncContainerClient:
+     # TODO: rework the interface as in the parent hook it returns a ContainerClient
+     def _get_container_client(self, container_name: str) -> AsyncContainerClient:  # type: ignore[override]
          """
          Instantiate a container client.

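The reason for the loosened annotations is that ``WasbAsyncHook`` reuses these method names but returns clients from ``azure.storage.blob.aio``. An illustrative sketch of that divergence (the connection string is a well-formed dummy with placeholder account and key; no network calls are made):

```python
# Illustration only: the sync and async client factories share names but
# return different types, which is what the type: ignore comments paper over.
from azure.storage.blob import BlobServiceClient
from azure.storage.blob.aio import BlobServiceClient as AsyncBlobServiceClient

CONN_STR = (
    "DefaultEndpointsProtocol=https;AccountName=devaccount;"
    "AccountKey=Zm9vYmFy;EndpointSuffix=core.windows.net"
)

sync_client = BlobServiceClient.from_connection_string(CONN_STR)
async_client = AsyncBlobServiceClient.from_connection_string(CONN_STR)

print(type(sync_client.get_container_client("example")))   # sync ContainerClient
print(type(async_client.get_container_client("example")))  # aio ContainerClient
```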
@@ -86,6 +86,12 @@ class AzureContainerInstancesOperator(BaseOperator):
      :param container_timeout: max time allowed for the execution of
          the container instance.
      :param tags: azure tags as dict of str:str
+     :param xcom_all: Control whether logs are pushed to XCom, similarly to how DockerOperator does it.
+         Possible values are ``None``, ``True`` and ``False``. Defaults to ``None``, meaning no logs
+         are pushed to XCom, which is the historical behaviour. ``True`` pushes all logs to XCom,
+         which may run the risk of hitting XCom size limits. ``False`` pushes only the last line
+         of the logs to XCom. In both cases the logs are pushed under the "logs" key, not return_value,
+         to avoid breaking the existing behaviour.
      :param os_type: The operating system type required by the containers
          in the container group. Possible values include: 'Windows', 'Linux'
      :param restart_policy: Restart policy for all containers within the container group.
@@ -158,6 +164,7 @@
          remove_on_error: bool = True,
          fail_if_exists: bool = True,
          tags: dict[str, str] | None = None,
+         xcom_all: bool | None = None,
          os_type: str = "Linux",
          restart_policy: str = "Never",
          ip_address: IpAddress | None = None,
@@ -187,6 +194,7 @@
          self.fail_if_exists = fail_if_exists
          self._ci_hook: Any = None
          self.tags = tags
+         self.xcom_all = xcom_all
          self.os_type = os_type
          if self.os_type not in ["Linux", "Windows"]:
              raise AirflowException(
@@ -296,6 +304,16 @@
          self.log.info("Container group started %s/%s", self.resource_group, self.name)

          exit_code = self._monitor_logging(self.resource_group, self.name)
+         if self.xcom_all is not None:
+             logs = self._ci_hook.get_logs(self.resource_group, self.name)
+             if logs is None:
+                 context["ti"].xcom_push(key="logs", value=[])
+             else:
+                 if self.xcom_all:
+                     context["ti"].xcom_push(key="logs", value=logs)
+                 else:
+                     # slice off the last entry of the logs list and push it as a single-element list
+                     context["ti"].xcom_push(key="logs", value=logs[-1:])

          self.log.info("Container had exit code: %s", exit_code)
          if exit_code != 0:
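A usage sketch for the new flag (connection ids, resource names, and image below are placeholders); a downstream task can then read the pushed logs with ``ti.xcom_pull(task_ids="run_container", key="logs")``:

```python
# Hedged example of opting in to log pushing with xcom_all.
from airflow.providers.microsoft.azure.operators.container_instances import (
    AzureContainerInstancesOperator,
)

task = AzureContainerInstancesOperator(
    task_id="run_container",
    ci_conn_id="azure_default",   # placeholder connection id
    registry_conn_id=None,        # public image, no registry credentials
    resource_group="my-rg",       # placeholder resource group
    name="my-aci-group",          # placeholder container group name
    image="busybox:latest",
    region="westeurope",
    xcom_all=False,               # push only the last log line under key "logs"
)
```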
@@ -0,0 +1,120 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any, Sequence
+
+ from airflow.exceptions import AirflowException
+ from airflow.models import BaseOperator, BaseOperatorLink
+ from airflow.providers.microsoft.azure.hooks.powerbi import (
+     PowerBIHook,
+ )
+ from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger
+
+ if TYPE_CHECKING:
+     from msgraph_core import APIVersion
+
+     from airflow.models.taskinstancekey import TaskInstanceKey
+     from airflow.utils.context import Context
+
+
+ class PowerBILink(BaseOperatorLink):
+     """Construct a link to monitor a dataset in Power BI."""
+
+     name = "Monitor PowerBI Dataset"
+
+     def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey):
+         url = (
+             "https://app.powerbi.com"  # type: ignore[attr-defined]
+             f"/groups/{operator.group_id}/datasets/{operator.dataset_id}"  # type: ignore[attr-defined]
+             "/details?experience=power-bi"
+         )
+
+         return url
+
+
+ class PowerBIDatasetRefreshOperator(BaseOperator):
+     """
+     Refreshes a Power BI dataset.
+
+     :param dataset_id: The dataset id.
+     :param group_id: The workspace id.
+     :param conn_id: Airflow Connection ID that contains the connection information for the Power BI account used for authentication.
+     :param timeout: Time in seconds to wait for a dataset to reach a terminal status for asynchronous waits. Used only if ``wait_for_termination`` is True.
+     :param check_interval: Number of seconds to wait before rechecking the
+         refresh status.
+     """
+
+     template_fields: Sequence[str] = (
+         "dataset_id",
+         "group_id",
+     )
+     template_fields_renderers = {"parameters": "json"}
+
+     operator_extra_links = (PowerBILink(),)
+
+     def __init__(
+         self,
+         *,
+         dataset_id: str,
+         group_id: str,
+         conn_id: str = PowerBIHook.default_conn_name,
+         timeout: float = 60 * 60 * 24 * 7,
+         proxies: dict | None = None,
+         api_version: APIVersion | None = None,
+         check_interval: int = 60,
+         **kwargs,
+     ) -> None:
+         super().__init__(**kwargs)
+         self.hook = PowerBIHook(conn_id=conn_id, proxies=proxies, api_version=api_version, timeout=timeout)
+         self.dataset_id = dataset_id
+         self.group_id = group_id
+         self.wait_for_termination = True
+         self.conn_id = conn_id
+         self.timeout = timeout
+         self.check_interval = check_interval
+
+     def execute(self, context: Context):
+         """Refresh the Power BI Dataset."""
+         if self.wait_for_termination:
+             self.defer(
+                 trigger=PowerBITrigger(
+                     conn_id=self.conn_id,
+                     group_id=self.group_id,
+                     dataset_id=self.dataset_id,
+                     timeout=self.timeout,
+                     check_interval=self.check_interval,
+                     wait_for_termination=self.wait_for_termination,
+                 ),
+                 method_name=self.execute_complete.__name__,
+             )
+
+     def execute_complete(self, context: Context, event: dict[str, str]) -> Any:
+         """
+         Return immediately - callback for when the trigger fires.
+
+         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
+         """
+         if event:
+             if event["status"] == "error":
+                 raise AirflowException(event["message"])
+
+             self.xcom_push(
+                 context=context, key="powerbi_dataset_refresh_Id", value=event["dataset_refresh_id"]
+             )
+             self.xcom_push(context=context, key="powerbi_dataset_refresh_status", value=event["status"])
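A minimal DAG sketch using the new operator (the dataset and workspace ids are placeholders); the task defers immediately and is completed by the trigger event:

```python
# Hedged usage example, assuming a configured "powerbi_default" connection.
from datetime import datetime

from airflow import DAG
from airflow.providers.microsoft.azure.operators.powerbi import PowerBIDatasetRefreshOperator

with DAG(
    dag_id="powerbi_refresh_example",
    start_date=datetime(2024, 1, 1),
    schedule=None,
) as dag:
    refresh = PowerBIDatasetRefreshOperator(
        task_id="refresh_dataset",
        conn_id="powerbi_default",
        dataset_id="<dataset-id>",   # placeholder
        group_id="<workspace-id>",   # placeholder
        check_interval=30,           # poll every 30 seconds while deferred
    )
```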
@@ -0,0 +1,181 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ import asyncio
+ import time
+ from typing import TYPE_CHECKING, AsyncIterator
+
+ from airflow.providers.microsoft.azure.hooks.powerbi import (
+     PowerBIDatasetRefreshStatus,
+     PowerBIHook,
+ )
+ from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+ if TYPE_CHECKING:
+     from msgraph_core import APIVersion
+
+
+ class PowerBITrigger(BaseTrigger):
+     """
+     Triggers when a Power BI dataset refresh is completed.
+
+     Wait for termination will always be True.
+
+     :param conn_id: The connection Id to connect to PowerBI.
+     :param timeout: The HTTP timeout being used by the `KiotaRequestAdapter` (default is 7 days).
+         When no timeout is specified or set to None then there is no HTTP timeout on each request.
+     :param proxies: A dict defining the HTTP proxies to be used (default is None).
+     :param api_version: The API version of the Microsoft Graph API to be used (default is v1).
+         You can pass an enum named APIVersion which has 2 possible members v1 and beta,
+         or you can pass a string as `v1.0` or `beta`.
+     :param dataset_id: The dataset Id to refresh.
+     :param group_id: The workspace Id where the dataset is located.
+     :param end_time: Time in seconds when the trigger should stop polling.
+     :param check_interval: Time in seconds to wait between each poll.
+     :param wait_for_termination: Wait for the dataset refresh to complete or fail.
+     """
+
+     def __init__(
+         self,
+         conn_id: str,
+         dataset_id: str,
+         group_id: str,
+         timeout: float = 60 * 60 * 24 * 7,
+         proxies: dict | None = None,
+         api_version: APIVersion | None = None,
+         check_interval: int = 60,
+         wait_for_termination: bool = True,
+     ):
+         super().__init__()
+         self.hook = PowerBIHook(conn_id=conn_id, proxies=proxies, api_version=api_version, timeout=timeout)
+         self.dataset_id = dataset_id
+         self.timeout = timeout
+         self.group_id = group_id
+         self.check_interval = check_interval
+         self.wait_for_termination = wait_for_termination
+
+     def serialize(self):
+         """Serialize the trigger instance."""
+         api_version = self.api_version.value if self.api_version else None
+         return (
+             "airflow.providers.microsoft.azure.triggers.powerbi.PowerBITrigger",
+             {
+                 "conn_id": self.conn_id,
+                 "proxies": self.proxies,
+                 "api_version": api_version,
+                 "dataset_id": self.dataset_id,
+                 "group_id": self.group_id,
+                 "timeout": self.timeout,
+                 "check_interval": self.check_interval,
+                 "wait_for_termination": self.wait_for_termination,
+             },
+         )
+
+     @property
+     def conn_id(self) -> str:
+         return self.hook.conn_id
+
+     @property
+     def proxies(self) -> dict | None:
+         return self.hook.proxies
+
+     @property
+     def api_version(self) -> APIVersion:
+         return self.hook.api_version
+
+     async def run(self) -> AsyncIterator[TriggerEvent]:
+         """Make an async connection to Power BI and poll for the dataset refresh status."""
+         self.dataset_refresh_id = await self.hook.trigger_dataset_refresh(
+             dataset_id=self.dataset_id,
+             group_id=self.group_id,
+         )
+         try:
+             dataset_refresh_status = None
+             start_time = time.monotonic()
+             while start_time + self.timeout > time.monotonic():
+                 refresh_details = await self.hook.get_refresh_details_by_refresh_id(
+                     dataset_id=self.dataset_id,
+                     group_id=self.group_id,
+                     refresh_id=self.dataset_refresh_id,
+                 )
+                 dataset_refresh_status = refresh_details.get("status")
+
+                 if dataset_refresh_status == PowerBIDatasetRefreshStatus.COMPLETED:
+                     yield TriggerEvent(
+                         {
+                             "status": dataset_refresh_status,
+                             "message": f"The dataset refresh {self.dataset_refresh_id} has {dataset_refresh_status}.",
+                             "dataset_refresh_id": self.dataset_refresh_id,
+                         }
+                     )
+                     return
+                 elif dataset_refresh_status == PowerBIDatasetRefreshStatus.FAILED:
+                     yield TriggerEvent(
+                         {
+                             "status": dataset_refresh_status,
+                             "message": f"The dataset refresh {self.dataset_refresh_id} has {dataset_refresh_status}.",
+                             "dataset_refresh_id": self.dataset_refresh_id,
+                         }
+                     )
+                     return
+
+                 self.log.info(
+                     "Sleeping for %s. The dataset refresh status is %s.",
+                     self.check_interval,
+                     dataset_refresh_status,
+                 )
+                 await asyncio.sleep(self.check_interval)
+
+             yield TriggerEvent(
+                 {
+                     "status": "error",
+                     "message": f"Timeout occurred while waiting for dataset refresh to complete: The dataset refresh {self.dataset_refresh_id} has status {dataset_refresh_status}.",
+                     "dataset_refresh_id": self.dataset_refresh_id,
+                 }
+             )
+             return
+         except Exception as error:
+             if self.dataset_refresh_id:
+                 try:
+                     self.log.info(
+                         "Unexpected error %s caught. Canceling dataset refresh %s",
+                         error,
+                         self.dataset_refresh_id,
+                     )
+                     await self.hook.cancel_dataset_refresh(
+                         dataset_id=self.dataset_id,
+                         group_id=self.group_id,
+                         dataset_refresh_id=self.dataset_refresh_id,
+                     )
+                 except Exception as e:
+                     yield TriggerEvent(
+                         {
+                             "status": "error",
+                             "message": f"An error occurred while canceling dataset: {e}",
+                             "dataset_refresh_id": self.dataset_refresh_id,
+                         }
+                     )
+                     return
+             yield TriggerEvent(
+                 {
+                     "status": "error",
+                     "message": f"An error occurred: {error}",
+                     "dataset_refresh_id": self.dataset_refresh_id,
+                 }
+             )
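Because ``run()`` is an ordinary async generator, its first event can be awaited directly, which is handy in test-style code. A hedged sketch, assuming a reachable Power BI tenant behind a ``powerbi_default`` connection; ids are placeholders:

```python
# Illustrative only: pull the first TriggerEvent yielded by the trigger.
import asyncio

from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger


async def first_event():
    trigger = PowerBITrigger(
        conn_id="powerbi_default",
        dataset_id="<dataset-id>",    # placeholder
        group_id="<workspace-id>",    # placeholder
        timeout=300,                  # stop polling after 5 minutes
        check_interval=10,            # poll every 10 seconds
    )
    return await trigger.run().__anext__()  # first event: Completed/Failed/error


event = asyncio.run(first_event())
print(event.payload["status"], event.payload["dataset_refresh_id"])
```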
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-microsoft-azure"
- version = "10.3.0"
+ version = "10.4.0"
  description = "Provider package apache-airflow-providers-microsoft-azure for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -58,7 +58,7 @@ requires-python = "~=3.8"
  dependencies = [
      "adal>=1.2.7",
      "adlfs>=2023.10.0",
-     "apache-airflow>=2.7.0",
+     "apache-airflow>=2.8.0",
      "azure-batch>=8.0.0",
      "azure-cosmos>=4.6.0",
      "azure-datalake-store>=0.0.45",
@@ -82,8 +82,8 @@ dependencies = [
  ]

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.3.0/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure/10.4.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"