apache-airflow-providers-databricks 7.7.2__tar.gz → 7.7.3rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of apache-airflow-providers-databricks might be problematic.
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/PKG-INFO +24 -11
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/README.rst +16 -3
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/changelog.rst +19 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/index.rst +3 -3
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/provider.yaml +2 -1
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/pyproject.toml +8 -8
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/__init__.py +1 -1
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks_base.py +51 -27
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks.py +2 -7
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/plugins/databricks_workflow.py +6 -6
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks.py +0 -1
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_base.py +143 -1
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/commits.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/conf.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/connections/databricks.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/databricks_workflow_task_group_airflow_graph_view.png +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/workflow_plugin_launch_task.png +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/workflow_plugin_single_task.png +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/workflow_run_databricks_graph_view.png +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/integration-logos/Databricks.png +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/copy_into.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/index.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/jobs_create.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/notebook.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/repos_create.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/repos_delete.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/repos_update.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/run_now.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/sql.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/sql_statements.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/submit_run.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/task.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/workflow.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/plugins/index.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/plugins/workflow.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/security.rst +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/LICENSE +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/exceptions.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/get_provider_info.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks_repos.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks_workflow.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/mixins.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/openlineage.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/version_compat.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/conftest.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_repos.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_sensors.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_workflow.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity_async.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_copy.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_repos.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_workflow.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/plugins/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/plugins/test_databricks_workflow.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/test_databricks_partition.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/test_databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/test_exceptions.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/triggers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/triggers/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/test_mixins.py +0 -0
- {apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/test_openlineage.py +0 -0
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.7.2
+Version: 7.7.3rc1
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
-Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
+Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.6.0rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
 Requires-Dist: requests>=2.32.0,<3
 Requires-Dist: databricks-sql-connector>=4.0.0
 Requires-Dist: databricks-sqlalchemy>=1.0.2
@@ -33,13 +33,13 @@ Requires-Dist: pandas>=2.2.3; python_version >="3.13"
 Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
 Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
 Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
-Requires-Dist: apache-airflow-providers-fab>=2.2.0 ; extra == "fab" and ( python_version < '3.13')
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
+Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -75,7 +75,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.7.2``
+Release: ``7.7.3``
 
 
 `Databricks <https://databricks.com/>`__
@@ -88,7 +88,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/>`_.
 
 Installation
 ------------
@@ -141,6 +141,19 @@ Dependent package
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+================== ================================================================
+Extra              Dependencies
+================== ================================================================
+``sdk``            ``databricks-sdk==0.10.0``
+``azure-identity`` ``azure-identity>=1.3.1``
+``fab``            ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``standard``       ``apache-airflow-providers-standard``
+``openlineage``    ``apache-airflow-providers-openlineage>=2.3.0``
+================== ================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html>`_.
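The ``rc1`` floors in the pre-release build are not cosmetic: under PEP 440, a plain ``>=2.10.0`` specifier excludes pre-releases by default, so an RC provider that must resolve against RC dependencies rewrites each floor to name an rc explicitly. A quick illustration with the ``packaging`` library (illustrative only, not part of the diff):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

rc = Version("2.10.0rc1")

# A bare ">=2.10.0" floor skips pre-release versions entirely.
assert not SpecifierSet(">=2.10.0").contains(rc)

# Once the specifier itself names a pre-release, rc versions satisfy it.
assert SpecifierSet(">=2.10.0rc1").contains(rc)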
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/README.rst
RENAMED
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.7.2``
+Release: ``7.7.3``
 
 
 `Databricks <https://databricks.com/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/>`_.
 
 Installation
 ------------
@@ -89,5 +89,18 @@ Dependent package
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+================== ================================================================
+Extra              Dependencies
+================== ================================================================
+``sdk``            ``databricks-sdk==0.10.0``
+``azure-identity`` ``azure-identity>=1.3.1``
+``fab``            ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``standard``       ``apache-airflow-providers-standard``
+``openlineage``    ``apache-airflow-providers-openlineage>=2.3.0``
+================== ================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html>`_.
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/changelog.rst
RENAMED
@@ -26,6 +26,25 @@
 Changelog
 ---------
 
+7.7.3
+.....
+
+
+Release Date: ``|PypiReleaseDate|``
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fix metadata service check handle 429 (#55462)``
+
+Misc
+~~~~
+
+* ``Switch all airflow logging to structlog (#52651)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
 7.7.2
 .....
 
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/docs/index.rst
RENAMED
@@ -78,7 +78,7 @@ apache-airflow-providers-databricks package
 `Databricks <https://databricks.com/>`__
 
 
-Release: 7.7.2
+Release: 7.7.3
 
 Provider package
 ----------------
@@ -143,5 +143,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-databricks 7.7.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2.tar.gz.sha512>`__)
-* `The apache-airflow-providers-databricks 7.7.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-databricks 7.7.3 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3.tar.gz.sha512>`__)
+* `The apache-airflow-providers-databricks 7.7.3 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3-py3-none-any.whl.sha512>`__)
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/provider.yaml
RENAMED
@@ -22,12 +22,13 @@ description: |
     `Databricks <https://databricks.com/>`__
 
 state: ready
-source-date-epoch:
+source-date-epoch: 1757950169
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 7.7.3
   - 7.7.2
   - 7.7.1
   - 7.7.0
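The newly pinned ``source-date-epoch`` drives reproducible builds: build tooling uses it as the timestamp for generated files instead of the wall clock. Decoding the value (a quick check, not part of the diff):

from datetime import datetime, timezone

# 1757950169 seconds after the Unix epoch:
print(datetime.fromtimestamp(1757950169, tz=timezone.utc))
# -> 2025-09-15 15:29:29+00:00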
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/pyproject.toml
RENAMED
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-databricks"
-version = "7.7.2"
+version = "7.7.3rc1"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,9 +57,9 @@ requires-python = ">=3.10"
 # Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.10.0",
-    "apache-airflow-providers-common-compat>=1.6.0",
-    "apache-airflow-providers-common-sql>=1.27.0",
+    "apache-airflow>=2.10.0rc1",
+    "apache-airflow-providers-common-compat>=1.6.0rc1",
+    "apache-airflow-providers-common-sql>=1.27.0rc1",
     "requests>=2.32.0,<3",
     "databricks-sql-connector>=4.0.0",
     "databricks-sqlalchemy>=1.0.2",
@@ -82,13 +82,13 @@ dependencies = [
     "azure-identity>=1.3.1",
 ]
 "fab" = [
-    "apache-airflow-providers-fab>=2.2.0; python_version < '3.13'"
+    "apache-airflow-providers-fab>=2.2.0rc1; python_version < '3.13'"
 ]
 "standard" = [
     "apache-airflow-providers-standard"
 ]
 "openlineage" = [
-    "apache-airflow-providers-openlineage>=2.3.0"
+    "apache-airflow-providers-openlineage>=2.3.0rc1"
 ]
 
 [dependency-groups]
@@ -133,8 +133,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html"
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/__init__.py
RENAMED
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.7.2"
+__version__ = "7.7.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
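The hunk above ends inside the module's minimum-Airflow guard, which refuses to import on Airflow older than 2.10.0. A self-contained sketch of that check, using a hypothetical helper name and example values rather than the provider's actual module-level code:

import packaging.version

def check_min_airflow(airflow_version: str, minimum: str = "2.10.0") -> None:
    """Raise if the installed Airflow is older than the provider's floor."""
    # base_version strips pre-release suffixes, so "2.10.0rc1" compares as "2.10.0".
    base = packaging.version.parse(airflow_version).base_version
    if packaging.version.parse(base) < packaging.version.parse(minimum):
        raise RuntimeError(
            f"apache-airflow-providers-databricks needs Apache Airflow {minimum}+"
        )

check_min_airflow("2.10.5")   # passes silently
# check_min_airflow("2.9.3")  # would raise RuntimeError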
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks_base.py
RENAMED
@@ -121,6 +121,9 @@ class BaseDatabricksHook(BaseHook):
         self.oauth_tokens: dict[str, dict] = {}
         self.token_timeout_seconds = 10
         self.caller = caller
+        self._metadata_cache: dict[str, Any] = {}
+        self._metadata_expiry: float = 0
+        self._metadata_ttl: int = 300
 
         def my_after_func(retry_state):
             self._log_request_error(retry_state.attempt_number, retry_state.outcome)
@@ -515,43 +518,64 @@ class BaseDatabricksHook(BaseHook):
 
         return int(token[time_key]) > (int(time.time()) + TOKEN_REFRESH_LEAD_TIME)
 
-    @staticmethod
-    def _check_azure_metadata_service() -> None:
+    def _check_azure_metadata_service(self) -> None:
         """
-        Check for Azure Metadata Service.
+        Check for Azure Metadata Service (with caching).
 
         https://docs.microsoft.com/en-us/azure/virtual-machines/linux/instance-metadata-service
         """
+        if self._metadata_cache and time.time() < self._metadata_expiry:
+            return
         try:
-            ...  # (old retry-less request/validation body not preserved in this diff rendering)
+            for attempt in self._get_retry_object():
+                with attempt:
+                    response = requests.get(
+                        AZURE_METADATA_SERVICE_INSTANCE_URL,
+                        params={"api-version": "2021-02-01"},
+                        headers={"Metadata": "true"},
+                        timeout=2,
+                    )
+                    response.raise_for_status()
+                    response_json = response.json()
+
+                    self._validate_azure_metadata_service(response_json)
+                    self._metadata_cache = response_json
+                    self._metadata_expiry = time.time() + self._metadata_ttl
+                    break
+        except RetryError:
+            raise ConnectionError(f"Failed to reach Azure Metadata Service after {self.retry_limit} retries.")
         except (requests_exceptions.RequestException, ValueError) as e:
-            raise
+            raise ConnectionError(f"Can't reach Azure Metadata Service: {e}")
 
     async def _a_check_azure_metadata_service(self):
         """Async version of `_check_azure_metadata_service()`."""
+        if self._metadata_cache and time.time() < self._metadata_expiry:
+            return
         try:
-            async ...  # (old retry-less request body not preserved in this diff rendering)
+            async for attempt in self._a_get_retry_object():
+                with attempt:
+                    async with self._session.get(
+                        url=AZURE_METADATA_SERVICE_INSTANCE_URL,
+                        params={"api-version": "2021-02-01"},
+                        headers={"Metadata": "true"},
+                        timeout=2,
+                    ) as resp:
+                        resp.raise_for_status()
+                        response_json = await resp.json()
+                    self._validate_azure_metadata_service(response_json)
+                    self._metadata_cache = response_json
+                    self._metadata_expiry = time.time() + self._metadata_ttl
+                    break
+        except RetryError:
+            raise ConnectionError(f"Failed to reach Azure Metadata Service after {self.retry_limit} retries.")
+        except (aiohttp.ClientError, ValueError) as e:
+            raise ConnectionError(f"Can't reach Azure Metadata Service: {e}")
+
+    def _validate_azure_metadata_service(self, response_json: dict) -> None:
+        if "compute" not in response_json or "azEnvironment" not in response_json["compute"]:
+            raise ValueError(
+                f"Was able to fetch some metadata, but it doesn't look like Azure Metadata: {response_json}"
+            )
 
     def _get_token(self, raise_error: bool = False) -> str | None:
         if "token" in self.databricks_conn.extra_dejson:
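The fix visible above pairs a small TTL cache with tenacity's retry loop: a 429 from the instance-metadata endpoint now triggers a retry instead of failing the check outright, and a successful response is reused for ``_metadata_ttl`` seconds instead of being re-fetched on every token refresh. A minimal standalone sketch of the same pattern, with hypothetical names (``fetch_metadata`` is not the provider's API):

import time

import requests
from tenacity import Retrying, RetryError, stop_after_attempt, wait_exponential

_cache: dict = {}
_expiry: float = 0.0
_TTL = 300  # seconds, mirroring the hook's _metadata_ttl

def fetch_metadata(url: str) -> dict:
    """Return cached metadata while fresh; otherwise fetch it with retries."""
    global _cache, _expiry
    if _cache and time.time() < _expiry:
        return _cache  # fresh cache hit: no network call, no 429 exposure
    try:
        for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_exponential(min=1, max=10)):
            with attempt:
                resp = requests.get(url, headers={"Metadata": "true"}, timeout=2)
                resp.raise_for_status()  # a 429 raises HTTPError, which tenacity retries
                _cache = resp.json()
                _expiry = time.time() + _TTL
    except RetryError as err:
        raise ConnectionError("metadata service unreachable after 3 attempts") from err
    return _cache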
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks.py
RENAMED
@@ -24,7 +24,6 @@ import time
 from abc import ABC, abstractmethod
 from collections.abc import Sequence
 from functools import cached_property
-from logging import Logger
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
@@ -60,12 +59,8 @@ if TYPE_CHECKING:
         DatabricksWorkflowTaskGroup,
     )
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
-
-    try:
-        from airflow.sdk import TaskGroup
-    except ImportError:
-        from airflow.utils.task_group import TaskGroup  # type: ignore[no-redef]
+    from airflow.sdk import TaskGroup
+    from airflow.sdk.types import Context, Logger
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperatorLink
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/plugins/databricks_workflow.py
RENAMED
@@ -17,7 +17,6 @@
 
 from __future__ import annotations
 
-import logging
 import os
 from typing import TYPE_CHECKING, Any
 from urllib.parse import unquote
@@ -45,6 +44,7 @@ if TYPE_CHECKING:
 
     from airflow.models import BaseOperator
     from airflow.providers.databricks.operators.databricks import DatabricksTaskBaseOperator
+    from airflow.sdk.types import Logger
     from airflow.utils.context import Context
 
 
@@ -62,7 +62,7 @@ def get_auth_decorator():
 
 
 def get_databricks_task_ids(
-    group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: logging.Logger
+    group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: Logger
 ) -> list[str]:
     """
     Return a list of all Databricks task IDs for a dictionary of Airflow tasks.
@@ -112,7 +112,7 @@ if not AIRFLOW_V_3_0_PLUS:
 
     @provide_session
     def _clear_task_instances(
-        dag_id: str, run_id: str, task_ids: list[str], log: logging.Logger, session: Session = NEW_SESSION
+        dag_id: str, run_id: str, task_ids: list[str], log: Logger, session: Session = NEW_SESSION
     ) -> None:
         dag = _get_dag(dag_id, session=session)
         log.debug("task_ids %s to clear", str(task_ids))
@@ -145,7 +145,7 @@ def _repair_task(
     databricks_conn_id: str,
     databricks_run_id: int,
     tasks_to_repair: list[str],
-    logger: logging.Logger,
+    logger: Logger,
 ) -> int:
     """
     Repair a Databricks task using the Databricks API.
@@ -294,7 +294,7 @@ class WorkflowJobRunLink(BaseOperatorLink, LoggingMixin):
 def store_databricks_job_run_link(
     context: Context,
     metadata: Any,
-    logger: logging.Logger,
+    logger: Logger,
 ) -> None:
     """
     Store the Databricks job run link in XCom during task execution.
@@ -368,7 +368,7 @@ class WorkflowJobRepairAllFailedLink(BaseOperatorLink, LoggingMixin):
             children[child_id] = child
         return children
 
-    def get_tasks_to_run(self, ti_key: TaskInstanceKey, operator: BaseOperator, log: logging.Logger) -> str:
+    def get_tasks_to_run(self, ti_key: TaskInstanceKey, operator: BaseOperator, log: Logger) -> str:
         task_group = operator.task_group
         if not task_group:
             raise AirflowException("Task group is required for generating repair link.")
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks.py
RENAMED
@@ -1449,7 +1449,6 @@ class TestDatabricksHookConnSettings(TestDatabricksHookToken):
     @pytest.mark.asyncio
     @mock.patch("airflow.providers.databricks.hooks.databricks_base.aiohttp.ClientSession.get")
     async def test_async_do_api_call_only_existing_response_properties_are_read(self, mock_get):
-        self.hook.log.setLevel("DEBUG")
         response = mock_get.return_value.__aenter__.return_value
         response.mock_add_spec(aiohttp.ClientResponse, spec_set=True)
         response.json = AsyncMock(return_value={"bar": "baz"})
{apache_airflow_providers_databricks-7.7.2 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_base.py
RENAMED
@@ -26,7 +26,7 @@ import time_machine
 from aiohttp.client_exceptions import ClientConnectorError
 from requests import exceptions as requests_exceptions
 from requests.auth import HTTPBasicAuth
-from tenacity import Future, RetryError
+from tenacity import AsyncRetrying, Future, RetryError, retry_if_exception, stop_after_attempt, wait_fixed
 
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
@@ -768,3 +768,145 @@ class TestBaseDatabricksHook:
         exception.response = mock_response
         hook = BaseDatabricksHook()
         assert hook._get_error_code(exception) == "INVALID_REQUEST"
+
+    @mock.patch("requests.get")
+    @time_machine.travel("2025-07-12 12:00:00")
+    def test_check_azure_metadata_service_normal(self, mock_get):
+        travel_time = int(datetime(2025, 7, 12, 12, 0, 0).timestamp())
+        hook = BaseDatabricksHook()
+        mock_response = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+        mock_get.return_value.json.return_value = mock_response
+
+        hook._check_azure_metadata_service()
+
+        assert hook._metadata_cache == mock_response
+        assert int(hook._metadata_expiry) == travel_time + hook._metadata_ttl
+
+    @mock.patch("requests.get")
+    @time_machine.travel("2025-07-12 12:00:00")
+    def test_check_azure_metadata_service_cached(self, mock_get):
+        travel_time = int(datetime(2025, 7, 12, 12, 0, 0).timestamp())
+        hook = BaseDatabricksHook()
+        mock_response = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+        hook._metadata_cache = mock_response
+        hook._metadata_expiry = travel_time + 1000
+
+        hook._check_azure_metadata_service()
+        mock_get.assert_not_called()
+
+    @mock.patch("requests.get")
+    def test_check_azure_metadata_service_http_error(self, mock_get):
+        hook = BaseDatabricksHook()
+        mock_get.side_effect = requests_exceptions.RequestException("Fail")
+
+        with pytest.raises(ConnectionError, match="Can't reach Azure Metadata Service"):
+            hook._check_azure_metadata_service()
+        assert hook._metadata_cache == {}
+        assert hook._metadata_expiry == 0
+
+    @mock.patch("requests.get")
+    def test_check_azure_metadata_service_retry_error(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        resp_429 = mock.Mock()
+        resp_429.status_code = 429
+        resp_429.content = b"Too many requests"
+        http_error = requests_exceptions.HTTPError(response=resp_429)
+        mock_get.side_effect = http_error
+
+        with pytest.raises(ConnectionError, match="Failed to reach Azure Metadata Service after 3 retries."):
+            hook._check_azure_metadata_service()
+        assert mock_get.call_count == 3
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    async def test_a_check_azure_metadata_service_normal(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        async_mock = mock.AsyncMock()
+        async_mock.__aenter__.return_value = async_mock
+        async_mock.__aexit__.return_value = None
+        async_mock.json.return_value = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+
+        mock_get.return_value = async_mock
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+            mock_attempt = mock.Mock()
+            mock_attempt.__enter__ = mock.Mock(return_value=None)
+            mock_attempt.__exit__ = mock.Mock(return_value=None)
+
+            async def mock_retry_generator():
+                yield mock_attempt
+
+            hook._a_get_retry_object = mock.Mock(return_value=mock_retry_generator())
+            await hook._a_check_azure_metadata_service()
+
+            assert hook._metadata_cache["compute"]["azEnvironment"] == "AzurePublicCloud"
+            assert hook._metadata_expiry > 0
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    @time_machine.travel("2025-07-12 12:00:00")
+    async def test_a_check_azure_metadata_service_cached(self, mock_get):
+        travel_time = int(datetime(2025, 7, 12, 12, 0, 0).timestamp())
+        hook = BaseDatabricksHook()
+        hook._metadata_cache = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+        hook._metadata_expiry = travel_time + 1000
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+            await hook._a_check_azure_metadata_service()
+            mock_get.assert_not_called()
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    async def test_a_check_azure_metadata_service_http_error(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        async_mock = mock.AsyncMock()
+        async_mock.__aenter__.side_effect = aiohttp.ClientError("Fail")
+        async_mock.__aexit__.return_value = None
+        mock_get.return_value = async_mock
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+            mock_attempt = mock.Mock()
+            mock_attempt.__enter__ = mock.Mock(return_value=None)
+            mock_attempt.__exit__ = mock.Mock(return_value=None)
+
+            async def mock_retry_generator():
+                yield mock_attempt
+
+            hook._a_get_retry_object = mock.Mock(return_value=mock_retry_generator())
+
+            with pytest.raises(ConnectionError, match="Can't reach Azure Metadata Service"):
+                await hook._a_check_azure_metadata_service()
+            assert hook._metadata_cache == {}
+            assert hook._metadata_expiry == 0
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    async def test_a_check_azure_metadata_service_retry_error(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        mock_get.side_effect = aiohttp.ClientResponseError(
+            request_info=mock.Mock(), history=(), status=429, message="429 Too Many Requests"
+        )
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+
+            hook._a_get_retry_object = lambda: AsyncRetrying(
+                stop=stop_after_attempt(hook.retry_limit),
+                wait=wait_fixed(0),
+                retry=retry_if_exception(hook._retryable_error),
+            )
+
+            hook._validate_azure_metadata_service = mock.Mock()
+
+            with pytest.raises(
+                ConnectionError, match="Failed to reach Azure Metadata Service after 3 retries."
+            ):
+                await hook._a_check_azure_metadata_service()
+            assert mock_get.call_count == 3
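The new tests pin the wall clock with ``time_machine`` so the TTL arithmetic is deterministic. A standalone illustration of that technique (a hypothetical test, not from the suite):

import time
from datetime import datetime, timezone

import time_machine

@time_machine.travel(datetime(2025, 7, 12, 12, 0, tzinfo=timezone.utc), tick=False)
def test_frozen_clock():
    frozen = datetime(2025, 7, 12, 12, 0, tzinfo=timezone.utc).timestamp()
    # tick=False freezes time completely, so "now" plus a TTL is exactly predictable.
    assert int(time.time()) == int(frozen)
    assert int(time.time()) + 300 == int(frozen) + 300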
All other files listed above changed only in their path prefix (the version-directory rename) and have no content differences.