apache-airflow-providers-databricks 7.7.2rc1.tar.gz → 7.7.3rc1.tar.gz

This diff compares the contents of two package versions as published to their public registry. It is provided for informational purposes only.


Files changed (95)
  1. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/PKG-INFO +19 -6
  2. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/README.rst +16 -3
  3. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/changelog.rst +19 -0
  4. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/index.rst +3 -3
  5. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/provider.yaml +2 -1
  6. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/pyproject.toml +3 -3
  7. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/__init__.py +1 -1
  8. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks_base.py +51 -27
  9. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks.py +2 -7
  10. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/plugins/databricks_workflow.py +6 -6
  11. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks.py +0 -1
  12. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_base.py +143 -1
  13. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/.latest-doc-only-change.txt +0 -0
  14. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/commits.rst +0 -0
  15. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/conf.py +0 -0
  16. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/connections/databricks.rst +0 -0
  17. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/databricks_workflow_task_group_airflow_graph_view.png +0 -0
  18. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/workflow_plugin_launch_task.png +0 -0
  19. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/workflow_plugin_single_task.png +0 -0
  20. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/img/workflow_run_databricks_graph_view.png +0 -0
  21. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/installing-providers-from-sources.rst +0 -0
  22. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/integration-logos/Databricks.png +0 -0
  23. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/copy_into.rst +0 -0
  24. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/index.rst +0 -0
  25. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/jobs_create.rst +0 -0
  26. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/notebook.rst +0 -0
  27. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/repos_create.rst +0 -0
  28. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/repos_delete.rst +0 -0
  29. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/repos_update.rst +0 -0
  30. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/run_now.rst +0 -0
  31. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/sql.rst +0 -0
  32. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/sql_statements.rst +0 -0
  33. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/submit_run.rst +0 -0
  34. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/task.rst +0 -0
  35. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/operators/workflow.rst +0 -0
  36. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/plugins/index.rst +0 -0
  37. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/plugins/workflow.rst +0 -0
  38. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/docs/security.rst +0 -0
  39. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/__init__.py +0 -0
  40. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/__init__.py +0 -0
  41. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/LICENSE +0 -0
  42. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/exceptions.py +0 -0
  43. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/get_provider_info.py +0 -0
  44. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
  45. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks.py +0 -0
  46. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/hooks/databricks_sql.py +0 -0
  47. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/__init__.py +0 -0
  48. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  49. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks_sql.py +0 -0
  50. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/operators/databricks_workflow.py +0 -0
  51. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
  52. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
  53. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/databricks.py +0 -0
  54. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  55. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  56. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
  57. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
  58. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/__init__.py +0 -0
  59. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/databricks.py +0 -0
  60. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/mixins.py +0 -0
  61. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/utils/openlineage.py +0 -0
  62. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/src/airflow/providers/databricks/version_compat.py +0 -0
  63. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/conftest.py +0 -0
  64. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/__init__.py +0 -0
  65. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/__init__.py +0 -0
  66. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks.py +0 -0
  67. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_repos.py +0 -0
  68. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_sensors.py +0 -0
  69. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_sql.py +0 -0
  70. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/system/databricks/example_databricks_workflow.py +0 -0
  71. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/__init__.py +0 -0
  72. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/__init__.py +0 -0
  73. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/__init__.py +0 -0
  74. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity.py +0 -0
  75. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity_async.py +0 -0
  76. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/hooks/test_databricks_sql.py +0 -0
  77. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/__init__.py +0 -0
  78. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks.py +0 -0
  79. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_copy.py +0 -0
  80. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_repos.py +0 -0
  81. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_sql.py +0 -0
  82. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/operators/test_databricks_workflow.py +0 -0
  83. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/plugins/__init__.py +0 -0
  84. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/plugins/test_databricks_workflow.py +0 -0
  85. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/__init__.py +0 -0
  86. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/test_databricks.py +0 -0
  87. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/test_databricks_partition.py +0 -0
  88. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/sensors/test_databricks_sql.py +0 -0
  89. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/test_exceptions.py +0 -0
  90. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/triggers/__init__.py +0 -0
  91. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/triggers/test_databricks.py +0 -0
  92. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/__init__.py +0 -0
  93. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/test_databricks.py +0 -0
  94. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/test_mixins.py +0 -0
  95. {apache_airflow_providers_databricks-7.7.2rc1 → apache_airflow_providers_databricks-7.7.3rc1}/tests/unit/databricks/utils/test_openlineage.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.7.2rc1
+Version: 7.7.3rc1
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -38,8 +38,8 @@ Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openli
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.2
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -75,7 +75,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.7.2``
+Release: ``7.7.3``
 
 
 `Databricks <https://databricks.com/>`__
@@ -88,7 +88,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/>`_.
 
 Installation
 ------------
@@ -141,6 +141,19 @@ Dependent package
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+---------------------
+
+================== ================================================================
+Extra              Dependencies
+================== ================================================================
+``sdk``            ``databricks-sdk==0.10.0``
+``azure-identity`` ``azure-identity>=1.3.1``
+``fab``            ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``standard``       ``apache-airflow-providers-standard``
+``openlineage``    ``apache-airflow-providers-openlineage>=2.3.0``
+================== ================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html>`_.
 
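Note: the extras in the table above can be combined at install time with the usual pip extras syntax; for example, ``pip install "apache-airflow-providers-databricks[sdk,azure-identity]"`` installs the pinned ``databricks-sdk`` together with ``azure-identity``. (Illustrative command; choose whichever extras your deployment actually needs.)
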
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.7.2``
+Release: ``7.7.3``
 
 
 `Databricks <https://databricks.com/>`__
  `Databricks <https://databricks.com/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
36
36
  are in ``airflow.providers.databricks`` python package.
37
37
 
38
38
  You can find package information and changelog for the provider
39
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/>`_.
39
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/>`_.
40
40
 
41
41
  Installation
42
42
  ------------
@@ -89,5 +89,18 @@ Dependent package
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+---------------------
+
+================== ================================================================
+Extra              Dependencies
+================== ================================================================
+``sdk``            ``databricks-sdk==0.10.0``
+``azure-identity`` ``azure-identity>=1.3.1``
+``fab``            ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``standard``       ``apache-airflow-providers-standard``
+``openlineage``    ``apache-airflow-providers-openlineage>=2.3.0``
+================== ================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html>`_.
@@ -26,6 +26,25 @@
 Changelog
 ---------
 
+7.7.3
+.....
+
+
+Release Date: ``|PypiReleaseDate|``
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fix metadata service check handle 429 (#55462)``
+
+Misc
+~~~~
+
+* ``Switch all airflow logging to structlog (#52651)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
 7.7.2
 .....
 
@@ -78,7 +78,7 @@ apache-airflow-providers-databricks package
 `Databricks <https://databricks.com/>`__
 
 
-Release: 7.7.2
+Release: 7.7.3
 
 Provider package
 ----------------
@@ -143,5 +143,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-databricks 7.7.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2.tar.gz.sha512>`__)
-* `The apache-airflow-providers-databricks 7.7.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.2-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-databricks 7.7.3 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3.tar.gz.sha512>`__)
+* `The apache-airflow-providers-databricks 7.7.3 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.3-py3-none-any.whl.sha512>`__)
@@ -22,12 +22,13 @@ description: |
     `Databricks <https://databricks.com/>`__
 
 state: ready
-source-date-epoch: 1756876778
+source-date-epoch: 1757950169
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 7.7.3
   - 7.7.2
   - 7.7.1
   - 7.7.0
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-databricks"
-version = "7.7.2rc1"
+version = "7.7.3rc1"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -133,8 +133,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.2"
-"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.2/changelog.html"
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.3/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.7.2"
+__version__ = "7.7.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -121,6 +121,9 @@ class BaseDatabricksHook(BaseHook):
         self.oauth_tokens: dict[str, dict] = {}
         self.token_timeout_seconds = 10
         self.caller = caller
+        self._metadata_cache: dict[str, Any] = {}
+        self._metadata_expiry: float = 0
+        self._metadata_ttl: int = 300
 
         def my_after_func(retry_state):
             self._log_request_error(retry_state.attempt_number, retry_state.outcome)
@@ -515,43 +518,64 @@ class BaseDatabricksHook(BaseHook):
 
         return int(token[time_key]) > (int(time.time()) + TOKEN_REFRESH_LEAD_TIME)
 
-    @staticmethod
-    def _check_azure_metadata_service() -> None:
+    def _check_azure_metadata_service(self) -> None:
         """
-        Check for Azure Metadata Service.
+        Check for Azure Metadata Service (with caching).
 
         https://docs.microsoft.com/en-us/azure/virtual-machines/linux/instance-metadata-service
         """
+        if self._metadata_cache and time.time() < self._metadata_expiry:
+            return
         try:
-            jsn = requests.get(
-                AZURE_METADATA_SERVICE_INSTANCE_URL,
-                params={"api-version": "2021-02-01"},
-                headers={"Metadata": "true"},
-                timeout=2,
-            ).json()
-            if "compute" not in jsn or "azEnvironment" not in jsn["compute"]:
-                raise AirflowException(
-                    f"Was able to fetch some metadata, but it doesn't look like Azure Metadata: {jsn}"
-                )
+            for attempt in self._get_retry_object():
+                with attempt:
+                    response = requests.get(
+                        AZURE_METADATA_SERVICE_INSTANCE_URL,
+                        params={"api-version": "2021-02-01"},
+                        headers={"Metadata": "true"},
+                        timeout=2,
+                    )
+                    response.raise_for_status()
+                    response_json = response.json()
+
+                    self._validate_azure_metadata_service(response_json)
+                    self._metadata_cache = response_json
+                    self._metadata_expiry = time.time() + self._metadata_ttl
+                    break
+        except RetryError:
+            raise ConnectionError(f"Failed to reach Azure Metadata Service after {self.retry_limit} retries.")
         except (requests_exceptions.RequestException, ValueError) as e:
-            raise AirflowException(f"Can't reach Azure Metadata Service: {e}")
+            raise ConnectionError(f"Can't reach Azure Metadata Service: {e}")
 
     async def _a_check_azure_metadata_service(self):
         """Async version of `_check_azure_metadata_service()`."""
+        if self._metadata_cache and time.time() < self._metadata_expiry:
+            return
         try:
-            async with self._session.get(
-                url=AZURE_METADATA_SERVICE_INSTANCE_URL,
-                params={"api-version": "2021-02-01"},
-                headers={"Metadata": "true"},
-                timeout=2,
-            ) as resp:
-                jsn = await resp.json()
-                if "compute" not in jsn or "azEnvironment" not in jsn["compute"]:
-                    raise AirflowException(
-                        f"Was able to fetch some metadata, but it doesn't look like Azure Metadata: {jsn}"
-                    )
-        except (requests_exceptions.RequestException, ValueError) as e:
-            raise AirflowException(f"Can't reach Azure Metadata Service: {e}")
+            async for attempt in self._a_get_retry_object():
+                with attempt:
+                    async with self._session.get(
+                        url=AZURE_METADATA_SERVICE_INSTANCE_URL,
+                        params={"api-version": "2021-02-01"},
+                        headers={"Metadata": "true"},
+                        timeout=2,
+                    ) as resp:
+                        resp.raise_for_status()
+                        response_json = await resp.json()
+                        self._validate_azure_metadata_service(response_json)
+                        self._metadata_cache = response_json
+                        self._metadata_expiry = time.time() + self._metadata_ttl
+                        break
+        except RetryError:
+            raise ConnectionError(f"Failed to reach Azure Metadata Service after {self.retry_limit} retries.")
+        except (aiohttp.ClientError, ValueError) as e:
+            raise ConnectionError(f"Can't reach Azure Metadata Service: {e}")
+
+    def _validate_azure_metadata_service(self, response_json: dict) -> None:
+        if "compute" not in response_json or "azEnvironment" not in response_json["compute"]:
+            raise ValueError(
+                f"Was able to fetch some metadata, but it doesn't look like Azure Metadata: {response_json}"
+            )
 
     def _get_token(self, raise_error: bool = False) -> str | None:
         if "token" in self.databricks_conn.extra_dejson:
@@ -24,7 +24,6 @@ import time
 from abc import ABC, abstractmethod
 from collections.abc import Sequence
 from functools import cached_property
-from logging import Logger
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
@@ -60,12 +59,8 @@ if TYPE_CHECKING:
         DatabricksWorkflowTaskGroup,
     )
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
-
-    try:
-        from airflow.sdk import TaskGroup
-    except ImportError:
-        from airflow.utils.task_group import TaskGroup  # type: ignore[no-redef]
+    from airflow.sdk import TaskGroup
+    from airflow.sdk.types import Context, Logger
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperatorLink
@@ -17,7 +17,6 @@
 
 from __future__ import annotations
 
-import logging
 import os
 from typing import TYPE_CHECKING, Any
 from urllib.parse import unquote
@@ -45,6 +44,7 @@ if TYPE_CHECKING:
 
     from airflow.models import BaseOperator
     from airflow.providers.databricks.operators.databricks import DatabricksTaskBaseOperator
+    from airflow.sdk.types import Logger
     from airflow.utils.context import Context
 
 
@@ -62,7 +62,7 @@ def get_auth_decorator():
 
 
 def get_databricks_task_ids(
-    group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: logging.Logger
+    group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: Logger
 ) -> list[str]:
     """
     Return a list of all Databricks task IDs for a dictionary of Airflow tasks.
@@ -112,7 +112,7 @@ if not AIRFLOW_V_3_0_PLUS:
 
     @provide_session
     def _clear_task_instances(
-        dag_id: str, run_id: str, task_ids: list[str], log: logging.Logger, session: Session = NEW_SESSION
+        dag_id: str, run_id: str, task_ids: list[str], log: Logger, session: Session = NEW_SESSION
     ) -> None:
         dag = _get_dag(dag_id, session=session)
         log.debug("task_ids %s to clear", str(task_ids))
@@ -145,7 +145,7 @@ def _repair_task(
     databricks_conn_id: str,
     databricks_run_id: int,
     tasks_to_repair: list[str],
-    logger: logging.Logger,
+    logger: Logger,
 ) -> int:
     """
     Repair a Databricks task using the Databricks API.
@@ -294,7 +294,7 @@ class WorkflowJobRunLink(BaseOperatorLink, LoggingMixin):
 def store_databricks_job_run_link(
     context: Context,
     metadata: Any,
-    logger: logging.Logger,
+    logger: Logger,
 ) -> None:
     """
     Store the Databricks job run link in XCom during task execution.
@@ -368,7 +368,7 @@ class WorkflowJobRepairAllFailedLink(BaseOperatorLink, LoggingMixin):
             children[child_id] = child
         return children
 
-    def get_tasks_to_run(self, ti_key: TaskInstanceKey, operator: BaseOperator, log: logging.Logger) -> str:
+    def get_tasks_to_run(self, ti_key: TaskInstanceKey, operator: BaseOperator, log: Logger) -> str:
         task_group = operator.task_group
         if not task_group:
             raise AirflowException("Task group is required for generating repair link.")
@@ -1449,7 +1449,6 @@ class TestDatabricksHookConnSettings(TestDatabricksHookToken):
     @pytest.mark.asyncio
     @mock.patch("airflow.providers.databricks.hooks.databricks_base.aiohttp.ClientSession.get")
     async def test_async_do_api_call_only_existing_response_properties_are_read(self, mock_get):
-        self.hook.log.setLevel("DEBUG")
         response = mock_get.return_value.__aenter__.return_value
         response.mock_add_spec(aiohttp.ClientResponse, spec_set=True)
         response.json = AsyncMock(return_value={"bar": "baz"})
@@ -26,7 +26,7 @@ import time_machine
 from aiohttp.client_exceptions import ClientConnectorError
 from requests import exceptions as requests_exceptions
 from requests.auth import HTTPBasicAuth
-from tenacity import Future, RetryError
+from tenacity import AsyncRetrying, Future, RetryError, retry_if_exception, stop_after_attempt, wait_fixed
 
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
@@ -768,3 +768,145 @@ class TestBaseDatabricksHook:
         exception.response = mock_response
         hook = BaseDatabricksHook()
         assert hook._get_error_code(exception) == "INVALID_REQUEST"
+
+    @mock.patch("requests.get")
+    @time_machine.travel("2025-07-12 12:00:00")
+    def test_check_azure_metadata_service_normal(self, mock_get):
+        travel_time = int(datetime(2025, 7, 12, 12, 0, 0).timestamp())
+        hook = BaseDatabricksHook()
+        mock_response = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+        mock_get.return_value.json.return_value = mock_response
+
+        hook._check_azure_metadata_service()
+
+        assert hook._metadata_cache == mock_response
+        assert int(hook._metadata_expiry) == travel_time + hook._metadata_ttl
+
+    @mock.patch("requests.get")
+    @time_machine.travel("2025-07-12 12:00:00")
+    def test_check_azure_metadata_service_cached(self, mock_get):
+        travel_time = int(datetime(2025, 7, 12, 12, 0, 0).timestamp())
+        hook = BaseDatabricksHook()
+        mock_response = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+        hook._metadata_cache = mock_response
+        hook._metadata_expiry = travel_time + 1000
+
+        hook._check_azure_metadata_service()
+        mock_get.assert_not_called()
+
+    @mock.patch("requests.get")
+    def test_check_azure_metadata_service_http_error(self, mock_get):
+        hook = BaseDatabricksHook()
+        mock_get.side_effect = requests_exceptions.RequestException("Fail")
+
+        with pytest.raises(ConnectionError, match="Can't reach Azure Metadata Service"):
+            hook._check_azure_metadata_service()
+        assert hook._metadata_cache == {}
+        assert hook._metadata_expiry == 0
+
+    @mock.patch("requests.get")
+    def test_check_azure_metadata_service_retry_error(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        resp_429 = mock.Mock()
+        resp_429.status_code = 429
+        resp_429.content = b"Too many requests"
+        http_error = requests_exceptions.HTTPError(response=resp_429)
+        mock_get.side_effect = http_error
+
+        with pytest.raises(ConnectionError, match="Failed to reach Azure Metadata Service after 3 retries."):
+            hook._check_azure_metadata_service()
+        assert mock_get.call_count == 3
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    async def test_a_check_azure_metadata_service_normal(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        async_mock = mock.AsyncMock()
+        async_mock.__aenter__.return_value = async_mock
+        async_mock.__aexit__.return_value = None
+        async_mock.json.return_value = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+
+        mock_get.return_value = async_mock
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+            mock_attempt = mock.Mock()
+            mock_attempt.__enter__ = mock.Mock(return_value=None)
+            mock_attempt.__exit__ = mock.Mock(return_value=None)
+
+            async def mock_retry_generator():
+                yield mock_attempt
+
+            hook._a_get_retry_object = mock.Mock(return_value=mock_retry_generator())
+            await hook._a_check_azure_metadata_service()
+
+            assert hook._metadata_cache["compute"]["azEnvironment"] == "AzurePublicCloud"
+            assert hook._metadata_expiry > 0
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    @time_machine.travel("2025-07-12 12:00:00")
+    async def test_a_check_azure_metadata_service_cached(self, mock_get):
+        travel_time = int(datetime(2025, 7, 12, 12, 0, 0).timestamp())
+        hook = BaseDatabricksHook()
+        hook._metadata_cache = {"compute": {"azEnvironment": "AzurePublicCloud"}}
+        hook._metadata_expiry = travel_time + 1000
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+            await hook._a_check_azure_metadata_service()
+            mock_get.assert_not_called()
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    async def test_a_check_azure_metadata_service_http_error(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        async_mock = mock.AsyncMock()
+        async_mock.__aenter__.side_effect = aiohttp.ClientError("Fail")
+        async_mock.__aexit__.return_value = None
+        mock_get.return_value = async_mock
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+            mock_attempt = mock.Mock()
+            mock_attempt.__enter__ = mock.Mock(return_value=None)
+            mock_attempt.__exit__ = mock.Mock(return_value=None)
+
+            async def mock_retry_generator():
+                yield mock_attempt
+
+            hook._a_get_retry_object = mock.Mock(return_value=mock_retry_generator())
+
+            with pytest.raises(ConnectionError, match="Can't reach Azure Metadata Service"):
+                await hook._a_check_azure_metadata_service()
+            assert hook._metadata_cache == {}
+            assert hook._metadata_expiry == 0
+
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.ClientSession.get")
+    async def test_a_check_azure_metadata_service_retry_error(self, mock_get):
+        hook = BaseDatabricksHook()
+
+        mock_get.side_effect = aiohttp.ClientResponseError(
+            request_info=mock.Mock(), history=(), status=429, message="429 Too Many Requests"
+        )
+
+        async with aiohttp.ClientSession() as session:
+            hook._session = session
+
+            hook._a_get_retry_object = lambda: AsyncRetrying(
+                stop=stop_after_attempt(hook.retry_limit),
+                wait=wait_fixed(0),
+                retry=retry_if_exception(hook._retryable_error),
+            )
+
+            hook._validate_azure_metadata_service = mock.Mock()
+
+            with pytest.raises(
+                ConnectionError, match="Failed to reach Azure Metadata Service after 3 retries."
+            ):
+                await hook._a_check_azure_metadata_service()
+            assert mock_get.call_count == 3
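
To exercise just this new coverage in a provider development checkout (assuming ``pytest``, ``pytest-asyncio``, and ``time_machine`` are available, as the test module implies): ``pytest tests/unit/databricks/hooks/test_databricks_base.py -k azure_metadata``.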