apache-airflow-providers-databricks 7.8.1.tar.gz → 7.8.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/NOTICE +1 -1
  2. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/PKG-INFO +10 -9
  3. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/README.rst +4 -4
  4. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/changelog.rst +33 -0
  5. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/index.rst +4 -5
  6. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/jobs_create.rst +2 -2
  7. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/run_now.rst +2 -2
  8. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/submit_run.rst +3 -3
  9. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/provider.yaml +3 -1
  10. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/pyproject.toml +10 -6
  11. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/__init__.py +1 -1
  12. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/hooks/databricks.py +39 -39
  13. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/hooks/databricks_base.py +1 -2
  14. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/hooks/databricks_sql.py +10 -2
  15. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/operators/databricks.py +7 -8
  16. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/plugins/databricks_workflow.py +28 -8
  17. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/sensors/databricks.py +1 -2
  18. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/utils/mixins.py +2 -6
  19. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/hooks/test_databricks.py +26 -26
  20. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/hooks/test_databricks_base.py +1 -1
  21. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/hooks/test_databricks_sql.py +1 -2
  22. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/operators/test_databricks_copy.py +1 -2
  23. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/plugins/test_databricks_workflow.py +3 -4
  24. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/utils/test_openlineage.py +1 -1
  25. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/LICENSE +0 -0
  26. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/.latest-doc-only-change.txt +0 -0
  27. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/commits.rst +0 -0
  28. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/conf.py +0 -0
  29. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/connections/databricks.rst +0 -0
  30. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/img/databricks_workflow_task_group_airflow_graph_view.png +0 -0
  31. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/img/workflow_plugin_launch_task.png +0 -0
  32. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/img/workflow_plugin_single_task.png +0 -0
  33. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/img/workflow_run_databricks_graph_view.png +0 -0
  34. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/installing-providers-from-sources.rst +0 -0
  35. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/integration-logos/Databricks.png +0 -0
  36. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/copy_into.rst +0 -0
  37. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/index.rst +0 -0
  38. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/notebook.rst +0 -0
  39. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/repos_create.rst +0 -0
  40. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/repos_delete.rst +0 -0
  41. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/repos_update.rst +0 -0
  42. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/sql.rst +0 -0
  43. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/sql_statements.rst +0 -0
  44. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/task.rst +0 -0
  45. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/operators/workflow.rst +0 -0
  46. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/plugins/index.rst +0 -0
  47. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/plugins/workflow.rst +0 -0
  48. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/docs/security.rst +0 -0
  49. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/__init__.py +0 -0
  50. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/__init__.py +0 -0
  51. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/exceptions.py +0 -0
  52. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/get_provider_info.py +0 -0
  53. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
  54. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/operators/__init__.py +0 -0
  55. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  56. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/operators/databricks_sql.py +0 -0
  57. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/operators/databricks_workflow.py +0 -0
  58. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
  59. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
  60. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  61. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  62. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
  63. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
  64. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/utils/__init__.py +0 -0
  65. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/utils/databricks.py +0 -0
  66. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/utils/openlineage.py +0 -0
  67. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/src/airflow/providers/databricks/version_compat.py +0 -0
  68. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/conftest.py +0 -0
  69. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/__init__.py +0 -0
  70. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/databricks/__init__.py +0 -0
  71. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/databricks/example_databricks.py +0 -0
  72. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/databricks/example_databricks_repos.py +0 -0
  73. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/databricks/example_databricks_sensors.py +0 -0
  74. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/databricks/example_databricks_sql.py +0 -0
  75. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/system/databricks/example_databricks_workflow.py +0 -0
  76. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/__init__.py +0 -0
  77. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/__init__.py +0 -0
  78. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/hooks/__init__.py +0 -0
  79. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity.py +0 -0
  80. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity_async.py +0 -0
  81. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/operators/__init__.py +0 -0
  82. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/operators/test_databricks.py +0 -0
  83. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/operators/test_databricks_repos.py +0 -0
  84. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/operators/test_databricks_sql.py +0 -0
  85. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/operators/test_databricks_workflow.py +0 -0
  86. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/plugins/__init__.py +0 -0
  87. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/sensors/__init__.py +0 -0
  88. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/sensors/test_databricks.py +0 -0
  89. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/sensors/test_databricks_partition.py +0 -0
  90. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/sensors/test_databricks_sql.py +0 -0
  91. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/test_exceptions.py +0 -0
  92. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/triggers/__init__.py +0 -0
  93. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/triggers/test_databricks.py +0 -0
  94. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/utils/__init__.py +0 -0
  95. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/utils/test_databricks.py +0 -0
  96. {apache_airflow_providers_databricks-7.8.1 → apache_airflow_providers_databricks-7.8.3}/tests/unit/databricks/utils/test_mixins.py +0 -0
@@ -1,5 +1,5 @@
  Apache Airflow
- Copyright 2016-2025 The Apache Software Foundation
+ Copyright 2016-2026 The Apache Software Foundation

  This product includes software developed at
  The Apache Software Foundation (http://www.apache.org/).
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-databricks
- Version: 7.8.1
+ Version: 7.8.3
  Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
  Keywords: airflow-provider,databricks,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,11 +23,10 @@ Classifier: Topic :: System :: Monitoring
  License-File: LICENSE
  License-File: NOTICE
  Requires-Dist: apache-airflow>=2.11.0
- Requires-Dist: apache-airflow-providers-common-compat>=1.10.1
+ Requires-Dist: apache-airflow-providers-common-compat>=1.12.0
  Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
  Requires-Dist: requests>=2.32.0,<3
  Requires-Dist: databricks-sql-connector>=4.0.0
- Requires-Dist: databricks-sqlalchemy>=1.0.2
  Requires-Dist: aiohttp>=3.9.2, <4
  Requires-Dist: mergedeep>=1.3.4
  Requires-Dist: pandas>=2.1.2; python_version <"3.13"
@@ -38,10 +37,11 @@ Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
  Requires-Dist: apache-airflow-providers-fab>=2.2.0 ; extra == "fab" and ( python_version < '3.13')
  Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
  Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
+ Requires-Dist: databricks-sqlalchemy>=1.0.2 ; extra == "sqlalchemy"
  Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -50,6 +50,7 @@ Provides-Extra: azure-identity
  Provides-Extra: fab
  Provides-Extra: openlineage
  Provides-Extra: sdk
+ Provides-Extra: sqlalchemy
  Provides-Extra: standard


@@ -77,7 +78,7 @@ Provides-Extra: standard

  Package ``apache-airflow-providers-databricks``

- Release: ``7.8.1``
+ Release: ``7.8.3``


  `Databricks <https://databricks.com/>`__
@@ -90,7 +91,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3/>`_.

  Installation
  ------------
@@ -112,7 +113,6 @@ PIP package Version required
  ``apache-airflow-providers-common-sql`` ``>=1.27.0``
  ``requests`` ``>=2.32.0,<3``
  ``databricks-sql-connector`` ``>=4.0.0``
- ``databricks-sqlalchemy`` ``>=1.0.2``
  ``aiohttp`` ``>=3.9.2,<4``
  ``mergedeep`` ``>=1.3.4``
  ``pandas`` ``>=2.1.2; python_version < "3.13"``
@@ -153,8 +153,9 @@ Extra Dependencies
  ``fab`` ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
  ``standard`` ``apache-airflow-providers-standard``
  ``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+ ``sqlalchemy`` ``databricks-sqlalchemy>=1.0.2``
  ================== ================================================================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3/changelog.html>`_.

@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-databricks``

- Release: ``7.8.1``
+ Release: ``7.8.3``


  `Databricks <https://databricks.com/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3/>`_.

  Installation
  ------------
@@ -58,7 +58,6 @@ PIP package Version required
  ``apache-airflow-providers-common-sql`` ``>=1.27.0``
  ``requests`` ``>=2.32.0,<3``
  ``databricks-sql-connector`` ``>=4.0.0``
- ``databricks-sqlalchemy`` ``>=1.0.2``
  ``aiohttp`` ``>=3.9.2,<4``
  ``mergedeep`` ``>=1.3.4``
  ``pandas`` ``>=2.1.2; python_version < "3.13"``
@@ -99,7 +98,8 @@ Extra Dependencies
  ``fab`` ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
  ``standard`` ``apache-airflow-providers-standard``
  ``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+ ``sqlalchemy`` ``databricks-sqlalchemy>=1.0.2``
  ================== ================================================================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3/changelog.html>`_.
@@ -26,6 +26,39 @@
  Changelog
  ---------

+ 7.8.3
+ .....
+
+ Misc
+ ~~~~
+
+ * ``Consume ''AirflowOptionalProviderFeatureException'' from compat sdk in providers (#60335)``
+ * ``Limit deltalake again to 1.3.1 due to missing ARM .whl files (#60376)``
+ * ``New year means updated Copyright notices (#60344)``
+ * ``Made sqlalchemy dependency optional for Databricks provider (#60110)``
+ * ``Move over plugins_manager to a shared library (#59956)``
+ * ``Limit deltalake again to 1.3.0 due to missing ARM .whl files (#60098)``
+ * ``Source databricks provider to use airflow.sdk.configuration.conf (#60021)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Revert "Limit deltalake to not include 1.3.0 version (#59977)" (#60005)``
+ * ``Limit deltalake to not include 1.3.0 version (#59977)``
+
+ 7.8.2
+ .....
+
+ Misc
+ ~~~~
+
+ * ``'issue-59189:' Updating Databricks provider to point to '2.2/jobs/...' endpoint (#59217)``
+ * ``Remove top-level SDK reference in Core (#59817)``
+ * ``Refactor/sqla2 providers(celery, kubernetes, databricks, mysql) to remove SQLA query usage (#59537)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``TaskInstance unused method cleanup (#59835)``
+
  7.8.1
  .....

@@ -78,7 +78,7 @@ apache-airflow-providers-databricks package
  `Databricks <https://databricks.com/>`__


- Release: 7.8.1
+ Release: 7.8.3

  Provider package
  ----------------
@@ -102,11 +102,10 @@ The minimum Apache Airflow version supported by this provider distribution is ``
  PIP package Version required
  ========================================== ======================================
  ``apache-airflow`` ``>=2.11.0``
- ``apache-airflow-providers-common-compat`` ``>=1.10.1``
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
  ``apache-airflow-providers-common-sql`` ``>=1.27.0``
  ``requests`` ``>=2.32.0,<3``
  ``databricks-sql-connector`` ``>=4.0.0``
- ``databricks-sqlalchemy`` ``>=1.0.2``
  ``aiohttp`` ``>=3.9.2,<4``
  ``mergedeep`` ``>=1.3.4``
  ``pandas`` ``>=2.1.2; python_version < "3.13"``
@@ -142,5 +141,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-databricks 7.8.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1.tar.gz.sha512>`__)
- * `The apache-airflow-providers-databricks 7.8.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-databricks 7.8.3 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.3.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.3.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.3.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-databricks 7.8.3 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.3-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.3-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.3-py3-none-any.whl.sha512>`__)
@@ -31,12 +31,12 @@ Using the Operator
  ------------------

  There are three ways to instantiate this operator. In the first way, you can take the JSON payload that you typically use
- to call the ``api/2.1/jobs/create`` endpoint and pass it directly to our ``DatabricksCreateJobsOperator`` through the
+ to call the ``api/2.2/jobs/create`` endpoint and pass it directly to our ``DatabricksCreateJobsOperator`` through the
  ``json`` parameter. With this approach you get full control over the underlying payload to Jobs REST API, including
  execution of Databricks jobs with multiple tasks, but it's harder to detect errors because of the lack of the type checking.

  The second way to accomplish the same thing is to use the named parameters of the ``DatabricksCreateJobsOperator`` directly. Note that there is exactly
- one named parameter for each top level parameter in the ``api/2.1/jobs/create`` endpoint.
+ one named parameter for each top level parameter in the ``api/2.2/jobs/create`` endpoint.

  The third way is to use both the json parameter **AND** the named parameters. They will be merged
  together. If there are conflicts during the merge, the named parameters will take precedence and
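For orientation, a minimal sketch of the first (``json``-payload) approach described in the excerpt above; the job name, notebook path, and cluster spec are placeholders for illustration, not values taken from this package:

.. code-block:: python

    from airflow.providers.databricks.operators.databricks import DatabricksCreateJobsOperator

    create_job = DatabricksCreateJobsOperator(
        task_id="create_databricks_job",
        json={
            "name": "example-job",  # placeholder job name
            "tasks": [
                {
                    "task_key": "example_notebook",
                    "notebook_task": {"notebook_path": "/Shared/example"},
                    "new_cluster": {
                        "spark_version": "13.3.x-scala2.12",
                        "node_type_id": "i3.xlarge",
                        "num_workers": 1,
                    },
                }
            ],
        },
    )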
@@ -21,14 +21,14 @@ DatabricksRunNowOperator
  ========================

  Use the :class:`~airflow.providers.databricks.operators.DatabricksRunNowOperator` to trigger a run of an existing Databricks job
- via `api/2.1/jobs/run-now <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow>`_ API endpoint.
+ via `api/2.2/jobs/run-now <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow>`_ API endpoint.


  Using the Operator
  ^^^^^^^^^^^^^^^^^^

  There are two ways to instantiate this operator. In the first way, you can take the JSON payload that you typically use
- to call the ``api/2.1/jobs/run-now`` endpoint and pass it directly to our ``DatabricksRunNowOperator`` through the ``json`` parameter.
+ to call the ``api/2.2/jobs/run-now`` endpoint and pass it directly to our ``DatabricksRunNowOperator`` through the ``json`` parameter.

  Another way to accomplish the same thing is to use the named parameters of the ``DatabricksRunNowOperator`` directly.
  Note that there is exactly one named parameter for each top level parameter in the ``jobs/run-now`` endpoint.
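A minimal sketch of the named-parameter approach mentioned above; the job id and notebook parameters are placeholders:

.. code-block:: python

    from airflow.providers.databricks.operators.databricks import DatabricksRunNowOperator

    run_existing_job = DatabricksRunNowOperator(
        task_id="run_existing_job",
        job_id=12345,  # placeholder: the id of an existing Databricks job
        notebook_params={"run_date": "{{ ds }}"},
    )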
@@ -24,14 +24,14 @@ DatabricksSubmitRunOperator
  ===========================

  Use the :class:`~airflow.providers.databricks.operators.DatabricksSubmitRunOperator` to submit
- a new Databricks job via Databricks `api/2.1/jobs/runs/submit <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit>`_ API endpoint.
+ a new Databricks job via Databricks `api/2.2/jobs/runs/submit <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit>`_ API endpoint.


  Using the Operator
  ------------------

  There are three ways to instantiate this operator. In the first way, you can take the JSON payload that you typically use
- to call the ``api/2.1/jobs/runs/submit`` endpoint and pass it directly to our ``DatabricksSubmitRunOperator`` through the
+ to call the ``api/2.2/jobs/runs/submit`` endpoint and pass it directly to our ``DatabricksSubmitRunOperator`` through the
  ``json`` parameter. With this approach you get full control over the underlying payload to Jobs REST API, including
  execution of Databricks jobs with multiple tasks, but it's harder to detect errors because of the lack of the type checking.

@@ -91,7 +91,7 @@ Currently the named parameters that ``DatabricksSubmitRunOperator`` supports are
  task_id="notebook_run", new_cluster=new_cluster, notebook_task=notebook_task
  )

- Another way to do is use the param tasks to pass array of objects to instantiate this operator. Here the value of tasks param that is used to invoke ``api/2.1/jobs/runs/submit`` endpoint is passed through the ``tasks`` param in ``DatabricksSubmitRunOperator``. Instead of invoking single task, you can pass array of task and submit a one-time run.
+ Another way to do is use the param tasks to pass array of objects to instantiate this operator. Here the value of tasks param that is used to invoke ``api/2.2/jobs/runs/submit`` endpoint is passed through the ``tasks`` param in ``DatabricksSubmitRunOperator``. Instead of invoking single task, you can pass array of task and submit a one-time run.

  .. code-block:: python

@@ -22,12 +22,14 @@ description: |
  `Databricks <https://databricks.com/>`__

  state: ready
- source-date-epoch: 1765298937
+ source-date-epoch: 1768334769
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+ - 7.8.3
+ - 7.8.2
  - 7.8.1
  - 7.8.0
  - 7.7.5
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-databricks"
28
- version = "7.8.1"
28
+ version = "7.8.3"
29
29
  description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
30
30
  readme = "README.rst"
31
31
  license = "Apache-2.0"
@@ -59,11 +59,10 @@ requires-python = ">=3.10"
59
59
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
60
60
  dependencies = [
61
61
  "apache-airflow>=2.11.0",
62
- "apache-airflow-providers-common-compat>=1.10.1",
62
+ "apache-airflow-providers-common-compat>=1.12.0",
63
63
  "apache-airflow-providers-common-sql>=1.27.0",
64
64
  "requests>=2.32.0,<3",
65
65
  "databricks-sql-connector>=4.0.0",
66
- "databricks-sqlalchemy>=1.0.2",
67
66
  "aiohttp>=3.9.2, <4",
68
67
  "mergedeep>=1.3.4",
69
68
  'pandas>=2.1.2; python_version <"3.13"',
@@ -91,6 +90,9 @@ dependencies = [
91
90
  "openlineage" = [
92
91
  "apache-airflow-providers-openlineage>=2.3.0"
93
92
  ]
93
+ "sqlalchemy" = [
94
+ "databricks-sqlalchemy>=1.0.2",
95
+ ]
94
96
 
95
97
  [dependency-groups]
96
98
  dev = [
@@ -101,11 +103,13 @@ dev = [
101
103
  "apache-airflow-providers-common-sql",
102
104
  "apache-airflow-providers-openlineage",
103
105
  # Additional devel dependencies (do not remove this line and add extra development dependencies)
104
- "deltalake>=1.1.3",
106
+ # Limit deltalake to avoid issue with missing linux ARM wheels: https://github.com/delta-io/delta-rs/issues/4041
107
+ "deltalake>=1.1.3,!=1.3.0,!=1.3.1",
105
108
  "apache-airflow-providers-fab>=2.2.0; python_version < '3.13'",
106
109
  "apache-airflow-providers-microsoft-azure",
107
110
  "apache-airflow-providers-common-sql[pandas,polars]",
108
111
  "apache-airflow-providers-fab",
112
+ "apache-airflow-providers-databricks[sqlalchemy]",
109
113
  ]
110
114
 
111
115
  # To build docs:
@@ -134,8 +138,8 @@ apache-airflow-providers-common-sql = {workspace = true}
134
138
  apache-airflow-providers-standard = {workspace = true}
135
139
 
136
140
  [project.urls]
137
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1"
138
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html"
141
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3"
142
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.3/changelog.html"
139
143
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
140
144
  "Source Code" = "https://github.com/apache/airflow"
141
145
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "7.8.1"
+ __version__ = "7.8.3"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.11.0"
@@ -20,9 +20,9 @@ Databricks hook.

  This hook enable the submitting and running of jobs to the Databricks platform. Internally the
  operators talk to the
- ``api/2.1/jobs/run-now``
+ ``api/2.2/jobs/run-now``
  `endpoint <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow>_`
- or the ``api/2.1/jobs/runs/submit``
+ or the ``api/2.2/jobs/runs/submit``
  `endpoint <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit>`_.
  """

@@ -37,37 +37,37 @@ from requests import exceptions as requests_exceptions
  from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook

- GET_CLUSTER_ENDPOINT = ("GET", "2.0/clusters/get")
- RESTART_CLUSTER_ENDPOINT = ("POST", "2.0/clusters/restart")
- START_CLUSTER_ENDPOINT = ("POST", "2.0/clusters/start")
- TERMINATE_CLUSTER_ENDPOINT = ("POST", "2.0/clusters/delete")
+ GET_CLUSTER_ENDPOINT = ("GET", "2.2/clusters/get")
+ RESTART_CLUSTER_ENDPOINT = ("POST", "2.2/clusters/restart")
+ START_CLUSTER_ENDPOINT = ("POST", "2.2/clusters/start")
+ TERMINATE_CLUSTER_ENDPOINT = ("POST", "2.2/clusters/delete")

- CREATE_ENDPOINT = ("POST", "2.1/jobs/create")
- RESET_ENDPOINT = ("POST", "2.1/jobs/reset")
- UPDATE_ENDPOINT = ("POST", "2.1/jobs/update")
- RUN_NOW_ENDPOINT = ("POST", "2.1/jobs/run-now")
- SUBMIT_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/submit")
- GET_RUN_ENDPOINT = ("GET", "2.1/jobs/runs/get")
- CANCEL_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/cancel")
- DELETE_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/delete")
- REPAIR_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/repair")
- OUTPUT_RUNS_JOB_ENDPOINT = ("GET", "2.1/jobs/runs/get-output")
- CANCEL_ALL_RUNS_ENDPOINT = ("POST", "2.1/jobs/runs/cancel-all")
+ CREATE_ENDPOINT = ("POST", "2.2/jobs/create")
+ RESET_ENDPOINT = ("POST", "2.2/jobs/reset")
+ UPDATE_ENDPOINT = ("POST", "2.2/jobs/update")
+ RUN_NOW_ENDPOINT = ("POST", "2.2/jobs/run-now")
+ SUBMIT_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/submit")
+ GET_RUN_ENDPOINT = ("GET", "2.2/jobs/runs/get")
+ CANCEL_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/cancel")
+ DELETE_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/delete")
+ REPAIR_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/repair")
+ OUTPUT_RUNS_JOB_ENDPOINT = ("GET", "2.2/jobs/runs/get-output")
+ CANCEL_ALL_RUNS_ENDPOINT = ("POST", "2.2/jobs/runs/cancel-all")

- INSTALL_LIBS_ENDPOINT = ("POST", "2.0/libraries/install")
- UNINSTALL_LIBS_ENDPOINT = ("POST", "2.0/libraries/uninstall")
- UPDATE_REPO_ENDPOINT = ("PATCH", "2.0/repos/")
- DELETE_REPO_ENDPOINT = ("DELETE", "2.0/repos/")
- CREATE_REPO_ENDPOINT = ("POST", "2.0/repos")
+ INSTALL_LIBS_ENDPOINT = ("POST", "2.2/libraries/install")
+ UNINSTALL_LIBS_ENDPOINT = ("POST", "2.2/libraries/uninstall")
+ UPDATE_REPO_ENDPOINT = ("PATCH", "2.2/repos/")
+ DELETE_REPO_ENDPOINT = ("DELETE", "2.2/repos/")
+ CREATE_REPO_ENDPOINT = ("POST", "2.2/repos")

- LIST_JOBS_ENDPOINT = ("GET", "2.1/jobs/list")
- LIST_PIPELINES_ENDPOINT = ("GET", "2.0/pipelines")
- LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "2.0/sql/endpoints")
+ LIST_JOBS_ENDPOINT = ("GET", "2.2/jobs/list")
+ LIST_PIPELINES_ENDPOINT = ("GET", "2.2/pipelines")
+ LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "2.2/sql/endpoints")

- WORKSPACE_GET_STATUS_ENDPOINT = ("GET", "2.0/workspace/get-status")
+ WORKSPACE_GET_STATUS_ENDPOINT = ("GET", "2.2/workspace/get-status")

- SPARK_VERSIONS_ENDPOINT = ("GET", "2.0/clusters/spark-versions")
- SQL_STATEMENTS_ENDPOINT = "2.0/sql/statements"
+ SPARK_VERSIONS_ENDPOINT = ("GET", "2.2/clusters/spark-versions")
+ SQL_STATEMENTS_ENDPOINT = "2.2/sql/statements"


  class RunLifeCycleState(Enum):
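Each constant above is a ``(method, path)`` tuple consumed by ``BaseDatabricksHook._do_api_call`` (the same call pattern appears verbatim later in this diff for the permissions endpoint). A sketch of that usage, assuming a configured ``databricks_default`` connection:

.. code-block:: python

    from airflow.providers.databricks.hooks.databricks import SUBMIT_RUN_ENDPOINT, DatabricksHook

    def submit_one_time_run(payload: dict) -> int:
        # SUBMIT_RUN_ENDPOINT is ("POST", "2.2/jobs/runs/submit") after this change;
        # _do_api_call resolves the method and path against the connection's host.
        hook = DatabricksHook(databricks_conn_id="databricks_default")
        response = hook._do_api_call(SUBMIT_RUN_ENDPOINT, payload)
        # The runs/submit endpoint responds with the id of the newly submitted run.
        return response["run_id"]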
@@ -293,7 +293,7 @@ class DatabricksHook(BaseDatabricksHook):

  def create_job(self, json: dict) -> int:
  """
- Call the ``api/2.1/jobs/create`` endpoint.
+ Call the ``api/2.2/jobs/create`` endpoint.

  :param json: The data used in the body of the request to the ``create`` endpoint.
  :return: the job_id as an int
@@ -303,7 +303,7 @@ class DatabricksHook(BaseDatabricksHook):

  def reset_job(self, job_id: str, json: dict) -> None:
  """
- Call the ``api/2.1/jobs/reset`` endpoint.
+ Call the ``api/2.2/jobs/reset`` endpoint.

  :param json: The data used in the new_settings of the request to the ``reset`` endpoint.
  """
@@ -321,7 +321,7 @@ class DatabricksHook(BaseDatabricksHook):

  def update_job(self, job_id: str, json: dict) -> None:
  """
- Call the ``api/2.1/jobs/update`` endpoint.
+ Call the ``api/2.2/jobs/update`` endpoint.

  :param job_id: The id of the job to update.
  :param json: The data used in the new_settings of the request to the ``update`` endpoint.
@@ -330,7 +330,7 @@ class DatabricksHook(BaseDatabricksHook):

  def run_now(self, json: dict) -> int:
  """
- Call the ``api/2.1/jobs/run-now`` endpoint.
+ Call the ``api/2.2/jobs/run-now`` endpoint.

  :param json: The data used in the body of the request to the ``run-now`` endpoint.
  :return: the run_id as an int
@@ -340,7 +340,7 @@ class DatabricksHook(BaseDatabricksHook):

  def submit_run(self, json: dict) -> int:
  """
- Call the ``api/2.1/jobs/runs/submit`` endpoint.
+ Call the ``api/2.2/jobs/runs/submit`` endpoint.

  :param json: The data used in the body of the request to the ``submit`` endpoint.
  :return: the run_id as an int
@@ -385,9 +385,9 @@ class DatabricksHook(BaseDatabricksHook):
  all_jobs += [j for j in jobs if j["settings"]["name"] == job_name]
  else:
  all_jobs += jobs
- has_more = response.get("has_more", False)
- if has_more:
- page_token = response.get("next_page_token", "")
+ # issue-59189: API v2.2 removes "has_more" field
+ page_token = response.get("next_page_token", "")
+ has_more = bool(page_token)

  return all_jobs

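The change above reflects that the ``has_more`` field is gone from the 2.2 jobs list response, so pagination keys off ``next_page_token`` instead. A sketch of the resulting loop shape (illustrative, not the provider's exact code; the request and response field names follow the Databricks jobs list API):

.. code-block:: python

    from airflow.providers.databricks.hooks.databricks import LIST_JOBS_ENDPOINT, DatabricksHook

    def list_all_jobs(hook: DatabricksHook, limit: int = 25) -> list[dict]:
        all_jobs: list[dict] = []
        page_token = ""
        has_more = True
        while has_more:
            payload = {"limit": limit}
            if page_token:
                payload["page_token"] = page_token
            response = hook._do_api_call(LIST_JOBS_ENDPOINT, payload)
            all_jobs += response.get("jobs", [])
            # API 2.2: an empty or missing next_page_token marks the last page.
            page_token = response.get("next_page_token", "")
            has_more = bool(page_token)
        return all_jobs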
@@ -717,7 +717,7 @@ class DatabricksHook(BaseDatabricksHook):
  """
  Install libraries on the cluster.

- Utility function to call the ``2.0/libraries/install`` endpoint.
+ Utility function to call the ``2.2/libraries/install`` endpoint.

  :param json: json dictionary containing cluster_id and an array of library
  """
@@ -727,7 +727,7 @@ class DatabricksHook(BaseDatabricksHook):
  """
  Uninstall libraries on the cluster.

- Utility function to call the ``2.0/libraries/uninstall`` endpoint.
+ Utility function to call the ``2.2/libraries/uninstall`` endpoint.

  :param json: json dictionary containing cluster_id and an array of library
  """
@@ -790,7 +790,7 @@ class DatabricksHook(BaseDatabricksHook):
  :param json: payload
  :return: json containing permission specification
  """
- return self._do_api_call(("PATCH", f"2.0/permissions/jobs/{job_id}"), json)
+ return self._do_api_call(("PATCH", f"2.2/permissions/jobs/{job_id}"), json)

  def post_sql_statement(self, json: dict[str, Any]) -> str:
  """
@@ -49,8 +49,7 @@ from tenacity import (
  )

  from airflow import __version__
- from airflow.exceptions import AirflowOptionalProviderFeatureException
- from airflow.providers.common.compat.sdk import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
  from airflow.providers_manager import ProvidersManager

  try:
@@ -32,9 +32,8 @@ from typing import (
  )

  from databricks import sql
  from databricks.sql.types import Row
- from sqlalchemy.engine import URL

- from airflow.providers.common.compat.sdk import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
  from airflow.providers.common.sql.hooks.handlers import return_single_query_results
  from airflow.providers.common.sql.hooks.sql import DbApiHook
  from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
@@ -43,6 +42,7 @@ from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHoo

  if TYPE_CHECKING:
  from databricks.sql.client import Connection
+ from sqlalchemy.engine import URL

  from airflow.models.connection import Connection as AirflowConnection
  from airflow.providers.openlineage.extractors import OperatorLineage
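The hunk that follows makes the SQLAlchemy import lazy, so callers asking this hook for a SQLAlchemy URL should be ready for ``AirflowOptionalProviderFeatureException`` when the new ``sqlalchemy`` extra is not installed. A sketch of that guard from the caller's side (the ``sqlalchemy_url`` accessor name is assumed here for illustration):

.. code-block:: python

    from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
    from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

    def describe_sqlalchemy_url(hook: DatabricksSqlHook) -> str:
        try:
            # Assumed DbApiHook-style accessor; adjust to the actual property name.
            return str(hook.sqlalchemy_url)
        except AirflowOptionalProviderFeatureException:
            # Install the optional extra to enable this:
            #   apache-airflow-providers-databricks[sqlalchemy]
            return "sqlalchemy extra not installed"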
@@ -179,6 +179,14 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):

  :return: the extracted sqlalchemy.engine.URL object.
  """
+ try:
+ from sqlalchemy.engine import URL
+ except ImportError:
+ raise AirflowOptionalProviderFeatureException(
+ "sqlalchemy is required to generate the connection URL. "
+ "Install it with: pip install 'apache-airflow-providers-databricks[sqlalchemy]'"
+ )
+
  url_query = {
  "http_path": self._http_path,
  "catalog": self.catalog,
@@ -26,8 +26,7 @@ from collections.abc import Sequence
  from functools import cached_property
  from typing import TYPE_CHECKING, Any

- from airflow.configuration import conf
- from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, BaseOperatorLink, XCom
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, BaseOperatorLink, XCom, conf
  from airflow.providers.databricks.hooks.databricks import (
  DatabricksHook,
  RunLifeCycleState,
@@ -260,7 +259,7 @@ class DatabricksCreateJobsOperator(BaseOperator):
  https://docs.databricks.com/api/workspace/jobs/reset

  :param json: A JSON object containing API parameters which will be passed
- directly to the ``api/2.1/jobs/create`` endpoint. The other named parameters
+ directly to the ``api/2.2/jobs/create`` endpoint. The other named parameters
  (i.e. ``name``, ``tags``, ``tasks``, etc.) to this operator will
  be merged with this json dictionary if they are provided.
  If there are conflicts during the merge, the named parameters will
@@ -391,7 +390,7 @@ class DatabricksCreateJobsOperator(BaseOperator):

  class DatabricksSubmitRunOperator(BaseOperator):
  """
- Submits a Spark job run to Databricks using the api/2.1/jobs/runs/submit API endpoint.
+ Submits a Spark job run to Databricks using the api/2.2/jobs/runs/submit API endpoint.

  See: https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit

@@ -406,7 +405,7 @@ class DatabricksSubmitRunOperator(BaseOperator):
  .. seealso::
  https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit
  :param json: A JSON object containing API parameters which will be passed
- directly to the ``api/2.1/jobs/runs/submit`` endpoint. The other named parameters
+ directly to the ``api/2.2/jobs/runs/submit`` endpoint. The other named parameters
  (i.e. ``spark_jar_task``, ``notebook_task``..) to this operator will
  be merged with this json dictionary if they are provided.
  If there are conflicts during the merge, the named parameters will
@@ -644,14 +643,14 @@ class DatabricksSubmitRunOperator(BaseOperator):

  class DatabricksRunNowOperator(BaseOperator):
  """
- Runs an existing Spark job run to Databricks using the api/2.1/jobs/run-now API endpoint.
+ Runs an existing Spark job run to Databricks using the api/2.2/jobs/run-now API endpoint.

  See: https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow

  There are two ways to instantiate this operator.

  In the first way, you can take the JSON payload that you typically use
- to call the ``api/2.1/jobs/run-now`` endpoint and pass it directly
+ to call the ``api/2.2/jobs/run-now`` endpoint and pass it directly
  to our ``DatabricksRunNowOperator`` through the ``json`` parameter.
  For example ::

@@ -729,7 +728,7 @@ class DatabricksRunNowOperator(BaseOperator):
  https://docs.databricks.com/en/workflows/jobs/settings.html#add-parameters-for-all-job-tasks

  :param json: A JSON object containing API parameters which will be passed
- directly to the ``api/2.1/jobs/run-now`` endpoint. The other named parameters
+ directly to the ``api/2.2/jobs/run-now`` endpoint. The other named parameters
  (i.e. ``notebook_params``, ``spark_submit_params``..) to this operator will
  be merged with this json dictionary if they are provided.
  If there are conflicts during the merge, the named parameters will