apache-airflow-providers-databricks 7.6.0rc1__tar.gz → 7.7.0rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/PKG-INFO +22 -18
  2. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/README.rst +12 -9
  3. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/changelog.rst +34 -0
  4. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/index.rst +12 -8
  5. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/provider.yaml +2 -1
  6. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/pyproject.toml +13 -11
  7. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/__init__.py +1 -1
  8. {apache_airflow_providers_databricks-7.6.0rc1/tests/system → apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers}/__init__.py +1 -1
  9. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/__init__.py +1 -1
  10. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/hooks/databricks.py +2 -1
  11. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/hooks/databricks_sql.py +36 -6
  12. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/operators/databricks.py +27 -22
  13. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/operators/databricks_workflow.py +5 -1
  14. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/plugins/databricks_workflow.py +3 -3
  15. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/triggers/databricks.py +3 -13
  16. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/utils/databricks.py +53 -1
  17. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/utils/openlineage.py +16 -19
  18. {apache_airflow_providers_databricks-7.6.0rc1/tests/unit → apache_airflow_providers_databricks-7.7.0rc1/tests/system}/__init__.py +1 -1
  19. {apache_airflow_providers_databricks-7.6.0rc1/src/airflow/providers → apache_airflow_providers_databricks-7.7.0rc1/tests/unit}/__init__.py +1 -1
  20. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/hooks/test_databricks.py +23 -0
  21. apache_airflow_providers_databricks-7.7.0rc1/tests/unit/databricks/hooks/test_databricks_base.py +770 -0
  22. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/hooks/test_databricks_sql.py +74 -27
  23. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/operators/test_databricks.py +5 -2
  24. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/operators/test_databricks_workflow.py +3 -0
  25. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/plugins/test_databricks_workflow.py +12 -2
  26. apache_airflow_providers_databricks-7.7.0rc1/tests/unit/databricks/utils/test_databricks.py +490 -0
  27. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/utils/test_openlineage.py +28 -24
  28. apache_airflow_providers_databricks-7.6.0rc1/tests/unit/databricks/hooks/test_databricks_base.py +0 -33
  29. apache_airflow_providers_databricks-7.6.0rc1/tests/unit/databricks/utils/test_databricks.py +0 -63
  30. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/.latest-doc-only-change.txt +0 -0
  31. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/commits.rst +0 -0
  32. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/conf.py +0 -0
  33. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/connections/databricks.rst +0 -0
  34. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/img/databricks_workflow_task_group_airflow_graph_view.png +0 -0
  35. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/img/workflow_plugin_launch_task.png +0 -0
  36. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/img/workflow_plugin_single_task.png +0 -0
  37. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/img/workflow_run_databricks_graph_view.png +0 -0
  38. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/installing-providers-from-sources.rst +0 -0
  39. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/integration-logos/Databricks.png +0 -0
  40. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/copy_into.rst +0 -0
  41. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/index.rst +0 -0
  42. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/jobs_create.rst +0 -0
  43. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/notebook.rst +0 -0
  44. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/repos_create.rst +0 -0
  45. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/repos_delete.rst +0 -0
  46. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/repos_update.rst +0 -0
  47. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/run_now.rst +0 -0
  48. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/sql.rst +0 -0
  49. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/sql_statements.rst +0 -0
  50. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/submit_run.rst +0 -0
  51. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/task.rst +0 -0
  52. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/operators/workflow.rst +0 -0
  53. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/plugins/index.rst +0 -0
  54. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/plugins/workflow.rst +0 -0
  55. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/docs/security.rst +0 -0
  56. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/LICENSE +0 -0
  57. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/exceptions.py +0 -0
  58. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/get_provider_info.py +0 -0
  59. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
  60. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/hooks/databricks_base.py +0 -0
  61. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/operators/__init__.py +0 -0
  62. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  63. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/operators/databricks_sql.py +0 -0
  64. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
  65. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
  66. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/sensors/databricks.py +0 -0
  67. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  68. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  69. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
  70. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/utils/__init__.py +0 -0
  71. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/utils/mixins.py +0 -0
  72. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/src/airflow/providers/databricks/version_compat.py +0 -0
  73. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/conftest.py +0 -0
  74. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/system/databricks/__init__.py +0 -0
  75. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/system/databricks/example_databricks.py +0 -0
  76. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/system/databricks/example_databricks_repos.py +0 -0
  77. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/system/databricks/example_databricks_sensors.py +0 -0
  78. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/system/databricks/example_databricks_sql.py +0 -0
  79. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/system/databricks/example_databricks_workflow.py +0 -0
  80. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/__init__.py +0 -0
  81. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/hooks/__init__.py +0 -0
  82. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity.py +0 -0
  83. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity_async.py +0 -0
  84. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/operators/__init__.py +0 -0
  85. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/operators/test_databricks_copy.py +0 -0
  86. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/operators/test_databricks_repos.py +0 -0
  87. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/operators/test_databricks_sql.py +0 -0
  88. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/plugins/__init__.py +0 -0
  89. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/sensors/__init__.py +0 -0
  90. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/sensors/test_databricks.py +0 -0
  91. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/sensors/test_databricks_partition.py +0 -0
  92. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/sensors/test_databricks_sql.py +0 -0
  93. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/test_exceptions.py +0 -0
  94. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/triggers/__init__.py +0 -0
  95. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/triggers/test_databricks.py +0 -0
  96. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/utils/__init__.py +0 -0
  97. {apache_airflow_providers_databricks-7.6.0rc1 → apache_airflow_providers_databricks-7.7.0rc1}/tests/unit/databricks/utils/test_mixins.py +0 -0
--- apache_airflow_providers_databricks-7.6.0rc1/PKG-INFO
+++ apache_airflow_providers_databricks-7.7.0rc1/PKG-INFO
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.6.0rc1
+Version: 7.7.0rc1
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python: ~=3.10
+Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
@@ -18,33 +18,34 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow>=2.10.0rc1
 Requires-Dist: apache-airflow-providers-common-compat>=1.6.0rc1
 Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1
 Requires-Dist: requests>=2.32.0,<3
-Requires-Dist: databricks-sql-connector>=3.0.0
+Requires-Dist: databricks-sql-connector>=4.0.0
 Requires-Dist: databricks-sqlalchemy>=1.0.2
 Requires-Dist: aiohttp>=3.9.2, <4
 Requires-Dist: mergedeep>=1.3.4
 Requires-Dist: pandas>=2.1.2; python_version <"3.13"
 Requires-Dist: pandas>=2.2.3; python_version >="3.13"
-Requires-Dist: pyarrow>=16.1.0
+Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
+Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
 Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
-Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
+Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.6.0/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.6.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: azure-identity
 Provides-Extra: fab
-Provides-Extra: openlineage
 Provides-Extra: sdk
 Provides-Extra: standard
 
@@ -73,8 +74,9 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.6.0``
+Release: ``7.7.0``
 
+Release Date: ``|PypiReleaseDate|``
 
 `Databricks <https://databricks.com/>`__
 
@@ -86,7 +88,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.6.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.0/>`_.
 
 Installation
 ------------
@@ -95,26 +97,28 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-databricks``
 
-The package supports the following python versions: 3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12,3.13
 
 Requirements
 ------------
 
-========================================== =====================================
+========================================== ======================================
 PIP package                                Version required
-========================================== =====================================
+========================================== ======================================
 ``apache-airflow``                         ``>=2.10.0``
 ``apache-airflow-providers-common-compat`` ``>=1.6.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
+``apache-airflow-providers-openlineage``   ``>=2.3.0``
 ``requests``                               ``>=2.32.0,<3``
-``databricks-sql-connector``               ``>=3.0.0``
+``databricks-sql-connector``               ``>=4.0.0``
 ``databricks-sqlalchemy``                  ``>=1.0.2``
 ``aiohttp``                                ``>=3.9.2,<4``
 ``mergedeep``                              ``>=1.3.4``
 ``pandas``                                 ``>=2.1.2; python_version < "3.13"``
 ``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
-``pyarrow``                                ``>=16.1.0``
-========================================== =====================================
+``pyarrow``                                ``>=16.1.0; python_version < "3.13"``
+``pyarrow``                                ``>=18.0.0; python_version >= "3.13"``
+========================================== ======================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -139,5 +143,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.6.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.0/changelog.html>`_.
 
--- apache_airflow_providers_databricks-7.6.0rc1/README.rst
+++ apache_airflow_providers_databricks-7.7.0rc1/README.rst
@@ -23,8 +23,9 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.6.0``
+Release: ``7.7.0``
 
+Release Date: ``|PypiReleaseDate|``
 
 `Databricks <https://databricks.com/>`__
 
@@ -36,7 +37,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.6.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.0/>`_.
 
 Installation
 ------------
@@ -45,26 +46,28 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-databricks``
 
-The package supports the following python versions: 3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12,3.13
 
 Requirements
 ------------
 
-========================================== =====================================
+========================================== ======================================
 PIP package                                Version required
-========================================== =====================================
+========================================== ======================================
 ``apache-airflow``                         ``>=2.10.0``
 ``apache-airflow-providers-common-compat`` ``>=1.6.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
+``apache-airflow-providers-openlineage``   ``>=2.3.0``
 ``requests``                               ``>=2.32.0,<3``
-``databricks-sql-connector``               ``>=3.0.0``
+``databricks-sql-connector``               ``>=4.0.0``
 ``databricks-sqlalchemy``                  ``>=1.0.2``
 ``aiohttp``                                ``>=3.9.2,<4``
 ``mergedeep``                              ``>=1.3.4``
 ``pandas``                                 ``>=2.1.2; python_version < "3.13"``
 ``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
-``pyarrow``                                ``>=16.1.0``
-========================================== =====================================
+``pyarrow``                                ``>=16.1.0; python_version < "3.13"``
+``pyarrow``                                ``>=18.0.0; python_version >= "3.13"``
+========================================== ======================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -89,4 +92,4 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.6.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.0/changelog.html>`_.
--- apache_airflow_providers_databricks-7.6.0rc1/docs/changelog.rst
+++ apache_airflow_providers_databricks-7.7.0rc1/docs/changelog.rst
@@ -26,6 +26,40 @@
 Changelog
 ---------
 
+7.7.0
+.....
+
+Features
+~~~~~~~~
+
+* ``feat: Refactor Databricks error handling with utility functions (#52704)``
+
+Bug Fixes
+~~~~~~~~~
+
+* ``fix URI Construction in Databricks Hook (#53217)``
+* ``fix: Improve logging and timeouts in OL helpers (#53139)``
+* ``fix: Task Group Deprecation error from plugin (#53813)``
+* ``Remove 'api' prefix from update_job_permission in databricks hook (#53039)``
+
+Misc
+~~~~
+
+* ``Refactor: Consolidate API endpoints for improved security and maintainability (#53214)``
+* ``Exclude deltalake 1.1.1 (#53729)``
+* ``Add Python 3.13 support for Airflow. (#46891)``
+* ``Cleanup mypy ignores in databricks provider where possible (#53265)``
+* ``Remove type ignore across codebase after mypy upgrade (#53243)``
+* ``Remove upper-binding for "python-requires" (#52980)``
+* ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Replace 'mock.patch("utcnow")' with time_machine. (#53642)``
+   * ``Added non-Azure DatabricksBaseHook tests (#53286)``
+   * ``Fix pandas FutureWarning (#53236)``
+   * ``Handle ruff PT028 changes (#53235)``
+
 7.6.0
 .....
 
--- apache_airflow_providers_databricks-7.6.0rc1/docs/index.rst
+++ apache_airflow_providers_databricks-7.7.0rc1/docs/index.rst
@@ -78,7 +78,9 @@ apache-airflow-providers-databricks package
 `Databricks <https://databricks.com/>`__
 
 
-Release: 7.6.0
+Release: 7.7.0
+
+Release Date: ``|PypiReleaseDate|``
 
 Provider package
 ----------------
@@ -98,21 +100,23 @@ Requirements
 
 The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
 
-========================================== =====================================
+========================================== ======================================
 PIP package                                Version required
-========================================== =====================================
+========================================== ======================================
 ``apache-airflow``                         ``>=2.10.0``
 ``apache-airflow-providers-common-compat`` ``>=1.6.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
+``apache-airflow-providers-openlineage``   ``>=2.3.0``
 ``requests``                               ``>=2.32.0,<3``
-``databricks-sql-connector``               ``>=3.0.0``
+``databricks-sql-connector``               ``>=4.0.0``
 ``databricks-sqlalchemy``                  ``>=1.0.2``
 ``aiohttp``                                ``>=3.9.2,<4``
 ``mergedeep``                              ``>=1.3.4``
 ``pandas``                                 ``>=2.1.2; python_version < "3.13"``
 ``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
-``pyarrow``                                ``>=16.1.0``
-========================================== =====================================
+``pyarrow``                                ``>=16.1.0; python_version < "3.13"``
+``pyarrow``                                ``>=18.0.0; python_version >= "3.13"``
+========================================== ======================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -142,5 +146,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-databricks 7.6.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.6.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.6.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.6.0.tar.gz.sha512>`__)
-* `The apache-airflow-providers-databricks 7.6.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.6.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.6.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.6.0-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-databricks 7.7.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.0.tar.gz.sha512>`__)
+* `The apache-airflow-providers-databricks 7.7.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.7.0-py3-none-any.whl.sha512>`__)
--- apache_airflow_providers_databricks-7.6.0rc1/provider.yaml
+++ apache_airflow_providers_databricks-7.7.0rc1/provider.yaml
@@ -22,12 +22,13 @@ description: |
   `Databricks <https://databricks.com/>`__
 
 state: ready
-source-date-epoch: 1751473030
+source-date-epoch: 1753690258
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 7.7.0
   - 7.6.0
   - 7.5.0
   - 7.4.0
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-databricks"
28
- version = "7.6.0rc1"
28
+ version = "7.7.0rc1"
29
29
  description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
30
30
  readme = "README.rst"
31
31
  authors = [
@@ -47,9 +47,10 @@ classifiers = [
47
47
  "Programming Language :: Python :: 3.10",
48
48
  "Programming Language :: Python :: 3.11",
49
49
  "Programming Language :: Python :: 3.12",
50
+ "Programming Language :: Python :: 3.13",
50
51
  "Topic :: System :: Monitoring",
51
52
  ]
52
- requires-python = "~=3.10"
53
+ requires-python = ">=3.10"
53
54
 
54
55
  # The dependencies should be modified in place in the generated file.
55
56
  # Any change in the dependencies is preserved when the file is regenerated
@@ -59,14 +60,16 @@ dependencies = [
59
60
  "apache-airflow>=2.10.0rc1",
60
61
  "apache-airflow-providers-common-compat>=1.6.0rc1",
61
62
  "apache-airflow-providers-common-sql>=1.27.0rc1",
63
+ "apache-airflow-providers-openlineage>=2.3.0rc1",
62
64
  "requests>=2.32.0,<3",
63
- "databricks-sql-connector>=3.0.0",
65
+ "databricks-sql-connector>=4.0.0",
64
66
  "databricks-sqlalchemy>=1.0.2",
65
67
  "aiohttp>=3.9.2, <4",
66
68
  "mergedeep>=1.3.4",
67
69
  'pandas>=2.1.2; python_version <"3.13"',
68
70
  'pandas>=2.2.3; python_version >="3.13"',
69
- "pyarrow>=16.1.0",
71
+ "pyarrow>=16.1.0; python_version < '3.13'",
72
+ "pyarrow>=18.0.0; python_version >= '3.13'",
70
73
  ]
71
74
 
72
75
  # The optional dependencies should be modified in place in the generated file
@@ -80,14 +83,11 @@ dependencies = [
80
83
  "azure-identity>=1.3.1",
81
84
  ]
82
85
  "fab" = [
83
- "apache-airflow-providers-fab"
86
+ "apache-airflow-providers-fab>=2.2.0rc1; python_version < '3.13'"
84
87
  ]
85
88
  "standard" = [
86
89
  "apache-airflow-providers-standard"
87
90
  ]
88
- "openlineage" = [
89
- "apache-airflow-providers-openlineage>=2.3.0rc1"
90
- ]
91
91
 
92
92
  [dependency-groups]
93
93
  dev = [
@@ -99,7 +99,9 @@ dev = [
99
99
  "apache-airflow-providers-fab",
100
100
  "apache-airflow-providers-openlineage",
101
101
  # Additional devel dependencies (do not remove this line and add extra development dependencies)
102
- "deltalake>=0.12.0",
102
+ # https://github.com/delta-io/delta-rs/issues/3621
103
+ "deltalake>=0.12.0,!=1.1.1",
104
+ "apache-airflow-providers-fab>=2.2.0; python_version < '3.13'",
103
105
  "apache-airflow-providers-microsoft-azure",
104
106
  "apache-airflow-providers-common-sql[pandas,polars]",
105
107
  ]
@@ -130,8 +132,8 @@ apache-airflow-providers-common-sql = {workspace = true}
130
132
  apache-airflow-providers-standard = {workspace = true}
131
133
 
132
134
  [project.urls]
133
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.6.0"
134
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.6.0/changelog.html"
135
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.0"
136
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.7.0/changelog.html"
135
137
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
136
138
  "Source Code" = "https://github.com/apache/airflow"
137
139
  "Slack Chat" = "https://s.apache.org/airflow-slack"
--- apache_airflow_providers_databricks-7.6.0rc1/src/airflow/__init__.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
--- apache_airflow_providers_databricks-7.6.0rc1/tests/system/__init__.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
--- apache_airflow_providers_databricks-7.6.0rc1/src/airflow/providers/databricks/__init__.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers/databricks/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.6.0"
+__version__ = "7.7.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -62,6 +62,7 @@ CREATE_REPO_ENDPOINT = ("POST", "2.0/repos")
62
62
 
63
63
  LIST_JOBS_ENDPOINT = ("GET", "2.1/jobs/list")
64
64
  LIST_PIPELINES_ENDPOINT = ("GET", "2.0/pipelines")
65
+ LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "2.0/sql/endpoints")
65
66
 
66
67
  WORKSPACE_GET_STATUS_ENDPOINT = ("GET", "2.0/workspace/get-status")
67
68
 
@@ -770,7 +771,7 @@ class DatabricksHook(BaseDatabricksHook):
770
771
  :param json: payload
771
772
  :return: json containing permission specification
772
773
  """
773
- return self._do_api_call(("PATCH", f"api/2.0/permissions/jobs/{job_id}"), json)
774
+ return self._do_api_call(("PATCH", f"2.0/permissions/jobs/{job_id}"), json)
774
775
 
775
776
  def post_sql_statement(self, json: dict[str, Any]) -> str:
776
777
  """
--- apache_airflow_providers_databricks-7.6.0rc1/src/airflow/providers/databricks/hooks/databricks_sql.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers/databricks/hooks/databricks_sql.py
@@ -30,13 +30,15 @@ from typing import (
     overload,
 )
 
-from databricks import sql  # type: ignore[attr-defined]
+from databricks import sql
 from databricks.sql.types import Row
+from sqlalchemy.engine import URL
 
 from airflow.exceptions import AirflowException
 from airflow.providers.common.sql.hooks.handlers import return_single_query_results
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
+from airflow.providers.databricks.hooks.databricks import LIST_SQL_ENDPOINTS_ENDPOINT
 from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
 
 if TYPE_CHECKING:
@@ -47,9 +49,6 @@ if TYPE_CHECKING:
     from airflow.providers.openlineage.sqlparser import DatabaseInfo
 
 
-LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "api/2.0/sql/endpoints")
-
-
 T = TypeVar("T")
 
 
@@ -173,7 +172,38 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
             raise AirflowException("SQL connection is not initialized")
         return cast("AirflowConnection", self._sql_conn)
 
-    @overload  # type: ignore[override]
+    @property
+    def sqlalchemy_url(self) -> URL:
+        """
+        Return a Sqlalchemy.engine.URL object from the connection.
+
+        :return: the extracted sqlalchemy.engine.URL object.
+        """
+        conn = self.get_conn()
+        url_query = {
+            "http_path": self._http_path,
+            "catalog": self.catalog,
+            "schema": self.schema,
+        }
+        url_query = {k: v for k, v in url_query.items() if v is not None}
+        return URL.create(
+            drivername="databricks",
+            username="token",
+            password=conn.password,
+            host=conn.host,
+            port=conn.port,
+            query=url_query,
+        )
+
+    def get_uri(self) -> str:
+        """
+        Extract the URI from the connection.
+
+        :return: the extracted uri.
+        """
+        return self.sqlalchemy_url.render_as_string(hide_password=False)
+
+    @overload
     def run(
         self,
         sql: str | Iterable[str],
@@ -258,7 +288,7 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
 
         # TODO: adjust this to make testing easier
         try:
-            self._run_command(cur, sql_statement, parameters)  # type: ignore[attr-defined]
+            self._run_command(cur, sql_statement, parameters)
         except Exception as e:
             if t is None or t.is_alive():
                 raise DatabricksSqlExecutionError(
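The new ``sqlalchemy_url`` property is what the ``fix URI Construction in Databricks Hook (#53217)`` changelog entry refers to: the URI is now assembled with SQLAlchemy's ``URL.create`` instead of ad-hoc string formatting, so credentials and query parameters are escaped correctly. A standalone sketch of the URL it builds; the host, token, and ``http_path`` values are placeholders, not taken from the diff:

# Sketch of the URL that DatabricksSqlHook.sqlalchemy_url now constructs.
# All credential/host values here are fake placeholders.
from sqlalchemy.engine import URL

url = URL.create(
    drivername="databricks",
    username="token",
    password="dapiEXAMPLETOKEN",  # hypothetical personal access token
    host="adb-1234567890123456.7.azuredatabricks.net",  # hypothetical workspace
    port=443,
    query={
        "http_path": "/sql/1.0/warehouses/abc123",  # hypothetical warehouse path
        "catalog": "main",
        "schema": "default",
    },
)
# Equivalent of the new get_uri(); query values are percent-encoded on render.
print(url.render_as_string(hide_password=False))
# databricks://token:dapiEXAMPLETOKEN@adb-1234567890123456.7.azuredatabricks.net:443?catalog=main&http_path=%2Fsql%2F1.0%2Fwarehouses%2Fabc123&schema=default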
--- apache_airflow_providers_databricks-7.6.0rc1/src/airflow/providers/databricks/operators/databricks.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers/databricks/operators/databricks.py
@@ -46,21 +46,32 @@ from airflow.providers.databricks.plugins.databricks_workflow import (
 from airflow.providers.databricks.triggers.databricks import (
     DatabricksExecutionTrigger,
 )
-from airflow.providers.databricks.utils.databricks import normalise_json_content, validate_trigger_event
+from airflow.providers.databricks.utils.databricks import (
+    extract_failed_task_errors,
+    normalise_json_content,
+    validate_trigger_event,
+)
 from airflow.providers.databricks.utils.mixins import DatabricksSQLStatementsMixin
 from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
 
 if TYPE_CHECKING:
     from airflow.models.taskinstancekey import TaskInstanceKey
+    from airflow.providers.databricks.operators.databricks_workflow import (
+        DatabricksWorkflowTaskGroup,
+    )
     from airflow.providers.openlineage.extractors import OperatorLineage
     from airflow.utils.context import Context
-    from airflow.utils.task_group import TaskGroup
+
+    try:
+        from airflow.sdk import TaskGroup
+    except ImportError:
+        from airflow.utils.task_group import TaskGroup  # type: ignore[no-redef]
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperatorLink
     from airflow.sdk.execution_time.xcom import XCom
 else:
-    from airflow.models import XCom  # type: ignore[no-redef]
+    from airflow.models import XCom
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
 
 DEFER_METHOD_NAME = "execute_complete"
@@ -95,17 +106,7 @@ def _handle_databricks_operator_execution(operator, hook, log, context) -> None:
                 log.info("View run status, Spark UI, and logs at %s", run_page_url)
                 return
             if run_state.result_state == "FAILED":
-                failed_tasks = []
-                for task in run_info.get("tasks", []):
-                    if task.get("state", {}).get("result_state", "") == "FAILED":
-                        task_run_id = task["run_id"]
-                        task_key = task["task_key"]
-                        run_output = hook.get_run_output(task_run_id)
-                        if "error" in run_output:
-                            error = run_output["error"]
-                        else:
-                            error = run_state.state_message
-                        failed_tasks.append({"task_key": task_key, "run_id": task_run_id, "error": error})
+                failed_tasks = extract_failed_task_errors(hook, run_info, run_state)
 
                 error_message = (
                     f"{operator.task_id} failed with terminal state: {run_state} "
@@ -1324,15 +1325,15 @@
 
         return self.databricks_run_id
 
-    def _handle_terminal_run_state(self, run_state: RunState) -> None:
+    def _handle_terminal_run_state(self, run_state: RunState, errors: list) -> None:
         """Handle the terminal state of the run."""
         if run_state.life_cycle_state != RunLifeCycleState.TERMINATED.value:
             raise AirflowException(
-                f"Databricks job failed with state {run_state.life_cycle_state}. Message: {run_state.state_message}"
+                f"Databricks job failed with state {run_state.life_cycle_state}. Message: {run_state.state_message}. Errors: {errors}"
             )
         if not run_state.is_successful:
             raise AirflowException(
-                f"Task failed. Final state {run_state.result_state}. Reason: {run_state.state_message}"
+                f"Task failed. Final state {run_state.result_state}. Reason: {run_state.state_message}. Errors: {errors}"
             )
         self.log.info("Task succeeded. Final state %s.", run_state.result_state)
 
@@ -1414,12 +1415,17 @@
             time.sleep(self.polling_period_seconds)
             run = self._hook.get_run(current_task_run_id)
             run_state = RunState(**run["state"])
+
             self.log.info(
                 "Current state of the databricks task %s is %s",
                 self.databricks_task_key,
                 run_state.life_cycle_state,
             )
-        self._handle_terminal_run_state(run_state)
+
+        # Extract errors from the run response using utility function
+        errors = extract_failed_task_errors(self._hook, run, run_state)
+
+        self._handle_terminal_run_state(run_state, errors)
 
     def execute(self, context: Context) -> None:
         """Execute the operator. Launch the job and monitor it if wait_for_termination is set to True."""
@@ -1428,9 +1434,7 @@
         if not self.workflow_run_metadata:
             launch_task_id = next(task for task in self.upstream_task_ids if task.endswith(".launch"))
             self.workflow_run_metadata = context["ti"].xcom_pull(task_ids=launch_task_id)
-        workflow_run_metadata = WorkflowRunMetadata(  # type: ignore[arg-type]
-            **self.workflow_run_metadata
-        )
+        workflow_run_metadata = WorkflowRunMetadata(**self.workflow_run_metadata)
         self.databricks_run_id = workflow_run_metadata.run_id
         self.databricks_conn_id = workflow_run_metadata.conn_id
 
@@ -1449,7 +1453,8 @@
 
     def execute_complete(self, context: dict | None, event: dict) -> None:
         run_state = RunState.from_json(event["run_state"])
-        self._handle_terminal_run_state(run_state)
+        errors = event.get("errors", [])
+        self._handle_terminal_run_state(run_state, errors)
 
 
 class DatabricksNotebookOperator(DatabricksTaskBaseOperator):
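The error-collection loop deleted from ``_handle_databricks_operator_execution`` now lives in ``extract_failed_task_errors`` in ``utils/databricks.py`` (the ``+53 -1`` entry in the file list above); its body is not shown in this diff. A sketch reconstructed from the removed inline logic, so the real utility may differ in detail:

# Reconstructed from the inline loop removed above; the actual function in
# airflow/providers/databricks/utils/databricks.py may differ.
def extract_failed_task_errors(hook, run_info: dict, run_state) -> list[dict]:
    """Collect task_key/run_id/error details for each FAILED task in a run."""
    failed_tasks = []
    for task in run_info.get("tasks", []):
        if task.get("state", {}).get("result_state", "") == "FAILED":
            task_run_id = task["run_id"]
            task_key = task["task_key"]
            run_output = hook.get_run_output(task_run_id)
            # Prefer the task's own error output, falling back to the run-level message.
            error = run_output["error"] if "error" in run_output else run_state.state_message
            failed_tasks.append({"task_key": task_key, "run_id": task_run_id, "error": error})
    return failed_tasks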
--- apache_airflow_providers_databricks-7.6.0rc1/src/airflow/providers/databricks/operators/databricks_workflow.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers/databricks/operators/databricks_workflow.py
@@ -33,7 +33,11 @@
     store_databricks_job_run_link,
 )
 from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
-from airflow.utils.task_group import TaskGroup
+
+try:
+    from airflow.sdk import TaskGroup
+except ImportError:
+    from airflow.utils.task_group import TaskGroup  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from types import TracebackType
--- apache_airflow_providers_databricks-7.6.0rc1/src/airflow/providers/databricks/plugins/databricks_workflow.py
+++ apache_airflow_providers_databricks-7.7.0rc1/src/airflow/providers/databricks/plugins/databricks_workflow.py
@@ -41,7 +41,6 @@ else:
     from airflow.www import auth  # type: ignore
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.state import TaskInstanceState
-from airflow.utils.task_group import TaskGroup
 
 if TYPE_CHECKING:
     from sqlalchemy.orm.session import Session
@@ -51,11 +50,12 @@ if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperatorLink
+    from airflow.sdk import BaseOperatorLink, TaskGroup
     from airflow.sdk.execution_time.xcom import XCom
 else:
-    from airflow.models import XCom  # type: ignore[no-redef]
+    from airflow.models import XCom
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
+    from airflow.utils.task_group import TaskGroup  # type: ignore[no-redef]
 
 
 REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)