apache-airflow-providers-databricks 7.8.0rc1.tar.gz → 7.8.1rc1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/PKG-INFO +8 -8
  2. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/README.rst +4 -4
  3. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/changelog.rst +12 -0
  4. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/index.rst +4 -4
  5. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/provider.yaml +2 -1
  6. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/pyproject.toml +4 -4
  7. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/__init__.py +1 -1
  8. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/exceptions.py +1 -1
  9. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/hooks/databricks.py +1 -1
  10. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/hooks/databricks_base.py +2 -1
  11. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/hooks/databricks_sql.py +1 -1
  12. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/operators/databricks.py +1 -2
  13. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/operators/databricks_repos.py +1 -2
  14. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/operators/databricks_sql.py +1 -2
  15. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/operators/databricks_workflow.py +1 -2
  16. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/plugins/databricks_workflow.py +2 -2
  17. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/sensors/databricks.py +1 -2
  18. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/sensors/databricks_partition.py +1 -2
  19. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/sensors/databricks_sql.py +1 -2
  20. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/utils/databricks.py +1 -1
  21. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/utils/mixins.py +1 -1
  22. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/utils/openlineage.py +25 -63
  23. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/hooks/test_databricks.py +1 -1
  24. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/hooks/test_databricks_base.py +1 -1
  25. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/hooks/test_databricks_sql.py +3 -2
  26. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/operators/test_databricks.py +1 -1
  27. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/operators/test_databricks_copy.py +1 -1
  28. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/operators/test_databricks_repos.py +1 -1
  29. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/operators/test_databricks_workflow.py +1 -1
  30. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/plugins/test_databricks_workflow.py +1 -1
  31. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/sensors/test_databricks.py +1 -1
  32. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/sensors/test_databricks_partition.py +1 -1
  33. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/sensors/test_databricks_sql.py +1 -1
  34. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/utils/test_databricks.py +1 -1
  35. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/utils/test_mixins.py +1 -1
  36. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/utils/test_openlineage.py +16 -44
  37. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/LICENSE +0 -0
  38. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/NOTICE +0 -0
  39. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/.latest-doc-only-change.txt +0 -0
  40. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/commits.rst +0 -0
  41. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/conf.py +0 -0
  42. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/connections/databricks.rst +0 -0
  43. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/img/databricks_workflow_task_group_airflow_graph_view.png +0 -0
  44. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/img/workflow_plugin_launch_task.png +0 -0
  45. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/img/workflow_plugin_single_task.png +0 -0
  46. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/img/workflow_run_databricks_graph_view.png +0 -0
  47. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/installing-providers-from-sources.rst +0 -0
  48. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/integration-logos/Databricks.png +0 -0
  49. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/copy_into.rst +0 -0
  50. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/index.rst +0 -0
  51. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/jobs_create.rst +0 -0
  52. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/notebook.rst +0 -0
  53. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/repos_create.rst +0 -0
  54. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/repos_delete.rst +0 -0
  55. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/repos_update.rst +0 -0
  56. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/run_now.rst +0 -0
  57. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/sql.rst +0 -0
  58. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/sql_statements.rst +0 -0
  59. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/submit_run.rst +0 -0
  60. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/task.rst +0 -0
  61. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/operators/workflow.rst +0 -0
  62. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/plugins/index.rst +0 -0
  63. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/plugins/workflow.rst +0 -0
  64. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/docs/security.rst +0 -0
  65. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/__init__.py +0 -0
  66. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/__init__.py +0 -0
  67. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/get_provider_info.py +0 -0
  68. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
  69. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/operators/__init__.py +0 -0
  70. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
  71. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
  72. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
  73. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
  74. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/utils/__init__.py +0 -0
  75. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/src/airflow/providers/databricks/version_compat.py +0 -0
  76. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/conftest.py +0 -0
  77. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/__init__.py +0 -0
  78. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/databricks/__init__.py +0 -0
  79. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/databricks/example_databricks.py +0 -0
  80. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/databricks/example_databricks_repos.py +0 -0
  81. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/databricks/example_databricks_sensors.py +0 -0
  82. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/databricks/example_databricks_sql.py +0 -0
  83. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/system/databricks/example_databricks_workflow.py +0 -0
  84. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/__init__.py +0 -0
  85. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/__init__.py +0 -0
  86. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/hooks/__init__.py +0 -0
  87. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity.py +0 -0
  88. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity_async.py +0 -0
  89. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/operators/__init__.py +0 -0
  90. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/operators/test_databricks_sql.py +0 -0
  91. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/plugins/__init__.py +0 -0
  92. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/sensors/__init__.py +0 -0
  93. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/test_exceptions.py +0 -0
  94. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/triggers/__init__.py +0 -0
  95. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/triggers/test_databricks.py +0 -0
  96. {apache_airflow_providers_databricks-7.8.0rc1 → apache_airflow_providers_databricks-7.8.1rc1}/tests/unit/databricks/utils/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-databricks
- Version: 7.8.0rc1
+ Version: 7.8.1rc1
  Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
  Keywords: airflow-provider,databricks,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,7 +23,7 @@ Classifier: Topic :: System :: Monitoring
  License-File: LICENSE
  License-File: NOTICE
  Requires-Dist: apache-airflow>=2.11.0rc1
- Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
+ Requires-Dist: apache-airflow-providers-common-compat>=1.10.1rc1
  Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
  Requires-Dist: requests>=2.32.0,<3
  Requires-Dist: databricks-sql-connector>=4.0.0
@@ -40,8 +40,8 @@ Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openli
  Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
  Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.1
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -77,7 +77,7 @@ Provides-Extra: standard

  Package ``apache-airflow-providers-databricks``

- Release: ``7.8.0``
+ Release: ``7.8.1``


  `Databricks <https://databricks.com/>`__
@@ -90,7 +90,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/>`_.

  Installation
  ------------
@@ -108,7 +108,7 @@ Requirements
  PIP package                                Version required
  ========================================== ======================================
  ``apache-airflow``                         ``>=2.11.0``
- ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.10.1``
  ``apache-airflow-providers-common-sql``    ``>=1.27.0``
  ``requests``                               ``>=2.32.0,<3``
  ``databricks-sql-connector``               ``>=4.0.0``
@@ -156,5 +156,5 @@ Extra Dependencies
  ================== ================================================================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html>`_.

README.rst
@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-databricks``

- Release: ``7.8.0``
+ Release: ``7.8.1``


  `Databricks <https://databricks.com/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/>`_.

  Installation
  ------------
@@ -54,7 +54,7 @@ Requirements
  PIP package                                Version required
  ========================================== ======================================
  ``apache-airflow``                         ``>=2.11.0``
- ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.10.1``
  ``apache-airflow-providers-common-sql``    ``>=1.27.0``
  ``requests``                               ``>=2.32.0,<3``
  ``databricks-sql-connector``               ``>=4.0.0``
@@ -102,4 +102,4 @@ Extra Dependencies
  ================== ================================================================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html>`_.
docs/changelog.rst
@@ -26,6 +26,18 @@
  Changelog
  ---------

+ 7.8.1
+ .....
+
+ Misc
+ ~~~~
+
+ * ``chore: use OL macros instead of building OL ids from scratch (#59197)``
+ * ``Add backcompat for exceptions in providers (#58727)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+
  7.8.0
  .....

docs/index.rst
@@ -78,7 +78,7 @@ apache-airflow-providers-databricks package
  `Databricks <https://databricks.com/>`__


- Release: 7.8.0
+ Release: 7.8.1

  Provider package
  ----------------
@@ -102,7 +102,7 @@ The minimum Apache Airflow version supported by this provider distribution is ``
  PIP package                                Version required
  ========================================== ======================================
  ``apache-airflow``                         ``>=2.11.0``
- ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.10.1``
  ``apache-airflow-providers-common-sql``    ``>=1.27.0``
  ``requests``                               ``>=2.32.0,<3``
  ``databricks-sql-connector``               ``>=4.0.0``
@@ -142,5 +142,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-databricks 7.8.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0.tar.gz.sha512>`__)
- * `The apache-airflow-providers-databricks 7.8.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-databricks 7.8.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-databricks 7.8.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.1-py3-none-any.whl.sha512>`__)
provider.yaml
@@ -22,12 +22,13 @@ description: |
    `Databricks <https://databricks.com/>`__

  state: ready
- source-date-epoch: 1764109970
+ source-date-epoch: 1765298937
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+   - 7.8.1
    - 7.8.0
    - 7.7.5
    - 7.7.4
pyproject.toml
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-databricks"
- version = "7.8.0rc1"
+ version = "7.8.1rc1"
  description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
  readme = "README.rst"
  license = "Apache-2.0"
@@ -59,7 +59,7 @@ requires-python = ">=3.10"
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
      "apache-airflow>=2.11.0rc1",
-     "apache-airflow-providers-common-compat>=1.8.0rc1",
+     "apache-airflow-providers-common-compat>=1.10.1rc1",
      "apache-airflow-providers-common-sql>=1.27.0rc1",
      "requests>=2.32.0,<3",
      "databricks-sql-connector>=4.0.0",
@@ -134,8 +134,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html"
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.1"
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.1/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
src/airflow/providers/databricks/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "7.8.0"
+ __version__ = "7.8.1"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.11.0"
src/airflow/providers/databricks/exceptions.py
@@ -21,7 +21,7 @@

  from __future__ import annotations

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException


  class DatabricksSqlExecutionError(AirflowException):
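
Note on ``Add backcompat for exceptions in providers (#58727)``: this file and nearly every module below swap ``from airflow.exceptions import AirflowException`` for the stable re-export in ``airflow.providers.common.compat.sdk``. A minimal sketch of the idea behind such a shim, assuming the Airflow 3.x Task SDK export path (the real resolution logic lives in the common.compat provider, not here):

    # Hypothetical compat-shim sketch, not the provider's actual code.
    try:
        # Assumed: newer Airflow exposes the exception through the Task SDK.
        from airflow.sdk.exceptions import AirflowException
    except ImportError:
        # Older Airflow keeps it in the core package.
        from airflow.exceptions import AirflowException

Downstream code then imports the symbol from a single path, ``airflow.providers.common.compat.sdk``, and works unchanged on both Airflow 2.11 and 3.x.
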
src/airflow/providers/databricks/hooks/databricks.py
@@ -34,7 +34,7 @@ from typing import Any

  from requests import exceptions as requests_exceptions

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook

  GET_CLUSTER_ENDPOINT = ("GET", "2.0/clusters/get")
src/airflow/providers/databricks/hooks/databricks_base.py
@@ -49,7 +49,8 @@ from tenacity import (
  )

  from airflow import __version__
- from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
+ from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers_manager import ProvidersManager

  try:
src/airflow/providers/databricks/hooks/databricks_sql.py
@@ -34,7 +34,7 @@ from databricks import sql
  from databricks.sql.types import Row
  from sqlalchemy.engine import URL

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.common.sql.hooks.handlers import return_single_query_results
  from airflow.providers.common.sql.hooks.sql import DbApiHook
  from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
src/airflow/providers/databricks/operators/databricks.py
@@ -27,8 +27,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator, BaseOperatorLink, XCom
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, BaseOperatorLink, XCom
  from airflow.providers.databricks.hooks.databricks import (
      DatabricksHook,
      RunLifeCycleState,
src/airflow/providers/databricks/operators/databricks_repos.py
@@ -25,8 +25,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING
  from urllib.parse import urlsplit

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
  from airflow.providers.databricks.hooks.databricks import DatabricksHook

  if TYPE_CHECKING:
src/airflow/providers/databricks/operators/databricks_sql.py
@@ -27,8 +27,7 @@ from typing import TYPE_CHECKING, Any, ClassVar

  from databricks.sql.utils import ParamEscaper

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
  from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

src/airflow/providers/databricks/operators/databricks_workflow.py
@@ -25,8 +25,7 @@ from typing import TYPE_CHECKING, Any

  from mergedeep import merge

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator, TaskGroup
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, TaskGroup
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunLifeCycleState
  from airflow.providers.databricks.plugins.databricks_workflow import (
      WorkflowJobRepairAllFailedLink,
src/airflow/providers/databricks/plugins/databricks_workflow.py
@@ -20,11 +20,11 @@ from __future__ import annotations
  from typing import TYPE_CHECKING, Any
  from urllib.parse import unquote

- from airflow.exceptions import AirflowException, TaskInstanceNotFound
+ from airflow.exceptions import TaskInstanceNotFound
  from airflow.models.dagrun import DagRun
  from airflow.models.taskinstance import TaskInstance, TaskInstanceKey, clear_task_instances
  from airflow.plugins_manager import AirflowPlugin
- from airflow.providers.common.compat.sdk import BaseOperatorLink, TaskGroup, XCom
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperatorLink, TaskGroup, XCom
  from airflow.providers.databricks.hooks.databricks import DatabricksHook
  from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.utils.log.logging_mixin import LoggingMixin
src/airflow/providers/databricks/sensors/databricks.py
@@ -23,8 +23,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, SQLStatementState
  from airflow.providers.databricks.operators.databricks import DEFER_METHOD_NAME
  from airflow.providers.databricks.utils.mixins import DatabricksSQLStatementsMixin
src/airflow/providers/databricks/sensors/databricks_partition.py
@@ -27,8 +27,7 @@ from typing import TYPE_CHECKING, Any

  from databricks.sql.utils import ParamEscaper

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

src/airflow/providers/databricks/sensors/databricks_sql.py
@@ -24,8 +24,7 @@ from collections.abc import Callable, Iterable, Sequence
  from functools import cached_property
  from typing import TYPE_CHECKING, Any

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

src/airflow/providers/databricks/utils/databricks.py
@@ -17,7 +17,7 @@
  # under the License.
  from __future__ import annotations

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunState


src/airflow/providers/databricks/utils/mixins.py
@@ -26,7 +26,7 @@ from typing import (
      Protocol,
  )

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, SQLStatementState
  from airflow.providers.databricks.triggers.databricks import DatabricksSQLStatementExecutionTrigger

src/airflow/providers/databricks/utils/openlineage.py
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Any
  import requests

  from airflow.providers.common.compat.openlineage.check import require_openlineage_version
- from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.utils import timezone

  if TYPE_CHECKING:
@@ -37,60 +36,6 @@ if TYPE_CHECKING:
  log = logging.getLogger(__name__)


- def _get_logical_date(task_instance):
-     # todo: remove when min airflow version >= 3.0
-     if AIRFLOW_V_3_0_PLUS:
-         dagrun = task_instance.get_template_context()["dag_run"]
-         return dagrun.logical_date or dagrun.run_after
-
-     if hasattr(task_instance, "logical_date"):
-         date = task_instance.logical_date
-     else:
-         date = task_instance.execution_date
-
-     return date
-
-
- def _get_dag_run_clear_number(task_instance):
-     # todo: remove when min airflow version >= 3.0
-     if AIRFLOW_V_3_0_PLUS:
-         dagrun = task_instance.get_template_context()["dag_run"]
-         return dagrun.clear_number
-     return task_instance.dag_run.clear_number
-
-
- # todo: move this run_id logic into OpenLineage's listener to avoid differences
- def _get_ol_run_id(task_instance) -> str:
-     """
-     Get OpenLineage run_id from TaskInstance.
-
-     It's crucial that the task_instance's run_id creation logic matches OpenLineage's listener implementation.
-     Only then can we ensure that the generated run_id aligns with the Airflow task,
-     enabling a proper connection between events.
-     """
-     from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-     # Generate same OL run id as is generated for current task instance
-     return OpenLineageAdapter.build_task_instance_run_id(
-         dag_id=task_instance.dag_id,
-         task_id=task_instance.task_id,
-         logical_date=_get_logical_date(task_instance),
-         try_number=task_instance.try_number,
-         map_index=task_instance.map_index,
-     )
-
-
- # todo: move this run_id logic into OpenLineage's listener to avoid differences
- def _get_ol_dag_run_id(task_instance) -> str:
-     from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-     return OpenLineageAdapter.build_dag_run_id(
-         dag_id=task_instance.dag_id,
-         logical_date=_get_logical_date(task_instance),
-         clear_number=_get_dag_run_clear_number(task_instance),
-     )
-
-
  def _get_parent_run_facet(task_instance):
      """
      Retrieve the ParentRunFacet associated with a specific Airflow task instance.
@@ -101,22 +46,39 @@ def _get_parent_run_facet(task_instance):
      """
      from openlineage.client.facet_v2 import parent_run

-     from airflow.providers.openlineage.conf import namespace
+     from airflow.providers.openlineage.plugins.macros import (
+         lineage_job_name,
+         lineage_job_namespace,
+         lineage_root_job_name,
+         lineage_root_run_id,
+         lineage_run_id,
+     )
+
+     parent_run_id = lineage_run_id(task_instance)
+     parent_job_name = lineage_job_name(task_instance)
+     parent_job_namespace = lineage_job_namespace()
+
+     root_parent_run_id = lineage_root_run_id(task_instance)
+     rot_parent_job_name = lineage_root_job_name(task_instance)
+
+     try:  # Added in OL provider 2.9.0, try to use it if possible
+         from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace

-     parent_run_id = _get_ol_run_id(task_instance)
-     root_parent_run_id = _get_ol_dag_run_id(task_instance)
+         root_parent_job_namespace = lineage_root_job_namespace(task_instance)
+     except ImportError:
+         root_parent_job_namespace = lineage_job_namespace()

      return parent_run.ParentRunFacet(
          run=parent_run.Run(runId=parent_run_id),
          job=parent_run.Job(
-             namespace=namespace(),
-             name=f"{task_instance.dag_id}.{task_instance.task_id}",
+             namespace=parent_job_namespace,
+             name=parent_job_name,
          ),
          root=parent_run.Root(
              run=parent_run.RootRun(runId=root_parent_run_id),
              job=parent_run.RootJob(
-                 name=task_instance.dag_id,
-                 namespace=namespace(),
+                 name=rot_parent_job_name,
+                 namespace=root_parent_job_namespace,
              ),
          ),
      )
@@ -209,7 +171,7 @@ def _create_ol_event_pair(
      return start, end


- @require_openlineage_version(provider_min_version="2.3.0")
+ @require_openlineage_version(provider_min_version="2.5.0")
  def emit_openlineage_events_for_databricks_queries(
      task_instance,
      hook: DatabricksSqlHook | DatabricksHook | None = None,
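
Context for ``chore: use OL macros instead of building OL ids from scratch (#59197)``: the deleted helpers duplicated the OpenLineage listener's run-id derivation and could drift from it, so the rewritten ``_get_parent_run_facet`` now delegates to the OpenLineage provider's public macros, and the decorator bump raises the required OpenLineage provider to 2.5.0. A hedged usage sketch of those macros (the macro names and signatures are the OpenLineage provider's documented API; the wrapper function is illustrative only, not code from this package):

    # Illustrative only: the macros yield the same coordinates the OpenLineage
    # listener emits for a task instance, so externally built events can attach
    # to the correct parent run.
    from airflow.providers.openlineage.plugins.macros import (
        lineage_job_name,
        lineage_job_namespace,
        lineage_run_id,
    )

    def parent_coordinates(task_instance):
        return {
            "namespace": lineage_job_namespace(),     # namespace from OL config
            "name": lineage_job_name(task_instance),  # "<dag_id>.<task_id>"
            "runId": lineage_run_id(task_instance),   # listener-compatible run UUID
        }

The ``try``/``except ImportError`` around ``lineage_root_job_namespace`` keeps the function working with OpenLineage providers older than 2.9.0, where that macro does not exist yet.
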
tests/unit/databricks/hooks/test_databricks.py
@@ -33,8 +33,8 @@ from azure.core.credentials import AccessToken
  from requests import exceptions as requests_exceptions
  from requests.auth import HTTPBasicAuth

- from airflow.exceptions import AirflowException
  from airflow.models import Connection
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import (
      GET_RUN_ENDPOINT,
      SUBMIT_RUN_ENDPOINT,
tests/unit/databricks/hooks/test_databricks_base.py
@@ -28,8 +28,8 @@ from requests import exceptions as requests_exceptions
  from requests.auth import HTTPBasicAuth
  from tenacity import AsyncRetrying, Future, RetryError, retry_if_exception, stop_after_attempt, wait_fixed

- from airflow.exceptions import AirflowException
  from airflow.models import Connection
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks_base import (
      DEFAULT_AZURE_CREDENTIAL_SETTING_KEY,
      DEFAULT_DATABRICKS_SCOPE,
tests/unit/databricks/hooks/test_databricks_sql.py
@@ -30,8 +30,9 @@ import polars as pl
  import pytest
  from databricks.sql.types import Row

- from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
+ from airflow.exceptions import AirflowOptionalProviderFeatureException
  from airflow.models import Connection
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook, create_timeout_thread

@@ -569,7 +570,7 @@ def test_get_openlineage_database_specific_lineage_with_old_openlineage_provider
      hook.get_openlineage_database_info = lambda x: mock.MagicMock(authority="auth", scheme="scheme")

      expected_err = (
-         "OpenLineage provider version `1.99.0` is lower than required `2.3.0`, "
+         "OpenLineage provider version `1.99.0` is lower than required `2.5.0`, "
          "skipping function `emit_openlineage_events_for_databricks_queries` execution"
      )
      with pytest.raises(AirflowOptionalProviderFeatureException, match=expected_err):
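
The updated expectation mirrors the ``require_openlineage_version`` bump in ``utils/openlineage.py``. A minimal sketch of how the guard behaves, inferred from this test (the decorator, keyword, and exception names come from the diff; the guarded function below is a stand-in):

    from airflow.providers.common.compat.openlineage.check import require_openlineage_version

    @require_openlineage_version(provider_min_version="2.5.0")
    def emit_lineage(task_instance, query_ids):
        """Stand-in for a guarded helper; runs only with a new-enough OL provider."""
        ...

    # With apache-airflow-providers-openlineage < 2.5.0 installed, calling
    # emit_lineage() raises AirflowOptionalProviderFeatureException carrying the
    # "is lower than required" message asserted above.
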
tests/unit/databricks/operators/test_databricks.py
@@ -28,13 +28,13 @@ import pytest
  # Do not run the tests when FAB / Flask is not installed
  pytest.importorskip("flask_session")

- from airflow.exceptions import AirflowException, TaskDeferred
  from airflow.models import DAG
  from airflow.providers.common.compat.openlineage.facet import (
      Dataset,
      ExternalQueryRunFacet,
      SQLJobFacet,
  )
+ from airflow.providers.common.compat.sdk import AirflowException, TaskDeferred
  from airflow.providers.databricks.hooks.databricks import RunState, SQLStatementState
  from airflow.providers.databricks.operators.databricks import (
      DatabricksCreateJobsOperator,
tests/unit/databricks/operators/test_databricks_copy.py
@@ -21,12 +21,12 @@ from unittest import mock

  import pytest

- from airflow.exceptions import AirflowException
  from airflow.providers.common.compat.openlineage.facet import (
      Dataset,
      ExternalQueryRunFacet,
      SQLJobFacet,
  )
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.operators.databricks_sql import DatabricksCopyIntoOperator
  from airflow.providers.openlineage.extractors import OperatorLineage

tests/unit/databricks/operators/test_databricks_repos.py
@@ -21,7 +21,7 @@ from unittest import mock

  import pytest

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.operators.databricks_repos import (
      DatabricksReposCreateOperator,
      DatabricksReposDeleteOperator,
tests/unit/databricks/operators/test_databricks_workflow.py
@@ -25,8 +25,8 @@ import pytest
  pytest.importorskip("flask_session")

  from airflow import DAG
- from airflow.exceptions import AirflowException
  from airflow.models.baseoperator import BaseOperator
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import RunLifeCycleState
  from airflow.providers.databricks.operators.databricks_workflow import (
      DatabricksWorkflowTaskGroup,
tests/unit/databricks/plugins/test_databricks_workflow.py
@@ -34,10 +34,10 @@ if AIRFLOW_V_3_0_PLUS:

  from flask import url_for

- from airflow.exceptions import AirflowException
  from airflow.models.dagrun import DagRun
  from airflow.models.taskinstance import TaskInstanceKey
  from airflow.plugins_manager import AirflowPlugin
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.plugins.databricks_workflow import (
      DatabricksWorkflowPlugin,
      RepairDatabricksTasks,
tests/unit/databricks/sensors/test_databricks.py
@@ -21,7 +21,7 @@ from unittest import mock

  import pytest

- from airflow.exceptions import AirflowException, TaskDeferred
+ from airflow.providers.common.compat.sdk import AirflowException, TaskDeferred
  from airflow.providers.databricks.hooks.databricks import SQLStatementState
  from airflow.providers.databricks.sensors.databricks import DatabricksSQLStatementsSensor
  from airflow.providers.databricks.triggers.databricks import DatabricksSQLStatementExecutionTrigger
tests/unit/databricks/sensors/test_databricks_partition.py
@@ -23,8 +23,8 @@ from unittest.mock import patch

  import pytest

- from airflow.exceptions import AirflowException
  from airflow.models import DAG
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
  from airflow.providers.databricks.sensors.databricks_partition import DatabricksPartitionSensor
  from airflow.utils import timezone
tests/unit/databricks/sensors/test_databricks_sql.py
@@ -23,8 +23,8 @@ from unittest.mock import patch

  import pytest

- from airflow.exceptions import AirflowException
  from airflow.models import DAG
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.sensors.databricks_sql import DatabricksSqlSensor
  from airflow.utils import timezone

tests/unit/databricks/utils/test_databricks.py
@@ -22,7 +22,7 @@ from unittest.mock import MagicMock

  import pytest

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import RunState
  from airflow.providers.databricks.utils.databricks import (
      extract_failed_task_errors,
tests/unit/databricks/utils/test_mixins.py
@@ -21,7 +21,7 @@ from unittest.mock import MagicMock

  import pytest

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.utils.mixins import DatabricksSQLStatementsMixin

tests/unit/databricks/utils/test_openlineage.py
@@ -34,7 +34,6 @@ from airflow.providers.databricks.hooks.databricks import DatabricksHook
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
  from airflow.providers.databricks.utils.openlineage import (
      _create_ol_event_pair,
-     _get_ol_run_id,
      _get_parent_run_facet,
      _get_queries_details_from_databricks,
      _process_data_from_api,
@@ -46,40 +45,9 @@ from airflow.utils import timezone
  from airflow.utils.state import TaskInstanceState


- def test_get_ol_run_id_ti_success():
-     logical_date = timezone.datetime(2025, 1, 1)
-     mock_ti = mock.MagicMock(
-         dag_id="dag_id",
-         task_id="task_id",
-         map_index=1,
-         try_number=1,
-         logical_date=logical_date,
-         state=TaskInstanceState.SUCCESS,
-     )
-     mock_ti.get_template_context.return_value = {"dag_run": mock.MagicMock(logical_date=logical_date)}
-
-     result = _get_ol_run_id(mock_ti)
-     assert result == "01941f29-7c00-7087-8906-40e512c257bd"
-
-
- def test_get_ol_run_id_ti_failed():
-     logical_date = timezone.datetime(2025, 1, 1)
-     mock_ti = mock.MagicMock(
-         dag_id="dag_id",
-         task_id="task_id",
-         map_index=1,
-         try_number=1,
-         logical_date=logical_date,
-         state=TaskInstanceState.FAILED,
-     )
-     mock_ti.get_template_context.return_value = {"dag_run": mock.MagicMock(logical_date=logical_date)}
-
-     result = _get_ol_run_id(mock_ti)
-     assert result == "01941f29-7c00-7087-8906-40e512c257bd"
-
-
  def test_get_parent_run_facet():
      logical_date = timezone.datetime(2025, 1, 1)
+     dr = mock.MagicMock(logical_date=logical_date, clear_number=0)
      mock_ti = mock.MagicMock(
          dag_id="dag_id",
          task_id="task_id",
@@ -87,14 +55,18 @@ def test_get_parent_run_facet():
          try_number=1,
          logical_date=logical_date,
          state=TaskInstanceState.SUCCESS,
+         dag_run=dr,
      )
-     mock_ti.get_template_context.return_value = {"dag_run": mock.MagicMock(logical_date=logical_date)}
+     mock_ti.get_template_context.return_value = {"dag_run": dr}

      result = _get_parent_run_facet(mock_ti)

      assert result.run.runId == "01941f29-7c00-7087-8906-40e512c257bd"
      assert result.job.namespace == namespace()
      assert result.job.name == "dag_id.task_id"
+     assert result.root.run.runId == "01941f29-7c00-743e-b109-28b18d0a19c5"
+     assert result.root.job.namespace == namespace()
+     assert result.root.job.name == "dag_id"


  def test_run_api_call_success():
@@ -283,7 +255,7 @@ def test_create_ol_event_pair_success(mock_generate_uuid, is_successful):
      assert start_event.job == end_event.job


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  @mock.patch("openlineage.client.uuid.generate_new_uuid")
  def test_emit_openlineage_events_for_databricks_queries(mock_generate_uuid, mock_version, time_machine):
      fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
@@ -520,7 +492,7 @@ def test_emit_openlineage_events_for_databricks_queries(mock_generate_uuid, mock
      assert fake_adapter.emit.call_args_list == expected_calls


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  @mock.patch("openlineage.client.uuid.generate_new_uuid")
  def test_emit_openlineage_events_for_databricks_queries_without_metadata(
      mock_generate_uuid, mock_version, time_machine
@@ -638,7 +610,7 @@ def test_emit_openlineage_events_for_databricks_queries_without_metadata(
      assert fake_adapter.emit.call_args_list == expected_calls


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  @mock.patch("openlineage.client.uuid.generate_new_uuid")
  def test_emit_openlineage_events_for_databricks_queries_without_explicit_query_ids(
      mock_generate_uuid, mock_version, time_machine
@@ -760,7 +732,7 @@ test_emit_openlineage_events_for_databricks_queries_without_explicit_query_i
  @mock.patch(
      "airflow.providers.openlineage.sqlparser.SQLParser.create_namespace", return_value="databricks_ns"
  )
- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  @mock.patch("openlineage.client.uuid.generate_new_uuid")
  def test_emit_openlineage_events_for_databricks_queries_without_explicit_query_ids_and_namespace(
      mock_generate_uuid, mock_version, mock_parser, time_machine
@@ -878,7 +850,7 @@ test_emit_openlineage_events_for_databricks_queries_without_explicit_query_i
      assert fake_adapter.emit.call_args_list == expected_calls


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  @mock.patch("openlineage.client.uuid.generate_new_uuid")
  def test_emit_openlineage_events_for_databricks_queries_without_explicit_query_ids_and_namespace_raw_ns(
      mock_generate_uuid, mock_version, time_machine
@@ -997,7 +969,7 @@ test_emit_openlineage_events_for_databricks_queries_without_explicit_query_i
      assert fake_adapter.emit.call_args_list == expected_calls


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  @mock.patch("openlineage.client.uuid.generate_new_uuid")
  def test_emit_openlineage_events_for_databricks_queries_ith_query_ids_and_hook_query_ids(
      mock_generate_uuid, mock_version, time_machine
@@ -1117,7 +1089,7 @@ test_emit_openlineage_events_for_databricks_queries_ith_query_ids_and_hook_q
      assert fake_adapter.emit.call_args_list == expected_calls


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  def test_emit_openlineage_events_for_databricks_queries_missing_query_ids_and_hook(mock_version):
      query_ids = []
      original_query_ids = copy.deepcopy(query_ids)
@@ -1142,7 +1114,7 @@ test_emit_openlineage_events_for_databricks_queries_missing_query_ids_and_ho
      fake_adapter.emit.assert_not_called()  # No events should be emitted


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  def test_emit_openlineage_events_for_databricks_queries_missing_query_namespace_and_hook(mock_version):
      query_ids = ["1", "2"]
      original_query_ids = copy.deepcopy(query_ids)
@@ -1168,7 +1140,7 @@ test_emit_openlineage_events_for_databricks_queries_missing_query_namespace_
      fake_adapter.emit.assert_not_called()  # No events should be emitted


- @mock.patch("importlib.metadata.version", return_value="2.3.0")
+ @mock.patch("importlib.metadata.version", return_value="3.0.0")
  def test_emit_openlineage_events_for_databricks_queries_missing_hook_and_query_for_extra_metadata_true(
      mock_version,
  ):
@@ -1213,7 +1185,7 @@ def test_emit_openlineage_events_with_old_openlineage_provider(mock_version):
          return_value=fake_listener,
      ):
          expected_err = (
-             "OpenLineage provider version `1.99.0` is lower than required `2.3.0`, "
+             "OpenLineage provider version `1.99.0` is lower than required `2.5.0`, "
              "skipping function `emit_openlineage_events_for_databricks_queries` execution"
          )