apache-airflow-providers-databricks 7.0.0rc2__tar.gz → 7.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apache-airflow-providers-databricks might be problematic.
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0}/PKG-INFO +17 -36
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0}/README.rst +8 -27
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0}/pyproject.toml +35 -20
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/LICENSE +0 -52
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/__init__.py +1 -1
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/get_provider_info.py +19 -31
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/hooks/databricks_sql.py +2 -3
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks.py +26 -19
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks_repos.py +5 -1
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks_sql.py +1 -2
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks_workflow.py +1 -1
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/plugins/databricks_workflow.py +7 -15
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/sensors/databricks_partition.py +6 -3
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/sensors/databricks_sql.py +5 -1
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/exceptions.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/hooks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/hooks/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/hooks/databricks_base.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/plugins/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/sensors/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/triggers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/triggers/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/utils/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/utils/databricks.py +0 -0
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-databricks
-Version: 7.0
+Version: 7.2.0
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,20 +20,19 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist:
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.
-Requires-Dist:
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
+Requires-Dist: requests>=2.27.0,<3
 Requires-Dist: databricks-sql-connector>=3.0.0
+Requires-Dist: aiohttp>=3.9.2, <4
 Requires-Dist: mergedeep>=1.3.4
-Requires-Dist: pandas>=1.
-Requires-Dist: pandas>=2.1.2,<2.2;python_version>="3.9"
+Requires-Dist: pandas>=2.1.2,<2.2
 Requires-Dist: pyarrow>=14.0.1
-Requires-Dist: requests>=2.27.0,<3
 Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -42,23 +41,6 @@ Provides-Extra: azure-identity
 Provides-Extra: sdk


-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   .. http://www.apache.org/licenses/LICENSE-2.0
-
-   .. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
@@ -76,8 +58,7 @@ Provides-Extra: sdk
    specific language governing permissions and limitations
    under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-   OVERWRITTEN WHEN PREPARING PACKAGES.
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -85,7 +66,7 @@ Provides-Extra: sdk

 Package ``apache-airflow-providers-databricks``

-Release: ``7.
+Release: ``7.2.0``


 `Databricks <https://databricks.com/>`__
@@ -98,7 +79,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/>`_.

 Installation
 ------------
@@ -112,19 +93,18 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
 Requirements
 ------------

-=======================================
+=======================================  ==================
 PIP package                              Version required
-=======================================
+=======================================  ==================
 ``apache-airflow``                       ``>=2.9.0``
 ``apache-airflow-providers-common-sql``  ``>=1.20.0``
 ``requests``                             ``>=2.27.0,<3``
 ``databricks-sql-connector``             ``>=3.0.0``
 ``aiohttp``                              ``>=3.9.2,<4``
 ``mergedeep``                            ``>=1.3.4``
-``pandas``                               ``>=2.1.2,<2.2
-``pandas``                               ``>=1.5.3,<2.2; python_version < "3.9"``
+``pandas``                               ``>=2.1.2,<2.2``
 ``pyarrow``                              ``>=14.0.1``
-=======================================
+=======================================  ==================

 Cross provider package dependencies
 -----------------------------------
@@ -146,4 +126,5 @@ Dependent package
 ============================================================================================================ ==============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html>`_.
+
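In the metadata above, aiohttp is added as a direct requirement, the requests pin moves up into the main block, and the per-Python-version pandas markers collapse into a single pandas>=2.1.2,<2.2 pin. A minimal sketch for reading this metadata back from an installed environment, assuming the 7.2.0 wheel is installed (the distribution name is the only identifier taken from this diff):

# Minimal sketch: read back the metadata shown in this PKG-INFO from an installed environment.
# Assumes apache-airflow-providers-databricks 7.2.0 is installed; printed values are illustrative.
from importlib.metadata import requires, version

dist = "apache-airflow-providers-databricks"
print(version(dist))                 # expected "7.2.0"
for req in requires(dist) or []:
    # Mirrors the Requires-Dist lines above, including extras such as
    # 'databricks-sdk==0.10.0; extra == "sdk"'.
    print(req)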
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0}/README.rst
RENAMED
@@ -1,21 +1,4 @@

-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-   .. http://www.apache.org/licenses/LICENSE-2.0
-
-   .. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
@@ -33,8 +16,7 @@
    specific language governing permissions and limitations
    under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-   OVERWRITTEN WHEN PREPARING PACKAGES.
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -42,7 +24,7 @@

 Package ``apache-airflow-providers-databricks``

-Release: ``7.
+Release: ``7.2.0``


 `Databricks <https://databricks.com/>`__
@@ -55,7 +37,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/>`_.

 Installation
 ------------
@@ -69,19 +51,18 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
 Requirements
 ------------

-=======================================
+=======================================  ==================
 PIP package                              Version required
-=======================================
+=======================================  ==================
 ``apache-airflow``                       ``>=2.9.0``
 ``apache-airflow-providers-common-sql``  ``>=1.20.0``
 ``requests``                             ``>=2.27.0,<3``
 ``databricks-sql-connector``             ``>=3.0.0``
 ``aiohttp``                              ``>=3.9.2,<4``
 ``mergedeep``                            ``>=1.3.4``
-``pandas``                               ``>=2.1.2,<2.2
-``pandas``                               ``>=1.5.3,<2.2; python_version < "3.9"``
+``pandas``                               ``>=2.1.2,<2.2``
 ``pyarrow``                              ``>=14.0.1``
-=======================================
+=======================================  ==================

 Cross provider package dependencies
 -----------------------------------
@@ -103,4 +84,4 @@ Dependent package
 ============================================================================================================ ==============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html>`_.
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0}/pyproject.toml
RENAMED
@@ -1,4 +1,3 @@
-
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -16,10 +15,9 @@
 # specific language governing permissions and limitations
 # under the License.

-# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-# OVERWRITTEN WHEN PREPARING PACKAGES.
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

-# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
 requires = ["flit_core==3.10.1"]
@@ -27,7 +25,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-databricks"
-version = "7.
+version = "7.2.0"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -53,21 +51,41 @@ classifiers = [
     "Topic :: System :: Monitoring",
 ]
 requires-python = "~=3.9"
+
+# The dependencies should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
 dependencies = [
-    "
-    "apache-airflow-providers-common-sql>=1.20.
-    "
+    "apache-airflow>=2.9.0",
+    "apache-airflow-providers-common-sql>=1.20.0",
+    "requests>=2.27.0,<3",
     "databricks-sql-connector>=3.0.0",
+    "aiohttp>=3.9.2, <4",
     "mergedeep>=1.3.4",
-    "pandas>=1.
-    "pandas>=2.1.2,<2.2;python_version>=\"3.9\"",
+    "pandas>=2.1.2,<2.2",
     "pyarrow>=14.0.1",
-
+]
+
+# The optional dependencies should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
+[project.optional-dependencies]
+# pip install apache-airflow-providers-databricks[sdk]
+"sdk" = [
+    "databricks-sdk==0.10.0",
+]
+"azure-identity" = [
+    "azure-identity>=1.3.1",
+]
+
+# The dependency groups should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
+[dependency-groups]
+dev = [
+    "deltalake>=0.12.0",
 ]

 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -76,15 +94,12 @@ dependencies = [

 [project.entry-points."apache_airflow_provider"]
 provider_info = "airflow.providers.databricks.get_provider_info:get_provider_info"
+
 [project.entry-points."airflow.plugins"]
 databricks_workflow = "airflow.providers.databricks.plugins.databricks_workflow:DatabricksWorkflowPlugin"
-[project.optional-dependencies]
-"sdk" = [
-    "databricks-sdk==0.10.0",
-]
-"azure-identity" = [
-    "azure-identity>=1.3.1",
-]

 [tool.flit.module]
 name = "airflow.providers.databricks"
+
+[tool.pytest.ini_options]
+ignore = "tests/system/"
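With 7.2.0 the runtime dependencies, the optional extras, and a dev dependency group are declared inline in pyproject.toml rather than only in get_provider_info.py. A small sketch for reading those sections back, assuming Python 3.11+ (stdlib tomllib) and that the script runs next to this pyproject.toml:

# Sketch: load the dependency sections shown above from the generated pyproject.toml.
import tomllib

with open("pyproject.toml", "rb") as f:
    data = tomllib.load(f)

print(data["project"]["dependencies"])           # e.g. "pandas>=2.1.2,<2.2", "aiohttp>=3.9.2, <4"
print(data["project"]["optional-dependencies"])  # {"sdk": [...], "azure-identity": [...]}
print(data["dependency-groups"]["dev"])          # ["deltalake>=0.12.0"]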
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/LICENSE
RENAMED
@@ -199,55 +199,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-
-============================================================================
-   APACHE AIRFLOW SUBCOMPONENTS:
-
-   The Apache Airflow project contains subcomponents with separate copyright
-   notices and license terms. Your use of the source code for the these
-   subcomponents is subject to the terms and conditions of the following
-   licenses.
-
-
-========================================================================
-Third party Apache 2.0 licenses
-========================================================================
-
-The following components are provided under the Apache 2.0 License.
-See project link for details. The text of each license is also included
-at 3rd-party-licenses/LICENSE-[project].txt.
-
-(ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
-(ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
-(ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
-(ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
-
-========================================================================
-MIT licenses
-========================================================================
-
-The following components are provided under the MIT License. See project link for details.
-The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
-(MIT License) jquery v3.5.1 (https://jquery.org/license/)
-(MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
-(MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
-(MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
-(MIT License) dataTables v1.10.25 (https://datatables.net)
-(MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
-(MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
-(MIT License) MomentJS v2.24.0 (http://momentjs.com/)
-(MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
-
-========================================================================
-BSD 3-Clause licenses
-========================================================================
-The following components are provided under the BSD 3-Clause license. See project links for details.
-The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
-(BSD 3 License) d3 v5.16.0 (https://d3js.org)
-(BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
-(BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
-
-========================================================================
-See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/__init__.py
RENAMED
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "7.
+__version__ = "7.2.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
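The only change in __init__.py is the version bump, but the surrounding context shows the provider's minimum-Airflow guard. A sketch of how such a guard typically reads in full; the RuntimeError branch is an assumption, since the hunk only shows the comparison:

# Sketch of the guard visible above; only the comparison lines appear in the diff,
# so the raise below is an assumed (typical) completion, not taken from this package.
import packaging.version

from airflow import __version__ as airflow_version

__version__ = "7.2.0"

if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
    "2.9.0"
):
    raise RuntimeError(
        f"The package apache-airflow-providers-databricks=={__version__} requires Apache Airflow 2.9.0+"
    )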
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/get_provider_info.py
RENAMED
@@ -15,8 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.

-# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-# OVERWRITTEN WHEN PREPARING PACKAGES.
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 #
 # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
 # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -28,8 +27,9 @@ def get_provider_info():
         "name": "Databricks",
         "description": "`Databricks <https://databricks.com/>`__\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1739959376,
         "versions": [
+            "7.2.0",
             "7.0.0",
             "6.13.0",
             "6.12.0",
@@ -77,30 +77,6 @@ def get_provider_info():
             "1.0.1",
             "1.0.0",
         ],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-sql>=1.20.0",
-            "requests>=2.27.0,<3",
-            "databricks-sql-connector>=3.0.0",
-            "aiohttp>=3.9.2, <4",
-            "mergedeep>=1.3.4",
-            'pandas>=2.1.2,<2.2;python_version>="3.9"',
-            'pandas>=1.5.3,<2.2;python_version<"3.9"',
-            "pyarrow>=14.0.1",
-        ],
-        "additional-extras": [
-            {
-                "name": "sdk",
-                "description": "Install Databricks SDK",
-                "dependencies": ["databricks-sdk==0.10.0"],
-            },
-            {
-                "name": "azure-identity",
-                "description": "Install Azure Identity client library",
-                "dependencies": ["azure-identity>=1.3.1"],
-            },
-        ],
-        "devel-dependencies": ["deltalake>=0.12.0"],
         "integrations": [
             {
                 "integration-name": "Databricks",
@@ -112,7 +88,7 @@ def get_provider_info():
                     "/docs/apache-airflow-providers-databricks/operators/run_now.rst",
                     "/docs/apache-airflow-providers-databricks/operators/task.rst",
                 ],
-                "logo": "/integration-logos/
+                "logo": "/docs/integration-logos/Databricks.png",
                 "tags": ["service"],
             },
             {
@@ -122,7 +98,6 @@ def get_provider_info():
                     "/docs/apache-airflow-providers-databricks/operators/sql.rst",
                     "/docs/apache-airflow-providers-databricks/operators/copy_into.rst",
                 ],
-                "logo": "/integration-logos/databricks/Databricks.png",
                 "tags": ["service"],
             },
             {
@@ -133,14 +108,12 @@ def get_provider_info():
                     "/docs/apache-airflow-providers-databricks/operators/repos_update.rst",
                     "/docs/apache-airflow-providers-databricks/operators/repos_delete.rst",
                 ],
-                "logo": "/integration-logos/databricks/Databricks.png",
                 "tags": ["service"],
             },
             {
                 "integration-name": "Databricks Workflow",
                 "external-doc-url": "https://docs.databricks.com/en/workflows/index.html",
                 "how-to-guide": ["/docs/apache-airflow-providers-databricks/operators/workflow.rst"],
-                "logo": "/integration-logos/databricks/Databricks.png",
                 "tags": ["service"],
             },
         ],
@@ -203,4 +176,19 @@ def get_provider_info():
             }
         ],
         "extra-links": ["airflow.providers.databricks.operators.databricks.DatabricksJobRunLink"],
+        "dependencies": [
+            "apache-airflow>=2.9.0",
+            "apache-airflow-providers-common-sql>=1.20.0",
+            "requests>=2.27.0,<3",
+            "databricks-sql-connector>=3.0.0",
+            "aiohttp>=3.9.2, <4",
+            "mergedeep>=1.3.4",
+            "pandas>=2.1.2,<2.2",
+            "pyarrow>=14.0.1",
+        ],
+        "optional-dependencies": {
+            "sdk": ["databricks-sdk==0.10.0"],
+            "azure-identity": ["azure-identity>=1.3.1"],
+        },
+        "devel-dependencies": ["deltalake>=0.12.0"],
     }
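get_provider_info() now carries the dependency, extras, and devel metadata at the end of the returned dict (with optional-dependencies replacing the old additional-extras list). The dict is exposed to Airflow through the apache_airflow_provider entry point declared in pyproject.toml; a minimal sketch of reading it back, assuming Python 3.10+ entry-point selection and an environment with the provider installed:

# Sketch: resolve the provider-info dict via the entry point declared in pyproject.toml.
from importlib.metadata import entry_points

for ep in entry_points(group="apache_airflow_provider"):
    provider_info = ep.load()()          # ep.load() returns get_provider_info; call it for the dict
    if provider_info.get("name") == "Databricks":
        print(provider_info["versions"][0])            # "7.2.0"
        print(provider_info["optional-dependencies"])  # {"sdk": [...], "azure-identity": [...]}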
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/hooks/databricks_sql.py
RENAMED
@@ -31,14 +31,13 @@ from typing import (
     overload,
 )

-from databricks import sql  # type: ignore[attr-defined]
-from databricks.sql.types import Row
-
 from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection as AirflowConnection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
 from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
 from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
+from databricks import sql  # type: ignore[attr-defined]
+from databricks.sql.types import Row

 if TYPE_CHECKING:
     from databricks.sql.client import Connection
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks.py
RENAMED
@@ -19,6 +19,7 @@

 from __future__ import annotations

+import hashlib
 import time
 from abc import ABC, abstractmethod
 from collections.abc import Sequence
@@ -966,6 +967,8 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):

     :param caller: The name of the caller operator to be used in the logs.
     :param databricks_conn_id: The name of the Airflow connection to use.
+    :param databricks_task_key: An optional task_key used to refer to the task by Databricks API. By
+        default this will be set to the hash of ``dag_id + task_id``.
     :param databricks_retry_args: An optional dictionary with arguments passed to ``tenacity.Retrying`` class.
     :param databricks_retry_delay: Number of seconds to wait between retries.
     :param databricks_retry_limit: Amount of times to retry if the Databricks backend is unreachable.
@@ -986,6 +989,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
         self,
         caller: str = "DatabricksTaskBaseOperator",
         databricks_conn_id: str = "databricks_default",
+        databricks_task_key: str = "",
         databricks_retry_args: dict[Any, Any] | None = None,
         databricks_retry_delay: int = 1,
         databricks_retry_limit: int = 3,
@@ -1000,6 +1004,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
     ):
         self.caller = caller
         self.databricks_conn_id = databricks_conn_id
+        self._databricks_task_key = databricks_task_key
         self.databricks_retry_args = databricks_retry_args
         self.databricks_retry_delay = databricks_retry_delay
         self.databricks_retry_limit = databricks_retry_limit
@@ -1037,17 +1042,21 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
             caller=caller,
         )

-
-
-
-
-
-
-
-
-        task_id
+    @cached_property
+    def databricks_task_key(self) -> str:
+        return self._generate_databricks_task_key()
+
+    def _generate_databricks_task_key(self, task_id: str | None = None) -> str:
+        """Create a databricks task key using the hash of dag_id and task_id."""
+        if not self._databricks_task_key or len(self._databricks_task_key) > 100:
+            self.log.info(
+                "databricks_task_key has not be provided or the provided one exceeds 100 characters and will be truncated by the Databricks API. This will cause failure when trying to monitor the task. A task_key will be generated using the hash value of dag_id+task_id"
             )
-
+            task_id = task_id or self.task_id
+            task_key = f"{self.dag_id}__{task_id}".encode()
+            self._databricks_task_key = hashlib.md5(task_key).hexdigest()
+            self.log.info("Generated databricks task_key: %s", self._databricks_task_key)
+        return self._databricks_task_key

     @property
     def _databricks_workflow_task_group(self) -> DatabricksWorkflowTaskGroup | None:
@@ -1077,7 +1086,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
     def _get_run_json(self) -> dict[str, Any]:
         """Get run json to be used for task submissions."""
         run_json = {
-            "run_name": self.
+            "run_name": self.databricks_task_key,
             **self._get_task_base_json(),
         }
         if self.new_cluster and self.existing_cluster_id:
@@ -1090,7 +1099,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
             raise ValueError("Must specify either existing_cluster_id or new_cluster.")
         return run_json

-    def _launch_job(self, context: Context | None = None) -> int:
+    def _launch_job(self, context: Context | None = None) -> int | None:
         """Launch the job on Databricks."""
         run_json = self._get_run_json()
         self.databricks_run_id = self._hook.submit_run(run_json)
@@ -1127,9 +1136,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
         # building the {task_key: task} map below.
         sorted_task_runs = sorted(tasks, key=lambda x: x["start_time"])

-        return {task["task_key"]: task for task in sorted_task_runs}[
-            self._get_databricks_task_id(self.task_id)
-        ]
+        return {task["task_key"]: task for task in sorted_task_runs}[self.databricks_task_key]

     def _convert_to_databricks_workflow_task(
         self, relevant_upstreams: list[BaseOperator], context: Context | None = None
@@ -1137,9 +1144,9 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
         """Convert the operator to a Databricks workflow task that can be a task in a workflow."""
         base_task_json = self._get_task_base_json()
         result = {
-            "task_key": self.
+            "task_key": self.databricks_task_key,
             "depends_on": [
-                {"task_key": self.
+                {"task_key": self._generate_databricks_task_key(task_id)}
                 for task_id in self.upstream_task_ids
                 if task_id in relevant_upstreams
             ],
@@ -1172,7 +1179,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
         run_state = RunState(**run["state"])
         self.log.info(
             "Current state of the the databricks task %s is %s",
-            self.
+            self.databricks_task_key,
             run_state.life_cycle_state,
         )
         if self.deferrable and not run_state.is_terminal:
@@ -1194,7 +1201,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
         run_state = RunState(**run["state"])
         self.log.info(
             "Current state of the databricks task %s is %s",
-            self.
+            self.databricks_task_key,
             run_state.life_cycle_state,
         )
         self._handle_terminal_run_state(run_state)
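The main functional change above is the new databricks_task_key argument on DatabricksTaskBaseOperator (the base of DatabricksNotebookOperator and DatabricksTaskOperator). When the key is omitted or longer than 100 characters, the operator derives one from an MD5 hash of "dag_id__task_id" and uses it as both the run_name and the task_key. A standalone sketch of that default derivation, for illustration only:

# Standalone reproduction of the fallback key derivation added above.
import hashlib


def default_databricks_task_key(dag_id: str, task_id: str) -> str:
    """Mirror the operator fallback: MD5 hex digest of "dag_id__task_id"."""
    return hashlib.md5(f"{dag_id}__{task_id}".encode()).hexdigest()


print(default_databricks_task_key("example_dag", "notebook_task"))  # stable 32-character key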
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks_repos.py
RENAMED
@@ -30,7 +30,11 @@ from airflow.models import BaseOperator
 from airflow.providers.databricks.hooks.databricks import DatabricksHook

 if TYPE_CHECKING:
-
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class DatabricksReposCreateOperator(BaseOperator):
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks_sql.py
RENAMED
@@ -24,12 +24,11 @@ import json
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, ClassVar

-from databricks.sql.utils import ParamEscaper
-
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
+from databricks.sql.utils import ParamEscaper

 if TYPE_CHECKING:
     from airflow.utils.context import Context
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/operators/databricks_workflow.py
RENAMED
@@ -93,7 +93,7 @@ class _CreateDatabricksWorkflowOperator(BaseOperator):
     """

     operator_extra_links = (WorkflowJobRunLink(), WorkflowJobRepairAllFailedLink())
-    template_fields = ("notebook_params",)
+    template_fields = ("notebook_params", "job_clusters")
     caller = "_CreateDatabricksWorkflowOperator"

     def __init__(
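Adding job_clusters to template_fields means the cluster specs passed through a DatabricksWorkflowTaskGroup are rendered with Jinja before the workflow job is created. A sketch of what that enables, with assumed connection id, cluster values, and Variable name (only the job_clusters parameter itself comes from the provider's API):

# Sketch: a templated job_clusters spec; the Variable name and cluster values are assumptions.
from datetime import datetime

from airflow import DAG
from airflow.providers.databricks.operators.databricks_workflow import DatabricksWorkflowTaskGroup

job_clusters = [
    {
        "job_cluster_key": "workflow_cluster",
        "new_cluster": {
            # Rendered at runtime now that job_clusters is a template field of the launch operator.
            "spark_version": "{{ var.value.get('dbx_spark_version', '13.3.x-scala2.12') }}",
            "num_workers": 1,
        },
    }
]

with DAG(dag_id="example_databricks_workflow", start_date=datetime(2025, 1, 1), schedule=None):
    workflow = DatabricksWorkflowTaskGroup(
        group_id="databricks_workflow",
        databricks_conn_id="databricks_default",
        job_clusters=job_clusters,
    )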
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/plugins/databricks_workflow.py
RENAMED
@@ -44,6 +44,8 @@ from airflow.www.views import AirflowBaseView
 if TYPE_CHECKING:
     from sqlalchemy.orm.session import Session

+    from airflow.providers.databricks.operators.databricks import DatabricksTaskBaseOperator
+

 REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
 REPAIR_WAIT_DELAY = os.getenv("DATABRICKS_REPAIR_WAIT_DELAY", 0.5)
@@ -57,18 +59,8 @@ def get_auth_decorator():
     return auth.has_access_dag("POST", DagAccessEntity.RUN)


-def _get_databricks_task_id(task: BaseOperator) -> str:
-    """
-    Get the databricks task ID using dag_id and task_id. removes illegal characters.
-
-    :param task: The task to get the databricks task ID for.
-    :return: The databricks task ID.
-    """
-    return f"{task.dag_id}__{task.task_id.replace('.', '__')}"
-
-
 def get_databricks_task_ids(
-    group_id: str, task_map: dict[str,
+    group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: logging.Logger
 ) -> list[str]:
     """
     Return a list of all Databricks task IDs for a dictionary of Airflow tasks.
@@ -83,7 +75,7 @@ def get_databricks_task_ids(
     for task_id, task in task_map.items():
         if task_id == f"{group_id}.launch":
             continue
-        databricks_task_id =
+        databricks_task_id = task.databricks_task_key
         log.debug("databricks task id for task %s is %s", task_id, databricks_task_id)
         task_ids.append(databricks_task_id)
     return task_ids
@@ -112,7 +104,7 @@ def _clear_task_instances(
     dag = airflow_app.dag_bag.get_dag(dag_id)
     log.debug("task_ids %s to clear", str(task_ids))
     dr: DagRun = _get_dagrun(dag, run_id, session=session)
-    tis_to_clear = [ti for ti in dr.get_task_instances() if
+    tis_to_clear = [ti for ti in dr.get_task_instances() if ti.databricks_task_key in task_ids]
     clear_task_instances(tis_to_clear, session)

@@ -327,7 +319,7 @@ class WorkflowJobRepairAllFailedLink(BaseOperatorLink, LoggingMixin):

         tasks_to_run = {ti: t for ti, t in task_group_sub_tasks if ti in failed_and_skipped_tasks}

-        return ",".join(get_databricks_task_ids(task_group.group_id, tasks_to_run, log))
+        return ",".join(get_databricks_task_ids(task_group.group_id, tasks_to_run, log))  # type: ignore[arg-type]

     @staticmethod
     def _get_failed_and_skipped_tasks(dr: DagRun) -> list[str]:
@@ -390,7 +382,7 @@ class WorkflowJobRepairSingleTaskLink(BaseOperatorLink, LoggingMixin):
             "databricks_conn_id": metadata.conn_id,
             "databricks_run_id": metadata.run_id,
             "run_id": ti_key.run_id,
-            "tasks_to_repair":
+            "tasks_to_repair": task.databricks_task_key,
         }
         return url_for("RepairDatabricksTasks.repair", **query_params)
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/sensors/databricks_partition.py
RENAMED
@@ -25,15 +25,18 @@ from datetime import datetime
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable

-from databricks.sql.utils import ParamEscaper
-
 from airflow.exceptions import AirflowException
 from airflow.providers.common.sql.hooks.sql import fetch_all_handler
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
 from airflow.sensors.base import BaseSensorOperator
+from databricks.sql.utils import ParamEscaper

 if TYPE_CHECKING:
-
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class DatabricksPartitionSensor(BaseSensorOperator):
{apache_airflow_providers_databricks-7.0.0rc2 → apache_airflow_providers_databricks-7.2.0/src}/airflow/providers/databricks/sensors/databricks_sql.py
RENAMED
@@ -30,7 +30,11 @@ from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
 from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class DatabricksSqlSensor(BaseSensorOperator):
The remaining files listed above with +0 -0 contain no content changes; they are only moved under the new src/ layout.