apache-airflow-providers-databricks 7.2.0.tar.gz → 7.2.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/PKG-INFO +29 -25
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/README.rst +21 -21
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/pyproject.toml +29 -10
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/__init__.py +1 -1
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/get_provider_info.py +4 -1
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/hooks/databricks_sql.py +3 -2
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/databricks.py +7 -1
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/databricks_sql.py +2 -1
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/plugins/databricks_workflow.py +29 -14
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/sensors/databricks_partition.py +3 -2
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/sensors/databricks_sql.py +1 -1
- apache_airflow_providers_databricks-7.2.1/src/airflow/providers/databricks/version_compat.py +36 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/LICENSE +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/exceptions.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/hooks/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/hooks/databricks_base.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/databricks_repos.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/databricks_workflow.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/utils/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/utils/databricks.py +0 -0
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.2.0
+Version: 7.2.1
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -29,44 +29,47 @@ Requires-Dist: mergedeep>=1.3.4
 Requires-Dist: pandas>=2.1.2,<2.2
 Requires-Dist: pyarrow>=14.0.1
 Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
+Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
+Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: azure-identity
+Provides-Extra: fab
 Provides-Extra: sdk
+Provides-Extra: standard
 
 
-
-
-
-
-
-
-
+.. Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements. See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership. The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License. You may obtain a copy of the License at
 
-
+.. http://www.apache.org/licenses/LICENSE-2.0
 
-
-
-
-
-
-
+.. Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied. See the License for the
+   specific language governing permissions and limitations
+   under the License.
 
-
-
-.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-   `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 
+.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+   ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.2.0``
+Release: ``7.2.1``
 
 
 `Databricks <https://databricks.com/>`__
@@ -79,7 +82,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/>`_.
 
 Installation
 ------------
@@ -123,8 +126,9 @@ You can install such cross-provider dependencies when installing from PyPI. For
 Dependent package                                                                                            Extra
 ============================================================================================================ ==============
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
+`apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_               ``fab``
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html>`_.

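The metadata change that matters here is the pair of new optional extras, ``fab`` and ``standard``. The extras of an installed distribution can be read back from its metadata; a minimal sketch, assuming the 7.2.1 package is installed in the current environment:

    # Hedged sketch: list the Provides-Extra entries of the installed package.
    from importlib.metadata import metadata

    md = metadata("apache-airflow-providers-databricks")
    # For 7.2.1 this should include "fab" and "standard" next to "azure-identity" and "sdk".
    print(md.get_all("Provides-Extra"))

The matching ``Requires-Dist`` markers (``extra == "fab"``) mean the extra dependency is only pulled in for installs like ``pip install "apache-airflow-providers-databricks[fab]"``.
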
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/README.rst
RENAMED
@@ -1,30 +1,29 @@
 
-
-
-
-
-
-
-
+.. Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements. See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership. The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License. You may obtain a copy of the License at
 
-
+.. http://www.apache.org/licenses/LICENSE-2.0
 
-
-
-
-
-
-
+.. Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied. See the License for the
+   specific language governing permissions and limitations
+   under the License.
 
-
-
-.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-   `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 
+.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+   ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.2.0``
+Release: ``7.2.1``
 
 
 `Databricks <https://databricks.com/>`__
@@ -37,7 +36,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/>`_.
 
 Installation
 ------------
@@ -81,7 +80,8 @@ You can install such cross-provider dependencies when installing from PyPI. For
 Dependent package                                                                                            Extra
 ============================================================================================================ ==============
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
+`apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_               ``fab``
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html>`_.

{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/pyproject.toml
RENAMED

@@ -20,12 +20,12 @@
 # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
-requires = ["flit_core==3.
+requires = ["flit_core==3.11.0"]
 build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-databricks"
-version = "7.2.0"
+version = "7.2.1"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -52,8 +52,10 @@ classifiers = [
 ]
 requires-python = "~=3.9"
 
-# The dependencies should be modified in place in the generated file
+# The dependencies should be modified in place in the generated file.
 # Any change in the dependencies is preserved when the file is regenerated
+# Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
+# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.9.0",
     "apache-airflow-providers-common-sql>=1.20.0",
@@ -75,17 +77,37 @@ dependencies = [
 "azure-identity" = [
     "azure-identity>=1.3.1",
 ]
+"fab" = [
+    "apache-airflow-providers-fab"
+]
+"standard" = [
+    "apache-airflow-providers-standard"
+]
 
-# The dependency groups should be modified in place in the generated file
-# Any change in the dependencies is preserved when the file is regenerated
 [dependency-groups]
 dev = [
+    "apache-airflow",
+    "apache-airflow-task-sdk",
+    "apache-airflow-devel-common",
+    "apache-airflow-providers-common-sql",
+    "apache-airflow-providers-fab",
+    # Additional devel dependencies (do not remove this line and add extra development dependencies)
     "deltalake>=0.12.0",
 ]
 
+[tool.uv.sources]
+# These names must match the names as defined in the pyproject.toml of the workspace items,
+# *not* the workspace folder paths
+apache-airflow = {workspace = true}
+apache-airflow-devel-common = {workspace = true}
+apache-airflow-task-sdk = {workspace = true}
+apache-airflow-providers-common-sql = {workspace = true}
+apache-airflow-providers-fab = {workspace = true}
+apache-airflow-providers-standard = {workspace = true}
+
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.0/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -100,6 +122,3 @@ databricks_workflow = "airflow.providers.databricks.plugins.databricks_workflow:
 
 [tool.flit.module]
 name = "airflow.providers.databricks"
-
-[tool.pytest.ini_options]
-ignore = "tests/system/"

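The new ``[project.optional-dependencies]`` tables mirror the PKG-INFO extras above, and ``[tool.uv.sources]`` wires the uv workspace packages for local development. A small sketch for sanity-checking the extras from a source checkout (assumes a local ``pyproject.toml`` and Python 3.11+, where ``tomllib`` is in the standard library):

    import tomllib  # stdlib since Python 3.11

    with open("pyproject.toml", "rb") as f:
        project = tomllib.load(f)["project"]

    print(project["optional-dependencies"]["fab"])       # ["apache-airflow-providers-fab"]
    print(project["optional-dependencies"]["standard"])  # ["apache-airflow-providers-standard"]
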
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/__init__.py
RENAMED

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.2.0"
+__version__ = "7.2.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

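The guard above compares Airflow's *base* version, so pre-release and dev builds of 2.9.0 still satisfy the floor. A short illustration of why the double ``parse`` is there (assumes only the ``packaging`` library):

    import packaging.version

    v = packaging.version.parse("2.9.0.dev0")
    print(v.base_version)  # "2.9.0" -- dev/rc suffixes are stripped
    # After stripping, the version no longer compares as "less than 2.9.0":
    print(packaging.version.parse(v.base_version) < packaging.version.parse("2.9.0"))  # False
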
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/get_provider_info.py
RENAMED

@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Databricks",
         "description": "`Databricks <https://databricks.com/>`__\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1741508706,
         "versions": [
+            "7.2.1",
             "7.2.0",
             "7.0.0",
             "6.13.0",
@@ -189,6 +190,8 @@ def get_provider_info():
         "optional-dependencies": {
             "sdk": ["databricks-sdk==0.10.0"],
             "azure-identity": ["azure-identity>=1.3.1"],
+            "fab": ["apache-airflow-providers-fab"],
+            "standard": ["apache-airflow-providers-standard"],
         },
         "devel-dependencies": ["deltalake>=0.12.0"],
     }

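``get_provider_info()`` is a plain function returning a dict, so the new entries are easy to inspect directly; a minimal sketch, assuming the 7.2.1 package is installed:

    from airflow.providers.databricks.get_provider_info import get_provider_info

    info = get_provider_info()
    print(info["versions"][0])                    # "7.2.1"
    print(sorted(info["optional-dependencies"]))  # now includes "fab" and "standard"
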
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/hooks/databricks_sql.py
RENAMED

@@ -31,13 +31,14 @@ from typing import (
     overload,
 )
 
+from databricks import sql  # type: ignore[attr-defined]
+from databricks.sql.types import Row
+
 from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection as AirflowConnection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
 from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
 from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
-from databricks import sql  # type: ignore[attr-defined]
-from databricks.sql.types import Row
 
 if TYPE_CHECKING:
     from databricks.sql.client import Connection

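The change above only regroups imports (third-party ``databricks`` before ``airflow``); the hook's behavior is unchanged. A hedged usage sketch, where the connection id and query are assumptions, not part of this diff:

    from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
    from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

    # "databricks_default" is a hypothetical Airflow connection id.
    hook = DatabricksSqlHook(databricks_conn_id="databricks_default")
    rows = hook.run(sql="SELECT 1 AS ok", handler=fetch_all_handler)
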
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/databricks.py
RENAMED

@@ -29,7 +29,7 @@ from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator,
+from airflow.models import BaseOperator, XCom
 from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunLifeCycleState, RunState
 from airflow.providers.databricks.operators.databricks_workflow import (
     DatabricksWorkflowTaskGroup,
@@ -41,12 +41,18 @@ from airflow.providers.databricks.plugins.databricks_workflow import (
 )
 from airflow.providers.databricks.triggers.databricks import DatabricksExecutionTrigger
 from airflow.providers.databricks.utils.databricks import normalise_json_content, validate_trigger_event
+from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
 
 if TYPE_CHECKING:
     from airflow.models.taskinstancekey import TaskInstanceKey
     from airflow.utils.context import Context
     from airflow.utils.task_group import TaskGroup
 
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseOperatorLink
+else:
+    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
+
 DEFER_METHOD_NAME = "execute_complete"
 XCOM_RUN_ID_KEY = "run_id"
 XCOM_JOB_ID_KEY = "job_id"

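The ``BaseOperatorLink`` switch is the core compatibility move of this release: on Airflow 3 the class lives in the Task SDK, on Airflow 2 it stays under ``airflow.models``. A minimal sketch of the same pattern in user code, with a hypothetical link class for illustration only:

    from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS

    if AIRFLOW_V_3_0_PLUS:
        from airflow.sdk import BaseOperatorLink
    else:
        from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]


    class ExampleRunLink(BaseOperatorLink):
        """Hypothetical link; the real links in this provider resolve their URL from XCom."""

        name = "Example Run"

        def get_link(self, operator, *, ti_key):
            return "https://example.com/run"  # placeholder URL, an assumption
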
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/operators/databricks_sql.py
RENAMED

@@ -24,11 +24,12 @@ import json
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, ClassVar
 
+from databricks.sql.utils import ParamEscaper
+
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
-from databricks.sql.utils import ParamEscaper
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

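``ParamEscaper`` comes from the third-party ``databricks-sql-connector`` distribution and renders Python values as SQL literals for generated statements. A brief hedged illustration; the exact quoting rules are defined by the connector, not this diff:

    from databricks.sql.utils import ParamEscaper

    pe = ParamEscaper()
    print(pe.escape_item("2023-01-01"))  # a quoted SQL string literal
    print(pe.escape_item([1, 2, 3]))     # a parenthesized sequence usable in IN-style clauses
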
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/plugins/databricks_workflow.py
RENAMED

@@ -19,42 +19,53 @@ from __future__ import annotations
 
 import logging
 import os
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any
 from urllib.parse import unquote
 
-from flask import
+from flask import flash, redirect, request, url_for
+from flask_appbuilder import BaseView
 from flask_appbuilder.api import expose
 
 from airflow.exceptions import AirflowException, TaskInstanceNotFound
-from airflow.models import
+from airflow.models import DagBag
 from airflow.models.dag import DAG, clear_task_instances
 from airflow.models.dagrun import DagRun
 from airflow.models.taskinstance import TaskInstance, TaskInstanceKey
 from airflow.models.xcom import XCom
 from airflow.plugins_manager import AirflowPlugin
 from airflow.providers.databricks.hooks.databricks import DatabricksHook
-from airflow.
+from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.providers.fab.www import auth
+else:
+    from airflow.www import auth  # type: ignore
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.task_group import TaskGroup
-from airflow.www import auth
-from airflow.www.views import AirflowBaseView
 
 if TYPE_CHECKING:
     from sqlalchemy.orm.session import Session
 
+    from airflow.models import BaseOperator
     from airflow.providers.databricks.operators.databricks import DatabricksTaskBaseOperator
 
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseOperatorLink
+else:
+    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
+
 
 REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
 REPAIR_WAIT_DELAY = os.getenv("DATABRICKS_REPAIR_WAIT_DELAY", 0.5)
 
-airflow_app = cast(AirflowApp, current_app)
-
 
 def get_auth_decorator():
-
+    if AIRFLOW_V_3_0_PLUS:
+        from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity
+    else:
+        from airflow.auth.managers.models.resource_details import DagAccessEntity
 
     return auth.has_access_dag("POST", DagAccessEntity.RUN)
 
@@ -101,7 +112,8 @@ def _get_dagrun(dag: DAG, run_id: str, session: Session | None = None) -> DagRun
 def _clear_task_instances(
     dag_id: str, run_id: str, task_ids: list[str], log: logging.Logger, session: Session | None = None
 ) -> None:
-
+    dag_bag = DagBag(read_dags_from_db=True)
+    dag = dag_bag.get_dag(dag_id)
     log.debug("task_ids %s to clear", str(task_ids))
     dr: DagRun = _get_dagrun(dag, run_id, session=session)
     tis_to_clear = [ti for ti in dr.get_task_instances() if ti.databricks_task_key in task_ids]
@@ -238,7 +250,8 @@ class WorkflowJobRunLink(BaseOperatorLink, LoggingMixin):
         if not task_group:
             raise AirflowException("Task group is required for generating Databricks Workflow Job Run Link.")
 
-
+        dag_bag = DagBag(read_dags_from_db=True)
+        dag = dag_bag.get_dag(ti_key.dag_id)
         dag.get_task(ti_key.task_id)
         self.log.info("Getting link for task %s", ti_key.task_id)
         if ".launch" not in ti_key.task_id:
@@ -310,7 +323,8 @@ class WorkflowJobRepairAllFailedLink(BaseOperatorLink, LoggingMixin):
             raise AirflowException("Task group is required for generating repair link.")
         if not task_group.group_id:
             raise AirflowException("Task group ID is required for generating repair link.")
-
+        dag_bag = DagBag(read_dags_from_db=True)
+        dag = dag_bag.get_dag(ti_key.dag_id)
         dr = _get_dagrun(dag, ti_key.run_id)
         log.debug("Getting failed and skipped tasks for dag run %s", dr.run_id)
         task_group_sub_tasks = self.get_task_group_children(task_group).items()
@@ -369,7 +383,8 @@ class WorkflowJobRepairSingleTaskLink(BaseOperatorLink, LoggingMixin):
             task_group.group_id,
             ti_key.task_id,
         )
-
+        dag_bag = DagBag(read_dags_from_db=True)
+        dag = dag_bag.get_dag(ti_key.dag_id)
         task = dag.get_task(ti_key.task_id)
 
         if ".launch" not in ti_key.task_id:
@@ -387,7 +402,7 @@ class WorkflowJobRepairSingleTaskLink(BaseOperatorLink, LoggingMixin):
         return url_for("RepairDatabricksTasks.repair", **query_params)
 
 
-class RepairDatabricksTasks(AirflowBaseView, LoggingMixin):
+class RepairDatabricksTasks(BaseView, LoggingMixin):
     """Repair databricks tasks from Airflow."""
 
     default_view = "repair"

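The recurring change in this file swaps the removed module-level ``airflow_app.dag_bag`` (reached through Flask's ``current_app``) for an explicit ``DagBag`` backed by the serialized-DAG tables, constructed at the point of use. A standalone sketch of that lookup, with a hypothetical dag id:

    from airflow.models import DagBag

    dag_bag = DagBag(read_dags_from_db=True)  # read serialized DAGs from the metadata DB
    dag = dag_bag.get_dag("example_dag")      # "example_dag" is an assumption

This removes the dependency on the Flask application context, which is not available the same way on Airflow 3.
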
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/sensors/databricks_partition.py
RENAMED

@@ -25,11 +25,12 @@ from datetime import datetime
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable
 
+from databricks.sql.utils import ParamEscaper
+
 from airflow.exceptions import AirflowException
-from airflow.providers.common.sql.hooks.sql import fetch_all_handler
+from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
 from airflow.sensors.base import BaseSensorOperator
-from databricks.sql.utils import ParamEscaper
 
 if TYPE_CHECKING:
     try:

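Only the sensor's imports changed (``fetch_all_handler`` moved to the ``handlers`` module of common-sql); usage stays the same. A hedged example of instantiating it, where every argument value is an assumption for illustration:

    from airflow.providers.databricks.sensors.databricks_partition import DatabricksPartitionSensor

    wait_for_partition = DatabricksPartitionSensor(
        task_id="wait_for_partition",
        databricks_conn_id="databricks_default",  # hypothetical connection id
        sql_warehouse_name="my-warehouse",        # hypothetical warehouse
        table_name="raw_events",                  # hypothetical table
        partitions={"date": "2025-01-01"},        # hypothetical partition spec
    )
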
{apache_airflow_providers_databricks-7.2.0 → apache_airflow_providers_databricks-7.2.1}/src/airflow/providers/databricks/sensors/databricks_sql.py
RENAMED

@@ -25,7 +25,7 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable
 
 from airflow.exceptions import AirflowException
-from airflow.providers.common.sql.hooks.sql import fetch_all_handler
+from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
 from airflow.sensors.base import BaseSensorOperator
 

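The same one-line import move applies here. A hedged usage sketch with assumed argument values:

    from airflow.providers.databricks.sensors.databricks_sql import DatabricksSqlSensor

    check_rows = DatabricksSqlSensor(
        task_id="check_rows",
        databricks_conn_id="databricks_default",  # hypothetical connection id
        sql_warehouse_name="my-warehouse",        # hypothetical warehouse
        sql="SELECT 1 FROM my_table LIMIT 1",     # hypothetical query; returned rows satisfy the sensor
    )
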
apache_airflow_providers_databricks-7.2.1/src/airflow/providers/databricks/version_compat.py
ADDED

@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
+# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
+# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
+# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
+#
+from __future__ import annotations
+
+
+def get_base_airflow_version_tuple() -> tuple[int, int, int]:
+    from packaging.version import Version
+
+    from airflow import __version__
+
+    airflow_version = Version(__version__)
+    return airflow_version.major, airflow_version.minor, airflow_version.micro
+
+
+AIRFLOW_V_2_10_PLUS = get_base_airflow_version_tuple() >= (2, 10, 0)
+AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)

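One subtlety of the tuple comparison above: ``Version("3.0.0.dev0")`` already reports ``major=3, minor=0, micro=0``, so pre-releases of 3.0.0 count as 3.0+. A short demonstration (only ``packaging`` required):

    from packaging.version import Version

    v = Version("3.0.0.dev0")
    print((v.major, v.minor, v.micro))               # (3, 0, 0)
    print((v.major, v.minor, v.micro) >= (3, 0, 0))  # True
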