apache-airflow-providers-databricks 7.7.1__tar.gz → 7.8.0rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- apache_airflow_providers_databricks-7.8.0rc1/NOTICE +5 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/PKG-INFO +31 -18
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/README.rst +19 -9
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/changelog.rst +109 -5
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/index.rst +8 -12
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/task.rst +1 -1
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/workflow.rst +5 -5
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/provider.yaml +7 -1
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/pyproject.toml +14 -11
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/__init__.py +3 -3
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/get_provider_info.py +1 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/hooks/databricks.py +19 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/hooks/databricks_base.py +67 -36
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/hooks/databricks_sql.py +5 -7
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/operators/databricks.py +4 -19
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/operators/databricks_repos.py +2 -6
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/operators/databricks_sql.py +2 -2
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/operators/databricks_workflow.py +3 -7
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/plugins/databricks_workflow.py +124 -131
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/sensors/databricks.py +2 -7
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/sensors/databricks_partition.py +6 -14
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/sensors/databricks_sql.py +2 -11
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/version_compat.py +0 -12
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/databricks/example_databricks_workflow.py +1 -1
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/hooks/test_databricks.py +44 -6
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/hooks/test_databricks_base.py +145 -3
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/hooks/test_databricks_sql.py +30 -25
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/operators/test_databricks.py +9 -48
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/operators/test_databricks_sql.py +2 -2
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/operators/test_databricks_workflow.py +0 -1
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/plugins/test_databricks_workflow.py +6 -6
- {apache_airflow_providers_databricks-7.7.1/src/airflow/providers/databricks → apache_airflow_providers_databricks-7.8.0rc1}/LICENSE +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/commits.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/conf.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/connections/databricks.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/img/databricks_workflow_task_group_airflow_graph_view.png +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/img/workflow_plugin_launch_task.png +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/img/workflow_plugin_single_task.png +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/img/workflow_run_databricks_graph_view.png +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/integration-logos/Databricks.png +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/copy_into.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/index.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/jobs_create.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/notebook.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/repos_create.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/repos_delete.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/repos_update.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/run_now.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/sql.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/sql_statements.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/submit_run.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/plugins/index.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/plugins/workflow.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/security.rst +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/exceptions.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/operators/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/utils/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/utils/databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/utils/mixins.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/utils/openlineage.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/conftest.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/databricks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/databricks/example_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/databricks/example_databricks_repos.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/databricks/example_databricks_sensors.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/system/databricks/example_databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/hooks/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/hooks/test_databricks_azure_workload_identity_async.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/operators/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/operators/test_databricks_copy.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/operators/test_databricks_repos.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/plugins/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/sensors/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/sensors/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/sensors/test_databricks_partition.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/sensors/test_databricks_sql.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/test_exceptions.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/triggers/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/triggers/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/utils/__init__.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/utils/test_databricks.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/utils/test_mixins.py +0 -0
- {apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/tests/unit/databricks/utils/test_openlineage.py +0 -0
{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/PKG-INFO
RENAMED
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.
+Version: 7.8.0rc1
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,16 +15,16 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-
-
-Requires-Dist: apache-airflow
-Requires-Dist: apache-airflow-providers-
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
 Requires-Dist: requests>=2.32.0,<3
 Requires-Dist: databricks-sql-connector>=4.0.0
 Requires-Dist: databricks-sqlalchemy>=1.0.2
@@ -34,18 +35,20 @@ Requires-Dist: pandas>=2.2.3; python_version >="3.13"
 Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
 Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
 Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
-Requires-Dist: apache-airflow-providers-fab>=2.2.
+Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: azure-identity
 Provides-Extra: fab
+Provides-Extra: openlineage
 Provides-Extra: sdk
 Provides-Extra: standard
 
@@ -74,9 +77,8 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.
+Release: ``7.8.0``
 
-Release Date: ``|PypiReleaseDate|``
 
 `Databricks <https://databricks.com/>`__
 
@@ -88,12 +90,12 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-databricks``
 
@@ -105,10 +107,9 @@ Requirements
 ========================================== ======================================
 PIP package                                Version required
 ========================================== ======================================
-``apache-airflow``                         ``>=2.
-``apache-airflow-providers-common-compat`` ``>=1.
+``apache-airflow``                         ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
-``apache-airflow-providers-openlineage``   ``>=2.3.0``
 ``requests``                               ``>=2.32.0,<3``
 ``databricks-sql-connector``               ``>=4.0.0``
 ``databricks-sqlalchemy``                  ``>=1.0.2``
@@ -138,10 +139,22 @@ Dependent package
 ================================================================================================================== =================
 `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
-`apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+================== ================================================================
+Extra              Dependencies
+================== ================================================================
+``sdk``            ``databricks-sdk==0.10.0``
+``azure-identity`` ``azure-identity>=1.3.1``
+``fab``            ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``standard``       ``apache-airflow-providers-standard``
+``openlineage``    ``apache-airflow-providers-openlineage>=2.3.0``
+================== ================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html>`_.
 

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/README.rst
RENAMED
@@ -23,9 +23,8 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.
+Release: ``7.8.0``
 
-Release Date: ``|PypiReleaseDate|``
 
 `Databricks <https://databricks.com/>`__
 
@@ -37,12 +36,12 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-databricks``
 
@@ -54,10 +53,9 @@ Requirements
 ========================================== ======================================
 PIP package                                Version required
 ========================================== ======================================
-``apache-airflow``                         ``>=2.
-``apache-airflow-providers-common-compat`` ``>=1.
+``apache-airflow``                         ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
-``apache-airflow-providers-openlineage``   ``>=2.3.0``
 ``requests``                               ``>=2.32.0,<3``
 ``databricks-sql-connector``               ``>=4.0.0``
 ``databricks-sqlalchemy``                  ``>=1.0.2``
@@ -87,9 +85,21 @@ Dependent package
 ================================================================================================================== =================
 `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
-`apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+================== ================================================================
+Extra              Dependencies
+================== ================================================================
+``sdk``            ``databricks-sdk==0.10.0``
+``azure-identity`` ``azure-identity>=1.3.1``
+``fab``            ``apache-airflow-providers-fab>=2.2.0; python_version < '3.13'``
+``standard``       ``apache-airflow-providers-standard``
+``openlineage``    ``apache-airflow-providers-openlineage>=2.3.0``
+================== ================================================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html>`_.

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/changelog.rst
RENAMED
@@ -26,6 +26,113 @@
 Changelog
 ---------
 
+7.8.0
+.....
+
+.. note::
+  This release of provider is only available for Airflow 2.11+ as explained in the
+  `Apache Airflow providers support policy <https://github.com/apache/airflow/blob/main/PROVIDERS.rst#minimum-supported-version-of-airflow-for-community-managed-providers>`_.
+
+Misc
+~~~~
+
+* ``Bump minimum Airflow version in providers to Airflow 2.11.0 (#58612)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Updates to release process of providers (#58316)``
+
+7.7.5
+.....
+
+Misc
+~~~~
+
+* ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
+
+Doc-only
+~~~~~~~~
+
+* ``Fix documentation/provider.yaml consistencies (#57283)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Delete all unnecessary LICENSE Files (#58191)``
+   * ``Enable ruff PLW2101,PLW2901,PLW3301 rule (#57700)``
+   * ``Enable PT006 rule to 14 files in providers (databricks,dbt,docker) (#57994)``
+   * ``Fix mypy static errors in databricks provider (#57768)``
+   * ``Enable ruff PLW1641 rule (#57679)``
+   * ``Enable ruff PLW1508 rule (#57653)``
+   * ``Fix code formatting via ruff preview (#57641)``
+
+7.7.4
+.....
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fix Databricks provider import error without fab provider (#56702)``
+
+Misc
+~~~~
+
+* ``Migrate databricks provider to ''common.compat'' (#56993)``
+
+Doc-only
+~~~~~~~~
+
+* ``Remove placeholder Release Date in changelog and index files (#56056)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Remove 'pytest.importorskip("flask_appbuilder")' from tests (#56679)``
+   * ``Enable PT011 rule to prvoider tests (#56320)``
+
+7.7.3
+.....
+
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fix metadata service check handle 429 (#55462)``
+
+Misc
+~~~~
+
+* ``Switch all airflow logging to structlog (#52651)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
+7.7.2
+.....
+
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fix Databricks sqlalchemy URL construction (#54478)``
+* ``Ensures DatabricksWorkflowOperator updates ACL (if available) when resetting a job. (#47827)``
+
+Misc
+~~~~
+
+* ``Remove airflow.models.DAG (#54383)``
+* ``chore: change openlineage to optional dependency (#54748)``
+* ``Move DagBag to SDK and make it return SDK DAG objects (#53918)``
+
+Doc-only
+~~~~~~~~
+
+* ``Make term Dag consistent in providers docs (#55101)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Switch pre-commit to prek (#54258)``
+   * ``Add CI support for SQLAlchemy 2.0 (#52233)``
+   * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+
 7.7.1
 .....
 
@@ -309,8 +416,6 @@ Misc
 .. Below changes are excluded from the changelog. Move them to
    appropriate section above if needed. Do not delete the lines(!):
    * ``Use Python 3.9 as target version for Ruff & Black rules (#44298)``
-
-.. Review and move the new changes to one of the sections above:
    * ``Update path of example dags in docs (#45069)``
    * ``Revert "Added job_clusters as a templated parameter to CreateDatabricksWorkfl…" (#45035)``
 
@@ -611,11 +716,10 @@ Misc
 * ``fix typos in DatabricksSubmitRunOperator (#36248)``
 * ``Add code snippet formatting in docstrings via Ruff (#36262)``
 
-..
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
   * ``Prepare docs 1st wave of Providers December 2023 (#36112)``
   * ``Prepare docs 1st wave of Providers December 2023 RC2 (#36190)``
-
-.. Review and move the new changes to one of the sections above:
   * ``Re-apply updated version numbers to 2nd wave of providers in December (#36380)``
   * ``Prepare 2nd wave of providers in December (#36373)``
 

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/index.rst
RENAMED
@@ -57,7 +57,7 @@
     :maxdepth: 1
     :caption: Resources
 
-    Example
+    Example Dags <https://github.com/apache/airflow/tree/providers-databricks/|version|/providers/databricks/tests/system/databricks>
     PyPI Repository <https://pypi.org/project/apache-airflow-providers-databricks/>
     Installing from sources <installing-providers-from-sources>
 
@@ -78,9 +78,7 @@ apache-airflow-providers-databricks package
 `Databricks <https://databricks.com/>`__
 
 
-Release: 7.
-
-Release Date: ``|PypiReleaseDate|``
+Release: 7.8.0
 
 Provider package
 ----------------
@@ -91,22 +89,21 @@ All classes for this package are included in the ``airflow.providers.databricks`
 Installation
 ------------
 
-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation via
 ``pip install apache-airflow-providers-databricks``.
 For the minimum Airflow version supported, see ``Requirements`` below.
 
 Requirements
 ------------
 
-The minimum Apache Airflow version supported by this provider distribution is ``2.
+The minimum Apache Airflow version supported by this provider distribution is ``2.11.0``.
 
 ========================================== ======================================
 PIP package                                Version required
 ========================================== ======================================
-``apache-airflow``                         ``>=2.
-``apache-airflow-providers-common-compat`` ``>=1.
+``apache-airflow``                         ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
-``apache-airflow-providers-openlineage``   ``>=2.3.0``
 ``requests``                               ``>=2.32.0,<3``
 ``databricks-sql-connector``               ``>=4.0.0``
 ``databricks-sqlalchemy``                  ``>=1.0.2``
@@ -136,7 +133,6 @@ Dependent package
 ================================================================================================================== =================
 `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
-`apache-airflow-providers-fab <https://airflow.apache.org/docs/apache-airflow-providers-fab>`_ ``fab``
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
@@ -146,5 +142,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-databricks 7.
-* `The apache-airflow-providers-databricks 7.
+* `The apache-airflow-providers-databricks 7.8.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0.tar.gz.sha512>`__)
+* `The apache-airflow-providers-databricks 7.8.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_databricks-7.8.0-py3-none-any.whl.sha512>`__)

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/task.rst
RENAMED
@@ -22,7 +22,7 @@ DatabricksTaskOperator
 ======================
 
 Use the :class:`~airflow.providers.databricks.operators.databricks.DatabricksTaskOperator` to launch and monitor
-task runs on Databricks as Airflow tasks. This can be used as a standalone operator in a
+task runs on Databricks as Airflow tasks. This can be used as a standalone operator in a Dag and as well as part of a
 Databricks Workflow by using it as an operator(task) within the
 :class:`~airflow.providers.databricks.operators.databricks_workflow.DatabricksWorkflowTaskGroup`.
 
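
The wording change above is cosmetic, but the page it touches documents standalone use of ``DatabricksTaskOperator``. For orientation, a minimal hypothetical sketch of that usage follows; the connection id, cluster id, notebook path and parameter layout are assumptions for illustration, not values taken from this diff.

    # Hypothetical sketch: DatabricksTaskOperator used standalone inside a Dag.
    # "databricks_default", the cluster id and the notebook path are placeholders.
    from datetime import datetime

    from airflow import DAG
    from airflow.providers.databricks.operators.databricks import DatabricksTaskOperator

    with DAG(
        dag_id="example_databricks_task",
        start_date=datetime(2025, 1, 1),
        schedule=None,
    ) as dag:
        run_notebook = DatabricksTaskOperator(
            task_id="run_notebook",
            databricks_conn_id="databricks_default",
            existing_cluster_id="1234-567890-abcde123",
            task_config={
                "notebook_task": {"notebook_path": "/Shared/example_notebook"},
            },
        )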

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/docs/operators/workflow.rst
RENAMED
@@ -28,7 +28,7 @@ Databricks notebook job runs as Airflow tasks. The task group launches a `Databr
 There are a few advantages to defining your Databricks Workflows in Airflow:
 
 ======================================= ============================================= =================================
-Authoring interface                     via Databricks (Web-based with Databricks UI) via Airflow(Code with Airflow
+Authoring interface                     via Databricks (Web-based with Databricks UI) via Airflow(Code with Airflow Dag)
 ======================================= ============================================= =================================
 Workflow compute pricing                ✅                                            ✅
 Notebook code in source control         ✅                                            ✅
@@ -36,14 +36,14 @@ Workflow structure in source control ✅
 Retry from beginning                    ✅                                            ✅
 Retry single task                       ✅                                            ✅
 Task groups within Workflows                                                          ✅
-Trigger workflows from other
+Trigger workflows from other Dags                                                     ✅
 Workflow-level parameters                                                             ✅
 ======================================= ============================================= =================================
 
 Examples
 --------
 
-Example of what a
+Example of what a Dag looks like with a DatabricksWorkflowTaskGroup
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 .. exampleinclude:: /../../databricks/tests/system/databricks/example_databricks_workflow.py
     :language: python
@@ -53,13 +53,13 @@ Example of what a DAG looks like with a DatabricksWorkflowTaskGroup
 With this example, Airflow will produce a job named ``<dag_name>.test_workflow_<USER>_<GROUP_ID>`` that will
 run task ``notebook_1`` and then ``notebook_2``. The job will be created in the databricks workspace
 if it does not already exist. If the job already exists, it will be updated to match
-the workflow defined in the
+the workflow defined in the Dag.
 
 The following image displays the resulting Databricks Workflow in the Airflow UI (based on the above example provided)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 .. image:: ../img/databricks_workflow_task_group_airflow_graph_view.png
 
-The corresponding Databricks Workflow in the Databricks UI for the run triggered from the Airflow
+The corresponding Databricks Workflow in the Databricks UI for the run triggered from the Airflow Dag is depicted below
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 .. image:: ../img/workflow_run_databricks_graph_view.png
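
The prose fixed above describes a task group that runs ``notebook_1`` and then ``notebook_2`` as a single Databricks Workflow. A condensed, hypothetical sketch of that pattern is shown below; it loosely mirrors the referenced ``example_databricks_workflow.py`` system test, with the connection id, cluster spec and notebook paths as placeholder assumptions.

    # Hypothetical sketch of a DatabricksWorkflowTaskGroup with two notebook tasks.
    from datetime import datetime

    from airflow import DAG
    from airflow.providers.databricks.operators.databricks import DatabricksNotebookOperator
    from airflow.providers.databricks.operators.databricks_workflow import DatabricksWorkflowTaskGroup

    # Placeholder job cluster shared by both notebook tasks.
    job_clusters = [
        {
            "job_cluster_key": "workflow_cluster",
            "new_cluster": {
                "spark_version": "15.4.x-scala2.12",
                "node_type_id": "i3.xlarge",
                "num_workers": 1,
            },
        }
    ]

    with DAG(
        dag_id="example_databricks_workflow",
        start_date=datetime(2025, 1, 1),
        schedule=None,
    ) as dag:
        with DatabricksWorkflowTaskGroup(
            group_id="test_workflow",
            databricks_conn_id="databricks_default",
            job_clusters=job_clusters,
        ) as workflow:
            notebook_1 = DatabricksNotebookOperator(
                task_id="notebook_1",
                databricks_conn_id="databricks_default",
                notebook_path="/Shared/notebook_1",
                source="WORKSPACE",
                job_cluster_key="workflow_cluster",
            )
            notebook_2 = DatabricksNotebookOperator(
                task_id="notebook_2",
                databricks_conn_id="databricks_default",
                notebook_path="/Shared/notebook_2",
                source="WORKSPACE",
                job_cluster_key="workflow_cluster",
            )
            notebook_1 >> notebook_2

As the documentation text says, Airflow creates (or updates) one Databricks job for the whole group, and the two notebooks run inside it in order.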

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/provider.yaml
RENAMED
@@ -22,12 +22,17 @@ description: |
   `Databricks <https://databricks.com/>`__
 
 state: ready
-source-date-epoch:
+source-date-epoch: 1764109970
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 7.8.0
+  - 7.7.5
+  - 7.7.4
+  - 7.7.3
+  - 7.7.2
   - 7.7.1
   - 7.7.0
   - 7.6.0
@@ -101,6 +106,7 @@ integrations:
     external-doc-url: https://databricks.com/product/databricks-sql
     how-to-guide:
       - /docs/apache-airflow-providers-databricks/operators/sql.rst
+      - /docs/apache-airflow-providers-databricks/operators/sql_statements.rst
       - /docs/apache-airflow-providers-databricks/operators/copy_into.rst
     tags: [service]
   - integration-name: Databricks Repos

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/pyproject.toml
RENAMED
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-databricks"
-version = "7.
+version = "7.8.0rc1"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
+license = "Apache-2.0"
+license-files = ['LICENSE', 'NOTICE']
 authors = [
     {name="Apache Software Foundation", email="dev@airflow.apache.org"},
 ]
@@ -43,7 +45,6 @@ classifiers = [
     "Intended Audience :: System Administrators",
     "Framework :: Apache Airflow",
     "Framework :: Apache Airflow :: Provider",
-    "License :: OSI Approved :: Apache Software License",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -54,13 +55,12 @@ requires-python = ">=3.10"
 
 # The dependencies should be modified in place in the generated file.
 # Any change in the dependencies is preserved when the file is regenerated
-# Make sure to run ``
+# Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.
-    "apache-airflow-providers-common-compat>=1.
-    "apache-airflow-providers-common-sql>=1.27.
-    "apache-airflow-providers-openlineage>=2.3.0",
+    "apache-airflow>=2.11.0rc1",
+    "apache-airflow-providers-common-compat>=1.8.0rc1",
+    "apache-airflow-providers-common-sql>=1.27.0rc1",
     "requests>=2.32.0,<3",
     "databricks-sql-connector>=4.0.0",
     "databricks-sqlalchemy>=1.0.2",
@@ -83,11 +83,14 @@ dependencies = [
     "azure-identity>=1.3.1",
 ]
 "fab" = [
-    "apache-airflow-providers-fab>=2.2.
+    "apache-airflow-providers-fab>=2.2.0rc1; python_version < '3.13'"
 ]
 "standard" = [
     "apache-airflow-providers-standard"
 ]
+"openlineage" = [
+    "apache-airflow-providers-openlineage>=2.3.0rc1"
+]
 
 [dependency-groups]
 dev = [
@@ -96,13 +99,13 @@ dev = [
     "apache-airflow-devel-common",
     "apache-airflow-providers-common-compat",
     "apache-airflow-providers-common-sql",
-    "apache-airflow-providers-fab",
     "apache-airflow-providers-openlineage",
     # Additional devel dependencies (do not remove this line and add extra development dependencies)
     "deltalake>=1.1.3",
     "apache-airflow-providers-fab>=2.2.0; python_version < '3.13'",
     "apache-airflow-providers-microsoft-azure",
     "apache-airflow-providers-common-sql[pandas,polars]",
+    "apache-airflow-providers-fab",
 ]
 
 # To build docs:
@@ -131,8 +134,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/__init__.py
RENAMED
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.
+__version__ = "7.8.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-databricks:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-databricks:{__version__}` needs Apache Airflow 2.11.0+"
     )
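
The version gate above compares only the base version of the installed Airflow, so pre-release or dev suffixes do not defeat the new ``2.11.0`` minimum. A small stand-alone illustration of that comparison (plain ``packaging`` usage, not provider code):

    # Why the gate strips to base_version before comparing.
    from packaging import version

    installed = version.parse("2.11.0.dev0")
    minimum = version.parse("2.11.0")

    # A dev release sorts before the final release ...
    assert installed < minimum
    # ... but comparing the base version lets a 2.11.* pre-release pass the gate.
    assert version.parse(installed.base_version) >= minimum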

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/get_provider_info.py
RENAMED
@@ -45,6 +45,7 @@ def get_provider_info():
             "external-doc-url": "https://databricks.com/product/databricks-sql",
             "how-to-guide": [
                 "/docs/apache-airflow-providers-databricks/operators/sql.rst",
+                "/docs/apache-airflow-providers-databricks/operators/sql_statements.rst",
                 "/docs/apache-airflow-providers-databricks/operators/copy_into.rst",
             ],
             "tags": ["service"],

{apache_airflow_providers_databricks-7.7.1 → apache_airflow_providers_databricks-7.8.0rc1}/src/airflow/providers/databricks/hooks/databricks.py
RENAMED
@@ -134,6 +134,9 @@ class RunState:
             and self.state_message == other.state_message
         )
 
+    def __hash__(self):
+        return hash((self.life_cycle_state, self.result_state, self.state_message))
+
     def __repr__(self) -> str:
         return str(self.__dict__)
 
@@ -183,6 +186,9 @@ class ClusterState:
     def __eq__(self, other) -> bool:
         return self.state == other.state and self.state_message == other.state_message
 
+    def __hash__(self):
+        return hash((self.state, self.state_message))
+
     def __repr__(self) -> str:
         return str(self.__dict__)
 
@@ -244,6 +250,9 @@ class SQLStatementState:
             and self.error_message == other.error_message
         )
 
+    def __hash__(self):
+        return hash((self.state, self.error_code, self.error_message))
+
     def __repr__(self) -> str:
         return str(self.__dict__)
 
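
The three ``__hash__`` methods added above pair with the ``__eq__`` methods these state classes already define: a Python class that overrides ``__eq__`` without defining ``__hash__`` has its ``__hash__`` set to ``None`` and becomes unhashable, which is what ruff's PLW1641 rule (listed in the 7.7.5 changelog entries) flags. A minimal stand-in example, not provider code:

    # Defining __eq__ alone makes instances unhashable; adding __hash__ restores it.
    class StateWithoutHash:
        def __init__(self, state: str):
            self.state = state

        def __eq__(self, other) -> bool:
            return self.state == other.state


    class StateWithHash(StateWithoutHash):
        def __hash__(self):
            # Hash the same field that __eq__ compares, preserving the eq/hash contract.
            return hash(self.state)


    try:
        {StateWithoutHash("RUNNING")}
    except TypeError as err:
        print(err)  # unhashable type: 'StateWithoutHash'

    # Two equal instances collapse to one set element once __hash__ exists.
    print(len({StateWithHash("RUNNING"), StateWithHash("RUNNING")}))  # 1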
@@ -298,6 +307,16 @@ class DatabricksHook(BaseDatabricksHook):
 
         :param json: The data used in the new_settings of the request to the ``reset`` endpoint.
         """
+        access_control_list = json.get("access_control_list", None)
+        if access_control_list:
+            self.log.info(
+                "Updating job permission for Databricks workflow job id %s with access_control_list %s",
+                job_id,
+                access_control_list,
+            )
+            acl_json = {"access_control_list": access_control_list}
+            self.update_job_permission(job_id=int(job_id), json=acl_json)
+
         self._do_api_call(RESET_ENDPOINT, {"job_id": job_id, "new_settings": json})
 
     def update_job(self, job_id: str, json: dict) -> None:
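
The final hunk makes ``DatabricksHook.reset_job`` forward any ``access_control_list`` found in the new settings to ``update_job_permission`` before calling the ``reset`` endpoint, which is the ACL fix listed under 7.7.2 in the changelog. A hedged usage sketch follows; the connection id and the ACL entry shape follow the usual Databricks Jobs API convention and are assumptions, not values taken from this diff.

    # Hypothetical call sketch; requires a configured Databricks connection.
    from airflow.providers.databricks.hooks.databricks import DatabricksHook

    hook = DatabricksHook(databricks_conn_id="databricks_default")

    new_settings = {
        "name": "example-workflow",
        # When present, reset_job now pushes this list via update_job_permission
        # before resetting the job definition.
        "access_control_list": [
            {"user_name": "someone@example.com", "permission_level": "IS_OWNER"},
        ],
    }

    hook.reset_job(job_id="123", json=new_settings)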