apache-airflow-providers-databricks 5.0.1rc1__py3-none-any.whl → 5.1.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/databricks/__init__.py +3 -3
- airflow/providers/databricks/get_provider_info.py +3 -2
- airflow/providers/databricks/hooks/databricks_sql.py +5 -2
- {apache_airflow_providers_databricks-5.0.1rc1.dist-info → apache_airflow_providers_databricks-5.1.0rc2.dist-info}/METADATA +8 -8
- {apache_airflow_providers_databricks-5.0.1rc1.dist-info → apache_airflow_providers_databricks-5.1.0rc2.dist-info}/RECORD +7 -7
- {apache_airflow_providers_databricks-5.0.1rc1.dist-info → apache_airflow_providers_databricks-5.1.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_databricks-5.0.1rc1.dist-info → apache_airflow_providers_databricks-5.1.0rc2.dist-info}/entry_points.txt +0 -0
airflow/providers/databricks/__init__.py
@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "5.0.1"
+__version__ = "5.1.0"
 
 try:
     from airflow import __version__ as airflow_version
@@ -35,8 +35,8 @@ except ImportError:
     from airflow.version import version as airflow_version
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.5.0"
+    "2.6.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-databricks:{__version__}` needs Apache Airflow 2.5.0+"
+        f"The package `apache-airflow-providers-databricks:{__version__}` needs Apache Airflow 2.6.0+"
     )
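The hunk above raises the minimum supported Apache Airflow version to 2.6.0 via an import-time guard. A minimal sketch of how that guard behaves, assuming only the `packaging` library; the running version "2.5.3" is an invented example standing in for `airflow.__version__`:

# Illustrative sketch only: reproduces the guard from __init__.py above.
import packaging.version

airflow_version = "2.5.3"  # example value; normally taken from airflow.__version__

# base_version strips pre-release/dev suffixes (e.g. "2.6.0rc1" -> "2.6.0")
# before comparing against the new 2.6.0 floor.
if packaging.version.parse(
    packaging.version.parse(airflow_version).base_version
) < packaging.version.parse("2.6.0"):
    raise RuntimeError(
        "The package `apache-airflow-providers-databricks:5.1.0` needs Apache Airflow 2.6.0+"
    )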
airflow/providers/databricks/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Databricks",
         "description": "`Databricks <https://databricks.com/>`__\n",
         "suspended": False,
-        "source-date-epoch":
+        "source-date-epoch": 1701983371,
         "versions": [
+            "5.1.0",
             "5.0.1",
             "5.0.0",
             "4.7.0",
@@ -63,7 +64,7 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.5.0",
+            "apache-airflow>=2.6.0",
             "apache-airflow-providers-common-sql>=1.8.1",
             "requests>=2.27,<3",
             "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
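For reference, the bumped metadata can be read back directly from the module changed above. A small sketch, assuming apache-airflow-providers-databricks 5.1.0 is installed; the expected values in the comments are the ones visible in the hunks:

# Illustrative sketch: read the provider metadata changed in the hunks above.
from airflow.providers.databricks.get_provider_info import get_provider_info

info = get_provider_info()
print(info["versions"][0])        # "5.1.0" - newly prepended entry
print(info["dependencies"][0])    # "apache-airflow>=2.6.0" - raised floor
print(info["source-date-epoch"])  # 1701983371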
airflow/providers/databricks/hooks/databricks_sql.py
@@ -21,6 +21,7 @@ from copy import copy
 from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, TypeVar, overload
 
 from databricks import sql  # type: ignore[attr-defined]
+from databricks.sql.types import Row
 
 from airflow.exceptions import AirflowException
 from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
@@ -242,9 +243,11 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
 
     @staticmethod
     def _make_serializable(result):
-        """Transform the databricks Row objects into
-        if result
+        """Transform the databricks Row objects into JSON-serializable lists."""
+        if isinstance(result, list):
             return [list(row) for row in result]
+        elif isinstance(result, Row):
+            return list(result)
         return result
 
     def bulk_dump(self, table, tmp_file):
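The `_make_serializable` change above converts query results into plain lists (JSON-serializable) and now also handles a single `Row`, not just a list of rows. A minimal sketch of that behaviour, using a namedtuple as a stand-in for `databricks.sql.types.Row` so it runs without databricks-sql-connector installed; field names and values are invented for the example:

# Illustrative sketch of the new serialization logic, not the hook itself.
from collections import namedtuple

Row = namedtuple("Row", ["id", "name"])  # stand-in for databricks.sql.types.Row


def _make_serializable(result):
    """Transform Row objects into JSON-serializable lists (mirrors the hunk above)."""
    if isinstance(result, list):
        return [list(row) for row in result]
    elif isinstance(result, Row):
        return list(result)
    return result


print(_make_serializable([Row(1, "a"), Row(2, "b")]))  # [[1, 'a'], [2, 'b']]
print(_make_serializable(Row(1, "a")))                 # [1, 'a']
print(_make_serializable(None))                        # None (passed through)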
apache_airflow_providers_databricks-5.0.1rc1.dist-info/METADATA → apache_airflow_providers_databricks-5.1.0rc2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-databricks
-Version: 5.0.1rc1
+Version: 5.1.0rc2
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,14 +22,14 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: aiohttp>=3.6.3, <4
 Requires-Dist: apache-airflow-providers-common-sql>=1.8.1.dev0
-Requires-Dist: apache-airflow>=2.5.0.dev0
+Requires-Dist: apache-airflow>=2.6.0.dev0
 Requires-Dist: databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0
 Requires-Dist: requests>=2.27,<3
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.0.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.0.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.1.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -81,7 +81,7 @@ Provides-Extra: sdk
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``5.0.1.rc1``
+Release: ``5.1.0.rc2``
 
 
 `Databricks <https://databricks.com/>`__
@@ -94,7 +94,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.0.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.1.0/>`_.
 
 Installation
 ------------
@@ -111,7 +111,7 @@ Requirements
 ======================================= ==========================
 PIP package                             Version required
 ======================================= ==========================
-``apache-airflow``                      ``>=2.5.0``
+``apache-airflow``                      ``>=2.6.0``
 ``apache-airflow-providers-common-sql`` ``>=1.8.1``
 ``requests``                            ``>=2.27,<3``
 ``databricks-sql-connector``            ``>=2.0.0,!=2.9.0,<3.0.0``
@@ -138,4 +138,4 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.0.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/5.1.0/changelog.html>`_.
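The same version and requirement changes can be confirmed against an installed wheel using only the standard library. A sketch, assuming the 5.1.0rc2 wheel is installed in the current environment:

# Illustrative sketch: inspect the installed distribution's METADATA fields
# shown in the diff above.
from importlib.metadata import metadata

meta = metadata("apache-airflow-providers-databricks")
print(meta["Version"])  # expected: 5.1.0rc2
for requirement in meta.get_all("Requires-Dist"):
    print(requirement)  # includes "apache-airflow>=2.6.0.dev0"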
apache_airflow_providers_databricks-5.0.1rc1.dist-info/RECORD → apache_airflow_providers_databricks-5.1.0rc2.dist-info/RECORD
@@ -1,10 +1,10 @@
 airflow/providers/databricks/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
-airflow/providers/databricks/__init__.py,sha256=
-airflow/providers/databricks/get_provider_info.py,sha256=
+airflow/providers/databricks/__init__.py,sha256=JsaG1QZ78eKQBM9TcObkVPwxFuVOGUUkNq8GW0V6rYA,1585
+airflow/providers/databricks/get_provider_info.py,sha256=Fv4b-vDzk-i2J31xahNNm4cvVzVUkz10jGF7iJSecuc,6104
 airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/hooks/databricks.py,sha256=wq7uiSU_WbBY8yrlI2Zm6l12t9YP0MgbBMeHe0zuWe4,22542
 airflow/providers/databricks/hooks/databricks_base.py,sha256=nX-40P0XKZyFyahkbbImvWmoCQ4ic35viAYlFIupUHE,30591
-airflow/providers/databricks/hooks/databricks_sql.py,sha256=
+airflow/providers/databricks/hooks/databricks_sql.py,sha256=PVyN2VYnJR2kmPUJdxeFCChPR-B3R0kpUDDKh4slvOA,10474
 airflow/providers/databricks/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/operators/databricks.py,sha256=afG7vCIT8_JwTK7k1gXlgQDTsJj9JichlC2bP9XmHg0,40065
 airflow/providers/databricks/operators/databricks_repos.py,sha256=NUxa0jvvmK16CDKb-7Tbs3wF9XoFi1AVJlKxlsE3r4k,13092
@@ -16,7 +16,7 @@ airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvR
 airflow/providers/databricks/triggers/databricks.py,sha256=Qj9mB0bNYRY_toPEU17gxbxmPkkT3P789kCHu_T64BA,3997
 airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/databricks/utils/databricks.py,sha256=iRzRHvdFETGiFxZccOjfC8NGgDofMfP35Tqp3M5CGr0,2880
-apache_airflow_providers_databricks-5.
-apache_airflow_providers_databricks-5.
-apache_airflow_providers_databricks-5.
-apache_airflow_providers_databricks-5.
+apache_airflow_providers_databricks-5.1.0rc2.dist-info/entry_points.txt,sha256=1WxGXTFDb107eV5Zmrt3p12J4LHYk56-ZKlvpOK7vg4,106
+apache_airflow_providers_databricks-5.1.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_databricks-5.1.0rc2.dist-info/METADATA,sha256=ehi83rkKMnM6yuXZRDHumEbmc6OYB4rSOGoGvT36M68,6442
+apache_airflow_providers_databricks-5.1.0rc2.dist-info/RECORD,,
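The hashes in RECORD follow the wheel spec: the sha256 digest of each installed file, URL-safe base64 encoded with the trailing "=" padding stripped. A sketch of recomputing one entry for comparison against the lines above; the path is an example and assumes the provider files are on disk:

# Illustrative sketch: recompute a RECORD-style hash for one installed file.
import base64
import hashlib
from pathlib import Path

path = Path("airflow/providers/databricks/hooks/databricks_sql.py")  # example path
data = path.read_bytes()
digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
print(f"{path},sha256={digest},{len(data)}")  # should match the RECORD line above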
The remaining files (WHEEL and entry_points.txt) are unchanged between the two versions.