apache-airflow-providers-databricks 6.11.0rc1__py3-none-any.whl → 6.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apache-airflow-providers-databricks might be problematic.
- airflow/providers/databricks/__init__.py +1 -1
- airflow/providers/databricks/get_provider_info.py +2 -1
- airflow/providers/databricks/hooks/databricks_base.py +13 -3
- {apache_airflow_providers_databricks-6.11.0rc1.dist-info → apache_airflow_providers_databricks-6.12.0.dist-info}/METADATA +8 -8
- {apache_airflow_providers_databricks-6.11.0rc1.dist-info → apache_airflow_providers_databricks-6.12.0.dist-info}/RECORD +7 -7
- {apache_airflow_providers_databricks-6.11.0rc1.dist-info → apache_airflow_providers_databricks-6.12.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_databricks-6.11.0rc1.dist-info → apache_airflow_providers_databricks-6.12.0.dist-info}/entry_points.txt +0 -0
airflow/providers/databricks/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "6.
+__version__ = "6.12.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
 "2.8.0"
airflow/providers/databricks/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
 "name": "Databricks",
 "description": "`Databricks <https://databricks.com/>`__\n",
 "state": "ready",
-"source-date-epoch":
+"source-date-epoch": 1730012521,
 "versions": [
+"6.12.0",
 "6.11.0",
 "6.10.0",
 "6.9.0",
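The source-date-epoch value is a Unix timestamp (the SOURCE_DATE_EPOCH convention used for reproducible builds). As a quick, illustrative sketch that is not part of the provider itself, it can be decoded like this:

# Decode the provider's source-date-epoch (seconds since the Unix epoch).
from datetime import datetime, timezone

print(datetime.fromtimestamp(1730012521, tz=timezone.utc).isoformat())
# -> 2024-10-27T07:02:01+00:00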
airflow/providers/databricks/hooks/databricks_base.py

@@ -28,12 +28,14 @@ from __future__ import annotations
 import copy
 import platform
 import time
+from asyncio.exceptions import TimeoutError
 from functools import cached_property
 from typing import TYPE_CHECKING, Any
 from urllib.parse import urlsplit

 import aiohttp
 import requests
+from aiohttp.client_exceptions import ClientConnectorError
 from requests import PreparedRequest, exceptions as requests_exceptions
 from requests.auth import AuthBase, HTTPBasicAuth
 from requests.exceptions import JSONDecodeError
@@ -515,6 +517,11 @@ class BaseDatabricksHook(BaseHook):
 def _log_request_error(self, attempt_num: int, error: str) -> None:
 self.log.error("Attempt %s API Request to Databricks failed with reason: %s", attempt_num, error)

+def _endpoint_url(self, endpoint):
+port = f":{self.databricks_conn.port}" if self.databricks_conn.port else ""
+schema = self.databricks_conn.schema or "https"
+return f"{schema}://{self.host}{port}/{endpoint}"
+
 def _do_api_call(
 self,
 endpoint_info: tuple[str, str],
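The new _endpoint_url helper builds the request URL from the Databricks connection's schema and port instead of assuming a plain https host. A minimal standalone sketch of the same logic, using made-up connection values (the host, port, and endpoint below are hypothetical):

# Illustrative sketch, not the hook itself: use the connection's schema/port
# when set, and default the schema to "https" when unset.
host = "adb-1234567890123456.7.azuredatabricks.net"  # hypothetical workspace host
conn_port = 443        # Connection.port (may be None/unset)
conn_schema = None     # Connection.schema (may be None/unset)

port = f":{conn_port}" if conn_port else ""
schema = conn_schema or "https"
endpoint = "api/2.1/jobs/runs/get"
print(f"{schema}://{host}{port}/{endpoint}")
# -> https://adb-1234567890123456.7.azuredatabricks.net:443/api/2.1/jobs/runs/get

The next two hunks switch both the synchronous and the async call sites over to this helper.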
@@ -533,7 +540,7 @@ class BaseDatabricksHook(BaseHook):
 method, endpoint = endpoint_info

 # TODO: get rid of explicit 'api/' in the endpoint specification
-url =
+url = self._endpoint_url(endpoint)

 aad_headers = self._get_aad_headers()
 headers = {**self.user_agent_header, **aad_headers}
@@ -599,7 +606,7 @@ class BaseDatabricksHook(BaseHook):
 """
 method, endpoint = endpoint_info

-url =
+url = self._endpoint_url(endpoint)

 aad_headers = await self._a_get_aad_headers()
 headers = {**self.user_agent_header, **aad_headers}
@@ -638,7 +645,7 @@ class BaseDatabricksHook(BaseHook):
 headers={**headers, **self.user_agent_header},
 timeout=self.timeout_seconds,
 ) as response:
-self.log.debug("Response Status Code: %s", response.
+self.log.debug("Response Status Code: %s", response.status)
 self.log.debug("Response text: %s", response.text)
 response.raise_for_status()
 return await response.json()
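The corrected debug line follows aiohttp's response API, where the HTTP status is the integer attribute `status` (unlike `requests.Response.status_code`). A small self-contained example of the relevant attributes; the URL here is a placeholder, not a Databricks endpoint:

# Standalone aiohttp sketch illustrating response.status.
import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get("https://example.com") as response:
            print("Response Status Code:", response.status)  # int, e.g. 200
            body = await response.text()  # note: text() is a coroutine in aiohttp
            response.raise_for_status()
            print(len(body))

asyncio.run(main())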
@@ -678,6 +685,9 @@ class BaseDatabricksHook(BaseHook):
 if exception.status >= 500 or exception.status == 429:
 return True

+if isinstance(exception, (ClientConnectorError, TimeoutError)):
+return True
+
 return False

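With the two imports added earlier in this file, connection failures and asyncio timeouts are now treated as retryable alongside HTTP 5xx and 429 responses. A minimal sketch of such a predicate in the spirit of this change (not the hook's exact wiring, which is only partially shown in the hunk above):

# Standalone sketch: classify which exceptions are worth retrying.
from asyncio.exceptions import TimeoutError
from aiohttp.client_exceptions import ClientConnectorError, ClientResponseError

def is_retryable(exc: BaseException) -> bool:
    # HTTP-level failures worth retrying: server errors and rate limiting.
    if isinstance(exc, ClientResponseError) and (exc.status >= 500 or exc.status == 429):
        return True
    # Transport-level failures added in 6.12.0: connection errors and timeouts.
    return isinstance(exc, (ClientConnectorError, TimeoutError))

A ClientConnectorError typically surfaces when the Databricks host is momentarily unreachable, which is exactly the transient case retries are meant to cover.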
{apache_airflow_providers_databricks-6.11.0rc1.dist-info → apache_airflow_providers_databricks-6.12.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-databricks
-Version: 6.
+Version: 6.12.0
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,8 +21,8 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: aiohttp>=3.9.2, <4
-Requires-Dist: apache-airflow-providers-common-sql>=1.10.
-Requires-Dist: apache-airflow>=2.8.
+Requires-Dist: apache-airflow-providers-common-sql>=1.10.0
+Requires-Dist: apache-airflow>=2.8.0
 Requires-Dist: databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0
 Requires-Dist: mergedeep>=1.3.4
 Requires-Dist: pandas>=1.5.3,<2.2;python_version<"3.9"
@@ -33,8 +33,8 @@ Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.12.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.12.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -87,7 +87,7 @@ Provides-Extra: sdk

 Package ``apache-airflow-providers-databricks``

-Release: ``6.
+Release: ``6.12.0``


 `Databricks <https://databricks.com/>`__
@@ -100,7 +100,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.12.0/>`_.

 Installation
 ------------
@@ -148,4 +148,4 @@ Dependent package
 ============================================================================================================ ==============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.12.0/changelog.html>`_.
{apache_airflow_providers_databricks-6.11.0rc1.dist-info → apache_airflow_providers_databricks-6.12.0.dist-info}/RECORD

@@ -1,9 +1,9 @@
 airflow/providers/databricks/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/databricks/__init__.py,sha256=
-airflow/providers/databricks/get_provider_info.py,sha256=
+airflow/providers/databricks/__init__.py,sha256=EsWUx_C4o41BREetpNY3zmYpNevlPE0GU-KRlil7OZ0,1498
+airflow/providers/databricks/get_provider_info.py,sha256=tzdX7ldSJIxrtIweECNff54NTDyjkuNohqkZGKI4hYk,7762
 airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/hooks/databricks.py,sha256=-rgK_sMc2_BjTvSvMh1Md3XanVayOmcxijQfs1vRCPw,24774
-airflow/providers/databricks/hooks/databricks_base.py,sha256=
+airflow/providers/databricks/hooks/databricks_base.py,sha256=WPaasY9rE5cjCiPogVnFg8KBmS4eC2wEp6zuGPNnBGI,30072
 airflow/providers/databricks/hooks/databricks_sql.py,sha256=sRT5gryPrFhCdjcXLzQ73OxtWUSLINwwDeBJMwRZERY,12569
 airflow/providers/databricks/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/operators/databricks.py,sha256=hx0KXJBEWgOaNZOcObi1jVUQ04B7jinDS0EDeITpvsk,69190
@@ -19,7 +19,7 @@ airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvR
 airflow/providers/databricks/triggers/databricks.py,sha256=xk9aEfdZnG33a4WSFfg6SZF4FfROV8B4HOyBYBvZR_Q,5104
 airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/databricks/utils/databricks.py,sha256=EICTPZTD0R0dy9UGKgv8srkrBTgzCQrcYNL9oBWuhzk,2890
-apache_airflow_providers_databricks-6.
-apache_airflow_providers_databricks-6.
-apache_airflow_providers_databricks-6.
-apache_airflow_providers_databricks-6.
+apache_airflow_providers_databricks-6.12.0.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
+apache_airflow_providers_databricks-6.12.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_databricks-6.12.0.dist-info/METADATA,sha256=1eQr1BRna5DiPBXYV4JCSeXhyCiSzX1TYQDdLKR912k,7026
+apache_airflow_providers_databricks-6.12.0.dist-info/RECORD,,