apache-airflow-providers-databricks 4.4.0.tar.gz → 4.5.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-databricks might be problematic.

Files changed (32)
  1. {apache-airflow-providers-databricks-4.4.0/apache_airflow_providers_databricks.egg-info → apache-airflow-providers-databricks-4.5.0}/PKG-INFO +6 -6
  2. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/README.rst +3 -3
  3. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/__init__.py +1 -1
  4. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/get_provider_info.py +1 -0
  5. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/databricks.py +74 -6
  6. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/databricks_base.py +10 -8
  7. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/databricks_sql.py +5 -3
  8. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/operators/databricks.py +13 -1
  9. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/operators/databricks_sql.py +2 -1
  10. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0/apache_airflow_providers_databricks.egg-info}/PKG-INFO +6 -6
  11. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/pyproject.toml +7 -5
  12. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/setup.cfg +2 -2
  13. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/setup.py +1 -1
  14. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/LICENSE +0 -0
  15. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/MANIFEST.in +0 -0
  16. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/NOTICE +0 -0
  17. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/__init__.py +0 -0
  18. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/operators/__init__.py +0 -0
  19. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  20. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/sensors/__init__.py +0 -0
  21. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  22. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  23. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/triggers/__init__.py +0 -0
  24. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/triggers/databricks.py +0 -0
  25. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/utils/__init__.py +0 -0
  26. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/utils/databricks.py +0 -0
  27. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/apache_airflow_providers_databricks.egg-info/SOURCES.txt +0 -0
  28. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/apache_airflow_providers_databricks.egg-info/dependency_links.txt +0 -0
  29. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/apache_airflow_providers_databricks.egg-info/entry_points.txt +0 -0
  30. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/apache_airflow_providers_databricks.egg-info/not-zip-safe +0 -0
  31. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/apache_airflow_providers_databricks.egg-info/requires.txt +0 -0
  32. {apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/apache_airflow_providers_databricks.egg-info/top_level.txt +0 -0
{apache-airflow-providers-databricks-4.4.0/apache_airflow_providers_databricks.egg-info → apache-airflow-providers-databricks-4.5.0}/PKG-INFO

@@ -1,14 +1,14 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-databricks
-Version: 4.4.0
+Version: 4.5.0
 Summary: Provider for Apache Airflow. Implements apache-airflow-providers-databricks package
 Home-page: https://airflow.apache.org/
 Download-URL: https://archive.apache.org/dist/airflow/providers
 Author: Apache Software Foundation
 Author-email: dev@airflow.apache.org
 License: Apache License 2.0
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/changelog.html
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -71,7 +71,7 @@ License-File: NOTICE
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``4.4.0``
+Release: ``4.5.0``
 
 
 `Databricks <https://databricks.com/>`__
@@ -84,7 +84,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/>`_.
 
 
 Installation
@@ -129,4 +129,4 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/changelog.html>`_.
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/README.rst

@@ -36,7 +36,7 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``4.4.0``
+Release: ``4.5.0``
 
 
 `Databricks <https://databricks.com/>`__
@@ -49,7 +49,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/>`_.
 
 
 Installation
@@ -94,4 +94,4 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/changelog.html>`_.
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/__init__.py

@@ -28,7 +28,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "4.4.0"
+__version__ = "4.5.0"
 
 try:
     from airflow import __version__ as airflow_version
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/get_provider_info.py

@@ -29,6 +29,7 @@ def get_provider_info():
         "description": "`Databricks <https://databricks.com/>`__\n",
         "suspended": False,
         "versions": [
+            "4.5.0",
             "4.4.0",
             "4.3.3",
             "4.3.2",
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/databricks.py

@@ -52,10 +52,19 @@ INSTALL_LIBS_ENDPOINT = ("POST", "api/2.0/libraries/install")
 UNINSTALL_LIBS_ENDPOINT = ("POST", "api/2.0/libraries/uninstall")
 
 LIST_JOBS_ENDPOINT = ("GET", "api/2.1/jobs/list")
+LIST_PIPELINES_ENDPOINT = ("GET", "/api/2.0/pipelines")
 
 WORKSPACE_GET_STATUS_ENDPOINT = ("GET", "api/2.0/workspace/get-status")
 
-RUN_LIFE_CYCLE_STATES = ["PENDING", "RUNNING", "TERMINATING", "TERMINATED", "SKIPPED", "INTERNAL_ERROR"]
+RUN_LIFE_CYCLE_STATES = [
+    "PENDING",
+    "RUNNING",
+    "TERMINATING",
+    "TERMINATED",
+    "SKIPPED",
+    "INTERNAL_ERROR",
+    "QUEUED",
+]
 
 SPARK_VERSIONS_ENDPOINT = ("GET", "api/2.0/clusters/spark-versions")
 
@@ -75,11 +84,9 @@ class RunState:
         """True if the current state is a terminal state."""
         if self.life_cycle_state not in RUN_LIFE_CYCLE_STATES:
             raise AirflowException(
-                (
-                    "Unexpected life cycle state: {}: If the state has "
-                    "been introduced recently, please check the Databricks user "
-                    "guide for troubleshooting information"
-                ).format(self.life_cycle_state)
+                f"Unexpected life cycle state: {self.life_cycle_state}: If the state has "
+                "been introduced recently, please check the Databricks user "
+                "guide for troubleshooting information"
             )
         return self.life_cycle_state in ("TERMINATED", "SKIPPED", "INTERNAL_ERROR")
 
@@ -209,6 +216,67 @@ class DatabricksHook(BaseDatabricksHook):
         else:
             return matching_jobs[0]["job_id"]
 
+    def list_pipelines(
+        self, batch_size: int = 25, pipeline_name: str | None = None, notebook_path: str | None = None
+    ) -> list[dict[str, Any]]:
+        """
+        Lists the pipelines in Databricks Delta Live Tables.
+
+        :param batch_size: The limit/batch size used to retrieve pipelines.
+        :param pipeline_name: Optional name of a pipeline to search. Cannot be combined with path.
+        :param notebook_path: Optional notebook of a pipeline to search. Cannot be combined with name.
+        :return: A list of pipelines.
+        """
+        has_more = True
+        next_token = None
+        all_pipelines = []
+        filter = None
+        if pipeline_name and notebook_path:
+            raise AirflowException("Cannot combine pipeline_name and notebook_path in one request")
+
+        if notebook_path:
+            filter = f"notebook='{notebook_path}'"
+        elif pipeline_name:
+            filter = f"name LIKE '{pipeline_name}'"
+        payload: dict[str, Any] = {
+            "max_results": batch_size,
+        }
+        if filter:
+            payload["filter"] = filter
+
+        while has_more:
+            if next_token:
+                payload["page_token"] = next_token
+            response = self._do_api_call(LIST_PIPELINES_ENDPOINT, payload)
+            pipelines = response.get("statuses", [])
+            all_pipelines += pipelines
+            if "next_page_token" in response:
+                next_token = response["next_page_token"]
+            else:
+                has_more = False
+
+        return all_pipelines
+
+    def find_pipeline_id_by_name(self, pipeline_name: str) -> str | None:
+        """
+        Finds pipeline id by its name. If multiple pipelines with the same name, raises AirflowException.
+
+        :param pipeline_name: The name of the pipeline to look up.
+        :return: The pipeline_id as a GUID string or None if no pipeline was found.
+        """
+        matching_pipelines = self.list_pipelines(pipeline_name=pipeline_name)
+
+        if len(matching_pipelines) > 1:
+            raise AirflowException(
+                f"There are more than one job with name {pipeline_name}. "
+                "Please delete duplicated pipelines first"
+            )
+
+        if not pipeline_name:
+            return None
+        else:
+            return matching_pipelines[0]["pipeline_id"]
+
     def get_run_page_url(self, run_id: int) -> str:
         """
         Retrieves run_page_url.
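
The two new hook methods above page through the Delta Live Tables pipelines API and resolve a pipeline name to its id. A minimal usage sketch, assuming an Airflow Databricks connection exists; the connection id and pipeline names below are placeholders, not part of this diff:

from airflow.providers.databricks.hooks.databricks import DatabricksHook

# Hypothetical connection id; any configured Databricks connection works here.
hook = DatabricksHook(databricks_conn_id="databricks_default")

# Page through DLT pipelines whose name matches a LIKE pattern.
pipelines = hook.list_pipelines(batch_size=25, pipeline_name="retail_%")
for status in pipelines:
    print(status["pipeline_id"], status.get("name"))

# Resolve a single pipeline name to its id; raises AirflowException on duplicates.
pipeline_id = hook.find_pipeline_id_by_name("retail_bronze_to_silver")
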
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/databricks_base.py

@@ -28,7 +28,7 @@ import copy
 import platform
 import time
 from functools import cached_property
-from typing import Any
+from typing import TYPE_CHECKING, Any
 from urllib.parse import urlsplit
 
 import aiohttp
@@ -48,9 +48,11 @@ from tenacity import (
 from airflow import __version__
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
-from airflow.models import Connection
 from airflow.providers_manager import ProvidersManager
 
+if TYPE_CHECKING:
+    from airflow.models import Connection
+
 # https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token#--get-an-azure-active-directory-access-token
 # https://docs.microsoft.com/en-us/graph/deployments#app-registration-and-token-service-root-endpoints
 AZURE_DEFAULT_AD_ENDPOINT = "https://login.microsoftonline.com"
@@ -121,12 +123,12 @@ class BaseDatabricksHook(BaseHook):
             self.retry_args["retry"] = retry_if_exception(self._retryable_error)
             self.retry_args["after"] = my_after_func
         else:
-            self.retry_args = dict(
-                stop=stop_after_attempt(self.retry_limit),
-                wait=wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
-                retry=retry_if_exception(self._retryable_error),
-                after=my_after_func,
-            )
+            self.retry_args = {
+                "stop": stop_after_attempt(self.retry_limit),
+                "wait": wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
+                "retry": retry_if_exception(self._retryable_error),
+                "after": my_after_func,
+            }
 
     @cached_property
     def databricks_conn(self) -> Connection:
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/hooks/databricks_sql.py

@@ -18,15 +18,17 @@ from __future__ import annotations
 
 from contextlib import closing
 from copy import copy
-from typing import Any, Callable, Iterable, Mapping, TypeVar, overload
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, TypeVar, overload
 
 from databricks import sql  # type: ignore[attr-defined]
-from databricks.sql.client import Connection  # type: ignore[attr-defined]
 
 from airflow.exceptions import AirflowException
 from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
 from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
 
+if TYPE_CHECKING:
+    from databricks.sql.client import Connection
+
 LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "api/2.0/sql/endpoints")
 
 
@@ -81,7 +83,7 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
 
     def _get_extra_config(self) -> dict[str, Any | None]:
         extra_params = copy(self.databricks_conn.extra_dejson)
-        for arg in ["http_path", "session_configuration"] + self.extra_parameters:
+        for arg in ["http_path", "session_configuration", *self.extra_parameters]:
             if arg in extra_params:
                 del extra_params[arg]
 
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/operators/databricks.py

@@ -21,7 +21,6 @@ from __future__ import annotations
 import time
 import warnings
 from functools import cached_property
-from logging import Logger
 from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.configuration import conf
@@ -32,6 +31,8 @@ from airflow.providers.databricks.triggers.databricks import DatabricksExecution
 from airflow.providers.databricks.utils.databricks import normalise_json_content, validate_trigger_event
 
 if TYPE_CHECKING:
+    from logging import Logger
+
     from airflow.models.taskinstancekey import TaskInstanceKey
     from airflow.utils.context import Context
 
@@ -364,6 +365,8 @@ class DatabricksSubmitRunOperator(BaseOperator):
 
         if "dbt_task" in self.json and "git_source" not in self.json:
             raise AirflowException("git_source is required for dbt_task")
+        if pipeline_task is not None and "pipeline_id" in pipeline_task and "pipeline_name" in pipeline_task:
+            raise AirflowException("'pipeline_name' is not allowed in conjunction with 'pipeline_id'")
 
         # This variable will be used in case our task gets killed.
         self.run_id: int | None = None
@@ -383,6 +386,15 @@ class DatabricksSubmitRunOperator(BaseOperator):
         )
 
     def execute(self, context: Context):
+        if (
+            "pipeline_task" in self.json
+            and self.json["pipeline_task"].get("pipeline_id") is None
+            and self.json["pipeline_task"].get("pipeline_name")
+        ):
+            # If pipeline_id is not provided, we need to fetch it from the pipeline_name
+            pipeline_name = self.json["pipeline_task"]["pipeline_name"]
+            self.json["pipeline_task"]["pipeline_id"] = self._hook.get_pipeline_id(pipeline_name)
+            del self.json["pipeline_task"]["pipeline_name"]
         json_normalised = normalise_json_content(self.json)
         self.run_id = self._hook.submit_run(json_normalised)
        if self.deferrable:
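
With the ``execute`` change above, a ``pipeline_task`` can reference an existing DLT pipeline by name instead of by id; the operator resolves the id through the hook and drops ``pipeline_name`` from the payload before calling the Runs Submit API. A rough sketch of how this might look in a DAG; the task id, connection id, and pipeline name are illustrative only:

from airflow.providers.databricks.operators.databricks import DatabricksSubmitRunOperator

# Hypothetical task: submit a one-time run of an existing DLT pipeline by name.
run_dlt_pipeline = DatabricksSubmitRunOperator(
    task_id="run_dlt_pipeline",
    databricks_conn_id="databricks_default",
    pipeline_task={"pipeline_name": "retail_bronze_to_silver"},
)
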
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/airflow/providers/databricks/operators/databricks_sql.py

@@ -22,7 +22,6 @@ import csv
 import json
 from typing import TYPE_CHECKING, Any, Sequence
 
-from databricks.sql.types import Row
 from databricks.sql.utils import ParamEscaper
 
 from airflow.exceptions import AirflowException
@@ -31,6 +30,8 @@ from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
 
 if TYPE_CHECKING:
+    from databricks.sql.types import Row
+
     from airflow.utils.context import Context
 
 
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0/apache_airflow_providers_databricks.egg-info}/PKG-INFO

@@ -1,14 +1,14 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-databricks
-Version: 4.4.0
+Version: 4.5.0
 Summary: Provider for Apache Airflow. Implements apache-airflow-providers-databricks package
 Home-page: https://airflow.apache.org/
 Download-URL: https://archive.apache.org/dist/airflow/providers
 Author: Apache Software Foundation
 Author-email: dev@airflow.apache.org
 License: Apache License 2.0
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/changelog.html
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -71,7 +71,7 @@ License-File: NOTICE
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``4.4.0``
+Release: ``4.5.0``
 
 
 `Databricks <https://databricks.com/>`__
@@ -84,7 +84,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/>`_.
 
 
 Installation
@@ -129,4 +129,4 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/changelog.html>`_.
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/pyproject.toml

@@ -16,7 +16,7 @@
 # under the License.
 [tool.black]
 line-length = 110
-target-version = ['py37', 'py38', 'py39', 'py310']
+target-version = ['py38', 'py39', 'py310', 'py311']
 
 # Editable installs are currently broken using setuptools 64.0.0 and above. The problem is tracked in
 # https://github.com/pypa/setuptools/issues/3548. We're also discussing how we could potentially fix
@@ -28,10 +28,8 @@ target-version = ['py37', 'py38', 'py39', 'py310']
 requires = ['setuptools==67.2.0']
 build-backend = "setuptools.build_meta"
 
-[project]
-requires-python = ">=3.8"
-
 [tool.ruff]
+target-version = "py38"
 typing-modules = ["airflow.typing_compat"]
 line-length = 110
 extend-exclude = [
@@ -62,7 +60,9 @@ extend-select = [
     "D402",
     "D403",
     "D412",
-    "D419"
+    "D419",
+    "TCH001",  # typing-only-first-party-import
+    "TCH002",  # typing-only-third-party-import
 ]
 extend-ignore = [
     "D203",
@@ -169,3 +169,5 @@ exclude_also = [
     "@(typing(_extensions)?\\.)?overload",
     "if TYPE_CHECKING:"
 ]
+[tool.ruff.flake8-type-checking]
+exempt-modules = ["typing", "typing_extensions"]
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/setup.cfg

@@ -27,8 +27,8 @@ classifiers =
     Programming Language :: Python :: 3.11
     Topic :: System :: Monitoring
 project_urls =
-    Documentation=https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/
-    Changelog=https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.4.0/changelog.html
+    Documentation=https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/
+    Changelog=https://airflow.apache.org/docs/apache-airflow-providers-databricks/4.5.0/changelog.html
     Bug Tracker=https://github.com/apache/airflow/issues
     Source Code=https://github.com/apache/airflow
     Slack Chat=https://s.apache.org/airflow-slack
{apache-airflow-providers-databricks-4.4.0 → apache-airflow-providers-databricks-4.5.0}/setup.py

@@ -26,7 +26,7 @@
 
 from setuptools import find_namespace_packages, setup
 
-version = "4.4.0"
+version = "4.5.0"
 
 
 def do_setup():