apache-airflow-providers-databricks 7.8.0__py3-none-any.whl → 7.8.2rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. airflow/providers/databricks/__init__.py +1 -1
  2. airflow/providers/databricks/exceptions.py +1 -1
  3. airflow/providers/databricks/hooks/databricks.py +40 -40
  4. airflow/providers/databricks/hooks/databricks_base.py +2 -1
  5. airflow/providers/databricks/hooks/databricks_sql.py +1 -1
  6. airflow/providers/databricks/operators/databricks.py +7 -8
  7. airflow/providers/databricks/operators/databricks_repos.py +1 -2
  8. airflow/providers/databricks/operators/databricks_sql.py +1 -2
  9. airflow/providers/databricks/operators/databricks_workflow.py +1 -2
  10. airflow/providers/databricks/plugins/databricks_workflow.py +10 -8
  11. airflow/providers/databricks/sensors/databricks.py +1 -2
  12. airflow/providers/databricks/sensors/databricks_partition.py +1 -2
  13. airflow/providers/databricks/sensors/databricks_sql.py +1 -2
  14. airflow/providers/databricks/utils/databricks.py +1 -1
  15. airflow/providers/databricks/utils/mixins.py +3 -7
  16. airflow/providers/databricks/utils/openlineage.py +25 -63
  17. {apache_airflow_providers_databricks-7.8.0.dist-info → apache_airflow_providers_databricks-7.8.2rc1.dist-info}/METADATA +12 -12
  18. apache_airflow_providers_databricks-7.8.2rc1.dist-info/RECORD +31 -0
  19. apache_airflow_providers_databricks-7.8.0.dist-info/RECORD +0 -31
  20. {apache_airflow_providers_databricks-7.8.0.dist-info → apache_airflow_providers_databricks-7.8.2rc1.dist-info}/WHEEL +0 -0
  21. {apache_airflow_providers_databricks-7.8.0.dist-info → apache_airflow_providers_databricks-7.8.2rc1.dist-info}/entry_points.txt +0 -0
  22. {apache_airflow_providers_databricks-7.8.0.dist-info → apache_airflow_providers_databricks-7.8.2rc1.dist-info}/licenses/LICENSE +0 -0
  23. {apache_airflow_providers_databricks-7.8.0.dist-info → apache_airflow_providers_databricks-7.8.2rc1.dist-info}/licenses/NOTICE +0 -0
airflow/providers/databricks/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "7.8.0"
+ __version__ = "7.8.2"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.11.0"
airflow/providers/databricks/exceptions.py
@@ -21,7 +21,7 @@

  from __future__ import annotations

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException


  class DatabricksSqlExecutionError(AirflowException):
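
Note: this import swap repeats across nearly every module in the release. ``airflow.providers.common.compat.sdk`` is a compatibility facade that resolves names like ``AirflowException`` from the Airflow 3 task SDK when it is available and falls back to the legacy core locations on Airflow 2. A minimal sketch of that shim pattern (the module path in the ``try`` branch is hypothetical; the real compat module covers many more names):

    # Hypothetical simplification of a compat shim like airflow.providers.common.compat.sdk
    try:
        from airflow.sdk.exceptions import AirflowException  # hypothetical Airflow 3 location
    except ImportError:
        from airflow.exceptions import AirflowException  # legacy Airflow 2 location

    __all__ = ["AirflowException"]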
airflow/providers/databricks/hooks/databricks.py
@@ -20,9 +20,9 @@ Databricks hook.

  This hook enables the submitting and running of jobs to the Databricks platform. Internally the
  operators talk to the
- ``api/2.1/jobs/run-now``
+ ``api/2.2/jobs/run-now``
  `endpoint <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow>`_
- or the ``api/2.1/jobs/runs/submit``
+ or the ``api/2.2/jobs/runs/submit``
  `endpoint <https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit>`_.
  """

@@ -34,40 +34,40 @@ from typing import Any

  from requests import exceptions as requests_exceptions

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook

- GET_CLUSTER_ENDPOINT = ("GET", "2.0/clusters/get")
- RESTART_CLUSTER_ENDPOINT = ("POST", "2.0/clusters/restart")
- START_CLUSTER_ENDPOINT = ("POST", "2.0/clusters/start")
- TERMINATE_CLUSTER_ENDPOINT = ("POST", "2.0/clusters/delete")
+ GET_CLUSTER_ENDPOINT = ("GET", "2.2/clusters/get")
+ RESTART_CLUSTER_ENDPOINT = ("POST", "2.2/clusters/restart")
+ START_CLUSTER_ENDPOINT = ("POST", "2.2/clusters/start")
+ TERMINATE_CLUSTER_ENDPOINT = ("POST", "2.2/clusters/delete")

- CREATE_ENDPOINT = ("POST", "2.1/jobs/create")
- RESET_ENDPOINT = ("POST", "2.1/jobs/reset")
- UPDATE_ENDPOINT = ("POST", "2.1/jobs/update")
- RUN_NOW_ENDPOINT = ("POST", "2.1/jobs/run-now")
- SUBMIT_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/submit")
- GET_RUN_ENDPOINT = ("GET", "2.1/jobs/runs/get")
- CANCEL_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/cancel")
- DELETE_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/delete")
- REPAIR_RUN_ENDPOINT = ("POST", "2.1/jobs/runs/repair")
- OUTPUT_RUNS_JOB_ENDPOINT = ("GET", "2.1/jobs/runs/get-output")
- CANCEL_ALL_RUNS_ENDPOINT = ("POST", "2.1/jobs/runs/cancel-all")
+ CREATE_ENDPOINT = ("POST", "2.2/jobs/create")
+ RESET_ENDPOINT = ("POST", "2.2/jobs/reset")
+ UPDATE_ENDPOINT = ("POST", "2.2/jobs/update")
+ RUN_NOW_ENDPOINT = ("POST", "2.2/jobs/run-now")
+ SUBMIT_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/submit")
+ GET_RUN_ENDPOINT = ("GET", "2.2/jobs/runs/get")
+ CANCEL_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/cancel")
+ DELETE_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/delete")
+ REPAIR_RUN_ENDPOINT = ("POST", "2.2/jobs/runs/repair")
+ OUTPUT_RUNS_JOB_ENDPOINT = ("GET", "2.2/jobs/runs/get-output")
+ CANCEL_ALL_RUNS_ENDPOINT = ("POST", "2.2/jobs/runs/cancel-all")

- INSTALL_LIBS_ENDPOINT = ("POST", "2.0/libraries/install")
- UNINSTALL_LIBS_ENDPOINT = ("POST", "2.0/libraries/uninstall")
- UPDATE_REPO_ENDPOINT = ("PATCH", "2.0/repos/")
- DELETE_REPO_ENDPOINT = ("DELETE", "2.0/repos/")
- CREATE_REPO_ENDPOINT = ("POST", "2.0/repos")
+ INSTALL_LIBS_ENDPOINT = ("POST", "2.2/libraries/install")
+ UNINSTALL_LIBS_ENDPOINT = ("POST", "2.2/libraries/uninstall")
+ UPDATE_REPO_ENDPOINT = ("PATCH", "2.2/repos/")
+ DELETE_REPO_ENDPOINT = ("DELETE", "2.2/repos/")
+ CREATE_REPO_ENDPOINT = ("POST", "2.2/repos")

- LIST_JOBS_ENDPOINT = ("GET", "2.1/jobs/list")
- LIST_PIPELINES_ENDPOINT = ("GET", "2.0/pipelines")
- LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "2.0/sql/endpoints")
+ LIST_JOBS_ENDPOINT = ("GET", "2.2/jobs/list")
+ LIST_PIPELINES_ENDPOINT = ("GET", "2.2/pipelines")
+ LIST_SQL_ENDPOINTS_ENDPOINT = ("GET", "2.2/sql/endpoints")

- WORKSPACE_GET_STATUS_ENDPOINT = ("GET", "2.0/workspace/get-status")
+ WORKSPACE_GET_STATUS_ENDPOINT = ("GET", "2.2/workspace/get-status")

- SPARK_VERSIONS_ENDPOINT = ("GET", "2.0/clusters/spark-versions")
- SQL_STATEMENTS_ENDPOINT = "2.0/sql/statements"
+ SPARK_VERSIONS_ENDPOINT = ("GET", "2.2/clusters/spark-versions")
+ SQL_STATEMENTS_ENDPOINT = "2.2/sql/statements"


  class RunLifeCycleState(Enum):
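
Each constant above is a ``(HTTP method, versioned path)`` pair that the hook's internal ``_do_api_call`` helper expands into a full workspace URL, so the jump from ``2.0``/``2.1`` to ``2.2`` is transparent to DAG code that goes through the public hook methods. A minimal usage sketch (connection id and payload are placeholders):

    from airflow.providers.databricks.hooks.databricks import DatabricksHook

    hook = DatabricksHook(databricks_conn_id="databricks_default")  # placeholder conn id
    # submit_run() now posts to api/2.2/jobs/runs/submit under the hood
    run_id = hook.submit_run({"run_name": "example", "tasks": []})  # placeholder payload
    state = hook.get_run_state(run_id)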
@@ -293,7 +293,7 @@ class DatabricksHook(BaseDatabricksHook):

      def create_job(self, json: dict) -> int:
          """
-         Call the ``api/2.1/jobs/create`` endpoint.
+         Call the ``api/2.2/jobs/create`` endpoint.

          :param json: The data used in the body of the request to the ``create`` endpoint.
          :return: the job_id as an int
@@ -303,7 +303,7 @@ class DatabricksHook(BaseDatabricksHook):

      def reset_job(self, job_id: str, json: dict) -> None:
          """
-         Call the ``api/2.1/jobs/reset`` endpoint.
+         Call the ``api/2.2/jobs/reset`` endpoint.

          :param json: The data used in the new_settings of the request to the ``reset`` endpoint.
          """
@@ -321,7 +321,7 @@ class DatabricksHook(BaseDatabricksHook):

      def update_job(self, job_id: str, json: dict) -> None:
          """
-         Call the ``api/2.1/jobs/update`` endpoint.
+         Call the ``api/2.2/jobs/update`` endpoint.

          :param job_id: The id of the job to update.
          :param json: The data used in the new_settings of the request to the ``update`` endpoint.
@@ -330,7 +330,7 @@ class DatabricksHook(BaseDatabricksHook):

      def run_now(self, json: dict) -> int:
          """
-         Call the ``api/2.1/jobs/run-now`` endpoint.
+         Call the ``api/2.2/jobs/run-now`` endpoint.

          :param json: The data used in the body of the request to the ``run-now`` endpoint.
          :return: the run_id as an int
@@ -340,7 +340,7 @@ class DatabricksHook(BaseDatabricksHook):

      def submit_run(self, json: dict) -> int:
          """
-         Call the ``api/2.1/jobs/runs/submit`` endpoint.
+         Call the ``api/2.2/jobs/runs/submit`` endpoint.

          :param json: The data used in the body of the request to the ``submit`` endpoint.
          :return: the run_id as an int
@@ -385,9 +385,9 @@ class DatabricksHook(BaseDatabricksHook):
                  all_jobs += [j for j in jobs if j["settings"]["name"] == job_name]
              else:
                  all_jobs += jobs
-             has_more = response.get("has_more", False)
-             if has_more:
-                 page_token = response.get("next_page_token", "")
+             # issue-59189: API v2.2 removes "has_more" field
+             page_token = response.get("next_page_token", "")
+             has_more = bool(page_token)

          return all_jobs
@@ -717,7 +717,7 @@ class DatabricksHook(BaseDatabricksHook):
          """
          Install libraries on the cluster.

-         Utility function to call the ``2.0/libraries/install`` endpoint.
+         Utility function to call the ``2.2/libraries/install`` endpoint.

          :param json: json dictionary containing cluster_id and an array of library
          """
@@ -727,7 +727,7 @@ class DatabricksHook(BaseDatabricksHook):
          """
          Uninstall libraries on the cluster.

-         Utility function to call the ``2.0/libraries/uninstall`` endpoint.
+         Utility function to call the ``2.2/libraries/uninstall`` endpoint.

          :param json: json dictionary containing cluster_id and an array of library
          """
@@ -790,7 +790,7 @@ class DatabricksHook(BaseDatabricksHook):
          :param json: payload
          :return: json containing permission specification
          """
-         return self._do_api_call(("PATCH", f"2.0/permissions/jobs/{job_id}"), json)
+         return self._do_api_call(("PATCH", f"2.2/permissions/jobs/{job_id}"), json)

      def post_sql_statement(self, json: dict[str, Any]) -> str:
          """
airflow/providers/databricks/hooks/databricks_base.py
@@ -49,7 +49,8 @@ from tenacity import (
  )

  from airflow import __version__
- from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
+ from airflow.exceptions import AirflowOptionalProviderFeatureException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers_manager import ProvidersManager

  try:
airflow/providers/databricks/hooks/databricks_sql.py
@@ -34,7 +34,7 @@ from databricks import sql
  from databricks.sql.types import Row
  from sqlalchemy.engine import URL

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.common.sql.hooks.handlers import return_single_query_results
  from airflow.providers.common.sql.hooks.sql import DbApiHook
  from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
airflow/providers/databricks/operators/databricks.py
@@ -27,8 +27,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator, BaseOperatorLink, XCom
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, BaseOperatorLink, XCom
  from airflow.providers.databricks.hooks.databricks import (
      DatabricksHook,
      RunLifeCycleState,
@@ -261,7 +260,7 @@ class DatabricksCreateJobsOperator(BaseOperator):
          https://docs.databricks.com/api/workspace/jobs/reset

      :param json: A JSON object containing API parameters which will be passed
-         directly to the ``api/2.1/jobs/create`` endpoint. The other named parameters
+         directly to the ``api/2.2/jobs/create`` endpoint. The other named parameters
          (i.e. ``name``, ``tags``, ``tasks``, etc.) to this operator will
          be merged with this json dictionary if they are provided.
          If there are conflicts during the merge, the named parameters will
@@ -392,7 +391,7 @@ class DatabricksCreateJobsOperator(BaseOperator):

  class DatabricksSubmitRunOperator(BaseOperator):
      """
-     Submits a Spark job run to Databricks using the api/2.1/jobs/runs/submit API endpoint.
+     Submits a Spark job run to Databricks using the api/2.2/jobs/runs/submit API endpoint.

      See: https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit

@@ -407,7 +406,7 @@ class DatabricksSubmitRunOperator(BaseOperator):
      .. seealso::
          https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit
      :param json: A JSON object containing API parameters which will be passed
-         directly to the ``api/2.1/jobs/runs/submit`` endpoint. The other named parameters
+         directly to the ``api/2.2/jobs/runs/submit`` endpoint. The other named parameters
          (i.e. ``spark_jar_task``, ``notebook_task``..) to this operator will
          be merged with this json dictionary if they are provided.
          If there are conflicts during the merge, the named parameters will
@@ -645,14 +644,14 @@ class DatabricksSubmitRunOperator(BaseOperator):

  class DatabricksRunNowOperator(BaseOperator):
      """
-     Runs an existing Spark job run to Databricks using the api/2.1/jobs/run-now API endpoint.
+     Runs an existing Spark job run to Databricks using the api/2.2/jobs/run-now API endpoint.

      See: https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow

      There are two ways to instantiate this operator.

      In the first way, you can take the JSON payload that you typically use
-     to call the ``api/2.1/jobs/run-now`` endpoint and pass it directly
+     to call the ``api/2.2/jobs/run-now`` endpoint and pass it directly
      to our ``DatabricksRunNowOperator`` through the ``json`` parameter.
      For example ::

@@ -730,7 +729,7 @@ class DatabricksRunNowOperator(BaseOperator):
          https://docs.databricks.com/en/workflows/jobs/settings.html#add-parameters-for-all-job-tasks

      :param json: A JSON object containing API parameters which will be passed
-         directly to the ``api/2.1/jobs/run-now`` endpoint. The other named parameters
+         directly to the ``api/2.2/jobs/run-now`` endpoint. The other named parameters
          (i.e. ``notebook_params``, ``spark_submit_params``..) to this operator will
          be merged with this json dictionary if they are provided.
          If there are conflicts during the merge, the named parameters will
airflow/providers/databricks/operators/databricks_repos.py
@@ -25,8 +25,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING
  from urllib.parse import urlsplit

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
  from airflow.providers.databricks.hooks.databricks import DatabricksHook

  if TYPE_CHECKING:
airflow/providers/databricks/operators/databricks_sql.py
@@ -27,8 +27,7 @@ from typing import TYPE_CHECKING, Any, ClassVar

  from databricks.sql.utils import ParamEscaper

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
  from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

airflow/providers/databricks/operators/databricks_workflow.py
@@ -25,8 +25,7 @@ from typing import TYPE_CHECKING, Any

  from mergedeep import merge

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseOperator, TaskGroup
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperator, TaskGroup
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunLifeCycleState
  from airflow.providers.databricks.plugins.databricks_workflow import (
      WorkflowJobRepairAllFailedLink,
airflow/providers/databricks/plugins/databricks_workflow.py
@@ -20,11 +20,13 @@ from __future__ import annotations
  from typing import TYPE_CHECKING, Any
  from urllib.parse import unquote

- from airflow.exceptions import AirflowException, TaskInstanceNotFound
+ from sqlalchemy import select
+
+ from airflow.exceptions import TaskInstanceNotFound
  from airflow.models.dagrun import DagRun
  from airflow.models.taskinstance import TaskInstance, TaskInstanceKey, clear_task_instances
  from airflow.plugins_manager import AirflowPlugin
- from airflow.providers.common.compat.sdk import BaseOperatorLink, TaskGroup, XCom
+ from airflow.providers.common.compat.sdk import AirflowException, BaseOperatorLink, TaskGroup, XCom
  from airflow.providers.databricks.hooks.databricks import DatabricksHook
  from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.utils.log.logging_mixin import LoggingMixin
@@ -143,7 +145,9 @@ if not AIRFLOW_V_3_0_PLUS:
          if not session:
              raise AirflowException("Session not provided.")

-         return session.query(DagRun).filter(DagRun.dag_id == dag.dag_id, DagRun.run_id == run_id).one()
+         return session.scalars(
+             select(DagRun).where(DagRun.dag_id == dag.dag_id, DagRun.run_id == run_id)
+         ).one()

      @provide_session
      def _clear_task_instances(
@@ -162,15 +166,13 @@ if not AIRFLOW_V_3_0_PLUS:
              dag_run = DagRun.find(dag_id, execution_date=dttm)[0]  # type: ignore[call-arg]
          else:
              dag_run = DagRun.find(dag_id, logical_date=dttm)[0]
-         ti = (
-             session.query(TaskInstance)
-             .filter(
+         ti = session.scalars(
+             select(TaskInstance).where(
                  TaskInstance.dag_id == dag_id,
                  TaskInstance.run_id == dag_run.run_id,
                  TaskInstance.task_id == operator.task_id,
              )
-             .one_or_none()
-         )
+         ).one_or_none()
          if not ti:
              raise TaskInstanceNotFound("Task instance not found")
          return ti
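
Both hunks above migrate the plugin from the legacy ``session.query(...).filter(...)`` API to SQLAlchemy 2.0-style ``select()`` statements executed through ``session.scalars()``, the idiom SQLAlchemy 2.0 recommends. The two forms are equivalent for single-entity queries:

    from sqlalchemy import select

    # Legacy 1.x-style ORM query:
    ti = session.query(TaskInstance).filter(TaskInstance.task_id == "t1").one_or_none()

    # 2.0-style equivalent; scalars() unwraps the single-entity result rows:
    ti = session.scalars(
        select(TaskInstance).where(TaskInstance.task_id == "t1")
    ).one_or_none()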
airflow/providers/databricks/sensors/databricks.py
@@ -23,8 +23,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, SQLStatementState
  from airflow.providers.databricks.operators.databricks import DEFER_METHOD_NAME
  from airflow.providers.databricks.utils.mixins import DatabricksSQLStatementsMixin
airflow/providers/databricks/sensors/databricks_partition.py
@@ -27,8 +27,7 @@ from typing import TYPE_CHECKING, Any

  from databricks.sql.utils import ParamEscaper

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

airflow/providers/databricks/sensors/databricks_sql.py
@@ -24,8 +24,7 @@ from collections.abc import Callable, Iterable, Sequence
  from functools import cached_property
  from typing import TYPE_CHECKING, Any

- from airflow.exceptions import AirflowException
- from airflow.providers.common.compat.sdk import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook

airflow/providers/databricks/utils/databricks.py
@@ -17,7 +17,7 @@
  # under the License.
  from __future__ import annotations

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunState


airflow/providers/databricks/utils/mixins.py
@@ -20,18 +20,14 @@ from __future__ import annotations

  import time
  from logging import Logger
- from typing import (
-     TYPE_CHECKING,
-     Any,
-     Protocol,
- )
+ from typing import TYPE_CHECKING, Any, Protocol

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.databricks.hooks.databricks import DatabricksHook, SQLStatementState
  from airflow.providers.databricks.triggers.databricks import DatabricksSQLStatementExecutionTrigger

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.sdk import Context


  class GetHookHasFields(Protocol):
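
The ``Context`` type hint now comes from ``airflow.sdk`` (the Airflow 3 task SDK location) instead of the old ``airflow.utils.context`` path. Because the import sits under ``TYPE_CHECKING`` it is only evaluated by static type checkers, never at runtime, and with ``from __future__ import annotations`` the annotation stays a plain string. The pattern in isolation (the class and method here are illustrative, not part of the provider):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from airflow.sdk import Context  # resolved by type checkers only


    class ExampleMixin:  # illustrative class
        def execute_complete(self, context: Context, event: dict) -> None:
            ...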
airflow/providers/databricks/utils/openlineage.py
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Any
  import requests

  from airflow.providers.common.compat.openlineage.check import require_openlineage_version
- from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.utils import timezone

  if TYPE_CHECKING:
@@ -37,60 +36,6 @@ if TYPE_CHECKING:
  log = logging.getLogger(__name__)


- def _get_logical_date(task_instance):
-     # todo: remove when min airflow version >= 3.0
-     if AIRFLOW_V_3_0_PLUS:
-         dagrun = task_instance.get_template_context()["dag_run"]
-         return dagrun.logical_date or dagrun.run_after
-
-     if hasattr(task_instance, "logical_date"):
-         date = task_instance.logical_date
-     else:
-         date = task_instance.execution_date
-
-     return date
-
-
- def _get_dag_run_clear_number(task_instance):
-     # todo: remove when min airflow version >= 3.0
-     if AIRFLOW_V_3_0_PLUS:
-         dagrun = task_instance.get_template_context()["dag_run"]
-         return dagrun.clear_number
-     return task_instance.dag_run.clear_number
-
-
- # todo: move this run_id logic into OpenLineage's listener to avoid differences
- def _get_ol_run_id(task_instance) -> str:
-     """
-     Get OpenLineage run_id from TaskInstance.
-
-     It's crucial that the task_instance's run_id creation logic matches OpenLineage's listener implementation.
-     Only then can we ensure that the generated run_id aligns with the Airflow task,
-     enabling a proper connection between events.
-     """
-     from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-     # Generate same OL run id as is generated for current task instance
-     return OpenLineageAdapter.build_task_instance_run_id(
-         dag_id=task_instance.dag_id,
-         task_id=task_instance.task_id,
-         logical_date=_get_logical_date(task_instance),
-         try_number=task_instance.try_number,
-         map_index=task_instance.map_index,
-     )
-
-
- # todo: move this run_id logic into OpenLineage's listener to avoid differences
- def _get_ol_dag_run_id(task_instance) -> str:
-     from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-     return OpenLineageAdapter.build_dag_run_id(
-         dag_id=task_instance.dag_id,
-         logical_date=_get_logical_date(task_instance),
-         clear_number=_get_dag_run_clear_number(task_instance),
-     )
-
-
  def _get_parent_run_facet(task_instance):
      """
      Retrieve the ParentRunFacet associated with a specific Airflow task instance.
@@ -101,22 +46,39 @@ def _get_parent_run_facet(task_instance):
      """
      from openlineage.client.facet_v2 import parent_run

-     from airflow.providers.openlineage.conf import namespace
+     from airflow.providers.openlineage.plugins.macros import (
+         lineage_job_name,
+         lineage_job_namespace,
+         lineage_root_job_name,
+         lineage_root_run_id,
+         lineage_run_id,
+     )
+
+     parent_run_id = lineage_run_id(task_instance)
+     parent_job_name = lineage_job_name(task_instance)
+     parent_job_namespace = lineage_job_namespace()
+
+     root_parent_run_id = lineage_root_run_id(task_instance)
+     root_parent_job_name = lineage_root_job_name(task_instance)
+
+     try:  # Added in OL provider 2.9.0, try to use it if possible
+         from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace

-     parent_run_id = _get_ol_run_id(task_instance)
-     root_parent_run_id = _get_ol_dag_run_id(task_instance)
+         root_parent_job_namespace = lineage_root_job_namespace(task_instance)
+     except ImportError:
+         root_parent_job_namespace = lineage_job_namespace()

      return parent_run.ParentRunFacet(
          run=parent_run.Run(runId=parent_run_id),
          job=parent_run.Job(
-             namespace=namespace(),
-             name=f"{task_instance.dag_id}.{task_instance.task_id}",
+             namespace=parent_job_namespace,
+             name=parent_job_name,
          ),
          root=parent_run.Root(
              run=parent_run.RootRun(runId=root_parent_run_id),
              job=parent_run.RootJob(
-                 name=task_instance.dag_id,
-                 namespace=namespace(),
+                 name=root_parent_job_name,
+                 namespace=root_parent_job_namespace,
              ),
          ),
      )
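
Instead of rebuilding OpenLineage run ids by hand, the facet is now assembled from the OpenLineage provider's public lineage macros, which keeps the ids consistent with what the OL listener itself emits; the ``provider_min_version`` bump to ``2.5.0`` in the next hunk presumably tracks the release that introduced the root-lineage macros. How the macros map to the facet fields (``ti`` stands for the current ``TaskInstance``):

    from airflow.providers.openlineage.plugins.macros import (
        lineage_job_name,
        lineage_job_namespace,
        lineage_run_id,
    )

    lineage_job_namespace()   # OL job namespace, e.g. "default" unless configured
    lineage_job_name(ti)      # "<dag_id>.<task_id>"
    lineage_run_id(ti)        # deterministic OL run id for this task instance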
@@ -209,7 +171,7 @@ def _create_ol_event_pair(
      return start, end


- @require_openlineage_version(provider_min_version="2.3.0")
+ @require_openlineage_version(provider_min_version="2.5.0")
  def emit_openlineage_events_for_databricks_queries(
      task_instance,
      hook: DatabricksSqlHook | DatabricksHook | None = None,
{apache_airflow_providers_databricks-7.8.0.dist-info → apache_airflow_providers_databricks-7.8.2rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-databricks
- Version: 7.8.0
+ Version: 7.8.2rc1
  Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
  Keywords: airflow-provider,databricks,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,9 +22,9 @@ Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
  License-File: LICENSE
  License-File: NOTICE
- Requires-Dist: apache-airflow>=2.11.0
- Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
- Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
+ Requires-Dist: apache-airflow>=2.11.0rc1
+ Requires-Dist: apache-airflow-providers-common-compat>=1.10.1rc1
+ Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
  Requires-Dist: requests>=2.32.0,<3
  Requires-Dist: databricks-sql-connector>=4.0.0
  Requires-Dist: databricks-sqlalchemy>=1.0.2
@@ -35,13 +35,13 @@ Requires-Dist: pandas>=2.2.3; python_version >= "3.13"
  Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
  Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
  Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
- Requires-Dist: apache-airflow-providers-fab>=2.2.0 ; extra == "fab" and ( python_version < '3.13')
- Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
+ Requires-Dist: apache-airflow-providers-fab>=2.2.0rc1 ; extra == "fab" and ( python_version < '3.13')
+ Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
  Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
  Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.2/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-databricks/7.8.2
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -77,7 +77,7 @@ Provides-Extra: standard

  Package ``apache-airflow-providers-databricks``

- Release: ``7.8.0``
+ Release: ``7.8.2``


  `Databricks <https://databricks.com/>`__
@@ -90,7 +90,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.2/>`_.

  Installation
  ------------
@@ -108,7 +108,7 @@ Requirements
  PIP package                                Version required
  ========================================== ======================================
  ``apache-airflow``                         ``>=2.11.0``
- ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.10.1``
  ``apache-airflow-providers-common-sql``    ``>=1.27.0``
  ``requests``                               ``>=2.32.0,<3``
  ``databricks-sql-connector``               ``>=4.0.0``
@@ -156,5 +156,5 @@ Extra Dependencies
  ================== ================================================================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.2/changelog.html>`_.

apache_airflow_providers_databricks-7.8.2rc1.dist-info/RECORD (new file)
@@ -0,0 +1,31 @@
+ airflow/providers/databricks/__init__.py,sha256=b-DyL0dlIgtgJ_JwUEsrrNbwshoektdumNOnplO1dyI,1499
+ airflow/providers/databricks/exceptions.py,sha256=v7TD8auFp9LmyWqRtnXYG8mOit0WE3OuInUNFoC0zTo,1278
+ airflow/providers/databricks/get_provider_info.py,sha256=LfK0AwIARVh4tX5146-J2VRZwfe6GP3xjLyltA7X7iU,5738
+ airflow/providers/databricks/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
+ airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+ airflow/providers/databricks/hooks/databricks.py,sha256=6e2qa4jQJub4-Qg3j7GSX7Kt6cWNLa5n-5xqvQJGD0k,29788
+ airflow/providers/databricks/hooks/databricks_base.py,sha256=h4dZTVfjnuEznxv2S36IJ3cO8xG1hLuP2-_0HVrv2M8,36911
+ airflow/providers/databricks/hooks/databricks_sql.py,sha256=odloOD8Qx9cJ7rozdpEfpR5VSNPNfWH3dw6Yxv6pVCQ,17772
+ airflow/providers/databricks/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+ airflow/providers/databricks/operators/databricks.py,sha256=DLLPlSCHBKdNwMemh_gGmzcbBgB-GVr-cYpq5J0DjXU,79286
+ airflow/providers/databricks/operators/databricks_repos.py,sha256=jOrYO_tFQJ5JBXeu7Rhrc3pcQJ4qtzSGSjGZ4GffmwU,13125
+ airflow/providers/databricks/operators/databricks_sql.py,sha256=m3_5dG3zd1rhWK96YAuBzh7J4Y20B9lLXBUFGtx1V84,21812
+ airflow/providers/databricks/operators/databricks_workflow.py,sha256=xqk6kbFcqArHo4w9E0sVGbAkX2tuBqWdtvwiFyc9jzo,14989
+ airflow/providers/databricks/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/databricks/plugins/databricks_workflow.py,sha256=ri4yJ19cX8jF2JEuDojNWUgjshYdPKgHYMvlw0jmwow,20063
+ airflow/providers/databricks/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/databricks/sensors/databricks.py,sha256=9gFu7Xy8wqaBKPAe94ozpVtruLluLwG07iMP53KAcPo,6232
+ airflow/providers/databricks/sensors/databricks_partition.py,sha256=AV7GoAIRnV7NEtbqUxp9WdSeN-LeIc49I3_NaI1cBiY,9910
+ airflow/providers/databricks/sensors/databricks_sql.py,sha256=ON3ulhD0I4ukJhKzDYTqw-8ZkdUuED_8QyDZbzFgHko,5603
+ airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+ airflow/providers/databricks/triggers/databricks.py,sha256=DQbXLw1W_e3Iw-hsDph7vPuHc2caj623V7WmA2_PftM,8672
+ airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/databricks/utils/databricks.py,sha256=bnZdjQ1etvAcfgdmb8BR4i1M4YjdcDXxxznVtmur1GM,5134
+ airflow/providers/databricks/utils/mixins.py,sha256=XDA9v9BeCgMIznYPpa-X7XIqrD1mJbw4eSQUjvTsQXI,7397
+ airflow/providers/databricks/utils/openlineage.py,sha256=naqLzbdBebwDUPvDhhIa5Ey_8SgKkYqdwhzJC_51gFU,13674
+ apache_airflow_providers_databricks-7.8.2rc1.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
+ apache_airflow_providers_databricks-7.8.2rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ apache_airflow_providers_databricks-7.8.2rc1.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+ apache_airflow_providers_databricks-7.8.2rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ apache_airflow_providers_databricks-7.8.2rc1.dist-info/METADATA,sha256=UigXT7Txwtkg9ZSrz4iII-Hw1ruBnYpetJ7PGi8AYws,7784
+ apache_airflow_providers_databricks-7.8.2rc1.dist-info/RECORD,,
apache_airflow_providers_databricks-7.8.0.dist-info/RECORD (removed)
@@ -1,31 +0,0 @@
- airflow/providers/databricks/__init__.py,sha256=jK9hWZ8jptf_y_7T6PywlGiLdB4zq1EsGECyxMGFi8A,1499
- airflow/providers/databricks/exceptions.py,sha256=85RklmLOI_PnTzfXNIUd5fAu2aMMUhelwumQAX0wANE,1261
- airflow/providers/databricks/get_provider_info.py,sha256=LfK0AwIARVh4tX5146-J2VRZwfe6GP3xjLyltA7X7iU,5738
- airflow/providers/databricks/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
- airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/databricks/hooks/databricks.py,sha256=uOBPUUAEc9eHBdvMgNyJzWivIFCt_GQgR4UlRrRxqgM,29754
- airflow/providers/databricks/hooks/databricks_base.py,sha256=m-m2AKqD3-6mEfvuwgo7Era47zGzsjKbpLTRQNjiUS4,36864
- airflow/providers/databricks/hooks/databricks_sql.py,sha256=xougOWuFgQzhBzFcuYkbX-lo0FpKCQztXoBETJEzesg,17755
- airflow/providers/databricks/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/databricks/operators/databricks.py,sha256=bVhFNTeGboHkmaJkYDYEyF0V1QUOB_RnsvwaCuEtIew,79316
- airflow/providers/databricks/operators/databricks_repos.py,sha256=VRZye45ZMlDxti6ZJjuouox5umiMoeQ-BKugPpE7jnM,13155
- airflow/providers/databricks/operators/databricks_sql.py,sha256=gwpkr660qpk4dUve98RB-hniaMzuXL6znQZZGilJxi0,21842
- airflow/providers/databricks/operators/databricks_workflow.py,sha256=QLsR0pGLWvvQbutsjj4RWwBE-z6tkWiYLHj6waMv8ZE,15019
- airflow/providers/databricks/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/databricks/plugins/databricks_workflow.py,sha256=5vyG2WNM25ptSv5IwAndUTqKAOmTneOWy_pAtqBKcgc,20020
- airflow/providers/databricks/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/databricks/sensors/databricks.py,sha256=RrjSzncvppdp5U8RYHd975MCIQIb_s1VQoxm9Aqbvac,6262
- airflow/providers/databricks/sensors/databricks_partition.py,sha256=qPDy8oxg-Lo-jnHy1EbxmA5GIjC6t0XnFJ1E3aAmUgg,9940
- airflow/providers/databricks/sensors/databricks_sql.py,sha256=shq7ng4LCiaD4Q7lorm4g1A7aijmq3nVUnCFlYtoI7c,5633
- airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/databricks/triggers/databricks.py,sha256=DQbXLw1W_e3Iw-hsDph7vPuHc2caj623V7WmA2_PftM,8672
- airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/databricks/utils/databricks.py,sha256=ecvzZbC4KdXds47VeSayot9EO-RQnTRJTEwKITH7waQ,5117
- airflow/providers/databricks/utils/mixins.py,sha256=WUmkt3AmXalmV6zOUIJZWbTldxYunAZOstddDhKCC94,7407
- airflow/providers/databricks/utils/openlineage.py,sha256=1jT5Woh9YifawdP-VFWsabfF-ecuCjPlzD5P_W4DAhI,15078
- apache_airflow_providers_databricks-7.8.0.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
- apache_airflow_providers_databricks-7.8.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- apache_airflow_providers_databricks-7.8.0.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
- apache_airflow_providers_databricks-7.8.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- apache_airflow_providers_databricks-7.8.0.dist-info/METADATA,sha256=ag4y8wdimVeG_xpU6SGjicRyOsUe66vk2zyUTQ8h9c0,7750
- apache_airflow_providers_databricks-7.8.0.dist-info/RECORD,,