apache-airflow-providers-databricks 7.0.0rc1__py3-none-any.whl → 7.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

@@ -199,55 +199,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
- ============================================================================
- APACHE AIRFLOW SUBCOMPONENTS:
-
- The Apache Airflow project contains subcomponents with separate copyright
- notices and license terms. Your use of the source code for the these
- subcomponents is subject to the terms and conditions of the following
- licenses.
-
-
- ========================================================================
- Third party Apache 2.0 licenses
- ========================================================================
-
- The following components are provided under the Apache 2.0 License.
- See project link for details. The text of each license is also included
- at 3rd-party-licenses/LICENSE-[project].txt.
-
- (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
- (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
- (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
- (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
-
- ========================================================================
- MIT licenses
- ========================================================================
-
- The following components are provided under the MIT License. See project link for details.
- The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
- (MIT License) jquery v3.5.1 (https://jquery.org/license/)
- (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
- (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
- (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
- (MIT License) dataTables v1.10.25 (https://datatables.net)
- (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
- (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
- (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
- (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
-
- ========================================================================
- BSD 3-Clause licenses
- ========================================================================
- The following components are provided under the BSD 3-Clause license. See project links for details.
- The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
- (BSD 3 License) d3 v5.16.0 (https://d3js.org)
- (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
- (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
-
- ========================================================================
- See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "7.0.0"
+ __version__ = "7.1.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.9.0"
@@ -15,8 +15,7 @@
  # specific language governing permissions and limitations
  # under the License.

- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- # OVERWRITTEN WHEN PREPARING PACKAGES.
+ # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
  #
  # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
  # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -28,8 +27,9 @@ def get_provider_info():
  "name": "Databricks",
  "description": "`Databricks <https://databricks.com/>`__\n",
  "state": "ready",
- "source-date-epoch": 1734533222,
+ "source-date-epoch": 1739959376,
  "versions": [
+ "7.1.0",
  "7.0.0",
  "6.13.0",
  "6.12.0",
@@ -77,30 +77,6 @@ def get_provider_info():
  "1.0.1",
  "1.0.0",
  ],
- "dependencies": [
- "apache-airflow>=2.9.0",
- "apache-airflow-providers-common-sql>=1.20.0",
- "requests>=2.27.0,<3",
- "databricks-sql-connector>=3.0.0",
- "aiohttp>=3.9.2, <4",
- "mergedeep>=1.3.4",
- 'pandas>=2.1.2,<2.2;python_version>="3.9"',
- 'pandas>=1.5.3,<2.2;python_version<"3.9"',
- "pyarrow>=14.0.1",
- ],
- "additional-extras": [
- {
- "name": "sdk",
- "description": "Install Databricks SDK",
- "dependencies": ["databricks-sdk==0.10.0"],
- },
- {
- "name": "azure-identity",
- "description": "Install Azure Identity client library",
- "dependencies": ["azure-identity>=1.3.1"],
- },
- ],
- "devel-dependencies": ["deltalake>=0.12.0"],
  "integrations": [
  {
  "integration-name": "Databricks",
@@ -112,7 +88,7 @@ def get_provider_info():
  "/docs/apache-airflow-providers-databricks/operators/run_now.rst",
  "/docs/apache-airflow-providers-databricks/operators/task.rst",
  ],
- "logo": "/integration-logos/databricks/Databricks.png",
+ "logo": "/docs/integration-logos/Databricks.png",
  "tags": ["service"],
  },
  {
@@ -122,7 +98,6 @@ def get_provider_info():
  "/docs/apache-airflow-providers-databricks/operators/sql.rst",
  "/docs/apache-airflow-providers-databricks/operators/copy_into.rst",
  ],
- "logo": "/integration-logos/databricks/Databricks.png",
  "tags": ["service"],
  },
  {
@@ -133,14 +108,12 @@ def get_provider_info():
  "/docs/apache-airflow-providers-databricks/operators/repos_update.rst",
  "/docs/apache-airflow-providers-databricks/operators/repos_delete.rst",
  ],
- "logo": "/integration-logos/databricks/Databricks.png",
  "tags": ["service"],
  },
  {
  "integration-name": "Databricks Workflow",
  "external-doc-url": "https://docs.databricks.com/en/workflows/index.html",
  "how-to-guide": ["/docs/apache-airflow-providers-databricks/operators/workflow.rst"],
- "logo": "/integration-logos/databricks/Databricks.png",
  "tags": ["service"],
  },
  ],
@@ -203,4 +176,19 @@ def get_provider_info():
  }
  ],
  "extra-links": ["airflow.providers.databricks.operators.databricks.DatabricksJobRunLink"],
+ "dependencies": [
+ "apache-airflow>=2.9.0",
+ "apache-airflow-providers-common-sql>=1.20.0",
+ "requests>=2.27.0,<3",
+ "databricks-sql-connector>=3.0.0",
+ "aiohttp>=3.9.2, <4",
+ "mergedeep>=1.3.4",
+ "pandas>=2.1.2,<2.2",
+ "pyarrow>=14.0.1",
+ ],
+ "optional-dependencies": {
+ "sdk": ["databricks-sdk==0.10.0"],
+ "azure-identity": ["azure-identity>=1.3.1"],
+ },
+ "devel-dependencies": ["deltalake>=0.12.0"],
  }
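
In the new layout, the `sdk` and `azure-identity` groups are expressed as `optional-dependencies`, which surface as the extras declared later in METADATA (`Provides-Extra: azure-identity` / `Provides-Extra: sdk`). A quick way to confirm which build and extras an installed environment actually exposes, using only the standard library (expected output shown as comments; it depends on what is installed locally):

    from importlib.metadata import metadata, version

    print(version("apache-airflow-providers-databricks"))
    # e.g. "7.1.0"
    print(metadata("apache-airflow-providers-databricks").get_all("Provides-Extra"))
    # e.g. ["azure-identity", "sdk"]
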
@@ -31,14 +31,13 @@ from typing import (
  overload,
  )

- from databricks import sql # type: ignore[attr-defined]
- from databricks.sql.types import Row
-
  from airflow.exceptions import AirflowException
  from airflow.models.connection import Connection as AirflowConnection
  from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
  from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
  from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
+ from databricks import sql # type: ignore[attr-defined]
+ from databricks.sql.types import Row

  if TYPE_CHECKING:
  from databricks.sql.client import Connection
@@ -19,6 +19,7 @@

  from __future__ import annotations

+ import hashlib
  import time
  from abc import ABC, abstractmethod
  from collections.abc import Sequence
@@ -966,6 +967,8 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):

  :param caller: The name of the caller operator to be used in the logs.
  :param databricks_conn_id: The name of the Airflow connection to use.
+ :param databricks_task_key: An optional task_key used to refer to the task by Databricks API. By
+ default this will be set to the hash of ``dag_id + task_id``.
  :param databricks_retry_args: An optional dictionary with arguments passed to ``tenacity.Retrying`` class.
  :param databricks_retry_delay: Number of seconds to wait between retries.
  :param databricks_retry_limit: Amount of times to retry if the Databricks backend is unreachable.
@@ -986,6 +989,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  self,
  caller: str = "DatabricksTaskBaseOperator",
  databricks_conn_id: str = "databricks_default",
+ databricks_task_key: str = "",
  databricks_retry_args: dict[Any, Any] | None = None,
  databricks_retry_delay: int = 1,
  databricks_retry_limit: int = 3,
@@ -1000,6 +1004,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  ):
  self.caller = caller
  self.databricks_conn_id = databricks_conn_id
+ self._databricks_task_key = databricks_task_key
  self.databricks_retry_args = databricks_retry_args
  self.databricks_retry_delay = databricks_retry_delay
  self.databricks_retry_limit = databricks_retry_limit
@@ -1037,17 +1042,21 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  caller=caller,
  )

- def _get_databricks_task_id(self, task_id: str) -> str:
- """Get the databricks task ID using dag_id and task_id. Removes illegal characters."""
- task_id = f"{self.dag_id}__{task_id.replace('.', '__')}"
- if len(task_id) > 100:
- self.log.warning(
- "The generated task_key '%s' exceeds 100 characters and will be truncated by the Databricks API. "
- "This will cause failure when trying to monitor the task. task_key is generated by ",
- "concatenating dag_id and task_id.",
- task_id,
+ @cached_property
+ def databricks_task_key(self) -> str:
+ return self._generate_databricks_task_key()
+
+ def _generate_databricks_task_key(self, task_id: str | None = None) -> str:
+ """Create a databricks task key using the hash of dag_id and task_id."""
+ if not self._databricks_task_key or len(self._databricks_task_key) > 100:
+ self.log.info(
+ "databricks_task_key has not be provided or the provided one exceeds 100 characters and will be truncated by the Databricks API. This will cause failure when trying to monitor the task. A task_key will be generated using the hash value of dag_id+task_id"
  )
- return task_id
+ task_id = task_id or self.task_id
+ task_key = f"{self.dag_id}__{task_id}".encode()
+ self._databricks_task_key = hashlib.md5(task_key).hexdigest()
+ self.log.info("Generated databricks task_key: %s", self._databricks_task_key)
+ return self._databricks_task_key

  @property
  def _databricks_workflow_task_group(self) -> DatabricksWorkflowTaskGroup | None:
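
For reference, the default key produced by the new `_generate_databricks_task_key` reduces to the MD5 hex digest of `dag_id__task_id`. A standalone sketch (the helper name below is hypothetical; only the hashing mirrors the hunk above):

    import hashlib

    def example_task_key(dag_id: str, task_id: str) -> str:
        # Hash "dag_id__task_id" and use the 32-character hex digest,
        # which always fits the 100-character task_key limit mentioned above.
        return hashlib.md5(f"{dag_id}__{task_id}".encode()).hexdigest()

    print(example_task_key("my_dag", "my_task"))  # stable 32-character hex string
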
@@ -1077,7 +1086,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  def _get_run_json(self) -> dict[str, Any]:
  """Get run json to be used for task submissions."""
  run_json = {
- "run_name": self._get_databricks_task_id(self.task_id),
+ "run_name": self.databricks_task_key,
  **self._get_task_base_json(),
  }
  if self.new_cluster and self.existing_cluster_id:
@@ -1090,7 +1099,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  raise ValueError("Must specify either existing_cluster_id or new_cluster.")
  return run_json

- def _launch_job(self, context: Context | None = None) -> int:
+ def _launch_job(self, context: Context | None = None) -> int | None:
  """Launch the job on Databricks."""
  run_json = self._get_run_json()
  self.databricks_run_id = self._hook.submit_run(run_json)
@@ -1127,9 +1136,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  # building the {task_key: task} map below.
  sorted_task_runs = sorted(tasks, key=lambda x: x["start_time"])

- return {task["task_key"]: task for task in sorted_task_runs}[
- self._get_databricks_task_id(self.task_id)
- ]
+ return {task["task_key"]: task for task in sorted_task_runs}[self.databricks_task_key]

  def _convert_to_databricks_workflow_task(
  self, relevant_upstreams: list[BaseOperator], context: Context | None = None
@@ -1137,9 +1144,9 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  """Convert the operator to a Databricks workflow task that can be a task in a workflow."""
  base_task_json = self._get_task_base_json()
  result = {
- "task_key": self._get_databricks_task_id(self.task_id),
+ "task_key": self.databricks_task_key,
  "depends_on": [
- {"task_key": self._get_databricks_task_id(task_id)}
+ {"task_key": self._generate_databricks_task_key(task_id)}
  for task_id in self.upstream_task_ids
  if task_id in relevant_upstreams
  ],
@@ -1172,7 +1179,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  run_state = RunState(**run["state"])
  self.log.info(
  "Current state of the the databricks task %s is %s",
- self._get_databricks_task_id(self.task_id),
+ self.databricks_task_key,
  run_state.life_cycle_state,
  )
  if self.deferrable and not run_state.is_terminal:
@@ -1194,7 +1201,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
  run_state = RunState(**run["state"])
  self.log.info(
  "Current state of the databricks task %s is %s",
- self._get_databricks_task_id(self.task_id),
+ self.databricks_task_key,
  run_state.life_cycle_state,
  )
  self._handle_terminal_run_state(run_state)
@@ -30,7 +30,11 @@ from airflow.models import BaseOperator
  from airflow.providers.databricks.hooks.databricks import DatabricksHook

  if TYPE_CHECKING:
- from airflow.utils.context import Context
+ try:
+ from airflow.sdk.definitions.context import Context
+ except ImportError:
+ # TODO: Remove once provider drops support for Airflow 2
+ from airflow.utils.context import Context


  class DatabricksReposCreateOperator(BaseOperator):
@@ -24,12 +24,11 @@ import json
  from collections.abc import Sequence
  from typing import TYPE_CHECKING, Any, ClassVar

- from databricks.sql.utils import ParamEscaper
-
  from airflow.exceptions import AirflowException
  from airflow.models import BaseOperator
  from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
+ from databricks.sql.utils import ParamEscaper

  if TYPE_CHECKING:
  from airflow.utils.context import Context
@@ -93,7 +93,7 @@ class _CreateDatabricksWorkflowOperator(BaseOperator):
  """

  operator_extra_links = (WorkflowJobRunLink(), WorkflowJobRepairAllFailedLink())
- template_fields = ("notebook_params",)
+ template_fields = ("notebook_params", "job_clusters")
  caller = "_CreateDatabricksWorkflowOperator"

  def __init__(
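
Adding ``job_clusters`` to ``template_fields`` means Jinja expressions inside the cluster spec are rendered before the workflow job is created. A sketch of where that matters, assuming the documented ``DatabricksWorkflowTaskGroup`` parameters (the DAG and cluster values below are illustrative):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.databricks.operators.databricks_workflow import (
        DatabricksWorkflowTaskGroup,
    )

    with DAG("example_databricks_workflow", start_date=datetime(2025, 1, 1), schedule=None):
        with DatabricksWorkflowTaskGroup(
            group_id="workflow",
            databricks_conn_id="databricks_default",
            job_clusters=[
                {
                    "job_cluster_key": "default_cluster",
                    "new_cluster": {
                        "spark_version": "13.3.x-scala2.12",
                        # Rendered at runtime now that job_clusters is a template field.
                        "node_type_id": "{{ var.value.get('databricks_node_type', 'i3.xlarge') }}",
                        "num_workers": 1,
                    },
                }
            ],
        ):
            ...  # Databricks task operators would be added here.
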
@@ -44,6 +44,8 @@ from airflow.www.views import AirflowBaseView
  if TYPE_CHECKING:
  from sqlalchemy.orm.session import Session

+ from airflow.providers.databricks.operators.databricks import DatabricksTaskBaseOperator
+

  REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
  REPAIR_WAIT_DELAY = os.getenv("DATABRICKS_REPAIR_WAIT_DELAY", 0.5)
@@ -57,18 +59,8 @@ def get_auth_decorator():
  return auth.has_access_dag("POST", DagAccessEntity.RUN)


- def _get_databricks_task_id(task: BaseOperator) -> str:
- """
- Get the databricks task ID using dag_id and task_id. removes illegal characters.
-
- :param task: The task to get the databricks task ID for.
- :return: The databricks task ID.
- """
- return f"{task.dag_id}__{task.task_id.replace('.', '__')}"
-
-
  def get_databricks_task_ids(
- group_id: str, task_map: dict[str, BaseOperator], log: logging.Logger
+ group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: logging.Logger
  ) -> list[str]:
  """
  Return a list of all Databricks task IDs for a dictionary of Airflow tasks.
@@ -83,7 +75,7 @@ def get_databricks_task_ids(
  for task_id, task in task_map.items():
  if task_id == f"{group_id}.launch":
  continue
- databricks_task_id = _get_databricks_task_id(task)
+ databricks_task_id = task.databricks_task_key
  log.debug("databricks task id for task %s is %s", task_id, databricks_task_id)
  task_ids.append(databricks_task_id)
  return task_ids
@@ -112,7 +104,7 @@ def _clear_task_instances(
  dag = airflow_app.dag_bag.get_dag(dag_id)
  log.debug("task_ids %s to clear", str(task_ids))
  dr: DagRun = _get_dagrun(dag, run_id, session=session)
- tis_to_clear = [ti for ti in dr.get_task_instances() if _get_databricks_task_id(ti) in task_ids]
+ tis_to_clear = [ti for ti in dr.get_task_instances() if ti.databricks_task_key in task_ids]
  clear_task_instances(tis_to_clear, session)


@@ -327,7 +319,7 @@ class WorkflowJobRepairAllFailedLink(BaseOperatorLink, LoggingMixin):

  tasks_to_run = {ti: t for ti, t in task_group_sub_tasks if ti in failed_and_skipped_tasks}

- return ",".join(get_databricks_task_ids(task_group.group_id, tasks_to_run, log))
+ return ",".join(get_databricks_task_ids(task_group.group_id, tasks_to_run, log)) # type: ignore[arg-type]

  @staticmethod
  def _get_failed_and_skipped_tasks(dr: DagRun) -> list[str]:
@@ -390,7 +382,7 @@ class WorkflowJobRepairSingleTaskLink(BaseOperatorLink, LoggingMixin):
  "databricks_conn_id": metadata.conn_id,
  "databricks_run_id": metadata.run_id,
  "run_id": ti_key.run_id,
- "tasks_to_repair": _get_databricks_task_id(task),
+ "tasks_to_repair": task.databricks_task_key,
  }
  return url_for("RepairDatabricksTasks.repair", **query_params)

@@ -25,15 +25,18 @@ from datetime import datetime
  from functools import cached_property
  from typing import TYPE_CHECKING, Any, Callable

- from databricks.sql.utils import ParamEscaper
-
  from airflow.exceptions import AirflowException
  from airflow.providers.common.sql.hooks.sql import fetch_all_handler
  from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
  from airflow.sensors.base import BaseSensorOperator
+ from databricks.sql.utils import ParamEscaper

  if TYPE_CHECKING:
- from airflow.utils.context import Context
+ try:
+ from airflow.sdk.definitions.context import Context
+ except ImportError:
+ # TODO: Remove once provider drops support for Airflow 2
+ from airflow.utils.context import Context


  class DatabricksPartitionSensor(BaseSensorOperator):
@@ -30,7 +30,11 @@ from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
  from airflow.sensors.base import BaseSensorOperator

  if TYPE_CHECKING:
- from airflow.utils.context import Context
+ try:
+ from airflow.sdk.definitions.context import Context
+ except ImportError:
+ # TODO: Remove once provider drops support for Airflow 2
+ from airflow.utils.context import Context


  class DatabricksSqlSensor(BaseSensorOperator):
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: apache-airflow-providers-databricks
- Version: 7.0.0rc1
+ Version: 7.1.0
  Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
  Keywords: airflow-provider,databricks,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,20 +20,19 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: aiohttp>=3.9.2, <4
- Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
- Requires-Dist: apache-airflow>=2.9.0rc0
+ Requires-Dist: apache-airflow>=2.9.0
+ Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
+ Requires-Dist: requests>=2.27.0,<3
  Requires-Dist: databricks-sql-connector>=3.0.0
+ Requires-Dist: aiohttp>=3.9.2, <4
  Requires-Dist: mergedeep>=1.3.4
- Requires-Dist: pandas>=1.5.3,<2.2;python_version<"3.9"
- Requires-Dist: pandas>=2.1.2,<2.2;python_version>="3.9"
+ Requires-Dist: pandas>=2.1.2,<2.2
  Requires-Dist: pyarrow>=14.0.1
- Requires-Dist: requests>=2.27.0,<3
  Requires-Dist: azure-identity>=1.3.1 ; extra == "azure-identity"
  Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.1.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.1.0
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -42,23 +41,6 @@ Provides-Extra: azure-identity
  Provides-Extra: sdk


- .. Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- .. http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
  .. Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements. See the NOTICE file
  distributed with this work for additional information
@@ -76,8 +58,7 @@ Provides-Extra: sdk
  specific language governing permissions and limitations
  under the License.

- .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- OVERWRITTEN WHEN PREPARING PACKAGES.
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

  .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
  `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -85,7 +66,7 @@ Provides-Extra: sdk

  Package ``apache-airflow-providers-databricks``

- Release: ``7.0.0.rc1``
+ Release: ``7.1.0``


  `Databricks <https://databricks.com/>`__
@@ -98,7 +79,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.1.0/>`_.

  Installation
  ------------
@@ -112,19 +93,18 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
  Requirements
  ------------

- ======================================= =========================================
+ ======================================= ==================
  PIP package Version required
- ======================================= =========================================
+ ======================================= ==================
  ``apache-airflow`` ``>=2.9.0``
  ``apache-airflow-providers-common-sql`` ``>=1.20.0``
  ``requests`` ``>=2.27.0,<3``
  ``databricks-sql-connector`` ``>=3.0.0``
  ``aiohttp`` ``>=3.9.2,<4``
  ``mergedeep`` ``>=1.3.4``
- ``pandas`` ``>=2.1.2,<2.2; python_version >= "3.9"``
- ``pandas`` ``>=1.5.3,<2.2; python_version < "3.9"``
+ ``pandas`` ``>=2.1.2,<2.2``
  ``pyarrow`` ``>=14.0.1``
- ======================================= =========================================
+ ======================================= ==================

  Cross provider package dependencies
  -----------------------------------
@@ -146,4 +126,5 @@ Dependent package
  ============================================================================================================ ==============

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.1.0/changelog.html>`_.
+
@@ -1,26 +1,26 @@
- airflow/providers/databricks/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
- airflow/providers/databricks/__init__.py,sha256=v8XClKkcBotsY0PIlTEURUdD96YGqbFcjB-6k1gwH8g,1497
+ airflow/providers/databricks/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ airflow/providers/databricks/__init__.py,sha256=yNTguqRfpmybndzx-9642Qsg5rcq-zKHBlQSbOlgwVs,1497
  airflow/providers/databricks/exceptions.py,sha256=85RklmLOI_PnTzfXNIUd5fAu2aMMUhelwumQAX0wANE,1261
- airflow/providers/databricks/get_provider_info.py,sha256=ukstmmylM6CRZU-f6f7yP_00VrjbhFm-ydW8Q7BnheU,7788
+ airflow/providers/databricks/get_provider_info.py,sha256=815hMb4zzB6pxZmI6rbLM4d3Ki82UrMWELQFNsWEIkE,7213
  airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/databricks/hooks/databricks.py,sha256=-rgK_sMc2_BjTvSvMh1Md3XanVayOmcxijQfs1vRCPw,24774
  airflow/providers/databricks/hooks/databricks_base.py,sha256=8KVRF-ty20UQpJP3kgE6RDLAYqXk7ZjI07ZpwFIcGB8,34917
- airflow/providers/databricks/hooks/databricks_sql.py,sha256=Giy0XBz0t6PHepREIXzxlFZhaNYgnS4bzBT_I04v5rM,13119
+ airflow/providers/databricks/hooks/databricks_sql.py,sha256=e55wqvCR6tppId0arIfzXikhFs-1_DXFcP2OwvoHsH0,13118
  airflow/providers/databricks/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/databricks/operators/databricks.py,sha256=mYUYTFOIkraN1RnCYpgp_yxZxBhFr3Hr_bNqpYQD3ZA,69152
- airflow/providers/databricks/operators/databricks_repos.py,sha256=spR_c3uCZizvB4AEUP_mxLUJLCOE1CVV-CkjNPWBHXY,13120
- airflow/providers/databricks/operators/databricks_sql.py,sha256=thBHpt9_LMLJZ0PN-eLCI3AaT8IFq3NAHLDWDFP-Jiw,17031
- airflow/providers/databricks/operators/databricks_workflow.py,sha256=0_NaiPBbUjwtxZNE8BevMNWDCyQ0lHaCtNALa6ZAeNQ,14131
+ airflow/providers/databricks/operators/databricks.py,sha256=c4o60KjCJIN_xrgOLpuWMvMeKZoxN_OaTQnl-kUEQ3Q,69703
+ airflow/providers/databricks/operators/databricks_repos.py,sha256=m_72OnnU9df7UB-8SK2Tp5VjfNyjYeAnil3dCKs9SbA,13282
+ airflow/providers/databricks/operators/databricks_sql.py,sha256=zYLNggY7G0ouJQIEyXKbK3JNe-puBiOkTnQDd0jHIM0,17030
+ airflow/providers/databricks/operators/databricks_workflow.py,sha256=0vFu4w6O4tlStZ_Jhk1iswKFcTk-g_dthGFeDpXGZlw,14146
  airflow/providers/databricks/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/databricks/plugins/databricks_workflow.py,sha256=mNf5im2qqxvg1hx31p9uom-BrdPRRiw3uWESCCX6PU0,16908
+ airflow/providers/databricks/plugins/databricks_workflow.py,sha256=PJoDgtBYO3pvMjJlh72iOCrOTnNfQoprFM44UqykuaA,16706
  airflow/providers/databricks/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/databricks/sensors/databricks_partition.py,sha256=q_IVq5Oc8oKYM5xoV-lRQTHfmxMorZ6Bn9qplgxXUgo,9878
- airflow/providers/databricks/sensors/databricks_sql.py,sha256=Z3pWe8gI7fCWtVtEQ9G75Xy1zYIcMdIcqfJCdpz8bGI,5599
+ airflow/providers/databricks/sensors/databricks_partition.py,sha256=t4bi3XkGwcKYuaE8FfeZrhmJ9KCSyLnXb7mEwmgOrRU,10039
+ airflow/providers/databricks/sensors/databricks_sql.py,sha256=r3guNlbqBEi5ypVEBWMgEG2h1URCc-0YRu-LXsw8S-0,5761
  airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/databricks/triggers/databricks.py,sha256=xk9aEfdZnG33a4WSFfg6SZF4FfROV8B4HOyBYBvZR_Q,5104
  airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/databricks/utils/databricks.py,sha256=EICTPZTD0R0dy9UGKgv8srkrBTgzCQrcYNL9oBWuhzk,2890
- apache_airflow_providers_databricks-7.0.0rc1.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
- apache_airflow_providers_databricks-7.0.0rc1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
- apache_airflow_providers_databricks-7.0.0rc1.dist-info/METADATA,sha256=9oEnmBcoE3X07Zve6r5-dr5zGZI3au1MFkrp05-dc8s,6893
- apache_airflow_providers_databricks-7.0.0rc1.dist-info/RECORD,,
+ apache_airflow_providers_databricks-7.1.0.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
+ apache_airflow_providers_databricks-7.1.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+ apache_airflow_providers_databricks-7.1.0.dist-info/METADATA,sha256=RJFMy2XaNTlIDjciTtqOCZ8UCSUEeZdo9QCmCAC1enQ,5802
+ apache_airflow_providers_databricks-7.1.0.dist-info/RECORD,,