apache-airflow-providers-databricks 7.7.4__py3-none-any.whl → 7.8.0__py3-none-any.whl
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- airflow/providers/databricks/__init__.py +3 -3
- airflow/providers/databricks/get_provider_info.py +1 -0
- airflow/providers/databricks/hooks/databricks.py +9 -0
- airflow/providers/databricks/hooks/databricks_base.py +16 -9
- airflow/providers/databricks/hooks/databricks_sql.py +3 -3
- airflow/providers/databricks/plugins/databricks_workflow.py +1 -6
- airflow/providers/databricks/sensors/databricks_partition.py +4 -3
- {apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/METADATA +11 -9
- {apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/RECORD +13 -12
- apache_airflow_providers_databricks-7.8.0.dist-info/licenses/NOTICE +5 -0
- {apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/entry_points.txt +0 -0
- {airflow/providers/databricks → apache_airflow_providers_databricks-7.8.0.dist-info/licenses}/LICENSE +0 -0
airflow/providers/databricks/__init__.py

@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.7.4"
+__version__ = "7.8.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-databricks:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-databricks:{__version__}` needs Apache Airflow 2.11.0+"
     )
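The version gate above compares only the base version, so a local or pre-release suffix on the installed Airflow does not defeat the check. A standalone sketch of the pattern, assuming `packaging` is installed (the version string below is hypothetical):

```python
import packaging.version

airflow_version = "2.10.5+astro.1"  # hypothetical installed version

# base_version strips local/pre-release segments before the comparison.
base = packaging.version.parse(packaging.version.parse(airflow_version).base_version)
if base < packaging.version.parse("2.11.0"):
    raise RuntimeError("needs Apache Airflow 2.11.0+")
```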
airflow/providers/databricks/get_provider_info.py

@@ -45,6 +45,7 @@ def get_provider_info():
                 "external-doc-url": "https://databricks.com/product/databricks-sql",
                 "how-to-guide": [
                     "/docs/apache-airflow-providers-databricks/operators/sql.rst",
+                    "/docs/apache-airflow-providers-databricks/operators/sql_statements.rst",
                     "/docs/apache-airflow-providers-databricks/operators/copy_into.rst",
                 ],
                 "tags": ["service"],
airflow/providers/databricks/hooks/databricks.py

@@ -134,6 +134,9 @@ class RunState:
             and self.state_message == other.state_message
         )
 
+    def __hash__(self):
+        return hash((self.life_cycle_state, self.result_state, self.state_message))
+
     def __repr__(self) -> str:
         return str(self.__dict__)
 
@@ -183,6 +186,9 @@ class ClusterState:
     def __eq__(self, other) -> bool:
         return self.state == other.state and self.state_message == other.state_message
 
+    def __hash__(self):
+        return hash((self.state, self.state_message))
+
     def __repr__(self) -> str:
         return str(self.__dict__)
 
@@ -244,6 +250,9 @@ class SQLStatementState:
             and self.error_message == other.error_message
         )
 
+    def __hash__(self):
+        return hash((self.state, self.error_code, self.error_message))
+
     def __repr__(self) -> str:
         return str(self.__dict__)
 
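All three state classes gain a `__hash__` built over the same fields their `__eq__` compares. Python sets `__hash__` to `None` on any class that defines `__eq__` without also defining `__hash__`, so instances were previously unusable in sets and as dict keys. A minimal standalone sketch of the difference (not the provider's code):

```python
class EqOnly:
    def __init__(self, state):
        self.state = state

    def __eq__(self, other):
        return self.state == other.state


class EqAndHash(EqOnly):
    # Hash over the same fields __eq__ compares, keeping the two consistent.
    def __hash__(self):
        return hash(self.state)


try:
    {EqOnly("RUNNING")}  # set literal needs hashable members
except TypeError as exc:
    print(exc)  # unhashable type: 'EqOnly'

assert EqAndHash("RUNNING") in {EqAndHash("RUNNING")}
```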
airflow/providers/databricks/hooks/databricks_base.py

@@ -166,12 +166,12 @@ class BaseDatabricksHook(BaseHook):
         return ua_string
 
     @cached_property
-    def host(self) -> str:
+    def host(self) -> str | None:
+        host = None
         if "host" in self.databricks_conn.extra_dejson:
             host = self._parse_host(self.databricks_conn.extra_dejson["host"])
-        else:
+        elif self.databricks_conn.host:
             host = self._parse_host(self.databricks_conn.host)
-
         return host
 
     async def __aenter__(self):
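The `host` property is now typed `str | None`: it starts from a `None` sentinel and only falls back to the connection's `host` field when that field is truthy, instead of always parsing whatever is there. A simplified sketch of the resolution order (an assumed shape, not the hook itself):

```python
def resolve_host(extras: dict, conn_host: str | None) -> str | None:
    """Prefer the host from connection extras, then the host field, else None."""
    host = None
    if "host" in extras:
        host = extras["host"]
    elif conn_host:
        host = conn_host
    return host


assert resolve_host({"host": "a.cloud.databricks.com"}, "ignored") == "a.cloud.databricks.com"
assert resolve_host({}, "b.cloud.databricks.com") == "b.cloud.databricks.com"
assert resolve_host({}, None) is None
```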
@@ -207,6 +207,11 @@ class BaseDatabricksHook(BaseHook):
         # In this case, host = xx.cloud.databricks.com
         return host
 
+    def _get_connection_attr(self, attr_name: str) -> str:
+        if not (attr := getattr(self.databricks_conn, attr_name)):
+            raise ValueError(f"`{attr_name}` must be present in Connection")
+        return attr
+
     def _get_retry_object(self) -> Retrying:
         """
         Instantiate a retry object.
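`_get_connection_attr` centralizes a fail-fast check that the basic-auth and Azure code paths below all reuse: if a required connection attribute such as `login` is unset, it raises a descriptive `ValueError` instead of sending an empty credential. A standalone sketch of the pattern (the stub class stands in for an Airflow `Connection`):

```python
class ConnStub:  # stand-in for an Airflow Connection with no login set
    login = None
    password = "secret"


def get_required(conn, attr_name: str) -> str:
    # The walrus operator binds the attribute and tests it for emptiness in one step.
    if not (attr := getattr(conn, attr_name)):
        raise ValueError(f"`{attr_name}` must be present in Connection")
    return attr


try:
    get_required(ConnStub(), "login")
except ValueError as exc:
    print(exc)  # `login` must be present in Connection
```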
@@ -235,7 +240,7 @@ class BaseDatabricksHook(BaseHook):
             with attempt:
                 resp = requests.post(
                     resource,
-                    auth=HTTPBasicAuth(self.databricks_conn.login, self.databricks_conn.password),
+                    auth=HTTPBasicAuth(self._get_connection_attr("login"), self.databricks_conn.password),
                     data="grant_type=client_credentials&scope=all-apis",
                     headers={
                         **self.user_agent_header,
@@ -271,7 +276,9 @@ class BaseDatabricksHook(BaseHook):
             with attempt:
                 async with self._session.post(
                     resource,
-                    auth=aiohttp.BasicAuth(self.databricks_conn.login, self.databricks_conn.password),
+                    auth=aiohttp.BasicAuth(
+                        self._get_connection_attr("login"), self.databricks_conn.password
+                    ),
                     data="grant_type=client_credentials&scope=all-apis",
                     headers={
                         **self.user_agent_header,
@@ -316,7 +323,7 @@ class BaseDatabricksHook(BaseHook):
             token = ManagedIdentityCredential().get_token(f"{resource}/.default")
         else:
             credential = ClientSecretCredential(
-                client_id=self.databricks_conn.login,
+                client_id=self._get_connection_attr("login"),
                 client_secret=self.databricks_conn.password,
                 tenant_id=self.databricks_conn.extra_dejson["azure_tenant_id"],
             )
@@ -364,7 +371,7 @@ class BaseDatabricksHook(BaseHook):
             token = await credential.get_token(f"{resource}/.default")
         else:
             async with AsyncClientSecretCredential(
-                client_id=self.databricks_conn.login,
+                client_id=self._get_connection_attr("login"),
                 client_secret=self.databricks_conn.password,
                 tenant_id=self.databricks_conn.extra_dejson["azure_tenant_id"],
             ) as credential:
@@ -678,7 +685,7 @@ class BaseDatabricksHook(BaseHook):
             auth = _TokenAuth(token)
         else:
             self.log.info("Using basic auth.")
-            auth = HTTPBasicAuth(self.databricks_conn.login, self.databricks_conn.password)
+            auth = HTTPBasicAuth(self._get_connection_attr("login"), self.databricks_conn.password)
 
         request_func: Any
         if method == "GET":
@@ -745,7 +752,7 @@ class BaseDatabricksHook(BaseHook):
             auth = BearerAuth(token)
         else:
             self.log.info("Using basic auth.")
-            auth = aiohttp.BasicAuth(self.databricks_conn.login, self.databricks_conn.password)
+            auth = aiohttp.BasicAuth(self._get_connection_attr("login"), self.databricks_conn.password)
 
         request_func: Any
         if method == "GET":
airflow/providers/databricks/hooks/databricks_sql.py

@@ -184,13 +184,13 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
             "catalog": self.catalog,
             "schema": self.schema,
         }
-
+        url_query_formatted: dict[str, str] = {k: v for k, v in url_query.items() if v is not None}
         return URL.create(
             drivername="databricks",
             username="token",
             password=self._get_token(raise_error=True),
             host=self.host,
-            query=url_query,
+            query=url_query_formatted,
         )
 
     def get_uri(self) -> str:
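`sqlalchemy.engine.URL.create` expects string values in its `query` mapping, so the hook now strips entries whose value is `None` (an unset `catalog` or `schema`) before building the URI. A small sketch, assuming SQLAlchemy 1.4+ is installed (the host and token are hypothetical):

```python
from sqlalchemy.engine import URL

url_query = {"catalog": "main", "schema": None}  # schema left unset
# Keep only real parameters; URL.create rejects non-string values.
url_query_formatted: dict[str, str] = {k: v for k, v in url_query.items() if v is not None}

url = URL.create(
    drivername="databricks",
    username="token",
    password="dapi-example-token",  # hypothetical
    host="example.cloud.databricks.com",
    query=url_query_formatted,
)
print(str(url))  # databricks://token:dapi-example-token@example.cloud.databricks.com?catalog=main
```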
@@ -278,7 +278,7 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
         self.log.info("Running statement: %s, parameters: %s", sql_statement, parameters)
         # when using AAD tokens, it could expire if previous query run longer than token lifetime
         conn = self.get_conn()
-        with closing(conn.cursor()) as cur:
+        with closing(conn.cursor()):
             self.set_autocommit(conn, autocommit)
 
             with closing(conn.cursor()) as cur:
airflow/providers/databricks/plugins/databricks_workflow.py

@@ -17,7 +17,6 @@
 
 from __future__ import annotations
 
-import os
 from typing import TYPE_CHECKING, Any
 from urllib.parse import unquote
 
@@ -40,10 +39,6 @@ if TYPE_CHECKING:
     from airflow.sdk.types import Logger
 
 
-REPAIR_WAIT_ATTEMPTS = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
-REPAIR_WAIT_DELAY = os.getenv("DATABRICKS_REPAIR_WAIT_DELAY", 0.5)
-
-
 def get_databricks_task_ids(
     group_id: str, task_map: dict[str, DatabricksTaskBaseOperator], log: Logger
 ) -> list[str]:
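Dropping these module-level constants is also what makes the `os` import removal above possible. Worth noting about the removed pattern: `os.getenv` returns the environment string when the variable is set and the default object unchanged otherwise, so `REPAIR_WAIT_ATTEMPTS` could be either `int` or `str` at runtime. A quick illustration:

```python
import os

attempts = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
print(type(attempts))  # <class 'int'>: the default is returned as-is when unset

os.environ["DATABRICKS_REPAIR_WAIT_ATTEMPTS"] = "20"
attempts = os.getenv("DATABRICKS_REPAIR_WAIT_ATTEMPTS", 20)
print(type(attempts))  # <class 'str'>: set variables always come back as strings
```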
@@ -148,7 +143,7 @@ if not AIRFLOW_V_3_0_PLUS:
         if not session:
             raise AirflowException("Session not provided.")
 
-        return session.query(DagRun).filter(DagRun.dag_id == dag.dag_id, DagRun.run_id == run_id).first()
+        return session.query(DagRun).filter(DagRun.dag_id == dag.dag_id, DagRun.run_id == run_id).one()
 
     @provide_session
     def _clear_task_instances(
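Switching the DagRun lookup to `.one()` turns a missing or duplicated run into an immediate error rather than a `None` that fails later. In SQLAlchemy, `.first()` returns `None` on no match, while `.one()` raises `NoResultFound` (or `MultipleResultsFound` for several rows). A self-contained sketch with an in-memory SQLite database (the `Run` model is a hypothetical stand-in):

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.exc import NoResultFound
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Run(Base):  # hypothetical stand-in for DagRun
    __tablename__ = "run"
    id = Column(Integer, primary_key=True)
    run_id = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # .first() silently yields None for a missing row ...
    assert session.query(Run).filter(Run.run_id == "missing").first() is None
    # ... while .one() fails fast at the lookup site.
    try:
        session.query(Run).filter(Run.run_id == "missing").one()
    except NoResultFound:
        print("one() raised immediately for the absent row")
```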
airflow/providers/databricks/sensors/databricks_partition.py

@@ -189,9 +189,10 @@ class DatabricksPartitionSensor(BaseSensorOperator):
         formatted_opts = ""
         if opts:
             output_list = []
-            for partition_col, partition_value in opts.items():
-                if escape_key:
-                    partition_col = self.escaper.escape_item(partition_col)
+            for partition_col_raw, partition_value in opts.items():
+                partition_col = (
+                    self.escaper.escape_item(partition_col_raw) if escape_key else partition_col_raw
+                )
                 if partition_col in partition_columns:
                     if isinstance(partition_value, list):
                         output_list.append(f"""{partition_col} in {tuple(partition_value)}""")
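The rewritten loop escapes the partition key once, via a conditional expression, before checking it against the table's partition columns and building the `in` filter. A simplified sketch of that filter-building shape (the escaper here is a hypothetical stand-in for the provider's SQL escaper):

```python
def build_partition_filters(opts: dict, partition_columns: list[str], escape_key: bool) -> list[str]:
    def escape_item(value: str) -> str:  # hypothetical stand-in escaper
        return f"`{value}`"

    output_list = []
    for partition_col_raw, partition_value in opts.items():
        # Escape the key once up front, only when requested.
        partition_col = escape_item(partition_col_raw) if escape_key else partition_col_raw
        if partition_col in partition_columns:
            if isinstance(partition_value, list):
                output_list.append(f"{partition_col} in {tuple(partition_value)}")
    return output_list


print(build_partition_filters({"dt": ["2024-01-01", "2024-01-02"]}, ["dt"], escape_key=False))
# ["dt in ('2024-01-01', '2024-01-02')"]
```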
{apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/METADATA

@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.7.4
+Version: 7.8.0
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,13 +15,14 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0
 Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
 Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
 Requires-Dist: requests>=2.32.0,<3
@@ -38,8 +40,8 @@ Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlinea
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.4/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.4
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -75,7 +77,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.7.4``
+Release: ``7.8.0``
 
 
 `Databricks <https://databricks.com/>`__
@@ -88,7 +90,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.4/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/>`_.
 
 Installation
 ------------
@@ -105,7 +107,7 @@ Requirements
 ========================================== ======================================
 PIP package                                Version required
 ========================================== ======================================
-``apache-airflow``                         ``>=2.
+``apache-airflow``                         ``>=2.11.0``
 ``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
 ``requests``                               ``>=2.32.0,<3``
@@ -154,5 +156,5 @@ Extra Dependencies
 ================== ================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.7.4/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.8.0/changelog.html>`_.
 
{apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/RECORD

@@ -1,22 +1,21 @@
-airflow/providers/databricks/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/databricks/__init__.py,sha256=47IUUh1RLJbiG9at92L1--O2ozh0fap2Swxx9hN7ySs,1499
+airflow/providers/databricks/__init__.py,sha256=jK9hWZ8jptf_y_7T6PywlGiLdB4zq1EsGECyxMGFi8A,1499
 airflow/providers/databricks/exceptions.py,sha256=85RklmLOI_PnTzfXNIUd5fAu2aMMUhelwumQAX0wANE,1261
-airflow/providers/databricks/get_provider_info.py,sha256=
+airflow/providers/databricks/get_provider_info.py,sha256=LfK0AwIARVh4tX5146-J2VRZwfe6GP3xjLyltA7X7iU,5738
 airflow/providers/databricks/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
 airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/databricks/hooks/databricks.py,sha256=
-airflow/providers/databricks/hooks/databricks_base.py,sha256=
-airflow/providers/databricks/hooks/databricks_sql.py,sha256
+airflow/providers/databricks/hooks/databricks.py,sha256=uOBPUUAEc9eHBdvMgNyJzWivIFCt_GQgR4UlRrRxqgM,29754
+airflow/providers/databricks/hooks/databricks_base.py,sha256=m-m2AKqD3-6mEfvuwgo7Era47zGzsjKbpLTRQNjiUS4,36864
+airflow/providers/databricks/hooks/databricks_sql.py,sha256=xougOWuFgQzhBzFcuYkbX-lo0FpKCQztXoBETJEzesg,17755
 airflow/providers/databricks/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/operators/databricks.py,sha256=bVhFNTeGboHkmaJkYDYEyF0V1QUOB_RnsvwaCuEtIew,79316
 airflow/providers/databricks/operators/databricks_repos.py,sha256=VRZye45ZMlDxti6ZJjuouox5umiMoeQ-BKugPpE7jnM,13155
 airflow/providers/databricks/operators/databricks_sql.py,sha256=gwpkr660qpk4dUve98RB-hniaMzuXL6znQZZGilJxi0,21842
 airflow/providers/databricks/operators/databricks_workflow.py,sha256=QLsR0pGLWvvQbutsjj4RWwBE-z6tkWiYLHj6waMv8ZE,15019
 airflow/providers/databricks/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/databricks/plugins/databricks_workflow.py,sha256=
+airflow/providers/databricks/plugins/databricks_workflow.py,sha256=5vyG2WNM25ptSv5IwAndUTqKAOmTneOWy_pAtqBKcgc,20020
 airflow/providers/databricks/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/databricks/sensors/databricks.py,sha256=RrjSzncvppdp5U8RYHd975MCIQIb_s1VQoxm9Aqbvac,6262
-airflow/providers/databricks/sensors/databricks_partition.py,sha256=
+airflow/providers/databricks/sensors/databricks_partition.py,sha256=qPDy8oxg-Lo-jnHy1EbxmA5GIjC6t0XnFJ1E3aAmUgg,9940
 airflow/providers/databricks/sensors/databricks_sql.py,sha256=shq7ng4LCiaD4Q7lorm4g1A7aijmq3nVUnCFlYtoI7c,5633
 airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/triggers/databricks.py,sha256=DQbXLw1W_e3Iw-hsDph7vPuHc2caj623V7WmA2_PftM,8672
@@ -24,7 +23,9 @@ airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/databricks/utils/databricks.py,sha256=ecvzZbC4KdXds47VeSayot9EO-RQnTRJTEwKITH7waQ,5117
 airflow/providers/databricks/utils/mixins.py,sha256=WUmkt3AmXalmV6zOUIJZWbTldxYunAZOstddDhKCC94,7407
 airflow/providers/databricks/utils/openlineage.py,sha256=1jT5Woh9YifawdP-VFWsabfF-ecuCjPlzD5P_W4DAhI,15078
-apache_airflow_providers_databricks-7.
-apache_airflow_providers_databricks-7.
-apache_airflow_providers_databricks-7.
-apache_airflow_providers_databricks-7.
+apache_airflow_providers_databricks-7.8.0.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
+apache_airflow_providers_databricks-7.8.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_databricks-7.8.0.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_databricks-7.8.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_databricks-7.8.0.dist-info/METADATA,sha256=ag4y8wdimVeG_xpU6SGjicRyOsUe66vk2zyUTQ8h9c0,7750
+apache_airflow_providers_databricks-7.8.0.dist-info/RECORD,,
Files without changes:

- {apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/WHEEL
- {apache_airflow_providers_databricks-7.7.4.dist-info → apache_airflow_providers_databricks-7.8.0.dist-info}/entry_points.txt
- {airflow/providers/databricks → apache_airflow_providers_databricks-7.8.0.dist-info/licenses}/LICENSE