apache-airflow-providers-databricks 7.3.0rc1__py3-none-any.whl → 7.3.1rc1__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- airflow/providers/databricks/__init__.py +1 -1
- airflow/providers/databricks/get_provider_info.py +0 -71
- airflow/providers/databricks/plugins/databricks_workflow.py +4 -0
- {apache_airflow_providers_databricks-7.3.0rc1.dist-info → apache_airflow_providers_databricks-7.3.1rc1.dist-info}/METADATA +6 -6
- {apache_airflow_providers_databricks-7.3.0rc1.dist-info → apache_airflow_providers_databricks-7.3.1rc1.dist-info}/RECORD +7 -7
- {apache_airflow_providers_databricks-7.3.0rc1.dist-info → apache_airflow_providers_databricks-7.3.1rc1.dist-info}/WHEEL +1 -1
- {apache_airflow_providers_databricks-7.3.0rc1.dist-info → apache_airflow_providers_databricks-7.3.1rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/databricks/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.3.0"
+__version__ = "7.3.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
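For context, the lines around this hunk are the provider's minimum-Airflow guard, which only the version string passes through unchanged. The sketch below paraphrases that guard; it is not part of the diff, and the exact wording of the error raised in the shipped file may differ.

# Paraphrased sketch of the version guard in __init__.py (not part of the diff);
# the RuntimeError message is an assumption based on the standard provider template.
import packaging.version

from airflow import __version__ as airflow_version

__version__ = "7.3.1"

if packaging.version.parse(
    packaging.version.parse(airflow_version).base_version
) < packaging.version.parse("2.9.0"):
    raise RuntimeError(
        f"The package apache-airflow-providers-databricks:{__version__} "
        f"requires Apache Airflow 2.9.0+"
    )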
airflow/providers/databricks/get_provider_info.py

@@ -26,60 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-databricks",
         "name": "Databricks",
         "description": "`Databricks <https://databricks.com/>`__\n",
-        "state": "ready",
-        "source-date-epoch": 1743835987,
-        "versions": [
-            "7.3.0",
-            "7.2.2",
-            "7.2.1",
-            "7.2.0",
-            "7.0.0",
-            "6.13.0",
-            "6.12.0",
-            "6.11.0",
-            "6.10.0",
-            "6.9.0",
-            "6.8.0",
-            "6.7.0",
-            "6.6.0",
-            "6.5.0",
-            "6.4.0",
-            "6.3.0",
-            "6.2.0",
-            "6.1.0",
-            "6.0.0",
-            "5.0.1",
-            "5.0.0",
-            "4.7.0",
-            "4.6.0",
-            "4.5.0",
-            "4.4.0",
-            "4.3.3",
-            "4.3.2",
-            "4.3.1",
-            "4.3.0",
-            "4.2.0",
-            "4.1.0",
-            "4.0.1",
-            "4.0.0",
-            "3.4.0",
-            "3.3.0",
-            "3.2.0",
-            "3.1.0",
-            "3.0.0",
-            "2.7.0",
-            "2.6.0",
-            "2.5.0",
-            "2.4.0",
-            "2.3.0",
-            "2.2.0",
-            "2.1.0",
-            "2.0.2",
-            "2.0.1",
-            "2.0.0",
-            "1.0.1",
-            "1.0.0",
-        ],
         "integrations": [
             {
                 "integration-name": "Databricks",
@@ -179,21 +125,4 @@ def get_provider_info():
             }
         ],
         "extra-links": ["airflow.providers.databricks.operators.databricks.DatabricksJobRunLink"],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-sql>=1.20.0",
-            "requests>=2.31.0,<3",
-            "databricks-sql-connector>=3.0.0",
-            "aiohttp>=3.9.2, <4",
-            "mergedeep>=1.3.4",
-            "pandas>=2.1.2,<2.2",
-            "pyarrow>=14.0.1",
-        ],
-        "optional-dependencies": {
-            "sdk": ["databricks-sdk==0.10.0"],
-            "azure-identity": ["azure-identity>=1.3.1"],
-            "fab": ["apache-airflow-providers-fab"],
-            "standard": ["apache-airflow-providers-standard"],
-        },
-        "devel-dependencies": ["deltalake>=0.12.0"],
     }
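The removed entries (state, versions, dependencies, and extras) duplicate what the wheel's own metadata already records; the same pins appear as Requires-Dist and Version fields in the METADATA diff further below. If that information is needed at runtime, it can be read from the installed distribution instead of get_provider_info(); a minimal sketch using only the standard library, assuming the provider is installed in the current environment:

# Minimal sketch: read the provider's version and requirement pins from the
# installed package metadata rather than from get_provider_info().
from importlib import metadata

dist = metadata.distribution("apache-airflow-providers-databricks")
print(dist.version)               # e.g. "7.3.1rc1"
for requirement in dist.requires or []:
    print(requirement)            # e.g. "databricks-sql-connector>=3.0.0"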
airflow/providers/databricks/plugins/databricks_workflow.py

@@ -149,12 +149,16 @@ def _repair_task(
         databricks_run_id,
     )
 
+    run_data = hook.get_run(databricks_run_id)
     repair_json = {
         "run_id": databricks_run_id,
         "latest_repair_id": repair_history_id,
         "rerun_tasks": tasks_to_repair,
    }
 
+    if "overriding_parameters" in run_data:
+        repair_json["overriding_parameters"] = run_data["overriding_parameters"]
+
     return hook.repair_run(repair_json)
 
 
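This is the functional change in the release: before submitting a repair request, the plugin now fetches the original run and carries its overriding_parameters (for example notebook_params or python_params set at trigger time) into the repair payload, so repaired tasks rerun with the same parameters. A standalone sketch of that flow, assuming a configured Databricks connection; the helper name, argument names, and connection id are illustrative and not part of the provider, while get_run and repair_run come from the hook as used in the hunk above.

# Illustrative sketch of the repair flow added here; only DatabricksHook,
# get_run, and repair_run are real provider APIs, the rest is assumed.
from __future__ import annotations

from airflow.providers.databricks.hooks.databricks import DatabricksHook


def repair_run_keeping_overrides(
    databricks_run_id: int,
    tasks_to_repair: list[str],
    repair_history_id: int | None = None,
):
    hook = DatabricksHook(databricks_conn_id="databricks_default")

    # Fetch the original run so parameters passed at trigger time survive the repair.
    run_data = hook.get_run(databricks_run_id)

    repair_json = {
        "run_id": databricks_run_id,
        "latest_repair_id": repair_history_id,
        "rerun_tasks": tasks_to_repair,
    }
    if "overriding_parameters" in run_data:
        repair_json["overriding_parameters"] = run_data["overriding_parameters"]

    # Submit the jobs/runs/repair request through the hook.
    return hook.repair_run(repair_json)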
{apache_airflow_providers_databricks-7.3.0rc1.dist-info → apache_airflow_providers_databricks-7.3.1rc1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.3.0rc1
+Version: 7.3.1rc1
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -33,8 +33,8 @@ Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.1
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -69,7 +69,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.3.0``
+Release: ``7.3.1``
 
 
 `Databricks <https://databricks.com/>`__
@@ -82,7 +82,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.1/>`_.
 
 Installation
 ------------
@@ -130,5 +130,5 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.3.1/changelog.html>`_.
 
{apache_airflow_providers_databricks-7.3.0rc1.dist-info → apache_airflow_providers_databricks-7.3.1rc1.dist-info}/RECORD

@@ -1,7 +1,7 @@
 airflow/providers/databricks/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/databricks/__init__.py,sha256=
+airflow/providers/databricks/__init__.py,sha256=Qoxr8CR24lB4UV9kA2AVdf_THgkF4PbAkme6xE9yzFc,1497
 airflow/providers/databricks/exceptions.py,sha256=85RklmLOI_PnTzfXNIUd5fAu2aMMUhelwumQAX0wANE,1261
-airflow/providers/databricks/get_provider_info.py,sha256=
+airflow/providers/databricks/get_provider_info.py,sha256=qNMX4Lft-NItPhFewFBSCi8n0_ISid_MQeETKQ67vdo,5573
 airflow/providers/databricks/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
 airflow/providers/databricks/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/databricks/hooks/databricks.py,sha256=MSVcURTF7g_B_a3Oi9katAim5ILqlm7MHANGvfRC5yY,28921
@@ -13,7 +13,7 @@ airflow/providers/databricks/operators/databricks_repos.py,sha256=m_72OnnU9df7UB
 airflow/providers/databricks/operators/databricks_sql.py,sha256=thBHpt9_LMLJZ0PN-eLCI3AaT8IFq3NAHLDWDFP-Jiw,17031
 airflow/providers/databricks/operators/databricks_workflow.py,sha256=0vFu4w6O4tlStZ_Jhk1iswKFcTk-g_dthGFeDpXGZlw,14146
 airflow/providers/databricks/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/databricks/plugins/databricks_workflow.py,sha256=
+airflow/providers/databricks/plugins/databricks_workflow.py,sha256=1UpsodBLRrTah9zBGBzfM7n1pdkzTo7yilt6QxASspQ,17460
 airflow/providers/databricks/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/databricks/sensors/databricks_partition.py,sha256=hS6Q2fb84_vASb7Ai50-pmjVukX6G8xIwdaZVIE17oM,10045
 airflow/providers/databricks/sensors/databricks_sql.py,sha256=jIA9oGBUCAlXzyrqigxlg7JQDsBFuNIF8ZUEJM8gPxg,5766
@@ -21,7 +21,7 @@ airflow/providers/databricks/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvR
 airflow/providers/databricks/triggers/databricks.py,sha256=dSogx6GlcJfZ4CFhtlMeWs9sYFEYthP82S_U8-tM2Tk,9240
 airflow/providers/databricks/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/databricks/utils/databricks.py,sha256=9LLgqYAS68s_PTnIez1HfN8xCKPK9D_Dt5SDF4wlbzQ,2890
-apache_airflow_providers_databricks-7.3.0rc1.dist-info/entry_points.txt,sha256=
-apache_airflow_providers_databricks-7.3.0rc1.dist-info/WHEEL,sha256=
-apache_airflow_providers_databricks-7.3.0rc1.dist-info/METADATA,sha256=
-apache_airflow_providers_databricks-7.3.0rc1.dist-info/RECORD,,
+apache_airflow_providers_databricks-7.3.1rc1.dist-info/entry_points.txt,sha256=hjmZm3ab2cteTR4t9eE28oKixHwNIKtLCThd6sx3XRQ,227
+apache_airflow_providers_databricks-7.3.1rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_databricks-7.3.1rc1.dist-info/METADATA,sha256=QYashdryPdIp55gBuqmRkaP5xhS0VV546qVU-HkngFk,6088
+apache_airflow_providers_databricks-7.3.1rc1.dist-info/RECORD,,