apache-airflow-providers-databricks 6.1.0rc2.tar.gz → 6.2.0.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-databricks might be problematic.

Files changed (21)
  1. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/PKG-INFO +12 -12
  2. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/README.rst +5 -5
  3. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/__init__.py +1 -1
  4. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/get_provider_info.py +4 -3
  5. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/operators/databricks.py +18 -15
  6. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/operators/databricks_sql.py +3 -3
  7. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/pyproject.toml +7 -7
  8. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/LICENSE +0 -0
  9. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/hooks/__init__.py +0 -0
  10. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/hooks/databricks.py +0 -0
  11. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/hooks/databricks_base.py +0 -0
  12. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/hooks/databricks_sql.py +0 -0
  13. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/operators/__init__.py +0 -0
  14. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  15. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/sensors/__init__.py +0 -0
  16. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  17. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  18. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/triggers/__init__.py +0 -0
  19. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/triggers/databricks.py +0 -0
  20. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/utils/__init__.py +0 -0
  21. {apache_airflow_providers_databricks-6.1.0rc2 → apache_airflow_providers_databricks-6.2.0}/airflow/providers/databricks/utils/databricks.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-databricks
- Version: 6.1.0rc2
+ Version: 6.2.0
  Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
  Keywords: airflow-provider,databricks,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,16 +20,16 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: aiohttp>=3.6.3, <4
- Requires-Dist: apache-airflow-providers-common-sql>=1.10.0.dev0
- Requires-Dist: apache-airflow>=2.6.0.dev0
+ Requires-Dist: aiohttp>=3.9.2, <4
+ Requires-Dist: apache-airflow-providers-common-sql>=1.10.0
+ Requires-Dist: apache-airflow>=2.6.0
  Requires-Dist: databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0
- Requires-Dist: requests>=2.27,<3
+ Requires-Dist: requests>=2.27.0,<3
  Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
  Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -81,7 +81,7 @@ Provides-Extra: sdk

  Package ``apache-airflow-providers-databricks``

- Release: ``6.1.0.rc2``
+ Release: ``6.2.0``


  `Databricks <https://databricks.com/>`__
@@ -94,7 +94,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0/>`_.

  Installation
  ------------
@@ -113,9 +113,9 @@ PIP package Version required
  ======================================= ==========================
  ``apache-airflow``                      ``>=2.6.0``
  ``apache-airflow-providers-common-sql`` ``>=1.10.0``
- ``requests``                            ``>=2.27,<3``
+ ``requests``                            ``>=2.27.0,<3``
  ``databricks-sql-connector``            ``>=2.0.0,!=2.9.0,<3.0.0``
- ``aiohttp``                             ``>=3.6.3,<4``
+ ``aiohttp``                             ``>=3.9.2,<4``
  ======================================= ==========================

  Cross provider package dependencies
@@ -138,4 +138,4 @@ Dependent package
  ============================================================================================================ ==============

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0/changelog.html>`_.
README.rst

@@ -42,7 +42,7 @@

  Package ``apache-airflow-providers-databricks``

- Release: ``6.1.0.rc2``
+ Release: ``6.2.0``


  `Databricks <https://databricks.com/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
  are in ``airflow.providers.databricks`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0/>`_.

  Installation
  ------------
@@ -74,9 +74,9 @@ PIP package Version required
  ======================================= ==========================
  ``apache-airflow``                      ``>=2.6.0``
  ``apache-airflow-providers-common-sql`` ``>=1.10.0``
- ``requests``                            ``>=2.27,<3``
+ ``requests``                            ``>=2.27.0,<3``
  ``databricks-sql-connector``            ``>=2.0.0,!=2.9.0,<3.0.0``
- ``aiohttp``                             ``>=3.6.3,<4``
+ ``aiohttp``                             ``>=3.9.2,<4``
  ======================================= ==========================

  Cross provider package dependencies
@@ -99,4 +99,4 @@ Dependent package
  ============================================================================================================ ==============

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0/changelog.html>`_.
airflow/providers/databricks/__init__.py

@@ -27,7 +27,7 @@ import packaging.version

  __all__ = ["__version__"]

- __version__ = "6.1.0"
+ __version__ = "6.2.0"

  try:
      from airflow import __version__ as airflow_version
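
Only the version constant changes here. To confirm that an environment actually imports the upgraded provider, a quick check such as the following (a sketch, not part of the release) works:

    # Sanity check (sketch): verify which provider build Airflow imports at runtime.
    from airflow.providers.databricks import __version__

    assert __version__ == "6.2.0", f"unexpected provider version: {__version__}"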
airflow/providers/databricks/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
          "name": "Databricks",
          "description": "`Databricks <https://databricks.com/>`__\n",
          "state": "ready",
-         "source-date-epoch": 1705912006,
+         "source-date-epoch": 1707636345,
          "versions": [
+             "6.2.0",
              "6.1.0",
              "6.0.0",
              "5.0.1",
@@ -67,9 +68,9 @@ def get_provider_info():
          "dependencies": [
              "apache-airflow>=2.6.0",
              "apache-airflow-providers-common-sql>=1.10.0",
-             "requests>=2.27,<3",
+             "requests>=2.27.0,<3",
              "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
-             "aiohttp>=3.6.3, <4",
+             "aiohttp>=3.9.2, <4",
          ],
          "additional-extras": [
              {
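
This is the metadata Airflow's provider discovery reads: the dict returned by get_provider_info() now lists 6.2.0 first and carries the bumped dependency pins. A minimal sketch of inspecting it directly:

    # Minimal sketch: inspect the provider metadata shipped with the package.
    from airflow.providers.databricks.get_provider_info import get_provider_info

    info = get_provider_info()
    print(info["versions"][0])    # "6.2.0" is now the first (latest) entry
    print(info["dependencies"])   # includes "aiohttp>=3.9.2, <4" and "requests>=2.27.0,<3"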
airflow/providers/databricks/operators/databricks.py

@@ -19,11 +19,12 @@
  from __future__ import annotations

  import time
- import warnings
  from functools import cached_property
  from logging import Logger
  from typing import TYPE_CHECKING, Any, Sequence

+ from deprecated import deprecated
+
  from airflow.configuration import conf
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
  from airflow.models import BaseOperator, BaseOperatorLink, XCom
@@ -562,17 +563,18 @@ class DatabricksSubmitRunOperator(BaseOperator):
          _handle_deferrable_databricks_operator_completion(event, self.log)


+ @deprecated(
+     reason=(
+         "`DatabricksSubmitRunDeferrableOperator` has been deprecated. "
+         "Please use `airflow.providers.databricks.operators.DatabricksSubmitRunOperator` "
+         "with `deferrable=True` instead."
+     ),
+     category=AirflowProviderDeprecationWarning,
+ )
  class DatabricksSubmitRunDeferrableOperator(DatabricksSubmitRunOperator):
      """Deferrable version of ``DatabricksSubmitRunOperator``."""

      def __init__(self, *args, **kwargs):
-         warnings.warn(
-             "`DatabricksSubmitRunDeferrableOperator` has been deprecated. "
-             "Please use `airflow.providers.databricks.operators.DatabricksSubmitRunOperator` with "
-             "`deferrable=True` instead.",
-             AirflowProviderDeprecationWarning,
-             stacklevel=2,
-         )
          super().__init__(deferrable=True, *args, **kwargs)

      def execute(self, context):
@@ -842,15 +844,16 @@ class DatabricksRunNowOperator(BaseOperator):
              self.log.error("Error: Task: %s with invalid run_id was requested to be cancelled.", self.task_id)


+ @deprecated(
+     reason=(
+         "`DatabricksRunNowDeferrableOperator` has been deprecated. "
+         "Please use `airflow.providers.databricks.operators.DatabricksRunNowOperator` "
+         "with `deferrable=True` instead."
+     ),
+     category=AirflowProviderDeprecationWarning,
+ )
  class DatabricksRunNowDeferrableOperator(DatabricksRunNowOperator):
      """Deferrable version of ``DatabricksRunNowOperator``."""

      def __init__(self, *args, **kwargs):
-         warnings.warn(
-             "`DatabricksRunNowDeferrableOperator` has been deprecated. "
-             "Please use `airflow.providers.databricks.operators.DatabricksRunNowOperator` with "
-             "`deferrable=True` instead.",
-             AirflowProviderDeprecationWarning,
-             stacklevel=2,
-         )
          super().__init__(deferrable=True, *args, **kwargs)
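
The deferrable subclasses are now deprecated at class level via the `deprecated` package's decorator (with `category=AirflowProviderDeprecationWarning`) instead of a `warnings.warn` call in `__init__`; instantiating them still emits the deprecation warning, and the recommended migration is to pass `deferrable=True` to the base operators. A hedged sketch of that migration (the DAG id, task ids, job_id, and run payload below are illustrative placeholders, not taken from this release):

    # Migration sketch: use the base operators with deferrable=True instead of the
    # deprecated *DeferrableOperator subclasses. Ids and payloads are placeholders.
    from datetime import datetime

    from airflow import DAG
    from airflow.providers.databricks.operators.databricks import (
        DatabricksRunNowOperator,
        DatabricksSubmitRunOperator,
    )

    with DAG("databricks_deferrable_example", start_date=datetime(2024, 1, 1), schedule=None):
        submit = DatabricksSubmitRunOperator(
            task_id="submit_run",
            json={
                "new_cluster": {"spark_version": "13.3.x-scala2.12", "num_workers": 2},
                "notebook_task": {"notebook_path": "/Shared/example_notebook"},
            },
            deferrable=True,  # replaces DatabricksSubmitRunDeferrableOperator
        )

        run_now = DatabricksRunNowOperator(
            task_id="run_now",
            job_id=1234,
            deferrable=True,  # replaces DatabricksRunNowDeferrableOperator
        )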
airflow/providers/databricks/operators/databricks_sql.py

@@ -146,14 +146,14 @@ class DatabricksSqlOperator(SQLExecuteQueryOperator):
                  if write_header:
                      writer.writeheader()
                  for row in last_results:
-                     writer.writerow(row.asDict())
+                     writer.writerow(row._asdict())
          elif self._output_format.lower() == "json":
              with open(self._output_path, "w") as file:
-                 file.write(json.dumps([row.asDict() for row in last_results]))
+                 file.write(json.dumps([row._asdict() for row in last_results]))
          elif self._output_format.lower() == "jsonl":
              with open(self._output_path, "w") as file:
                  for row in last_results:
-                     file.write(json.dumps(row.asDict()))
+                     file.write(json.dumps(row._asdict()))
                      file.write("\n")
          else:
              raise AirflowException(f"Unsupported output format: '{self._output_format}'")
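
The result-serialization change swaps the PySpark-style `row.asDict()` for the namedtuple-style `row._asdict()` used by the operator's result rows. A small standalone sketch of the pattern, using a plain namedtuple as a stand-in for a connector result row:

    # Standalone sketch: namedtuple-style rows serialize via _asdict(), which is
    # the pattern the operator now relies on. Row here is a stand-in class.
    import json
    from collections import namedtuple

    Row = namedtuple("Row", ["id", "name"])
    last_results = [Row(1, "alpha"), Row(2, "beta")]

    print(json.dumps([row._asdict() for row in last_results]))
    # -> [{"id": 1, "name": "alpha"}, {"id": 2, "name": "beta"}]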
pyproject.toml

@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-databricks"
- version = "6.1.0.rc2"
+ version = "6.2.0"
  description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -55,16 +55,16 @@ classifiers = [
  ]
  requires-python = "~=3.8"
  dependencies = [
-     "aiohttp>=3.6.3, <4",
-     "apache-airflow-providers-common-sql>=1.10.0.dev0",
-     "apache-airflow>=2.6.0.dev0",
+     "aiohttp>=3.9.2, <4",
+     "apache-airflow-providers-common-sql>=1.10.0",
+     "apache-airflow>=2.6.0",
      "databricks-sql-connector>=2.0.0, <3.0.0, !=2.9.0",
-     "requests>=2.27,<3",
+     "requests>=2.27.0,<3",
  ]

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.1.0/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/6.2.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"