apache-airflow-providers-databricks 7.2.1rc1.tar.gz → 7.2.2.tar.gz

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.

Note: this release of apache-airflow-providers-databricks has been flagged as potentially problematic.

Files changed (26)
  1. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/PKG-INFO +10 -10
  2. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/README.rst +4 -4
  3. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/pyproject.toml +7 -7
  4. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/__init__.py +1 -1
  5. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/get_provider_info.py +2 -1
  6. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/operators/databricks.py +3 -1
  7. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/plugins/databricks_workflow.py +2 -1
  8. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/LICENSE +0 -0
  9. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/exceptions.py +0 -0
  10. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/hooks/__init__.py +0 -0
  11. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/hooks/databricks.py +0 -0
  12. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/hooks/databricks_base.py +0 -0
  13. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/hooks/databricks_sql.py +0 -0
  14. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/operators/__init__.py +0 -0
  15. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  16. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/operators/databricks_sql.py +0 -0
  17. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/operators/databricks_workflow.py +0 -0
  18. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/plugins/__init__.py +0 -0
  19. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/sensors/__init__.py +0 -0
  20. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  21. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  22. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/triggers/__init__.py +0 -0
  23. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/triggers/databricks.py +0 -0
  24. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/utils/__init__.py +0 -0
  25. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/utils/databricks.py +0 -0
  26. {apache_airflow_providers_databricks-7.2.1rc1 → apache_airflow_providers_databricks-7.2.2}/src/airflow/providers/databricks/version_compat.py +0 -0
--- apache_airflow_providers_databricks-7.2.1rc1/PKG-INFO
+++ apache_airflow_providers_databricks-7.2.2/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-databricks
-Version: 7.2.1rc1
+Version: 7.2.2
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,8 +20,8 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: requests>=2.27.0,<3
 Requires-Dist: databricks-sql-connector>=3.0.0
 Requires-Dist: aiohttp>=3.9.2, <4
@@ -33,11 +33,11 @@ Requires-Dist: apache-airflow-providers-fab ; extra == "fab"
 Requires-Dist: databricks-sdk==0.10.0 ; extra == "sdk"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2
+Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: azure-identity
 Provides-Extra: fab
@@ -69,7 +69,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.2.1``
+Release: ``7.2.2``
 
 
 `Databricks <https://databricks.com/>`__
@@ -82,7 +82,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2/>`_.
 
 Installation
 ------------
@@ -113,7 +113,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -130,5 +130,5 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2/changelog.html>`_.
 
--- apache_airflow_providers_databricks-7.2.1rc1/README.rst
+++ apache_airflow_providers_databricks-7.2.2/README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``7.2.1``
+Release: ``7.2.2``
 
 
 `Databricks <https://databricks.com/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``databricks`` provider. All classes for this pro
 are in ``airflow.providers.databricks`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2/>`_.
 
 Installation
 ------------
@@ -67,7 +67,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -84,4 +84,4 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2/changelog.html>`_.
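
Aside from the documentation URLs and the Twitter-to-Mastodon link swap, the PKG-INFO and README changes above amount to the version stamp and two relaxed dependency floors. A quick way to confirm them against an installed copy, using the standard-library importlib.metadata — a verification sketch, not part of the diff, assuming 7.2.2 is installed:

    from importlib.metadata import metadata, version

    dist = "apache-airflow-providers-databricks"
    print(version(dist))  # expected: 7.2.2

    # Requires-Dist entries come straight from the PKG-INFO shown above,
    # e.g. apache-airflow>=2.9.0 with the rc0 suffix gone.
    for requirement in metadata(dist).get_all("Requires-Dist") or []:
        print(requirement)
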
--- apache_airflow_providers_databricks-7.2.1rc1/pyproject.toml
+++ apache_airflow_providers_databricks-7.2.2/pyproject.toml
@@ -20,12 +20,12 @@
 # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
-requires = ["flit_core==3.11.0"]
+requires = ["flit_core==3.12.0"]
 build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-databricks"
-version = "7.2.1.rc1"
+version = "7.2.2"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,8 +57,8 @@ requires-python = "~=3.9"
 # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.9.0rc0",
-    "apache-airflow-providers-common-sql>=1.20.0rc0",
+    "apache-airflow>=2.9.0",
+    "apache-airflow-providers-common-sql>=1.20.0",
     "requests>=2.27.0,<3",
     "databricks-sql-connector>=3.0.0",
     "aiohttp>=3.9.2, <4",
@@ -106,12 +106,12 @@ apache-airflow-providers-fab = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.2.2/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
-"Twitter" = "https://x.com/ApacheAirflow"
+"Mastodon" = "https://fosstodon.org/@airflow"
 "YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
 
 [project.entry-points."apache_airflow_provider"]
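
The only dependency change is dropping the rc0 suffix from the two Airflow floors — the usual difference between an RC build and a final release. Under PEP 440 resolution, a >=2.9.0rc0 specifier opts in to pre-releases, while >=2.9.0 does not match them by default. A small sketch with the packaging library (not part of the diff) illustrating the distinction:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    rc_floor = SpecifierSet(">=2.9.0rc0")   # the 7.2.1rc1 floor
    final_floor = SpecifierSet(">=2.9.0")   # the 7.2.2 floor

    # A prerelease in the specifier opts in to prereleases; a final-only
    # floor excludes them under default resolution rules.
    print(Version("2.9.0rc1") in rc_floor)     # True
    print(Version("2.9.0rc1") in final_floor)  # False
    print(Version("2.9.0") in final_floor)     # True
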
--- apache_airflow_providers_databricks-7.2.1rc1/src/airflow/providers/databricks/__init__.py
+++ apache_airflow_providers_databricks-7.2.2/src/airflow/providers/databricks/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "7.2.1"
+__version__ = "7.2.2"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
--- apache_airflow_providers_databricks-7.2.1rc1/src/airflow/providers/databricks/get_provider_info.py
+++ apache_airflow_providers_databricks-7.2.2/src/airflow/providers/databricks/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Databricks",
         "description": "`Databricks <https://databricks.com/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1741508706,
+        "source-date-epoch": 1742979996,
         "versions": [
+            "7.2.2",
             "7.2.1",
             "7.2.0",
             "7.0.0",
--- apache_airflow_providers_databricks-7.2.1rc1/src/airflow/providers/databricks/operators/databricks.py
+++ apache_airflow_providers_databricks-7.2.2/src/airflow/providers/databricks/operators/databricks.py
@@ -29,7 +29,7 @@ from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator, XCom
+from airflow.models import BaseOperator
 from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunLifeCycleState, RunState
 from airflow.providers.databricks.operators.databricks_workflow import (
     DatabricksWorkflowTaskGroup,
@@ -50,7 +50,9 @@ if TYPE_CHECKING:
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperatorLink
+    from airflow.sdk.execution_time.xcom import XCom
 else:
+    from airflow.models import XCom  # type: ignore[no-redef]
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
 
 DEFER_METHOD_NAME = "execute_complete"
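
The two code changes in this module form a pair: the unconditional XCom import is removed in the first hunk and re-added in the second, gated on the Airflow major version, mirroring the existing BaseOperatorLink handling. The same pattern appears in the plugin diff that follows. Extracted as a standalone snippet, verbatim from the hunks:

    from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS

    if AIRFLOW_V_3_0_PLUS:
        # Airflow 3: XCom lives in the Task SDK's execution-time package.
        from airflow.sdk.execution_time.xcom import XCom
    else:
        # Airflow 2: XCom remains importable from airflow.models.
        from airflow.models import XCom  # type: ignore[no-redef]
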
--- apache_airflow_providers_databricks-7.2.1rc1/src/airflow/providers/databricks/plugins/databricks_workflow.py
+++ apache_airflow_providers_databricks-7.2.2/src/airflow/providers/databricks/plugins/databricks_workflow.py
@@ -31,7 +31,6 @@ from airflow.models import DagBag
 from airflow.models.dag import DAG, clear_task_instances
 from airflow.models.dagrun import DagRun
 from airflow.models.taskinstance import TaskInstance, TaskInstanceKey
-from airflow.models.xcom import XCom
 from airflow.plugins_manager import AirflowPlugin
 from airflow.providers.databricks.hooks.databricks import DatabricksHook
 from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
@@ -53,7 +52,9 @@ if TYPE_CHECKING:
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperatorLink
+    from airflow.sdk.execution_time.xcom import XCom
 else:
+    from airflow.models import XCom  # type: ignore[no-redef]
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
 
 
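
For reference, the __init__.py hunk earlier ends mid-statement: the comparison it shows is the provider's minimum-Airflow-version guard. A standalone sketch of that guard follows; only the comparison appears in the diff, so the sample installed version and the exact exception raised are illustrative assumptions:

    import packaging.version

    airflow_version = "2.8.1"  # hypothetical installed Airflow version

    if packaging.version.parse(
        packaging.version.parse(airflow_version).base_version
    ) < packaging.version.parse("2.9.0"):
        # The real module raises here; the message is an assumption.
        raise RuntimeError(
            "apache-airflow-providers-databricks 7.2.2 requires Apache Airflow 2.9.0+"
        )
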