apache-airflow-providers-common-compat 1.5.1__tar.gz → 1.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of apache-airflow-providers-common-compat might be problematic.
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/PKG-INFO +8 -8
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/README.rst +4 -4
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/pyproject.toml +5 -5
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/__init__.py +1 -1
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/assets/__init__.py +3 -2
- apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/check.py +99 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/get_provider_info.py +2 -2
- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/security/permissions.py → apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/notifier/__init__.py +8 -6
- apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/openlineage/check.py +104 -0
- apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/openlineage/utils/spark.py +56 -0
- apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/security/permissions.py +40 -0
- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/openlineage/utils/spark.py +0 -130
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/LICENSE +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/lineage/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/lineage/entities.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/lineage/hook.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/openlineage/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/openlineage/facet.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/openlineage/utils/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/openlineage/utils/sql.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/openlineage/utils/utils.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/security/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/standard/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/standard/operators.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/standard/triggers.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/standard/utils.py +0 -0
- {apache_airflow_providers_common_compat-1.5.1 → apache_airflow_providers_common_compat-1.6.0}/src/airflow/providers/common/compat/version_compat.py +0 -0
--- apache_airflow_providers_common_compat-1.5.1/PKG-INFO
+++ apache_airflow_providers_common_compat-1.6.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-compat
-Version: 1.5.1
+Version: 1.6.0
 Summary: Provider package apache-airflow-providers-common-compat for Apache Airflow
 Keywords: airflow-provider,common.compat,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -24,11 +24,11 @@ Requires-Dist: apache-airflow>=2.9.0
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0
+Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: openlineage
 Provides-Extra: standard
@@ -58,7 +58,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-common-compat``
 
-Release: ``1.5.1``
+Release: ``1.6.0``
 
 
 ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``
@@ -71,7 +71,7 @@ This is a provider package for ``common.compat`` provider. All classes for this
 are in ``airflow.providers.common.compat`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/>`_.
 
 Installation
 ------------
@@ -95,7 +95,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -112,5 +112,5 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html>`_.
 
--- apache_airflow_providers_common_compat-1.5.1/README.rst
+++ apache_airflow_providers_common_compat-1.6.0/README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-common-compat``
 
-Release: ``1.5.1``
+Release: ``1.6.0``
 
 
 ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``
@@ -36,7 +36,7 @@ This is a provider package for ``common.compat`` provider. All classes for this
 are in ``airflow.providers.common.compat`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/>`_.
 
 Installation
 ------------
@@ -60,7 +60,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -77,4 +77,4 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html>`_.
--- apache_airflow_providers_common_compat-1.5.1/pyproject.toml
+++ apache_airflow_providers_common_compat-1.6.0/pyproject.toml
@@ -20,12 +20,12 @@
 # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
-requires = ["flit_core==3.
+requires = ["flit_core==3.12.0"]
 build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-common-compat"
-version = "1.5.1"
+version = "1.6.0"
 description = "Provider package apache-airflow-providers-common-compat for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -91,12 +91,12 @@ apache-airflow-providers-fab = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.5.1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
-"Twitter" = "https://x.com/ApacheAirflow"
+"Mastodon" = "https://fosstodon.org/@airflow"
 "YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
 
 [project.entry-points."apache_airflow_provider"]
--- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/__init__.py
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.5.1"
+__version__ = "1.6.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
--- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/assets/__init__.py
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/assets/__init__.py
@@ -25,12 +25,12 @@ from airflow.providers.common.compat.version_compat import (
 )
 
 if TYPE_CHECKING:
-    from airflow.api_fastapi.auth.managers.models.resource_details import AssetDetails
+    from airflow.api_fastapi.auth.managers.models.resource_details import AssetAliasDetails, AssetDetails
     from airflow.models.asset import expand_alias_to_assets
     from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetAll, AssetAny
 else:
     if AIRFLOW_V_3_0_PLUS:
-        from airflow.api_fastapi.auth.managers.models.resource_details import AssetDetails
+        from airflow.api_fastapi.auth.managers.models.resource_details import AssetAliasDetails, AssetDetails
         from airflow.models.asset import expand_alias_to_assets
         from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetAll, AssetAny
     else:
@@ -52,6 +52,7 @@ else:
 __all__ = [
     "Asset",
     "AssetAlias",
+    "AssetAliasDetails",
     "AssetAll",
     "AssetAny",
     "AssetDetails",
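For context, a minimal usage sketch of this compat shim, assuming hypothetical asset names and URIs that are not part of the diff: importing through airflow.providers.common.compat.assets resolves to the Airflow 3 asset classes when available and falls back to the pre-3.0 equivalents otherwise.

# Hypothetical usage sketch: import asset classes through the compat layer so the
# same provider code runs on both Airflow 2.x and 3.x installations.
from airflow.providers.common.compat.assets import Asset, AssetAlias

raw_events = Asset("s3://example-bucket/raw/events")  # example URI, not from the diff
events_alias = AssetAlias("events-latest")            # example alias name, not from the diff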
--- /dev/null
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/check.py
@@ -0,0 +1,99 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import functools
+import importlib
+from importlib import metadata
+
+from packaging.version import Version
+
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+
+def require_provider_version(provider_name: str, provider_min_version: str):
+    """
+    Enforce minimum version requirement for a specific provider.
+
+    Some providers, do not explicitly require other provider packages but may offer optional features
+    that depend on it. These features are generally available starting from a specific version of such
+    provider. This decorator helps ensure compatibility, preventing import errors and providing clear
+    logs about version requirements.
+
+    Args:
+        provider_name: Name of the provider e.g., apache-airflow-providers-openlineage
+        provider_min_version: Optional minimum version requirement e.g., 1.0.1
+
+    Raises:
+        ValueError: If neither `provider_name` nor `provider_min_version` is provided.
+        ValueError: If full provider name (e.g., apache-airflow-providers-openlineage) is not provided.
+        TypeError: If the decorator is used without parentheses (e.g., `@require_provider_version`).
+    """
+    err_msg = (
+        "`require_provider_version` decorator must be used with two arguments: "
+        "'provider_name' and 'provider_min_version', "
+        'e.g., @require_provider_version(provider_name="apache-airflow-providers-openlineage", '
+        'provider_min_version="1.0.0")'
+    )
+    # Detect if decorator is mistakenly used without arguments
+    if callable(provider_name) and not provider_min_version:
+        raise TypeError(err_msg)
+
+    # Ensure both arguments are provided and not empty
+    if not provider_name or not provider_min_version:
+        raise ValueError(err_msg)
+
+    # Ensure full provider name is passed
+    if not provider_name.startswith("apache-airflow-providers-"):
+        raise ValueError(
+            f"Full `provider_name` must be provided starting with 'apache-airflow-providers-', "
+            f"got `{provider_name}`."
+        )
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            try:
+                provider_version: str = metadata.version(provider_name)
+            except metadata.PackageNotFoundError:
+                try:
+                    # Try dynamically importing the provider module based on the provider name
+                    import_provider_name = provider_name.replace("apache-airflow-providers-", "").replace(
+                        "-", "."
+                    )
+                    provider_module = importlib.import_module(f"airflow.providers.{import_provider_name}")
+
+                    provider_version = getattr(provider_module, "__version__")
+
+                except (ImportError, AttributeError, ModuleNotFoundError):
+                    raise AirflowOptionalProviderFeatureException(
+                        f"Provider `{provider_name}` not found or has no version, "
+                        f"skipping function `{func.__name__}` execution"
+                    )
+
+            if provider_version and Version(provider_version) < Version(provider_min_version):
+                raise AirflowOptionalProviderFeatureException(
+                    f"Provider's `{provider_name}` version `{provider_version}` is lower than required "
+                    f"`{provider_min_version}`, skipping function `{func.__name__}` execution"
+                )
+
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
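A minimal usage sketch of the require_provider_version decorator added above; the decorated function, the target provider, and the version bound are illustrative only and do not come from the diff.

# Hypothetical example: guard an optional code path behind a minimum version
# of another provider distribution.
from airflow.providers.common.compat.check import require_provider_version


@require_provider_version(
    provider_name="apache-airflow-providers-openlineage",  # example target provider
    provider_min_version="2.0.0",                           # example minimum version
)
def emit_extra_lineage(task):
    # Runs only when the installed provider satisfies the bound; otherwise the
    # wrapper raises AirflowOptionalProviderFeatureException.
    ...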
--- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/get_provider_info.py
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/get_provider_info.py
@@ -27,8 +27,8 @@ def get_provider_info():
         "name": "Common Compat",
         "description": "``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``\n",
         "state": "ready",
-        "source-date-epoch":
-        "versions": ["1.5.1", "1.5.0", "1.3.0", "1.2.2", "1.2.1", "1.2.0", "1.1.0", "1.0.0"],
+        "source-date-epoch": 1742480214,
+        "versions": ["1.6.0", "1.5.1", "1.5.0", "1.3.0", "1.2.2", "1.2.1", "1.2.0", "1.1.0", "1.0.0"],
         "integrations": [
             {
                 "integration-name": "Common Compat",
--- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/security/permissions.py
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/notifier/__init__.py
@@ -14,17 +14,19 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+
 if TYPE_CHECKING:
-    from airflow.security.permissions import RESOURCE_ASSET
+    from airflow.sdk.definitions.notifier import BaseNotifier
+elif AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.definitions.notifier import BaseNotifier
 else:
-    try:
-        from airflow.security.permissions import RESOURCE_ASSET
-    except ImportError:
-        from airflow.security.permissions import RESOURCE_DATASET as RESOURCE_ASSET
+    from airflow.notifications.basenotifier import BaseNotifier
 
 
-__all__ = ["RESOURCE_ASSET"]
+__all__ = ["BaseNotifier"]
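A short sketch of how the relocated compat import is meant to be consumed; the notifier class and message below are made up for illustration. Subclasses import BaseNotifier from the compat package instead of hard-coding the Airflow 2 or Airflow 3 module path.

# Hypothetical notifier built on the compat import; resolves to
# airflow.sdk.definitions.notifier on Airflow 3.x and to
# airflow.notifications.basenotifier on older versions.
from airflow.providers.common.compat.notifier import BaseNotifier


class StdoutNotifier(BaseNotifier):
    """Toy notifier that just prints the message it is given."""

    def __init__(self, message: str = "task finished"):
        super().__init__()
        self.message = message

    def notify(self, context):
        print(f"{self.message}: {context.get('task_instance')}")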
--- /dev/null
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/openlineage/check.py
@@ -0,0 +1,104 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import functools
+import logging
+from importlib import metadata
+
+from packaging.version import Version
+
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+log = logging.getLogger(__name__)
+
+
+def require_openlineage_version(
+    provider_min_version: str | None = None, client_min_version: str | None = None
+):
+    """
+    Enforce minimum version requirements for OpenLineage provider or client.
+
+    Some providers, such as Snowflake and DBT Cloud, do not require an OpenLineage provider but may
+    offer optional features that depend on it. These features are generally available starting
+    from a specific version of the OpenLineage provider or client. This decorator helps ensure compatibility,
+    preventing import errors and providing clear logs about version requirements.
+
+    Args:
+        provider_min_version: Optional minimum version requirement for apache-airflow-providers-openlineage
+        client_min_version: Optional minimum version requirement for openlineage-python
+
+    Raises:
+        ValueError: If neither `provider_min_version` nor `client_min_version` is provided.
+        TypeError: If the decorator is used without parentheses (e.g., `@require_openlineage_version`).
+    """
+    err_msg = (
+        "`require_openlineage_version` decorator must be used with at least one argument: "
+        "'provider_min_version' or 'client_min_version', "
+        'e.g., @require_openlineage_version(provider_min_version="1.0.0")'
+    )
+    # Detect if decorator is mistakenly used without arguments
+    if callable(provider_min_version) and client_min_version is None:
+        raise TypeError(err_msg)
+
+    # Ensure at least one argument is provided
+    if provider_min_version is None and client_min_version is None:
+        raise ValueError(err_msg)
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            if provider_min_version:
+                try:
+                    provider_version: str = metadata.version("apache-airflow-providers-openlineage")
+                except metadata.PackageNotFoundError:
+                    try:
+                        from airflow.providers.openlineage import __version__ as provider_version
+                    except (ImportError, AttributeError, ModuleNotFoundError):
+                        raise AirflowOptionalProviderFeatureException(
+                            "OpenLineage provider not found or has no version, "
+                            f"skipping function `{func.__name__}` execution"
+                        )
+
+                if provider_version and Version(provider_version) < Version(provider_min_version):
+                    raise AirflowOptionalProviderFeatureException(
+                        f"OpenLineage provider version `{provider_version}` "
+                        f"is lower than required `{provider_min_version}`, "
+                        f"skipping function `{func.__name__}` execution"
+                    )
+
+            if client_min_version:
+                try:
+                    client_version: str = metadata.version("openlineage-python")
+                except metadata.PackageNotFoundError:
+                    raise AirflowOptionalProviderFeatureException(
+                        f"OpenLineage client not found, skipping function `{func.__name__}` execution"
+                    )
+
+                if client_version and Version(client_version) < Version(client_min_version):
+                    raise AirflowOptionalProviderFeatureException(
+                        f"OpenLineage client version `{client_version}` "
+                        f"is lower than required `{client_min_version}`, "
+                        f"skipping function `{func.__name__}` execution"
+                    )
+
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
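A minimal sketch of the require_openlineage_version decorator added above, with an illustrative function and version bounds that are not taken from the diff.

# Hypothetical example: only run OpenLineage-dependent logic when new enough
# provider and client packages are installed.
from airflow.providers.common.compat.openlineage.check import require_openlineage_version


@require_openlineage_version(provider_min_version="1.11.0", client_min_version="1.15.0")
def attach_openlineage_config(spark_conf: dict, context) -> dict:
    # Raises AirflowOptionalProviderFeatureException instead of failing later
    # with an ImportError when the requirement is not met.
    ...
    return spark_conf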
--- /dev/null
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/openlineage/utils/spark.py
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+log = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+    from airflow.providers.openlineage.utils.spark import (
+        inject_parent_job_information_into_spark_properties,
+        inject_transport_information_into_spark_properties,
+    )
+else:
+    try:
+        from airflow.providers.openlineage.utils.spark import (
+            inject_parent_job_information_into_spark_properties,
+            inject_transport_information_into_spark_properties,
+        )
+    except ImportError:
+
+        def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
+            log.warning(
+                "Could not import `airflow.providers.openlineage.plugins.macros`."
+                "Skipping the injection of OpenLineage parent job information into Spark properties."
+            )
+            return properties
+
+        def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
+            log.warning(
+                "Could not import `airflow.providers.openlineage.plugins.listener`."
+                "Skipping the injection of OpenLineage transport information into Spark properties."
+            )
+            return properties
+
+
+__all__ = [
+    "inject_parent_job_information_into_spark_properties",
+    "inject_transport_information_into_spark_properties",
+]
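For orientation, a hedged sketch of calling the re-exported helpers from an operator's Spark-configuration step; the function name, property dict, and context variable below are placeholders, not part of the package.

# Hypothetical call site: enrich Spark properties with OpenLineage parent-job
# and transport settings when the OpenLineage provider is importable, falling
# back to the no-op stubs defined above otherwise.
from airflow.providers.common.compat.openlineage.utils.spark import (
    inject_parent_job_information_into_spark_properties,
    inject_transport_information_into_spark_properties,
)


def build_spark_properties(base_properties: dict, context) -> dict:
    properties = inject_parent_job_information_into_spark_properties(base_properties, context)
    properties = inject_transport_information_into_spark_properties(properties, context)
    return properties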
--- /dev/null
+++ apache_airflow_providers_common_compat-1.6.0/src/airflow/providers/common/compat/security/permissions.py
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from airflow.security.permissions import (
+        RESOURCE_ASSET,
+        RESOURCE_ASSET_ALIAS,
+        RESOURCE_BACKFILL,
+        RESOURCE_DAG_VERSION,
+    )
+else:
+    try:
+        from airflow.security.permissions import (
+            RESOURCE_ASSET,
+            RESOURCE_ASSET_ALIAS,
+            RESOURCE_BACKFILL,
+            RESOURCE_DAG_VERSION,
+        )
+    except ImportError:
+        from airflow.security.permissions import RESOURCE_DATASET as RESOURCE_ASSET
+
+
+__all__ = ["RESOURCE_ASSET", "RESOURCE_ASSET_ALIAS", "RESOURCE_BACKFILL", "RESOURCE_DAG_VERSION"]
--- apache_airflow_providers_common_compat-1.5.1/src/airflow/providers/common/compat/openlineage/utils/spark.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from __future__ import annotations
-
-import logging
-from typing import TYPE_CHECKING
-
-log = logging.getLogger(__name__)
-
-if TYPE_CHECKING:
-    from airflow.providers.openlineage.utils.spark import (
-        inject_parent_job_information_into_spark_properties,
-        inject_transport_information_into_spark_properties,
-    )
-else:
-    try:
-        from airflow.providers.openlineage.utils.spark import (
-            inject_parent_job_information_into_spark_properties,
-            inject_transport_information_into_spark_properties,
-        )
-    except ImportError:
-        try:
-            from airflow.providers.openlineage.plugins.macros import (
-                lineage_job_name,
-                lineage_job_namespace,
-                lineage_run_id,
-            )
-        except ImportError:
-
-            def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
-                log.warning(
-                    "Could not import `airflow.providers.openlineage.plugins.macros`."
-                    "Skipping the injection of OpenLineage parent job information into Spark properties."
-                )
-                return properties
-
-        else:
-
-            def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
-                if any(str(key).startswith("spark.openlineage.parent") for key in properties):
-                    log.info(
-                        "Some OpenLineage properties with parent job information are already present "
-                        "in Spark properties. Skipping the injection of OpenLineage "
-                        "parent job information into Spark properties."
-                    )
-                    return properties
-
-                ti = context["ti"]
-                ol_parent_job_properties = {
-                    "spark.openlineage.parentJobNamespace": lineage_job_namespace(),
-                    "spark.openlineage.parentJobName": lineage_job_name(ti),
-                    "spark.openlineage.parentRunId": lineage_run_id(ti),
-                }
-                return {**properties, **ol_parent_job_properties}
-
-        try:
-            from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
-        except ImportError:
-
-            def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
-                log.warning(
-                    "Could not import `airflow.providers.openlineage.plugins.listener`."
-                    "Skipping the injection of OpenLineage transport information into Spark properties."
-                )
-                return properties
-
-        else:
-
-            def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
-                if any(str(key).startswith("spark.openlineage.transport") for key in properties):
-                    log.info(
-                        "Some OpenLineage properties with transport information are already present "
-                        "in Spark properties. Skipping the injection of OpenLineage "
-                        "transport information into Spark properties."
-                    )
-                    return properties
-
-                transport = get_openlineage_listener().adapter.get_or_create_openlineage_client().transport
-                if transport.kind != "http":
-                    log.info(
-                        "OpenLineage transport type `%s` does not support automatic "
-                        "injection of OpenLineage transport information into Spark properties.",
-                        transport.kind,
-                    )
-                    return {}
-
-                transport_properties = {
-                    "spark.openlineage.transport.type": "http",
-                    "spark.openlineage.transport.url": transport.url,
-                    "spark.openlineage.transport.endpoint": transport.endpoint,
-                    # Timeout is converted to milliseconds, as required by Spark integration,
-                    "spark.openlineage.transport.timeoutInMillis": str(int(transport.timeout * 1000)),
-                }
-                if transport.compression:
-                    transport_properties["spark.openlineage.transport.compression"] = str(
-                        transport.compression
-                    )
-
-                if hasattr(transport.config.auth, "api_key") and transport.config.auth.get_bearer():
-                    transport_properties["spark.openlineage.transport.auth.type"] = "api_key"
-                    transport_properties["spark.openlineage.transport.auth.apiKey"] = (
-                        transport.config.auth.get_bearer()
-                    )
-
-                if hasattr(transport.config, "custom_headers") and transport.config.custom_headers:
-                    for key, value in transport.config.custom_headers.items():
-                        transport_properties[f"spark.openlineage.transport.headers.{key}"] = value
-
-                return {**properties, **transport_properties}
-
-
-__all__ = [
-    "inject_parent_job_information_into_spark_properties",
-    "inject_transport_information_into_spark_properties",
-]