apache-airflow-providers-common-compat 1.6.0b1__tar.gz → 1.6.0rc1__tar.gz
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/PKG-INFO +8 -8
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/README.rst +4 -4
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/pyproject.toml +5 -5
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/__init__.py +1 -1
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/get_provider_info.py +1 -1
- apache_airflow_providers_common_compat-1.6.0rc1/src/airflow/providers/common/compat/notifier/__init__.py +32 -0
- apache_airflow_providers_common_compat-1.6.0rc1/src/airflow/providers/common/compat/openlineage/utils/spark.py +56 -0
- apache_airflow_providers_common_compat-1.6.0b1/src/airflow/providers/common/compat/openlineage/utils/spark.py +0 -130
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/LICENSE +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/assets/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/check.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/lineage/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/lineage/entities.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/lineage/hook.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/openlineage/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/openlineage/check.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/openlineage/facet.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/openlineage/utils/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/openlineage/utils/sql.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/openlineage/utils/utils.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/security/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/security/permissions.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/standard/__init__.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/standard/operators.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/standard/triggers.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/standard/utils.py +0 -0
- {apache_airflow_providers_common_compat-1.6.0b1 → apache_airflow_providers_common_compat-1.6.0rc1}/src/airflow/providers/common/compat/version_compat.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-compat
-Version: 1.6.
+Version: 1.6.0rc1
 Summary: Provider package apache-airflow-providers-common-compat for Apache Airflow
 Keywords: airflow-provider,common.compat,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,12 +20,12 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.
+Requires-Dist: apache-airflow>=2.9.0rc0
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -58,7 +58,7 @@ Provides-Extra: standard

 Package ``apache-airflow-providers-common-compat``

-Release: ``1.6.
+Release: ``1.6.0``


 ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``
@@ -71,7 +71,7 @@ This is a provider package for ``common.compat`` provider. All classes for this
 are in ``airflow.providers.common.compat`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/>`_.

 Installation
 ------------
@@ -95,7 +95,7 @@ Cross provider package dependencies
 -----------------------------------

 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified
+You need to install the specified providers in order to use them.

 You can install such cross-provider dependencies when installing from PyPI. For example:

@@ -112,5 +112,5 @@ Dependent package
 ============================================================================================================== ===============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html>`_.

README.rst

@@ -23,7 +23,7 @@

 Package ``apache-airflow-providers-common-compat``

-Release: ``1.6.
+Release: ``1.6.0``


 ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``
@@ -36,7 +36,7 @@ This is a provider package for ``common.compat`` provider. All classes for this
 are in ``airflow.providers.common.compat`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/>`_.

 Installation
 ------------
@@ -60,7 +60,7 @@ Cross provider package dependencies
 -----------------------------------

 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified
+You need to install the specified providers in order to use them.

 You can install such cross-provider dependencies when installing from PyPI. For example:

@@ -77,4 +77,4 @@ Dependent package
 ============================================================================================================== ===============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html>`_.

pyproject.toml

@@ -20,12 +20,12 @@
 # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
-requires = ["flit_core==3.
+requires = ["flit_core==3.12.0"]
 build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-common-compat"
-version = "1.6.
+version = "1.6.0.rc1"
 description = "Provider package apache-airflow-providers-common-compat for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,7 +57,7 @@ requires-python = "~=3.9"
 # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.9.
+    "apache-airflow>=2.9.0rc0",
 ]

 # The optional dependencies should be modified in place in the generated file
@@ -91,8 +91,8 @@ apache-airflow-providers-fab = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}

 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"

src/airflow/providers/common/compat/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "1.6.
+__version__ = "1.6.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

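A note on the version guard above: ``base_version`` strips any pre-release suffix before the comparison, so release candidates of Airflow still satisfy the minimum-version check. A minimal sketch of the same pattern, assuming only the ``packaging`` library shown in the diff (the version string and error message here are illustrative, not the package's exact ones):

    import packaging.version

    airflow_version = "2.9.0rc1"  # hypothetical installed Airflow version

    # base_version drops pre-release/dev suffixes: "2.9.0rc1" -> "2.9.0",
    # so an RC build is not rejected by the minimum-version check.
    base = packaging.version.parse(airflow_version).base_version

    if packaging.version.parse(base) < packaging.version.parse("2.9.0"):
        # Illustrative error; the provider raises its own message at import time.
        raise RuntimeError(
            "apache-airflow-providers-common-compat requires Apache Airflow 2.9.0 or newer"
        )
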
src/airflow/providers/common/compat/get_provider_info.py

@@ -28,7 +28,7 @@ def get_provider_info():
         "description": "``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``\n",
         "state": "ready",
         "source-date-epoch": 1742480214,
-        "versions": ["1.6.
+        "versions": ["1.6.0", "1.5.1", "1.5.0", "1.3.0", "1.2.2", "1.2.1", "1.2.0", "1.1.0", "1.0.0"],
         "integrations": [
             {
                 "integration-name": "Common Compat",

src/airflow/providers/common/compat/notifier/__init__.py (new file)

@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.notifier import BaseNotifier
+elif AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.definitions.notifier import BaseNotifier
+else:
+    from airflow.notifications.basenotifier import BaseNotifier
+
+
+__all__ = ["BaseNotifier"]

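The new ``notifier`` module gives provider code a single import location for ``BaseNotifier`` that resolves to ``airflow.sdk.definitions.notifier`` on Airflow 3 and to ``airflow.notifications.basenotifier`` on older versions. A minimal usage sketch built on that compat import; the ``MyNotifier`` class, its constructor, and its message are illustrative and assume the standard ``notify(context)`` hook:

    from airflow.providers.common.compat.notifier import BaseNotifier


    class MyNotifier(BaseNotifier):
        """Hypothetical notifier that works on both Airflow 2 and Airflow 3."""

        def __init__(self, message: str):
            super().__init__()
            self.message = message

        def notify(self, context):
            # `context` is the Airflow task context dict.
            ti = context["ti"]
            print(f"{self.message}: task {ti.task_id} finished in state {ti.state}")
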
src/airflow/providers/common/compat/openlineage/utils/spark.py (new file)

@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+log = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+    from airflow.providers.openlineage.utils.spark import (
+        inject_parent_job_information_into_spark_properties,
+        inject_transport_information_into_spark_properties,
+    )
+else:
+    try:
+        from airflow.providers.openlineage.utils.spark import (
+            inject_parent_job_information_into_spark_properties,
+            inject_transport_information_into_spark_properties,
+        )
+    except ImportError:
+
+        def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
+            log.warning(
+                "Could not import `airflow.providers.openlineage.plugins.macros`."
+                "Skipping the injection of OpenLineage parent job information into Spark properties."
+            )
+            return properties
+
+        def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
+            log.warning(
+                "Could not import `airflow.providers.openlineage.plugins.listener`."
+                "Skipping the injection of OpenLineage transport information into Spark properties."
+            )
+            return properties
+
+
+__all__ = [
+    "inject_parent_job_information_into_spark_properties",
+    "inject_transport_information_into_spark_properties",
+]

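Both helpers keep the same signature, ``(properties: dict, context) -> dict``, whether the real OpenLineage implementations are importable or only the no-op fallbacks above are used, so callers can invoke them unconditionally. A sketch of how a Spark-submitting operator might use them; the ``build_spark_conf`` helper is an assumption for illustration, not part of the package:

    from airflow.providers.common.compat.openlineage.utils.spark import (
        inject_parent_job_information_into_spark_properties,
        inject_transport_information_into_spark_properties,
    )


    def build_spark_conf(base_conf: dict, context) -> dict:
        """Return Spark properties with OpenLineage parent-job and transport info added.

        If the openlineage provider is not installed, the fallbacks above log a
        warning and return the properties unchanged, so this call is always safe.
        """
        conf = dict(base_conf)  # copy so the caller's dict is not mutated
        conf = inject_parent_job_information_into_spark_properties(conf, context)
        conf = inject_transport_information_into_spark_properties(conf, context)
        return conf
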
src/airflow/providers/common/compat/openlineage/utils/spark.py (previous version, removed)

@@ -1,130 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from __future__ import annotations
-
-import logging
-from typing import TYPE_CHECKING
-
-log = logging.getLogger(__name__)
-
-if TYPE_CHECKING:
-    from airflow.providers.openlineage.utils.spark import (
-        inject_parent_job_information_into_spark_properties,
-        inject_transport_information_into_spark_properties,
-    )
-else:
-    try:
-        from airflow.providers.openlineage.utils.spark import (
-            inject_parent_job_information_into_spark_properties,
-            inject_transport_information_into_spark_properties,
-        )
-    except ImportError:
-        try:
-            from airflow.providers.openlineage.plugins.macros import (
-                lineage_job_name,
-                lineage_job_namespace,
-                lineage_run_id,
-            )
-        except ImportError:
-
-            def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
-                log.warning(
-                    "Could not import `airflow.providers.openlineage.plugins.macros`."
-                    "Skipping the injection of OpenLineage parent job information into Spark properties."
-                )
-                return properties
-
-        else:
-
-            def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
-                if any(str(key).startswith("spark.openlineage.parent") for key in properties):
-                    log.info(
-                        "Some OpenLineage properties with parent job information are already present "
-                        "in Spark properties. Skipping the injection of OpenLineage "
-                        "parent job information into Spark properties."
-                    )
-                    return properties
-
-                ti = context["ti"]
-                ol_parent_job_properties = {
-                    "spark.openlineage.parentJobNamespace": lineage_job_namespace(),
-                    "spark.openlineage.parentJobName": lineage_job_name(ti),
-                    "spark.openlineage.parentRunId": lineage_run_id(ti),
-                }
-                return {**properties, **ol_parent_job_properties}
-
-        try:
-            from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
-        except ImportError:
-
-            def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
-                log.warning(
-                    "Could not import `airflow.providers.openlineage.plugins.listener`."
-                    "Skipping the injection of OpenLineage transport information into Spark properties."
-                )
-                return properties
-
-        else:
-
-            def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
-                if any(str(key).startswith("spark.openlineage.transport") for key in properties):
-                    log.info(
-                        "Some OpenLineage properties with transport information are already present "
-                        "in Spark properties. Skipping the injection of OpenLineage "
-                        "transport information into Spark properties."
-                    )
-                    return properties
-
-                transport = get_openlineage_listener().adapter.get_or_create_openlineage_client().transport
-                if transport.kind != "http":
-                    log.info(
-                        "OpenLineage transport type `%s` does not support automatic "
-                        "injection of OpenLineage transport information into Spark properties.",
-                        transport.kind,
-                    )
-                    return {}
-
-                transport_properties = {
-                    "spark.openlineage.transport.type": "http",
-                    "spark.openlineage.transport.url": transport.url,
-                    "spark.openlineage.transport.endpoint": transport.endpoint,
-                    # Timeout is converted to milliseconds, as required by Spark integration,
-                    "spark.openlineage.transport.timeoutInMillis": str(int(transport.timeout * 1000)),
-                }
-                if transport.compression:
-                    transport_properties["spark.openlineage.transport.compression"] = str(
-                        transport.compression
-                    )
-
-                if hasattr(transport.config.auth, "api_key") and transport.config.auth.get_bearer():
-                    transport_properties["spark.openlineage.transport.auth.type"] = "api_key"
-                    transport_properties["spark.openlineage.transport.auth.apiKey"] = (
-                        transport.config.auth.get_bearer()
-                    )
-
-                if hasattr(transport.config, "custom_headers") and transport.config.custom_headers:
-                    for key, value in transport.config.custom_headers.items():
-                        transport_properties[f"spark.openlineage.transport.headers.{key}"] = value
-
-                return {**properties, **transport_properties}
-
-
-__all__ = [
-    "inject_parent_job_information_into_spark_properties",
-    "inject_transport_information_into_spark_properties",
-]