apache-airflow-providers-common-compat 1.6.0b1__py3-none-any.whl → 1.6.0rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-common-compat might be problematic.

airflow/providers/common/compat/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.6.0b1"
+__version__ = "1.6.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

airflow/providers/common/compat/get_provider_info.py

@@ -28,7 +28,7 @@ def get_provider_info():
         "description": "``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``\n",
         "state": "ready",
         "source-date-epoch": 1742480214,
-        "versions": ["1.6.0b1", "1.5.1", "1.5.0", "1.3.0", "1.2.2", "1.2.1", "1.2.0", "1.1.0", "1.0.0"],
+        "versions": ["1.6.0", "1.5.1", "1.5.0", "1.3.0", "1.2.2", "1.2.1", "1.2.0", "1.1.0", "1.0.0"],
         "integrations": [
             {
                 "integration-name": "Common Compat",

airflow/providers/common/compat/notifier/__init__.py (new file)

@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.notifier import BaseNotifier
+elif AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.definitions.notifier import BaseNotifier
+else:
+    from airflow.notifications.basenotifier import BaseNotifier
+
+
+__all__ = ["BaseNotifier"]

airflow/providers/common/compat/openlineage/utils/spark.py

@@ -34,94 +34,20 @@ else:
             inject_transport_information_into_spark_properties,
         )
     except ImportError:
-        try:
-            from airflow.providers.openlineage.plugins.macros import (
-                lineage_job_name,
-                lineage_job_namespace,
-                lineage_run_id,
-            )
-        except ImportError:
-
-            def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
-                log.warning(
-                    "Could not import `airflow.providers.openlineage.plugins.macros`."
-                    "Skipping the injection of OpenLineage parent job information into Spark properties."
-                )
-                return properties
-
-        else:
-
-            def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
-                if any(str(key).startswith("spark.openlineage.parent") for key in properties):
-                    log.info(
-                        "Some OpenLineage properties with parent job information are already present "
-                        "in Spark properties. Skipping the injection of OpenLineage "
-                        "parent job information into Spark properties."
-                    )
-                    return properties
-
-                ti = context["ti"]
-                ol_parent_job_properties = {
-                    "spark.openlineage.parentJobNamespace": lineage_job_namespace(),
-                    "spark.openlineage.parentJobName": lineage_job_name(ti),
-                    "spark.openlineage.parentRunId": lineage_run_id(ti),
-                }
-                return {**properties, **ol_parent_job_properties}
-
-        try:
-            from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
-        except ImportError:
-
-            def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
-                log.warning(
-                    "Could not import `airflow.providers.openlineage.plugins.listener`."
-                    "Skipping the injection of OpenLineage transport information into Spark properties."
-                )
-                return properties
 
-        else:
-
-            def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
-                if any(str(key).startswith("spark.openlineage.transport") for key in properties):
-                    log.info(
-                        "Some OpenLineage properties with transport information are already present "
-                        "in Spark properties. Skipping the injection of OpenLineage "
-                        "transport information into Spark properties."
-                    )
-                    return properties
-
-                transport = get_openlineage_listener().adapter.get_or_create_openlineage_client().transport
-                if transport.kind != "http":
-                    log.info(
-                        "OpenLineage transport type `%s` does not support automatic "
-                        "injection of OpenLineage transport information into Spark properties.",
-                        transport.kind,
-                    )
-                    return {}
-
-                transport_properties = {
-                    "spark.openlineage.transport.type": "http",
-                    "spark.openlineage.transport.url": transport.url,
-                    "spark.openlineage.transport.endpoint": transport.endpoint,
-                    # Timeout is converted to milliseconds, as required by Spark integration,
-                    "spark.openlineage.transport.timeoutInMillis": str(int(transport.timeout * 1000)),
-                }
-                if transport.compression:
-                    transport_properties["spark.openlineage.transport.compression"] = str(
-                        transport.compression
-                    )
-
-                if hasattr(transport.config.auth, "api_key") and transport.config.auth.get_bearer():
-                    transport_properties["spark.openlineage.transport.auth.type"] = "api_key"
-                    transport_properties["spark.openlineage.transport.auth.apiKey"] = (
-                        transport.config.auth.get_bearer()
-                    )
-
-                if hasattr(transport.config, "custom_headers") and transport.config.custom_headers:
-                    for key, value in transport.config.custom_headers.items():
-                        transport_properties[f"spark.openlineage.transport.headers.{key}"] = value
+        def inject_parent_job_information_into_spark_properties(properties: dict, context) -> dict:
+            log.warning(
+                "Could not import `airflow.providers.openlineage.plugins.macros`."
+                "Skipping the injection of OpenLineage parent job information into Spark properties."
+            )
+            return properties
 
-                return {**properties, **transport_properties}
+        def inject_transport_information_into_spark_properties(properties: dict, context) -> dict:
+            log.warning(
+                "Could not import `airflow.providers.openlineage.plugins.listener`."
+                "Skipping the injection of OpenLineage transport information into Spark properties."
+            )
+            return properties
 
 
 __all__ = [
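
The hunk above drops the macro-based reimplementation that 1.6.0b1 carried: in rc1, when `apache-airflow-providers-openlineage` cannot be imported at all, the compat shims simply log a warning and return the properties untouched rather than rebuilding parent-job and transport settings from `plugins.macros` and `plugins.listener`. Callers can therefore still invoke both helpers unconditionally; a minimal sketch with a hypothetical caller:

from airflow.providers.common.compat.openlineage.utils.spark import (
    inject_parent_job_information_into_spark_properties,
    inject_transport_information_into_spark_properties,
)


def build_spark_properties(base: dict, context) -> dict:
    # With the OpenLineage provider installed, the real injection code runs;
    # without it, the rc1 fallbacks warn and hand the dict back unchanged.
    props = inject_parent_job_information_into_spark_properties(base, context)
    return inject_transport_information_into_spark_properties(props, context)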

apache_airflow_providers_common_compat-1.6.0rc1.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-compat
-Version: 1.6.0b1
+Version: 1.6.0rc1
 Summary: Provider package apache-airflow-providers-common-compat for Apache Airflow
 Keywords: airflow-provider,common.compat,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,12 +20,12 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow>=2.9.0rc0
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0b1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0b1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
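
The widened pin `apache-airflow>=2.9.0rc0` follows PEP 440 pre-release semantics: a plain `>=2.9.0` specifier excludes release candidates by default, so this relaxation matters when a provider RC must install against an Airflow pre-release. Spelling a pre-release inside the specifier opts them in, as this sketch with the `packaging` library shows:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

rc = Version("2.9.0rc1")
print(rc in SpecifierSet(">=2.9.0"))     # False: pre-releases are excluded by default
print(rc in SpecifierSet(">=2.9.0rc0"))  # True: the specifier itself names a pre-release
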
@@ -58,7 +58,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-common-compat``
 
-Release: ``1.6.0b1``
+Release: ``1.6.0``
 
 
 ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.``
@@ -71,7 +71,7 @@ This is a provider package for ``common.compat`` provider. All classes for this
 are in ``airflow.providers.common.compat`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0b1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/>`_.
 
 Installation
 ------------
@@ -95,7 +95,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -112,5 +112,5 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0b1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.6.0/changelog.html>`_.
 

apache_airflow_providers_common_compat-1.6.0rc1.dist-info/RECORD

@@ -1,17 +1,18 @@
 airflow/providers/common/compat/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/common/compat/__init__.py,sha256=sysV2TNsvQETL5H5doqH9-9iUlzC9OvfBX9eqwRw29g,1502
+airflow/providers/common/compat/__init__.py,sha256=sPpVS_aBAUBxD6wvGKKkFAm-f1i1GTWPIwgaB6a1wSQ,1500
 airflow/providers/common/compat/check.py,sha256=d6at8iFn_c2jbnmvswoMYz1DFUrAbQTVKMCA5PYAOrQ,4347
-airflow/providers/common/compat/get_provider_info.py,sha256=4DGFXh_Yb0imhgcsXvNq3E6FnmFI3lmlxSsroH49OUU,1995
+airflow/providers/common/compat/get_provider_info.py,sha256=5vjEVk6xFG0bR4KqSmPY86TeXDeVrnFcwbn_NjC5pyo,1993
 airflow/providers/common/compat/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
 airflow/providers/common/compat/assets/__init__.py,sha256=73zbqrAVBepkL83xpjRjzLQF7LzI-uA-3pBu5n7-g80,2202
 airflow/providers/common/compat/lineage/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/compat/lineage/entities.py,sha256=otob45spY8YSa5fMx3XXg-JWNZttS8g4WrkvYte2HFY,2651
 airflow/providers/common/compat/lineage/hook.py,sha256=0ujOAFXc3U0zqAJfKVgSRAHCa6lKwlNd7yMacUyNdD8,3977
+airflow/providers/common/compat/notifier/__init__.py,sha256=hZlFYmkQ4_g0rNymigAszxM7egmG1RNlAR8zvUuszew,1201
 airflow/providers/common/compat/openlineage/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/compat/openlineage/check.py,sha256=jcrz1xgqsUd9TCF564U_JGvU8Vyrss7qpiMaQz_ZIqA,4672
 airflow/providers/common/compat/openlineage/facet.py,sha256=_3YHsRP39vx6FPePFQCLjNHP9kzisNH5v97o-PWzwLk,6529
 airflow/providers/common/compat/openlineage/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/compat/openlineage/utils/spark.py,sha256=zyvuWFLye_6xDmC4PDQ9NKi0muH-QtqfsqRIHNjxzRQ,5983
+airflow/providers/common/compat/openlineage/utils/spark.py,sha256=-6ztQ_yphcXyS34WHXGb9lEvN1uR_-Gk0bo0_o0F8Co,2196
 airflow/providers/common/compat/openlineage/utils/sql.py,sha256=bQdfjBgkHNt0cFKdA2BM7myapb_WVM_F2RkVoPZVfoU,3286
 airflow/providers/common/compat/openlineage/utils/utils.py,sha256=KB_1emJIEeYxPG7YscYHmIEDIx9VNmUPtDFz81UuhQk,1617
 airflow/providers/common/compat/security/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -20,7 +21,7 @@ airflow/providers/common/compat/standard/__init__.py,sha256=9hdXHABrVpkbpjZgUft3
 airflow/providers/common/compat/standard/operators.py,sha256=1jsiu8Cc6S2Xhhfo2Rh5PqJCnQ1CBH_UW5q7s3Y0Mpc,1712
 airflow/providers/common/compat/standard/triggers.py,sha256=XBi9KE-cAY_TlIyKVqr1-EU7xZNqgCVTugXZObV3gaQ,1177
 airflow/providers/common/compat/standard/utils.py,sha256=SvTXThqYRKVfmRRpq-D5eNcxQjY9TESrTyl3n670QM8,1289
-apache_airflow_providers_common_compat-1.6.0b1.dist-info/entry_points.txt,sha256=OdOClAuY8E82VvA-Zo6narFujtXdGihHKZH2HfmlPIo,109
-apache_airflow_providers_common_compat-1.6.0b1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
-apache_airflow_providers_common_compat-1.6.0b1.dist-info/METADATA,sha256=F5-kvFY2qiIEY4AxB6dq_V_1YAKRFvgZXNfA29MaouA,5303
-apache_airflow_providers_common_compat-1.6.0b1.dist-info/RECORD,,
+apache_airflow_providers_common_compat-1.6.0rc1.dist-info/entry_points.txt,sha256=OdOClAuY8E82VvA-Zo6narFujtXdGihHKZH2HfmlPIo,109
+apache_airflow_providers_common_compat-1.6.0rc1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+apache_airflow_providers_common_compat-1.6.0rc1.dist-info/METADATA,sha256=ZbLzMISU7z_x3EkMKhGWNaEJzkjhvasK7xZ5Fgp96bg,5289
+apache_airflow_providers_common_compat-1.6.0rc1.dist-info/RECORD,,
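
For reference, each RECORD line is `path,sha256=<digest>,<size>`, where the digest is the file's SHA-256 hash encoded as urlsafe base64 with the `=` padding stripped, per the wheel spec. A minimal sketch for reproducing an entry from an unpacked wheel:

import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    # Hash the file bytes, then encode the digest the way RECORD expects.
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"


# e.g. record_entry("airflow/providers/common/compat/check.py") should match
# the corresponding line above.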