apache-airflow-providers-common-compat 1.9.0rc1__py3-none-any.whl → 1.11.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/common/compat/__init__.py +3 -3
- airflow/providers/common/compat/lineage/hook.py +176 -22
- airflow/providers/common/compat/module_loading/__init__.py +49 -0
- airflow/providers/common/compat/sdk.py +60 -0
- airflow/providers/common/compat/version_compat.py +4 -7
- {apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/METADATA +8 -8
- {apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/RECORD +11 -10
- {apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/licenses/LICENSE +0 -0
- {apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/licenses/NOTICE +0 -0

airflow/providers/common/compat/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.9.0"
+__version__ = "1.11.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-common-compat:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-common-compat:{__version__}` needs Apache Airflow 2.11.0+"
     )
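Illustration (not part of the diff): the version gate above parses `base_version` before comparing, so pre-release and dev builds of Airflow pass the check. A minimal standalone sketch of that behaviour, using a hypothetical installed version string:

    import packaging.version

    installed = "2.11.0rc1"  # hypothetical installed Airflow version
    # base_version strips rc/dev suffixes, so "2.11.0rc1" compares as "2.11.0"
    base = packaging.version.parse(installed).base_version
    if packaging.version.parse(base) < packaging.version.parse("2.11.0"):
        raise RuntimeError("needs Apache Airflow 2.11.0+")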

airflow/providers/common/compat/lineage/hook.py
@@ -16,22 +16,166 @@
 # under the License.
 from __future__ import annotations
 
-from
+from typing import TYPE_CHECKING
 
+if TYPE_CHECKING:
+    from typing import Any
 
-
-from airflow.lineage.hook import get_hook_lineage_collector
+    from airflow.lineage.hook import LineageContext
 
-collector = get_hook_lineage_collector()
 
-
-
-
+def _lacks_asset_methods(collector):
+    """Return True if the collector is missing any asset-related methods or properties."""
+    if (  # lazy evaluation, early return
+        hasattr(collector, "add_input_asset")  # method
+        and hasattr(collector, "add_output_asset")  # method
+        and hasattr(collector, "create_asset")  # method
+        # If below we called hasattr(collector, "collected_assets") we'd call the property unnecessarily
+        and hasattr(type(collector), "collected_assets")  # property
     ):
-        return
+        return False
 
-
+    return True
 
+
+def _lacks_add_extra_method(collector):
+    """Return True if the collector does not define an 'add_extra' method."""
+    # Method may be on class and attribute may be dynamically set on instance
+    if hasattr(collector, "add_extra") and hasattr(collector, "_extra"):
+        return False
+    return True
+
+
+def _add_extra_polyfill(collector):
+    """
+    Add support for `add_extra` method to collector that may be lacking it (e.g., Airflow versions < 3.2.).
+
+    This polyfill adds the `add_extra` method to a class, modifies `collected_assets` and `has_collected`
+    properties and sets `_extra` and `_extra_counts` attributes on instance if not already there.
+
+    This function should be called after renaming on collectors that have `collected_assets` method,
+    so f.e. for Airflow 2 it should happen after renaming from dataset to asset.
+    """
+    import hashlib
+    import json
+    from collections import defaultdict
+
+    import attr
+
+    from airflow.lineage.hook import HookLineage as _BaseHookLineage
+
+    # Add `extra` to HookLineage returned by `collected_assets` property
+    @attr.define
+    class ExtraLineageInfo:
+        """
+        Holds lineage information for arbitrary non-asset metadata.
+
+        This class represents additional lineage context captured during a hook execution that is not
+        associated with a specific asset. It includes the metadata payload itself, the count of
+        how many times it has been encountered, and the context in which it was encountered.
+        """
+
+        key: str
+        value: Any
+        count: int
+        context: LineageContext
+
+    @attr.define
+    class HookLineage(_BaseHookLineage):
+        # mypy is not happy, as base class is using other ExtraLineageInfo, but this code will never
+        # run on AF3.2, where this other one is used, so this is fine - we can ignore.
+        extra: list[ExtraLineageInfo] = attr.field(factory=list)  # type: ignore[assignment]
+
+    # Initialize extra tracking attributes on this collector instance
+    collector._extra = {}
+    collector._extra_counts = defaultdict(int)
+
+    # Overwrite the `collected_assets` property on a class
+    _original_collected_assets = collector.__class__.collected_assets
+
+    def _compat_collected_assets(self) -> HookLineage:
+        """Get the collected hook lineage information."""
+        # Defensive check since we patch the class property, but initialized _extra only on this instance.
+        if not hasattr(self, "_extra"):
+            self._extra = {}
+        if not hasattr(self, "_extra_counts"):
+            self._extra_counts = defaultdict(int)
+
+        # call the original `collected_assets` getter
+        lineage = _original_collected_assets.fget(self)
+        extra_list = [
+            ExtraLineageInfo(
+                key=key,
+                value=value,
+                count=self._extra_counts[count_key],
+                context=context,
+            )
+            for count_key, (key, value, context) in self._extra.items()
+        ]
+        return HookLineage(
+            inputs=lineage.inputs,
+            outputs=lineage.outputs,
+            extra=extra_list,
+        )
+
+    type(collector).collected_assets = property(_compat_collected_assets)
+
+    # Overwrite the `has_collected` property on a class
+    _original_has_collected = collector.__class__.has_collected
+
+    def _compat_has_collected(self) -> bool:
+        # Defensive check since we patch the class property, but initialized _extra only on this instance.
+        if not hasattr(self, "_extra"):
+            self._extra = {}
+        # call the original `has_collected` getter
+        has_collected = _original_has_collected.fget(self)
+        return bool(has_collected or self._extra)
+
+    type(collector).has_collected = property(_compat_has_collected)
+
+    # Add `add_extra` method on the class
+    def _compat_add_extra(self, context, key, value):
+        """Add extra information for older Airflow versions."""
+        _max_collected_extra = 200
+
+        if len(self._extra) >= _max_collected_extra:
+            if hasattr(self, "log"):
+                self.log.debug("Maximum number of extra exceeded. Skipping.")
+            return
+
+        if not key or not value:
+            if hasattr(self, "log"):
+                self.log.debug("Missing required parameter: both 'key' and 'value' must be provided.")
+            return
+
+        # Defensive check since we patch the class property, but initialized _extra only on this instance.
+        if not hasattr(self, "_extra"):
+            self._extra = {}
+        if not hasattr(self, "_extra_counts"):
+            self._extra_counts = defaultdict(int)
+
+        extra_str = json.dumps(value, sort_keys=True, default=str)
+        value_hash = hashlib.md5(extra_str.encode()).hexdigest()
+        entry_id = f"{key}_{value_hash}_{id(context)}"
+        if entry_id not in self._extra:
+            self._extra[entry_id] = (key, value, context)
+        self._extra_counts[entry_id] += 1
+
+        if len(self._extra) == _max_collected_extra:
+            if hasattr(self, "log"):
+                self.log.warning("Maximum number of extra exceeded. Skipping subsequent inputs.")
+
+    type(collector).add_extra = _compat_add_extra
+    return collector
+
+
+def _add_asset_naming_compatibility_layer(collector):
+    """
+    Handle AF 2.x compatibility for dataset -> asset terminology rename.
+
+    This is only called for AF 2.x where we need to provide asset-named methods
+    that wrap the underlying dataset methods.
+    """
     from functools import wraps
 
     from airflow.lineage.hook import DatasetLineageInfo, HookLineage
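Illustration (not part of the diff): the polyfill keeps a reference to the original class property and re-exposes it through a patched one that calls the original getter via `.fget`. A minimal sketch of that pattern with made-up names (`Collector` and `_items` are not Airflow classes):

    class Collector:
        def __init__(self):
            self._items = []

        @property
        def has_collected(self) -> bool:
            return bool(self._items)

    _original = Collector.has_collected  # keep the original property object

    def _patched(self) -> bool:
        # delegate to the original getter, then widen the result
        return bool(_original.fget(self) or getattr(self, "_extra", None))

    Collector.has_collected = property(_patched)

    c = Collector()
    c._extra = {"k": "v"}
    print(c.has_collected)  # True: the patched getter also looks at _extra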
@@ -55,9 +199,9 @@ def _get_asset_compat_hook_lineage_collector():
     collector.add_input_asset = rename_asset_kwargs_to_dataset_kwargs(collector.add_input_dataset)
     collector.add_output_asset = rename_asset_kwargs_to_dataset_kwargs(collector.add_output_dataset)
 
-    def
+    def _compat_collected_assets(self) -> HookLineage:
         """Get the collected hook lineage information."""
-        lineage =
+        lineage = self.collected_datasets
         return HookLineage(
             [
                 DatasetLineageInfo(dataset=item.dataset, count=item.count, context=item.context)
@@ -69,20 +213,30 @@ def _get_asset_compat_hook_lineage_collector():
             ],
         )
 
-
-        collector.__class__,
-        "collected_assets",
-        property(lambda collector: collected_assets_compat(collector)),
-    )
-
+    type(collector).collected_assets = property(_compat_collected_assets)
     return collector
 
 
 def get_hook_lineage_collector():
-
-
-
+    """
+    Return a hook lineage collector with all required compatibility layers applied.
+
+    Compatibility is determined by inspecting the collector's available methods and
+    properties (duck typing), rather than relying on the Airflow version number.
+
+    Behavior by example:
+        Airflow 2: Collector is missing asset-based methods and `add_extra` - apply both layers.
+        Airflow 3.0–3.1: Collector has asset-based methods but lacks `add_extra` - apply single layer.
+        Airflow 3.2+: Collector has asset-based methods and `add_extra` support - no action required.
+    """
+    from airflow.lineage.hook import get_hook_lineage_collector as get_global_collector
+
+    global_collector = get_global_collector()
+
+    if _lacks_asset_methods(global_collector):
+        global_collector = _add_asset_naming_compatibility_layer(global_collector)
 
-
+    if _lacks_add_extra_method(global_collector):
+        global_collector = _add_extra_polyfill(global_collector)
 
-    return
+    return global_collector
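Illustration (not part of the diff): provider hooks are expected to go through this compat entry point instead of the core one, so the same call sites work on Airflow 2 (dataset-named collector), 3.0–3.1 (no `add_extra`), and 3.2+. A hedged usage sketch; the hook and URIs are hypothetical, while the collector methods are the ones provided or polyfilled above:

    from airflow.providers.common.compat.lineage.hook import get_hook_lineage_collector

    def copy_object(hook, src_uri: str, dst_uri: str) -> None:
        # `hook` is the running hook instance, used as the lineage context
        collector = get_hook_lineage_collector()
        collector.add_input_asset(context=hook, uri=src_uri)
        collector.add_output_asset(context=hook, uri=dst_uri)
        # arbitrary non-asset metadata; ignored if key or value is empty
        collector.add_extra(context=hook, key="copy_mode", value={"recursive": False})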

airflow/providers/common/compat/module_loading/__init__.py (new file)
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+try:
+    from airflow.utils.module_loading import (
+        import_string,
+        iter_namespace,
+        qualname,
+    )
+
+except ImportError:
+    from airflow.sdk.module_loading import import_string, iter_namespace, qualname
+
+try:
+    # This function was not available in Airflow 3.0/3.1 in module_loading, but it's good to keep it in the
+    # same shared module - good for reuse
+    from airflow.sdk._shared.module_loading import is_valid_dotpath
+
+except ImportError:
+    # TODO: Remove it when Airflow 3.2.0 is the minimum version
+    def is_valid_dotpath(path: str) -> bool:
+        import re
+
+        if not isinstance(path, str):
+            return False
+        pattern = r"^[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z_][a-zA-Z0-9_]*)*$"
+        return bool(re.match(pattern, path))
+
+
+__all__ = [
+    "import_string",
+    "qualname",
+    "iter_namespace",
+]
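Illustration (not part of the diff): provider code can import the helpers from the new compat module and let it pick between `airflow.utils.module_loading` and `airflow.sdk.module_loading`. A small sketch:

    from airflow.providers.common.compat.module_loading import import_string, is_valid_dotpath

    path = "json.dumps"
    if is_valid_dotpath(path):
        dumps = import_string(path)  # resolves the dotted path to the json.dumps callable
        print(dumps({"ok": True}))   # {"ok": true}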

airflow/providers/common/compat/sdk.py
@@ -25,6 +25,8 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+
 if TYPE_CHECKING:
     import airflow.sdk.io as io  # noqa: F401
     import airflow.sdk.timezone as timezone  # noqa: F401
@@ -77,6 +79,26 @@ if TYPE_CHECKING:
     from airflow.sdk.definitions.context import context_merge as context_merge
     from airflow.sdk.definitions.mappedoperator import MappedOperator as MappedOperator
     from airflow.sdk.definitions.template import literal as literal
+    from airflow.sdk.exceptions import (
+        AirflowException as AirflowException,
+        AirflowFailException as AirflowFailException,
+        AirflowNotFoundException as AirflowNotFoundException,
+        AirflowSensorTimeout as AirflowSensorTimeout,
+        AirflowSkipException as AirflowSkipException,
+        AirflowTaskTimeout as AirflowTaskTimeout,
+        ParamValidationError as ParamValidationError,
+        TaskDeferred as TaskDeferred,
+        XComNotFound as XComNotFound,
+    )
+    from airflow.sdk.log import redact as redact
+    from airflow.sdk.observability.stats import Stats as Stats
+
+    # Airflow 3-only exceptions (conditionally imported)
+    if AIRFLOW_V_3_0_PLUS:
+        from airflow.sdk.exceptions import (
+            DagRunTriggerException as DagRunTriggerException,
+            DownstreamTasksSkipped as DownstreamTasksSkipped,
+        )
     from airflow.sdk.execution_time.context import (
         AIRFLOW_VAR_NAME_FORMAT_MAPPING as AIRFLOW_VAR_NAME_FORMAT_MAPPING,
         context_to_airflow_vars as context_to_airflow_vars,
@@ -84,6 +106,7 @@ if TYPE_CHECKING:
     from airflow.sdk.execution_time.timeout import timeout as timeout
     from airflow.sdk.execution_time.xcom import XCom as XCom
 
+
 from airflow.providers.common.compat._compat_utils import create_module_getattr
 
 # Rename map for classes that changed names between Airflow 2.x and 3.x
@@ -199,8 +222,45 @@ _IMPORT_MAP: dict[str, str | tuple[str, ...]] = {
     # XCom & Task Communication
     # ============================================================================
     "XCOM_RETURN_KEY": "airflow.models.xcom",
+    # ============================================================================
+    # Exceptions (deprecated in airflow.exceptions, prefer SDK)
+    # ============================================================================
+    # Note: AirflowException and AirflowNotFoundException are not deprecated, but exposing them
+    # here keeps provider imports consistent across Airflow 2 and 3.
+    "AirflowException": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "AirflowFailException": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "AirflowNotFoundException": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "AirflowSkipException": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "AirflowTaskTimeout": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "AirflowSensorTimeout": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "ParamValidationError": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "TaskDeferred": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "XComNotFound": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    # ============================================================================
+    # Observability
+    # ============================================================================
+    "Stats": ("airflow.sdk.observability.stats", "airflow.stats"),
+    # ============================================================================
+    # Secrets Masking
+    # ============================================================================
+    "redact": (
+        "airflow.sdk.log",
+        "airflow.sdk._shared.secrets_masker",
+        "airflow.sdk.execution_time.secrets_masker",
+        "airflow.utils.log.secrets_masker",
+    ),
 }
 
+# Airflow 3-only exceptions (not available in Airflow 2)
+_AIRFLOW_3_ONLY_EXCEPTIONS: dict[str, tuple[str, ...]] = {
+    "DownstreamTasksSkipped": ("airflow.sdk.exceptions", "airflow.exceptions"),
+    "DagRunTriggerException": ("airflow.sdk.exceptions", "airflow.exceptions"),
+}
+
+# Add Airflow 3-only exceptions to _IMPORT_MAP if running Airflow 3+
+if AIRFLOW_V_3_0_PLUS:
+    _IMPORT_MAP.update(_AIRFLOW_3_ONLY_EXCEPTIONS)
+
 # Module map: module_name -> module_path(s)
 # For entire modules that have been moved (e.g., timezone)
 # Usage: from airflow.providers.common.compat.lazy_compat import timezone
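Illustration (not part of the diff): with these `_IMPORT_MAP` entries, provider code can import the exceptions from the compat `sdk` module and let its module-level `__getattr__` (built by `create_module_getattr`) resolve them from `airflow.sdk.exceptions` on Airflow 3 or `airflow.exceptions` on Airflow 2. A hedged usage sketch:

    from airflow.providers.common.compat.sdk import AirflowException, AirflowSkipException

    def check_partition(found: bool, broken: bool) -> None:
        if broken:
            # resolved lazily from the first module path that provides the name
            raise AirflowException("partition metadata is inconsistent")
        if not found:
            raise AirflowSkipException("nothing to do for this partition")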

airflow/providers/common/compat/version_compat.py
@@ -32,16 +32,13 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
     return airflow_version.major, airflow_version.minor, airflow_version.micro
 
 
-AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
-AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)
+AIRFLOW_V_3_0_PLUS: bool = get_base_airflow_version_tuple() >= (3, 0, 0)
+AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 
-
-
-else:
-    from airflow.models import BaseOperator
+# BaseOperator removed from version_compat to avoid circular imports
+# Import it directly in files that need it instead
 
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
-    "BaseOperator",
 ]
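Illustration (not part of the diff): since `BaseOperator` is no longer re-exported here, callers branch on the flags and import it themselves; a sketch of that pattern (the Airflow 3 import path is an assumption based on the task SDK layout):

    from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS

    if AIRFLOW_V_3_0_PLUS:
        from airflow.sdk import BaseOperator  # Airflow 3: task SDK export (assumed path)
    else:
        from airflow.models import BaseOperator  # Airflow 2: matches the removed fallback above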

{apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-compat
-Version: 1.9.0rc1
+Version: 1.11.0rc1
 Summary: Provider package apache-airflow-providers-common-compat for Apache Airflow
 Keywords: airflow-provider,common.compat,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,13 +22,13 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
-Requires-Dist: apache-airflow>=2.
+Requires-Dist: apache-airflow>=2.11.0rc1
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: apache-airflow-providers-standard ; extra == "standard"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-compat/1.
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-compat/1.
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-compat/1.11.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-compat/1.11.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -61,7 +61,7 @@ Provides-Extra: standard
 
 Package ``apache-airflow-providers-common-compat``
 
-Release: ``1.9.0``
+Release: ``1.11.0``
 
 
 Common Compatibility Provider - providing compatibility code for previous Airflow versions
@@ -74,7 +74,7 @@ This is a provider package for ``common.compat`` provider. All classes for this
 are in ``airflow.providers.common.compat`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.11.0/>`_.
 
 Installation
 ------------
@@ -91,7 +91,7 @@ Requirements
 ==================  ==================
 PIP package         Version required
 ==================  ==================
-``apache-airflow``  ``>=2.
+``apache-airflow``  ``>=2.11.0``
 ``asgiref``         ``>=2.3.0``
 ==================  ==================
 
@@ -125,5 +125,5 @@ Extra Dependencies
 ===============  ========================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-compat/1.11.0/changelog.html>`_.
 

{apache_airflow_providers_common_compat-1.9.0rc1.dist-info → apache_airflow_providers_common_compat-1.11.0rc1.dist-info}/RECORD
@@ -1,14 +1,15 @@
-airflow/providers/common/compat/__init__.py,sha256=
+airflow/providers/common/compat/__init__.py,sha256=O0_c6XzgbF2izqWCKygTAS2NawlpHA4uewMV6S1udkc,1503
 airflow/providers/common/compat/_compat_utils.py,sha256=MAB8q34kchNo05y5iufKs_82MXe-lLbpwHlfbb1tJGQ,4294
 airflow/providers/common/compat/check.py,sha256=d6at8iFn_c2jbnmvswoMYz1DFUrAbQTVKMCA5PYAOrQ,4347
 airflow/providers/common/compat/get_provider_info.py,sha256=BfscTzSrq5WxqTt4njI9WwvWyvE3JjYYfZEdrSRT6IU,1555
-airflow/providers/common/compat/sdk.py,sha256=
-airflow/providers/common/compat/version_compat.py,sha256=
+airflow/providers/common/compat/sdk.py,sha256=SA-1_ZiT9YPDRWfWA5zQAReJftkWx6-OmoacdD2fWdU,14353
+airflow/providers/common/compat/version_compat.py,sha256=SEzG5OF34F8doKhFNByFY1O72M2LNWdKEOEPi1hpa38,1803
 airflow/providers/common/compat/assets/__init__.py,sha256=P1xX2Nw8LeS3u_Prz-SZL_jtTteHXqDd-0G8hZXD8oU,2079
 airflow/providers/common/compat/connection/__init__.py,sha256=K7z3lzLEdQjf_hh8GfdglqajdemvVb1h_i2srqTtKgc,1677
 airflow/providers/common/compat/lineage/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/compat/lineage/entities.py,sha256=ZdZh_U4i05xhpWtTMwaWxgzEIFHT7dF5zEUwCDdtRcA,2668
-airflow/providers/common/compat/lineage/hook.py,sha256=
+airflow/providers/common/compat/lineage/hook.py,sha256=mBusJyETAm8LM4UBW3320IMoKgINaZucdVJEPRLAe-8,9555
+airflow/providers/common/compat/module_loading/__init__.py,sha256=fhvOQzfVtC177ZqfIDNc9bWwFYD5_HMjA_PGI0PJyCI,1677
 airflow/providers/common/compat/notifier/__init__.py,sha256=ENJCBUjdRT68heVSfI8fhMs4hi6bUh5CYUkQTwTaqxM,1189
 airflow/providers/common/compat/openlineage/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/compat/openlineage/check.py,sha256=Vh73HcOSY__fDkeFs1SwD20tOWIu_yG66sdQrujKa4I,4701
@@ -25,9 +26,9 @@ airflow/providers/common/compat/standard/__init__.py,sha256=9hdXHABrVpkbpjZgUft3
 airflow/providers/common/compat/standard/operators.py,sha256=MUQelwNbAbgdrA2RJkK2SJDz5AvhQ9VW6sr328r6khQ,1599
 airflow/providers/common/compat/standard/triggers.py,sha256=ZGqIvGtliogw5-avmTm_4oY-7nX1pbeCk7jLz3vb3Ck,1157
 airflow/providers/common/compat/standard/utils.py,sha256=GN38uPDqhpPaFD1CQAEQuSG2FAY8pJ_Q_-EIFDE7D08,1335
-apache_airflow_providers_common_compat-1.
-apache_airflow_providers_common_compat-1.
-apache_airflow_providers_common_compat-1.
-apache_airflow_providers_common_compat-1.
-apache_airflow_providers_common_compat-1.
-apache_airflow_providers_common_compat-1.
+apache_airflow_providers_common_compat-1.11.0rc1.dist-info/entry_points.txt,sha256=OdOClAuY8E82VvA-Zo6narFujtXdGihHKZH2HfmlPIo,109
+apache_airflow_providers_common_compat-1.11.0rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_common_compat-1.11.0rc1.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_common_compat-1.11.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_common_compat-1.11.0rc1.dist-info/METADATA,sha256=6MpnsKPsCUd_ks5wWcJj1elLdlh2GsXPKGb8cAi4IRU,5619
+apache_airflow_providers_common_compat-1.11.0rc1.dist-info/RECORD,,

Files without changes (dist-info directory renamed only): WHEEL, entry_points.txt, licenses/LICENSE, licenses/NOTICE.