apache-airflow-providers-standard 0.2.0rc1__tar.gz → 0.3.0rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release.
This version of apache-airflow-providers-standard might be problematic.
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/PKG-INFO +6 -6
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/README.rst +3 -3
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/pyproject.toml +4 -3
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/__init__.py +1 -1
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/get_provider_info.py +2 -2
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/bash.py +1 -1
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/datetime.py +6 -1
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/python.py +3 -5
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/weekday.py +5 -2
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/external_task.py +99 -28
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/triggers/external_task.py +1 -1
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/sensor_helper.py +11 -4
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/LICENSE +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/hooks/__init__.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/hooks/filesystem.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/hooks/package_index.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/hooks/subprocess.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/__init__.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/branch.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/empty.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/latest_only.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/smooth.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/trigger_dagrun.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/__init__.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/bash.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/date_time.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/filesystem.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/python.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/time.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/time_delta.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/weekday.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/triggers/__init__.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/triggers/file.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/triggers/temporal.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/__init__.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/python_virtualenv.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/skipmixin.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/weekday.py +0 -0
- {apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/version_compat.py +0 -0
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 0.2.0rc1
+Version: 0.3.0rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow>=2.9.0rc0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.2.0``
+Release: ``0.3.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/>`_.
 
 Installation
 ------------
@@ -88,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html>`_.
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/README.rst
RENAMED
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.2.0``
+Release: ``0.3.0``
 
 
 Airflow Standard Provider
@@ -36,7 +36,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/>`_.
 
 Installation
 ------------
@@ -57,4 +57,4 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html>`_.
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/pyproject.toml
RENAMED
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-standard"
-version = "0.2.0.rc1"
+version = "0.3.0.rc1"
 description = "Provider package apache-airflow-providers-standard for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -66,6 +66,7 @@ dev = [
     "apache-airflow-task-sdk",
     "apache-airflow-devel-common",
     # Additional devel dependencies (do not remove this line and add extra development dependencies)
+    "apache-airflow-providers-mysql",
 ]
 
 [tool.uv.sources]
@@ -79,8 +80,8 @@ apache-airflow-providers-fab = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.3.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/__init__.py
RENAMED
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.2.0"
+__version__ = "0.3.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/get_provider_info.py
RENAMED
@@ -27,8 +27,8 @@ def get_provider_info():
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
         "state": "ready",
-        "source-date-epoch": …,
-        "versions": ["0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
+        "source-date-epoch": 1743477899,
+        "versions": ["0.3.0", "0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
         "integrations": [
             {
                 "integration-name": "Standard",
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/bash.py
RENAMED
@@ -253,7 +253,7 @@ class BashOperator(BaseOperator):
         """
         with working_directory(cwd=self.cwd) as cwd:
             with tempfile.NamedTemporaryFile(mode="w", dir=cwd, suffix=".sh") as file:
-                file.write(cast(str, self.bash_command))
+                file.write(cast("str", self.bash_command))
                 file.flush()
 
                 bash_script = os.path.basename(file.name)
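The `cast(str, …)` to `cast("str", …)` change is cosmetic at runtime: `typing.cast` simply returns its second argument, and it accepts the target type as a string, which lets a codebase reference types that are only imported under `TYPE_CHECKING`. A minimal sketch of that pattern (the `Decimal` import is illustrative, not from the provider):

    from __future__ import annotations

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Only the type checker sees this import; it costs nothing at runtime.
        from decimal import Decimal


    def as_decimal(value: object) -> Decimal:
        # cast() is an identity function at runtime; the string form avoids
        # needing "Decimal" to be a live name when this line executes.
        return cast("Decimal", value)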
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/datetime.py
RENAMED
@@ -77,9 +77,14 @@ class BranchDateTimeOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context["logical_date"]
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.coerce_datetime(timezone.utcnow())
+        if TYPE_CHECKING:
+            assert isinstance(now, datetime.datetime)
         lower, upper = target_times_as_dates(now, self.target_lower, self.target_upper)
         lower = timezone.coerce_datetime(lower, self.dag.timezone)
         upper = timezone.coerce_datetime(upper, self.dag.timezone)
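This is the recurring pattern of this release (it reappears in `weekday.py` and `sensors/external_task.py` below): on Airflow 3 a run may carry no `logical_date`, for example a manually triggered run, so the operators fall back to the DAG run's `run_after`. A standalone sketch of the lookup order, with a stubbed `dag_run` in place of the real context:

    import datetime


    def resolve_now(context: dict) -> datetime.datetime:
        # Prefer the scheduler-provided logical date ...
        now = context.get("logical_date")
        if not now:
            # ... and fall back to when the run was slated to start;
            # Airflow 3 runs without a data interval have no logical_date.
            dag_run = context.get("dag_run")
            now = dag_run.run_after
        return now


    class _StubDagRun:
        run_after = datetime.datetime(2025, 4, 1, tzinfo=datetime.timezone.utc)


    print(resolve_now({"logical_date": None, "dag_run": _StubDagRun()}))
    # -> 2025-04-01 00:00:00+00:00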
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/python.py
RENAMED
@@ -56,7 +56,7 @@ if AIRFLOW_V_3_0_PLUS:
     from airflow.providers.standard.utils.skipmixin import SkipMixin
 else:
     from airflow.models.skipmixin import SkipMixin
-    from airflow.operators.branch import BranchMixIn
+    from airflow.operators.branch import BranchMixIn  # type: ignore[no-redef]
 
 
 log = logging.getLogger(__name__)
@@ -460,8 +460,7 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         serializer = serializer or "pickle"
         if serializer not in _SERIALIZERS:
             msg = (
-                f"Unsupported serializer {serializer!r}. "
-                f"Expected one of {', '.join(map(repr, _SERIALIZERS))}"
+                f"Unsupported serializer {serializer!r}. Expected one of {', '.join(map(repr, _SERIALIZERS))}"
             )
             raise AirflowException(msg)
 
@@ -1142,7 +1141,6 @@ def _get_current_context() -> Mapping[str, Any]:
 
     if not _CURRENT_CONTEXT:
         raise RuntimeError(
-            "Current context was requested but no context was found! "
-            "Are you running within an Airflow task?"
+            "Current context was requested but no context was found! Are you running within an Airflow task?"
         )
     return _CURRENT_CONTEXT[-1]
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/operators/weekday.py
RENAMED
@@ -116,10 +116,13 @@ class BranchDayOfWeekOperator(BaseBranchOperator):
 
     def choose_branch(self, context: Context) -> str | Iterable[str]:
         if self.use_task_logical_date:
-            now = context["logical_date"]
+            now = context.get("logical_date")
+            if not now:
+                dag_run = context.get("dag_run")
+                now = dag_run.run_after  # type: ignore[union-attr, assignment]
         else:
             now = timezone.make_naive(timezone.utcnow(), self.dag.timezone)
 
-        if now.isoweekday() in self._week_day_num:
+        if now.isoweekday() in self._week_day_num:  # type: ignore[union-attr]
             return self.follow_task_ids_if_true
         return self.follow_task_ids_if_false
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/sensors/external_task.py
RENAMED
@@ -31,21 +31,26 @@ from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.triggers.external_task import WorkflowTrigger
 from airflow.providers.standard.utils.sensor_helper import _get_count, _get_external_task_group_task_ids
 from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
-from airflow.sensors.base import BaseSensorOperator
 from airflow.utils.file import correct_maybe_zipped
 from airflow.utils.session import NEW_SESSION, provide_session
 from airflow.utils.state import State, TaskInstanceState
 
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.sensor import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator
+
 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
 
-    from airflow.models.baseoperator import BaseOperator
     from airflow.models.taskinstancekey import TaskInstanceKey
 
 try:
+    from airflow.sdk import BaseOperator
     from airflow.sdk.definitions.context import Context
 except ImportError:
     # TODO: Remove once provider drops support for Airflow 2
+    from airflow.models.baseoperator import BaseOperator
     from airflow.utils.context import Context
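The reshuffled imports above are the provider's version-compat idiom: resolve the Airflow 3 (task SDK) location first and fall back to the Airflow 2 module path, either via an explicit version gate or via try/except ImportError. The skeleton, reduced to its two forms (module paths as they appear in the diff):

    from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS

    # Form 1: explicit version gate.
    if AIRFLOW_V_3_0_PLUS:
        from airflow.sdk.bases.sensor import BaseSensorOperator
    else:
        from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]

    # Form 2: try the new location, fall back to the old one.
    try:
        from airflow.sdk import BaseOperator
        from airflow.sdk.definitions.context import Context
    except ImportError:
        # Airflow 2 fallback; drop once Airflow 2 support ends.
        from airflow.models.baseoperator import BaseOperator  # type: ignore[no-redef]
        from airflow.utils.context import Context  # type: ignore[no-redef]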
@@ -65,15 +70,16 @@ class ExternalDagLink(BaseOperatorLink):
     name = "External DAG"
 
     def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
-        from airflow.models.renderedtifields import RenderedTaskInstanceFields
-
         if TYPE_CHECKING:
             assert isinstance(operator, (ExternalTaskMarker, ExternalTaskSensor))
 
-        if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
-            external_dag_id: str = template_fields.get("external_dag_id", operator.external_dag_id)
-        else:
-            external_dag_id = operator.external_dag_id
+        external_dag_id = operator.external_dag_id
+
+        if not AIRFLOW_V_3_0_PLUS:
+            from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+            if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+                external_dag_id: str = template_fields.get("external_dag_id", operator.external_dag_id)  # type: ignore[no-redef]
 
         if AIRFLOW_V_3_0_PLUS:
             from airflow.utils.helpers import build_airflow_dagrun_url
@@ -245,16 +251,22 @@ class ExternalTaskSensor(BaseSensorOperator):
         self.poll_interval = poll_interval
 
     def _get_dttm_filter(self, context):
+        logical_date = context.get("logical_date")
+        if logical_date is None:
+            dag_run = context.get("dag_run")
+            if TYPE_CHECKING:
+                assert dag_run
+
+            logical_date = dag_run.run_after
         if self.execution_delta:
-            dttm = context["logical_date"] - self.execution_delta
+            dttm = logical_date - self.execution_delta
         elif self.execution_date_fn:
             dttm = self._handle_execution_date_fn(context=context)
         else:
-            dttm = context["logical_date"]
+            dttm = logical_date
         return dttm if isinstance(dttm, list) else [dttm]
 
-    @provide_session
-    def poke(self, context: Context, session: Session = NEW_SESSION) -> bool:
+    def poke(self, context: Context) -> bool:
         # delay check to poke rather than __init__ in case it was supplied as XComArgs
         if self.external_task_ids and len(self.external_task_ids) > len(set(self.external_task_ids)):
             raise ValueError("Duplicate task_ids passed in external_task_ids parameter")
@@ -285,15 +297,62 @@ class ExternalTaskSensor(BaseSensorOperator):
             serialized_dttm_filter,
         )
 
-        # In poke mode this will check dag existence only once
-        if self.check_existence and not self._has_checked_existence:
-            self._check_for_existence(session=session)
+        if AIRFLOW_V_3_0_PLUS:
+            return self._poke_af3(context, dttm_filter)
+        else:
+            return self._poke_af2(dttm_filter)
+
+    def _poke_af3(self, context: Context, dttm_filter: list[datetime.datetime]) -> bool:
+        self._has_checked_existence = True
+        ti = context["ti"]
+
+        def _get_count(states: list[str]) -> int:
+            if self.external_task_ids:
+                return ti.get_ti_count(
+                    dag_id=self.external_dag_id,
+                    task_ids=self.external_task_ids,  # type: ignore[arg-type]
+                    logical_dates=dttm_filter,
+                    states=states,
+                )
+            elif self.external_task_group_id:
+                return ti.get_ti_count(
+                    dag_id=self.external_dag_id,
+                    task_group_id=self.external_task_group_id,
+                    logical_dates=dttm_filter,
+                    states=states,
+                )
+            else:
+                return ti.get_dr_count(
+                    dag_id=self.external_dag_id,
+                    logical_dates=dttm_filter,
+                    states=states,
+                )
 
-        count_failed = -1
         if self.failed_states:
-            count_failed = self.get_count(dttm_filter, session, self.failed_states)
+            count = _get_count(self.failed_states)
+            count_failed = self._calculate_count(count, dttm_filter)
+            self._handle_failed_states(count_failed)
 
-        # Fail if anything in the list has failed.
+        if self.skipped_states:
+            count = _get_count(self.skipped_states)
+            count_skipped = self._calculate_count(count, dttm_filter)
+            self._handle_skipped_states(count_skipped)
+
+        count = _get_count(self.allowed_states)
+        count_allowed = self._calculate_count(count, dttm_filter)
+        return count_allowed == len(dttm_filter)
+
+    def _calculate_count(self, count: int, dttm_filter: list[datetime.datetime]) -> float | int:
+        """Calculate the normalized count based on the type of check."""
+        if self.external_task_ids:
+            return count / len(self.external_task_ids)
+        elif self.external_task_group_id:
+            return count / len(dttm_filter)
+        else:
+            return count
+
+    def _handle_failed_states(self, count_failed: float | int) -> None:
+        """Handle failed states and raise appropriate exceptions."""
         if count_failed > 0:
             if self.external_task_ids:
                 if self.soft_fail:
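On Airflow 3 the sensor stops querying the metadata database directly and instead asks the task runtime for counts (`ti.get_ti_count` / `ti.get_dr_count`), then normalizes them in `_calculate_count`. A rough sketch of the success condition for the task-IDs case, assuming the same semantics as the diff (each watched date must contribute one task instance per watched task in an allowed state):

    def dates_fully_succeeded(ti, external_dag_id, task_ids, dttm_filter, allowed_states):
        # One count across all watched logical dates and all watched tasks.
        count = ti.get_ti_count(
            dag_id=external_dag_id,
            task_ids=task_ids,
            logical_dates=dttm_filter,
            states=allowed_states,
        )
        # Each date contributes len(task_ids) matching task instances when
        # fully satisfied, so normalize before comparing to the date count.
        return count / len(task_ids) == len(dttm_filter)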
@@ -315,7 +374,6 @@ class ExternalTaskSensor(BaseSensorOperator):
                     f"The external task_group '{self.external_task_group_id}' "
                     f"in DAG '{self.external_dag_id}' failed."
                 )
-
             else:
                 if self.soft_fail:
                     raise AirflowSkipException(
@@ -323,12 +381,8 @@ class ExternalTaskSensor(BaseSensorOperator):
                 )
             raise AirflowException(f"The external DAG {self.external_dag_id} failed.")
 
-        count_skipped = -1
-        if self.skipped_states:
-            count_skipped = self.get_count(dttm_filter, session, self.skipped_states)
-
-        # Skip if anything in the list has skipped. Note if we are checking multiple tasks and one skips
-        # before another errors, we'll skip first.
+    def _handle_skipped_states(self, count_skipped: float | int) -> None:
+        """Handle skipped states and raise appropriate exceptions."""
         if count_skipped > 0:
             if self.external_task_ids:
                 raise AirflowSkipException(
@@ -346,7 +400,19 @@ class ExternalTaskSensor(BaseSensorOperator):
                     "Skipping."
                 )
 
-
+    @provide_session
+    def _poke_af2(self, dttm_filter: list[datetime.datetime], session: Session = NEW_SESSION) -> bool:
+        if self.check_existence and not self._has_checked_existence:
+            self._check_for_existence(session=session)
+
+        if self.failed_states:
+            count_failed = self.get_count(dttm_filter, session, self.failed_states)
+            self._handle_failed_states(count_failed)
+
+        if self.skipped_states:
+            count_skipped = self.get_count(dttm_filter, session, self.skipped_states)
+            self._handle_skipped_states(count_skipped)
+
         count_allowed = self.get_count(dttm_filter, session, self.allowed_states)
         return count_allowed == len(dttm_filter)
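`_poke_af2` keeps the Airflow 2 code path, with `@provide_session` injecting a SQLAlchemy session when the caller does not supply one. Roughly, the decorator behaves like this simplified stand-in (the real implementation lives in `airflow.utils.session` and is more careful about keyword detection; the session class here is a fake for the sketch):

    import functools


    class _FakeSession:
        """Stand-in for sqlalchemy.orm.Session, just for this sketch."""

        def commit(self) -> None: ...
        def rollback(self) -> None: ...
        def close(self) -> None: ...


    def provide_session(func):
        @functools.wraps(func)
        def wrapper(*args, session=None, **kwargs):
            if session is not None:
                # Caller owns the session and its transaction.
                return func(*args, session=session, **kwargs)
            session = _FakeSession()  # the real decorator opens a SQLAlchemy session
            try:
                result = func(*args, session=session, **kwargs)
                session.commit()
                return result
            except Exception:
                session.rollback()
                raise
            finally:
                session.close()

        return wrapper


    @provide_session
    def count_rows(table: str, session=None) -> int:
        # Would normally run a query via `session`.
        return 0


    print(count_rows("dag_run"))  # session injected automatically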
@@ -398,8 +464,7 @@ class ExternalTaskSensor(BaseSensorOperator):
         for external_task_id in self.external_task_ids:
             if not refreshed_dag_info.has_task(external_task_id):
                 raise AirflowException(
-                    f"The external task {external_task_id} in "
-                    f"DAG {self.external_dag_id} does not exist."
+                    f"The external task {external_task_id} in DAG {self.external_dag_id} does not exist."
                 )
 
         if self.external_task_group_id:
@@ -482,6 +547,9 @@ class ExternalTaskMarker(EmptyOperator):
     """
 
     template_fields = ["external_dag_id", "external_task_id", "logical_date"]
+    if not AIRFLOW_V_3_0_PLUS:
+        template_fields.append("execution_date")
+
     ui_color = "#4db7db"
     operator_extra_links = [ExternalDagLink()]
@@ -509,6 +577,9 @@ class ExternalTaskMarker(EmptyOperator):
                 f"Expected str or datetime.datetime type for logical_date. Got {type(logical_date)}"
             )
 
+        if not AIRFLOW_V_3_0_PLUS:
+            self.execution_date = self.logical_date
+
         if recursion_depth <= 0:
             raise ValueError("recursion_depth should be a positive integer")
         self.recursion_depth = recursion_depth
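The two `if not AIRFLOW_V_3_0_PLUS` blocks keep `ExternalTaskMarker` usable on Airflow 2, where serialization and templates still expect `execution_date`: the legacy name is appended to `template_fields` and mirrored from `logical_date` at construction time. The aliasing pattern in isolation (an illustrative class, not the provider's code):

    AIRFLOW_V_3_0_PLUS = False  # pretend we are on Airflow 2 for this sketch


    class Marker:
        # The class-level gate runs once, at class creation time.
        template_fields = ["external_dag_id", "external_task_id", "logical_date"]
        if not AIRFLOW_V_3_0_PLUS:
            template_fields.append("execution_date")

        def __init__(self, logical_date: str) -> None:
            self.logical_date = logical_date
            if not AIRFLOW_V_3_0_PLUS:
                # Legacy alias so Airflow 2 code reading execution_date still works.
                self.execution_date = self.logical_date


    m = Marker("{{ logical_date.isoformat() }}")
    print(m.template_fields[-1], m.execution_date)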
{apache_airflow_providers_standard-0.2.0rc1 → apache_airflow_providers_standard-0.3.0rc1}/src/airflow/providers/standard/utils/sensor_helper.py
RENAMED
@@ -21,6 +21,7 @@ from typing import TYPE_CHECKING, cast
 from sqlalchemy import func, select, tuple_
 
 from airflow.models import DagBag, DagRun, TaskInstance
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.session import NEW_SESSION, provide_session
 
 if TYPE_CHECKING:
@@ -71,10 +72,12 @@ def _get_count(
                 tuple_(TI.task_id, TI.map_index).in_(external_task_group_task_ids)
             )
         )
-        ) / len(external_task_group_task_ids)
+            / len(external_task_group_task_ids)
+            * len(dttm_filter)
+        )
     else:
         count = session.scalar(_count_stmt(DR, states, dttm_filter, external_dag_id))
-    return cast(int, count)
+    return cast("int", count)
@@ -86,8 +89,10 @@ def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
     :param dttm_filter: date time filter for logical date
     :param external_dag_id: The ID of the external DAG.
     """
+    date_field = model.logical_date if AIRFLOW_V_3_0_PLUS else model.execution_date
+
     return select(func.count()).where(
-        model.dag_id == external_dag_id, model.state.in_(states), model.logical_date.in_(dttm_filter)
+        model.dag_id == external_dag_id, model.state.in_(states), date_field.in_(dttm_filter)
     )
@@ -104,11 +109,13 @@ def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, exter
     task_group = refreshed_dag_info.task_group_dict.get(external_task_group_id)
 
     if task_group:
+        date_field = TaskInstance.logical_date if AIRFLOW_V_3_0_PLUS else TaskInstance.execution_date
+
         group_tasks = session.scalars(
             select(TaskInstance).filter(
                 TaskInstance.dag_id == external_dag_id,
                 TaskInstance.task_id.in_(task.task_id for task in task_group),
-                TaskInstance.logical_date.in_(dttm_filter),
+                date_field.in_(dttm_filter),
             )
         )
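`sensor_helper` now chooses the ORM date column per Airflow major version, because the `DagRun`/`TaskInstance` column is named `execution_date` on Airflow 2 and `logical_date` on Airflow 3. A self-contained sketch of the same count-statement construction against a toy declarative model (SQLAlchemy only; the version flag is a plain boolean here):

    from sqlalchemy import Column, Integer, String, func, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()


    class Run(Base):
        __tablename__ = "run"
        id = Column(Integer, primary_key=True)
        dag_id = Column(String)
        state = Column(String)
        logical_date = Column(String)    # Airflow 3 name
        execution_date = Column(String)  # Airflow 2 name


    def count_stmt(model, states, dttm_filter, external_dag_id, airflow_3: bool):
        # Pick the column that exists for the running Airflow major version.
        date_field = model.logical_date if airflow_3 else model.execution_date
        return select(func.count()).where(
            model.dag_id == external_dag_id,
            model.state.in_(states),
            date_field.in_(dttm_filter),
        )


    print(count_stmt(Run, ["success"], ["2025-04-01"], "other_dag", airflow_3=True))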
All remaining files listed above (+0 -0) are unchanged between the two versions.