apache-airflow-providers-standard 1.4.1rc2__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/standard/__init__.py +1 -1
- airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py +0 -6
- airflow/providers/standard/example_dags/example_hitl_operator.py +81 -0
- airflow/providers/standard/exceptions.py +8 -0
- airflow/providers/standard/get_provider_info.py +2 -0
- airflow/providers/standard/hooks/filesystem.py +1 -4
- airflow/providers/standard/hooks/package_index.py +1 -4
- airflow/providers/standard/hooks/subprocess.py +1 -4
- airflow/providers/standard/models/__init__.py +16 -0
- airflow/providers/standard/operators/branch.py +2 -4
- airflow/providers/standard/operators/hitl.py +234 -0
- airflow/providers/standard/operators/latest_only.py +12 -2
- airflow/providers/standard/operators/python.py +15 -2
- airflow/providers/standard/operators/trigger_dagrun.py +1 -1
- airflow/providers/standard/sensors/date_time.py +5 -2
- airflow/providers/standard/sensors/external_task.py +26 -9
- airflow/providers/standard/triggers/external_task.py +3 -3
- airflow/providers/standard/triggers/hitl.py +132 -0
- airflow/providers/standard/utils/python_virtualenv.py +1 -6
- airflow/providers/standard/utils/skipmixin.py +2 -7
- airflow/providers/standard/version_compat.py +3 -1
- {apache_airflow_providers_standard-1.4.1rc2.dist-info → apache_airflow_providers_standard-1.5.0.dist-info}/METADATA +11 -9
- {apache_airflow_providers_standard-1.4.1rc2.dist-info → apache_airflow_providers_standard-1.5.0.dist-info}/RECORD +25 -21
- {apache_airflow_providers_standard-1.4.1rc2.dist-info → apache_airflow_providers_standard-1.5.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_standard-1.4.1rc2.dist-info → apache_airflow_providers_standard-1.5.0.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "1.4.1rc2"
+__version__ = "1.5.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"

airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py
@@ -56,9 +56,3 @@ with DAG(
     end = EmptyOperator(task_id="end")

     start >> [trigger_child_task, external_task_sensor] >> end
-
-    from tests_common.test_utils.watcher import watcher
-
-    # This test needs watcher in order to properly mark success/failure
-    # when "teardown" task with trigger rule is part of the DAG
-    list(dag.tasks) >> watcher()

airflow/providers/standard/example_dags/example_hitl_operator.py
@@ -0,0 +1,81 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import datetime
+
+import pendulum
+
+from airflow.providers.standard.operators.hitl import (
+    ApprovalOperator,
+    HITLBranchOperator,
+    HITLEntryOperator,
+    HITLOperator,
+)
+from airflow.sdk import DAG, Param, task
+
+with DAG(
+    dag_id="example_hitl_operator",
+    start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
+    catchup=False,
+    tags=["example", "HITL"],
+):
+    wait_for_input = HITLEntryOperator(
+        task_id="wait_for_input",
+        subject="Please provide required information: ",
+        params={"information": Param("", type="string")},
+    )
+
+    wait_for_option = HITLOperator(
+        task_id="wait_for_option",
+        subject="Please choose one option to proceeded: ",
+        options=["option 1", "option 2", "option 3"],
+    )
+
+    valid_input_and_options = ApprovalOperator(
+        task_id="valid_input_and_options",
+        subject="Are the following input and options valid?",
+        body="""
+        Input: {{ task_instance.xcom_pull(task_ids='wait_for_input', key='return_value')["params_input"]["information"] }}
+        Option: {{ task_instance.xcom_pull(task_ids='wait_for_option', key='return_value')["chosen_options"] }}
+        """,
+        defaults="Reject",
+        execution_timeout=datetime.timedelta(minutes=1),
+    )
+
+    choose_a_branch_to_run = HITLBranchOperator(
+        task_id="choose_a_branch_to_run",
+        subject="You're now allowed to proceeded. Please choose one task to run: ",
+        options=["task_1", "task_2", "task_3"],
+    )
+
+    @task
+    def task_1(): ...
+
+    @task
+    def task_2(): ...
+
+    @task
+    def task_3(): ...
+
+    (
+        [wait_for_input, wait_for_option]
+        >> valid_input_and_options
+        >> choose_a_branch_to_run
+        >> [task_1(), task_2(), task_3()]
+    )

airflow/providers/standard/exceptions.py
@@ -55,3 +55,11 @@ class ExternalDagFailedError(AirflowExternalTaskSensorException):

 class DuplicateStateError(AirflowExternalTaskSensorException):
     """Raised when duplicate states are provided across allowed, skipped and failed states."""
+
+
+class HITLTriggerEventError(AirflowException):
+    """Raised when TriggerEvent contains error."""
+
+
+class HITLTimeoutError(HITLTriggerEventError):
+    """Raised when HILTOperator timeouts."""

airflow/providers/standard/get_provider_info.py
@@ -58,6 +58,7 @@ def get_provider_info():
                     "airflow.providers.standard.operators.latest_only",
                     "airflow.providers.standard.operators.smooth",
                     "airflow.providers.standard.operators.branch",
+                    "airflow.providers.standard.operators.hitl",
                 ],
             }
         ],
@@ -93,6 +94,7 @@ def get_provider_info():
                     "airflow.providers.standard.triggers.external_task",
                     "airflow.providers.standard.triggers.file",
                     "airflow.providers.standard.triggers.temporal",
+                    "airflow.providers.standard.triggers.hitl",
                 ],
             }
         ],

airflow/providers/standard/hooks/filesystem.py
@@ -20,10 +20,7 @@ from __future__ import annotations
 from pathlib import Path
 from typing import Any

-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.standard.version_compat import BaseHook


 class FSHook(BaseHook):

airflow/providers/standard/hooks/package_index.py
@@ -23,10 +23,7 @@ import subprocess
 from typing import Any
 from urllib.parse import quote, urlparse

-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.standard.version_compat import BaseHook


 class PackageIndexHook(BaseHook):

airflow/providers/standard/hooks/subprocess.py
@@ -24,10 +24,7 @@ from collections.abc import Iterator
 from subprocess import PIPE, STDOUT, Popen
 from tempfile import TemporaryDirectory, gettempdir

-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.standard.version_compat import BaseHook

 SubprocessResult = namedtuple("SubprocessResult", ["exit_code", "output"])

airflow/providers/standard/models/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.

airflow/providers/standard/operators/branch.py
@@ -27,7 +27,7 @@ from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, BaseOp
 if AIRFLOW_V_3_0_PLUS:
     from airflow.providers.standard.utils.skipmixin import SkipMixin
 else:
-    from airflow.models.skipmixin import SkipMixin
+    from airflow.models.skipmixin import SkipMixin

 if TYPE_CHECKING:
     from airflow.sdk.definitions.context import Context
@@ -56,9 +56,7 @@ class BranchMixIn(SkipMixin):
         if TYPE_CHECKING:
             assert dag

-        if branches_to_execute is None:
-            return
-        elif isinstance(branches_to_execute, str) or not isinstance(branches_to_execute, Iterable):
+        if isinstance(branches_to_execute, str) or not isinstance(branches_to_execute, Iterable):
             branches_to_execute = [branches_to_execute]

         for branch in branches_to_execute:

airflow/providers/standard/operators/hitl.py
@@ -0,0 +1,234 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
+
+if not AIRFLOW_V_3_1_PLUS:
+    raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.")
+
+
+from collections.abc import Collection, Mapping
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING, Any
+
+from airflow.providers.standard.exceptions import HITLTimeoutError, HITLTriggerEventError
+from airflow.providers.standard.triggers.hitl import HITLTrigger, HITLTriggerEventSuccessPayload
+from airflow.providers.standard.utils.skipmixin import SkipMixin
+from airflow.providers.standard.version_compat import BaseOperator
+from airflow.sdk.definitions.param import ParamsDict
+from airflow.sdk.execution_time.hitl import upsert_hitl_detail
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class HITLOperator(BaseOperator):
+    """
+    Base class for all Human-in-the-loop Operators to inherit from.
+
+    :param subject: Headline/subject presented to the user for the interaction task.
+    :param options: List of options that the user can select from to complete the task.
+    :param body: Descriptive text (with Markdown support) that gives the details that are needed to decide.
+    :param defaults: The default options and the options that are taken if timeout is passed.
+    :param multiple: Whether the user can select one or multiple options.
+    :param params: dictionary of parameter definitions that are in the format of Dag params such that
+        a Form Field can be rendered. Entered data is validated (schema, required fields) like for a Dag run
+        and added to XCom of the task result.
+    """
+
+    template_fields: Collection[str] = ("subject", "body")
+
+    def __init__(
+        self,
+        *,
+        subject: str,
+        options: list[str],
+        body: str | None = None,
+        defaults: str | list[str] | None = None,
+        multiple: bool = False,
+        params: ParamsDict | dict[str, Any] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.subject = subject
+        self.body = body
+
+        self.options = options
+        # allow defaults to store more than one options when multiple=True
+        self.defaults = [defaults] if isinstance(defaults, str) else defaults
+        self.multiple = multiple
+
+        self.params: ParamsDict = params if isinstance(params, ParamsDict) else ParamsDict(params or {})
+
+        self.validate_defaults()
+
+    def validate_defaults(self) -> None:
+        """
+        Validate whether the given defaults pass the following criteria.
+
+        1. Default options should be the subset of options.
+        2. When multiple is False, there should only be one option.
+        """
+        if self.defaults is not None:
+            if not set(self.defaults).issubset(self.options):
+                raise ValueError(f'defaults "{self.defaults}" should be a subset of options "{self.options}"')
+
+            if self.multiple is False and len(self.defaults) > 1:
+                raise ValueError('More than one defaults given when "multiple" is set to False.')
+
+    def execute(self, context: Context):
+        """Add a Human-in-the-loop Response and then defer to HITLTrigger and wait for user input."""
+        ti_id = context["task_instance"].id
+        # Write Human-in-the-loop input request to DB
+        upsert_hitl_detail(
+            ti_id=ti_id,
+            options=self.options,
+            subject=self.subject,
+            body=self.body,
+            defaults=self.defaults,
+            multiple=self.multiple,
+            params=self.serialized_params,
+        )
+        if self.execution_timeout:
+            timeout_datetime = datetime.now(timezone.utc) + self.execution_timeout
+        else:
+            timeout_datetime = None
+        self.log.info("Waiting for response")
+        # Defer the Human-in-the-loop response checking process to HITLTrigger
+        self.defer(
+            trigger=HITLTrigger(
+                ti_id=ti_id,
+                options=self.options,
+                defaults=self.defaults,
+                params=self.serialized_params,
+                multiple=self.multiple,
+                timeout_datetime=timeout_datetime,
+            ),
+            method_name="execute_complete",
+        )
+
+    @property
+    def serialized_params(self) -> dict[str, Any]:
+        return self.params.dump() if isinstance(self.params, ParamsDict) else self.params
+
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
+        if "error" in event:
+            self.process_trigger_event_error(event)
+
+        chosen_options = event["chosen_options"]
+        params_input = event["params_input"] or {}
+        self.validate_chosen_options(chosen_options)
+        self.validate_params_input(params_input)
+        return HITLTriggerEventSuccessPayload(
+            chosen_options=chosen_options,
+            params_input=params_input,
+        )
+
+    def process_trigger_event_error(self, event: dict[str, Any]) -> None:
+        if event["error_type"] == "timeout":
+            raise HITLTimeoutError(event)
+
+        raise HITLTriggerEventError(event)
+
+    def validate_chosen_options(self, chosen_options: list[str]) -> None:
+        """Check whether user provide valid response."""
+        if diff := set(chosen_options) - set(self.options):
+            raise ValueError(f"Responses {diff} not in {self.options}")
+
+    def validate_params_input(self, params_input: Mapping) -> None:
+        """Check whether user provide valid params input."""
+        if (
+            self.serialized_params is not None
+            and params_input is not None
+            and set(self.serialized_params.keys()) ^ set(params_input)
+        ):
+            raise ValueError(f"params_input {params_input} does not match params {self.params}")
+
+
+class ApprovalOperator(HITLOperator, SkipMixin):
+    """Human-in-the-loop Operator that has only 'Approval' and 'Reject' options."""
+
+    inherits_from_skipmixin = True
+
+    FIXED_ARGS = ["options", "multiple"]
+
+    def __init__(self, ignore_downstream_trigger_rules: bool = False, **kwargs) -> None:
+        for arg in self.FIXED_ARGS:
+            if arg in kwargs:
+                raise ValueError(f"Passing {arg} to ApprovalOperator is not allowed.")
+
+        self.ignore_downstream_trigger_rules = ignore_downstream_trigger_rules
+
+        super().__init__(options=["Approve", "Reject"], multiple=False, **kwargs)
+
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> Any:
+        ret = super().execute_complete(context=context, event=event)
+
+        chosen_option = ret["chosen_options"][0]
+        if chosen_option == "Approve":
+            self.log.info("Approved. Proceeding with downstream tasks...")
+            return ret
+
+        if not self.downstream_task_ids:
+            self.log.info("No downstream tasks; nothing to do.")
+            return ret
+
+        def get_tasks_to_skip():
+            if self.ignore_downstream_trigger_rules is True:
+                tasks = context["task"].get_flat_relatives(upstream=False)
+            else:
+                tasks = context["task"].get_direct_relatives(upstream=False)
+
+            yield from (t for t in tasks if not t.is_teardown)
+
+        tasks_to_skip = get_tasks_to_skip()
+
+        # this lets us avoid an intermediate list unless debug logging
+        if self.log.getEffectiveLevel() <= logging.DEBUG:
+            self.log.debug("Downstream task IDs %s", tasks_to_skip := list(get_tasks_to_skip()))
+
+        self.log.info("Skipping downstream tasks")
+        self.skip(ti=context["ti"], tasks=tasks_to_skip)
+
+        return ret
+
+
+class HITLBranchOperator(HITLOperator):
+    """BranchOperator based on Human-in-the-loop Response."""
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__(**kwargs)
+
+    def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
+        raise NotImplementedError
+
+
+class HITLEntryOperator(HITLOperator):
+    """Human-in-the-loop Operator that is used to accept user input through TriggerForm."""
+
+    def __init__(self, **kwargs) -> None:
+        if "options" not in kwargs:
+            kwargs["options"] = ["OK"]
+
+        if "defaults" not in kwargs:
+            kwargs["defaults"] = ["OK"]
+
+        super().__init__(**kwargs)
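
Note: `validate_params_input` above relies on a symmetric set difference, so a response is rejected when it has extra keys or missing keys relative to the declared params. A standalone sketch of that check (the names below are illustrative, not part of the provider):

# Sketch: set(a) ^ set(b) is empty only when both mappings have exactly
# the same keys, which is what validate_params_input enforces.
declared = {"information": ""}           # params declared on the operator
response_ok = {"information": "hello"}   # same key set -> accepted
response_bad = {"info": "hello"}         # renamed key -> rejected

def keys_match(declared: dict, received: dict) -> bool:
    return not (set(declared) ^ set(received))

assert keys_match(declared, response_ok)
assert not keys_match(declared, response_bad)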

airflow/providers/standard/operators/latest_only.py
@@ -99,9 +99,19 @@ class LatestOnlyOperator(BaseBranchOperator):

         from airflow.timetables.base import DataInterval, TimeRestriction

+        if dag_run.data_interval_start:
+            start = pendulum.instance(dag_run.data_interval_start)
+        else:
+            start = dagrun_date
+
+        if dag_run.data_interval_end:
+            end = pendulum.instance(dag_run.data_interval_end)
+        else:
+            end = dagrun_date
+
         current_interval = DataInterval(
-            start=pendulum.instance(dag_run.data_interval_start),
-            end=pendulum.instance(dag_run.data_interval_end),
+            start=start,
+            end=end,
         )

         time_restriction = TimeRestriction(

airflow/providers/standard/operators/python.py
@@ -62,7 +62,7 @@ if AIRFLOW_V_3_0_PLUS:
     from airflow.providers.standard.operators.branch import BaseBranchOperator
     from airflow.providers.standard.utils.skipmixin import SkipMixin
 else:
-    from airflow.models.skipmixin import SkipMixin
+    from airflow.models.skipmixin import SkipMixin
     from airflow.operators.branch import BaseBranchOperator  # type: ignore[no-redef]


@@ -330,7 +330,7 @@ class ShortCircuitOperator(PythonOperator, SkipMixin):
             self.skip(
                 dag_run=context["dag_run"],
                 tasks=to_skip,
-                execution_date=cast("DateTime", dag_run.logical_date),  # type: ignore[call-arg
+                execution_date=cast("DateTime", dag_run.logical_date),  # type: ignore[call-arg]
                 map_index=context["ti"].map_index,
             )

@@ -861,6 +861,15 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
         self.log.info("New Python virtual environment created in %s", venv_path)
         return venv_path

+    def _cleanup_python_pycache_dir(self, cache_dir_path: Path) -> None:
+        try:
+            shutil.rmtree(cache_dir_path)
+            self.log.debug("The directory %s has been deleted.", cache_dir_path)
+        except FileNotFoundError:
+            self.log.warning("Fail to delete %s. The directory does not exist.", cache_dir_path)
+        except PermissionError:
+            self.log.warning("Permission denied to delete the directory %s.", cache_dir_path)
+
     def _retrieve_index_urls_from_connection_ids(self):
         """Retrieve index URLs from Package Index connections."""
         if self.index_urls is None:
@@ -880,9 +889,13 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):

         with TemporaryDirectory(prefix="venv") as tmp_dir:
             tmp_path = Path(tmp_dir)
+            tmp_dir, temp_venv_dir = tmp_path.relative_to(tmp_path.anchor).parts
+            custom_pycache_prefix = Path(sys.pycache_prefix or "")
+            venv_python_cache_dir = Path.cwd() / custom_pycache_prefix / tmp_dir / temp_venv_dir
             self._prepare_venv(tmp_path)
             python_path = tmp_path / "bin" / "python"
             result = self._execute_python_callable_in_subprocess(python_path)
+            self._cleanup_python_pycache_dir(venv_python_cache_dir)
             return result

     def _iter_serializable_context_keys(self):
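
Note: the cleanup path above mirrors how CPython places bytecode caches when `sys.pycache_prefix` is set: the source path, minus its anchor, is reproduced under the prefix. A sketch with a hypothetical temporary directory:

import sys
from pathlib import Path

tmp_path = Path("/tmp/venv_abc123")  # hypothetical TemporaryDirectory
tmp_dir, temp_venv_dir = tmp_path.relative_to(tmp_path.anchor).parts  # ("tmp", "venv_abc123")
custom_pycache_prefix = Path(sys.pycache_prefix or "")
# When the prefix is absolute, joining it onto Path.cwd() yields the prefix
# itself; when it is relative (or empty), the cache dir lands under the cwd.
venv_python_cache_dir = Path.cwd() / custom_pycache_prefix / tmp_dir / temp_venv_dir
print(venv_python_cache_dir)  # the directory the operator removes afterwards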

airflow/providers/standard/sensors/date_time.py
@@ -99,8 +99,11 @@ class DateTimeSensor(BaseSensorOperator):

     @property
     def _moment(self) -> datetime.datetime:
-        if isinstance(self.target_time, datetime.datetime):
-            return self.target_time
+        # Note following is reachable code if Jinja is used for redering template fields and
+        # render_template_as_native_obj=True is used.
+        # In this case, the target_time is already a datetime object.
+        if isinstance(self.target_time, datetime.datetime):  # type:ignore[unreachable]
+            return self.target_time  # type:ignore[unreachable]

         return timezone.parse(self.target_time)

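
Note: the comment added above refers to DAGs defined with `render_template_as_native_obj=True`, where Jinja's native environment returns Python objects rather than strings for templated fields. A minimal illustration of that behaviour using plain Jinja, outside Airflow:

import datetime

from jinja2.nativetypes import NativeEnvironment

env = NativeEnvironment()
rendered = env.from_string("{{ dt }}").render(dt=datetime.datetime(2025, 1, 1))
# A native environment yields the datetime object itself, which is why
# _moment may receive an already-parsed target_time instead of a string.
print(type(rendered))  # <class 'datetime.datetime'>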

airflow/providers/standard/sensors/external_task.py
@@ -59,7 +59,7 @@ if TYPE_CHECKING:
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.definitions.context import Context
 else:
-    from airflow.utils.context import Context
+    from airflow.utils.context import Context


 class ExternalDagLink(BaseOperatorLink):
@@ -252,20 +252,15 @@ class ExternalTaskSensor(BaseSensorOperator):
         self.poll_interval = poll_interval

     def _get_dttm_filter(self, context):
-        logical_date = context.get("logical_date")
-        if AIRFLOW_V_3_0_PLUS:
-            if logical_date is None:
-                dag_run = context.get("dag_run")
-                if TYPE_CHECKING:
-                    assert dag_run
+        logical_date = self._get_logical_date(context)

-                logical_date = dag_run.run_after
         if self.execution_delta:
             dttm = logical_date - self.execution_delta
         elif self.execution_date_fn:
             dttm = self._handle_execution_date_fn(context=context)
         else:
             dttm = logical_date
+
         return dttm if isinstance(dttm, list) else [dttm]

     def poke(self, context: Context) -> bool:
@@ -522,6 +517,28 @@ class ExternalTaskSensor(BaseSensorOperator):
             dttm_filter, self.external_task_group_id, self.external_dag_id, session
         )

+    def _get_logical_date(self, context) -> datetime.datetime:
+        """
+        Handle backwards- and forwards-compatible retrieval of the date
+
+        to pass as the positional argument to execution_date_fn.
+        """
+        # Airflow 3.x: contexts define "logical_date" (or fall back to dag_run.run_after).
+        if AIRFLOW_V_3_0_PLUS:
+            logical_date = context.get("logical_date")
+            dag_run = context.get("dag_run")
+            if not (logical_date or (dag_run and dag_run.run_after)):
+                raise ValueError(
+                    "Either `logical_date` or `dag_run.run_after` must be provided in the context"
+                )
+            return logical_date or dag_run.run_after
+
+        # Airflow 2.x and earlier: contexts used "execution_date"
+        execution_date = context.get("execution_date")
+        if not execution_date:
+            raise ValueError("Either `execution_date` must be provided in the context`")
+        return execution_date
+
     def _handle_execution_date_fn(self, context) -> Any:
         """
         Handle backward compatibility.
@@ -534,7 +551,7 @@ class ExternalTaskSensor(BaseSensorOperator):
         from airflow.utils.operator_helpers import make_kwargs_callable

         # Remove "logical_date" because it is already a mandatory positional argument
-        logical_date = context["logical_date"]
+        logical_date = self._get_logical_date(context)
         kwargs = {k: v for k, v in context.items() if k not in {"execution_date", "logical_date"}}
         # Add "context" in the kwargs for backward compatibility (because context used to be
         # an acceptable argument of execution_date_fn)
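
Note: the new `_get_logical_date` helper centralises the date lookup used by both `_get_dttm_filter` and `_handle_execution_date_fn`. The fallback order on the Airflow 3 path can be sketched without Airflow, using a stand-in for the dag_run object:

from types import SimpleNamespace

def pick_date(context: dict):
    # Prefer context["logical_date"]; fall back to dag_run.run_after; else fail.
    logical_date = context.get("logical_date")
    dag_run = context.get("dag_run")
    if not (logical_date or (dag_run and dag_run.run_after)):
        raise ValueError("no usable date in the context")
    return logical_date or dag_run.run_after

run = SimpleNamespace(run_after="2025-01-01T00:00:00+00:00")  # stand-in DagRun
assert pick_date({"dag_run": run}) == run.run_after
assert pick_date({"logical_date": "x", "dag_run": run}) == "x"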

airflow/providers/standard/triggers/external_task.py
@@ -123,7 +123,7 @@ class WorkflowTrigger(BaseTrigger):
                 return
             allowed_count = await get_count_func(self.allowed_states)

-            if allowed_count == len(run_id_or_dates):
+            if allowed_count == len(run_id_or_dates):
                 yield TriggerEvent({"status": "success"})
                 return
             self.log.info("Sleeping for %s seconds", self.poke_interval)
@@ -140,7 +140,7 @@ class WorkflowTrigger(BaseTrigger):
         }
         if self.external_task_ids:
             count = await sync_to_async(RuntimeTaskInstance.get_ti_count)(
-                task_ids=self.external_task_ids,
+                task_ids=self.external_task_ids,
                 states=states,
                 **params,
             )
@@ -233,7 +233,7 @@ class DagStateTrigger(BaseTrigger):
                 return
         else:
             while True:
-                num_dags = await self.count_dags()
+                num_dags = await self.count_dags()
                 if num_dags == runs_ids_or_dates:
                     yield TriggerEvent(self.serialize())
                     return

airflow/providers/standard/triggers/hitl.py
@@ -0,0 +1,132 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_1_PLUS
+
+if not AIRFLOW_V_3_1_PLUS:
+    raise AirflowOptionalProviderFeatureException("Human in the loop functionality needs Airflow 3.1+.")
+
+import asyncio
+from collections.abc import AsyncIterator
+from datetime import datetime
+from typing import Any, Literal, TypedDict
+from uuid import UUID
+
+from asgiref.sync import sync_to_async
+
+from airflow.sdk.execution_time.hitl import (
+    get_hitl_detail_content_detail,
+    update_htil_detail_response,
+)
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.utils import timezone
+
+
+class HITLTriggerEventSuccessPayload(TypedDict, total=False):
+    """Minimum required keys for a success Human-in-the-loop TriggerEvent."""
+
+    chosen_options: list[str]
+    params_input: dict[str, Any]
+
+
+class HITLTriggerEventFailurePayload(TypedDict):
+    """Minimum required keys for a failed Human-in-the-loop TriggerEvent."""
+
+    error: str
+    error_type: Literal["timeout", "unknown"]
+
+
+class HITLTrigger(BaseTrigger):
+    """A trigger that checks whether Human-in-the-loop responses are received."""
+
+    def __init__(
+        self,
+        *,
+        ti_id: UUID,
+        options: list[str],
+        params: dict[str, Any],
+        defaults: list[str] | None = None,
+        multiple: bool = False,
+        timeout_datetime: datetime | None,
+        poke_interval: float = 5.0,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.ti_id = ti_id
+        self.poke_interval = poke_interval
+
+        self.options = options
+        self.multiple = multiple
+        self.defaults = defaults
+        self.timeout_datetime = timeout_datetime
+
+        self.params = params
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize HITLTrigger arguments and classpath."""
+        return (
+            "airflow.providers.standard.triggers.hitl.HITLTrigger",
+            {
+                "ti_id": self.ti_id,
+                "options": self.options,
+                "defaults": self.defaults,
+                "params": self.params,
+                "multiple": self.multiple,
+                "timeout_datetime": self.timeout_datetime,
+                "poke_interval": self.poke_interval,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Loop until the Human-in-the-loop response received or timeout reached."""
+        while True:
+            if self.timeout_datetime and self.timeout_datetime < timezone.utcnow():
+                if self.defaults is None:
+                    yield TriggerEvent(
+                        HITLTriggerEventFailurePayload(
+                            error="The timeout has passed, and the response has not yet been received.",
+                            error_type="timeout",
+                        )
+                    )
+                    return
+
+                await sync_to_async(update_htil_detail_response)(
+                    ti_id=self.ti_id,
+                    chosen_options=self.defaults,
+                    params_input=self.params,
+                )
+                yield TriggerEvent(
+                    HITLTriggerEventSuccessPayload(
+                        chosen_options=self.defaults,
+                        params_input=self.params,
+                    )
+                )
+                return
+
+            resp = await sync_to_async(get_hitl_detail_content_detail)(ti_id=self.ti_id)
+            if resp.response_received and resp.chosen_options:
+                self.log.info("Responded by %s at %s", resp.user_id, resp.response_at)
+                yield TriggerEvent(
+                    HITLTriggerEventSuccessPayload(
+                        chosen_options=resp.chosen_options,
+                        params_input=resp.params_input,
+                    )
+                )
+                return
+            await asyncio.sleep(self.poke_interval)
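
Note: `serialize` above follows the standard trigger contract: Airflow persists the returned classpath and kwargs while the task is deferred, then rebuilds an equivalent trigger on the triggerer. A generic sketch of that round trip (illustrative helper, not provider code):

import importlib

def rebuild_trigger(classpath: str, kwargs: dict):
    # Import the module, look up the class, and re-instantiate it with the
    # keyword arguments that serialize() produced.
    module_name, _, class_name = classpath.rpartition(".")
    trigger_cls = getattr(importlib.import_module(module_name), class_name)
    return trigger_cls(**kwargs)

# e.g. rebuild_trigger(*trigger.serialize()) yields an equivalent HITLTrigger,
# so every argument accepted by __init__ must appear in the serialized kwargs.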

airflow/providers/standard/utils/python_virtualenv.py
@@ -21,7 +21,6 @@ from __future__ import annotations

 import os
 import shutil
-import sys
 from pathlib import Path

 import jinja2
@@ -56,9 +55,7 @@ def _use_uv() -> bool:

 def _generate_uv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]:
     """Build the command to install the venv via UV."""
-    cmd = ["uv", "venv", "--allow-existing", "--seed"]
-    if python_bin is not None:
-        cmd += ["--python", python_bin]
+    cmd = ["uv", "venv", "--allow-existing", "--seed", "--python", python_bin]
     if system_site_packages:
         cmd.append("--system-site-packages")
     cmd.append(tmp_dir)
@@ -67,8 +64,6 @@ def _generate_uv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool)

 def _generate_venv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]:
     """We are using venv command instead of venv module to allow creation of venv for different python versions."""
-    if python_bin is None:
-        python_bin = sys.executable
     cmd = [python_bin, "-m", "venv", tmp_dir]
     if system_site_packages:
         cmd.append("--system-site-packages")
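
Note: both helpers above dropped their `python_bin is None` fallback to `sys.executable` (hence the removed `import sys`), so callers are now expected to pass a concrete interpreter path. A quick sketch of the command the uv variant builds, with a hypothetical interpreter path:

def generate_uv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]:
    # Mirrors _generate_uv_cmd after the change: --python is always present.
    cmd = ["uv", "venv", "--allow-existing", "--seed", "--python", python_bin]
    if system_site_packages:
        cmd.append("--system-site-packages")
    cmd.append(tmp_dir)
    return cmd

print(generate_uv_cmd("/tmp/venv", "/usr/local/bin/python3.12", False))
# ['uv', 'venv', '--allow-existing', '--seed', '--python', '/usr/local/bin/python3.12', '/tmp/venv']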

airflow/providers/standard/utils/skipmixin.py
@@ -27,12 +27,7 @@ from airflow.utils.log.logging_mixin import LoggingMixin

 if TYPE_CHECKING:
     from airflow.sdk.definitions._internal.node import DAGNode
-    from airflow.sdk.types import RuntimeTaskInstanceProtocol
-
-    if AIRFLOW_V_3_1_PLUS:
-        from airflow.sdk.types import Operator
-    else:
-        from airflow.models.operator import Operator
+    from airflow.sdk.types import Operator, RuntimeTaskInstanceProtocol

 # The key used by SkipMixin to store XCom data.
 XCOM_SKIPMIXIN_KEY = "skipmixin_key"
@@ -50,7 +45,7 @@ def _ensure_tasks(nodes: Iterable[DAGNode]) -> Sequence[Operator]:
         from airflow.sdk.definitions.mappedoperator import MappedOperator
     else:
         from airflow.models.baseoperator import BaseOperator  # type: ignore[no-redef]
-        from airflow.models.mappedoperator import MappedOperator
+        from airflow.models.mappedoperator import MappedOperator

     return [n for n in nodes if isinstance(n, (BaseOperator, MappedOperator))]


airflow/providers/standard/version_compat.py
@@ -39,8 +39,9 @@ AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 # DecoratedOperator -- where `DecoratedOperator._handle_output` needed `xcom_push` to exist on `BaseOperator`
 # even though it wasn't used.
 if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseOperator
+    from airflow.sdk import BaseHook, BaseOperator
 else:
+    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
     from airflow.models.baseoperator import BaseOperator  # type: ignore[no-redef]

 if AIRFLOW_V_3_0_PLUS:
@@ -55,6 +56,7 @@ __all__ = [
     "AIRFLOW_V_3_1_PLUS",
     "BaseOperator",
     "BaseOperatorLink",
+    "BaseHook",
     "BaseSensorOperator",
     "PokeReturnValue",
 ]
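
Note: with `BaseHook` now re-exported from `version_compat`, the three hooks above can share one version-independent import path. A minimal sketch of a hook written against the shim (the hook class itself is hypothetical):

from airflow.providers.standard.version_compat import BaseHook

class MyHook(BaseHook):
    # Resolves to airflow.sdk.BaseHook on Airflow 3.1+ and to
    # airflow.hooks.base.BaseHook on older versions.
    def get_conn(self):
        return None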

{apache_airflow_providers_standard-1.4.1rc2.dist-info → apache_airflow_providers_standard-1.5.0.dist-info}/METADATA
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.4.1rc2
+Version: 1.5.0
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python:
+Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
@@ -18,11 +18,12 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.
+Requires-Dist: apache-airflow>=2.10.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.5.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.5.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -53,8 +54,9 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-standard``

-Release: ``1.4.1rc2``
+Release: ``1.5.0``

+Release Date: ``|PypiReleaseDate|``

 Airflow Standard Provider

@@ -66,7 +68,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.5.0/>`_.

 Installation
 ------------
@@ -75,7 +77,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-standard``

-The package supports the following python versions: 3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12,3.13

 Requirements
 ------------
@@ -87,5 +89,5 @@ PIP package Version required
 ================== ==================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.5.0/changelog.html>`_.


{apache_airflow_providers_standard-1.4.1rc2.dist-info → apache_airflow_providers_standard-1.5.0.dist-info}/RECORD
@@ -1,8 +1,8 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=
-airflow/providers/standard/exceptions.py,sha256=
-airflow/providers/standard/get_provider_info.py,sha256=
-airflow/providers/standard/version_compat.py,sha256=
+airflow/providers/standard/__init__.py,sha256=aB_JSWWForH2rLR28WX3MxK740faq6K9vg564SXWBVQ,1497
+airflow/providers/standard/exceptions.py,sha256=8CTMCs1xVk_06piBoyP3pKX6j29riukL8V2V7miPgEU,2269
+airflow/providers/standard/get_provider_info.py,sha256=jhENLvqCXj0mzBPmJeAvPj7XWaaNuxPLhHVVA-9rqzs,7132
+airflow/providers/standard/version_compat.py,sha256=03HDmI6x17lcPHboL-OhjQ3TrIKuLS2JUu3iwYGh-iM,2612
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/decorators/bash.py,sha256=vYIu3MIGBh-qm85R3TrUHzmOAQYU1q0kyszZoWYXdGM,4396
 airflow/providers/standard/decorators/branch_external_python.py,sha256=G1l3_sYM38wEWMRnzRuVGmAbd4uMN5D07zm3EM3-ZWo,2584
@@ -22,7 +22,8 @@ airflow/providers/standard/example_dags/example_branch_operator.py,sha256=c4dr2d
 airflow/providers/standard/example_dags/example_branch_operator_decorator.py,sha256=mF_87Kqxhui6sCsWvBXJ6m_w9bUoeqrA_rUVJuyIeq4,4801
 airflow/providers/standard/example_dags/example_external_task_child_deferrable.py,sha256=o-ji3leJTBjiChEWoqVu4ykz1YVYUd8-ApmZwHFcNc8,1233
 airflow/providers/standard/example_dags/example_external_task_marker_dag.py,sha256=gssBjlfrGMDLZxTYOxo8ihXLbJ-3Uu31QodINGFWYNU,3650
-airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py,sha256=
+airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py,sha256=gPJ3VFX4b1drwW_-C4q-3cWFTDM2lupeXrwR6n2lsJE,2070
+airflow/providers/standard/example_dags/example_hitl_operator.py,sha256=Z1CIrCzL7jaRG9N-nl3asH0_Gt-yMRU2LJlrknst28c,2587
 airflow/providers/standard/example_dags/example_latest_only.py,sha256=ac9WpLMWLzyuxZks74t3HojS7vRG2gynmQfGm13gwOI,1456
 airflow/providers/standard/example_dags/example_python_decorator.py,sha256=jveqPOw1GZzD3Z37_rYc8Q8hcyx8vCNjgetpO_P6qmg,4281
 airflow/providers/standard/example_dags/example_python_operator.py,sha256=3L6CZHK2Fb7zmA9tDhZ5QaEe38WJYlS4l35Gc7xJAoE,4761
@@ -34,39 +35,42 @@ airflow/providers/standard/example_dags/example_trigger_controller_dag.py,sha256
 airflow/providers/standard/example_dags/sql/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/example_dags/sql/sample.sql,sha256=OVk1qozBY58lp_tFtnyQiLSbKRdqKn4zbxJHH_Umdek,866
 airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/hooks/filesystem.py,sha256=
-airflow/providers/standard/hooks/package_index.py,sha256=
-airflow/providers/standard/hooks/subprocess.py,sha256=
+airflow/providers/standard/hooks/filesystem.py,sha256=rAPX1EerCZxbCUITlF5I_oZzUw99_wVsiHr35Sf3xx8,2893
+airflow/providers/standard/hooks/package_index.py,sha256=tgKNV3P2TRrhnJyt-IJ43Q4Z_SxjazuNLyb-k6DUTwM,3792
+airflow/providers/standard/hooks/subprocess.py,sha256=Dte_ysI0kAx5wPUT96AkIFyEgSaUjg6zeUU7eGX0Zsw,4943
+airflow/providers/standard/models/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/operators/bash.py,sha256=yzNbi20dp6PWJzXZqJ05gyMsdb6-UsNuNrPGp2hiu1E,11366
-airflow/providers/standard/operators/branch.py,sha256=
+airflow/providers/standard/operators/branch.py,sha256=No3JoaqPjEAu7QlVf-20nzli0yYKbYorjcx885i5eeg,3970
 airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=BTeZ4KRykaEHLZigSBkevcStCrbPdQpWDMnO3ZdtZqw,1338
-airflow/providers/standard/operators/latest_only.py,sha256=
-airflow/providers/standard/operators/python.py,sha256=
+airflow/providers/standard/operators/hitl.py,sha256=clGMwgBTIXQJqGnBe6K5QRafSKQLHgDavZgt15Kg1fg,9232
+airflow/providers/standard/operators/latest_only.py,sha256=VkU-nAI8QbIrmeiv4wYXBcZF0yKMkcFapormg0J5-As,5110
+airflow/providers/standard/operators/python.py,sha256=XPrjFzIY_BrtDvG1-KPrWo_4RmALSmage0p_Cygsks4,53788
 airflow/providers/standard/operators/smooth.py,sha256=IMs5GjM42XEiroksIZ5flGQgxfRUbXZXCWxpshVinYQ,1396
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=AgJaiB4u-X-HDvdYLdseFQO_zBYb6_UijV-qmDqwqo0,16576
 airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=jiysK84IwnVpQj1_lE65E_pSPE0FO82GGtqXf_a8STA,4984
-airflow/providers/standard/sensors/date_time.py,sha256=
-airflow/providers/standard/sensors/external_task.py,sha256=
+airflow/providers/standard/sensors/date_time.py,sha256=W4lN-EXCIiJkPf6FKvJ4yx7X9vSCfKT7YjVbnjtmkrM,6481
+airflow/providers/standard/sensors/external_task.py,sha256=lDEg2Zbwp79f6VV6uH3PXI-NiHbL4IMAO4z-1VDl4gA,28695
 airflow/providers/standard/sensors/filesystem.py,sha256=jDgxZQ4WXRv1PSjc2o4K0Iq_AxnaPw7yIUnafK_VpaM,6050
 airflow/providers/standard/sensors/python.py,sha256=rpJa61FUjQRJwQwLdHpprnLdVW78rB4svhbw1U-LiWw,3408
 airflow/providers/standard/sensors/time.py,sha256=kam9KWlPahuqFo3u7CAGnQk_DBYSKawBr5MDPCGwb6g,5091
 airflow/providers/standard/sensors/time_delta.py,sha256=ggDSna-m_scLFks9zx1LoC64jQBjw7ZQqH7n96UU2BQ,7579
 airflow/providers/standard/sensors/weekday.py,sha256=sKDQ7xC9c32DZxaGNIjqmW6HXE4hIvKC71Kt-_d9SG8,4470
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/triggers/external_task.py,sha256=
+airflow/providers/standard/triggers/external_task.py,sha256=R2Wsd21pw9_gGTs9XuHafylt65hMVPisz2g6vnpLJ4o,11521
 airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
+airflow/providers/standard/triggers/hitl.py,sha256=ndBQdeiVqaehmChyryr1NX6JXBdGzx8D8l4TSIODLT8,4805
 airflow/providers/standard/triggers/temporal.py,sha256=AlSdf3iNUMrdQmMlKHi0Ms-D_OU7hIkLFsqj23mpR3Q,4446
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/utils/python_virtualenv.py,sha256=
+airflow/providers/standard/utils/python_virtualenv.py,sha256=R42AUzD6WK9TV36gRir39jRvTq8zYR5jScFLVWtUVeM,8038
 airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq6hQ9EHkOoGnAHc2_XNkZQkOJGxZArDKLc-c,2770
 airflow/providers/standard/utils/sensor_helper.py,sha256=PNIETsl_a4BkmOypFfHdpP0VuTkC6eWKUDuwnNVaWsA,5000
-airflow/providers/standard/utils/skipmixin.py,sha256=
+airflow/providers/standard/utils/skipmixin.py,sha256=PMrP2vtr5Sn6eCVslAqmEpY6Rgo6ZyfR73LPXS5NGVA,8015
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-1.4.1rc2.dist-info/entry_points.txt,sha256=
-apache_airflow_providers_standard-1.4.1rc2.dist-info/WHEEL,sha256=
-apache_airflow_providers_standard-1.4.1rc2.dist-info/METADATA,sha256=
-apache_airflow_providers_standard-1.4.1rc2.dist-info/RECORD,,
+apache_airflow_providers_standard-1.5.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.5.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-1.5.0.dist-info/METADATA,sha256=9oCC36JIHLiFGT9dAaKpZo5eKV9um6KjPPKO6i0IBV0,3827
+apache_airflow_providers_standard-1.5.0.dist-info/RECORD,,

The WHEEL and entry_points.txt files are unchanged; only their dist-info directory was renamed for the new version.