apache-airflow-providers-standard 1.2.0__py3-none-any.whl → 1.3.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. airflow/providers/standard/__init__.py +1 -1
  2. airflow/providers/standard/example_dags/__init__.py +16 -0
  3. airflow/providers/standard/example_dags/example_bash_decorator.py +114 -0
  4. airflow/providers/standard/example_dags/example_bash_operator.py +74 -0
  5. airflow/providers/standard/example_dags/example_branch_datetime_operator.py +105 -0
  6. airflow/providers/standard/example_dags/example_branch_day_of_week_operator.py +61 -0
  7. airflow/providers/standard/example_dags/example_branch_operator.py +166 -0
  8. airflow/providers/standard/example_dags/example_branch_operator_decorator.py +142 -0
  9. airflow/providers/standard/example_dags/example_external_task_child_deferrable.py +34 -0
  10. airflow/providers/standard/example_dags/example_external_task_marker_dag.py +98 -0
  11. airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py +64 -0
  12. airflow/providers/standard/example_dags/example_latest_only.py +40 -0
  13. airflow/providers/standard/example_dags/example_python_decorator.py +132 -0
  14. airflow/providers/standard/example_dags/example_python_operator.py +147 -0
  15. airflow/providers/standard/example_dags/example_sensor_decorator.py +66 -0
  16. airflow/providers/standard/example_dags/example_sensors.py +135 -0
  17. airflow/providers/standard/example_dags/example_short_circuit_decorator.py +60 -0
  18. airflow/providers/standard/example_dags/example_short_circuit_operator.py +66 -0
  19. airflow/providers/standard/example_dags/example_trigger_controller_dag.py +46 -0
  20. airflow/providers/standard/example_dags/sql/__init__.py +16 -0
  21. airflow/providers/standard/example_dags/sql/sample.sql +24 -0
  22. airflow/providers/standard/operators/python.py +15 -9
  23. airflow/providers/standard/sensors/date_time.py +10 -4
  24. airflow/providers/standard/sensors/external_task.py +7 -6
  25. airflow/providers/standard/sensors/time.py +52 -40
  26. airflow/providers/standard/sensors/time_delta.py +47 -20
  27. {apache_airflow_providers_standard-1.2.0.dist-info → apache_airflow_providers_standard-1.3.0rc1.dist-info}/METADATA +7 -7
  28. {apache_airflow_providers_standard-1.2.0.dist-info → apache_airflow_providers_standard-1.3.0rc1.dist-info}/RECORD +30 -10
  29. {apache_airflow_providers_standard-1.2.0.dist-info → apache_airflow_providers_standard-1.3.0rc1.dist-info}/WHEEL +0 -0
  30. {apache_airflow_providers_standard-1.2.0.dist-info → apache_airflow_providers_standard-1.3.0rc1.dist-info}/entry_points.txt +0 -0
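
To try this release candidate locally, pin the exact RC version when installing (a minimal sketch; pip only resolves pre-releases when the version is pinned explicitly or --pre is passed):

    pip install apache-airflow-providers-standard==1.3.0rc1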
airflow/providers/standard/example_dags/example_external_task_child_deferrable.py
@@ -0,0 +1,34 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from datetime import datetime
+
+ from airflow import DAG
+ from airflow.providers.standard.operators.bash import BashOperator
+
+ with DAG(
+     dag_id="child_dag",
+     start_date=datetime(2022, 1, 1),
+     schedule="@once",
+     catchup=False,
+     tags=["example", "async", "core"],
+ ) as dag:
+     BashOperator(
+         task_id="child_task",
+         bash_command="echo 1; sleep 1; echo 2; sleep 2; echo 3; sleep 3",
+     )
airflow/providers/standard/example_dags/example_external_task_marker_dag.py
@@ -0,0 +1,98 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """
+ Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and
+ ExternalTaskMarker.
+
+ In this example, child_task1 in example_external_task_marker_child depends on parent_task in
+ example_external_task_marker_parent. When parent_task is cleared with 'Recursive' selected,
+ the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its downstream tasks.
+
+ ExternalTaskSensor will keep poking for the status of remote ExternalTaskMarker task at a regular
+ interval till one of the following will happen:
+
+ ExternalTaskMarker reaches the states mentioned in the allowed_states list.
+ In this case, ExternalTaskSensor will exit with a success status code
+
+ ExternalTaskMarker reaches the states mentioned in the failed_states list
+ In this case, ExternalTaskSensor will raise an AirflowException and user need to handle this
+ with multiple downstream tasks
+
+ ExternalTaskSensor times out. In this case, ExternalTaskSensor will raise AirflowSkipException
+ or AirflowSensorTimeout exception
+
+ """
+
+ from __future__ import annotations
+
+ import pendulum
+
+ from airflow.providers.standard.operators.empty import EmptyOperator
+ from airflow.providers.standard.sensors.external_task import ExternalTaskMarker, ExternalTaskSensor
+ from airflow.sdk import DAG
+
+ start_date = pendulum.datetime(2021, 1, 1, tz="UTC")
+
+ with DAG(
+     dag_id="example_external_task_marker_parent",
+     start_date=start_date,
+     catchup=False,
+     schedule=None,
+     tags=["example2"],
+ ) as parent_dag:
+     # [START howto_operator_external_task_marker]
+     parent_task = ExternalTaskMarker(
+         task_id="parent_task",
+         external_dag_id="example_external_task_marker_child",
+         external_task_id="child_task1",
+     )
+     # [END howto_operator_external_task_marker]
+
+ with DAG(
+     dag_id="example_external_task_marker_child",
+     start_date=start_date,
+     schedule=None,
+     catchup=False,
+     tags=["example2"],
+ ) as child_dag:
+     # [START howto_operator_external_task_sensor]
+     child_task1 = ExternalTaskSensor(
+         task_id="child_task1",
+         external_dag_id=parent_dag.dag_id,
+         external_task_id=parent_task.task_id,
+         timeout=600,
+         allowed_states=["success"],
+         failed_states=["failed", "skipped"],
+         mode="reschedule",
+     )
+     # [END howto_operator_external_task_sensor]
+
+     # [START howto_operator_external_task_sensor_with_task_group]
+     child_task2 = ExternalTaskSensor(
+         task_id="child_task2",
+         external_dag_id=parent_dag.dag_id,
+         external_task_group_id="parent_dag_task_group_id",
+         timeout=600,
+         allowed_states=["success"],
+         failed_states=["failed", "skipped"],
+         mode="reschedule",
+     )
+     # [END howto_operator_external_task_sensor_with_task_group]
+
+     child_task3 = EmptyOperator(task_id="child_task3")
+     child_task1 >> child_task2 >> child_task3
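
The docstring above notes that a timed-out ExternalTaskSensor raises either AirflowSkipException or AirflowSensorTimeout; which one you get is controlled by the standard soft_fail sensor argument. A minimal sketch under the same child_dag context as above (the task_id is hypothetical):

    lenient_task1 = ExternalTaskSensor(
        task_id="child_task1_soft_fail",  # hypothetical variant of child_task1
        external_dag_id=parent_dag.dag_id,
        external_task_id=parent_task.task_id,
        timeout=600,
        soft_fail=True,  # on timeout, skip (AirflowSkipException) instead of failing
        mode="reschedule",
    )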
airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py
@@ -0,0 +1,64 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from airflow import DAG
+ from airflow.providers.standard.operators.empty import EmptyOperator
+ from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator
+ from airflow.providers.standard.sensors.external_task import ExternalTaskSensor
+ from airflow.utils.timezone import datetime
+
+ with DAG(
+     dag_id="example_external_task",
+     start_date=datetime(2022, 1, 1),
+     schedule="@once",
+     catchup=False,
+     tags=["example", "async", "core"],
+ ) as dag:
+     start = EmptyOperator(task_id="start")
+
+     # [START howto_external_task_async_sensor]
+     external_task_sensor = ExternalTaskSensor(
+         task_id="parent_task_sensor",
+         external_task_id="child_task",
+         external_dag_id="child_dag",
+         deferrable=True,
+     )
+     # [END howto_external_task_async_sensor]
+
+     trigger_child_task = TriggerDagRunOperator(
+         task_id="trigger_child_task",
+         trigger_dag_id="child_dag",
+         allowed_states=[
+             "success",
+             "failed",
+         ],
+         logical_date="{{ logical_date }}",
+         poke_interval=5,
+         reset_dag_run=True,
+         wait_for_completion=True,
+     )
+
+     end = EmptyOperator(task_id="end")
+
+     start >> [trigger_child_task, external_task_sensor] >> end
+
+     from tests_common.test_utils.watcher import watcher
+
+     # This test needs watcher in order to properly mark success/failure
+     # when "teardown" task with trigger rule is part of the DAG
+     list(dag.tasks) >> watcher()
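
This parent DAG pairs a deferrable ExternalTaskSensor with a worker-blocking TriggerDagRunOperator. If freeing the worker slot during the wait matters, TriggerDagRunOperator also accepts deferrable=True together with wait_for_completion=True; a minimal sketch under that assumption (the task_id is hypothetical):

    trigger_child_task_deferred = TriggerDagRunOperator(
        task_id="trigger_child_task_deferred",  # hypothetical task id
        trigger_dag_id="child_dag",
        wait_for_completion=True,
        poke_interval=5,
        deferrable=True,  # wait in the triggerer rather than on a worker
    )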
airflow/providers/standard/example_dags/example_latest_only.py
@@ -0,0 +1,40 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """Example of the LatestOnlyOperator"""
+
+ from __future__ import annotations
+
+ import datetime
+
+ from airflow.providers.standard.operators.empty import EmptyOperator
+ from airflow.providers.standard.operators.latest_only import LatestOnlyOperator
+ from airflow.sdk import DAG
+
+ with DAG(
+     dag_id="latest_only",
+     schedule=datetime.timedelta(hours=4),
+     start_date=datetime.datetime(2021, 1, 1),
+     catchup=False,
+     tags=["example2", "example3"],
+ ) as dag:
+     # [START howto_operator_latest_only]
+     latest_only = LatestOnlyOperator(task_id="latest_only")
+     # [END howto_operator_latest_only]
+     task1 = EmptyOperator(task_id="task1")
+
+     latest_only >> task1
airflow/providers/standard/example_dags/example_python_decorator.py
@@ -0,0 +1,132 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """
+ Example DAG demonstrating the usage of the TaskFlow API to execute Python functions natively and within a
+ virtual environment.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ import sys
+ import time
+ from pprint import pprint
+
+ import pendulum
+
+ from airflow.sdk import dag, task
+
+ log = logging.getLogger(__name__)
+
+ PATH_TO_PYTHON_BINARY = sys.executable
+
+
+ @dag(
+     schedule=None,
+     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
+     catchup=False,
+     tags=["example"],
+ )
+ def example_python_decorator():
+     # [START howto_operator_python]
+     @task(task_id="print_the_context")
+     def print_context(ds=None, **kwargs):
+         """Print the Airflow context and ds variable from the context."""
+         pprint(kwargs)
+         print(ds)
+         return "Whatever you return gets printed in the logs"
+
+     run_this = print_context()
+     # [END howto_operator_python]
+
+     # [START howto_operator_python_render_sql]
+     @task(task_id="log_sql_query", templates_dict={"query": "sql/sample.sql"}, templates_exts=[".sql"])
+     def log_sql(**kwargs):
+         log.info("Python task decorator query: %s", str(kwargs["templates_dict"]["query"]))
+
+     log_the_sql = log_sql()
+     # [END howto_operator_python_render_sql]
+
+     # [START howto_operator_python_kwargs]
+     # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
+     @task
+     def my_sleeping_function(random_base):
+         """This is a function that will run within the DAG execution"""
+         time.sleep(random_base)
+
+     for i in range(5):
+         sleeping_task = my_sleeping_function.override(task_id=f"sleep_for_{i}")(random_base=i / 10)
+
+         run_this >> log_the_sql >> sleeping_task
+     # [END howto_operator_python_kwargs]
+
+     # [START howto_operator_python_venv]
+     @task.virtualenv(
+         task_id="virtualenv_python", requirements=["colorama==0.4.0"], system_site_packages=False
+     )
+     def callable_virtualenv():
+         """
+         Example function that will be performed in a virtual environment.
+
+         Importing at the module level ensures that it will not attempt to import the
+         library before it is installed.
+         """
+         from time import sleep
+
+         from colorama import Back, Fore, Style
+
+         print(Fore.RED + "some red text")
+         print(Back.GREEN + "and with a green background")
+         print(Style.DIM + "and in dim text")
+         print(Style.RESET_ALL)
+         for _ in range(4):
+             print(Style.DIM + "Please wait...", flush=True)
+             sleep(1)
+         print("Finished")
+
+     virtualenv_task = callable_virtualenv()
+     # [END howto_operator_python_venv]
+
+     sleeping_task >> virtualenv_task
+
+     # [START howto_operator_external_python]
+     @task.external_python(task_id="external_python", python=PATH_TO_PYTHON_BINARY)
+     def callable_external_python():
+         """
+         Example function that will be performed in a virtual environment.
+
+         Importing at the module level ensures that it will not attempt to import the
+         library before it is installed.
+         """
+         import sys
+         from time import sleep
+
+         print(f"Running task via {sys.executable}")
+         print("Sleeping")
+         for _ in range(4):
+             print("Please wait...", flush=True)
+             sleep(1)
+         print("Finished")
+
+     external_python_task = callable_external_python()
+     # [END howto_operator_external_python]
+
+     run_this >> external_python_task >> virtualenv_task
+
+
+ example_dag = example_python_decorator()
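
Because example_python_decorator() returns a DAG object, the module can be exercised outside the scheduler during review; DAG.test() runs all tasks in a single process. A minimal sketch, assuming a locally configured Airflow environment:

    if __name__ == "__main__":
        example_dag.test()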
airflow/providers/standard/example_dags/example_python_operator.py
@@ -0,0 +1,147 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ """
+ Example DAG demonstrating the usage of the classic Python operators to execute Python functions natively and
+ within a virtual environment.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ import sys
+ import time
+ from pprint import pprint
+
+ import pendulum
+
+ from airflow.providers.standard.operators.python import (
+     ExternalPythonOperator,
+     PythonOperator,
+     PythonVirtualenvOperator,
+ )
+ from airflow.sdk import DAG
+
+ log = logging.getLogger(__name__)
+
+ PATH_TO_PYTHON_BINARY = sys.executable
+
+
+ with DAG(
+     dag_id="example_python_operator",
+     schedule=None,
+     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
+     catchup=False,
+     tags=["example"],
+ ) as dag:
+     # [START howto_operator_python]
+     def print_context(ds=None, **kwargs):
+         """Print the Airflow context and ds variable from the context."""
+         print("::group::All kwargs")
+         pprint(kwargs)
+         print("::endgroup::")
+         print("::group::Context variable ds")
+         print(ds)
+         print("::endgroup::")
+         return "Whatever you return gets printed in the logs"
+
+     run_this = PythonOperator(task_id="print_the_context", python_callable=print_context)
+     # [END howto_operator_python]
+
+     # [START howto_operator_python_render_sql]
+     def log_sql(**kwargs):
+         log.info("Python task decorator query: %s", str(kwargs["templates_dict"]["query"]))
+
+     log_the_sql = PythonOperator(
+         task_id="log_sql_query",
+         python_callable=log_sql,
+         templates_dict={"query": "sql/sample.sql"},
+         templates_exts=[".sql"],
+     )
+     # [END howto_operator_python_render_sql]
+
+     # [START howto_operator_python_kwargs]
+     # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
+     def my_sleeping_function(random_base):
+         """This is a function that will run within the DAG execution"""
+         time.sleep(random_base)
+
+     for i in range(5):
+         sleeping_task = PythonOperator(
+             task_id=f"sleep_for_{i}", python_callable=my_sleeping_function, op_kwargs={"random_base": i / 10}
+         )
+
+         run_this >> log_the_sql >> sleeping_task
+     # [END howto_operator_python_kwargs]
+
+     # [START howto_operator_python_venv]
+     def callable_virtualenv():
+         """
+         Example function that will be performed in a virtual environment.
+
+         Importing at the function level ensures that it will not attempt to import the
+         library before it is installed.
+         """
+         from time import sleep
+
+         from colorama import Back, Fore, Style
+
+         print(Fore.RED + "some red text")
+         print(Back.GREEN + "and with a green background")
+         print(Style.DIM + "and in dim text")
+         print(Style.RESET_ALL)
+         for _ in range(4):
+             print(Style.DIM + "Please wait...", flush=True)
+             sleep(1)
+         print("Finished")
+
+     virtualenv_task = PythonVirtualenvOperator(
+         task_id="virtualenv_python",
+         python_callable=callable_virtualenv,
+         requirements=["colorama==0.4.0"],
+         system_site_packages=False,
+     )
+     # [END howto_operator_python_venv]
+
+     sleeping_task >> virtualenv_task
+
+     # [START howto_operator_external_python]
+     def callable_external_python():
+         """
+         Example function that will be performed in a virtual environment.
+
+         Importing at the module level ensures that it will not attempt to import the
+         library before it is installed.
+         """
+         import sys
+         from time import sleep
+
+         print(f"Running task via {sys.executable}")
+         print("Sleeping")
+         for _ in range(4):
+             print("Please wait...", flush=True)
+             sleep(1)
+         print("Finished")
+
+     external_python_task = ExternalPythonOperator(
+         task_id="external_python",
+         python_callable=callable_external_python,
+         python=PATH_TO_PYTHON_BINARY,
+     )
+     # [END howto_operator_external_python]
+
+     run_this >> external_python_task >> virtualenv_task
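
The loop above feeds per-task arguments through op_kwargs; since op_args and op_kwargs are templated fields on PythonOperator, the same mechanism also accepts Jinja expressions. A minimal sketch (the task id and callable are hypothetical):

    def print_ds(ds):
        print(ds)

    templated_task = PythonOperator(
        task_id="print_templated_ds",  # hypothetical task id
        python_callable=print_ds,
        op_kwargs={"ds": "{{ ds }}"},  # rendered by Jinja before the callable runs
    )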
airflow/providers/standard/example_dags/example_sensor_decorator.py
@@ -0,0 +1,66 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ """Example DAG demonstrating the usage of the sensor decorator."""
+
+ from __future__ import annotations
+
+ # [START tutorial]
+ # [START import_module]
+ import pendulum
+
+ from airflow.sdk import PokeReturnValue, dag, task
+
+ # [END import_module]
+
+
+ # [START instantiate_dag]
+ @dag(
+     schedule=None,
+     start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
+     catchup=False,
+     tags=["example"],
+ )
+ def example_sensor_decorator():
+     # [END instantiate_dag]
+
+     # [START wait_function]
+     # Using a sensor operator to wait for the upstream data to be ready.
+     @task.sensor(poke_interval=60, timeout=3600, mode="reschedule")
+     def wait_for_upstream() -> PokeReturnValue:
+         return PokeReturnValue(is_done=True, xcom_value="xcom_value")
+
+     # [END wait_function]
+
+     # [START dummy_function]
+     @task
+     def dummy_operator() -> None:
+         pass
+
+     # [END dummy_function]
+
+     # [START main_flow]
+     wait_for_upstream() >> dummy_operator()
+     # [END main_flow]
+
+
+ # [START dag_invocation]
+ tutorial_etl_dag = example_sensor_decorator()
+ # [END dag_invocation]
+
+ # [END tutorial]
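
When the sensor above completes, the xcom_value from PokeReturnValue is pushed as the task's return value, so a downstream TaskFlow task can consume it directly instead of the bare ordering used in main_flow. A minimal sketch (the consumer function is hypothetical):

    @task
    def consume(value: str) -> None:
        print(value)  # prints "xcom_value"

    consume(wait_for_upstream())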