apache-airflow-providers-standard 1.6.0rc1__tar.gz → 1.7.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of apache-airflow-providers-standard might be problematic.
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/PKG-INFO +8 -9
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/README.rst +4 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/changelog.rst +64 -6
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/index.rst +3 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/bash.rst +3 -3
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/datetime.rst +6 -6
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/python.rst +4 -4
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/trigger_dag_run.rst +1 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/datetime.rst +1 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/external_task_sensor.rst +17 -17
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/provider.yaml +2 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/pyproject.toml +5 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/__init__.py +1 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/bash.py +1 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_bash_decorator.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_branch_operator.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_branch_operator_decorator.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_hitl_operator.py +11 -6
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_sensors.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_short_circuit_decorator.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_short_circuit_operator.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/exceptions.py +1 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/branch.py +11 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/hitl.py +166 -2
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/python.py +6 -6
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/trigger_dagrun.py +8 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/date_time.py +9 -9
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/filesystem.py +4 -8
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/python.py +2 -3
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/time.py +4 -8
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/triggers/hitl.py +8 -4
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/triggers/temporal.py +5 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/utils/python_virtualenv.py +10 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/utils/sensor_helper.py +3 -2
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/utils/skipmixin.py +6 -3
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/version_compat.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_bash.py +29 -30
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_external_python.py +5 -2
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_python.py +47 -22
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_short_circuit.py +5 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_hitl.py +267 -3
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_latest_only_operator.py +6 -1
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_python.py +67 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_trigger_dagrun.py +25 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_external_task_sensor.py +21 -218
- apache_airflow_providers_standard-1.7.0/tests/unit/standard/sensors/test_filesystem.py +240 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_time_delta.py +6 -7
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/triggers/test_hitl.py +11 -7
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/utils/test_sensor_helper.py +7 -4
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/utils/test_skipmixin.py +17 -5
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/commits.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/conf.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/configurations-ref.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/index.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/latest_only.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/security.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/bash.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/file.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/index.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/python.rst +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/LICENSE +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/branch_external_python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/branch_python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/branch_virtualenv.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/external_python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/python_virtualenv.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/sensor.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/short_circuit.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_bash_operator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_branch_datetime_operator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_branch_day_of_week_operator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_external_task_child_deferrable.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_external_task_marker_dag.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_latest_only.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_python_decorator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_python_operator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_sensor_decorator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_trigger_controller_dag.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/sql/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/sql/sample.sql +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/get_provider_info.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/hooks/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/hooks/filesystem.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/hooks/package_index.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/hooks/subprocess.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/models/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/bash.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/datetime.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/empty.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/latest_only.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/smooth.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/operators/weekday.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/bash.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/external_task.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/time_delta.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/sensors/weekday.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/triggers/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/triggers/external_task.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/triggers/file.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/utils/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/utils/weekday.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/conftest.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_branch_external_python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_branch_python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_branch_virtualenv.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_python_virtualenv.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/decorators/test_sensor.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/hooks/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/hooks/test_filesystem.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/hooks/test_package_index.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/hooks/test_subprocess.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_bash.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_branch_operator.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_datetime.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_smooth.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/operators/test_weekday.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_bash.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_date_time.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_python.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_time.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/sensors/test_weekday.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/test_exceptions.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/triggers/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/triggers/test_external_task.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/triggers/test_file.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/triggers/test_temporal.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/utils/__init__.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/utils/test_python_virtualenv.py +0 -0
- {apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/tests/unit/standard/utils/test_weekday.py +0 -0
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.
+Version: 1.7.0
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,10 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.
+Requires-Dist: apache-airflow>=2.10.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,9 +54,8 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-standard``

-Release: ``1.
+Release: ``1.7.0``

-Release Date: ``|PypiReleaseDate|``

 Airflow Standard Provider

@@ -68,12 +67,12 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/>`_.

 Installation
 ------------

-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-standard``

@@ -89,5 +88,5 @@ PIP package Version required
 ================== ==================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/changelog.html>`_.
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/README.rst
RENAMED
@@ -23,9 +23,8 @@

 Package ``apache-airflow-providers-standard``

-Release: ``1.
+Release: ``1.7.0``

-Release Date: ``|PypiReleaseDate|``

 Airflow Standard Provider

@@ -37,12 +36,12 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/>`_.

 Installation
 ------------

-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-standard``

@@ -58,4 +57,4 @@ PIP package Version required
 ================== ==================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/changelog.html>`_.
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/changelog.rst
RENAMED
@@ -35,28 +35,85 @@
 Changelog
 ---------

+1.7.0
+.....
+
+
+Release Date: ``|PypiReleaseDate|``
+
+Features
+~~~~~~~~
+
+* ``Add options_mapping support to HITLBranchOperator (#55093)``
+* ``feat(hitl): update url generating utility (#55022)``
+* ``feat(hitl): add utility functions for generating the url to required actions page (#54827)``
+* ``Display a more friendly error when invalid branches are provided to branch operators (#54273)``
+* ``Add owners/actors/respondents to HITLOperators (#54308)``
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fix ''BranchPythonOperator'' failure when callable returns None (#54991)``
+* ``Fix external_python task failure when ''expect_airflow=False'' (#54809)``
+* ``Fix typos in HITL-related code and comments (#54670)``
+
+Misc
+~~~~
+
+* ``refactor(hitl): rename HITLDetail.user_id as HITLDetail.responded_user_id and add HITLDetail.responded_user_name (#55019)``
+* ``Revert "Fix rendering of template fields with start from trigger" (#55037)``
+* ``Change StartTriggerArgs imports (#54856)``
+* ``Do not use HITLDetailResponse from core in sdk (#54358)``
+* ``Move DagBag to SDK and make it return SDK DAG objects (#53918)``
+* ``Remove MappedOperator inheritance (#53696)``
+
+Doc-only
+~~~~~~~~
+
+* ``Make term Dag consistent in providers docs (#55101)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Remove airflow.models.DAG (#54383)``
+   * ``Fix test_external_python tests setup (#55145)``
+   * ``Move trigger_rule utils from 'airflow/utils' to 'airflow.task'and integrate with Execution API spec (#53389)``
+   * ``Import documentation with screenshots for HITL (#54618)``
+   * ``Move filesystem sensor tests to standard provider (#54635)``
+   * ``Switch pre-commit to prek (#54258)``
+   * ``docs(hitl): fix typo in example_hitl_operator (#54537)``
+   * ``make bundle_name not nullable (#47592)``
+   * ``Remove SDK BaseOperator in TaskInstance (#53223)``
+
+.. Review and move the new changes to one of the sections above:
+   * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+
 1.6.0
 .....

 Features
 ~~~~~~~~

-* ``feat(
-* ``feat(
+* ``feat(HITL): add 'notifiers' to HITLOperator (#54128)``
+* ``feat(HITL): add HITLBranchOperator (#53960)``
 * ``feat(HITL): improve hitl trigger logging message (#53850)``
-* ``feat(
+* ``feat(HITL): add "timedout" column to HITLTriggerEventSuccessPayload (#53852)``

 Bug Fixes
 ~~~~~~~~~

 * ``Restore 'execute_complete' functionality 'TimeSensor' when 'deferrable=True' (#53669)``
 * ``Fix several deprecation warnings related to airflow.sdk (#53791)``
+* ``Fix pycache_cleanup path handling in PythonVirtualenvOperator (#54214)``
+* ``fix(HITL): guard empty options or chosen_options when writing response (#54355)``

 Misc
 ~~~~

-* ``refactor(
+* ``refactor(HITL): replace timezone usage with airflow.sdk.timezone (#53962)``
 * ``refactor(HITL): make default options class variables to avoid typo (#53849)``
+* ``Add a warning about python interpreter using with uv (#54262)``
+* ``Introduce 'StdoutCaptureManager' to isolate stdout from 'logging' logs (#54065)``
+* ``Move some items in 'airflow.utils.context' to appropriate places (#53600)``

 Doc-only
 ~~~~~~~~
@@ -68,6 +125,7 @@ Doc-only
 * ``Documentation for Human-in-the-loop operator (#53694)``
 * ``Correct HITL version warnings to avoid confusion (#53876)``
 * ``Move functions in 'airflow.utils.decorator' to more appropriate places (#53420)``
+* ``Prepare release for Aug 2025 1st wave of providers (#54193)``

 1.5.0
 .....
@@ -83,8 +141,8 @@ Bug Fixes

 * ``Fix key error in _handle_execution_date_fn for ExternalTaskSensor (#53728)``
 * ``fix: Type mismatch for DateInterval in latest only operator (#53541)``
-* ``fix(
-* ``fix(
+* ``fix(HITL): Fix HITLEntryOperator "options" and "defaults" handling (#53184)``
+* ``fix(HITL): handle hitl details when task instance is retried (#53824)``

 Misc
 ~~~~
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/index.rst
RENAMED
@@ -43,7 +43,7 @@
 :maxdepth: 1
 :caption: Resources

-Example
+Example Dags <_api/airflow/providers/standard/example_dags/index>
 PyPI Repository <https://pypi.org/project/apache-airflow-providers-standard/>
 Installing from sources <installing-providers-from-sources>
 Python API <_api/airflow/providers/standard/index>
@@ -66,9 +66,7 @@ apache-airflow-providers-standard package
 Airflow Standard Provider


-Release: 1.
-
-Release Date: ``|PypiReleaseDate|``
+Release: 1.7.0

 Provider package
 ----------------
@@ -79,7 +77,7 @@ All classes for this package are included in the ``airflow.providers.standard``
 Installation
 ------------

-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation via
 ``pip install apache-airflow-providers-standard``.
 For the minimum Airflow version supported, see ``Requirements`` below.

{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/bash.rst
RENAMED
@@ -246,7 +246,7 @@ into a temporary file. By default, the file is placed in a temporary directory


 To execute a bash script, place it in a location relative to the directory containing
-the
+the Dag file. So if your Dag file is in ``/usr/local/airflow/dags/test_dag.py``, you can
 move your ``test.sh`` file to any location under ``/usr/local/airflow/dags/`` (Example:
 ``/usr/local/airflow/dags/scripts/test.sh``) and pass the relative path to ``bash_command``
 as shown below:
@@ -280,7 +280,7 @@ in files composed in different languages, and general flexibility in structuring
 pipelines.

 It is also possible to define your ``template_searchpath`` as pointing to any folder
-locations in the
+locations in the Dag constructor call.

 .. tab-set::

@@ -302,7 +302,7 @@ locations in the DAG constructor call.
 .. code-block:: python
    :emphasize-lines: 1

-with
+with Dag("example_bash_dag", ..., template_searchpath="/opt/scripts"):
     t2 = BashOperator(
         task_id="bash_example",
         bash_command="test.sh ",
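
For reference, outside the diff itself: a minimal runnable sketch of the ``template_searchpath`` pattern the bash.rst hunks above describe. The dag id, schedule, start date and the ``/opt/scripts`` folder are illustrative assumptions, not values taken from the provider docs.

import pendulum

from airflow.providers.standard.operators.bash import BashOperator
from airflow.sdk import DAG

with DAG(
    dag_id="example_bash_searchpath",  # hypothetical Dag id
    start_date=pendulum.datetime(2025, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
    template_searchpath="/opt/scripts",  # extra folder searched when resolving templated files
):
    # Relative script path resolved via template_searchpath; note the trailing
    # space after "test.sh", matching the provider docs shown above.
    run_script = BashOperator(task_id="bash_example", bash_command="test.sh ")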
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/datetime.rst
RENAMED
@@ -24,16 +24,16 @@ Use the :class:`~airflow.providers.standard.operators.datetime.BranchDateTimeOpe
 depending on whether the time falls into the range given by two target arguments,

 This operator has two modes. First mode is to use current time (machine clock time at the
-moment the
+moment the Dag is executed), and the second mode is to use the ``logical_date`` of the Dag run it is run
 with.


 Usage with current time
 -----------------------

-The usages above might be useful in certain situations - for example when
-and maintenance and is not really supposed to be used for any
-because the "current time" make back-filling non-idempotent, its result depend on the time when the
+The usages above might be useful in certain situations - for example when Dag is used to perform cleanups
+and maintenance and is not really supposed to be used for any Dags that are supposed to be back-filled,
+because the "current time" make back-filling non-idempotent, its result depend on the time when the Dag
 actually was run. It's also slightly non-deterministic potentially even if it is run on schedule. It can
 take some time between when the DAGRun was scheduled and executed and it might mean that even if
 the DAGRun was scheduled properly, the actual time used for branching decision will be different than the
@@ -62,8 +62,8 @@ will raise an exception.
 Usage with logical date
 -----------------------

-The usage is much more "data range" friendly. The ``logical_date`` does not change when the
-it is not affected by execution delays, so this approach is suitable for idempotent
+The usage is much more "data range" friendly. The ``logical_date`` does not change when the Dag is re-run and
+it is not affected by execution delays, so this approach is suitable for idempotent Dag runs that might be
 back-filled.

 .. exampleinclude:: /../src/airflow/providers/standard/example_dags/example_branch_datetime_operator.py
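
For reference, a hedged sketch of the branching behaviour described above, using the ``logical_date`` mode. The task ids, time window and surrounding Dag settings are assumptions for illustration only.

import datetime

import pendulum

from airflow.providers.standard.operators.datetime import BranchDateTimeOperator
from airflow.providers.standard.operators.empty import EmptyOperator
from airflow.sdk import DAG

with DAG(
    dag_id="example_branch_datetime",  # hypothetical Dag id
    start_date=pendulum.datetime(2025, 1, 1, tz="UTC"),
    schedule="@daily",
    catchup=False,
):
    in_window = EmptyOperator(task_id="in_window")
    outside_window = EmptyOperator(task_id="outside_window")

    branch = BranchDateTimeOperator(
        task_id="datetime_branch",
        # True switches from machine clock time to the Dag run's logical_date,
        # which is the back-fill friendly mode discussed above.
        use_task_logical_date=True,
        target_lower=datetime.time(10, 0, 0),
        target_upper=datetime.time(11, 0, 0),
        follow_task_ids_if_true="in_window",
        follow_task_ids_if_false="outside_window",
    )
    branch >> [in_window, outside_window]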
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/python.rst
RENAMED
@@ -165,7 +165,7 @@ If you want the context related to datetime objects like ``data_interval_start``


 .. important::
-The Python function body defined to be executed is cut out of the
+The Python function body defined to be executed is cut out of the Dag into a temporary file w/o surrounding code.
 As in the examples you need to add all imports again and you can not rely on variables from the global Python context.

 If you want to pass variables into the classic :class:`~airflow.providers.standard.operators.python.PythonVirtualenvOperator` use
@@ -194,7 +194,7 @@ pip configuration as described in `pip config <https://pip.pypa.io/en/stable/top

 If you want to use additional task specific private python repositories to setup the virtual environment, you can pass the ``index_urls`` parameter which will adjust the
 pip install configurations. Passed index urls replace the standard system configured index url settings.
-To prevent adding secrets to the private repository in your
+To prevent adding secrets to the private repository in your Dag code you can use the Airflow
 :doc:`apache-airflow:authoring-and-scheduling/connections`. For this purpose the connection type ``Package Index (Python)`` can be used.
 In the ``Package Index (Python)`` connection type you can specify the index URL and credentials for the private repository.
 After creating a ``Package Index (Python)`` connection, you can provide the connection ID to the ``PythonVirtualenvOperator`` using the ``index_urls_from_connection_ids`` parameter.
@@ -216,7 +216,7 @@ for each execution.

 But still setting up the virtual environment for every execution needs some time. For repeated execution you can set the option ``venv_cache_path`` to a file system
 folder on your worker. In this case the virtual environment will be set up once and be reused. If virtual environment caching is used, per unique requirements set different
-virtual environment subfolders are created in the cache path. So depending on your variations in the
+virtual environment subfolders are created in the cache path. So depending on your variations in the Dags in your system setup sufficient disk space is needed.

 Note that no automated cleanup is made and in case of cached mode. All worker slots share the same virtual environment but if tasks are scheduled over and over on
 different workers, it might happen that virtual environment are created on multiple workers individually. Also if the worker is started in a Kubernetes POD, a restart
@@ -286,7 +286,7 @@ If you want the context related to datetime objects like ``data_interval_start``
 ``lazy_object_proxy`` to your virtual environment.

 .. important::
-The Python function body defined to be executed is cut out of the
+The Python function body defined to be executed is cut out of the Dag into a temporary file w/o surrounding code.
 As in the examples you need to add all imports again and you can not rely on variables from the global Python context.

 If you want to pass variables into the classic :class:`~airflow.providers.standard.operators.python.ExternalPythonOperator` use
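
For reference, a hedged sketch tying together the ``index_urls_from_connection_ids`` and ``venv_cache_path`` options discussed above. The requirement pin, connection id and cache path are assumptions; the hunk only names the parameter, so passing it a list here is also an assumption.

from airflow.providers.standard.operators.python import PythonVirtualenvOperator


def callable_virtualenv():
    # Runs inside the virtual environment: re-add imports here, the Dag
    # module's globals are not available to the copied function body.
    import colorama  # assumed example dependency

    print(colorama.Fore.GREEN + "running inside the virtualenv")


virtualenv_task = PythonVirtualenvOperator(
    task_id="virtualenv_python",
    python_callable=callable_virtualenv,
    requirements=["colorama==0.4.6"],
    # Resolve private index URLs from a "Package Index (Python)" connection so
    # credentials stay out of the Dag file (the connection id is hypothetical).
    index_urls_from_connection_ids=["my_package_index"],
    # Reuse one environment per unique requirements set instead of rebuilding
    # it for every task execution.
    venv_cache_path="/opt/airflow/venv-cache",
    system_site_packages=False,
)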
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/operators/trigger_dag_run.rst
RENAMED
@@ -22,7 +22,7 @@
 TriggerDagRunOperator
 =======================

-Use the :class:`~airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator` to trigger
+Use the :class:`~airflow.providers.standard.operators.trigger_dagrun.TriggerDagRunOperator` to trigger Dag from another Dag.

 .. exampleinclude:: /../src/airflow/providers/standard/example_dags/example_trigger_controller_dag.py
 :language: python
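
For reference, a hedged sketch of triggering one Dag from another as described above; the dag ids and ``conf`` payload are illustrative assumptions.

from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

trigger = TriggerDagRunOperator(
    task_id="trigger_target_dag",
    trigger_dag_id="example_trigger_target_dag",  # id of the Dag to start (hypothetical)
    conf={"message": "triggered from the controller Dag"},  # passed to the triggered run
    wait_for_completion=False,  # set True to block until the triggered run finishes
)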
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/datetime.rst
RENAMED
@@ -56,7 +56,7 @@ TimeSensor

 Use the :class:`~airflow.providers.standard.sensors.time_sensor.TimeSensor` to end sensing after time specified. ``TimeSensor`` can be run in deferrable mode, if a Triggerer is available.

-Time will be evaluated against ``data_interval_end`` if present for the
+Time will be evaluated against ``data_interval_end`` if present for the Dag run, otherwise ``run_after`` will be used.

 .. exampleinclude:: /../src/airflow/providers/standard/example_dags/example_sensors.py
 :language: python
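
For reference, a hedged sketch of a deferrable ``TimeSensor`` as described above. The import path is assumed from the provider's ``sensors/time.py`` module in the file list (the doc reference above says ``time_sensor``); the target time and task id are illustrative.

from datetime import time

from airflow.providers.standard.sensors.time import TimeSensor

wait_until_noon = TimeSensor(
    task_id="wait_until_noon",
    target_time=time(12, 0),
    deferrable=True,  # hands the wait to the triggerer instead of holding a worker slot
)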
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/docs/sensors/external_task_sensor.rst
RENAMED
@@ -15,36 +15,36 @@
 specific language governing permissions and limitations
 under the License.

-.. _howto/operator:Cross-
+.. _howto/operator:Cross-Dag Dependencies:

-Cross-
+Cross-Dag Dependencies
 ======================

-When two
-
-dependencies for tasks on the same
-tasks on the same
+When two Dags have dependency relationships, it is worth considering combining them into a single
+Dag, which is usually simpler to understand. Airflow also offers better visual representation of
+dependencies for tasks on the same Dag. However, it is sometimes not practical to put all related
+tasks on the same Dag. For example:

-- Two
-on a daily
-- Different teams are responsible for different
+- Two Dags may have different schedules. E.g. a weekly Dag may have tasks that depend on other tasks
+  on a daily Dag.
+- Different teams are responsible for different Dags, but these Dags have some cross-Dag
   dependencies.
-- A task may depend on another task on the same
+- A task may depend on another task on the same Dag, but for a different ``execution_date``
   (start of the data interval).
 - Use ``execution_delta`` for tasks running at different times, like ``execution_delta=timedelta(hours=1)``
   to check against a task that runs 1 hour earlier.

-``ExternalTaskSensor`` can be used to establish such dependencies across different
+``ExternalTaskSensor`` can be used to establish such dependencies across different Dags. When it is
 used together with ``ExternalTaskMarker``, clearing dependent tasks can also happen across different
-
+Dags.

 ExternalTaskSensor
 ^^^^^^^^^^^^^^^^^^

-Use the :class:`~airflow.providers.standard.sensors.external_task.ExternalTaskSensor` to make tasks on a
-wait for another task on a different
+Use the :class:`~airflow.providers.standard.sensors.external_task.ExternalTaskSensor` to make tasks on a Dag
+wait for another task on a different Dag for a specific ``execution_date``.

-ExternalTaskSensor also provide options to set if the Task on a remote
+ExternalTaskSensor also provide options to set if the Task on a remote Dag succeeded or failed
 via ``allowed_states`` and ``failed_states`` parameters.

 .. exampleinclude:: /../src/airflow/providers/standard/example_dags/example_external_task_marker_dag.py
@@ -64,8 +64,8 @@ Also for this action you can use sensor in the deferrable mode:

 ExternalTaskSensor with task_group dependency
 ---------------------------------------------
-In Addition, we can also use the :class:`~airflow.providers.standard.sensors.external_task.ExternalTaskSensor` to make tasks on a
-wait for another ``task_group`` on a different
+In Addition, we can also use the :class:`~airflow.providers.standard.sensors.external_task.ExternalTaskSensor` to make tasks on a Dag
+wait for another ``task_group`` on a different Dag for a specific ``execution_date``.

 .. exampleinclude:: /../src/airflow/providers/standard/example_dags/example_external_task_marker_dag.py
 :language: python
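
For reference, a hedged sketch of the cross-Dag dependency described above; the dag/task ids and the one-hour offset are illustrative assumptions.

from datetime import timedelta

from airflow.providers.standard.sensors.external_task import ExternalTaskSensor

wait_for_upstream = ExternalTaskSensor(
    task_id="wait_for_upstream",
    external_dag_id="upstream_dag",  # Dag that produces the data (hypothetical)
    external_task_id="final_task",  # task to wait for in that Dag (hypothetical)
    execution_delta=timedelta(hours=1),  # the upstream Dag runs one hour earlier
    allowed_states=["success"],
    failed_states=["failed", "skipped"],
    poke_interval=60,
)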
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/provider.yaml
RENAMED
@@ -21,12 +21,13 @@ name: Standard
 description: |
   Airflow Standard Provider
 state: ready
-source-date-epoch:
+source-date-epoch: 1756877597
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 1.7.0
   - 1.6.0
   - 1.5.0
   - 1.4.1
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/pyproject.toml
RENAMED
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-standard"
-version = "1.
+version = "1.7.0"
 description = "Provider package apache-airflow-providers-standard for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -54,10 +54,10 @@ requires-python = ">=3.10"

 # The dependencies should be modified in place in the generated file.
 # Any change in the dependencies is preserved when the file is regenerated
-# Make sure to run ``
+# Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.10.
+    "apache-airflow>=2.10.0",
 ]

 [dependency-groups]
@@ -95,8 +95,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}

 [project.urls]
-"Documentation" = "https://airflow.
-"Changelog" = "https://airflow.
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/1.7.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/__init__.py
RENAMED
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "1.
+__version__ = "1.7.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/decorators/bash.py
RENAMED
@@ -33,8 +33,8 @@ else:
 )

 from airflow.providers.standard.operators.bash import BashOperator
+from airflow.providers.standard.version_compat import context_merge
 from airflow.sdk.definitions._internal.types import SET_DURING_EXECUTION
-from airflow.utils.context import context_merge
 from airflow.utils.operator_helpers import determine_kwargs

 if TYPE_CHECKING:
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_bash_decorator.py
RENAMED
@@ -23,7 +23,12 @@ from airflow.exceptions import AirflowSkipException
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.sdk import chain, dag, task
-
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]


 @dag(schedule=None, start_date=pendulum.datetime(2023, 1, 1, tz="UTC"), catchup=False)
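
For context on the compatibility import added above (the same try/except block recurs in the other example Dags below), a hedged sketch of how the imported ``TriggerRule`` is typically consumed; the task id is illustrative.

try:
    from airflow.sdk import TriggerRule  # Airflow >= 3.1
except ImportError:
    from airflow.utils.trigger_rule import TriggerRule  # Airflow < 3.1

from airflow.providers.standard.operators.empty import EmptyOperator

join = EmptyOperator(
    task_id="join",
    # Runs when no upstream failed and at least one succeeded (skipped branches are fine).
    trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS,
)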
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_branch_operator.py
RENAMED
@@ -39,7 +39,12 @@ from airflow.providers.standard.operators.python import (
     PythonVirtualenvOperator,
 )
 from airflow.sdk import DAG, Label
-
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]

 PATH_TO_PYTHON_BINARY = sys.executable

{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_branch_operator_decorator.py
RENAMED
@@ -32,7 +32,12 @@ import pendulum

 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.sdk import DAG, Label, task
-
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]

 PATH_TO_PYTHON_BINARY = sys.executable

{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_hitl_operator.py
RENAMED
@@ -37,6 +37,7 @@ if TYPE_CHECKING:
 # [START hitl_tutorial]


+# [START hitl_notifier]
 class LocalLogNotifier(BaseNotifier):
     """Simple notifier to demonstrate HITL notification without setup any connection."""

@@ -46,10 +47,14 @@ class LocalLogNotifier(BaseNotifier):
         self.message = message

     def notify(self, context: Context) -> None:
+        url = HITLOperator.generate_link_to_ui_from_context(
+            context=context,
+            base_url="http://localhost:28080",
+        )
         self.log.info(self.message)
+        self.log.info("Url to respond %s", url)


-# [START htil_notifer]
 hitl_request_callback = LocalLogNotifier(
     message="""
     [HITL]
@@ -65,7 +70,7 @@ hitl_success_callback = LocalLogNotifier(
     message="{% set task_id = task.task_id -%}{{ ti.xcom_pull(task_ids=task_id) }}"
 )
 hitl_failure_callback = LocalLogNotifier(message="Request to response to '{{ task.subject }}' failed")
-# [END
+# [END hitl_notifier]

 with DAG(
     dag_id="example_hitl_operator",
@@ -95,7 +100,7 @@ with DAG(
     )
     # [END howto_hitl_operator]

-    # [START
+    # [START howto_hitl_operator_multiple]
     wait_for_multiple_options = HITLOperator(
         task_id="wait_for_multiple_options",
         subject="Please choose option to proceed: ",
@@ -105,7 +110,7 @@ with DAG(
         on_success_callback=hitl_success_callback,
         on_failure_callback=hitl_failure_callback,
     )
-    # [END
+    # [END howto_hitl_operator_multiple]

     # [START howto_hitl_operator_timeout]
     wait_for_default_option = HITLOperator(
@@ -127,8 +132,8 @@ with DAG(
         body="""
         Input: {{ ti.xcom_pull(task_ids='wait_for_input')["params_input"]["information"] }}
         Option: {{ ti.xcom_pull(task_ids='wait_for_option')["chosen_options"] }}
-        Multiple Options: {{ ti.xcom_pull(task_ids='
-        Timeout Option: {{ ti.xcom_pull(task_ids='
+        Multiple Options: {{ ti.xcom_pull(task_ids='wait_for_multiple_options')["chosen_options"] }}
+        Timeout Option: {{ ti.xcom_pull(task_ids='wait_for_default_option')["chosen_options"] }}
         """,
         defaults="Reject",
         execution_timeout=datetime.timedelta(minutes=1),
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_sensors.py
RENAMED
@@ -30,7 +30,12 @@ from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor
 from airflow.providers.standard.sensors.weekday import DayOfWeekSensor
 from airflow.providers.standard.utils.weekday import WeekDay
 from airflow.sdk import DAG
-
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]


 # [START example_callables]
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_short_circuit_decorator.py
RENAMED
@@ -22,7 +22,12 @@ import pendulum

 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.sdk import chain, dag, task
-
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]


 @dag(schedule=None, start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, tags=["example"])
{apache_airflow_providers_standard-1.6.0rc1 → apache_airflow_providers_standard-1.7.0}/src/airflow/providers/standard/example_dags/example_short_circuit_operator.py
RENAMED
@@ -24,7 +24,12 @@ import pendulum
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.operators.python import ShortCircuitOperator
 from airflow.sdk import DAG, chain
-
+
+try:
+    from airflow.sdk import TriggerRule
+except ImportError:
+    # Compatibility for Airflow < 3.1
+    from airflow.utils.trigger_rule import TriggerRule  # type: ignore[no-redef,attr-defined]

 with DAG(
     dag_id="example_short_circuit_operator",