apache-airflow-providers-standard 0.1.0rc1__py3-none-any.whl → 1.0.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-standard might be problematic; consult the package registry page for more details.

Files changed (37)
  1. airflow/providers/standard/LICENSE +52 -0
  2. airflow/providers/standard/__init__.py +1 -23
  3. airflow/providers/standard/get_provider_info.py +7 -52
  4. airflow/providers/standard/operators/bash.py +28 -82
  5. airflow/providers/standard/operators/datetime.py +3 -8
  6. airflow/providers/standard/operators/weekday.py +4 -11
  7. airflow/providers/standard/sensors/bash.py +5 -11
  8. airflow/providers/standard/sensors/date_time.py +8 -32
  9. airflow/providers/standard/sensors/time.py +5 -28
  10. airflow/providers/standard/sensors/time_delta.py +10 -48
  11. airflow/providers/standard/sensors/weekday.py +2 -7
  12. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/METADATA +36 -20
  13. apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +17 -0
  14. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/WHEEL +1 -1
  15. airflow/providers/standard/hooks/__init__.py +0 -16
  16. airflow/providers/standard/hooks/filesystem.py +0 -89
  17. airflow/providers/standard/hooks/package_index.py +0 -95
  18. airflow/providers/standard/hooks/subprocess.py +0 -119
  19. airflow/providers/standard/operators/empty.py +0 -39
  20. airflow/providers/standard/operators/generic_transfer.py +0 -138
  21. airflow/providers/standard/operators/latest_only.py +0 -83
  22. airflow/providers/standard/operators/python.py +0 -1132
  23. airflow/providers/standard/operators/trigger_dagrun.py +0 -292
  24. airflow/providers/standard/sensors/external_task.py +0 -509
  25. airflow/providers/standard/sensors/filesystem.py +0 -158
  26. airflow/providers/standard/sensors/python.py +0 -85
  27. airflow/providers/standard/triggers/__init__.py +0 -16
  28. airflow/providers/standard/triggers/external_task.py +0 -211
  29. airflow/providers/standard/triggers/file.py +0 -131
  30. airflow/providers/standard/triggers/temporal.py +0 -114
  31. airflow/providers/standard/utils/__init__.py +0 -16
  32. airflow/providers/standard/utils/python_virtualenv.py +0 -209
  33. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -77
  34. airflow/providers/standard/utils/sensor_helper.py +0 -119
  35. airflow/providers/standard/version_compat.py +0 -36
  36. apache_airflow_providers_standard-0.1.0rc1.dist-info/RECORD +0 -38
  37. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/entry_points.txt +0 -0
@@ -1,209 +0,0 @@
1
- #
2
- # Licensed to the Apache Software Foundation (ASF) under one
3
- # or more contributor license agreements. See the NOTICE file
4
- # distributed with this work for additional information
5
- # regarding copyright ownership. The ASF licenses this file
6
- # to you under the Apache License, Version 2.0 (the
7
- # "License"); you may not use this file except in compliance
8
- # with the License. You may obtain a copy of the License at
9
- #
10
- # http://www.apache.org/licenses/LICENSE-2.0
11
- #
12
- # Unless required by applicable law or agreed to in writing,
13
- # software distributed under the License is distributed on an
14
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
- # KIND, either express or implied. See the License for the
16
- # specific language governing permissions and limitations
17
- # under the License.
18
- """Utilities for creating a virtual environment."""
19
-
20
- from __future__ import annotations
21
-
22
- import os
23
- import shutil
24
- import sys
25
- from pathlib import Path
26
-
27
- import jinja2
28
- from jinja2 import select_autoescape
29
-
30
- from airflow.configuration import conf
31
- from airflow.utils.process_utils import execute_in_subprocess
32
-
33
-
34
- def _is_uv_installed() -> bool:
35
- """
36
- Verify whether the uv tool is installed by checking if it's included in the system PATH or installed as a package.
37
-
38
- :return: True if it is. Whichever way of checking it works, is fine.
39
- """
40
- return bool(shutil.which("uv"))
41
-
42
-
43
- def _use_uv() -> bool:
44
- """
45
- Check if the uv tool should be used.
46
-
47
- :return: True if uv should be used.
48
- """
49
- venv_install_method = conf.get("standard", "venv_install_method", fallback="auto").lower()
50
- if venv_install_method == "auto":
51
- return _is_uv_installed()
52
- elif venv_install_method == "uv":
53
- return True
54
- return False
55
-
56
-
57
- def _generate_uv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]:
58
- """Build the command to install the venv via UV."""
59
- cmd = ["uv", "venv", "--allow-existing", "--seed"]
60
- if python_bin is not None:
61
- cmd += ["--python", python_bin]
62
- if system_site_packages:
63
- cmd.append("--system-site-packages")
64
- cmd.append(tmp_dir)
65
- return cmd
66
-
67
-
68
- def _generate_venv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) -> list[str]:
69
- """We are using venv command instead of venv module to allow creation of venv for different python versions."""
70
- if python_bin is None:
71
- python_bin = sys.executable
72
- cmd = [python_bin, "-m", "venv", tmp_dir]
73
- if system_site_packages:
74
- cmd.append("--system-site-packages")
75
- return cmd
76
-
77
-
78
- def _generate_uv_install_cmd_from_file(
79
- tmp_dir: str, requirements_file_path: str, pip_install_options: list[str]
80
- ) -> list[str]:
81
- return [
82
- "uv",
83
- "pip",
84
- "install",
85
- "--python",
86
- f"{tmp_dir}/bin/python",
87
- *pip_install_options,
88
- "-r",
89
- requirements_file_path,
90
- ]
91
-
92
-
93
- def _generate_pip_install_cmd_from_file(
94
- tmp_dir: str, requirements_file_path: str, pip_install_options: list[str]
95
- ) -> list[str]:
96
- return [f"{tmp_dir}/bin/pip", "install", *pip_install_options, "-r", requirements_file_path]
97
-
98
-
99
- def _generate_uv_install_cmd_from_list(
100
- tmp_dir: str, requirements: list[str], pip_install_options: list[str]
101
- ) -> list[str]:
102
- return ["uv", "pip", "install", "--python", f"{tmp_dir}/bin/python", *pip_install_options, *requirements]
103
-
104
-
105
- def _generate_pip_install_cmd_from_list(
106
- tmp_dir: str, requirements: list[str], pip_install_options: list[str]
107
- ) -> list[str]:
108
- return [f"{tmp_dir}/bin/pip", "install", *pip_install_options, *requirements]
109
-
110
-
111
- def _generate_pip_conf(conf_file: Path, index_urls: list[str]) -> None:
112
- if index_urls:
113
- pip_conf_options = f"index-url = {index_urls[0]}"
114
- if len(index_urls) > 1:
115
- pip_conf_options += f"\nextra-index-url = {' '.join(x for x in index_urls[1:])}"
116
- else:
117
- pip_conf_options = "no-index = true"
118
- conf_file.write_text(f"[global]\n{pip_conf_options}")
119
-
120
-
121
- def prepare_virtualenv(
122
- venv_directory: str,
123
- python_bin: str,
124
- system_site_packages: bool,
125
- requirements: list[str] | None = None,
126
- requirements_file_path: str | None = None,
127
- pip_install_options: list[str] | None = None,
128
- index_urls: list[str] | None = None,
129
- ) -> str:
130
- """
131
- Create a virtual environment and install the additional python packages.
132
-
133
- :param venv_directory: The path for directory where the environment will be created.
134
- :param python_bin: Path to the Python executable.
135
- :param system_site_packages: Whether to include system_site_packages in your virtualenv.
136
- See virtualenv documentation for more information.
137
- :param requirements: List of additional python packages.
138
- :param requirements_file_path: Path to the ``requirements.txt`` file.
139
- :param pip_install_options: a list of pip install options when installing requirements
140
- See 'pip install -h' for available options
141
- :param index_urls: an optional list of index urls to load Python packages from.
142
- If not provided the system pip conf will be used to source packages from.
143
- :return: Path to a binary file with Python in a virtual environment.
144
- """
145
- if pip_install_options is None:
146
- pip_install_options = []
147
-
148
- if requirements is not None and requirements_file_path is not None:
149
- raise ValueError("Either requirements OR requirements_file_path has to be passed, but not both")
150
-
151
- if index_urls is not None:
152
- _generate_pip_conf(Path(venv_directory) / "pip.conf", index_urls)
153
-
154
- if _use_uv():
155
- venv_cmd = _generate_uv_cmd(venv_directory, python_bin, system_site_packages)
156
- else:
157
- venv_cmd = _generate_venv_cmd(venv_directory, python_bin, system_site_packages)
158
- execute_in_subprocess(venv_cmd)
159
-
160
- pip_cmd = None
161
- if requirements is not None and len(requirements) != 0:
162
- if _use_uv():
163
- pip_cmd = _generate_uv_install_cmd_from_list(venv_directory, requirements, pip_install_options)
164
- else:
165
- pip_cmd = _generate_pip_install_cmd_from_list(venv_directory, requirements, pip_install_options)
166
- if requirements_file_path is not None and requirements_file_path:
167
- if _use_uv():
168
- pip_cmd = _generate_uv_install_cmd_from_file(
169
- venv_directory, requirements_file_path, pip_install_options
170
- )
171
- else:
172
- pip_cmd = _generate_pip_install_cmd_from_file(
173
- venv_directory, requirements_file_path, pip_install_options
174
- )
175
-
176
- if pip_cmd:
177
- execute_in_subprocess(pip_cmd)
178
-
179
- return f"{venv_directory}/bin/python"
180
-
181
-
182
- def write_python_script(
183
- jinja_context: dict,
184
- filename: str,
185
- render_template_as_native_obj: bool = False,
186
- ):
187
- """
188
- Render the python script to a file to execute in the virtual environment.
189
-
190
- :param jinja_context: The jinja context variables to unpack and replace with its placeholders in the
191
- template file.
192
- :param filename: The name of the file to dump the rendered script to.
193
- :param render_template_as_native_obj: If ``True``, rendered Jinja template would be converted
194
- to a native Python object
195
- """
196
- template_loader = jinja2.FileSystemLoader(searchpath=os.path.dirname(__file__))
197
- template_env: jinja2.Environment
198
- if render_template_as_native_obj:
199
- template_env = jinja2.nativetypes.NativeEnvironment(
200
- loader=template_loader, undefined=jinja2.StrictUndefined
201
- )
202
- else:
203
- template_env = jinja2.Environment(
204
- loader=template_loader,
205
- undefined=jinja2.StrictUndefined,
206
- autoescape=select_autoescape(["html", "xml"]),
207
- )
208
- template = template_env.get_template("python_virtualenv_script.jinja2")
209
- template.stream(**jinja_context).dump(filename)
@@ -1,77 +0,0 @@
1
- {#
2
- Licensed to the Apache Software Foundation (ASF) under one
3
- or more contributor license agreements. See the NOTICE file
4
- distributed with this work for additional information
5
- regarding copyright ownership. The ASF licenses this file
6
- to you under the Apache License, Version 2.0 (the
7
- "License"); you may not use this file except in compliance
8
- with the License. You may obtain a copy of the License at
9
-
10
- http://www.apache.org/licenses/LICENSE-2.0
11
-
12
- Unless required by applicable law or agreed to in writing,
13
- software distributed under the License is distributed on an
14
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
- KIND, either express or implied. See the License for the
16
- specific language governing permissions and limitations
17
- under the License.
18
- -#}
19
- from __future__ import annotations
20
-
21
- import {{ pickling_library }}
22
- import sys
23
-
24
- {% if expect_airflow %}
25
- {# Check whether Airflow is available in the environment.
26
- # If it is, we'll want to ensure that we integrate any macros that are being provided
27
- # by plugins prior to unpickling the task context. #}
28
- if sys.version_info >= (3,6):
29
- try:
30
- from airflow.plugins_manager import integrate_macros_plugins
31
- integrate_macros_plugins()
32
- except ImportError:
33
- {# Airflow is not available in this environment, therefore we won't
34
- # be able to integrate any plugin macros. #}
35
- pass
36
- {% endif %}
37
-
38
- # Script
39
- {{ python_callable_source }}
40
-
41
- # monkey patching for the cases when python_callable is part of the dag module.
42
- {% if modified_dag_module_name is defined %}
43
-
44
- import types
45
-
46
- {{ modified_dag_module_name }} = types.ModuleType("{{ modified_dag_module_name }}")
47
-
48
- {{ modified_dag_module_name }}.{{ python_callable }} = {{ python_callable }}
49
-
50
- sys.modules["{{modified_dag_module_name}}"] = {{modified_dag_module_name}}
51
-
52
- {% endif%}
53
-
54
- {% if op_args or op_kwargs %}
55
- with open(sys.argv[1], "rb") as file:
56
- arg_dict = {{ pickling_library }}.load(file)
57
- {% else %}
58
- arg_dict = {"args": [], "kwargs": {}}
59
- {% endif %}
60
-
61
- {% if string_args_global | default(true) -%}
62
- # Read string args
63
- with open(sys.argv[3], "r") as file:
64
- virtualenv_string_args = list(map(lambda x: x.strip(), list(file)))
65
- {% endif %}
66
-
67
- try:
68
- res = {{ python_callable }}(*arg_dict["args"], **arg_dict["kwargs"])
69
- except Exception as e:
70
- with open(sys.argv[4], "w") as file:
71
- file.write(str(e))
72
- raise
73
-
74
- # Write output
75
- with open(sys.argv[2], "wb") as file:
76
- if res is not None:
77
- {{ pickling_library }}.dump(res, file)
@@ -1,119 +0,0 @@
1
- # Licensed to the Apache Software Foundation (ASF) under one
2
- # or more contributor license agreements. See the NOTICE file
3
- # distributed with this work for additional information
4
- # regarding copyright ownership. The ASF licenses this file
5
- # to you under the Apache License, Version 2.0 (the
6
- # "License"); you may not use this file except in compliance
7
- # with the License. You may obtain a copy of the License at
8
- #
9
- # http://www.apache.org/licenses/LICENSE-2.0
10
- #
11
- # Unless required by applicable law or agreed to in writing,
12
- # software distributed under the License is distributed on an
13
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
- # KIND, either express or implied. See the License for the
15
- # specific language governing permissions and limitations
16
- # under the License.
17
- from __future__ import annotations
18
-
19
- from typing import TYPE_CHECKING, cast
20
-
21
- from sqlalchemy import func, select, tuple_
22
-
23
- from airflow.models import DagBag, DagRun, TaskInstance
24
- from airflow.utils.session import NEW_SESSION, provide_session
25
-
26
- if TYPE_CHECKING:
27
- from sqlalchemy.orm import Session
28
- from sqlalchemy.sql import Executable
29
-
30
-
31
- @provide_session
32
- def _get_count(
33
- dttm_filter,
34
- external_task_ids,
35
- external_task_group_id,
36
- external_dag_id,
37
- states,
38
- session: Session = NEW_SESSION,
39
- ) -> int:
40
- """
41
- Get the count of records against dttm filter and states.
42
-
43
- :param dttm_filter: date time filter for logical date
44
- :param external_task_ids: The list of task_ids
45
- :param external_task_group_id: The ID of the external task group
46
- :param external_dag_id: The ID of the external DAG.
47
- :param states: task or dag states
48
- :param session: airflow session object
49
- """
50
- TI = TaskInstance
51
- DR = DagRun
52
- if not dttm_filter:
53
- return 0
54
-
55
- if external_task_ids:
56
- count = (
57
- session.scalar(
58
- _count_stmt(TI, states, dttm_filter, external_dag_id).where(TI.task_id.in_(external_task_ids))
59
- )
60
- ) / len(external_task_ids)
61
- elif external_task_group_id:
62
- external_task_group_task_ids = _get_external_task_group_task_ids(
63
- dttm_filter, external_task_group_id, external_dag_id, session
64
- )
65
- if not external_task_group_task_ids:
66
- count = 0
67
- else:
68
- count = (
69
- session.scalar(
70
- _count_stmt(TI, states, dttm_filter, external_dag_id).where(
71
- tuple_(TI.task_id, TI.map_index).in_(external_task_group_task_ids)
72
- )
73
- )
74
- ) / len(external_task_group_task_ids)
75
- else:
76
- count = session.scalar(_count_stmt(DR, states, dttm_filter, external_dag_id))
77
- return cast(int, count)
78
-
79
-
80
- def _count_stmt(model, states, dttm_filter, external_dag_id) -> Executable:
81
- """
82
- Get the count of records against dttm filter and states.
83
-
84
- :param model: The SQLAlchemy model representing the relevant table.
85
- :param states: task or dag states
86
- :param dttm_filter: date time filter for logical date
87
- :param external_dag_id: The ID of the external DAG.
88
- """
89
- return select(func.count()).where(
90
- model.dag_id == external_dag_id, model.state.in_(states), model.logical_date.in_(dttm_filter)
91
- )
92
-
93
-
94
- def _get_external_task_group_task_ids(dttm_filter, external_task_group_id, external_dag_id, session):
95
- """
96
- Get the count of records against dttm filter and states.
97
-
98
- :param dttm_filter: date time filter for logical date
99
- :param external_task_group_id: The ID of the external task group
100
- :param external_dag_id: The ID of the external DAG.
101
- :param session: airflow session object
102
- """
103
- refreshed_dag_info = DagBag(read_dags_from_db=True).get_dag(external_dag_id, session)
104
- task_group = refreshed_dag_info.task_group_dict.get(external_task_group_id)
105
-
106
- if task_group:
107
- group_tasks = session.scalars(
108
- select(TaskInstance).filter(
109
- TaskInstance.dag_id == external_dag_id,
110
- TaskInstance.task_id.in_(task.task_id for task in task_group),
111
- TaskInstance.logical_date.in_(dttm_filter),
112
- )
113
- )
114
-
115
- return [(t.task_id, t.map_index) for t in group_tasks]
116
-
117
- # returning default task_id as group_id itself, this will avoid any failure in case of
118
- # 'check_existence=False' and will fail on timeout
119
- return [(external_task_group_id, -1)]
@@ -1,36 +0,0 @@
1
- # Licensed to the Apache Software Foundation (ASF) under one
2
- # or more contributor license agreements. See the NOTICE file
3
- # distributed with this work for additional information
4
- # regarding copyright ownership. The ASF licenses this file
5
- # to you under the Apache License, Version 2.0 (the
6
- # "License"); you may not use this file except in compliance
7
- # with the License. You may obtain a copy of the License at
8
- #
9
- # http://www.apache.org/licenses/LICENSE-2.0
10
- #
11
- # Unless required by applicable law or agreed to in writing,
12
- # software distributed under the License is distributed on an
13
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
- # KIND, either express or implied. See the License for the
15
- # specific language governing permissions and limitations
16
- # under the License.
17
- #
18
- # NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
19
- # DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
20
- # ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
21
- # THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
22
- #
23
- from __future__ import annotations
24
-
25
-
26
- def get_base_airflow_version_tuple() -> tuple[int, int, int]:
27
- from packaging.version import Version
28
-
29
- from airflow import __version__
30
-
31
- airflow_version = Version(__version__)
32
- return airflow_version.major, airflow_version.minor, airflow_version.micro
33
-
34
-
35
- AIRFLOW_V_2_10_PLUS = get_base_airflow_version_tuple() >= (2, 10, 0)
36
- AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
@@ -1,38 +0,0 @@
1
- airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/standard/__init__.py,sha256=_0SheSBOknCZLmPVxMFYyFxMp7FYpHj-cI8mbJodsBE,1495
3
- airflow/providers/standard/get_provider_info.py,sha256=zFnTra9uOUt8ZdhaCqkoNTVqqAy51VP16SI56dk-YfM,4939
4
- airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
5
- airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
6
- airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
7
- airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
8
- airflow/providers/standard/hooks/subprocess.py,sha256=GAmdF69jwUcpc7DH5I42GnJRs6NMQvHwFhimWpIdTU4,4920
9
- airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
10
- airflow/providers/standard/operators/bash.py,sha256=AMSTPCgtArDE78XQ01b5jf1d3HEbZuP8_xz9dyXQgKc,13664
11
- airflow/providers/standard/operators/datetime.py,sha256=vsn2eaeVvUZBLXTzrEIC5Bd9svk81gM2VlxXCcmZhHY,4749
12
- airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
13
- airflow/providers/standard/operators/generic_transfer.py,sha256=BFCDTc_GTd6LNfU0Mr8Fx_MzGL9qcKNwzC4KNzD1gUw,5247
14
- airflow/providers/standard/operators/latest_only.py,sha256=NlpkrHk3QblaXYEFowLy9pRS-l0zpFtI12bDmF-t9Lo,3291
15
- airflow/providers/standard/operators/python.py,sha256=ZLeesBNGbZLFGeeEe9irZUfJUvceoQxEwYvnejzoPs4,49247
16
- airflow/providers/standard/operators/trigger_dagrun.py,sha256=lPbV-FR_6RHB6XDv58Fc8N92o3MEYfNJPFxt9h1SPFw,12301
17
- airflow/providers/standard/operators/weekday.py,sha256=XL1fMejCoCrifl52t9QmlrnavL3Nm3_VYbhUMWhI10I,4841
18
- airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
19
- airflow/providers/standard/sensors/bash.py,sha256=afyz1m-1qzAp1fE5ta71rXhpTrKcCH7bNfwUU2Hv7GQ,5025
20
- airflow/providers/standard/sensors/date_time.py,sha256=hRUuLaNgqDh4jqaIaD8zdyq2BUXkpWM2NzJN5YkwTJI,6077
21
- airflow/providers/standard/sensors/external_task.py,sha256=T5cCj1txJUjnql6cHZayDqSjfWCE-zOxJS9-nxkSuio,23840
22
- airflow/providers/standard/sensors/filesystem.py,sha256=rfupSeHtFGdAcL6cw3H6u6ttBxogSThYiPqsUKgABMU,6029
23
- airflow/providers/standard/sensors/python.py,sha256=kvgpHN8hiyxJPlw9HsVpna0X6NRt0iTDvFFjqt3KFtQ,3405
24
- airflow/providers/standard/sensors/time.py,sha256=Pc9BZqqTQy3Qqz7uME9yF4qmWsXYCzAoAlsmwgpAraY,5007
25
- airflow/providers/standard/sensors/time_delta.py,sha256=H1jSNT72e-83usqMPMIRSgnR41IAFwkrafmE006jAOc,6012
26
- airflow/providers/standard/sensors/weekday.py,sha256=GdYa-DdKdQ_cOpuAFppHSaDKrzGGvVha4BfkoiJLTpM,3884
27
- airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
28
- airflow/providers/standard/triggers/external_task.py,sha256=iZn-WsjTlJRd780xVds6rrTOrfLkf-Bp3Q1PbGfbYuU,8476
29
- airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
30
- airflow/providers/standard/triggers/temporal.py,sha256=Aub7Cp3HsPdeardF2jp-Z5nIRwzqtK9-aOlWtfKQfcg,4809
31
- airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
32
- airflow/providers/standard/utils/python_virtualenv.py,sha256=FR3241l5Obuo2BBwwBs-s87pRpCLyJnh3sUtHxrgRuM,7759
33
- airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=bn_QOYOj8Q2k-RE77LKgCy3iDTuv9vllyBAD4yeCb-A,2502
34
- airflow/providers/standard/utils/sensor_helper.py,sha256=BeaWt9X4PUE49V3QAG8WPHj3fWwUGeZngS5_Y8g_auA,4401
35
- apache_airflow_providers_standard-0.1.0rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
36
- apache_airflow_providers_standard-0.1.0rc1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
37
- apache_airflow_providers_standard-0.1.0rc1.dist-info/METADATA,sha256=xJn_2eu37xGtXoyiuW0bFfvX-Y6BkpsuxfnQi-z2fUU,4023
38
- apache_airflow_providers_standard-0.1.0rc1.dist-info/RECORD,,