apache-airflow-providers-standard 0.1.0rc1__py3-none-any.whl → 1.0.0.dev0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.

Files changed (37)
  1. airflow/providers/standard/LICENSE +52 -0
  2. airflow/providers/standard/__init__.py +1 -23
  3. airflow/providers/standard/get_provider_info.py +7 -54
  4. airflow/providers/standard/operators/datetime.py +3 -8
  5. airflow/providers/standard/operators/weekday.py +4 -11
  6. airflow/providers/standard/sensors/date_time.py +8 -32
  7. airflow/providers/standard/sensors/time.py +5 -28
  8. airflow/providers/standard/sensors/time_delta.py +10 -48
  9. airflow/providers/standard/sensors/weekday.py +2 -7
  10. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev0.dist-info}/METADATA +36 -20
  11. apache_airflow_providers_standard-1.0.0.dev0.dist-info/RECORD +15 -0
  12. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev0.dist-info}/WHEEL +1 -1
  13. airflow/providers/standard/hooks/__init__.py +0 -16
  14. airflow/providers/standard/hooks/filesystem.py +0 -89
  15. airflow/providers/standard/hooks/package_index.py +0 -95
  16. airflow/providers/standard/hooks/subprocess.py +0 -119
  17. airflow/providers/standard/operators/bash.py +0 -310
  18. airflow/providers/standard/operators/empty.py +0 -39
  19. airflow/providers/standard/operators/generic_transfer.py +0 -138
  20. airflow/providers/standard/operators/latest_only.py +0 -83
  21. airflow/providers/standard/operators/python.py +0 -1132
  22. airflow/providers/standard/operators/trigger_dagrun.py +0 -292
  23. airflow/providers/standard/sensors/bash.py +0 -120
  24. airflow/providers/standard/sensors/external_task.py +0 -509
  25. airflow/providers/standard/sensors/filesystem.py +0 -158
  26. airflow/providers/standard/sensors/python.py +0 -85
  27. airflow/providers/standard/triggers/__init__.py +0 -16
  28. airflow/providers/standard/triggers/external_task.py +0 -211
  29. airflow/providers/standard/triggers/file.py +0 -131
  30. airflow/providers/standard/triggers/temporal.py +0 -114
  31. airflow/providers/standard/utils/__init__.py +0 -16
  32. airflow/providers/standard/utils/python_virtualenv.py +0 -209
  33. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -77
  34. airflow/providers/standard/utils/sensor_helper.py +0 -119
  35. airflow/providers/standard/version_compat.py +0 -36
  36. apache_airflow_providers_standard-0.1.0rc1.dist-info/RECORD +0 -38
  37. {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev0.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/operators/trigger_dagrun.py
@@ -1,292 +0,0 @@
- #
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- from __future__ import annotations
-
- import datetime
- import json
- import time
- from collections.abc import Sequence
- from typing import TYPE_CHECKING, Any
-
- from sqlalchemy import select
- from sqlalchemy.orm.exc import NoResultFound
-
- from airflow.api.common.trigger_dag import trigger_dag
- from airflow.configuration import conf
- from airflow.exceptions import (
-     AirflowException,
-     AirflowSkipException,
-     DagNotFound,
-     DagRunAlreadyExists,
- )
- from airflow.models import BaseOperator, BaseOperatorLink
- from airflow.models.dag import DagModel
- from airflow.models.dagbag import DagBag
- from airflow.models.dagrun import DagRun
- from airflow.models.xcom import XCom
- from airflow.providers.standard.triggers.external_task import DagStateTrigger
- from airflow.utils import timezone
- from airflow.utils.helpers import build_airflow_url_with_query
- from airflow.utils.session import provide_session
- from airflow.utils.state import DagRunState
- from airflow.utils.types import DagRunTriggeredByType, DagRunType
-
- XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
- XCOM_RUN_ID = "trigger_run_id"
-
-
- if TYPE_CHECKING:
-     from sqlalchemy.orm.session import Session
-
-     from airflow.models.taskinstancekey import TaskInstanceKey
-
-     try:
-         from airflow.sdk.definitions.context import Context
-     except ImportError:
-         # TODO: Remove once provider drops support for Airflow 2
-         from airflow.utils.context import Context
-
-
- class TriggerDagRunLink(BaseOperatorLink):
-     """
-     Operator link for TriggerDagRunOperator.
-
-     It allows users to access DAG triggered by task using TriggerDagRunOperator.
-     """
-
-     name = "Triggered DAG"
-
-     def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
-         from airflow.models.renderedtifields import RenderedTaskInstanceFields
-
-         if TYPE_CHECKING:
-             assert isinstance(operator, TriggerDagRunOperator)
-
-         if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
-             trigger_dag_id: str = template_fields.get("trigger_dag_id", operator.trigger_dag_id)
-         else:
-             trigger_dag_id = operator.trigger_dag_id
-
-         # Fetch the correct dag_run_id for the triggerED dag which is
-         # stored in xcom during execution of the triggerING task.
-         triggered_dag_run_id = XCom.get_value(ti_key=ti_key, key=XCOM_RUN_ID)
-
-         query = {"dag_id": trigger_dag_id, "dag_run_id": triggered_dag_run_id}
-         return build_airflow_url_with_query(query)
-
-
- class TriggerDagRunOperator(BaseOperator):
-     """
-     Triggers a DAG run for a specified DAG ID.
-
-     Note that if database isolation mode is enabled, not all features are supported.
-
-     :param trigger_dag_id: The ``dag_id`` of the DAG to trigger (templated).
-     :param trigger_run_id: The run ID to use for the triggered DAG run (templated).
-         If not provided, a run ID will be automatically generated.
-     :param conf: Configuration for the DAG run (templated).
-     :param logical_date: Logical date for the triggered DAG (templated).
-     :param reset_dag_run: Whether clear existing DAG run if already exists.
-         This is useful when backfill or rerun an existing DAG run.
-         This only resets (not recreates) the DAG run.
-         DAG run conf is immutable and will not be reset on rerun of an existing DAG run.
-         When reset_dag_run=False and dag run exists, DagRunAlreadyExists will be raised.
-         When reset_dag_run=True and dag run exists, existing DAG run will be cleared to rerun.
-     :param wait_for_completion: Whether or not wait for DAG run completion. (default: False)
-     :param poke_interval: Poke interval to check DAG run status when wait_for_completion=True.
-         (default: 60)
-     :param allowed_states: Optional list of allowed DAG run states of the triggered DAG. This is useful when
-         setting ``wait_for_completion`` to True. Must be a valid DagRunState.
-         Default is ``[DagRunState.SUCCESS]``.
-     :param failed_states: Optional list of failed or disallowed DAG run states of the triggered DAG. This is
-         useful when setting ``wait_for_completion`` to True. Must be a valid DagRunState.
-         Default is ``[DagRunState.FAILED]``.
-     :param skip_when_already_exists: Set to true to mark the task as SKIPPED if a DAG run of the triggered
-         DAG for the same logical date already exists.
-     :param deferrable: If waiting for completion, whether or not to defer the task until done,
-         default is ``False``.
-     """
-
-     template_fields: Sequence[str] = (
-         "trigger_dag_id",
-         "trigger_run_id",
-         "logical_date",
-         "conf",
-         "wait_for_completion",
-         "skip_when_already_exists",
-     )
-     template_fields_renderers = {"conf": "py"}
-     ui_color = "#ffefeb"
-     operator_extra_links = [TriggerDagRunLink()]
-
-     def __init__(
-         self,
-         *,
-         trigger_dag_id: str,
-         trigger_run_id: str | None = None,
-         conf: dict | None = None,
-         logical_date: str | datetime.datetime | None = None,
-         reset_dag_run: bool = False,
-         wait_for_completion: bool = False,
-         poke_interval: int = 60,
-         allowed_states: list[str | DagRunState] | None = None,
-         failed_states: list[str | DagRunState] | None = None,
-         skip_when_already_exists: bool = False,
-         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
-         **kwargs,
-     ) -> None:
-         super().__init__(**kwargs)
-         self.trigger_dag_id = trigger_dag_id
-         self.trigger_run_id = trigger_run_id
-         self.conf = conf
-         self.reset_dag_run = reset_dag_run
-         self.wait_for_completion = wait_for_completion
-         self.poke_interval = poke_interval
-         if allowed_states:
-             self.allowed_states = [DagRunState(s) for s in allowed_states]
-         else:
-             self.allowed_states = [DagRunState.SUCCESS]
-         if failed_states or failed_states == []:
-             self.failed_states = [DagRunState(s) for s in failed_states]
-         else:
-             self.failed_states = [DagRunState.FAILED]
-         self.skip_when_already_exists = skip_when_already_exists
-         self._defer = deferrable
-
-         if logical_date is not None and not isinstance(logical_date, (str, datetime.datetime)):
-             type_name = type(logical_date).__name__
-             raise TypeError(
-                 f"Expected str or datetime.datetime type for parameter 'logical_date'. Got {type_name}"
-             )
-
-         self.logical_date = logical_date
-
-     def execute(self, context: Context):
-         if self.logical_date is None or isinstance(self.logical_date, datetime.datetime):
-             parsed_logical_date = self.logical_date
-         else:
-             parsed_logical_date = timezone.parse(self.logical_date)
-
-         try:
-             json.dumps(self.conf)
-         except TypeError:
-             raise AirflowException("conf parameter should be JSON Serializable")
-
-         if self.trigger_run_id:
-             run_id = str(self.trigger_run_id)
-         else:
-             run_id = DagRun.generate_run_id(
-                 run_type=DagRunType.MANUAL,
-                 logical_date=parsed_logical_date,
-                 run_after=parsed_logical_date or timezone.utcnow(),
-             )
-
-         try:
-             dag_run = trigger_dag(
-                 dag_id=self.trigger_dag_id,
-                 run_id=run_id,
-                 conf=self.conf,
-                 logical_date=parsed_logical_date,
-                 replace_microseconds=False,
-                 triggered_by=DagRunTriggeredByType.OPERATOR,
-             )
-
-         except DagRunAlreadyExists as e:
-             if self.reset_dag_run:
-                 dag_run = e.dag_run
-                 self.log.info("Clearing %s on %s", self.trigger_dag_id, dag_run.run_id)
-
-                 # Get target dag object and call clear()
-                 dag_model = DagModel.get_current(self.trigger_dag_id)
-                 if dag_model is None:
-                     raise DagNotFound(f"Dag id {self.trigger_dag_id} not found in DagModel")
-
-                 # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
-                 dag_bag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True)
-                 dag = dag_bag.get_dag(self.trigger_dag_id)
-                 dag.clear(run_id=dag_run.run_id)
-             else:
-                 if self.skip_when_already_exists:
-                     raise AirflowSkipException(
-                         "Skipping due to skip_when_already_exists is set to True and DagRunAlreadyExists"
-                     )
-                 raise e
-         if dag_run is None:
-             raise RuntimeError("The dag_run should be set here!")
-         # Store the run id from the dag run (either created or found above) to
-         # be used when creating the extra link on the webserver.
-         ti = context["task_instance"]
-         ti.xcom_push(key=XCOM_RUN_ID, value=dag_run.run_id)
-
-         if self.wait_for_completion:
-             # Kick off the deferral process
-             if self._defer:
-                 self.defer(
-                     trigger=DagStateTrigger(
-                         dag_id=self.trigger_dag_id,
-                         states=self.allowed_states + self.failed_states,
-                         run_ids=[run_id],
-                         poll_interval=self.poke_interval,
-                     ),
-                     method_name="execute_complete",
-                 )
-             # wait for dag to complete
-             while True:
-                 self.log.info(
-                     "Waiting for %s on %s to become allowed state %s ...",
-                     self.trigger_dag_id,
-                     run_id,
-                     self.allowed_states,
-                 )
-                 time.sleep(self.poke_interval)
-
-                 # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
-                 dag_run.refresh_from_db()
-                 state = dag_run.state
-                 if state in self.failed_states:
-                     raise AirflowException(f"{self.trigger_dag_id} failed with failed states {state}")
-                 if state in self.allowed_states:
-                     self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
-                     return
-
-     @provide_session
-     def execute_complete(self, context: Context, session: Session, event: tuple[str, dict[str, Any]]):
-         # This run_ids is parsed from the return trigger event
-         provided_run_id = event[1]["run_ids"][0]
-         try:
-             # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
-             dag_run = session.execute(
-                 select(DagRun).where(DagRun.dag_id == self.trigger_dag_id, DagRun.run_id == provided_run_id)
-             ).scalar_one()
-         except NoResultFound:
-             raise AirflowException(
-                 f"No DAG run found for DAG {self.trigger_dag_id} and run ID {provided_run_id}"
-             )
-
-         state = dag_run.state
-
-         if state in self.failed_states:
-             raise AirflowException(f"{self.trigger_dag_id} failed with failed state {state}")
-         if state in self.allowed_states:
-             self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
-             return
-
-         raise AirflowException(
-             f"{self.trigger_dag_id} return {state} which is not in {self.failed_states}"
-             f" or {self.allowed_states}"
-         )
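
The removed operator's docstring above describes its parameters; for orientation, here is a minimal sketch of how TriggerDagRunOperator from this package was typically wired into a controller DAG. The DAG IDs, conf payload, and intervals below are hypothetical, and the import path assumes the 0.1.0rc1 layout shown in the file list.

from __future__ import annotations

import datetime

from airflow import DAG
from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

with DAG(dag_id="controller_dag", start_date=datetime.datetime(2025, 1, 1), schedule=None) as dag:
    trigger = TriggerDagRunOperator(
        task_id="trigger_target",
        trigger_dag_id="target_dag",   # hypothetical dag_id of the DAG to trigger (templated)
        conf={"message": "hello"},     # must be JSON-serializable, as enforced in execute()
        wait_for_completion=True,      # poll (or defer) until the triggered run reaches a terminal state
        poke_interval=30,              # seconds between state checks
        allowed_states=["success"],    # run states treated as success
        failed_states=["failed"],      # run states that fail this task
    )

With wait_for_completion enabled, the operator either polls the triggered run in a loop or, when deferrable=True, hands the wait off to DagStateTrigger as shown in execute() above.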
airflow/providers/standard/sensors/bash.py
@@ -1,120 +0,0 @@
- #
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- from __future__ import annotations
-
- import os
- from collections.abc import Sequence
- from subprocess import PIPE, STDOUT, Popen
- from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
- from typing import TYPE_CHECKING
-
- from airflow.exceptions import AirflowFailException
- from airflow.sensors.base import BaseSensorOperator
-
- if TYPE_CHECKING:
-     try:
-         from airflow.sdk.definitions.context import Context
-     except ImportError:
-         # TODO: Remove once provider drops support for Airflow 2
-         from airflow.utils.context import Context
-
-
- class BashSensor(BaseSensorOperator):
-     """
-     Executes a bash command/script.
-
-     Return True if and only if the return code is 0.
-
-     :param bash_command: The command, set of commands or reference to a
-         bash script (must be '.sh') to be executed.
-
-     :param env: If env is not None, it must be a mapping that defines the
-         environment variables for the new process; these are used instead
-         of inheriting the current process environment, which is the default
-         behavior. (templated)
-     :param output_encoding: output encoding of bash command.
-     :param retry_exit_code: If task exits with this code, treat the sensor
-         as not-yet-complete and retry the check later according to the
-         usual retry/timeout settings. Any other non-zero return code will
-         be treated as an error, and cause the sensor to fail. If set to
-         ``None`` (the default), any non-zero exit code will cause a retry
-         and the task will never raise an error except on time-out.
-
-     .. seealso::
-         For more information on how to use this sensor, take a look at the guide:
-         :ref:`howto/operator:BashSensor`
-     """
-
-     template_fields: Sequence[str] = ("bash_command", "env")
-
-     def __init__(
-         self, *, bash_command, env=None, output_encoding="utf-8", retry_exit_code: int | None = None, **kwargs
-     ):
-         super().__init__(**kwargs)
-         self.bash_command = bash_command
-         self.env = env
-         self.output_encoding = output_encoding
-         self.retry_exit_code = retry_exit_code
-
-     def poke(self, context: Context):
-         """Execute the bash command in a temporary directory."""
-         bash_command = self.bash_command
-         self.log.info("Tmp dir root location: %s", gettempdir())
-         with (
-             TemporaryDirectory(prefix="airflowtmp") as tmp_dir,
-             NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f,
-         ):
-             f.write(bytes(bash_command, "utf_8"))
-             f.flush()
-             fname = f.name
-             script_location = tmp_dir + "/" + fname
-             self.log.info("Temporary script location: %s", script_location)
-             self.log.info("Running command: %s", bash_command)
-
-             with Popen(
-                 ["bash", fname],
-                 stdout=PIPE,
-                 stderr=STDOUT,
-                 close_fds=True,
-                 cwd=tmp_dir,
-                 env=self.env,
-                 preexec_fn=os.setsid,
-             ) as resp:
-                 if resp.stdout:
-                     self.log.info("Output:")
-                     for line in iter(resp.stdout.readline, b""):
-                         self.log.info(line.decode(self.output_encoding).strip())
-                 resp.wait()
-                 self.log.info("Command exited with return code %s", resp.returncode)
-
-                 # zero code means success, the sensor can go green
-                 if resp.returncode == 0:
-                     return True
-
-                 # we have a retry exit code, sensor retries if return code matches, otherwise error
-                 elif self.retry_exit_code is not None:
-                     if resp.returncode == self.retry_exit_code:
-                         self.log.info("Return code matches retry code, will retry later")
-                         return False
-                     else:
-                         raise AirflowFailException(f"Command exited with return code {resp.returncode}")
-
-                 # backwards compatibility: sensor retries no matter the error code
-                 else:
-                     self.log.info("Non-zero return code and no retry code set, will retry later")
-                     return False
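
The BashSensor docstring above explains the retry_exit_code contract; as a rough sketch of how the removed sensor was commonly used, the example below waits for a marker file. The file path and DAG name are hypothetical, and the import path assumes the 0.1.0rc1 layout shown in the file list.

from __future__ import annotations

import datetime

from airflow import DAG
from airflow.providers.standard.sensors.bash import BashSensor

with DAG(dag_id="wait_for_marker", start_date=datetime.datetime(2025, 1, 1), schedule=None) as dag:
    wait = BashSensor(
        task_id="check_marker",
        bash_command="test -f /tmp/marker_file",  # hypothetical check; exits 0 once the file exists
        retry_exit_code=1,                        # exit 1 means "not ready yet", so the sensor pokes again
        poke_interval=60,                         # BaseSensorOperator argument: seconds between pokes
    )

Per the poke() logic above, exit code 0 completes the sensor, the configured retry_exit_code schedules another poke, and any other non-zero code raises AirflowFailException.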