apache-airflow-providers-standard 0.0.1rc1__py3-none-any.whl → 0.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/standard/__init__.py +1 -1
- airflow/providers/standard/get_provider_info.py +4 -2
- airflow/providers/standard/hooks/subprocess.py +1 -1
- airflow/providers/standard/operators/bash.py +3 -2
- airflow/providers/standard/operators/datetime.py +3 -2
- airflow/providers/standard/operators/generic_transfer.py +2 -1
- airflow/providers/standard/operators/python.py +20 -17
- airflow/providers/standard/operators/trigger_dagrun.py +305 -0
- airflow/providers/standard/operators/weekday.py +2 -1
- airflow/providers/standard/sensors/bash.py +6 -4
- airflow/providers/standard/sensors/date_time.py +4 -3
- airflow/providers/standard/sensors/filesystem.py +139 -0
- airflow/providers/standard/sensors/python.py +2 -1
- airflow/providers/standard/sensors/weekday.py +2 -1
- {apache_airflow_providers_standard-0.0.1rc1.dist-info → apache_airflow_providers_standard-0.0.2.dist-info}/METADATA +8 -8
- apache_airflow_providers_standard-0.0.2.dist-info/RECORD +30 -0
- apache_airflow_providers_standard-0.0.1rc1.dist-info/RECORD +0 -28
- {apache_airflow_providers_standard-0.0.1rc1.dist-info → apache_airflow_providers_standard-0.0.2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_standard-0.0.1rc1.dist-info → apache_airflow_providers_standard-0.0.2.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.0.1"
+__version__ = "0.0.2"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.8.0"
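
The version gate shown in this hunk is the standard provider pattern: parse the installed Airflow version, strip any pre-release suffix via base_version, and refuse to load on anything older than 2.8.0. A standalone sketch of the same check (the error message is illustrative, not the provider's exact text):

import packaging.version

from airflow import __version__ as airflow_version

# base_version drops rc/dev suffixes so "2.8.0rc1" still compares as "2.8.0".
if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
    "2.8.0"
):
    raise RuntimeError(f"This provider requires Apache Airflow 2.8.0+; found {airflow_version}.")
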
airflow/providers/standard/get_provider_info.py
@@ -28,8 +28,8 @@ def get_provider_info():
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
         "state": "ready",
-        "source-date-epoch":
-        "versions": ["0.0.1"],
+        "source-date-epoch": 1732434919,
+        "versions": ["0.0.2", "0.0.1"],
         "dependencies": ["apache-airflow>=2.8.0", "apache-airflow-providers-common-sql>=1.20.0"],
         "integrations": [
             {
@@ -52,6 +52,7 @@ def get_provider_info():
                 "airflow.providers.standard.operators.bash",
                 "airflow.providers.standard.operators.python",
                 "airflow.providers.standard.operators.generic_transfer",
+                "airflow.providers.standard.operators.trigger_dagrun",
             ],
         }
     ],
@@ -65,6 +66,7 @@ def get_provider_info():
                 "airflow.providers.standard.sensors.weekday",
                 "airflow.providers.standard.sensors.bash",
                 "airflow.providers.standard.sensors.python",
+                "airflow.providers.standard.sensors.filesystem",
             ],
         }
     ],
airflow/providers/standard/hooks/subprocess.py
@@ -20,9 +20,9 @@ import contextlib
 import os
 import signal
 from collections import namedtuple
+from collections.abc import Iterator
 from subprocess import PIPE, STDOUT, Popen
 from tempfile import TemporaryDirectory, gettempdir
-from typing import Iterator
 
 from airflow.hooks.base import BaseHook
 
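
The same ``typing`` → ``collections.abc`` import migration recurs throughout this release; Python deprecated the ``typing`` aliases for these ABCs in 3.9 (PEP 585). A minimal sketch of the idiom, independent of Airflow:

from collections.abc import Iterator


def read_lines(path: str) -> Iterator[str]:
    # Iterator now comes from collections.abc rather than typing,
    # matching the import change in the hunk above.
    with open(path) as handle:
        yield from handle
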
airflow/providers/standard/operators/bash.py
@@ -21,8 +21,9 @@ import os
 import shutil
 import tempfile
 import warnings
+from collections.abc import Container, Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable, Container, Sequence, cast
+from typing import TYPE_CHECKING, Any, Callable, cast
 
 from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.models.baseoperator import BaseOperator
@@ -97,7 +98,7 @@ class BashOperator(BaseOperator):
 
     .. code-block:: python
 
-        bash_command = "set -e; python3 script.py '{{ next_execution_date }}'"
+        bash_command = "set -e; python3 script.py '{{ data_interval_end }}'"
 
     .. note::
 
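
A minimal usage sketch of the templated command from the docstring above (the task id and script name are hypothetical):

from airflow.providers.standard.operators.bash import BashOperator

run_script = BashOperator(
    task_id="run_script",
    # Jinja-templated: rendered to the run's data_interval_end at execution time.
    bash_command="set -e; python3 script.py '{{ data_interval_end }}'",
)
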
airflow/providers/standard/operators/datetime.py
@@ -17,7 +17,8 @@
 from __future__ import annotations
 
 import datetime
-from typing import TYPE_CHECKING, Iterable
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.operators.branch import BaseBranchOperator
@@ -44,7 +45,7 @@ class BranchDateTimeOperator(BaseBranchOperator):
     :param target_lower: target lower bound.
     :param target_upper: target upper bound.
     :param use_task_logical_date: If ``True``, uses task's logical date to compare with targets.
-
+        Logical date is useful for backfilling. If ``False``, uses system's date.
     """
 
     def __init__(
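
A usage sketch for the branching behavior documented above (task ids are hypothetical):

import datetime

from airflow.providers.standard.operators.datetime import BranchDateTimeOperator

branch = BranchDateTimeOperator(
    task_id="datetime_branch",
    follow_task_ids_if_true=["in_window"],
    follow_task_ids_if_false=["out_of_window"],
    # Follow "in_window" only when the compared date falls between the bounds.
    target_lower=datetime.time(10, 0, 0),
    target_upper=datetime.time(11, 0, 0),
    use_task_logical_date=True,  # compare the logical date, not the system clock
)
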
airflow/providers/standard/operators/generic_transfer.py
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.hooks.base import BaseHook
 from airflow.models import BaseOperator
airflow/providers/standard/operators/python.py
@@ -28,11 +28,11 @@ import textwrap
 import types
 import warnings
 from abc import ABCMeta, abstractmethod
-from collections.abc import Container
+from collections.abc import Collection, Container, Iterable, Mapping, Sequence
 from functools import cache
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Mapping, NamedTuple, Sequence, cast
+from typing import TYPE_CHECKING, Any, Callable, NamedTuple, cast
 
 import lazy_object_proxy
 
@@ -114,13 +114,13 @@ class PythonOperator(BaseOperator):
     function. This set of kwargs correspond exactly to what you can use in your jinja templates.
     For this to work, you need to define ``**kwargs`` in your function header, or you can add directly the
     keyword arguments you would like to get - for example with the below code your callable will get
-    the values of ``ti`` and ``next_ds`` context variables.
+    the values of ``ti`` context variables.
 
     With explicit arguments:
 
     .. code-block:: python
 
-        def my_python_callable(ti, next_ds):
+        def my_python_callable(ti):
             pass
 
     With kwargs:
@@ -129,7 +129,6 @@ class PythonOperator(BaseOperator):
 
         def my_python_callable(**kwargs):
            ti = kwargs["ti"]
-           next_ds = kwargs["next_ds"]
 
 
     :param python_callable: A reference to an object that is callable
@@ -310,7 +309,7 @@ class ShortCircuitOperator(PythonOperator, SkipMixin):
            self.skip(
                dag_run=dag_run,
                tasks=to_skip,
-               execution_date=cast("DateTime", dag_run.execution_date),
+               execution_date=cast("DateTime", dag_run.logical_date),  # type: ignore[call-arg, union-attr]
                map_index=context["ti"].map_index,
            )
 
@@ -360,34 +359,36 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         "ds_nodash",
         "expanded_ti_count",
         "inlets",
-        "next_ds",
-        "next_ds_nodash",
         "outlets",
-        "prev_ds",
-        "prev_ds_nodash",
         "run_id",
         "task_instance_key_str",
         "test_mode",
-        "tomorrow_ds",
-        "tomorrow_ds_nodash",
         "ts",
         "ts_nodash",
         "ts_nodash_with_tz",
+        # The following should be removed when Airflow 2 support is dropped.
+        "next_ds",
+        "next_ds_nodash",
+        "prev_ds",
+        "prev_ds_nodash",
+        "tomorrow_ds",
+        "tomorrow_ds_nodash",
         "yesterday_ds",
         "yesterday_ds_nodash",
     }
     PENDULUM_SERIALIZABLE_CONTEXT_KEYS = {
         "data_interval_end",
         "data_interval_start",
-        "execution_date",
         "logical_date",
-        "next_execution_date",
         "prev_data_interval_end_success",
         "prev_data_interval_start_success",
-        "prev_execution_date",
-        "prev_execution_date_success",
         "prev_start_date_success",
         "prev_end_date_success",
+        # The following should be removed when Airflow 2 support is dropped.
+        "execution_date",
+        "next_execution_date",
+        "prev_execution_date",
+        "prev_execution_date_success",
     }
 
     AIRFLOW_SERIALIZABLE_CONTEXT_KEYS = {
@@ -397,7 +398,9 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         "dag_run",
         "task",
         "params",
-        "triggering_asset_events"
+        "triggering_asset_events",
+        # The following should be removed when Airflow 2 support is dropped.
+        "triggering_dataset_events",
     }
 
     def __init__(
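
The key sets above define which context variables survive serialization into the virtualenv subprocess; the Airflow 2-era names are now grouped behind a removal note rather than dropped. A minimal sketch of a callable that relies on two of the serializable keys (task id and requirement are hypothetical):

from airflow.providers.standard.operators.python import PythonVirtualenvOperator


def callable_in_venv(ds, ts_nodash, **context):
    # Only context keys listed in the serializable sets above reach
    # this subprocess.
    print(f"running for {ds} at {ts_nodash}")


venv_task = PythonVirtualenvOperator(
    task_id="venv_task",
    python_callable=callable_in_venv,
    requirements=["requests"],  # hypothetical extra dependency for the venv
)
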
airflow/providers/standard/operators/trigger_dagrun.py (new file)
@@ -0,0 +1,305 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import datetime
+import json
+import time
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, cast
+
+from sqlalchemy import select
+from sqlalchemy.orm.exc import NoResultFound
+
+from airflow.api.common.trigger_dag import trigger_dag
+from airflow.api_internal.internal_api_call import InternalApiConfig
+from airflow.configuration import conf
+from airflow.exceptions import (
+    AirflowException,
+    AirflowSkipException,
+    DagNotFound,
+    DagRunAlreadyExists,
+)
+from airflow.models import BaseOperator, BaseOperatorLink
+from airflow.models.dag import DagModel
+from airflow.models.dagbag import DagBag
+from airflow.models.dagrun import DagRun
+from airflow.models.xcom import XCom
+from airflow.triggers.external_task import DagStateTrigger
+from airflow.utils import timezone
+from airflow.utils.helpers import build_airflow_url_with_query
+from airflow.utils.session import provide_session
+from airflow.utils.state import DagRunState
+from airflow.utils.types import DagRunTriggeredByType, DagRunType
+
+XCOM_LOGICAL_DATE_ISO = "trigger_logical_date_iso"
+XCOM_RUN_ID = "trigger_run_id"
+
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm.session import Session
+
+    from airflow.models.taskinstancekey import TaskInstanceKey
+    from airflow.utils.context import Context
+
+
+class TriggerDagRunLink(BaseOperatorLink):
+    """
+    Operator link for TriggerDagRunOperator.
+
+    It allows users to access DAG triggered by task using TriggerDagRunOperator.
+    """
+
+    name = "Triggered DAG"
+
+    def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
+        from airflow.models.renderedtifields import RenderedTaskInstanceFields
+        from airflow.models.taskinstance import TaskInstance
+
+        ti = TaskInstance.get_task_instance(
+            dag_id=ti_key.dag_id, run_id=ti_key.run_id, task_id=ti_key.task_id, map_index=ti_key.map_index
+        )
+        if TYPE_CHECKING:
+            assert ti is not None
+
+        template_fields = RenderedTaskInstanceFields.get_templated_fields(ti)
+        untemplated_trigger_dag_id = cast(TriggerDagRunOperator, operator).trigger_dag_id
+        if template_fields:
+            trigger_dag_id = template_fields.get("trigger_dag_id", untemplated_trigger_dag_id)
+        else:
+            trigger_dag_id = untemplated_trigger_dag_id
+
+        # Fetch the correct dag_run_id for the triggerED dag which is
+        # stored in xcom during execution of the triggerING task.
+        triggered_dag_run_id = XCom.get_value(ti_key=ti_key, key=XCOM_RUN_ID)
+
+        query = {
+            "dag_id": trigger_dag_id,
+            "dag_run_id": triggered_dag_run_id,
+        }
+        return build_airflow_url_with_query(query)
+
+
+class TriggerDagRunOperator(BaseOperator):
+    """
+    Triggers a DAG run for a specified DAG ID.
+
+    Note that if database isolation mode is enabled, not all features are supported.
+
+    :param trigger_dag_id: The ``dag_id`` of the DAG to trigger (templated).
+    :param trigger_run_id: The run ID to use for the triggered DAG run (templated).
+        If not provided, a run ID will be automatically generated.
+    :param conf: Configuration for the DAG run (templated).
+    :param logical_date: Logical date for the triggered DAG (templated).
+    :param reset_dag_run: Whether clear existing DAG run if already exists.
+        This is useful when backfill or rerun an existing DAG run.
+        This only resets (not recreates) the DAG run.
+        DAG run conf is immutable and will not be reset on rerun of an existing DAG run.
+        When reset_dag_run=False and dag run exists, DagRunAlreadyExists will be raised.
+        When reset_dag_run=True and dag run exists, existing DAG run will be cleared to rerun.
+    :param wait_for_completion: Whether or not wait for DAG run completion. (default: False)
+    :param poke_interval: Poke interval to check DAG run status when wait_for_completion=True.
+        (default: 60)
+    :param allowed_states: Optional list of allowed DAG run states of the triggered DAG. This is useful when
+        setting ``wait_for_completion`` to True. Must be a valid DagRunState.
+        Default is ``[DagRunState.SUCCESS]``.
+    :param failed_states: Optional list of failed or disallowed DAG run states of the triggered DAG. This is
+        useful when setting ``wait_for_completion`` to True. Must be a valid DagRunState.
+        Default is ``[DagRunState.FAILED]``.
+    :param skip_when_already_exists: Set to true to mark the task as SKIPPED if a DAG run of the triggered
+        DAG for the same logical date already exists.
+    :param deferrable: If waiting for completion, whether or not to defer the task until done,
+        default is ``False``.
+    """
+
+    template_fields: Sequence[str] = (
+        "trigger_dag_id",
+        "trigger_run_id",
+        "logical_date",
+        "conf",
+        "wait_for_completion",
+        "skip_when_already_exists",
+    )
+    template_fields_renderers = {"conf": "py"}
+    ui_color = "#ffefeb"
+    operator_extra_links = [TriggerDagRunLink()]
+
+    def __init__(
+        self,
+        *,
+        trigger_dag_id: str,
+        trigger_run_id: str | None = None,
+        conf: dict | None = None,
+        logical_date: str | datetime.datetime | None = None,
+        reset_dag_run: bool = False,
+        wait_for_completion: bool = False,
+        poke_interval: int = 60,
+        allowed_states: list[str | DagRunState] | None = None,
+        failed_states: list[str | DagRunState] | None = None,
+        skip_when_already_exists: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.trigger_dag_id = trigger_dag_id
+        self.trigger_run_id = trigger_run_id
+        self.conf = conf
+        self.reset_dag_run = reset_dag_run
+        self.wait_for_completion = wait_for_completion
+        self.poke_interval = poke_interval
+        if allowed_states:
+            self.allowed_states = [DagRunState(s) for s in allowed_states]
+        else:
+            self.allowed_states = [DagRunState.SUCCESS]
+        if failed_states or failed_states == []:
+            self.failed_states = [DagRunState(s) for s in failed_states]
+        else:
+            self.failed_states = [DagRunState.FAILED]
+        self.skip_when_already_exists = skip_when_already_exists
+        self._defer = deferrable
+
+        if logical_date is not None and not isinstance(logical_date, (str, datetime.datetime)):
+            type_name = type(logical_date).__name__
+            raise TypeError(
+                f"Expected str or datetime.datetime type for parameter 'logical_date'. Got {type_name}"
+            )
+
+        self.logical_date = logical_date
+
+    def execute(self, context: Context):
+        if InternalApiConfig.get_use_internal_api():
+            if self.reset_dag_run:
+                raise AirflowException("Parameter reset_dag_run=True is broken with Database Isolation Mode.")
+            if self.wait_for_completion:
+                raise AirflowException(
+                    "Parameter wait_for_completion=True is broken with Database Isolation Mode."
+                )
+
+        if isinstance(self.logical_date, datetime.datetime):
+            parsed_logical_date = self.logical_date
+        elif isinstance(self.logical_date, str):
+            parsed_logical_date = timezone.parse(self.logical_date)
+        else:
+            parsed_logical_date = timezone.utcnow()
+
+        try:
+            json.dumps(self.conf)
+        except TypeError:
+            raise AirflowException("conf parameter should be JSON Serializable")
+
+        if self.trigger_run_id:
+            run_id = str(self.trigger_run_id)
+        else:
+            run_id = DagRun.generate_run_id(DagRunType.MANUAL, parsed_logical_date)
+
+        try:
+            dag_run = trigger_dag(
+                dag_id=self.trigger_dag_id,
+                run_id=run_id,
+                conf=self.conf,
+                logical_date=parsed_logical_date,
+                replace_microseconds=False,
+                triggered_by=DagRunTriggeredByType.OPERATOR,
+            )
+
+        except DagRunAlreadyExists as e:
+            if self.reset_dag_run:
+                dag_run = e.dag_run
+                self.log.info("Clearing %s on %s", self.trigger_dag_id, dag_run.logical_date)
+
+                # Get target dag object and call clear()
+                dag_model = DagModel.get_current(self.trigger_dag_id)
+                if dag_model is None:
+                    raise DagNotFound(f"Dag id {self.trigger_dag_id} not found in DagModel")
+
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_bag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True)
+                dag = dag_bag.get_dag(self.trigger_dag_id)
+                dag.clear(start_date=dag_run.logical_date, end_date=dag_run.logical_date)
+            else:
+                if self.skip_when_already_exists:
+                    raise AirflowSkipException(
+                        "Skipping due to skip_when_already_exists is set to True and DagRunAlreadyExists"
+                    )
+                raise e
+        if dag_run is None:
+            raise RuntimeError("The dag_run should be set here!")
+        # Store the run id from the dag run (either created or found above) to
+        # be used when creating the extra link on the webserver.
+        ti = context["task_instance"]
+        ti.xcom_push(key=XCOM_RUN_ID, value=dag_run.run_id)
+
+        if self.wait_for_completion:
+            # Kick off the deferral process
+            if self._defer:
+                self.defer(
+                    trigger=DagStateTrigger(
+                        dag_id=self.trigger_dag_id,
+                        states=self.allowed_states + self.failed_states,
+                        logical_dates=[dag_run.logical_date],
+                        poll_interval=self.poke_interval,
+                    ),
+                    method_name="execute_complete",
+                )
+            # wait for dag to complete
+            while True:
+                self.log.info(
+                    "Waiting for %s on %s to become allowed state %s ...",
+                    self.trigger_dag_id,
+                    dag_run.logical_date,
+                    self.allowed_states,
+                )
+                time.sleep(self.poke_interval)
+
+                # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+                dag_run.refresh_from_db()
+                state = dag_run.state
+                if state in self.failed_states:
+                    raise AirflowException(f"{self.trigger_dag_id} failed with failed states {state}")
+                if state in self.allowed_states:
+                    self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
+                    return
+
+    @provide_session
+    def execute_complete(self, context: Context, session: Session, event: tuple[str, dict[str, Any]]):
+        # This logical_date is parsed from the return trigger event
+        provided_logical_date = event[1]["logical_dates"][0]
+        try:
+            # Note: here execution fails on database isolation mode. Needs structural changes for AIP-72
+            dag_run = session.execute(
+                select(DagRun).where(
+                    DagRun.dag_id == self.trigger_dag_id, DagRun.logical_date == provided_logical_date
+                )
+            ).scalar_one()
+        except NoResultFound:
+            raise AirflowException(
+                f"No DAG run found for DAG {self.trigger_dag_id} and logical date {self.logical_date}"
+            )
+
+        state = dag_run.state
+
+        if state in self.failed_states:
+            raise AirflowException(f"{self.trigger_dag_id} failed with failed state {state}")
+        if state in self.allowed_states:
+            self.log.info("%s finished with allowed state %s", self.trigger_dag_id, state)
+            return
+
+        raise AirflowException(
+            f"{self.trigger_dag_id} return {state} which is not in {self.failed_states}"
+            f" or {self.allowed_states}"
+        )
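
A usage sketch for the operator added above (dag and task ids are hypothetical):

from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator

trigger = TriggerDagRunOperator(
    task_id="trigger_target_dag",
    trigger_dag_id="target_dag",
    conf={"source": "upstream_dag"},  # must be JSON-serializable, as enforced in execute()
    wait_for_completion=True,
    poke_interval=30,
    deferrable=True,  # hand the wait off to the triggerer instead of blocking a worker
)
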
airflow/providers/standard/operators/weekday.py
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
 
 from airflow.operators.branch import BaseBranchOperator
 from airflow.utils import timezone
airflow/providers/standard/sensors/bash.py
@@ -18,9 +18,10 @@
 from __future__ import annotations
 
 import os
+from collections.abc import Sequence
 from subprocess import PIPE, STDOUT, Popen
 from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowFailException
 from airflow.sensors.base import BaseSensorOperator
@@ -70,9 +71,10 @@ class BashSensor(BaseSensorOperator):
         """Execute the bash command in a temporary directory."""
         bash_command = self.bash_command
         self.log.info("Tmp dir root location: %s", gettempdir())
-        with TemporaryDirectory(prefix="airflowtmp") as tmp_dir, NamedTemporaryFile(
-            dir=tmp_dir, prefix=self.task_id
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="airflowtmp") as tmp_dir,
+            NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f,
+        ):
             f.write(bytes(bash_command, "utf_8"))
             f.flush()
             fname = f.name
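
The rewritten setup uses parenthesized context managers (Python 3.10+ syntax) in place of the old line-wrapped form. A standalone sketch of the idiom:

from tempfile import NamedTemporaryFile, TemporaryDirectory

with (
    TemporaryDirectory(prefix="airflowtmp") as tmp_dir,
    NamedTemporaryFile(dir=tmp_dir, prefix="example") as f,
):
    # Both resources are entered left to right and cleaned up in reverse order.
    f.write(b"echo hello")
    f.flush()
    print(f.name)
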
airflow/providers/standard/sensors/date_time.py
@@ -18,8 +18,9 @@
 from __future__ import annotations
 
 import datetime
+from collections.abc import Sequence
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, NoReturn, Sequence
+from typing import TYPE_CHECKING, Any, NoReturn
 
 from airflow.providers.standard.utils.version_references import AIRFLOW_V_3_0_PLUS
 from airflow.sensors.base import BaseSensorOperator
@@ -54,7 +55,7 @@ class DateTimeSensor(BaseSensorOperator):
     It handles some cases for which ``TimeSensor`` and ``TimeDeltaSensor`` are not suited.
 
     **Example** 1 :
-        If a task needs to wait for 11am on each ``execution_date``. Using
+        If a task needs to wait for 11am on each ``logical_date``. Using
         ``TimeSensor`` or ``TimeDeltaSensor``, all backfill tasks started at
         1am have to wait for 10 hours. This is unnecessary, e.g. a backfill
         task with ``{{ ds }} = '1970-01-01'`` does not need to wait because
@@ -69,7 +70,7 @@ class DateTimeSensor(BaseSensorOperator):
 
             DateTimeSensor(
                 task_id="wait_for_0100",
-                target_time="{{ next_execution_date.tomorrow().replace(hour=1) }}",
+                target_time="{{ data_interval_end.tomorrow().replace(hour=1) }}",
             )
 
     :param target_time: datetime after which the job succeeds. (templated)
airflow/providers/standard/sensors/filesystem.py (new file)
@@ -0,0 +1,139 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import datetime
+import os
+from collections.abc import Sequence
+from functools import cached_property
+from glob import glob
+from typing import TYPE_CHECKING, Any
+
+from airflow.configuration import conf
+from airflow.exceptions import AirflowException
+from airflow.providers.standard.hooks.filesystem import FSHook
+from airflow.sensors.base import BaseSensorOperator
+from airflow.triggers.base import StartTriggerArgs
+from airflow.triggers.file import FileTrigger
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class FileSensor(BaseSensorOperator):
+    """
+    Waits for a file or folder to land in a filesystem.
+
+    If the path given is a directory then this sensor will only return true if
+    any files exist inside it (either directly, or within a subdirectory)
+
+    :param fs_conn_id: reference to the File (path)
+        connection id
+    :param filepath: File or folder name (relative to
+        the base path set within the connection), can be a glob.
+    :param recursive: when set to ``True``, enables recursive directory matching behavior of
+        ``**`` in glob filepath parameter. Defaults to ``False``.
+    :param deferrable: If waiting for completion, whether to defer the task until done,
+        default is ``False``.
+    :param start_from_trigger: Start the task directly from the triggerer without going into the worker.
+    :param trigger_kwargs: The keyword arguments passed to the trigger when start_from_trigger is set to True
+        during dynamic task mapping. This argument is not used in standard usage.
+
+    .. seealso::
+        For more information on how to use this sensor, take a look at the guide:
+        :ref:`howto/operator:FileSensor`
+
+
+    """
+
+    template_fields: Sequence[str] = ("filepath",)
+    ui_color = "#91818a"
+    start_trigger_args = StartTriggerArgs(
+        trigger_cls="airflow.triggers.file.FileTrigger",
+        trigger_kwargs={},
+        next_method="execute_complete",
+        next_kwargs=None,
+        timeout=None,
+    )
+    start_from_trigger = False
+
+    def __init__(
+        self,
+        *,
+        filepath,
+        fs_conn_id="fs_default",
+        recursive=False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        start_from_trigger: bool = False,
+        trigger_kwargs: dict[str, Any] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.filepath = filepath
+        self.fs_conn_id = fs_conn_id
+        self.recursive = recursive
+        self.deferrable = deferrable
+
+        self.start_from_trigger = start_from_trigger
+
+        if self.deferrable and self.start_from_trigger:
+            self.start_trigger_args.timeout = datetime.timedelta(seconds=self.timeout)
+            self.start_trigger_args.trigger_kwargs = dict(
+                filepath=self.path,
+                recursive=self.recursive,
+                poke_interval=self.poke_interval,
+            )
+
+    @cached_property
+    def path(self) -> str:
+        hook = FSHook(self.fs_conn_id)
+        basepath = hook.get_path()
+        full_path = os.path.join(basepath, self.filepath)
+        return full_path
+
+    def poke(self, context: Context) -> bool:
+        self.log.info("Poking for file %s", self.path)
+        for path in glob(self.path, recursive=self.recursive):
+            if os.path.isfile(path):
+                mod_time = datetime.datetime.fromtimestamp(os.path.getmtime(path)).strftime("%Y%m%d%H%M%S")
+                self.log.info("Found File %s last modified: %s", path, mod_time)
+                return True
+
+            for _, _, files in os.walk(path):
+                if files:
+                    return True
+        return False
+
+    def execute(self, context: Context) -> None:
+        if not self.deferrable:
+            super().execute(context=context)
+        if not self.poke(context=context):
+            self.defer(
+                timeout=datetime.timedelta(seconds=self.timeout),
+                trigger=FileTrigger(
+                    filepath=self.path,
+                    recursive=self.recursive,
+                    poke_interval=self.poke_interval,
+                ),
+                method_name="execute_complete",
+            )
+
+    def execute_complete(self, context: Context, event: bool | None = None) -> None:
+        if not event:
+            raise AirflowException("%s task failed as %s not found.", self.task_id, self.filepath)
+        self.log.info("%s completed successfully as %s found.", self.task_id, self.filepath)
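
A usage sketch for the sensor added above ("fs_default" is the operator's default connection id; the path is hypothetical):

from airflow.providers.standard.sensors.filesystem import FileSensor

wait_for_export = FileSensor(
    task_id="wait_for_export",
    fs_conn_id="fs_default",
    filepath="exports/{{ ds }}/data.csv",  # relative to the connection's base path, may be a glob
    deferrable=True,  # poll from the triggerer via FileTrigger instead of holding a worker slot
    poke_interval=60,
)
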
airflow/providers/standard/sensors/python.py
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Callable, Mapping, Sequence
+from collections.abc import Mapping, Sequence
+from typing import TYPE_CHECKING, Any, Callable
 
 from airflow.sensors.base import BaseSensorOperator, PokeReturnValue
 from airflow.utils.context import context_merge
airflow/providers/standard/sensors/weekday.py
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable
+from collections.abc import Iterable
+from typing import TYPE_CHECKING
 
 from airflow.sensors.base import BaseSensorOperator
 from airflow.utils import timezone
{apache_airflow_providers_standard-0.0.1rc1.dist-info → apache_airflow_providers_standard-0.0.2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-standard
-Version: 0.0.1rc1
+Version: 0.0.2
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,11 +20,11 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.
-Requires-Dist: apache-airflow>=2.8.
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
+Requires-Dist: apache-airflow>=2.8.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.2/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.2
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -74,7 +74,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.0.1rc1``
+Release: ``0.0.2``
 
 
 Airflow Standard Provider
@@ -87,7 +87,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.2/>`_.
 
 Installation
 ------------
@@ -109,4 +109,4 @@ PIP package Version required
 ======================================= ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.2/changelog.html>`_.
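
The updated package can be installed or upgraded with ``pip install apache-airflow-providers-standard==0.0.2``.
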
apache_airflow_providers_standard-0.0.2.dist-info/RECORD (new file)
@@ -0,0 +1,30 @@
+airflow/providers/standard/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
+airflow/providers/standard/__init__.py,sha256=Y9NnjAMDPzmmwIz6hIjtW8mk2me2U25AprTqwzTpXNI,1495
+airflow/providers/standard/get_provider_info.py,sha256=ONygoc2VOl9a0kjZAVAXW9x3K34iJwzsU-OqPcpU5uM,4368
+airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
+airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
+airflow/providers/standard/hooks/subprocess.py,sha256=GAmdF69jwUcpc7DH5I42GnJRs6NMQvHwFhimWpIdTU4,4920
+airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/operators/bash.py,sha256=JND5TfySXqQcJUfzJLv8IZ2_PojNzsubjXjaMyTTdA4,13440
+airflow/providers/standard/operators/datetime.py,sha256=3ArdXBAiolyrjLZP9VX4zD4OCWUcdxAOWst7f4IcD0k,4587
+airflow/providers/standard/operators/generic_transfer.py,sha256=V9aJ9PtSETWpKXUsAHbqpJPlb-PJ4EN3sXKDrDQ8tvE,5085
+airflow/providers/standard/operators/python.py,sha256=a3sotdSdTv-Q-7TOxibRQZ5e473Y0Gj9wTdoOVCP7no,52088
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=uXO43o70ewAO4Xko_FhJE3xyC-uTGiM6SGzOp3bZQ-c,13020
+airflow/providers/standard/operators/weekday.py,sha256=f_Qjqsb00YhtDSb329aPmjZi6bR4EZTmhvvluCexGDE,4641
+airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/sensors/bash.py,sha256=rjbO7Ws30RwDZiVdjx8YRyS6M1ahiZu3dB_bUSrUfTs,4863
+airflow/providers/standard/sensors/date_time.py,sha256=-lA_Zb2UkFe-jN8hT0_1pAuGqj5k5dQQ4NtPeWLhl6s,5887
+airflow/providers/standard/sensors/filesystem.py,sha256=iT8q3k9tcq7wQ_w1N13AvYcGKNies-vCOo-qk5JVnL8,5336
+airflow/providers/standard/sensors/python.py,sha256=iljexBkGJ6spV2Z4POSt-Xm1Fw2DdbAb12vYKTmwzVo,3243
+airflow/providers/standard/sensors/time.py,sha256=kknnhVx-Ff3lRljY7jBEDc9EKEkwXkCf3m7KRm8BU_8,4817
+airflow/providers/standard/sensors/time_delta.py,sha256=b9U15IpJ2yFoJMNKLJ6FcMvuijf_jMqmMPI733rfI_o,4957
+airflow/providers/standard/sensors/weekday.py,sha256=f1uwCSvA4tibz88JKsij1zRieHcbTF2N4OT5GQumbaU,3722
+airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/utils/python_virtualenv.py,sha256=FR3241l5Obuo2BBwwBs-s87pRpCLyJnh3sUtHxrgRuM,7759
+airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=xipJ3A8yuJBQxdOZba3NLzM9KP07TeFg-2z1GBB1Bmc,3268
+airflow/providers/standard/utils/version_references.py,sha256=_AxsWDE9afM-lkY2LzTLHy6qNqrTv5q_tqgRb1zS1Mo,1117
+apache_airflow_providers_standard-0.0.2.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-0.0.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+apache_airflow_providers_standard-0.0.2.dist-info/METADATA,sha256=dcypwsVb7ehfOsQEbFBFtyQ2x2QSlz3mEmyL7WpKRjg,4845
+apache_airflow_providers_standard-0.0.2.dist-info/RECORD,,
apache_airflow_providers_standard-0.0.1rc1.dist-info/RECORD (removed)
@@ -1,28 +0,0 @@
-airflow/providers/standard/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/standard/__init__.py,sha256=OEhrQnhJMzOX1WvAZQPlXAEAfVco-55lw8voEe-x47w,1495
-airflow/providers/standard/get_provider_info.py,sha256=8CFrRtQJetNh97vyGFqLlcIFRiDsbXmUAymiv7R-E8w,4215
-airflow/providers/standard/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/hooks/filesystem.py,sha256=fDZwW_EYD8z1QXnReqI7gIwSbDPZNTKtqQvgktiP02o,2870
-airflow/providers/standard/hooks/package_index.py,sha256=U7_s_02-wwz9kTkzKr3JAhVQj2spuntWd_GmjfpV-y4,3769
-airflow/providers/standard/hooks/subprocess.py,sha256=43tVi3bhmdJ_zowxYYuooJzJdp4Vw2jrz6R6L6g4Q0s,4911
-airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/operators/bash.py,sha256=fK0S14O_cDpDUgMT4-X9uLl_voJKeWWmLYBQxyxq4KA,13415
-airflow/providers/standard/operators/datetime.py,sha256=nk0gwO_H8vIIu8ztA4zryWqZeoSokfINTu4o2vPUcPc,4562
-airflow/providers/standard/operators/generic_transfer.py,sha256=zdY9-8UDDG3YUwvEjfH4Nxe7KaAoXUtZtoWnnadD04s,5058
-airflow/providers/standard/operators/python.py,sha256=-1aLnyCJp_GoH7JVJxg62IRGt69aqPcAgmf-p7D-io8,51929
-airflow/providers/standard/operators/weekday.py,sha256=u1tkO_04IGeshL17NhC6G7lIYLhgDnZjNrIKyAfd4b4,4614
-airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/sensors/bash.py,sha256=Gyi8zMwSESPyanlG9jbN9u-an6Vj33s7lTlmjWmjgSA,4821
-airflow/providers/standard/sensors/date_time.py,sha256=t6EDZBFSsUuhNPxD9R7N9ZP6HOxw2aBTirtpUiilQl8,5864
-airflow/providers/standard/sensors/python.py,sha256=0JoExKvQOJKKQlTEOXztg17UC5bpZDyIjQUeATV13LE,3216
-airflow/providers/standard/sensors/time.py,sha256=kknnhVx-Ff3lRljY7jBEDc9EKEkwXkCf3m7KRm8BU_8,4817
-airflow/providers/standard/sensors/time_delta.py,sha256=b9U15IpJ2yFoJMNKLJ6FcMvuijf_jMqmMPI733rfI_o,4957
-airflow/providers/standard/sensors/weekday.py,sha256=PMg0eoGuD0xNLSJIBY9C1Y0aqSZn6PkQ_j_eYo7lnks,3695
-airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/utils/python_virtualenv.py,sha256=FR3241l5Obuo2BBwwBs-s87pRpCLyJnh3sUtHxrgRuM,7759
-airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=xipJ3A8yuJBQxdOZba3NLzM9KP07TeFg-2z1GBB1Bmc,3268
-airflow/providers/standard/utils/version_references.py,sha256=_AxsWDE9afM-lkY2LzTLHy6qNqrTv5q_tqgRb1zS1Mo,1117
-apache_airflow_providers_standard-0.0.1rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
-apache_airflow_providers_standard-0.0.1rc1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-apache_airflow_providers_standard-0.0.1rc1.dist-info/METADATA,sha256=b_PTbScMBE49UpqTjnVXWOd49HxX1WUca1tRJWLIDaU,4858
-apache_airflow_providers_standard-0.0.1rc1.dist-info/RECORD,,

{apache_airflow_providers_standard-0.0.1rc1.dist-info → apache_airflow_providers_standard-0.0.2.dist-info}/WHEEL: file without changes
{apache_airflow_providers_standard-0.0.1rc1.dist-info → apache_airflow_providers_standard-0.0.2.dist-info}/entry_points.txt: file without changes