apache-airflow-providers-standard 0.1.0rc1__py3-none-any.whl → 1.0.0.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry flags this version of apache-airflow-providers-standard as possibly problematic.
- airflow/providers/standard/LICENSE +52 -0
- airflow/providers/standard/__init__.py +1 -23
- airflow/providers/standard/get_provider_info.py +7 -52
- airflow/providers/standard/operators/bash.py +28 -82
- airflow/providers/standard/operators/datetime.py +3 -8
- airflow/providers/standard/operators/weekday.py +4 -11
- airflow/providers/standard/sensors/bash.py +5 -11
- airflow/providers/standard/sensors/date_time.py +8 -32
- airflow/providers/standard/sensors/time.py +5 -28
- airflow/providers/standard/sensors/time_delta.py +10 -48
- airflow/providers/standard/sensors/weekday.py +2 -7
- {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/METADATA +36 -20
- apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD +17 -0
- {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/WHEEL +1 -1
- airflow/providers/standard/hooks/__init__.py +0 -16
- airflow/providers/standard/hooks/filesystem.py +0 -89
- airflow/providers/standard/hooks/package_index.py +0 -95
- airflow/providers/standard/hooks/subprocess.py +0 -119
- airflow/providers/standard/operators/empty.py +0 -39
- airflow/providers/standard/operators/generic_transfer.py +0 -138
- airflow/providers/standard/operators/latest_only.py +0 -83
- airflow/providers/standard/operators/python.py +0 -1132
- airflow/providers/standard/operators/trigger_dagrun.py +0 -292
- airflow/providers/standard/sensors/external_task.py +0 -509
- airflow/providers/standard/sensors/filesystem.py +0 -158
- airflow/providers/standard/sensors/python.py +0 -85
- airflow/providers/standard/triggers/__init__.py +0 -16
- airflow/providers/standard/triggers/external_task.py +0 -211
- airflow/providers/standard/triggers/file.py +0 -131
- airflow/providers/standard/triggers/temporal.py +0 -114
- airflow/providers/standard/utils/__init__.py +0 -16
- airflow/providers/standard/utils/python_virtualenv.py +0 -209
- airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -77
- airflow/providers/standard/utils/sensor_helper.py +0 -119
- airflow/providers/standard/version_compat.py +0 -36
- apache_airflow_providers_standard-0.1.0rc1.dist-info/RECORD +0 -38
- {apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/sensors/time_delta.py

@@ -17,31 +17,18 @@
 # under the License.
 from __future__ import annotations

-from datetime import datetime, timedelta
+from datetime import timedelta
 from time import sleep
 from typing import TYPE_CHECKING, Any, NoReturn

-from packaging.version import Version
-
 from airflow.configuration import conf
 from airflow.exceptions import AirflowSkipException
-from airflow.providers.standard.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
-from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.sensors.base import BaseSensorOperator
+from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
 from airflow.utils import timezone

 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
-
-
-def _get_airflow_version():
-    from airflow import __version__ as airflow_version
-
-    return Version(Version(airflow_version).base_version)
+    from airflow.utils.context import Context


 class TimeDeltaSensor(BaseSensorOperator):

@@ -62,12 +49,8 @@ class TimeDeltaSensor(BaseSensorOperator):
         self.delta = delta

     def poke(self, context: Context):
-
-
-        if not isinstance(data_interval_end, datetime):
-            raise ValueError("`data_interval_end` returned non-datetime object")
-
-        target_dttm: datetime = data_interval_end + self.delta
+        target_dttm = context["data_interval_end"]
+        target_dttm += self.delta
         self.log.info("Checking if the time (%s) has come", target_dttm)
         return timezone.utcnow() > target_dttm

@@ -92,38 +75,19 @@ class TimeDeltaSensorAsync(TimeDeltaSensor):
         self.end_from_trigger = end_from_trigger

     def execute(self, context: Context) -> bool | NoReturn:
-
-
-        if not isinstance(data_interval_end, datetime):
-            raise ValueError("`data_interval_end` returned non-datetime object")
-
-        target_dttm: datetime = data_interval_end + self.delta
-
+        target_dttm = context["data_interval_end"]
+        target_dttm += self.delta
         if timezone.utcnow() > target_dttm:
             # If the target datetime is in the past, return immediately
             return True
         try:
-            if AIRFLOW_V_3_0_PLUS:
-                trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
-            else:
-                trigger = DateTimeTrigger(moment=target_dttm)
+            trigger = DateTimeTrigger(moment=target_dttm, end_from_trigger=self.end_from_trigger)
         except (TypeError, ValueError) as e:
             if self.soft_fail:
                 raise AirflowSkipException("Skipping due to soft_fail is set to True.") from e
             raise

-
-        timeout: int | float | timedelta
-        if _get_airflow_version() >= Version("2.11.0"):
-            timeout = self.timeout
-        else:
-            timeout = timedelta(seconds=self.timeout)
-
-        self.defer(
-            trigger=trigger,
-            method_name="execute_complete",
-            timeout=timeout,
-        )
+        self.defer(trigger=trigger, method_name="execute_complete")

     def execute_complete(self, context: Context, event: Any = None) -> None:
         """Handle the event when the trigger fires and return immediately."""

@@ -157,9 +121,7 @@ class WaitSensor(BaseSensorOperator):
     def execute(self, context: Context) -> None:
         if self.deferrable:
             self.defer(
-                trigger=TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True)
-                if AIRFLOW_V_3_0_PLUS
-                else TimeDeltaTrigger(self.time_to_wait),
+                trigger=TimeDeltaTrigger(self.time_to_wait, end_from_trigger=True),
                 method_name="execute_complete",
             )
         else:
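Taken together, these hunks drop the Airflow 2 compatibility shims: the sensor now reads `data_interval_end` straight from the task context and always builds its trigger with `end_from_trigger`, instead of probing the installed Airflow version at runtime. A minimal standalone sketch of the resulting target-time arithmetic (the `context` dict and `delta` below are illustrative stand-ins, not real Airflow objects):

```python
from datetime import datetime, timedelta, timezone

# Illustrative stand-ins for the task context and the sensor's delta.
context = {"data_interval_end": datetime(2024, 1, 1, tzinfo=timezone.utc)}
delta = timedelta(hours=1)

# The simplified computation from the new poke()/execute() bodies:
target_dttm = context["data_interval_end"]
target_dttm += delta

# poke() succeeds once the wall clock passes the target; the async variant
# would instead defer on DateTimeTrigger(moment=target_dttm, ...).
print(datetime.now(timezone.utc) > target_dttm)  # True for a past interval
```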
airflow/providers/standard/sensors/weekday.py

@@ -17,19 +17,14 @@
 # under the License.
 from __future__ import annotations

-from
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Iterable

 from airflow.sensors.base import BaseSensorOperator
 from airflow.utils import timezone
 from airflow.utils.weekday import WeekDay

 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.utils.context import Context


 class DayOfWeekSensor(BaseSensorOperator):
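The `Iterable` added to the `typing` import types the sensor's `week_day` parameter, which may be a single day or a collection of days. A rough sketch of the membership check that `DayOfWeekSensor.poke` boils down to, assuming Airflow is installed and `week_day` has already been normalized to a set of `WeekDay` values (the variable names here are hypothetical):

```python
from airflow.utils import timezone
from airflow.utils.weekday import WeekDay

week_day_set = {WeekDay.MONDAY, WeekDay.FRIDAY}  # hypothetical configuration

# WeekDay is an IntEnum with MONDAY == 1 ... SUNDAY == 7, matching isoweekday().
today = WeekDay(timezone.utcnow().isoweekday())
print(today in week_day_set)
```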
{apache_airflow_providers_standard-0.1.0rc1.dist-info → apache_airflow_providers_standard-1.0.0.dev1.dist-info}/METADATA

@@ -1,11 +1,11 @@
-Metadata-Version: 2.
+Metadata-Version: 2.1
 Name: apache-airflow-providers-standard
-Version: 0.1.0rc1
+Version: 1.0.0.dev1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python: ~=3.9
+Requires-Python: ~=3.8
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console

@@ -15,22 +15,39 @@ Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.10.0.dev0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.0.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://
+Project-URL: Twitter, https://twitter.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/


+.. Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements. See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership. The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License. You may obtain a copy of the License at
+
+.. http://www.apache.org/licenses/LICENSE-2.0
+
+.. Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied. See the License for the
+   specific language governing permissions and limitations
+   under the License.
+
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information

@@ -48,7 +65,8 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
    specific language governing permissions and limitations
    under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+   OVERWRITTEN WHEN PREPARING PACKAGES.

 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY

@@ -56,7 +74,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-standard``

-Release: ``0.1.0rc1``
+Release: ``1.0.0.dev1``


 Airflow Standard Provider

@@ -69,7 +87,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.0.0/>`_.

 Installation
 ------------

@@ -78,18 +96,16 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-standard``

-The package supports the following python versions: 3.9,3.10,3.11,3.12
+The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12

 Requirements
 ------------

-======================================= ==================
-PIP package
-======================================= ==================
-``apache-airflow``
-
-======================================= ==================
+================== ==================
+PIP package        Version required
+================== ==================
+``apache-airflow`` ``>=2.10.0``
+================== ==================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/
-
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.0.0/changelog.html>`_.
apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD

@@ -0,0 +1,17 @@
+airflow/providers/standard/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
+airflow/providers/standard/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/standard/get_provider_info.py,sha256=nFqgVL44xY7Xwa-Y2mn8SLMD-NH7lW_aNqgT17f0gLo,2559
+airflow/providers/standard/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/operators/bash.py,sha256=mxHM1Uc57Twh-OS1r3nCX7QKpatsBVb1PJVI3OEGJCk,10801
+airflow/providers/standard/operators/datetime.py,sha256=nk0gwO_H8vIIu8ztA4zryWqZeoSokfINTu4o2vPUcPc,4562
+airflow/providers/standard/operators/weekday.py,sha256=ws1FHHxFVEZhq8MDfBvNmI9pjH5QcHqueRkuanhayeQ,4474
+airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/standard/sensors/bash.py,sha256=Gyi8zMwSESPyanlG9jbN9u-an6Vj33s7lTlmjWmjgSA,4821
+airflow/providers/standard/sensors/date_time.py,sha256=WKoMTLuSYwNbsvvkndRmiTGa6GN3jeolAGazbPDMeUk,5179
+airflow/providers/standard/sensors/time.py,sha256=nhKqn7eQKnx-F-MSG_yrlN7RM8ZM3iOSUwgqxc_F2SQ,4142
+airflow/providers/standard/sensors/time_delta.py,sha256=YMNELt0m89PvWsYmWWO-m6RW-ICPOZ8rcABrmt-4xxc,4660
+airflow/providers/standard/sensors/weekday.py,sha256=PMg0eoGuD0xNLSJIBY9C1Y0aqSZn6PkQ_j_eYo7lnks,3695
+apache_airflow_providers_standard-1.0.0.dev1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.0.0.dev1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_standard-1.0.0.dev1.dist-info/METADATA,sha256=Q_aHdns_3O3-ukEP8lq8HJvSd9QZ0ENFnK-MSDOO_1g,4698
+apache_airflow_providers_standard-1.0.0.dev1.dist-info/RECORD,,
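Each RECORD row is `path,hash,size`, where the hash is the urlsafe-base64 SHA-256 digest of the file with the trailing `=` padding stripped, per the wheel specification (RECORD lists no hash for itself). A small sketch of how such an entry can be reproduced; `record_entry` is a hypothetical helper name:

```python
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    # urlsafe base64 of the SHA-256 digest, '=' padding stripped (wheel spec).
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"

print(record_entry("airflow/providers/standard/__init__.py"))
```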
airflow/providers/standard/hooks/__init__.py

@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
airflow/providers/standard/hooks/filesystem.py

@@ -1,89 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any
-
-from airflow.hooks.base import BaseHook
-
-
-class FSHook(BaseHook):
-    """
-    Allows for interaction with an file server.
-
-    Connection should have a name and a path specified under extra:
-
-    example:
-    Connection Id: fs_test
-    Connection Type: File (path)
-    Host, Schema, Login, Password, Port: empty
-    Extra: {"path": "/tmp"}
-    """
-
-    conn_name_attr = "fs_conn_id"
-    default_conn_name = "fs_default"
-    conn_type = "fs"
-    hook_name = "File (path)"
-
-    @classmethod
-    def get_connection_form_widgets(cls) -> dict[str, Any]:
-        """Return connection widgets to add to connection form."""
-        from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
-        from flask_babel import lazy_gettext
-        from wtforms import StringField
-
-        return {"path": StringField(lazy_gettext("Path"), widget=BS3TextFieldWidget())}
-
-    @classmethod
-    def get_ui_field_behaviour(cls) -> dict[str, Any]:
-        """Return custom field behaviour."""
-        return {
-            "hidden_fields": ["host", "schema", "port", "login", "password", "extra"],
-            "relabeling": {},
-            "placeholders": {},
-        }
-
-    def __init__(self, fs_conn_id: str = default_conn_name, **kwargs):
-        super().__init__(**kwargs)
-        conn = self.get_connection(fs_conn_id)
-        self.basepath = conn.extra_dejson.get("path", "")
-        self.conn = conn
-
-    def get_conn(self) -> None:
-        pass
-
-    def get_path(self) -> str:
-        """
-        Get the path to the filesystem location.
-
-        :return: the path.
-        """
-        return self.basepath
-
-    def test_connection(self):
-        """Test File connection."""
-        try:
-            p = self.get_path()
-            if not p:
-                return False, "File Path is undefined."
-            if not Path(p).exists():
-                return False, f"Path {p} does not exist."
-            return True, f"Path {p} is existing."
-        except Exception as e:
-            return False, str(e)
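For reference, a short sketch of how the removed FSHook was typically consumed in 0.1.0rc1 (assumes a `fs_default` connection whose Extra contains a `path`; this import path no longer exists in 1.0.0.dev1):

```python
from airflow.providers.standard.hooks.filesystem import FSHook

# Resolve the base path stored in the connection's Extra, e.g. {"path": "/tmp"}.
hook = FSHook(fs_conn_id="fs_default")
print(hook.get_path())

# test_connection() returns a (bool, message) tuple rather than raising.
ok, message = hook.test_connection()
print(ok, message)
```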
airflow/providers/standard/hooks/package_index.py

@@ -1,95 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""Hook for additional Package Indexes (Python)."""
-
-from __future__ import annotations
-
-import subprocess
-from typing import Any
-from urllib.parse import quote, urlparse
-
-from airflow.hooks.base import BaseHook
-
-
-class PackageIndexHook(BaseHook):
-    """Specify package indexes/Python package sources using Airflow connections."""
-
-    conn_name_attr = "pi_conn_id"
-    default_conn_name = "package_index_default"
-    conn_type = "package_index"
-    hook_name = "Package Index (Python)"
-
-    def __init__(self, pi_conn_id: str = default_conn_name, **kwargs) -> None:
-        super().__init__(**kwargs)
-        self.pi_conn_id = pi_conn_id
-        self.conn = None
-
-    @staticmethod
-    def get_ui_field_behaviour() -> dict[str, Any]:
-        """Return custom field behaviour."""
-        return {
-            "hidden_fields": ["schema", "port", "extra"],
-            "relabeling": {"host": "Package Index URL"},
-            "placeholders": {
-                "host": "Example: https://my-package-mirror.net/pypi/repo-name/simple",
-                "login": "Username for package index",
-                "password": "Password for package index (will be masked)",
-            },
-        }
-
-    @staticmethod
-    def _get_basic_auth_conn_url(index_url: str, user: str | None, password: str | None) -> str:
-        """Return a connection URL with basic auth credentials based on connection config."""
-        url = urlparse(index_url)
-        host = url.netloc.split("@")[-1]
-        if user:
-            if password:
-                host = f"{quote(user)}:{quote(password)}@{host}"
-            else:
-                host = f"{quote(user)}@{host}"
-        return url._replace(netloc=host).geturl()
-
-    def get_conn(self) -> Any:
-        """Return connection for the hook."""
-        return self.get_connection_url()
-
-    def get_connection_url(self) -> Any:
-        """Return a connection URL with embedded credentials."""
-        conn = self.get_connection(self.pi_conn_id)
-        index_url = conn.host
-        if not index_url:
-            raise ValueError("Please provide an index URL.")
-        return self._get_basic_auth_conn_url(index_url, conn.login, conn.password)
-
-    def test_connection(self) -> tuple[bool, str]:
-        """Test connection to package index url."""
-        conn_url = self.get_connection_url()
-        proc = subprocess.run(
-            ["pip", "search", "not-existing-test-package", "--no-input", "--index", conn_url],
-            check=False,
-            capture_output=True,
-        )
-        conn = self.get_connection(self.pi_conn_id)
-        if proc.returncode not in [
-            0,  # executed successfully, found package
-            23,  # executed successfully, didn't find any packages
-            # (but we do not expect it to find 'not-existing-test-package')
-        ]:
-            return False, f"Connection test to {conn.host} failed. Error: {str(proc.stderr)}"
-
-        return True, f"Connection to {conn.host} tested successfully!"
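The interesting piece of the removed hook is `_get_basic_auth_conn_url`, which strips any credentials already present in the netloc and re-embeds URL-quoted ones. A standalone restatement of that logic for illustration (`basic_auth_url` is a hypothetical name, not part of any Airflow API):

```python
from urllib.parse import quote, urlparse

def basic_auth_url(index_url: str, user: str | None, password: str | None) -> str:
    url = urlparse(index_url)
    # Drop anything before '@' so existing credentials are never duplicated.
    host = url.netloc.split("@")[-1]
    if user and password:
        host = f"{quote(user)}:{quote(password)}@{host}"
    elif user:
        host = f"{quote(user)}@{host}"
    return url._replace(netloc=host).geturl()

print(basic_auth_url("https://my-package-mirror.net/pypi/repo-name/simple", "bob", "p@ss"))
# -> https://bob:p%40ss@my-package-mirror.net/pypi/repo-name/simple
```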
airflow/providers/standard/hooks/subprocess.py

@@ -1,119 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-import contextlib
-import os
-import signal
-from collections import namedtuple
-from collections.abc import Iterator
-from subprocess import PIPE, STDOUT, Popen
-from tempfile import TemporaryDirectory, gettempdir
-
-from airflow.hooks.base import BaseHook
-
-SubprocessResult = namedtuple("SubprocessResult", ["exit_code", "output"])
-
-
-@contextlib.contextmanager
-def working_directory(cwd: str | None = None) -> Iterator[str]:
-    """
-    Context manager for handling (temporary) working directory.
-
-    Use the given cwd as working directory, if provided.
-    Otherwise, create a temporary directory.
-    """
-    with contextlib.ExitStack() as stack:
-        if cwd is None:
-            cwd = stack.enter_context(TemporaryDirectory(prefix="airflowtmp"))
-        yield cwd
-
-
-class SubprocessHook(BaseHook):
-    """Hook for running processes with the ``subprocess`` module."""
-
-    def __init__(self, **kwargs) -> None:
-        self.sub_process: Popen[bytes] | None = None
-        super().__init__(**kwargs)
-
-    def run_command(
-        self,
-        command: list[str],
-        env: dict[str, str] | None = None,
-        output_encoding: str = "utf-8",
-        cwd: str | None = None,
-    ) -> SubprocessResult:
-        """
-        Execute the command.
-
-        If ``cwd`` is None, execute the command in a temporary directory which will be cleaned afterwards.
-        If ``env`` is not supplied, ``os.environ`` is passed
-
-        :param command: the command to run
-        :param env: Optional dict containing environment variables to be made available to the shell
-            environment in which ``command`` will be executed. If omitted, ``os.environ`` will be used.
-            Note, that in case you have Sentry configured, original variables from the environment
-            will also be passed to the subprocess with ``SUBPROCESS_`` prefix. See:
-            https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/errors.html for details.
-        :param output_encoding: encoding to use for decoding stdout
-        :param cwd: Working directory to run the command in.
-            If None (default), the command is run in a temporary directory.
-        :return: :class:`namedtuple` containing ``exit_code`` and ``output``, the last line from stderr
-            or stdout
-        """
-        self.log.info("Tmp dir root location: %s", gettempdir())
-        with working_directory(cwd=cwd) as cwd:
-
-            def pre_exec():
-                # Restore default signal disposition and invoke setsid
-                for sig in ("SIGPIPE", "SIGXFZ", "SIGXFSZ"):
-                    if hasattr(signal, sig):
-                        signal.signal(getattr(signal, sig), signal.SIG_DFL)
-                os.setsid()
-
-            self.log.info("Running command: %s", command)
-
-            self.sub_process = Popen(
-                command,
-                stdout=PIPE,
-                stderr=STDOUT,
-                cwd=cwd,
-                env=env if env or env == {} else os.environ,
-                preexec_fn=pre_exec,
-            )
-
-            self.log.info("Output:")
-            line = ""
-            if self.sub_process is None:
-                raise RuntimeError("The subprocess should be created here and is None!")
-            if self.sub_process.stdout is not None:
-                for raw_line in iter(self.sub_process.stdout.readline, b""):
-                    line = raw_line.decode(output_encoding, errors="backslashreplace").rstrip()
-                    self.log.info("%s", line)
-
-            self.sub_process.wait()
-
-            self.log.info("Command exited with return code %s", self.sub_process.returncode)
-            return_code: int = self.sub_process.returncode
-
-        return SubprocessResult(exit_code=return_code, output=line)
-
-    def send_sigterm(self):
-        """Send SIGTERM signal to ``self.sub_process`` if one exists."""
-        self.log.info("Sending SIGTERM signal to process group")
-        if self.sub_process and hasattr(self.sub_process, "pid"):
-            os.killpg(os.getpgid(self.sub_process.pid), signal.SIGTERM)
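Usage of the removed hook, for context: `run_command` merges stderr into stdout, logs each line, and returns a `SubprocessResult` namedtuple whose `output` field holds only the last emitted line. A minimal sketch against the 0.1.0rc1 import path:

```python
from airflow.providers.standard.hooks.subprocess import SubprocessHook

hook = SubprocessHook()
# cwd=None runs the command in a throwaway temporary directory.
result = hook.run_command(command=["bash", "-c", "echo first; echo last"])
print(result.exit_code, result.output)  # 0 last
```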
airflow/providers/standard/operators/empty.py

@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from airflow.models.baseoperator import BaseOperator
-
-if TYPE_CHECKING:
-    from airflow.sdk.definitions.context import Context
-
-
-class EmptyOperator(BaseOperator):
-    """
-    Operator that does literally nothing.
-
-    It can be used to group tasks in a DAG.
-    The task is evaluated by the scheduler but never processed by the executor.
-    """
-
-    ui_color = "#e8f7e4"
-    inherits_from_empty_operator = True
-
-    def execute(self, context: Context):
-        pass
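EmptyOperator is commonly used as a fan-in/fan-out anchor: the task is scheduled but never handed to the executor. A minimal sketch of that grouping pattern against the 0.1.0rc1 import path (the DAG id and dates are illustrative):

```python
import pendulum

from airflow import DAG
from airflow.providers.standard.operators.empty import EmptyOperator

with DAG(dag_id="empty_demo", start_date=pendulum.datetime(2024, 1, 1), schedule=None):
    start = EmptyOperator(task_id="start")
    done = EmptyOperator(task_id="done")
    start >> done
```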