apache-airflow-providers-http 5.3.4rc1__tar.gz → 5.4.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/PKG-INFO +7 -7
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/README.rst +3 -3
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/changelog.rst +16 -2
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/index.rst +2 -1
- apache_airflow_providers_http-5.4.0/docs/triggers.rst +143 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/provider.yaml +2 -1
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/pyproject.toml +4 -4
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/__init__.py +1 -1
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/triggers/http.py +112 -14
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/version_compat.py +1 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/triggers/test_http.py +112 -1
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/commits.rst +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/conf.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/connections/http.rst +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/integration-logos/HTTP.png +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/operators.rst +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/security.rst +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/LICENSE +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/exceptions.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/get_provider_info.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/hooks/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/hooks/http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/operators/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/operators/http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/sensors/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/sensors/http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/triggers/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/conftest.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/system/http/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/system/http/example_http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/hooks/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/hooks/test_http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/operators/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/operators/test_http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/sensors/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/sensors/test_http.py +0 -0
- {apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/triggers/__init__.py +0 -0
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-http
-Version: 5.3.4rc1
+Version: 5.4.0
 Summary: Provider package apache-airflow-providers-http for Apache Airflow
 Keywords: airflow-provider,http,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,14 +20,14 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.
+Requires-Dist: apache-airflow>=2.10.0
 Requires-Dist: requests>=2.32.0,<3
 Requires-Dist: requests-toolbelt>=1.0.0
 Requires-Dist: aiohttp>=3.12.14
 Requires-Dist: asgiref>=2.3.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -58,7 +58,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-http``
 
-Release: ``5.
+Release: ``5.4.0``
 
 
 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -71,7 +71,7 @@ This is a provider package for ``http`` provider. All classes for this provider
 are in ``airflow.providers.http`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0/>`_.
 
 Installation
 ------------
@@ -96,5 +96,5 @@ PIP package Version required
 ===================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0/changelog.html>`_.
 
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/README.rst
RENAMED

@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-http``
 
-Release: ``5.
+Release: ``5.4.0``
 
 
 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``http`` provider. All classes for this provider
 are in ``airflow.providers.http`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0/>`_.
 
 Installation
 ------------
@@ -61,4 +61,4 @@ PIP package Version required
 ===================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0/changelog.html>`_.
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/changelog.rst
RENAMED
@@ -27,11 +27,25 @@
 Changelog
 ---------
 
-5.3.4
+5.4.0
 .....
 
+Features
+~~~~~~~~
+
+* ``Create HttpEventTrigger for AssetWatcher library (#51253)``
+
+Doc-only
+~~~~~~~~
+
+* ``Remove placeholder Release Date in changelog and index files (#56056)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
+5.3.4
+.....
 
-Release Date: ``|PypiReleaseDate|``
 
 Bug Fixes
 ~~~~~~~~~
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/docs/index.rst
RENAMED
@@ -36,6 +36,7 @@
 
     Connection types <connections/http>
     Operators <operators>
+    Triggers <triggers>
 
 .. toctree::
     :hidden:
@@ -77,7 +78,7 @@ apache-airflow-providers-http package
 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
 
 
-Release: 5.
+Release: 5.4.0
 
 Provider package
 ----------------
apache_airflow_providers_http-5.4.0/docs/triggers.rst
ADDED

.. Licensed to the Apache Software Foundation (ASF) under one
   or more contributor license agreements. See the NOTICE file
   distributed with this work for additional information
   regarding copyright ownership. The ASF licenses this file
   to you under the Apache License, Version 2.0 (the
   "License"); you may not use this file except in compliance
   with the License. You may obtain a copy of the License at

.. http://www.apache.org/licenses/LICENSE-2.0

.. Unless required by applicable law or agreed to in writing,
   software distributed under the License is distributed on an
   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
   KIND, either express or implied. See the License for the
   specific language governing permissions and limitations
   under the License.

HTTP Event Trigger
==================

.. _howto/trigger:HttpEventTrigger:

The ``HttpEventTrigger`` is an event-based trigger that monitors whether responses
from an API meet the conditions set by the user in the ``response_check`` callable.

It is designed for **Airflow 3.0+** to be used in combination with the ``AssetWatcher`` system,
enabling event-driven DAGs based on API responses.

How It Works
------------

1. Sends requests to an API.
2. Uses the callable at ``response_check_path`` to evaluate the API response.
3. If the callable returns ``True``, a ``TriggerEvent`` is emitted. This will trigger DAGs using this ``AssetWatcher`` for scheduling.

.. note::
    This trigger requires **Airflow >= 3.0** due to dependencies on ``AssetWatcher`` and event-driven scheduling infrastructure.

Usage Example with AssetWatcher
-------------------------------

Here's an example of using the HttpEventTrigger in an AssetWatcher to monitor the GitHub API for new Airflow releases.

.. code-block:: python

    import datetime
    import os

    from asgiref.sync import sync_to_async

    from airflow.providers.http.triggers.http import HttpEventTrigger
    from airflow.sdk import Asset, AssetWatcher, Variable, dag, task

    # This token must be generated through GitHub and added as an environment variable
    token = os.getenv("GITHUB_TOKEN")

    headers = {
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {token}",
        "X-GitHub-Api-Version": "2022-11-28",
    }


    async def check_github_api_response(response):
        data = response.json()
        release_id = str(data["id"])
        get_variable_sync = sync_to_async(Variable.get)
        previous_release_id = await get_variable_sync(key="release_id_var", default=None)
        if release_id == previous_release_id:
            return False
        release_name = data["name"]
        release_html_url = data["html_url"]
        set_variable_sync = sync_to_async(Variable.set)
        await set_variable_sync(key="release_id_var", value=str(release_id))
        await set_variable_sync(key="release_name_var", value=release_name)
        await set_variable_sync(key="release_html_url_var", value=release_html_url)
        return True


    trigger = HttpEventTrigger(
        endpoint="repos/apache/airflow/releases/latest",
        method="GET",
        http_conn_id="http_default",  # HTTP connection with https://api.github.com/ as the Host
        headers=headers,
        response_check_path="dags.check_airflow_releases.check_github_api_response",  # Path to the check_github_api_response callable
    )

    asset = Asset(
        "airflow_releases_asset", watchers=[AssetWatcher(name="airflow_releases_watcher", trigger=trigger)]
    )


    @dag(start_date=datetime.datetime(2024, 10, 1), schedule=asset, catchup=False)
    def check_airflow_releases():
        @task()
        def print_airflow_release_info():
            release_name = Variable.get("release_name_var")
            release_html_url = Variable.get("release_html_url_var")
            print(f"{release_name} has been released. Check it out at {release_html_url}")

        print_airflow_release_info()


    check_airflow_releases()

Parameters
----------

``http_conn_id``
    http connection id that has the base API url i.e https://www.google.com/ and optional authentication credentials.
    Default headers can also be specified in the Extra field in json format.

``auth_type``
    The auth type for the service

``method``
    the API method to be called

``endpoint``
    Endpoint to be called, i.e. ``resource/v1/query?``

``headers``
    Additional headers to be passed through as a dict

``data``
    Payload to be uploaded or request parameters

``extra_options``
    Additional kwargs to pass when creating a request.

``response_check_path``
    Path to callable that evaluates whether the API response passes the conditions set by the user to trigger DAGs


Important Notes
---------------

1. A ``response_check_path`` value is required.
2. The ``response_check_path`` must contain the path to an asynchronous callable. Synchronous callables will raise an exception.
3. This trigger does not automatically record the previous API response.
4. The previous response may have to be persisted manually though ``Variable.set()`` in the ``response_check_path`` callable to prevent the trigger from emitting events repeatedly for the same API response.
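As a side note on the contract the new ``triggers.rst`` page describes: ``response_check_path`` has to resolve to an importable coroutine that receives the converted ``requests.Response`` and returns a boolean. A minimal sketch of such a callable, assuming a hypothetical module ``dags/status_checks.py`` and an endpoint that reports a ``state`` field:

    # dags/status_checks.py -- hypothetical module; the dotted path handed to the
    # trigger would then be "dags.status_checks.job_finished".
    from __future__ import annotations

    import requests


    async def job_finished(response: requests.Response) -> bool:
        """Return True (and thereby emit a TriggerEvent) once the API reports completion."""
        # The trigger converts the aiohttp response to a requests.Response before the
        # check runs, so .status_code and .json() are available here.
        if response.status_code != 200:
            return False
        payload = response.json()
        return payload.get("state") == "done"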
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/provider.yaml
RENAMED
@@ -22,12 +22,13 @@ description: |
   `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
 
 state: ready
-source-date-epoch:
+source-date-epoch: 1761116964
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 5.4.0
   - 5.3.4
   - 5.3.3
   - 5.3.2
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/pyproject.toml
RENAMED
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-http"
-version = "5.3.4rc1"
+version = "5.4.0"
 description = "Provider package apache-airflow-providers-http for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,7 +57,7 @@ requires-python = ">=3.10"
 # Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.10.
+    "apache-airflow>=2.10.0",
     # The 2.26.0 release of requests got rid of the chardet LGPL mandatory dependency, allowing us to
     # release it as a requirement for airflow
     "requests>=2.32.0,<3",
@@ -100,8 +100,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.
-"Changelog" = "https://airflow.
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.4.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/__init__.py
RENAMED

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "5.
+__version__ = "5.4.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/src/airflow/providers/http/triggers/http.py
RENAMED

@@ -18,20 +18,30 @@ from __future__ import annotations
 
 import asyncio
 import base64
+import importlib
+import inspect
 import pickle
+import sys
 from collections.abc import AsyncIterator
 from importlib import import_module
 from typing import TYPE_CHECKING, Any
 
 import aiohttp
 import requests
+from asgiref.sync import sync_to_async
 from requests.cookies import RequestsCookieJar
 from requests.structures import CaseInsensitiveDict
 
 from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpAsyncHook
+from airflow.providers.http.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.triggers.base import BaseEventTrigger
+else:
+    from airflow.triggers.base import BaseTrigger as BaseEventTrigger  # type: ignore
+
 if TYPE_CHECKING:
     from aiohttp.client_reqrep import ClientResponse
 
@@ -105,21 +115,9 @@ class HttpTrigger(BaseTrigger):
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make a series of asynchronous http calls via a http hook."""
-        hook = HttpAsyncHook(
-            method=self.method,
-            http_conn_id=self.http_conn_id,
-            auth_type=self.auth_type,
-        )
+        hook = self._get_async_hook()
         try:
-            async with aiohttp.ClientSession() as session:
-                client_response = await hook.run(
-                    session=session,
-                    endpoint=self.endpoint,
-                    data=self.data,
-                    headers=self.headers,
-                    extra_options=self.extra_options,
-                )
-                response = await self._convert_response(client_response)
+            response = await self._get_response(hook)
             yield TriggerEvent(
                 {
                     "status": "success",
@@ -129,6 +127,25 @@
         except Exception as e:
             yield TriggerEvent({"status": "error", "message": str(e)})
 
+    def _get_async_hook(self) -> HttpAsyncHook:
+        return HttpAsyncHook(
+            method=self.method,
+            http_conn_id=self.http_conn_id,
+            auth_type=self.auth_type,
+        )
+
+    async def _get_response(self, hook):
+        async with aiohttp.ClientSession() as session:
+            client_response = await hook.run(
+                session=session,
+                endpoint=self.endpoint,
+                data=self.data,
+                headers=self.headers,
+                extra_options=self.extra_options,
+            )
+            response = await self._convert_response(client_response)
+            return response
+
     @staticmethod
     async def _convert_response(client_response: ClientResponse) -> requests.Response:
         """Convert aiohttp.client_reqrep.ClientResponse to requests.Response."""
@@ -219,3 +236,84 @@ class HttpSensorTrigger(BaseTrigger):
             method=self.method,
             http_conn_id=self.http_conn_id,
         )
+
+
+class HttpEventTrigger(HttpTrigger, BaseEventTrigger):
+    """
+    HttpEventTrigger for event-based DAG scheduling when the API response satisfies the response check.
+
+    :param response_check_path: Path to the function that evaluates whether the API response
+        passes the conditions set by the user to fire the trigger. The method must be asynchronous.
+    :param http_conn_id: http connection id that has the base
+        API url i.e https://www.google.com/ and optional authentication credentials. Default
+        headers can also be specified in the Extra field in json format.
+    :param auth_type: The auth type for the service
+    :param method: The API method to be called
+    :param endpoint: Endpoint to be called, i.e. ``resource/v1/query?``.
+    :param headers: Additional headers to be passed through as a dict.
+    :param data: Payload to be uploaded or request parameters.
+    :param extra_options: Additional kwargs to pass when creating a request.
+    """
+
+    def __init__(
+        self,
+        response_check_path: str,
+        http_conn_id: str = "http_default",
+        auth_type: Any = None,
+        method: str = "GET",
+        endpoint: str | None = None,
+        headers: dict[str, str] | None = None,
+        data: dict[str, Any] | str | None = None,
+        extra_options: dict[str, Any] | None = None,
+    ):
+        super().__init__(http_conn_id, auth_type, method, endpoint, headers, data, extra_options)
+        self.response_check_path = response_check_path
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize HttpEventTrigger arguments and classpath."""
+        return (
+            self.__class__.__module__ + "." + self.__class__.__qualname__,
+            {
+                "http_conn_id": self.http_conn_id,
+                "method": self.method,
+                "auth_type": self.auth_type,
+                "endpoint": self.endpoint,
+                "headers": self.headers,
+                "data": self.data,
+                "extra_options": self.extra_options,
+                "response_check_path": self.response_check_path,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Make a series of asynchronous http calls via a http hook until the response passes the response check."""
+        hook = super()._get_async_hook()
+        try:
+            while True:
+                response = await super()._get_response(hook)
+                if await self._run_response_check(response):
+                    break
+            yield TriggerEvent(
+                {
+                    "status": "success",
+                    "response": base64.standard_b64encode(pickle.dumps(response)).decode("ascii"),
+                }
+            )
+        except Exception as e:
+            self.log.error("status: error, message: %s", str(e))
+
+    async def _import_from_response_check_path(self):
+        """Import the response check callable from the path provided by the user."""
+        module_path, func_name = self.response_check_path.rsplit(".", 1)
+        if module_path in sys.modules:
+            module = await sync_to_async(importlib.reload)(sys.modules[module_path])
+        module = await sync_to_async(importlib.import_module)(module_path)
+        return getattr(module, func_name)
+
+    async def _run_response_check(self, response) -> bool:
+        """Run the response_check callable provided by the user."""
+        response_check = await self._import_from_response_check_path()
+        if not inspect.iscoroutinefunction(response_check):
+            raise AirflowException("The response_check callable is not asynchronous.")
+        check = await response_check(response)
+        return check
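The ``response_check_path`` handling added above resolves the dotted path at trigger runtime (reloading the module if it is already imported) and rejects plain synchronous functions. A standalone sketch of that resolution idea, using only the standard library; the module and function names in the usage comment are hypothetical:

    import importlib
    import inspect


    def resolve_async_callable(dotted_path: str):
        """Resolve "package.module.func" to a function object and require it to be async."""
        module_path, func_name = dotted_path.rsplit(".", 1)
        module = importlib.import_module(module_path)
        func = getattr(module, func_name)
        if not inspect.iscoroutinefunction(func):
            raise TypeError(f"{dotted_path} must be an asynchronous callable")
        return func


    # e.g. resolve_async_callable("dags.status_checks.job_finished") returns the coroutine
    # function, which the caller can then await with the HTTP response.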
{apache_airflow_providers_http-5.3.4rc1 → apache_airflow_providers_http-5.4.0}/tests/unit/http/triggers/test_http.py
RENAMED

@@ -31,7 +31,7 @@ from requests.structures import CaseInsensitiveDict
 from yarl import URL
 
 from airflow.models import Connection
-from airflow.providers.http.triggers.http import HttpSensorTrigger, HttpTrigger
+from airflow.providers.http.triggers.http import HttpEventTrigger, HttpSensorTrigger, HttpTrigger
 from airflow.triggers.base import TriggerEvent
 
 HTTP_PATH = "airflow.providers.http.triggers.http.{}"
@@ -42,6 +42,7 @@ TEST_ENDPOINT = "endpoint"
 TEST_HEADERS = {"Authorization": "Bearer test"}
 TEST_DATA = {"key": "value"}
 TEST_EXTRA_OPTIONS: dict[str, Any] = {}
+TEST_RESPONSE_CHECK_PATH = "mock.path"
 
 
 @pytest.fixture
@@ -69,6 +70,20 @@ def sensor_trigger():
     )
 
 
+@pytest.fixture
+def event_trigger():
+    return HttpEventTrigger(
+        http_conn_id=TEST_CONN_ID,
+        auth_type=TEST_AUTH_TYPE,
+        method=TEST_METHOD,
+        endpoint=TEST_ENDPOINT,
+        headers=TEST_HEADERS,
+        data=TEST_DATA,
+        extra_options=TEST_EXTRA_OPTIONS,
+        response_check_path=TEST_RESPONSE_CHECK_PATH,
+    )
+
+
 @pytest.fixture
 def client_response():
     client_response = mock.AsyncMock(ClientResponse)
@@ -192,3 +207,99 @@ class TestHttpSensorTrigger:
             "extra_options": TEST_EXTRA_OPTIONS,
             "poke_interval": 5.0,
         }
+
+
+class TestHttpEventTrigger:
+    @staticmethod
+    def _mock_run_result(result_to_mock):
+        f = Future()
+        f.set_result(result_to_mock)
+        return f
+
+    def test_serialization(self, event_trigger):
+        """
+        Asserts that the HttpEventTrigger correctly serializes its arguments
+        and classpath.
+        """
+        classpath, kwargs = event_trigger.serialize()
+        assert classpath == "airflow.providers.http.triggers.http.HttpEventTrigger"
+        assert kwargs == {
+            "http_conn_id": TEST_CONN_ID,
+            "auth_type": TEST_AUTH_TYPE,
+            "method": TEST_METHOD,
+            "endpoint": TEST_ENDPOINT,
+            "headers": TEST_HEADERS,
+            "data": TEST_DATA,
+            "extra_options": TEST_EXTRA_OPTIONS,
+            "response_check_path": TEST_RESPONSE_CHECK_PATH,
+        }
+
+    @pytest.mark.asyncio
+    @mock.patch(HTTP_PATH.format("HttpAsyncHook"))
+    async def test_trigger_on_success_yield_successfully(self, mock_hook, event_trigger, client_response):
+        """
+        Tests the HttpEventTrigger only fires once the job execution reaches a successful state.
+        """
+        mock_hook.return_value.run.return_value = self._mock_run_result(client_response)
+        event_trigger._run_response_check = mock.AsyncMock(side_effect=[False, True])
+        response = await HttpEventTrigger._convert_response(client_response)
+
+        generator = event_trigger.run()
+        actual = await generator.asend(None)
+        assert actual == TriggerEvent(
+            {
+                "status": "success",
+                "response": base64.standard_b64encode(pickle.dumps(response)).decode("ascii"),
+            }
+        )
+        assert mock_hook.return_value.run.call_count == 2
+        assert event_trigger._run_response_check.call_count == 2
+
+    @pytest.mark.asyncio
+    @mock.patch(HTTP_PATH.format("HttpAsyncHook"))
+    async def test_trigger_on_exception_logs_error_and_never_yields(
+        self, mock_hook, event_trigger, monkeypatch
+    ):
+        """
+        Tests the HttpEventTrigger logs the appropriate message and does not yield a TriggerEvent when an exception is raised.
+        """
+        mock_hook.return_value.run.side_effect = Exception("Test exception")
+        mock_logger = mock.Mock()
+        monkeypatch.setattr(type(event_trigger), "log", mock_logger)
+
+        generator = event_trigger.run()
+        with pytest.raises(StopAsyncIteration):
+            await generator.asend(None)
+
+        mock_logger.error.assert_called_once_with("status: error, message: %s", "Test exception")
+
+    @pytest.mark.asyncio
+    async def test_convert_response(self, client_response):
+        """
+        Assert convert aiohttp.client_reqrep.ClientResponse to requests.Response.
+        """
+        response = await HttpEventTrigger._convert_response(client_response)
+        assert response.content == await client_response.read()
+        assert response.status_code == client_response.status
+        assert response.headers == CaseInsensitiveDict(client_response.headers)
+        assert response.url == str(client_response.url)
+        assert response.history == [HttpEventTrigger._convert_response(h) for h in client_response.history]
+        assert response.encoding == client_response.get_encoding()
+        assert response.reason == client_response.reason
+        assert dict(response.cookies) == dict(client_response.cookies)
+
+    @pytest.mark.db_test
+    @pytest.mark.asyncio
+    @mock.patch("aiohttp.client.ClientSession.post")
+    async def test_trigger_on_post_with_data(self, mock_http_post, event_trigger):
+        """
+        Test that HttpEventTrigger posts the correct payload when a request is made.
+        """
+        generator = event_trigger.run()
+        with pytest.raises(StopAsyncIteration):
+            await generator.asend(None)
+        mock_http_post.assert_called_once()
+        _, kwargs = mock_http_post.call_args
+        assert kwargs["data"] == TEST_DATA
+        assert kwargs["json"] is None
+        assert kwargs["params"] is None
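The new tests drive the trigger's ``run()`` async generator directly with ``asend(None)`` rather than running it inside a triggerer. A tiny, Airflow-free illustration of that pattern, in case the idiom is unfamiliar:

    import asyncio


    async def fake_run():
        # Stand-in for a trigger's run(): an async generator that yields one event.
        yield {"status": "success", "value": 42}


    async def main():
        gen = fake_run()
        event = await gen.asend(None)  # advances the generator to its first yield
        assert event["status"] == "success"
        print(event)


    asyncio.run(main())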
All remaining files (those listed above with +0 -0) are renamed to the 5.4.0 path prefix without content changes.