apache-airflow-providers-http 5.3.1__tar.gz → 5.3.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/PKG-INFO +8 -9
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/README.rst +4 -4
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/changelog.rst +25 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/index.rst +1 -1
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/provider.yaml +2 -1
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/pyproject.toml +4 -5
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/__init__.py +1 -1
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/hooks/http.py +98 -60
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/operators/http.py +34 -8
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/sensors/http.py +8 -4
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/triggers/http.py +20 -4
- apache_airflow_providers_http-5.3.2/src/airflow/providers/http/version_compat.py +43 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/hooks/test_http.py +354 -336
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/operators/test_http.py +65 -2
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/sensors/test_http.py +13 -12
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/triggers/test_http.py +9 -1
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/commits.rst +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/conf.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/connections/http.rst +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/integration-logos/HTTP.png +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/operators.rst +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/security.rst +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/LICENSE +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/exceptions.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/get_provider_info.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/hooks/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/operators/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/sensors/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/triggers/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/conftest.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/system/http/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/system/http/example_http.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/hooks/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/operators/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/sensors/__init__.py +0 -0
- {apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/tests/unit/http/triggers/__init__.py +0 -0
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/PKG-INFO
RENAMED
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-http
-Version: 5.3.1
+Version: 5.3.2
 Summary: Provider package apache-airflow-providers-http for Apache Airflow
 Keywords: airflow-provider,http,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python: ~=3.9
+Requires-Python: ~=3.10
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
@@ -15,7 +15,6 @@ Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
@@ -26,8 +25,8 @@ Requires-Dist: requests-toolbelt>=1.0.0
 Requires-Dist: aiohttp>=3.9.2,!=3.11.0
 Requires-Dist: asgiref>=2.3.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -58,7 +57,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-http``

-Release: ``5.3.1``
+Release: ``5.3.2``


 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -71,7 +70,7 @@ This is a provider package for ``http`` provider. All classes for this provider
 are in ``airflow.providers.http`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/>`_.

 Installation
 ------------
@@ -80,7 +79,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-http``

-The package supports the following python versions: 3.9,3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12

 Requirements
 ------------
@@ -96,5 +95,5 @@ PIP package Version required
 ===================== ====================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/changelog.html>`_.

{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/README.rst
RENAMED
@@ -23,7 +23,7 @@

 Package ``apache-airflow-providers-http``

-Release: ``5.3.1``
+Release: ``5.3.2``


 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``http`` provider. All classes for this provider
 are in ``airflow.providers.http`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/>`_.

 Installation
 ------------
@@ -45,7 +45,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-http``

-The package supports the following python versions: 3.9,3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12

 Requirements
 ------------
@@ -61,4 +61,4 @@ PIP package Version required
 ===================== ====================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/changelog.html>`_.
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/docs/changelog.rst
RENAMED
@@ -27,6 +27,31 @@
 Changelog
 ---------

+5.3.2
+.....
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Ensure 'HttpHook.run()' does not alter 'extra_options' passed to it (#51893)``
+* ``Fix HttpOperator(deferrable=True) crash when connection has login / password (#52050)``
+* ``Enable extra_options even when extra not defined for HttpHook.run (#51746)``
+
+Misc
+~~~~
+
+* ``Move 'BaseHook' implementation to task SDK (#51873)``
+* ``Provider Migration: Replace 'BaseOperator' to Task SDK for 'apache/http' (#52528)``
+* ``Drop support for Python 3.9 (#52072)``
+* ``Use BaseSensorOperator from task sdk in providers (#52296)``
+* ``Add deprecation to 'airflow/sensors/base.py' (#52249)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Revert "Replace models.BaseOperator to Task SDK for http (#52506)" (#52515)``
+   * ``Replace models.BaseOperator to Task SDK for http (#52506)``
+   * ``Remove db usage from http provider tests (#52227)``
+
 5.3.1
 .....

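The first fix listed above (#51893) is the most visible behavioural change in this release: ``HttpHook.run()`` now copies the ``extra_options`` mapping it receives instead of mutating it while merging in the connection's extra field. A minimal sketch of that guarantee, assuming an ``http_default`` connection that points at a reachable API (the connection and endpoint are assumptions for illustration, not part of this diff):

from airflow.providers.http.hooks.http import HttpHook

hook = HttpHook(method="GET", http_conn_id="http_default")
extra_options = {"timeout": 10, "check_response": False}
snapshot = dict(extra_options)

hook.run(endpoint="/get", extra_options=extra_options)

# With 5.3.2 the caller's dict is left untouched; the hook works on copies
# (see the copy.copy calls added to hooks/http.py further down in this diff).
assert extra_options == snapshot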
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/provider.yaml
RENAMED
@@ -22,12 +22,13 @@ description: |
   `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__

 state: ready
-source-date-epoch:
+source-date-epoch: 1751473576
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 5.3.2
   - 5.3.1
   - 5.3.0
   - 5.2.2
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/pyproject.toml
RENAMED
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-http"
-version = "5.3.1"
+version = "5.3.2"
 description = "Provider package apache-airflow-providers-http for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -44,13 +44,12 @@ classifiers = [
     "Framework :: Apache Airflow",
     "Framework :: Apache Airflow :: Provider",
     "License :: OSI Approved :: Apache Software License",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Topic :: System :: Monitoring",
 ]
-requires-python = "~=3.9"
+requires-python = "~=3.10"

 # The dependencies should be modified in place in the generated file.
 # Any change in the dependencies is preserved when the file is regenerated
@@ -101,8 +100,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}

 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/__init__.py
RENAMED
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "5.3.1"
+__version__ = "5.3.2"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/hooks/http.py
RENAMED
@@ -17,7 +17,9 @@
 # under the License.
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Callable
+import copy
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any
 from urllib.parse import urlparse

 import aiohttp
@@ -31,9 +33,13 @@ from requests.models import DEFAULT_REDIRECT_LIMIT
 from requests_toolbelt.adapters.socket_options import TCPKeepAliveAdapter

 from airflow.exceptions import AirflowException
-from airflow.hooks.base import BaseHook
 from airflow.providers.http.exceptions import HttpErrorException, HttpMethodException

+try:
+    from airflow.sdk import BaseHook
+except ImportError:
+    from airflow.hooks.base import BaseHook as BaseHook  # type: ignore
+
 if TYPE_CHECKING:
     from aiohttp.client_reqrep import ClientResponse
     from requests.adapters import HTTPAdapter
@@ -48,37 +54,50 @@ def _url_from_endpoint(base_url: str | None, endpoint: str | None) -> str:
     return (base_url or "") + (endpoint or "")


-def _process_extra_options_from_connection(
-    ... (the body of the previous implementation is truncated in this diff view)
+def _process_extra_options_from_connection(
+    conn, extra_options: dict[str, Any]
+) -> tuple[dict[str, Any], dict[str, Any]]:
+    """
+    Return the updated extra options from the connection, as well as those passed.
+
+    :param conn: The HTTP Connection object passed to the Hook
+    :param extra_options: Use-defined extra options
+    :return: (tuple)
+    """
+    # Copy, to prevent changing conn.extra_dejson and extra_options
+    conn_extra_options: dict = copy.copy(conn.extra_dejson)
+    passed_extra_options: dict = copy.copy(extra_options)
+
+    stream = conn_extra_options.pop("stream", None)
+    cert = conn_extra_options.pop("cert", None)
+    proxies = conn_extra_options.pop("proxies", conn_extra_options.pop("proxy", None))
+    timeout = conn_extra_options.pop("timeout", None)
+    verify_ssl = conn_extra_options.pop("verify", conn_extra_options.pop("verify_ssl", None))
+    allow_redirects = conn_extra_options.pop("allow_redirects", None)
+    max_redirects = conn_extra_options.pop("max_redirects", None)
+    trust_env = conn_extra_options.pop("trust_env", None)
+    check_response = conn_extra_options.pop("check_response", None)
+
+    if stream is not None and "stream" not in passed_extra_options:
+        passed_extra_options["stream"] = stream
+    if cert is not None and "cert" not in passed_extra_options:
+        passed_extra_options["cert"] = cert
+    if proxies is not None and "proxy" not in passed_extra_options:
+        passed_extra_options["proxy"] = proxies
+    if timeout is not None and "timeout" not in passed_extra_options:
+        passed_extra_options["timeout"] = timeout
+    if verify_ssl is not None and "verify_ssl" not in passed_extra_options:
+        passed_extra_options["verify_ssl"] = verify_ssl
+    if allow_redirects is not None and "allow_redirects" not in passed_extra_options:
+        passed_extra_options["allow_redirects"] = allow_redirects
+    if max_redirects is not None and "max_redirects" not in passed_extra_options:
+        passed_extra_options["max_redirects"] = max_redirects
+    if trust_env is not None and "trust_env" not in passed_extra_options:
+        passed_extra_options["trust_env"] = trust_env
+    if check_response is not None and "check_response" not in passed_extra_options:
+        passed_extra_options["check_response"] = check_response
+
+    return conn_extra_options, passed_extra_options


 class HttpHook(BaseHook):
@@ -96,7 +115,6 @@ class HttpHook(BaseHook):
     :param tcp_keep_alive_count: The TCP Keep Alive count parameter (corresponds to ``socket.TCP_KEEPCNT``)
     :param tcp_keep_alive_interval: The TCP Keep Alive interval parameter (corresponds to
         ``socket.TCP_KEEPINTVL``)
-    :param auth_args: extra arguments used to initialize the auth_type if different than default HTTPBasicAuth
     """

     conn_name_attr = "http_conn_id"
@@ -135,6 +153,8 @@ class HttpHook(BaseHook):
         else:
             self.keep_alive_adapter = None

+        self.merged_extra: dict = {}
+
     @property
     def auth_type(self):
         return self._auth_type or HTTPBasicAuth
@@ -158,9 +178,15 @@ class HttpHook(BaseHook):
         session = Session()
         connection = self.get_connection(self.http_conn_id)
         self._set_base_url(connection)
-        session = self._configure_session_from_auth(session, connection)
-        if connection.extra:
+        session = self._configure_session_from_auth(session, connection)  # type: ignore[arg-type]
+
+        # Since get_conn can be called outside of run, we'll check this again
+        extra_options = extra_options or {}
+
+        if connection.extra or extra_options:
+            # These are being passed from to _configure_session_from_extra, no manipulation has been done yet
             session = self._configure_session_from_extra(session, connection, extra_options)
+
         session = self._configure_session_from_mount_adapters(session)
         if self.default_headers:
             session.headers.update(self.default_headers)
@@ -168,7 +194,7 @@ class HttpHook(BaseHook):
             session.headers.update(headers)
         return session

-    def _set_base_url(self, connection
+    def _set_base_url(self, connection) -> None:
         host = connection.host or self.default_host
         schema = connection.schema or "http"
         # RFC 3986 (https://www.rfc-editor.org/rfc/rfc3986.html#page-16)
@@ -194,21 +220,33 @@ class HttpHook(BaseHook):
         return None

     def _configure_session_from_extra(
-        self, session: Session, connection
+        self, session: Session, connection, extra_options: dict[str, Any]
     ) -> Session:
-        ... (the removed body of the previous implementation is truncated in this diff view)
+        """
+        Configure the session using both the extra field from the Connection and passed in extra_options.
+
+        :param session: (Session)
+        :param connection: HTTP Connection passed into Hook
+        :param extra_options: (dict)
+        :return: (Session)
+        """
+        # This is going to update self.merged_extra, which will be used below
+        conn_extra_options, self.merged_extra = _process_extra_options_from_connection(
+            connection, extra_options
+        )
+
+        session.proxies = self.merged_extra.get("proxies", self.merged_extra.get("proxy", {}))
+        session.stream = self.merged_extra.get("stream", False)
+        session.verify = self.merged_extra.get("verify", self.merged_extra.get("verify_ssl", True))
+        session.cert = self.merged_extra.get("cert", None)
+        session.max_redirects = self.merged_extra.get("max_redirects", DEFAULT_REDIRECT_LIMIT)
+        session.trust_env = self.merged_extra.get("trust_env", True)
+
         try:
-            session.headers.update(
+            session.headers.update(conn_extra_options)
         except TypeError:
             self.log.warning("Connection to %s has invalid extra field.", connection.host)
+
         return session

     def _configure_session_from_mount_adapters(self, session: Session) -> Session:
@@ -245,9 +283,7 @@ class HttpHook(BaseHook):
         For example, ``run(json=obj)`` is passed as ``requests.Request(json=obj)``
         """
         extra_options = extra_options or {}
-
-        session = self.get_conn(headers, extra_options)
-
+        session = self.get_conn(headers, extra_options)  # This sets self.merged_extra, which is used later
         url = self.url_from_endpoint(endpoint)

         if self.method == "GET":
@@ -262,7 +298,9 @@ class HttpHook(BaseHook):

         prepped_request = session.prepare_request(req)
         self.log.debug("Sending '%s' to url: %s", self.method, url)
-        return self.run_and_check(session, prepped_request, extra_options)
+
+        # This is referencing self.merged_extra, which is update by _process ...
+        return self.run_and_check(session, prepped_request, self.merged_extra)

     def check_response(self, response: Response) -> None:
         """
@@ -294,14 +332,12 @@ class HttpHook(BaseHook):
         i.e. ``{'check_response': False}`` to avoid checking raising exceptions on non 2XX
         or 3XX status codes
         """
-        extra_options = extra_options or {}
-
         settings = session.merge_environment_settings(
             prepped_request.url,
-            proxies=
-            stream=
-            verify=
-            cert=
+            proxies=session.proxies,
+            stream=session.stream,
+            verify=session.verify,
+            cert=session.cert,
         )

         # Send the request.
@@ -439,10 +475,12 @@ class HttpAsyncHook(BaseHook):
         if conn.login:
             auth = self.auth_type(conn.login, conn.password)
         if conn.extra:
-            ... (removed line truncated in this diff view)
+            conn_extra_options, extra_options = _process_extra_options_from_connection(
+                conn=conn, extra_options=extra_options
+            )

             try:
-                _headers.update(
+                _headers.update(conn_extra_options)
             except TypeError:
                 self.log.warning("Connection to %s has invalid extra field.", conn.host)
         if headers:
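A quick sketch of the merge rules implemented by the rewritten ``_process_extra_options_from_connection`` helper above. The stand-in connection object is an assumption for illustration (the helper only reads ``extra_dejson``), and the function is a private detail of the hook rather than public API:

from types import SimpleNamespace

from airflow.providers.http.hooks.http import _process_extra_options_from_connection

conn = SimpleNamespace(extra_dejson={"timeout": 60, "verify": False, "X-Api-Key": "abc"})
passed = {"timeout": 5}

leftover, merged = _process_extra_options_from_connection(conn, passed)

assert merged["timeout"] == 5            # options passed by the caller win
assert merged["verify_ssl"] is False     # unset options are filled in from the connection extra
assert leftover == {"X-Api-Key": "abc"}  # non-option keys are left over for session.headers
assert passed == {"timeout": 5}          # the caller's dict is never modified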
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/operators/http.py
RENAMED
@@ -19,16 +19,21 @@ from __future__ import annotations

 import base64
 import pickle
-from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any
+from collections.abc import Callable, Sequence
+from typing import TYPE_CHECKING, Any

+from aiohttp import BasicAuth
 from requests import Response

 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-... (two removed import lines truncated in this diff view)
-from airflow.
+
+try:
+    from airflow.sdk import BaseHook
+except ImportError:
+    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.http.triggers.http import HttpTrigger, serialize_auth_type
+from airflow.providers.http.version_compat import BaseOperator
 from airflow.utils.helpers import merge_dicts

 if TYPE_CHECKING:
@@ -122,7 +127,7 @@ class HttpOperator(BaseOperator):
         request_kwargs: dict[str, Any] | None = None,
         http_conn_id: str = "http_default",
         log_response: bool = False,
-        auth_type: type[AuthBase] | None = None,
+        auth_type: type[AuthBase] | type[BasicAuth] | None = None,
         tcp_keep_alive: bool = True,
         tcp_keep_alive_idle: int = 120,
         tcp_keep_alive_count: int = 20,
@@ -221,7 +226,7 @@ class HttpOperator(BaseOperator):
         self.defer(
             trigger=HttpTrigger(
                 http_conn_id=self.http_conn_id,
-                auth_type=self.auth_type,
+                auth_type=serialize_auth_type(self._resolve_auth_type()),
                 method=self.method,
                 endpoint=self.endpoint,
                 headers=self.headers,
@@ -231,6 +236,27 @@ class HttpOperator(BaseOperator):
             method_name="execute_complete",
         )

+    def _resolve_auth_type(self) -> type[AuthBase] | type[BasicAuth] | None:
+        """
+        Resolve the authentication type for the HTTP request.
+
+        If auth_type is not explicitly set, attempt to infer it from the connection configuration.
+        For connections with login/password, default to BasicAuth.
+
+        :return: The resolved authentication type class, or None if no auth is needed.
+        """
+        if self.auth_type is not None:
+            return self.auth_type
+
+        try:
+            conn = BaseHook.get_connection(self.http_conn_id)
+            if conn.login or conn.password:
+                return BasicAuth
+        except Exception as e:
+            self.log.warning("Failed to resolve auth type from connection: %s", e)
+
+        return None
+
     def process_response(self, context: Context, response: Response | list[Response]) -> Any:
         """Process the response."""
         from airflow.utils.operator_helpers import determine_kwargs
@@ -291,7 +317,7 @@ class HttpOperator(BaseOperator):
         self.defer(
             trigger=HttpTrigger(
                 http_conn_id=self.http_conn_id,
-                auth_type=self.auth_type,
+                auth_type=serialize_auth_type(self._resolve_auth_type()),
                 method=self.method,
                 **self._merge_next_page_parameters(next_page_params),
             ),
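The new ``_resolve_auth_type`` above is what fixes the ``deferrable=True`` crash noted in the changelog (#52050): when no ``auth_type`` is given, a connection that carries credentials now falls back to ``aiohttp.BasicAuth``, which the trigger can serialize, instead of handing the trigger an unserializable auth object. A standalone sketch of that decision rule (the ``conn`` stand-in and the helper name are illustrative only, not package code):

from types import SimpleNamespace

from aiohttp import BasicAuth


def resolve_auth_type_like(explicit_auth, conn):
    # An explicitly configured auth_type always wins.
    if explicit_auth is not None:
        return explicit_auth
    # A connection with credentials falls back to BasicAuth; otherwise no auth class at all.
    if conn.login or conn.password:
        return BasicAuth
    return None


assert resolve_auth_type_like(None, SimpleNamespace(login="user", password="s3cr3t")) is BasicAuth
assert resolve_auth_type_like(None, SimpleNamespace(login=None, password=None)) is None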
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/sensors/http.py
RENAMED
@@ -17,20 +17,24 @@
 # under the License.
 from __future__ import annotations

-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any

 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
 from airflow.providers.http.triggers.http import HttpSensorTrigger
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.http.version_compat import AIRFLOW_V_3_0_PLUS, BaseSensorOperator

 if TYPE_CHECKING:
     try:
         from airflow.sdk.definitions.context import Context
-        ... (removed line truncated in this diff view)
+
+        if AIRFLOW_V_3_0_PLUS:
+            from airflow.sdk import PokeReturnValue
+        else:
+            from airflow.sensors.base import PokeReturnValue  # type: ignore[no-redef]
     except ImportError:
         # TODO: Remove once provider drops support for Airflow 2
         from airflow.utils.context import Context
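The sensor now pulls ``AIRFLOW_V_3_0_PLUS`` and ``BaseSensorOperator`` from the new ``airflow.providers.http.version_compat`` module, whose contents are not shown in this diff. As a rough orientation only, such a version gate usually boils down to a comparison like the sketch below (an assumption about the pattern, not the actual file):

from packaging.version import Version

from airflow import __version__ as AIRFLOW_VERSION

# True on Airflow 3.x, where BaseSensorOperator and PokeReturnValue live in airflow.sdk.
AIRFLOW_V_3_0_PLUS = Version(Version(AIRFLOW_VERSION).base_version) >= Version("3.0.0")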
{apache_airflow_providers_http-5.3.1 → apache_airflow_providers_http-5.3.2}/src/airflow/providers/http/triggers/http.py
RENAMED
@@ -20,6 +20,7 @@ import asyncio
 import base64
 import pickle
 from collections.abc import AsyncIterator
+from importlib import import_module
 from typing import TYPE_CHECKING, Any

 import aiohttp
@@ -35,6 +36,21 @@ if TYPE_CHECKING:
     from aiohttp.client_reqrep import ClientResponse


+def serialize_auth_type(auth: str | type | None) -> str | None:
+    if auth is None:
+        return None
+    if isinstance(auth, str):
+        return auth
+    return f"{auth.__module__}.{auth.__qualname__}"
+
+
+def deserialize_auth_type(path: str | None) -> type | None:
+    if path is None:
+        return None
+    module_path, cls_name = path.rsplit(".", 1)
+    return getattr(import_module(module_path), cls_name)
+
+
 class HttpTrigger(BaseTrigger):
     """
     HttpTrigger run on the trigger worker.
@@ -56,7 +72,7 @@ class HttpTrigger(BaseTrigger):
     def __init__(
         self,
         http_conn_id: str = "http_default",
-        auth_type:
+        auth_type: str | None = None,
         method: str = "POST",
         endpoint: str | None = None,
         headers: dict[str, str] | None = None,
@@ -66,7 +82,7 @@ class HttpTrigger(BaseTrigger):
         super().__init__()
         self.http_conn_id = http_conn_id
         self.method = method
-        self.auth_type = auth_type
+        self.auth_type = deserialize_auth_type(auth_type)
         self.endpoint = endpoint
         self.headers = headers
         self.data = data
@@ -79,7 +95,7 @@ class HttpTrigger(BaseTrigger):
             {
                 "http_conn_id": self.http_conn_id,
                 "method": self.method,
-                "auth_type": self.auth_type,
+                "auth_type": serialize_auth_type(self.auth_type),
                 "endpoint": self.endpoint,
                 "headers": self.headers,
                 "data": self.data,
@@ -126,7 +142,7 @@ class HttpTrigger(BaseTrigger):
         response.reason = str(client_response.reason)
         cookies = RequestsCookieJar()
         for k, v in client_response.cookies.items():
-            cookies.set(k, v)
+            cookies.set(k, str(v))  # Convert Morsel to string
         response.cookies = cookies
         return response

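The two helpers added at the top of triggers/http.py are what let the operator hand an auth class across the serialization boundary as a dotted path. A round trip using ``requests.auth.HTTPBasicAuth`` as the auth class (any importable class works the same way):

from requests.auth import HTTPBasicAuth

from airflow.providers.http.triggers.http import deserialize_auth_type, serialize_auth_type

path = serialize_auth_type(HTTPBasicAuth)
assert path == "requests.auth.HTTPBasicAuth"
assert deserialize_auth_type(path) is HTTPBasicAuth

# None and already-serialized strings pass through unchanged.
assert serialize_auth_type(None) is None
assert serialize_auth_type(path) == path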