apache-airflow-providers-http 5.3.2rc1__py3-none-any.whl → 5.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "5.3.2"
32
+ __version__ = "5.6.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.10.0"
35
+ "2.11.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-http:{__version__}` needs Apache Airflow 2.10.0+"
38
+ f"The package `apache-airflow-providers-http:{__version__}` needs Apache Airflow 2.11.0+"
39
39
  )
@@ -53,6 +53,7 @@ def get_provider_info():
53
53
  "python-modules": ["airflow.providers.http.hooks.http"],
54
54
  }
55
55
  ],
56
+ "notifications": ["airflow.providers.http.notifications.HttpNotifier"],
56
57
  "triggers": [
57
58
  {
58
59
  "integration-name": "Hypertext Transfer Protocol (HTTP)",
@@ -19,7 +19,7 @@ from __future__ import annotations
19
19
 
20
20
  import copy
21
21
  from collections.abc import Callable
22
- from typing import TYPE_CHECKING, Any
22
+ from typing import TYPE_CHECKING, Any, cast
23
23
  from urllib.parse import urlparse
24
24
 
25
25
  import aiohttp
@@ -33,13 +33,9 @@ from requests.models import DEFAULT_REDIRECT_LIMIT
33
33
  from requests_toolbelt.adapters.socket_options import TCPKeepAliveAdapter
34
34
 
35
35
  from airflow.exceptions import AirflowException
36
+ from airflow.providers.common.compat.sdk import BaseHook
36
37
  from airflow.providers.http.exceptions import HttpErrorException, HttpMethodException
37
38
 
38
- try:
39
- from airflow.sdk import BaseHook
40
- except ImportError:
41
- from airflow.hooks.base import BaseHook as BaseHook # type: ignore
42
-
43
39
  if TYPE_CHECKING:
44
40
  from aiohttp.client_reqrep import ClientResponse
45
41
  from requests.adapters import HTTPAdapter
@@ -139,6 +135,7 @@ class HttpHook(BaseHook):
139
135
  self.http_conn_id = http_conn_id
140
136
  self.method = method.upper()
141
137
  self.base_url: str = ""
138
+ self._base_url_initialized: bool = False
142
139
  self._retry_obj: Callable[..., Any]
143
140
  self._auth_type: Any = auth_type
144
141
 
@@ -207,6 +204,7 @@ class HttpHook(BaseHook):
207
204
  parsed = urlparse(self.base_url)
208
205
  if not parsed.scheme:
209
206
  raise ValueError(f"Invalid base URL: Missing scheme in {self.base_url}")
207
+ self._base_url_initialized = True
210
208
 
211
209
  def _configure_session_from_auth(self, session: Session, connection: Connection) -> Session:
212
210
  session.auth = self._extract_auth(connection)
@@ -239,7 +237,7 @@ class HttpHook(BaseHook):
239
237
  session.stream = self.merged_extra.get("stream", False)
240
238
  session.verify = self.merged_extra.get("verify", self.merged_extra.get("verify_ssl", True))
241
239
  session.cert = self.merged_extra.get("cert", None)
242
- session.max_redirects = self.merged_extra.get("max_redirects", DEFAULT_REDIRECT_LIMIT)
240
+ session.max_redirects = cast("int", self.merged_extra.get("max_redirects", DEFAULT_REDIRECT_LIMIT))
243
241
  session.trust_env = self.merged_extra.get("trust_env", True)
244
242
 
245
243
  try:
@@ -383,10 +381,14 @@ class HttpHook(BaseHook):
383
381
  self._retry_obj = tenacity.Retrying(**_retry_args)
384
382
 
385
383
  # TODO: remove ignore type when https://github.com/jd/tenacity/issues/428 is resolved
386
- return self._retry_obj(self.run, *args, **kwargs) # type: ignore
384
+ return self._retry_obj(self.run, *args, **kwargs)
387
385
 
388
386
  def url_from_endpoint(self, endpoint: str | None) -> str:
389
387
  """Combine base url with endpoint."""
388
+ # Ensure base_url is set by initializing it if it hasn't been initialized yet
389
+ if not self._base_url_initialized and not self.base_url:
390
+ connection = self.get_connection(self.http_conn_id)
391
+ self._set_base_url(connection)
390
392
  return _url_from_endpoint(base_url=self.base_url, endpoint=endpoint)
391
393
 
392
394
  def test_connection(self):
@@ -0,0 +1,21 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+
19
+ from airflow.providers.http.notifications.http import HttpNotifier
20
+
21
+ __all__ = ["HttpNotifier"]
@@ -0,0 +1,105 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from __future__ import annotations
19
+
20
+ from functools import cached_property
21
+ from typing import TYPE_CHECKING, Any
22
+
23
+ import aiohttp
24
+
25
+ from airflow.providers.common.compat.notifier import BaseNotifier
26
+ from airflow.providers.http.hooks.http import HttpAsyncHook, HttpHook
27
+
28
+ if TYPE_CHECKING:
29
+ from airflow.sdk.definitions.context import Context
30
+
31
+
32
+ class HttpNotifier(BaseNotifier):
33
+ """
34
+ HTTP Notifier.
35
+
36
+ Sends HTTP requests to notify external systems.
37
+
38
+ :param http_conn_id: HTTP connection id that has the base URL and optional authentication credentials.
39
+ :param endpoint: The endpoint to be called i.e. resource/v1/query?
40
+ :param method: The HTTP method to use. Defaults to POST.
41
+ :param data: Payload to be uploaded or request parameters
42
+ :param json: JSON payload to be uploaded
43
+ :param headers: Additional headers to be passed through as a dictionary
44
+ :param extra_options: Additional options to be used when executing the request
45
+ """
46
+
47
+ template_fields = ("http_conn_id", "endpoint", "data", "json", "headers", "extra_options")
48
+
49
+ def __init__(
50
+ self,
51
+ *,
52
+ http_conn_id: str = HttpHook.default_conn_name,
53
+ endpoint: str | None = None,
54
+ method: str = "POST",
55
+ data: dict[str, Any] | str | None = None,
56
+ json: dict[str, Any] | str | None = None,
57
+ headers: dict[str, Any] | None = None,
58
+ extra_options: dict[str, Any] | None = None,
59
+ **kwargs,
60
+ ):
61
+ super().__init__(**kwargs)
62
+ self.http_conn_id = http_conn_id
63
+ self.endpoint = endpoint
64
+ self.method = method
65
+ self.data = data
66
+ self.json = json
67
+ self.headers = headers
68
+ self.extra_options = extra_options or {}
69
+
70
+ @cached_property
71
+ def hook(self) -> HttpHook:
72
+ """HTTP Hook."""
73
+ return HttpHook(method=self.method, http_conn_id=self.http_conn_id)
74
+
75
+ @cached_property
76
+ def async_hook(self) -> HttpAsyncHook:
77
+ """HTTP Async Hook."""
78
+ return HttpAsyncHook(method=self.method, http_conn_id=self.http_conn_id)
79
+
80
+ def notify(self, context: Context) -> None:
81
+ """Send HTTP notification (sync)."""
82
+ resp = self.hook.run(
83
+ endpoint=self.endpoint,
84
+ data=self.data,
85
+ headers=self.headers,
86
+ extra_options=self.extra_options,
87
+ json=self.json,
88
+ )
89
+ self.log.debug("HTTP notification sent: %s %s", resp.status_code, resp.url)
90
+
91
+ async def async_notify(self, context: Context) -> None:
92
+ """Send HTTP notification (async)."""
93
+ async with aiohttp.ClientSession() as session:
94
+ resp = await self.async_hook.run(
95
+ session=session,
96
+ endpoint=self.endpoint,
97
+ data=self.data,
98
+ json=self.json,
99
+ headers=self.headers,
100
+ extra_options=self.extra_options,
101
+ )
102
+ self.log.debug("HTTP notification sent (async): %s %s", resp.status, resp.url)
103
+
104
+
105
+ send_http_notification = HttpNotifier
@@ -27,13 +27,8 @@ from requests import Response
27
27
 
28
28
  from airflow.configuration import conf
29
29
  from airflow.exceptions import AirflowException
30
-
31
- try:
32
- from airflow.sdk import BaseHook
33
- except ImportError:
34
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
30
+ from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
35
31
  from airflow.providers.http.triggers.http import HttpTrigger, serialize_auth_type
36
- from airflow.providers.http.version_compat import BaseOperator
37
32
  from airflow.utils.helpers import merge_dicts
38
33
 
39
34
  if TYPE_CHECKING:
@@ -23,11 +23,13 @@ from typing import TYPE_CHECKING, Any
23
23
 
24
24
  from airflow.configuration import conf
25
25
  from airflow.exceptions import AirflowException
26
+ from airflow.providers.common.compat.sdk import BaseSensorOperator
26
27
  from airflow.providers.http.hooks.http import HttpHook
27
28
  from airflow.providers.http.triggers.http import HttpSensorTrigger
28
- from airflow.providers.http.version_compat import AIRFLOW_V_3_0_PLUS, BaseSensorOperator
29
29
 
30
30
  if TYPE_CHECKING:
31
+ from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
32
+
31
33
  try:
32
34
  from airflow.sdk.definitions.context import Context
33
35
 
@@ -18,20 +18,30 @@ from __future__ import annotations
18
18
 
19
19
  import asyncio
20
20
  import base64
21
+ import importlib
22
+ import inspect
21
23
  import pickle
24
+ import sys
22
25
  from collections.abc import AsyncIterator
23
26
  from importlib import import_module
24
27
  from typing import TYPE_CHECKING, Any
25
28
 
26
29
  import aiohttp
27
30
  import requests
31
+ from asgiref.sync import sync_to_async
28
32
  from requests.cookies import RequestsCookieJar
29
33
  from requests.structures import CaseInsensitiveDict
30
34
 
31
35
  from airflow.exceptions import AirflowException
36
+ from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
32
37
  from airflow.providers.http.hooks.http import HttpAsyncHook
33
38
  from airflow.triggers.base import BaseTrigger, TriggerEvent
34
39
 
40
+ if AIRFLOW_V_3_0_PLUS:
41
+ from airflow.triggers.base import BaseEventTrigger
42
+ else:
43
+ from airflow.triggers.base import BaseTrigger as BaseEventTrigger # type: ignore
44
+
35
45
  if TYPE_CHECKING:
36
46
  from aiohttp.client_reqrep import ClientResponse
37
47
 
@@ -105,21 +115,9 @@ class HttpTrigger(BaseTrigger):
105
115
 
106
116
  async def run(self) -> AsyncIterator[TriggerEvent]:
107
117
  """Make a series of asynchronous http calls via a http hook."""
108
- hook = HttpAsyncHook(
109
- method=self.method,
110
- http_conn_id=self.http_conn_id,
111
- auth_type=self.auth_type,
112
- )
118
+ hook = self._get_async_hook()
113
119
  try:
114
- async with aiohttp.ClientSession() as session:
115
- client_response = await hook.run(
116
- session=session,
117
- endpoint=self.endpoint,
118
- data=self.data,
119
- headers=self.headers,
120
- extra_options=self.extra_options,
121
- )
122
- response = await self._convert_response(client_response)
120
+ response = await self._get_response(hook)
123
121
  yield TriggerEvent(
124
122
  {
125
123
  "status": "success",
@@ -129,6 +127,25 @@ class HttpTrigger(BaseTrigger):
129
127
  except Exception as e:
130
128
  yield TriggerEvent({"status": "error", "message": str(e)})
131
129
 
130
+ def _get_async_hook(self) -> HttpAsyncHook:
131
+ return HttpAsyncHook(
132
+ method=self.method,
133
+ http_conn_id=self.http_conn_id,
134
+ auth_type=self.auth_type,
135
+ )
136
+
137
+ async def _get_response(self, hook):
138
+ async with aiohttp.ClientSession() as session:
139
+ client_response = await hook.run(
140
+ session=session,
141
+ endpoint=self.endpoint,
142
+ data=self.data,
143
+ headers=self.headers,
144
+ extra_options=self.extra_options,
145
+ )
146
+ response = await self._convert_response(client_response)
147
+ return response
148
+
132
149
  @staticmethod
133
150
  async def _convert_response(client_response: ClientResponse) -> requests.Response:
134
151
  """Convert aiohttp.client_reqrep.ClientResponse to requests.Response."""
@@ -219,3 +236,89 @@ class HttpSensorTrigger(BaseTrigger):
219
236
  method=self.method,
220
237
  http_conn_id=self.http_conn_id,
221
238
  )
239
+
240
+
241
+ class HttpEventTrigger(HttpTrigger, BaseEventTrigger):
242
+ """
243
+ HttpEventTrigger for event-based DAG scheduling when the API response satisfies the response check.
244
+
245
+ :param response_check_path: Path to the function that evaluates whether the API response
246
+ passes the conditions set by the user to fire the trigger. The method must be asynchronous.
247
+ :param http_conn_id: http connection id that has the base
248
+ API url i.e https://www.google.com/ and optional authentication credentials. Default
249
+ headers can also be specified in the Extra field in json format.
250
+ :param auth_type: The auth type for the service
251
+ :param method: The API method to be called
252
+ :param endpoint: Endpoint to be called, i.e. ``resource/v1/query?``.
253
+ :param headers: Additional headers to be passed through as a dict.
254
+ :param data: Payload to be uploaded or request parameters.
255
+ :param extra_options: Additional kwargs to pass when creating a request.
256
+ :param poll_interval: How often, in seconds, the trigger should send a request to the API.
257
+ """
258
+
259
+ def __init__(
260
+ self,
261
+ response_check_path: str,
262
+ http_conn_id: str = "http_default",
263
+ auth_type: Any = None,
264
+ method: str = "GET",
265
+ endpoint: str | None = None,
266
+ headers: dict[str, str] | None = None,
267
+ data: dict[str, Any] | str | None = None,
268
+ extra_options: dict[str, Any] | None = None,
269
+ poll_interval: float = 60.0,
270
+ ):
271
+ super().__init__(http_conn_id, auth_type, method, endpoint, headers, data, extra_options)
272
+ self.response_check_path = response_check_path
273
+ self.poll_interval = poll_interval
274
+
275
+ def serialize(self) -> tuple[str, dict[str, Any]]:
276
+ """Serialize HttpEventTrigger arguments and classpath."""
277
+ return (
278
+ self.__class__.__module__ + "." + self.__class__.__qualname__,
279
+ {
280
+ "http_conn_id": self.http_conn_id,
281
+ "method": self.method,
282
+ "auth_type": serialize_auth_type(self.auth_type),
283
+ "endpoint": self.endpoint,
284
+ "headers": self.headers,
285
+ "data": self.data,
286
+ "extra_options": self.extra_options,
287
+ "response_check_path": self.response_check_path,
288
+ "poll_interval": self.poll_interval,
289
+ },
290
+ )
291
+
292
+ async def run(self) -> AsyncIterator[TriggerEvent]:
293
+ """Make a series of asynchronous http calls via a http hook until the response passes the response check."""
294
+ hook = super()._get_async_hook()
295
+ try:
296
+ while True:
297
+ response = await super()._get_response(hook)
298
+ if await self._run_response_check(response):
299
+ break
300
+ await asyncio.sleep(self.poll_interval)
301
+ yield TriggerEvent(
302
+ {
303
+ "status": "success",
304
+ "response": base64.standard_b64encode(pickle.dumps(response)).decode("ascii"),
305
+ }
306
+ )
307
+ except Exception as e:
308
+ self.log.error("status: error, message: %s", str(e))
309
+
310
+ async def _import_from_response_check_path(self):
311
+ """Import the response check callable from the path provided by the user."""
312
+ module_path, func_name = self.response_check_path.rsplit(".", 1)
313
+ if module_path in sys.modules:
314
+ module = await sync_to_async(importlib.reload)(sys.modules[module_path])
315
+ module = await sync_to_async(importlib.import_module)(module_path)
316
+ return getattr(module, func_name)
317
+
318
+ async def _run_response_check(self, response) -> bool:
319
+ """Run the response_check callable provided by the user."""
320
+ response_check = await self._import_from_response_check_path()
321
+ if not inspect.iscoroutinefunction(response_check):
322
+ raise AirflowException("The response_check callable is not asynchronous.")
323
+ check = await response_check(response)
324
+ return check
@@ -33,11 +33,9 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
33
33
 
34
34
 
35
35
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
36
+ AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
36
37
 
37
- if AIRFLOW_V_3_0_PLUS:
38
- from airflow.sdk import BaseOperator, BaseSensorOperator
39
- else:
40
- from airflow.models import BaseOperator # type: ignore[no-redef]
41
- from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
42
-
43
- __all__ = ["AIRFLOW_V_3_0_PLUS", "BaseOperator", "BaseSensorOperator"]
38
+ __all__ = [
39
+ "AIRFLOW_V_3_0_PLUS",
40
+ "AIRFLOW_V_3_1_PLUS",
41
+ ]
@@ -1,12 +1,13 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-http
3
- Version: 5.3.2rc1
3
+ Version: 5.6.0
4
4
  Summary: Provider package apache-airflow-providers-http for Apache Airflow
5
5
  Keywords: airflow-provider,http,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
- Requires-Python: ~=3.10
8
+ Requires-Python: >=3.10
9
9
  Description-Content-Type: text/x-rst
10
+ License-Expression: Apache-2.0
10
11
  Classifier: Development Status :: 5 - Production/Stable
11
12
  Classifier: Environment :: Console
12
13
  Classifier: Environment :: Web Environment
@@ -14,23 +15,28 @@ Classifier: Intended Audience :: Developers
14
15
  Classifier: Intended Audience :: System Administrators
15
16
  Classifier: Framework :: Apache Airflow
16
17
  Classifier: Framework :: Apache Airflow :: Provider
17
- Classifier: License :: OSI Approved :: Apache Software License
18
18
  Classifier: Programming Language :: Python :: 3.10
19
19
  Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Programming Language :: Python :: 3.13
21
22
  Classifier: Topic :: System :: Monitoring
22
- Requires-Dist: apache-airflow>=2.10.0rc1
23
+ License-File: LICENSE
24
+ License-File: NOTICE
25
+ Requires-Dist: apache-airflow>=2.11.0
26
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
23
27
  Requires-Dist: requests>=2.32.0,<3
24
28
  Requires-Dist: requests-toolbelt>=1.0.0
25
- Requires-Dist: aiohttp>=3.9.2,!=3.11.0
29
+ Requires-Dist: aiohttp>=3.12.14
26
30
  Requires-Dist: asgiref>=2.3.0
31
+ Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
27
32
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
28
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-http/5.3.2/changelog.html
29
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-http/5.3.2
33
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0/changelog.html
34
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0
30
35
  Project-URL: Mastodon, https://fosstodon.org/@airflow
31
36
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
32
37
  Project-URL: Source Code, https://github.com/apache/airflow
33
38
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
39
+ Provides-Extra: common-compat
34
40
 
35
41
 
36
42
  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -57,7 +63,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
57
63
 
58
64
  Package ``apache-airflow-providers-http``
59
65
 
60
- Release: ``5.3.2``
66
+ Release: ``5.6.0``
61
67
 
62
68
 
63
69
  `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -70,30 +76,59 @@ This is a provider package for ``http`` provider. All classes for this provider
70
76
  are in ``airflow.providers.http`` python package.
71
77
 
72
78
  You can find package information and changelog for the provider
73
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/>`_.
79
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0/>`_.
74
80
 
75
81
  Installation
76
82
  ------------
77
83
 
78
- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
84
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
79
85
  for the minimum Airflow version supported) via
80
86
  ``pip install apache-airflow-providers-http``
81
87
 
82
- The package supports the following python versions: 3.10,3.11,3.12
88
+ The package supports the following python versions: 3.10,3.11,3.12,3.13
83
89
 
84
90
  Requirements
85
91
  ------------
86
92
 
87
- ===================== ====================
88
- PIP package Version required
89
- ===================== ====================
90
- ``apache-airflow`` ``>=2.10.0``
91
- ``requests`` ``>=2.32.0,<3``
92
- ``requests-toolbelt`` ``>=1.0.0``
93
- ``aiohttp`` ``!=3.11.0,>=3.9.2``
94
- ``asgiref`` ``>=2.3.0``
95
- ===================== ====================
93
+ ========================================== ==================
94
+ PIP package Version required
95
+ ========================================== ==================
96
+ ``apache-airflow`` ``>=2.11.0``
97
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
98
+ ``requests`` ``>=2.32.0,<3``
99
+ ``requests-toolbelt`` ``>=1.0.0``
100
+ ``aiohttp`` ``>=3.12.14``
101
+ ``asgiref`` ``>=2.3.0``
102
+ ========================================== ==================
103
+
104
+ Cross provider package dependencies
105
+ -----------------------------------
106
+
107
+ Those are dependencies that might be needed in order to use all the features of the package.
108
+ You need to install the specified providers in order to use them.
109
+
110
+ You can install such cross-provider dependencies when installing from PyPI. For example:
111
+
112
+ .. code-block:: bash
113
+
114
+ pip install apache-airflow-providers-http[common.compat]
115
+
116
+
117
+ ================================================================================================================== =================
118
+ Dependent package Extra
119
+ ================================================================================================================== =================
120
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
121
+ ================================================================================================================== =================
122
+
123
+ Optional dependencies
124
+ ----------------------
125
+
126
+ ================= ==========================================
127
+ Extra Dependencies
128
+ ================= ==========================================
129
+ ``common.compat`` ``apache-airflow-providers-common-compat``
130
+ ================= ==========================================
96
131
 
97
132
  The changelog for the provider package can be found in the
98
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.2/changelog.html>`_.
133
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0/changelog.html>`_.
99
134
 
@@ -0,0 +1,20 @@
1
+ airflow/providers/http/__init__.py,sha256=FFIsR5YTe6L9kNnKLO5pddxz4AE7JhjZGD7Bux52N_c,1493
2
+ airflow/providers/http/exceptions.py,sha256=WnIEj0cnAS746uRF1661tCEBc_Uuo0bMEIMrQyEb9nc,1084
3
+ airflow/providers/http/get_provider_info.py,sha256=5_umCW3AGydGe94R8WxDqVf0vvCH_Hmb5ixTg55BMAU,2737
4
+ airflow/providers/http/version_compat.py,sha256=A6a37mMJVpSRlvL7wAMj4VGbFao3-lnRXMgnU3F3nLE,1676
5
+ airflow/providers/http/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
6
+ airflow/providers/http/hooks/http.py,sha256=1g_HCFMRwHRBEjOLZg2hJsD5pT4tWZ5yl7Xk5M2Y3bU,22585
7
+ airflow/providers/http/notifications/__init__.py,sha256=ltUs93oQlBbs3jPsNsZTWvy3IfuyKDSgp2N56AiVTys,883
8
+ airflow/providers/http/notifications/http.py,sha256=WB16Qdb48QSLH96-mO7hRtNrVX2H5JuYwj477IbyvYE,3812
9
+ airflow/providers/http/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
10
+ airflow/providers/http/operators/http.py,sha256=fqyy6I5QRfDDVTyKKNBJiB3Eyf88cWcr-sxApi6XURs,15536
11
+ airflow/providers/http/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
12
+ airflow/providers/http/sensors/http.py,sha256=cGyUopqLtPjIhtzcLWzrTZka_6qCE4IuE9jhpHWrM0U,8563
13
+ airflow/providers/http/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
14
+ airflow/providers/http/triggers/http.py,sha256=mqEo-DoOXsy1jZcPztdHFNxB8eyHWFCp2xrd8FCuFwM,13024
15
+ apache_airflow_providers_http-5.6.0.dist-info/entry_points.txt,sha256=65Rk4MYlxxtwo7y7-uNv4KS7MfoBnILhMjRQmNbRo1Q,100
16
+ apache_airflow_providers_http-5.6.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
17
+ apache_airflow_providers_http-5.6.0.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
18
+ apache_airflow_providers_http-5.6.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
19
+ apache_airflow_providers_http-5.6.0.dist-info/METADATA,sha256=f3Y0pnaGvt2y7He1VJj8VgKZkKY--APht-TFaYa-EW8,5910
20
+ apache_airflow_providers_http-5.6.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Apache Airflow
2
+ Copyright 2016-2025 The Apache Software Foundation
3
+
4
+ This product includes software developed at
5
+ The Apache Software Foundation (http://www.apache.org/).
@@ -1,17 +0,0 @@
1
- airflow/providers/http/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/http/__init__.py,sha256=tQvdnTJRYE9FvJRDKU1d8fpZh2E7o__GzUfBxSSZhLI,1493
3
- airflow/providers/http/exceptions.py,sha256=WnIEj0cnAS746uRF1661tCEBc_Uuo0bMEIMrQyEb9nc,1084
4
- airflow/providers/http/get_provider_info.py,sha256=eJrWksE0KVCB32BavHb64imCUZZIoWyJYnsBj8UsS9Y,2657
5
- airflow/providers/http/version_compat.py,sha256=qgCyO8WUWZqFbibL39BgFatIsjiDX2jJPh-WL1cm5-A,1851
6
- airflow/providers/http/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
7
- airflow/providers/http/hooks/http.py,sha256=ld0LpT6oJEDFpQKPMPG-E5eB06JaVP436YcYSvr8aDc,22311
8
- airflow/providers/http/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
9
- airflow/providers/http/operators/http.py,sha256=wzozFzdc_PxrvK601sm8Iwr3cXdWPWV6q5GU6PqEw6U,15674
10
- airflow/providers/http/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
11
- airflow/providers/http/sensors/http.py,sha256=4lIgV7-3AUy64tAv9N5lKiDtWYaqaVHG_cXkz_Gijq8,8502
12
- airflow/providers/http/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
13
- airflow/providers/http/triggers/http.py,sha256=DG4_20edkRT44grqB5Gyx8pD0ueYsTURpsBMRwO1SNQ,8501
14
- apache_airflow_providers_http-5.3.2rc1.dist-info/entry_points.txt,sha256=65Rk4MYlxxtwo7y7-uNv4KS7MfoBnILhMjRQmNbRo1Q,100
15
- apache_airflow_providers_http-5.3.2rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
16
- apache_airflow_providers_http-5.3.2rc1.dist-info/METADATA,sha256=3EoDeAwK8K6Lk5gGQ2qRP5F56oFnUCvV8uj-NPbsb6k,4074
17
- apache_airflow_providers_http-5.3.2rc1.dist-info/RECORD,,