apache-airflow-providers-http 5.3.3__py3-none-any.whl → 5.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "5.3.3"
32
+ __version__ = "5.6.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.10.0"
35
+ "2.11.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-http:{__version__}` needs Apache Airflow 2.10.0+"
38
+ f"The package `apache-airflow-providers-http:{__version__}` needs Apache Airflow 2.11.0+"
39
39
  )
@@ -53,6 +53,7 @@ def get_provider_info():
53
53
  "python-modules": ["airflow.providers.http.hooks.http"],
54
54
  }
55
55
  ],
56
+ "notifications": ["airflow.providers.http.notifications.HttpNotifier"],
56
57
  "triggers": [
57
58
  {
58
59
  "integration-name": "Hypertext Transfer Protocol (HTTP)",
@@ -33,8 +33,8 @@ from requests.models import DEFAULT_REDIRECT_LIMIT
33
33
  from requests_toolbelt.adapters.socket_options import TCPKeepAliveAdapter
34
34
 
35
35
  from airflow.exceptions import AirflowException
36
+ from airflow.providers.common.compat.sdk import BaseHook
36
37
  from airflow.providers.http.exceptions import HttpErrorException, HttpMethodException
37
- from airflow.providers.http.version_compat import BaseHook
38
38
 
39
39
  if TYPE_CHECKING:
40
40
  from aiohttp.client_reqrep import ClientResponse
@@ -135,6 +135,7 @@ class HttpHook(BaseHook):
135
135
  self.http_conn_id = http_conn_id
136
136
  self.method = method.upper()
137
137
  self.base_url: str = ""
138
+ self._base_url_initialized: bool = False
138
139
  self._retry_obj: Callable[..., Any]
139
140
  self._auth_type: Any = auth_type
140
141
 
@@ -203,6 +204,7 @@ class HttpHook(BaseHook):
203
204
  parsed = urlparse(self.base_url)
204
205
  if not parsed.scheme:
205
206
  raise ValueError(f"Invalid base URL: Missing scheme in {self.base_url}")
207
+ self._base_url_initialized = True
206
208
 
207
209
  def _configure_session_from_auth(self, session: Session, connection: Connection) -> Session:
208
210
  session.auth = self._extract_auth(connection)
@@ -383,6 +385,10 @@ class HttpHook(BaseHook):
383
385
 
384
386
  def url_from_endpoint(self, endpoint: str | None) -> str:
385
387
  """Combine base url with endpoint."""
388
+ # Ensure base_url is set by initializing it if it hasn't been initialized yet
389
+ if not self._base_url_initialized and not self.base_url:
390
+ connection = self.get_connection(self.http_conn_id)
391
+ self._set_base_url(connection)
386
392
  return _url_from_endpoint(base_url=self.base_url, endpoint=endpoint)
387
393
 
388
394
  def test_connection(self):
@@ -0,0 +1,21 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+
19
+ from airflow.providers.http.notifications.http import HttpNotifier
20
+
21
+ __all__ = ["HttpNotifier"]
@@ -0,0 +1,105 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from __future__ import annotations
19
+
20
+ from functools import cached_property
21
+ from typing import TYPE_CHECKING, Any
22
+
23
+ import aiohttp
24
+
25
+ from airflow.providers.common.compat.notifier import BaseNotifier
26
+ from airflow.providers.http.hooks.http import HttpAsyncHook, HttpHook
27
+
28
+ if TYPE_CHECKING:
29
+ from airflow.sdk.definitions.context import Context
30
+
31
+
32
+ class HttpNotifier(BaseNotifier):
33
+ """
34
+ HTTP Notifier.
35
+
36
+ Sends HTTP requests to notify external systems.
37
+
38
+ :param http_conn_id: HTTP connection id that has the base URL and optional authentication credentials.
39
+ :param endpoint: The endpoint to be called i.e. resource/v1/query?
40
+ :param method: The HTTP method to use. Defaults to POST.
41
+ :param data: Payload to be uploaded or request parameters
42
+ :param json: JSON payload to be uploaded
43
+ :param headers: Additional headers to be passed through as a dictionary
44
+ :param extra_options: Additional options to be used when executing the request
45
+ """
46
+
47
+ template_fields = ("http_conn_id", "endpoint", "data", "json", "headers", "extra_options")
48
+
49
+ def __init__(
50
+ self,
51
+ *,
52
+ http_conn_id: str = HttpHook.default_conn_name,
53
+ endpoint: str | None = None,
54
+ method: str = "POST",
55
+ data: dict[str, Any] | str | None = None,
56
+ json: dict[str, Any] | str | None = None,
57
+ headers: dict[str, Any] | None = None,
58
+ extra_options: dict[str, Any] | None = None,
59
+ **kwargs,
60
+ ):
61
+ super().__init__(**kwargs)
62
+ self.http_conn_id = http_conn_id
63
+ self.endpoint = endpoint
64
+ self.method = method
65
+ self.data = data
66
+ self.json = json
67
+ self.headers = headers
68
+ self.extra_options = extra_options or {}
69
+
70
+ @cached_property
71
+ def hook(self) -> HttpHook:
72
+ """HTTP Hook."""
73
+ return HttpHook(method=self.method, http_conn_id=self.http_conn_id)
74
+
75
+ @cached_property
76
+ def async_hook(self) -> HttpAsyncHook:
77
+ """HTTP Async Hook."""
78
+ return HttpAsyncHook(method=self.method, http_conn_id=self.http_conn_id)
79
+
80
+ def notify(self, context: Context) -> None:
81
+ """Send HTTP notification (sync)."""
82
+ resp = self.hook.run(
83
+ endpoint=self.endpoint,
84
+ data=self.data,
85
+ headers=self.headers,
86
+ extra_options=self.extra_options,
87
+ json=self.json,
88
+ )
89
+ self.log.debug("HTTP notification sent: %s %s", resp.status_code, resp.url)
90
+
91
+ async def async_notify(self, context: Context) -> None:
92
+ """Send HTTP notification (async)."""
93
+ async with aiohttp.ClientSession() as session:
94
+ resp = await self.async_hook.run(
95
+ session=session,
96
+ endpoint=self.endpoint,
97
+ data=self.data,
98
+ json=self.json,
99
+ headers=self.headers,
100
+ extra_options=self.extra_options,
101
+ )
102
+ self.log.debug("HTTP notification sent (async): %s %s", resp.status, resp.url)
103
+
104
+
105
+ send_http_notification = HttpNotifier
@@ -27,8 +27,8 @@ from requests import Response
27
27
 
28
28
  from airflow.configuration import conf
29
29
  from airflow.exceptions import AirflowException
30
+ from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
30
31
  from airflow.providers.http.triggers.http import HttpTrigger, serialize_auth_type
31
- from airflow.providers.http.version_compat import BaseHook, BaseOperator
32
32
  from airflow.utils.helpers import merge_dicts
33
33
 
34
34
  if TYPE_CHECKING:
@@ -23,11 +23,13 @@ from typing import TYPE_CHECKING, Any
23
23
 
24
24
  from airflow.configuration import conf
25
25
  from airflow.exceptions import AirflowException
26
+ from airflow.providers.common.compat.sdk import BaseSensorOperator
26
27
  from airflow.providers.http.hooks.http import HttpHook
27
28
  from airflow.providers.http.triggers.http import HttpSensorTrigger
28
- from airflow.providers.http.version_compat import AIRFLOW_V_3_0_PLUS, BaseSensorOperator
29
29
 
30
30
  if TYPE_CHECKING:
31
+ from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
32
+
31
33
  try:
32
34
  from airflow.sdk.definitions.context import Context
33
35
 
@@ -18,20 +18,30 @@ from __future__ import annotations
18
18
 
19
19
  import asyncio
20
20
  import base64
21
+ import importlib
22
+ import inspect
21
23
  import pickle
24
+ import sys
22
25
  from collections.abc import AsyncIterator
23
26
  from importlib import import_module
24
27
  from typing import TYPE_CHECKING, Any
25
28
 
26
29
  import aiohttp
27
30
  import requests
31
+ from asgiref.sync import sync_to_async
28
32
  from requests.cookies import RequestsCookieJar
29
33
  from requests.structures import CaseInsensitiveDict
30
34
 
31
35
  from airflow.exceptions import AirflowException
36
+ from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
32
37
  from airflow.providers.http.hooks.http import HttpAsyncHook
33
38
  from airflow.triggers.base import BaseTrigger, TriggerEvent
34
39
 
40
+ if AIRFLOW_V_3_0_PLUS:
41
+ from airflow.triggers.base import BaseEventTrigger
42
+ else:
43
+ from airflow.triggers.base import BaseTrigger as BaseEventTrigger # type: ignore
44
+
35
45
  if TYPE_CHECKING:
36
46
  from aiohttp.client_reqrep import ClientResponse
37
47
 
@@ -105,21 +115,9 @@ class HttpTrigger(BaseTrigger):
105
115
 
106
116
  async def run(self) -> AsyncIterator[TriggerEvent]:
107
117
  """Make a series of asynchronous http calls via a http hook."""
108
- hook = HttpAsyncHook(
109
- method=self.method,
110
- http_conn_id=self.http_conn_id,
111
- auth_type=self.auth_type,
112
- )
118
+ hook = self._get_async_hook()
113
119
  try:
114
- async with aiohttp.ClientSession() as session:
115
- client_response = await hook.run(
116
- session=session,
117
- endpoint=self.endpoint,
118
- data=self.data,
119
- headers=self.headers,
120
- extra_options=self.extra_options,
121
- )
122
- response = await self._convert_response(client_response)
120
+ response = await self._get_response(hook)
123
121
  yield TriggerEvent(
124
122
  {
125
123
  "status": "success",
@@ -129,6 +127,25 @@ class HttpTrigger(BaseTrigger):
129
127
  except Exception as e:
130
128
  yield TriggerEvent({"status": "error", "message": str(e)})
131
129
 
130
+ def _get_async_hook(self) -> HttpAsyncHook:
131
+ return HttpAsyncHook(
132
+ method=self.method,
133
+ http_conn_id=self.http_conn_id,
134
+ auth_type=self.auth_type,
135
+ )
136
+
137
+ async def _get_response(self, hook):
138
+ async with aiohttp.ClientSession() as session:
139
+ client_response = await hook.run(
140
+ session=session,
141
+ endpoint=self.endpoint,
142
+ data=self.data,
143
+ headers=self.headers,
144
+ extra_options=self.extra_options,
145
+ )
146
+ response = await self._convert_response(client_response)
147
+ return response
148
+
132
149
  @staticmethod
133
150
  async def _convert_response(client_response: ClientResponse) -> requests.Response:
134
151
  """Convert aiohttp.client_reqrep.ClientResponse to requests.Response."""
@@ -219,3 +236,89 @@ class HttpSensorTrigger(BaseTrigger):
219
236
  method=self.method,
220
237
  http_conn_id=self.http_conn_id,
221
238
  )
239
+
240
+
241
+ class HttpEventTrigger(HttpTrigger, BaseEventTrigger):
242
+ """
243
+ HttpEventTrigger for event-based DAG scheduling when the API response satisfies the response check.
244
+
245
+ :param response_check_path: Path to the function that evaluates whether the API response
246
+ passes the conditions set by the user to fire the trigger. The method must be asynchronous.
247
+ :param http_conn_id: http connection id that has the base
248
+ API url i.e https://www.google.com/ and optional authentication credentials. Default
249
+ headers can also be specified in the Extra field in json format.
250
+ :param auth_type: The auth type for the service
251
+ :param method: The API method to be called
252
+ :param endpoint: Endpoint to be called, i.e. ``resource/v1/query?``.
253
+ :param headers: Additional headers to be passed through as a dict.
254
+ :param data: Payload to be uploaded or request parameters.
255
+ :param extra_options: Additional kwargs to pass when creating a request.
256
+ :param poll_interval: How often, in seconds, the trigger should send a request to the API.
257
+ """
258
+
259
+ def __init__(
260
+ self,
261
+ response_check_path: str,
262
+ http_conn_id: str = "http_default",
263
+ auth_type: Any = None,
264
+ method: str = "GET",
265
+ endpoint: str | None = None,
266
+ headers: dict[str, str] | None = None,
267
+ data: dict[str, Any] | str | None = None,
268
+ extra_options: dict[str, Any] | None = None,
269
+ poll_interval: float = 60.0,
270
+ ):
271
+ super().__init__(http_conn_id, auth_type, method, endpoint, headers, data, extra_options)
272
+ self.response_check_path = response_check_path
273
+ self.poll_interval = poll_interval
274
+
275
+ def serialize(self) -> tuple[str, dict[str, Any]]:
276
+ """Serialize HttpEventTrigger arguments and classpath."""
277
+ return (
278
+ self.__class__.__module__ + "." + self.__class__.__qualname__,
279
+ {
280
+ "http_conn_id": self.http_conn_id,
281
+ "method": self.method,
282
+ "auth_type": serialize_auth_type(self.auth_type),
283
+ "endpoint": self.endpoint,
284
+ "headers": self.headers,
285
+ "data": self.data,
286
+ "extra_options": self.extra_options,
287
+ "response_check_path": self.response_check_path,
288
+ "poll_interval": self.poll_interval,
289
+ },
290
+ )
291
+
292
+ async def run(self) -> AsyncIterator[TriggerEvent]:
293
+ """Make a series of asynchronous http calls via a http hook until the response passes the response check."""
294
+ hook = super()._get_async_hook()
295
+ try:
296
+ while True:
297
+ response = await super()._get_response(hook)
298
+ if await self._run_response_check(response):
299
+ break
300
+ await asyncio.sleep(self.poll_interval)
301
+ yield TriggerEvent(
302
+ {
303
+ "status": "success",
304
+ "response": base64.standard_b64encode(pickle.dumps(response)).decode("ascii"),
305
+ }
306
+ )
307
+ except Exception as e:
308
+ self.log.error("status: error, message: %s", str(e))
309
+
310
+ async def _import_from_response_check_path(self):
311
+ """Import the response check callable from the path provided by the user."""
312
+ module_path, func_name = self.response_check_path.rsplit(".", 1)
313
+ if module_path in sys.modules:
314
+ module = await sync_to_async(importlib.reload)(sys.modules[module_path])
315
+ module = await sync_to_async(importlib.import_module)(module_path)
316
+ return getattr(module, func_name)
317
+
318
+ async def _run_response_check(self, response) -> bool:
319
+ """Run the response_check callable provided by the user."""
320
+ response_check = await self._import_from_response_check_path()
321
+ if not inspect.iscoroutinefunction(response_check):
322
+ raise AirflowException("The response_check callable is not asynchronous.")
323
+ check = await response_check(response)
324
+ return check
@@ -35,15 +35,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
35
35
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
36
36
  AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
37
37
 
38
- if AIRFLOW_V_3_1_PLUS:
39
- from airflow.sdk import BaseHook
40
- else:
41
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
42
-
43
- if AIRFLOW_V_3_0_PLUS:
44
- from airflow.sdk import BaseOperator, BaseSensorOperator
45
- else:
46
- from airflow.models import BaseOperator
47
- from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
48
-
49
- __all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS", "BaseHook", "BaseOperator", "BaseSensorOperator"]
38
+ __all__ = [
39
+ "AIRFLOW_V_3_0_PLUS",
40
+ "AIRFLOW_V_3_1_PLUS",
41
+ ]
@@ -1,12 +1,13 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-http
3
- Version: 5.3.3
3
+ Version: 5.6.0
4
4
  Summary: Provider package apache-airflow-providers-http for Apache Airflow
5
5
  Keywords: airflow-provider,http,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
8
  Requires-Python: >=3.10
9
9
  Description-Content-Type: text/x-rst
10
+ License-Expression: Apache-2.0
10
11
  Classifier: Development Status :: 5 - Production/Stable
11
12
  Classifier: Environment :: Console
12
13
  Classifier: Environment :: Web Environment
@@ -14,24 +15,28 @@ Classifier: Intended Audience :: Developers
14
15
  Classifier: Intended Audience :: System Administrators
15
16
  Classifier: Framework :: Apache Airflow
16
17
  Classifier: Framework :: Apache Airflow :: Provider
17
- Classifier: License :: OSI Approved :: Apache Software License
18
18
  Classifier: Programming Language :: Python :: 3.10
19
19
  Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
21
  Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
- Requires-Dist: apache-airflow>=2.10.0
23
+ License-File: LICENSE
24
+ License-File: NOTICE
25
+ Requires-Dist: apache-airflow>=2.11.0
26
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
24
27
  Requires-Dist: requests>=2.32.0,<3
25
28
  Requires-Dist: requests-toolbelt>=1.0.0
26
29
  Requires-Dist: aiohttp>=3.12.14
27
30
  Requires-Dist: asgiref>=2.3.0
31
+ Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
28
32
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
29
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.3/changelog.html
30
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.3
33
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0/changelog.html
34
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0
31
35
  Project-URL: Mastodon, https://fosstodon.org/@airflow
32
36
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
33
37
  Project-URL: Source Code, https://github.com/apache/airflow
34
38
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
39
+ Provides-Extra: common-compat
35
40
 
36
41
 
37
42
  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -58,9 +63,8 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
58
63
 
59
64
  Package ``apache-airflow-providers-http``
60
65
 
61
- Release: ``5.3.3``
66
+ Release: ``5.6.0``
62
67
 
63
- Release Date: ``|PypiReleaseDate|``
64
68
 
65
69
  `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
66
70
 
@@ -72,12 +76,12 @@ This is a provider package for ``http`` provider. All classes for this provider
72
76
  are in ``airflow.providers.http`` python package.
73
77
 
74
78
  You can find package information and changelog for the provider
75
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.3/>`_.
79
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0/>`_.
76
80
 
77
81
  Installation
78
82
  ------------
79
83
 
80
- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
84
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
81
85
  for the minimum Airflow version supported) via
82
86
  ``pip install apache-airflow-providers-http``
83
87
 
@@ -86,16 +90,45 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
86
90
  Requirements
87
91
  ------------
88
92
 
89
- ===================== ====================
90
- PIP package Version required
91
- ===================== ====================
92
- ``apache-airflow`` ``>=2.10.0``
93
- ``requests`` ``>=2.32.0,<3``
94
- ``requests-toolbelt`` ``>=1.0.0``
95
- ``aiohttp`` ``!=3.11.0,>=3.9.2``
96
- ``asgiref`` ``>=2.3.0``
97
- ===================== ====================
93
+ ========================================== ==================
94
+ PIP package Version required
95
+ ========================================== ==================
96
+ ``apache-airflow`` ``>=2.11.0``
97
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
98
+ ``requests`` ``>=2.32.0,<3``
99
+ ``requests-toolbelt`` ``>=1.0.0``
100
+ ``aiohttp`` ``>=3.12.14``
101
+ ``asgiref`` ``>=2.3.0``
102
+ ========================================== ==================
103
+
104
+ Cross provider package dependencies
105
+ -----------------------------------
106
+
107
+ Those are dependencies that might be needed in order to use all the features of the package.
108
+ You need to install the specified providers in order to use them.
109
+
110
+ You can install such cross-provider dependencies when installing from PyPI. For example:
111
+
112
+ .. code-block:: bash
113
+
114
+ pip install apache-airflow-providers-http[common.compat]
115
+
116
+
117
+ ================================================================================================================== =================
118
+ Dependent package Extra
119
+ ================================================================================================================== =================
120
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
121
+ ================================================================================================================== =================
122
+
123
+ Optional dependencies
124
+ ----------------------
125
+
126
+ ================= ==========================================
127
+ Extra Dependencies
128
+ ================= ==========================================
129
+ ``common.compat`` ``apache-airflow-providers-common-compat``
130
+ ================= ==========================================
98
131
 
99
132
  The changelog for the provider package can be found in the
100
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.3.3/changelog.html>`_.
133
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.6.0/changelog.html>`_.
101
134
 
@@ -0,0 +1,20 @@
1
+ airflow/providers/http/__init__.py,sha256=FFIsR5YTe6L9kNnKLO5pddxz4AE7JhjZGD7Bux52N_c,1493
2
+ airflow/providers/http/exceptions.py,sha256=WnIEj0cnAS746uRF1661tCEBc_Uuo0bMEIMrQyEb9nc,1084
3
+ airflow/providers/http/get_provider_info.py,sha256=5_umCW3AGydGe94R8WxDqVf0vvCH_Hmb5ixTg55BMAU,2737
4
+ airflow/providers/http/version_compat.py,sha256=A6a37mMJVpSRlvL7wAMj4VGbFao3-lnRXMgnU3F3nLE,1676
5
+ airflow/providers/http/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
6
+ airflow/providers/http/hooks/http.py,sha256=1g_HCFMRwHRBEjOLZg2hJsD5pT4tWZ5yl7Xk5M2Y3bU,22585
7
+ airflow/providers/http/notifications/__init__.py,sha256=ltUs93oQlBbs3jPsNsZTWvy3IfuyKDSgp2N56AiVTys,883
8
+ airflow/providers/http/notifications/http.py,sha256=WB16Qdb48QSLH96-mO7hRtNrVX2H5JuYwj477IbyvYE,3812
9
+ airflow/providers/http/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
10
+ airflow/providers/http/operators/http.py,sha256=fqyy6I5QRfDDVTyKKNBJiB3Eyf88cWcr-sxApi6XURs,15536
11
+ airflow/providers/http/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
12
+ airflow/providers/http/sensors/http.py,sha256=cGyUopqLtPjIhtzcLWzrTZka_6qCE4IuE9jhpHWrM0U,8563
13
+ airflow/providers/http/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
14
+ airflow/providers/http/triggers/http.py,sha256=mqEo-DoOXsy1jZcPztdHFNxB8eyHWFCp2xrd8FCuFwM,13024
15
+ apache_airflow_providers_http-5.6.0.dist-info/entry_points.txt,sha256=65Rk4MYlxxtwo7y7-uNv4KS7MfoBnILhMjRQmNbRo1Q,100
16
+ apache_airflow_providers_http-5.6.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
17
+ apache_airflow_providers_http-5.6.0.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
18
+ apache_airflow_providers_http-5.6.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
19
+ apache_airflow_providers_http-5.6.0.dist-info/METADATA,sha256=f3Y0pnaGvt2y7He1VJj8VgKZkKY--APht-TFaYa-EW8,5910
20
+ apache_airflow_providers_http-5.6.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Apache Airflow
2
+ Copyright 2016-2025 The Apache Software Foundation
3
+
4
+ This product includes software developed at
5
+ The Apache Software Foundation (http://www.apache.org/).
@@ -1,17 +0,0 @@
1
- airflow/providers/http/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/http/__init__.py,sha256=2aIGbvz6ZokFMtXtYVuAoctQVYj4hz6VZZnYn1SIYZg,1493
3
- airflow/providers/http/exceptions.py,sha256=WnIEj0cnAS746uRF1661tCEBc_Uuo0bMEIMrQyEb9nc,1084
4
- airflow/providers/http/get_provider_info.py,sha256=eJrWksE0KVCB32BavHb64imCUZZIoWyJYnsBj8UsS9Y,2657
5
- airflow/providers/http/version_compat.py,sha256=MGhWZkvkF4S7MI6wOQmQrUCKHJ2vqDe1WjSkQNyAtRU,2082
6
- airflow/providers/http/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
7
- airflow/providers/http/hooks/http.py,sha256=MI1Y9VQKJYgSLlhm_s-pP3fjzahyJjGHCPi_vdbZ-3U,22238
8
- airflow/providers/http/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
9
- airflow/providers/http/operators/http.py,sha256=KzviUKRlf0-Omam9bjQuguWhvHmQTtlZz50kc1Z-95A,15538
10
- airflow/providers/http/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
11
- airflow/providers/http/sensors/http.py,sha256=4lIgV7-3AUy64tAv9N5lKiDtWYaqaVHG_cXkz_Gijq8,8502
12
- airflow/providers/http/triggers/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
13
- airflow/providers/http/triggers/http.py,sha256=DG4_20edkRT44grqB5Gyx8pD0ueYsTURpsBMRwO1SNQ,8501
14
- apache_airflow_providers_http-5.3.3.dist-info/entry_points.txt,sha256=65Rk4MYlxxtwo7y7-uNv4KS7MfoBnILhMjRQmNbRo1Q,100
15
- apache_airflow_providers_http-5.3.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
16
- apache_airflow_providers_http-5.3.3.dist-info/METADATA,sha256=yD0RDQjHq1TsmgE-dl4qgyMOeNZCyCFU5bn5zgIm_QA,4139
17
- apache_airflow_providers_http-5.3.3.dist-info/RECORD,,