apache-airflow-providers-http 4.13.3__tar.gz → 5.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (14) hide show
  1. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/PKG-INFO +14 -14
  2. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/README.rst +8 -8
  3. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/__init__.py +3 -3
  4. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/get_provider_info.py +4 -3
  5. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/hooks/http.py +80 -46
  6. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/operators/http.py +3 -67
  7. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/sensors/http.py +3 -2
  8. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/triggers/http.py +2 -1
  9. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/pyproject.toml +6 -6
  10. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/LICENSE +0 -0
  11. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/hooks/__init__.py +0 -0
  12. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/operators/__init__.py +0 -0
  13. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/sensors/__init__.py +0 -0
  14. {apache_airflow_providers_http-4.13.3 → apache_airflow_providers_http-5.0.0}/airflow/providers/http/triggers/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: apache-airflow-providers-http
3
- Version: 4.13.3
3
+ Version: 5.0.0
4
4
  Summary: Provider package apache-airflow-providers-http for Apache Airflow
5
5
  Keywords: airflow-provider,http,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,17 +20,17 @@ Classifier: Programming Language :: Python :: 3.10
20
20
  Classifier: Programming Language :: Python :: 3.11
21
21
  Classifier: Programming Language :: Python :: 3.12
22
22
  Classifier: Topic :: System :: Monitoring
23
- Requires-Dist: aiohttp>=3.9.2,<3.11.0
24
- Requires-Dist: apache-airflow>=2.8.0
23
+ Requires-Dist: aiohttp>=3.9.2,!=3.11.0
24
+ Requires-Dist: apache-airflow>=2.9.0
25
25
  Requires-Dist: asgiref>=2.3.0
26
26
  Requires-Dist: requests-toolbelt>=0.4.0
27
27
  Requires-Dist: requests>=2.27.0,<3
28
28
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
29
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3/changelog.html
30
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3
29
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0/changelog.html
30
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0
31
31
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
32
32
  Project-URL: Source Code, https://github.com/apache/airflow
33
- Project-URL: Twitter, https://twitter.com/ApacheAirflow
33
+ Project-URL: Twitter, https://x.com/ApacheAirflow
34
34
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
35
35
 
36
36
 
@@ -77,7 +77,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
77
77
 
78
78
  Package ``apache-airflow-providers-http``
79
79
 
80
- Release: ``4.13.3``
80
+ Release: ``5.0.0``
81
81
 
82
82
 
83
83
  `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -90,7 +90,7 @@ This is a provider package for ``http`` provider. All classes for this provider
90
90
  are in ``airflow.providers.http`` python package.
91
91
 
92
92
  You can find package information and changelog for the provider
93
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3/>`_.
93
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0/>`_.
94
94
 
95
95
  Installation
96
96
  ------------
@@ -104,15 +104,15 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
104
104
  Requirements
105
105
  ------------
106
106
 
107
- ===================== ===================
107
+ ===================== ====================
108
108
  PIP package Version required
109
- ===================== ===================
110
- ``apache-airflow`` ``>=2.8.0``
109
+ ===================== ====================
110
+ ``apache-airflow`` ``>=2.9.0``
111
111
  ``requests`` ``>=2.27.0,<3``
112
112
  ``requests-toolbelt`` ``>=0.4.0``
113
- ``aiohttp`` ``<3.11.0,>=3.9.2``
113
+ ``aiohttp`` ``!=3.11.0,>=3.9.2``
114
114
  ``asgiref`` ``>=2.3.0``
115
- ===================== ===================
115
+ ===================== ====================
116
116
 
117
117
  The changelog for the provider package can be found in the
118
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3/changelog.html>`_.
118
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0/changelog.html>`_.
@@ -42,7 +42,7 @@
42
42
 
43
43
  Package ``apache-airflow-providers-http``
44
44
 
45
- Release: ``4.13.3``
45
+ Release: ``5.0.0``
46
46
 
47
47
 
48
48
  `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``http`` provider. All classes for this provider
55
55
  are in ``airflow.providers.http`` python package.
56
56
 
57
57
  You can find package information and changelog for the provider
58
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3/>`_.
58
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0/>`_.
59
59
 
60
60
  Installation
61
61
  ------------
@@ -69,15 +69,15 @@ The package supports the following python versions: 3.9,3.10,3.11,3.12
69
69
  Requirements
70
70
  ------------
71
71
 
72
- ===================== ===================
72
+ ===================== ====================
73
73
  PIP package Version required
74
- ===================== ===================
75
- ``apache-airflow`` ``>=2.8.0``
74
+ ===================== ====================
75
+ ``apache-airflow`` ``>=2.9.0``
76
76
  ``requests`` ``>=2.27.0,<3``
77
77
  ``requests-toolbelt`` ``>=0.4.0``
78
- ``aiohttp`` ``<3.11.0,>=3.9.2``
78
+ ``aiohttp`` ``!=3.11.0,>=3.9.2``
79
79
  ``asgiref`` ``>=2.3.0``
80
- ===================== ===================
80
+ ===================== ====================
81
81
 
82
82
  The changelog for the provider package can be found in the
83
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3/changelog.html>`_.
83
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0/changelog.html>`_.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "4.13.3"
32
+ __version__ = "5.0.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.8.0"
35
+ "2.9.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-http:{__version__}` needs Apache Airflow 2.8.0+"
38
+ f"The package `apache-airflow-providers-http:{__version__}` needs Apache Airflow 2.9.0+"
39
39
  )
@@ -28,8 +28,9 @@ def get_provider_info():
28
28
  "name": "Hypertext Transfer Protocol (HTTP)",
29
29
  "description": "`Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__\n",
30
30
  "state": "ready",
31
- "source-date-epoch": 1731570291,
31
+ "source-date-epoch": 1734534857,
32
32
  "versions": [
33
+ "5.0.0",
33
34
  "4.13.3",
34
35
  "4.13.2",
35
36
  "4.13.1",
@@ -68,10 +69,10 @@ def get_provider_info():
68
69
  "1.0.0",
69
70
  ],
70
71
  "dependencies": [
71
- "apache-airflow>=2.8.0",
72
+ "apache-airflow>=2.9.0",
72
73
  "requests>=2.27.0,<3",
73
74
  "requests-toolbelt>=0.4.0",
74
- "aiohttp>=3.9.2,<3.11.0",
75
+ "aiohttp>=3.9.2,!=3.11.0",
75
76
  "asgiref>=2.3.0",
76
77
  ],
77
78
  "integrations": [
@@ -19,6 +19,7 @@ from __future__ import annotations
19
19
 
20
20
  import asyncio
21
21
  from typing import TYPE_CHECKING, Any, Callable
22
+ from urllib.parse import urlparse
22
23
 
23
24
  import aiohttp
24
25
  import requests
@@ -34,6 +35,7 @@ from airflow.hooks.base import BaseHook
34
35
 
35
36
  if TYPE_CHECKING:
36
37
  from aiohttp.client_reqrep import ClientResponse
38
+ from requests.adapters import HTTPAdapter
37
39
 
38
40
  from airflow.models import Connection
39
41
 
@@ -54,6 +56,7 @@ class HttpHook(BaseHook):
54
56
  API url i.e https://www.google.com/ and optional authentication credentials. Default
55
57
  headers can also be specified in the Extra field in json format.
56
58
  :param auth_type: The auth type for the service
59
+ :param adapter: An optional instance of `requests.adapters.HTTPAdapter` to mount for the session.
57
60
  :param tcp_keep_alive: Enable TCP Keep Alive for the connection.
58
61
  :param tcp_keep_alive_idle: The TCP Keep Alive Idle parameter (corresponds to ``socket.TCP_KEEPIDLE``).
59
62
  :param tcp_keep_alive_count: The TCP Keep Alive count parameter (corresponds to ``socket.TCP_KEEPCNT``)
@@ -76,6 +79,7 @@ class HttpHook(BaseHook):
76
79
  tcp_keep_alive_idle: int = 120,
77
80
  tcp_keep_alive_count: int = 20,
78
81
  tcp_keep_alive_interval: int = 30,
82
+ adapter: HTTPAdapter | None = None,
79
83
  ) -> None:
80
84
  super().__init__()
81
85
  self.http_conn_id = http_conn_id
@@ -83,10 +87,17 @@ class HttpHook(BaseHook):
83
87
  self.base_url: str = ""
84
88
  self._retry_obj: Callable[..., Any]
85
89
  self._auth_type: Any = auth_type
86
- self.tcp_keep_alive = tcp_keep_alive
87
- self.keep_alive_idle = tcp_keep_alive_idle
88
- self.keep_alive_count = tcp_keep_alive_count
89
- self.keep_alive_interval = tcp_keep_alive_interval
90
+
91
+ # If no adapter is provided, use TCPKeepAliveAdapter (default behavior)
92
+ self.adapter = adapter
93
+ if tcp_keep_alive and adapter is None:
94
+ self.keep_alive_adapter = TCPKeepAliveAdapter(
95
+ idle=tcp_keep_alive_idle,
96
+ count=tcp_keep_alive_count,
97
+ interval=tcp_keep_alive_interval,
98
+ )
99
+ else:
100
+ self.keep_alive_adapter = None
90
101
 
91
102
  @property
92
103
  def auth_type(self):
@@ -102,47 +113,76 @@ class HttpHook(BaseHook):
102
113
  """
103
114
  Create a Requests HTTP session.
104
115
 
105
- :param headers: additional headers to be passed through as a dictionary
116
+ :param headers: Additional headers to be passed through as a dictionary.
117
+ :return: A configured requests.Session object.
106
118
  """
107
119
  session = requests.Session()
108
-
109
- if self.http_conn_id:
110
- conn = self.get_connection(self.http_conn_id)
111
-
112
- if conn.host and "://" in conn.host:
113
- self.base_url = conn.host
114
- else:
115
- # schema defaults to HTTP
116
- schema = conn.schema if conn.schema else "http"
117
- host = conn.host if conn.host else ""
118
- self.base_url = f"{schema}://{host}"
119
-
120
- if conn.port:
121
- self.base_url += f":{conn.port}"
122
- if conn.login:
123
- session.auth = self.auth_type(conn.login, conn.password)
124
- elif self._auth_type:
125
- session.auth = self.auth_type()
126
- if conn.extra:
127
- extra = conn.extra_dejson
128
- extra.pop(
129
- "timeout", None
130
- ) # ignore this as timeout is only accepted in request method of Session
131
- extra.pop("allow_redirects", None) # ignore this as only max_redirects is accepted in Session
132
- session.proxies = extra.pop("proxies", extra.pop("proxy", {}))
133
- session.stream = extra.pop("stream", False)
134
- session.verify = extra.pop("verify", extra.pop("verify_ssl", True))
135
- session.cert = extra.pop("cert", None)
136
- session.max_redirects = extra.pop("max_redirects", DEFAULT_REDIRECT_LIMIT)
137
- session.trust_env = extra.pop("trust_env", True)
138
-
139
- try:
140
- session.headers.update(extra)
141
- except TypeError:
142
- self.log.warning("Connection to %s has invalid extra field.", conn.host)
120
+ connection = self.get_connection(self.http_conn_id)
121
+ self._set_base_url(connection)
122
+ session = self._configure_session_from_auth(session, connection)
123
+ if connection.extra:
124
+ session = self._configure_session_from_extra(session, connection)
125
+ session = self._configure_session_from_mount_adapters(session)
143
126
  if headers:
144
127
  session.headers.update(headers)
128
+ return session
129
+
130
+ def _set_base_url(self, connection: Connection) -> None:
131
+ host = connection.host or ""
132
+ schema = connection.schema or "http"
133
+ # RFC 3986 (https://www.rfc-editor.org/rfc/rfc3986.html#page-16)
134
+ if "://" in host:
135
+ self.base_url = host
136
+ else:
137
+ self.base_url = f"{schema}://{host}" if host else f"{schema}://"
138
+ if connection.port:
139
+ self.base_url = f"{self.base_url}:{connection.port}"
140
+ parsed = urlparse(self.base_url)
141
+ if not parsed.scheme:
142
+ raise ValueError(f"Invalid base URL: Missing scheme in {self.base_url}")
143
+
144
+ def _configure_session_from_auth(
145
+ self, session: requests.Session, connection: Connection
146
+ ) -> requests.Session:
147
+ session.auth = self._extract_auth(connection)
148
+ return session
149
+
150
+ def _extract_auth(self, connection: Connection) -> Any | None:
151
+ if connection.login:
152
+ return self.auth_type(connection.login, connection.password)
153
+ elif self._auth_type:
154
+ return self.auth_type()
155
+ return None
156
+
157
+ def _configure_session_from_extra(
158
+ self, session: requests.Session, connection: Connection
159
+ ) -> requests.Session:
160
+ extra = connection.extra_dejson
161
+ extra.pop("timeout", None)
162
+ extra.pop("allow_redirects", None)
163
+ session.proxies = extra.pop("proxies", extra.pop("proxy", {}))
164
+ session.stream = extra.pop("stream", False)
165
+ session.verify = extra.pop("verify", extra.pop("verify_ssl", True))
166
+ session.cert = extra.pop("cert", None)
167
+ session.max_redirects = extra.pop("max_redirects", DEFAULT_REDIRECT_LIMIT)
168
+ session.trust_env = extra.pop("trust_env", True)
169
+ try:
170
+ session.headers.update(extra)
171
+ except TypeError:
172
+ self.log.warning("Connection to %s has invalid extra field.", connection.host)
173
+ return session
145
174
 
175
+ def _configure_session_from_mount_adapters(self, session: requests.Session) -> requests.Session:
176
+ scheme = urlparse(self.base_url).scheme
177
+ if not scheme:
178
+ raise ValueError(
179
+ f"Cannot mount adapters: {self.base_url} does not include a valid scheme (http or https)."
180
+ )
181
+ if self.adapter:
182
+ session.mount(f"{scheme}://", self.adapter)
183
+ elif self.keep_alive_adapter:
184
+ session.mount("http://", self.keep_alive_adapter)
185
+ session.mount("https://", self.keep_alive_adapter)
146
186
  return session
147
187
 
148
188
  def run(
@@ -171,11 +211,6 @@ class HttpHook(BaseHook):
171
211
 
172
212
  url = self.url_from_endpoint(endpoint)
173
213
 
174
- if self.tcp_keep_alive:
175
- keep_alive_adapter = TCPKeepAliveAdapter(
176
- idle=self.keep_alive_idle, count=self.keep_alive_count, interval=self.keep_alive_interval
177
- )
178
- session.mount(url, keep_alive_adapter)
179
214
  if self.method == "GET":
180
215
  # GET uses params
181
216
  req = requests.Request(self.method, url, params=data, headers=headers, **request_kwargs)
@@ -467,5 +502,4 @@ class HttpAsyncHook(BaseHook):
467
502
  if exception.status == 413:
468
503
  # don't retry for payload Too Large
469
504
  return False
470
-
471
505
  return exception.status >= 500
@@ -19,13 +19,13 @@ from __future__ import annotations
19
19
 
20
20
  import base64
21
21
  import pickle
22
- from typing import TYPE_CHECKING, Any, Callable, Sequence
22
+ from collections.abc import Sequence
23
+ from typing import TYPE_CHECKING, Any, Callable
23
24
 
24
- from deprecated import deprecated
25
25
  from requests import Response
26
26
 
27
27
  from airflow.configuration import conf
28
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
28
+ from airflow.exceptions import AirflowException
29
29
  from airflow.hooks.base import BaseHook
30
30
  from airflow.models import BaseOperator
31
31
  from airflow.providers.http.triggers.http import HttpTrigger
@@ -320,67 +320,3 @@ class HttpOperator(BaseOperator):
320
320
  extra_options=merge_dicts(self.extra_options, next_page_params.get("extra_options", {})),
321
321
  **self.request_kwargs,
322
322
  )
323
-
324
-
325
- @deprecated(
326
- reason=(
327
- "Class `SimpleHttpOperator` is deprecated and "
328
- "will be removed in a future release. Please use `HttpOperator` instead."
329
- ),
330
- category=AirflowProviderDeprecationWarning,
331
- )
332
- class SimpleHttpOperator(HttpOperator):
333
- """
334
- Calls an endpoint on an HTTP system to execute an action.
335
-
336
- .. seealso::
337
- For more information on how to use this operator, take a look at the guide:
338
- :ref:`howto/operator:HttpOperator`
339
-
340
- :param http_conn_id: The :ref:`http connection<howto/connection:http>` to run
341
- the operator against
342
- :param endpoint: The relative part of the full url. (templated)
343
- :param method: The HTTP method to use, default = "POST"
344
- :param data: The data to pass. POST-data in POST/PUT and params
345
- in the URL for a GET request. (templated)
346
- :param headers: The HTTP headers to be added to the GET request
347
- :param pagination_function: A callable that generates the parameters used to call the API again,
348
- based on the previous response. Typically used when the API is paginated and returns for e.g a
349
- cursor, a 'next page id', or a 'next page URL'. When provided, the Operator will call the API
350
- repeatedly until this callable returns None. The result of the Operator will become by default a
351
- list of Response.text objects (instead of a single response object). Same with the other injected
352
- functions (like response_check, response_filter, ...) which will also receive a list of Response
353
- objects. This function receives a Response object from the previous call, and should return a nested
354
- dictionary with the following optional keys: `endpoint`, `data`, `headers` and `extra_options`.
355
- Those keys will be merged and/or override the parameters provided into the HttpOperator declaration.
356
- Parameters are merged when they are both a dictionary (e.g.: HttpOperator.headers will be merged
357
- with the `headers` dict provided by this function). When merging, dict items returned by this
358
- function will override initial ones (e.g: if both HttpOperator.headers and `headers` have a 'cookie'
359
- item, the one provided by `headers` is kept). Parameters are simply overridden when any of them are
360
- string (e.g.: HttpOperator.endpoint is overridden by `endpoint`).
361
- :param response_check: A check against the 'requests' response object.
362
- The callable takes the response object as the first positional argument
363
- and optionally any number of keyword arguments available in the context dictionary.
364
- It should return True for 'pass' and False otherwise. If a pagination_function
365
- is provided, this function will receive a list of response objects instead of a
366
- single response object.
367
- :param response_filter: A function allowing you to manipulate the response
368
- text. e.g. response_filter=lambda response: json.loads(response.text).
369
- The callable takes the response object as the first positional argument
370
- and optionally any number of keyword arguments available in the context dictionary.
371
- If a pagination_function is provided, this function will receive a list of response
372
- object instead of a single response object.
373
- :param extra_options: Extra options for the 'requests' library, see the
374
- 'requests' documentation (options to modify timeout, ssl, etc.)
375
- :param log_response: Log the response (default: False)
376
- :param auth_type: The auth type for the service
377
- :param tcp_keep_alive: Enable TCP Keep Alive for the connection.
378
- :param tcp_keep_alive_idle: The TCP Keep Alive Idle parameter (corresponds to ``socket.TCP_KEEPIDLE``).
379
- :param tcp_keep_alive_count: The TCP Keep Alive count parameter (corresponds to ``socket.TCP_KEEPCNT``)
380
- :param tcp_keep_alive_interval: The TCP Keep Alive interval parameter (corresponds to
381
- ``socket.TCP_KEEPINTVL``)
382
- :param deferrable: Run operator in the deferrable mode
383
- """
384
-
385
- def __init__(self, **kwargs: Any):
386
- super().__init__(**kwargs)
@@ -17,8 +17,9 @@
17
17
  # under the License.
18
18
  from __future__ import annotations
19
19
 
20
+ from collections.abc import Sequence
20
21
  from datetime import timedelta
21
- from typing import TYPE_CHECKING, Any, Callable, Sequence
22
+ from typing import TYPE_CHECKING, Any, Callable
22
23
 
23
24
  from airflow.configuration import conf
24
25
  from airflow.exceptions import AirflowException
@@ -48,7 +49,7 @@ class HttpSensor(BaseSensorOperator):
48
49
 
49
50
  def response_check(response, task_instance):
50
51
  # The task_instance is injected, so you can pull data form xcom
51
- # Other context variables such as dag, ds, execution_date are also available.
52
+ # Other context variables such as dag, ds, logical_date are also available.
52
53
  xcom_data = task_instance.xcom_pull(task_ids="pushing_task")
53
54
  # In practice you would do something more sensible with this data..
54
55
  print(xcom_data)
@@ -19,7 +19,8 @@ from __future__ import annotations
19
19
  import asyncio
20
20
  import base64
21
21
  import pickle
22
- from typing import TYPE_CHECKING, Any, AsyncIterator
22
+ from collections.abc import AsyncIterator
23
+ from typing import TYPE_CHECKING, Any
23
24
 
24
25
  import requests
25
26
  from requests.cookies import RequestsCookieJar
@@ -27,7 +27,7 @@ build-backend = "flit_core.buildapi"
27
27
 
28
28
  [project]
29
29
  name = "apache-airflow-providers-http"
30
- version = "4.13.3"
30
+ version = "5.0.0"
31
31
  description = "Provider package apache-airflow-providers-http for Apache Airflow"
32
32
  readme = "README.rst"
33
33
  authors = [
@@ -54,20 +54,20 @@ classifiers = [
54
54
  ]
55
55
  requires-python = "~=3.9"
56
56
  dependencies = [
57
- "aiohttp>=3.9.2,<3.11.0",
58
- "apache-airflow>=2.8.0",
57
+ "aiohttp>=3.9.2,!=3.11.0",
58
+ "apache-airflow>=2.9.0",
59
59
  "asgiref>=2.3.0",
60
60
  "requests-toolbelt>=0.4.0",
61
61
  "requests>=2.27.0,<3",
62
62
  ]
63
63
 
64
64
  [project.urls]
65
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3"
66
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/4.13.3/changelog.html"
65
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0"
66
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/5.0.0/changelog.html"
67
67
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
68
68
  "Source Code" = "https://github.com/apache/airflow"
69
69
  "Slack Chat" = "https://s.apache.org/airflow-slack"
70
- "Twitter" = "https://twitter.com/ApacheAirflow"
70
+ "Twitter" = "https://x.com/ApacheAirflow"
71
71
  "YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
72
72
 
73
73
  [project.entry-points."apache_airflow_provider"]