apache-airflow-providers-http 4.9.0rc1__tar.gz → 4.9.1rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (14)
  1. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/PKG-INFO +10 -10
  2. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/README.rst +5 -5
  3. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/__init__.py +1 -1
  4. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/get_provider_info.py +4 -3
  5. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/hooks/http.py +3 -5
  6. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/operators/http.py +9 -8
  7. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/sensors/http.py +31 -0
  8. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/triggers/http.py +74 -2
  9. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/pyproject.toml +5 -5
  10. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/LICENSE +0 -0
  11. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/hooks/__init__.py +0 -0
  12. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/operators/__init__.py +0 -0
  13. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/sensors/__init__.py +0 -0
  14. {apache_airflow_providers_http-4.9.0rc1 → apache_airflow_providers_http-4.9.1rc1}/airflow/providers/http/triggers/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-http
-Version: 4.9.0rc1
+Version: 4.9.1rc1
 Summary: Provider package apache-airflow-providers-http for Apache Airflow
 Keywords: airflow-provider,http,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,14 +20,14 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: aiohttp
+Requires-Dist: aiohttp>=3.9.2
 Requires-Dist: apache-airflow>=2.6.0.dev0
 Requires-Dist: asgiref
-Requires-Dist: requests>=2.26.0
+Requires-Dist: requests>=2.27.0,<3
 Requires-Dist: requests_toolbelt
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -77,7 +77,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-http``
 
-Release: ``4.9.0.rc1``
+Release: ``4.9.1.rc1``
 
 
 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -90,7 +90,7 @@ This is a provider package for ``http`` provider. All classes for this provider
 are in ``airflow.providers.http`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1/>`_.
 
 Installation
 ------------
@@ -108,11 +108,11 @@ Requirements
 PIP package            Version required
 =====================  ==================
 ``apache-airflow``     ``>=2.6.0``
-``requests``           ``>=2.26.0``
+``requests``           ``>=2.27.0,<3``
 ``requests_toolbelt``
-``aiohttp``
+``aiohttp``            ``>=3.9.2``
 ``asgiref``
 =====================  ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1/changelog.html>`_.
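The metadata hunks above tighten the ``aiohttp`` and ``requests`` pins and point every URL at the 4.9.1 documentation. As a quick sanity check (a sketch, not part of the package), the installed distribution's metadata can be inspected at runtime; the printed values are illustrative:

from importlib.metadata import requires, version

# Version string of the installed provider distribution, e.g. "4.9.1rc1".
print(version("apache-airflow-providers-http"))
# Requirement strings parsed from Requires-Dist; after upgrading they should include
# the new pins such as "aiohttp>=3.9.2" and "requests>=2.27.0,<3" (exact formatting may vary).
print(requires("apache-airflow-providers-http"))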
README.rst
@@ -42,7 +42,7 @@
 
 Package ``apache-airflow-providers-http``
 
-Release: ``4.9.0.rc1``
+Release: ``4.9.1.rc1``
 
 
 `Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``http`` provider. All classes for this provider
 are in ``airflow.providers.http`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1/>`_.
 
 Installation
 ------------
@@ -73,11 +73,11 @@ Requirements
 PIP package            Version required
 =====================  ==================
 ``apache-airflow``     ``>=2.6.0``
-``requests``           ``>=2.26.0``
+``requests``           ``>=2.27.0,<3``
 ``requests_toolbelt``
-``aiohttp``
+``aiohttp``            ``>=3.9.2``
 ``asgiref``
 =====================  ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1/changelog.html>`_.
airflow/providers/http/__init__.py
@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "4.9.0"
+__version__ = "4.9.1"
 
 try:
     from airflow import __version__ as airflow_version
airflow/providers/http/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Hypertext Transfer Protocol (HTTP)",
         "description": "`Hypertext Transfer Protocol (HTTP) <https://www.w3.org/Protocols/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1705912133,
+        "source-date-epoch": 1707636407,
         "versions": [
+            "4.9.1",
             "4.9.0",
             "4.8.0",
             "4.7.0",
@@ -59,9 +60,9 @@ def get_provider_info():
         ],
         "dependencies": [
             "apache-airflow>=2.6.0",
-            "requests>=2.26.0",
+            "requests>=2.27.0,<3",
             "requests_toolbelt",
-            "aiohttp",
+            "aiohttp>=3.9.2",
             "asgiref",
         ],
         "integrations": [
airflow/providers/http/hooks/http.py
@@ -68,9 +68,8 @@ class HttpHook(BaseHook):
         tcp_keep_alive_idle: int = 120,
         tcp_keep_alive_count: int = 20,
         tcp_keep_alive_interval: int = 30,
-        **kwargs,
     ) -> None:
-        super().__init__(**kwargs)
+        super().__init__()
         self.http_conn_id = http_conn_id
         self.method = method.upper()
         self.base_url: str = ""
@@ -259,7 +258,8 @@ class HttpHook(BaseHook):
         """
         self._retry_obj = tenacity.Retrying(**_retry_args)
 
-        return self._retry_obj(self.run, *args, **kwargs)
+        # TODO: remove ignore type when https://github.com/jd/tenacity/issues/428 is resolved
+        return self._retry_obj(self.run, *args, **kwargs)  # type: ignore
 
     def url_from_endpoint(self, endpoint: str | None) -> str:
         """Combine base url with endpoint."""
@@ -298,9 +298,7 @@ class HttpAsyncHook(BaseHook):
         auth_type: Any = aiohttp.BasicAuth,
         retry_limit: int = 3,
         retry_delay: float = 1.0,
-        **kwargs,
     ) -> None:
-        super().__init__(**kwargs)
         self.http_conn_id = http_conn_id
         self.method = method.upper()
         self.base_url: str = ""
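The second hunk above only adds a ``# type: ignore`` for a tenacity typing issue, but for context: ``run_with_advanced_retry`` wraps ``HttpHook.run`` in a ``tenacity.Retrying`` object built from ``_retry_args``. A minimal usage sketch (the connection id, endpoint, and retry arguments below are illustrative, not taken from this diff):

import tenacity

from airflow.providers.http.hooks.http import HttpHook

hook = HttpHook(method="GET", http_conn_id="http_default")
# Retry the whole hook.run() call with exponential backoff, up to 5 attempts,
# but only when a ConnectionError is raised.
response = hook.run_with_advanced_retry(
    _retry_args={
        "wait": tenacity.wait_exponential(),
        "stop": tenacity.stop_after_attempt(5),
        "retry": tenacity.retry_if_exception_type(ConnectionError),
    },
    endpoint="api/v1/health",
)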
airflow/providers/http/operators/http.py
@@ -19,9 +19,9 @@ from __future__ import annotations
 
 import base64
 import pickle
-import warnings
 from typing import TYPE_CHECKING, Any, Callable, Sequence
 
+from deprecated import deprecated
 from requests import Response
 
 from airflow.configuration import conf
@@ -233,7 +233,7 @@ class HttpOperator(BaseOperator):
         self, context: Context, event: dict, paginated_responses: None | list[Response] = None
     ):
         """
-        Callback for when the trigger fires - returns immediately.
+        Execute callback when the trigger fires; returns immediately.
 
         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
         """
@@ -291,6 +291,13 @@
         )
 
 
+@deprecated(
+    reason=(
+        "Class `SimpleHttpOperator` is deprecated and "
+        "will be removed in a future release. Please use `HttpOperator` instead."
+    ),
+    category=AirflowProviderDeprecationWarning,
+)
 class SimpleHttpOperator(HttpOperator):
     """
     Calls an endpoint on an HTTP system to execute an action.
@@ -345,10 +352,4 @@ class SimpleHttpOperator(HttpOperator):
     """
 
     def __init__(self, **kwargs: Any):
-        warnings.warn(
-            "Class `SimpleHttpOperator` is deprecated and "
-            "will be removed in a future release. Please use `HttpOperator` instead.",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
         super().__init__(**kwargs)
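Moving the deprecation from a ``warnings.warn`` call in ``__init__`` to the ``@deprecated`` decorator does not change what DAG authors see: instantiating ``SimpleHttpOperator`` still emits ``AirflowProviderDeprecationWarning``. A small sketch (task ids and endpoint are made up for the example):

import warnings

from airflow.providers.http.operators.http import HttpOperator, SimpleHttpOperator

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Deprecated alias: the decorator attaches the warning to the class itself,
    # so it fires on instantiation just as the old warnings.warn() call did.
    SimpleHttpOperator(task_id="old_style", endpoint="ping")
print([str(w.message) for w in caught])  # expect the "use `HttpOperator` instead" message

# Preferred replacement, no warning emitted.
HttpOperator(task_id="new_style", endpoint="ping")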
airflow/providers/http/sensors/http.py
@@ -17,10 +17,13 @@
 # under the License.
 from __future__ import annotations
 
+from datetime import timedelta
 from typing import TYPE_CHECKING, Any, Callable, Sequence
 
+from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.providers.http.hooks.http import HttpHook
+from airflow.providers.http.triggers.http import HttpSensorTrigger
 from airflow.sensors.base import BaseSensorOperator
 
 if TYPE_CHECKING:
@@ -78,6 +81,8 @@ class HttpSensor(BaseSensorOperator):
     :param tcp_keep_alive_count: The TCP Keep Alive count parameter (corresponds to ``socket.TCP_KEEPCNT``)
     :param tcp_keep_alive_interval: The TCP Keep Alive interval parameter (corresponds to
         ``socket.TCP_KEEPINTVL``)
+    :param deferrable: If waiting for completion, whether to defer the task until done,
+        default is ``False``
     """
 
     template_fields: Sequence[str] = ("endpoint", "request_params", "headers")
@@ -97,6 +102,7 @@
         tcp_keep_alive_idle: int = 120,
         tcp_keep_alive_count: int = 20,
         tcp_keep_alive_interval: int = 30,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
@@ -114,6 +120,7 @@
         self.tcp_keep_alive_idle = tcp_keep_alive_idle
         self.tcp_keep_alive_count = tcp_keep_alive_count
         self.tcp_keep_alive_interval = tcp_keep_alive_interval
+        self.deferrable = deferrable
 
     def poke(self, context: Context) -> bool:
         from airflow.utils.operator_helpers import determine_kwargs
@@ -135,9 +142,12 @@
                 headers=self.headers,
                 extra_options=self.extra_options,
             )
+
             if self.response_check:
                 kwargs = determine_kwargs(self.response_check, [response], context)
+
                 return self.response_check(response, **kwargs)
+
         except AirflowException as exc:
             if str(exc).startswith(self.response_error_codes_allowlist):
                 return False
@@ -148,3 +158,24 @@
             raise exc
 
         return True
+
+    def execute(self, context: Context) -> None:
+        if not self.deferrable or self.response_check:
+            super().execute(context=context)
+        elif not self.poke(context):
+            self.defer(
+                timeout=timedelta(seconds=self.timeout),
+                trigger=HttpSensorTrigger(
+                    endpoint=self.endpoint,
+                    http_conn_id=self.http_conn_id,
+                    data=self.request_params,
+                    headers=self.headers,
+                    method=self.method,
+                    extra_options=self.extra_options,
+                    poke_interval=self.poke_interval,
+                ),
+                method_name="execute_complete",
+            )
+
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
+        self.log.info("%s completed successfully.", self.task_id)
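With the new flag, ``HttpSensor.execute`` hands the polling loop to the ``HttpSensorTrigger`` shown in the next file whenever ``deferrable=True`` and no ``response_check`` is set (a ``response_check`` forces the classic worker-side poke path). A minimal DAG sketch, assuming an ``http_default`` connection and a ``health`` endpoint (both illustrative):

from datetime import datetime

from airflow import DAG
from airflow.providers.http.sensors.http import HttpSensor

with DAG(dag_id="http_sensor_deferrable_example", start_date=datetime(2024, 1, 1), schedule=None):
    # Polls <base_url>/health every 30 seconds from the triggerer instead of
    # occupying a worker slot between pokes; times out after 600 seconds.
    wait_for_api = HttpSensor(
        task_id="wait_for_api",
        http_conn_id="http_default",
        endpoint="health",
        poke_interval=30,
        timeout=600,
        deferrable=True,
    )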
airflow/providers/http/triggers/http.py
@@ -16,6 +16,7 @@
 # under the License.
 from __future__ import annotations
 
+import asyncio
 import base64
 import pickle
 from typing import TYPE_CHECKING, Any, AsyncIterator
@@ -24,6 +25,7 @@ import requests
 from requests.cookies import RequestsCookieJar
 from requests.structures import CaseInsensitiveDict
 
+from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpAsyncHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
@@ -69,7 +71,7 @@ class HttpTrigger(BaseTrigger):
         self.extra_options = extra_options
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
-        """Serializes HttpTrigger arguments and classpath."""
+        """Serialize HttpTrigger arguments and classpath."""
        return (
             "airflow.providers.http.triggers.http.HttpTrigger",
             {
@@ -84,7 +86,7 @@
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
-        """Makes a series of asynchronous http calls via an http hook."""
+        """Make a series of asynchronous http calls via a http hook."""
         hook = HttpAsyncHook(
             method=self.method,
             http_conn_id=self.http_conn_id,
@@ -124,3 +126,73 @@ class HttpTrigger(BaseTrigger):
             cookies.set(k, v)
         response.cookies = cookies
         return response
+
+
+class HttpSensorTrigger(BaseTrigger):
+    """
+    A trigger that fires when the request to a URL returns a non-404 status code.
+
+    :param endpoint: The relative part of the full url
+    :param http_conn_id: The HTTP Connection ID to run the sensor against
+    :param method: The HTTP request method to use
+    :param data: payload to be uploaded or aiohttp parameters
+    :param headers: The HTTP headers to be added to the GET request
+    :param extra_options: Additional kwargs to pass when creating a request.
+        For example, ``run(json=obj)`` is passed as ``aiohttp.ClientSession().get(json=obj)``
+    :param poke_interval: Time to sleep using asyncio
+    """
+
+    def __init__(
+        self,
+        endpoint: str | None = None,
+        http_conn_id: str = "http_default",
+        method: str = "GET",
+        data: dict[str, Any] | str | None = None,
+        headers: dict[str, str] | None = None,
+        extra_options: dict[str, Any] | None = None,
+        poke_interval: float = 5.0,
+    ):
+        super().__init__()
+        self.endpoint = endpoint
+        self.method = method
+        self.data = data
+        self.headers = headers
+        self.extra_options = extra_options or {}
+        self.http_conn_id = http_conn_id
+        self.poke_interval = poke_interval
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize HttpTrigger arguments and classpath."""
+        return (
+            "airflow.providers.http.triggers.http.HttpSensorTrigger",
+            {
+                "endpoint": self.endpoint,
+                "data": self.data,
+                "headers": self.headers,
+                "extra_options": self.extra_options,
+                "http_conn_id": self.http_conn_id,
+                "poke_interval": self.poke_interval,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Make a series of asynchronous http calls via an http hook."""
+        hook = self._get_async_hook()
+        while True:
+            try:
+                await hook.run(
+                    endpoint=self.endpoint,
+                    data=self.data,
+                    headers=self.headers,
+                    extra_options=self.extra_options,
+                )
+                yield TriggerEvent(True)
+            except AirflowException as exc:
+                if str(exc).startswith("404"):
+                    await asyncio.sleep(self.poke_interval)
+
+    def _get_async_hook(self) -> HttpAsyncHook:
+        return HttpAsyncHook(
+            method=self.method,
+            http_conn_id=self.http_conn_id,
+        )
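``HttpSensorTrigger.serialize`` is what lets the triggerer persist and later rebuild the trigger. A small round-trip sketch (the endpoint value is made up); note that ``method`` is not included in the serialized kwargs, so a rebuilt trigger falls back to the default ``"GET"``:

from airflow.providers.http.triggers.http import HttpSensorTrigger

trigger = HttpSensorTrigger(endpoint="health", http_conn_id="http_default", poke_interval=10)
classpath, kwargs = trigger.serialize()
assert classpath == "airflow.providers.http.triggers.http.HttpSensorTrigger"

# Rebuild an equivalent trigger from the serialized arguments, as the triggerer would.
rebuilt = HttpSensorTrigger(**kwargs)
assert rebuilt.method == "GET"  # "method" is not serialized, so it resets to the default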
pyproject.toml
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-http"
-version = "4.9.0.rc1"
+version = "4.9.1.rc1"
 description = "Provider package apache-airflow-providers-http for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -55,16 +55,16 @@ classifiers = [
 ]
 requires-python = "~=3.8"
 dependencies = [
-    "aiohttp",
+    "aiohttp>=3.9.2",
     "apache-airflow>=2.6.0.dev0",
     "asgiref",
-    "requests>=2.26.0",
+    "requests>=2.27.0,<3",
     "requests_toolbelt",
 ]
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.0/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-http/4.9.1/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"