hishel 0.1.4__py3-none-any.whl → 1.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hishel/__init__.py +59 -52
- hishel/_async_cache.py +213 -0
- hishel/_async_httpx.py +236 -0
- hishel/_core/_headers.py +646 -0
- hishel/{beta/_core → _core}/_spec.py +270 -136
- hishel/_core/_storages/_async_base.py +71 -0
- hishel/_core/_storages/_async_sqlite.py +420 -0
- hishel/_core/_storages/_packing.py +144 -0
- hishel/_core/_storages/_sync_base.py +71 -0
- hishel/_core/_storages/_sync_sqlite.py +420 -0
- hishel/{beta/_core → _core}/models.py +100 -37
- hishel/_policies.py +49 -0
- hishel/_sync_cache.py +213 -0
- hishel/_sync_httpx.py +236 -0
- hishel/_utils.py +37 -366
- hishel/asgi.py +400 -0
- hishel/fastapi.py +263 -0
- hishel/httpx.py +12 -0
- hishel/{beta/requests.py → requests.py} +41 -30
- hishel-1.0.0b1.dist-info/METADATA +509 -0
- hishel-1.0.0b1.dist-info/RECORD +24 -0
- hishel/_async/__init__.py +0 -5
- hishel/_async/_client.py +0 -30
- hishel/_async/_mock.py +0 -43
- hishel/_async/_pool.py +0 -201
- hishel/_async/_storages.py +0 -768
- hishel/_async/_transports.py +0 -282
- hishel/_controller.py +0 -581
- hishel/_exceptions.py +0 -10
- hishel/_files.py +0 -54
- hishel/_headers.py +0 -215
- hishel/_lfu_cache.py +0 -71
- hishel/_lmdb_types_.pyi +0 -53
- hishel/_s3.py +0 -122
- hishel/_serializers.py +0 -329
- hishel/_sync/__init__.py +0 -5
- hishel/_sync/_client.py +0 -30
- hishel/_sync/_mock.py +0 -43
- hishel/_sync/_pool.py +0 -201
- hishel/_sync/_storages.py +0 -768
- hishel/_sync/_transports.py +0 -282
- hishel/_synchronization.py +0 -37
- hishel/beta/__init__.py +0 -59
- hishel/beta/_async_cache.py +0 -167
- hishel/beta/_core/__init__.py +0 -0
- hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
- hishel/beta/_core/_base/_storages/_base.py +0 -260
- hishel/beta/_core/_base/_storages/_packing.py +0 -165
- hishel/beta/_core/_headers.py +0 -301
- hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
- hishel/beta/_sync_cache.py +0 -167
- hishel/beta/httpx.py +0 -317
- hishel-0.1.4.dist-info/METADATA +0 -404
- hishel-0.1.4.dist-info/RECORD +0 -41
- {hishel-0.1.4.dist-info → hishel-1.0.0b1.dist-info}/WHEEL +0 -0
- {hishel-0.1.4.dist-info → hishel-1.0.0b1.dist-info}/licenses/LICENSE +0 -0
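The 0.1.x modules (hishel._async, hishel._sync, hishel._controller, the serializers and the old storage backends) are gone, and the 1.0.0b1 public surface is re-exported from the package root, as the hishel/__init__.py diff below shows. As a rough orientation, the new top-level imports look like this (a sketch based only on the names re-exported below; constructor arguments beyond the no-argument defaults are not shown in this diff):

import hishel  # noqa: F401  (sanity check that the package imports)

from hishel import (
    AsyncCacheProxy,
    AsyncSqliteStorage,
    FilterPolicy,
    SpecificationPolicy,
    SyncCacheProxy,
    SyncSqliteStorage,
)

# What the proxies fall back to when nothing is passed:
policy = SpecificationPolicy()   # RFC 9111-respecting behaviour
storage = AsyncSqliteStorage()   # SQLite-backed async storage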
hishel/__init__.py
CHANGED
@@ -1,57 +1,64 @@
-import
+from hishel._core._storages._async_sqlite import AsyncSqliteStorage
+from hishel._core._storages._async_base import AsyncBaseStorage
+from hishel._core._storages._sync_sqlite import SyncSqliteStorage
+from hishel._core._storages._sync_base import SyncBaseStorage
+from hishel._core._headers import Headers as Headers
+from hishel._core._spec import (
+    AnyState as AnyState,
+    CacheMiss as CacheMiss,
+    CacheOptions as CacheOptions,
+    CouldNotBeStored as CouldNotBeStored,
+    FromCache as FromCache,
+    IdleClient as IdleClient,
+    InvalidateEntries,
+    NeedRevalidation as NeedRevalidation,
+    NeedToBeUpdated as NeedToBeUpdated,
+    State as State,
+    StoreAndUse as StoreAndUse,
+)
+from hishel._core.models import (
+    Entry as Entry,
+    EntryMeta as EntryMeta,
+    Request as Request,
+    Response,
+)
+from hishel._async_cache import AsyncCacheProxy as AsyncCacheProxy
+from hishel._sync_cache import SyncCacheProxy as SyncCacheProxy
 
-from .
-from ._controller import *
-from ._exceptions import *
-from ._headers import *
-from ._serializers import *
-from ._sync import *
-from ._lfu_cache import *
+from hishel._policies import SpecificationPolicy, FilterPolicy, CachePolicy
 
 __all__ = (
-    #
-
-    "
-    "
-    "
+    # New API
+    ## States
+    "AnyState",
+    "IdleClient",
+    "CacheMiss",
+    "FromCache",
+    "NeedRevalidation",
+    "AnyState",
+    "CacheOptions",
+    "NeedToBeUpdated",
+    "State",
+    "StoreAndUse",
+    "CouldNotBeStored",
+    "InvalidateEntries",
+    ## Models
+    "Request",
+    "Response",
+    "Entry",
+    "EntryMeta",
+    ## Headers
+    "Headers",
+    ## Storages
+    "SyncBaseStorage",
     "AsyncBaseStorage",
-    "
-    "
-
-    "
-    "
-
-    "
-    "
-    "
-    "ParseError",
-    "ValidationError",
-    "CacheControl",
-    "Vary",
-    "BaseSerializer",
-    "JSONSerializer",
-    "Metadata",
-    "PickleSerializer",
-    "YAMLSerializer",
-    "clone_model",
-    "CacheClient",
-    "MockConnectionPool",
-    "MockTransport",
-    "CacheConnectionPool",
-    "BaseStorage",
-    "FileStorage",
-    "InMemoryStorage",
-    "RedisStorage",
-    "S3Storage",
-    "SQLiteStorage",
-    "CacheTransport",
-    "LFUCache",
+    "SyncSqliteStorage",
+    "AsyncSqliteStorage",
+    # Proxy
+    "AsyncCacheProxy",
+    "SyncCacheProxy",
+    # Policies
+    "CachePolicy",
+    "SpecificationPolicy",
+    "FilterPolicy",
 )
-
-def install_cache() -> None:  # pragma: no cover
-    httpx.AsyncClient = AsyncCacheClient  # type: ignore
-    httpx.Client = CacheClient  # type: ignore
-
-
-__version__ = "0.1.4"
-
hishel/_async_cache.py
ADDED
@@ -0,0 +1,213 @@
from __future__ import annotations

import hashlib
import logging
import time
from dataclasses import replace
from typing import AsyncIterable, AsyncIterator, Awaitable, Callable

from typing_extensions import assert_never

from hishel import (
    AnyState,
    AsyncBaseStorage,
    AsyncSqliteStorage,
    CacheMiss,
    CouldNotBeStored,
    FromCache,
    IdleClient,
    NeedRevalidation,
    NeedToBeUpdated,
    Request,
    Response,
    StoreAndUse,
)
from hishel._core._spec import InvalidateEntries, vary_headers_match
from hishel._core.models import Entry, ResponseMetadata
from hishel._policies import CachePolicy, FilterPolicy, SpecificationPolicy
from hishel._utils import make_async_iterator

logger = logging.getLogger("hishel.integrations.clients")


class AsyncCacheProxy:
    """
    A proxy for HTTP caching in clients.

    This class is independent of any specific HTTP library and works only with internal models.
    It delegates request execution to a user-provided callable, making it compatible with any
    HTTP client. Caching behavior is determined by the policy object.

    Args:
        request_sender: Callable that sends HTTP requests and returns responses.
        storage: Storage backend for cache entries. Defaults to AsyncSqliteStorage.
        policy: Caching policy to use. Can be SpecificationPolicy (respects RFC 9111) or
            FilterPolicy (user-defined filtering). Defaults to SpecificationPolicy().
    """

    def __init__(
        self,
        request_sender: Callable[[Request], Awaitable[Response]],
        storage: AsyncBaseStorage | None = None,
        policy: CachePolicy | None = None,
    ) -> None:
        self.send_request = request_sender
        self.storage = storage if storage is not None else AsyncSqliteStorage()
        self.policy = policy if policy is not None else SpecificationPolicy()

    async def handle_request(self, request: Request) -> Response:
        if isinstance(self.policy, FilterPolicy):
            return await self._handle_request_with_filters(request)
        return await self._handle_request_respecting_spec(request)

    async def _get_key_for_request(self, request: Request) -> str:
        if self.policy.use_body_key or request.metadata.get("hishel_body_key"):
            assert isinstance(request.stream, (AsyncIterator, AsyncIterable))
            collected = b"".join([chunk async for chunk in request.stream])
            hash_ = hashlib.sha256(collected).hexdigest()
            request.stream = make_async_iterator([collected])
            return hash_
        return hashlib.sha256(str(request.url).encode("utf-8")).hexdigest()

    async def _maybe_refresh_entry_ttl(self, entry: Entry) -> None:
        if entry.request.metadata.get("hishel_refresh_ttl_on_access"):
            await self.storage.update_entry(
                entry.id,
                lambda current_entry: replace(
                    current_entry,
                    meta=replace(current_entry.meta, created_at=time.time()),
                ),
            )

    async def _handle_request_with_filters(self, request: Request) -> Response:
        assert isinstance(self.policy, FilterPolicy)

        for request_filter in self.policy.request_filters:
            if request_filter.needs_body():
                body = await request.aread()
                if not request_filter.apply(request, body):
                    logger.debug("Request filtered out by request filter")
                    return await self.send_request(request)
            else:
                if not request_filter.apply(request, None):
                    logger.debug("Request filtered out by request filter")
                    return await self.send_request(request)

        logger.debug("Trying to get cached response ignoring specification")
        cache_key = await self._get_key_for_request(request)
        entries = await self.storage.get_entries(cache_key)

        logger.debug(f"Found {len(entries)} cached entries for the request")

        for entry in entries:
            if (
                str(entry.request.url) == str(request.url)
                and entry.request.method == request.method
                and vary_headers_match(
                    request,
                    entry,
                )
            ):
                logger.debug(
                    "Found matching cached response for the request",
                )
                response_meta = ResponseMetadata(
                    hishel_from_cache=True,
                    hishel_created_at=entry.meta.created_at,
                    hishel_revalidated=False,
                    hishel_stored=False,
                )
                entry.response.metadata.update(response_meta)  # type: ignore
                await self._maybe_refresh_entry_ttl(entry)
                return entry.response

        response = await self.send_request(request)
        for response_filter in self.policy.response_filters:
            if response_filter.needs_body():
                body = await response.aread()
                if not response_filter.apply(response, body):
                    logger.debug("Response filtered out by response filter")
                    return response
            else:
                if not response_filter.apply(response, None):
                    logger.debug("Response filtered out by response filter")
                    return response
        response_meta = ResponseMetadata(
            hishel_from_cache=False,
            hishel_created_at=time.time(),
            hishel_revalidated=False,
            hishel_stored=True,
        )
        response.metadata.update(response_meta)  # type: ignore

        logger.debug("Storing response in cache ignoring specification")
        entry = await self.storage.create_entry(
            request,
            response,
            cache_key,
        )
        return entry.response

    async def _handle_request_respecting_spec(self, request: Request) -> Response:
        assert isinstance(self.policy, SpecificationPolicy)
        state: AnyState = IdleClient(options=self.policy.cache_options)

        while state:
            logger.debug(f"Handling state: {state.__class__.__name__}")
            if isinstance(state, IdleClient):
                state = await self._handle_idle_state(state, request)
            elif isinstance(state, CacheMiss):
                state = await self._handle_cache_miss(state)
            elif isinstance(state, StoreAndUse):
                return await self._handle_store_and_use(state, request)
            elif isinstance(state, CouldNotBeStored):
                return state.response
            elif isinstance(state, NeedRevalidation):
                state = await self._handle_revalidation(state)
            elif isinstance(state, FromCache):
                await self._maybe_refresh_entry_ttl(state.entry)
                return state.entry.response
            elif isinstance(state, NeedToBeUpdated):
                state = await self._handle_update(state)
            elif isinstance(state, InvalidateEntries):
                state = await self._handle_invalidate_entries(state)
            else:
                assert_never(state)

        raise RuntimeError("Unreachable")

    async def _handle_idle_state(self, state: IdleClient, request: Request) -> AnyState:
        stored_entries = await self.storage.get_entries(await self._get_key_for_request(request))
        return state.next(request, stored_entries)

    async def _handle_cache_miss(self, state: CacheMiss) -> AnyState:
        response = await self.send_request(state.request)
        return state.next(response)

    async def _handle_store_and_use(self, state: StoreAndUse, request: Request) -> Response:
        entry = await self.storage.create_entry(
            request,
            state.response,
            await self._get_key_for_request(request),
        )
        return entry.response

    async def _handle_revalidation(self, state: NeedRevalidation) -> AnyState:
        revalidation_response = await self.send_request(state.request)
        return state.next(revalidation_response)

    async def _handle_update(self, state: NeedToBeUpdated) -> AnyState:
        for entry in state.updating_entries:
            await self.storage.update_entry(
                entry.id,
                lambda entry: replace(
                    entry,
                    response=replace(entry.response, headers=entry.response.headers),
                ),
            )
        return state.next()

    async def _handle_invalidate_entries(self, state: InvalidateEntries) -> AnyState:
        for entry_id in state.entry_ids:
            await self.storage.remove_entry(entry_id)
        return state.next()
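AsyncCacheProxy above is deliberately client-agnostic: it only needs an async callable that turns an internal Request into an internal Response. A minimal wiring sketch follows; send_with_your_client is a hypothetical placeholder, and the Request/Response constructor keywords (method, url, headers, stream, metadata) are assumed from how they are constructed elsewhere in this diff rather than taken from documented API:

import asyncio

from hishel import (
    AsyncCacheProxy,
    AsyncSqliteStorage,
    Headers,
    Request,
    Response,
    SpecificationPolicy,
)
from hishel._utils import make_async_iterator


async def send_with_your_client(request: Request) -> Response:
    # Hypothetical placeholder: call whatever HTTP library you use and
    # translate its answer into hishel's internal Response model.
    return Response(
        status_code=200,
        headers=Headers({"cache-control": "max-age=60"}),
        stream=make_async_iterator([b"hello"]),
        metadata={},
    )


async def main() -> None:
    proxy = AsyncCacheProxy(
        request_sender=send_with_your_client,
        storage=AsyncSqliteStorage(),    # the default storage backend
        policy=SpecificationPolicy(),    # the default, RFC 9111 policy
    )
    request = Request(
        method="GET",
        url="https://example.com/",
        headers=Headers({}),
        stream=make_async_iterator([b""]),
        metadata={},
    )
    response = await proxy.handle_request(request)
    print(response.status_code)


asyncio.run(main())

With a SpecificationPolicy, handle_request walks the IdleClient/CacheMiss/FromCache/NeedRevalidation state machine shown above; with a FilterPolicy it bypasses RFC 9111 and applies the user-supplied request and response filters instead.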
hishel/_async_httpx.py
ADDED
@@ -0,0 +1,236 @@
from __future__ import annotations

import ssl
import typing as t
from typing import (
    AsyncIterable,
    AsyncIterator,
    Iterator,
    Union,
    cast,
    overload,
)

from httpx import RequestNotRead

from hishel import AsyncCacheProxy, Headers, Request, Response
from hishel._core._storages._async_base import AsyncBaseStorage
from hishel._core.models import RequestMetadata, extract_metadata_from_headers
from hishel._policies import CachePolicy
from hishel._utils import (
    filter_mapping,
    make_async_iterator,
)

try:
    import httpx
except ImportError as e:
    raise ImportError(
        "httpx is required to use hishel.httpx module. "
        "Please install hishel with the 'httpx' extra, "
        "e.g., 'pip install hishel[httpx]'."
    ) from e

SOCKET_OPTION = t.Union[
    t.Tuple[int, int, int],
    t.Tuple[int, int, t.Union[bytes, bytearray]],
    t.Tuple[int, int, None, int],
]

# 128 KB
CHUNK_SIZE = 131072


@overload
def _internal_to_httpx(
    value: Request,
) -> httpx.Request: ...
@overload
def _internal_to_httpx(
    value: Response,
) -> httpx.Response: ...
def _internal_to_httpx(
    value: Union[Request, Response],
) -> Union[httpx.Request, httpx.Response]:
    """
    Convert internal Request/Response to httpx.Request/httpx.Response.
    """
    if isinstance(value, Request):
        return httpx.Request(
            method=value.method,
            url=value.url,
            headers=value.headers,
            stream=_IteratorStream(value._aiter_stream()),
            extensions=value.metadata,
        )
    elif isinstance(value, Response):
        return httpx.Response(
            status_code=value.status_code,
            headers=value.headers,
            stream=_IteratorStream(value._aiter_stream()),
            extensions=value.metadata,
        )


@overload
def _httpx_to_internal(
    value: httpx.Request,
) -> Request: ...
@overload
def _httpx_to_internal(
    value: httpx.Response,
) -> Response: ...
def _httpx_to_internal(
    value: Union[httpx.Request, httpx.Response],
) -> Union[Request, Response]:
    """
    Convert httpx.Request/httpx.Response to internal Request/Response.
    """
    headers = Headers(
        filter_mapping(
            Headers({key: value for key, value in value.headers.items()}),
            ["Transfer-Encoding"],
        )
    )
    if isinstance(value, httpx.Request):
        extension_metadata = RequestMetadata(
            hishel_refresh_ttl_on_access=value.extensions.get("hishel_refresh_ttl_on_access"),
            hishel_ttl=value.extensions.get("hishel_ttl"),
            hishel_spec_ignore=value.extensions.get("hishel_spec_ignore"),
            hishel_body_key=value.extensions.get("hishel_body_key"),
        )
        headers_metadata = extract_metadata_from_headers(value.headers)

        for key, val in extension_metadata.items():
            if key in value.extensions:
                headers_metadata[key] = val  # type: ignore

        try:
            stream = make_async_iterator([value.content])
        except RequestNotRead:
            stream = cast(AsyncIterator[bytes], value.stream)

        return Request(
            method=value.method,
            url=str(value.url),
            headers=headers,
            stream=stream,
            metadata=headers_metadata,
        )
    elif isinstance(value, httpx.Response):
        if value.is_stream_consumed and "content-encoding" in value.headers:
            raise RuntimeError("Can't get the raw stream of a response with `Content-Encoding` header.")
        stream = (
            make_async_iterator([value.content]) if value.is_stream_consumed else value.aiter_raw(chunk_size=CHUNK_SIZE)
        )

        return Response(
            status_code=value.status_code,
            headers=headers,
            stream=stream,
            metadata={},
        )


class _IteratorStream(httpx.SyncByteStream, httpx.AsyncByteStream):
    def __init__(self, iterator: Iterator[bytes] | AsyncIterator[bytes]) -> None:
        self.iterator = iterator

    async def __aiter__(self) -> AsyncIterator[bytes]:
        assert isinstance(self.iterator, (AsyncIterator, AsyncIterable))
        async for chunk in self.iterator:
            yield chunk


class AsyncCacheTransport(httpx.AsyncBaseTransport):
    def __init__(
        self,
        next_transport: httpx.AsyncBaseTransport,
        storage: AsyncBaseStorage | None = None,
        policy: CachePolicy | None = None,
    ) -> None:
        self.next_transport = next_transport
        self._cache_proxy: AsyncCacheProxy = AsyncCacheProxy(
            request_sender=self.request_sender,
            storage=storage,
            policy=policy,
        )
        self.storage = self._cache_proxy.storage

    async def handle_async_request(
        self,
        request: httpx.Request,
    ) -> httpx.Response:
        internal_request = _httpx_to_internal(request)
        internal_response = await self._cache_proxy.handle_request(internal_request)
        response = _internal_to_httpx(internal_response)
        return response

    async def aclose(self) -> None:
        await self.next_transport.aclose()
        await self.storage.close()
        await super().aclose()

    async def request_sender(self, request: Request) -> Response:
        httpx_request = _internal_to_httpx(request)
        httpx_response = await self.next_transport.handle_async_request(httpx_request)
        return _httpx_to_internal(httpx_response)


class AsyncCacheClient(httpx.AsyncClient):
    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
        self.storage: AsyncBaseStorage | None = kwargs.pop("storage", None)
        self.policy: CachePolicy | None = kwargs.pop("policy", None)
        super().__init__(*args, **kwargs)

    def _init_transport(
        self,
        verify: ssl.SSLContext | str | bool = True,
        cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
        trust_env: bool = True,
        http1: bool = True,
        http2: bool = False,
        limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
        transport: httpx.AsyncBaseTransport | None = None,
        **kwargs: t.Any,
    ) -> httpx.AsyncBaseTransport:
        if transport is not None:
            return transport

        return AsyncCacheTransport(
            next_transport=httpx.AsyncHTTPTransport(
                verify=verify,
                cert=cert,
                trust_env=trust_env,
                http1=http1,
                http2=http2,
                limits=limits,
            ),
            storage=self.storage,
            policy=self.policy,
        )

    def _init_proxy_transport(
        self,
        proxy: httpx.Proxy,
        verify: ssl.SSLContext | str | bool = True,
        cert: t.Union[str, t.Tuple[str, str], t.Tuple[str, str, str], None] = None,
        trust_env: bool = True,
        http1: bool = True,
        http2: bool = False,
        limits: httpx.Limits = httpx.Limits(max_connections=100, max_keepalive_connections=20),
        **kwargs: t.Any,
    ) -> httpx.AsyncBaseTransport:
        return AsyncCacheTransport(
            next_transport=httpx.AsyncHTTPTransport(
                verify=verify,
                cert=cert,
                trust_env=trust_env,
                http1=http1,
                http2=http2,
                limits=limits,
                proxy=proxy,
            ),
            storage=self.storage,
            policy=self.policy,
        )
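For httpx users, the transport and client above plug into the usual httpx extension points. A usage sketch follows, assuming AsyncCacheClient accepts the storage/policy keyword arguments it pops in __init__ and that the classes are imported from hishel._async_httpx as defined here (the public re-export through hishel/httpx.py is not shown in this excerpt):

import asyncio

import httpx

from hishel import AsyncSqliteStorage
from hishel._async_httpx import AsyncCacheClient, AsyncCacheTransport


async def main() -> None:
    # Drop-in client subclass; storage/policy are popped from kwargs in __init__.
    async with AsyncCacheClient(storage=AsyncSqliteStorage()) as client:
        first = await client.get("https://example.com/")
        second = await client.get("https://example.com/")
        # Cache details set by the proxy are surfaced through response.extensions,
        # e.g. the "hishel_from_cache" flag.
        print(first.status_code, second.extensions.get("hishel_from_cache"))

    # Or wrap an existing transport explicitly.
    transport = AsyncCacheTransport(next_transport=httpx.AsyncHTTPTransport())
    async with httpx.AsyncClient(transport=transport) as client:
        response = await client.get("https://example.com/")
        print(response.status_code)


asyncio.run(main())

The extensions round-trip works because _httpx_to_internal copies httpx extensions into the internal metadata and _internal_to_httpx passes that metadata back as extensions when rebuilding the httpx.Response.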