hishel 0.1.5__py3-none-any.whl → 1.0.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. hishel/__init__.py +55 -53
  2. hishel/{beta/_async_cache.py → _async_cache.py} +3 -3
  3. hishel/{beta → _core}/__init__.py +6 -6
  4. hishel/_core/_async/_storages/_sqlite.py +457 -0
  5. hishel/{beta/_core → _core}/_base/_storages/_base.py +1 -1
  6. hishel/{beta/_core → _core}/_base/_storages/_packing.py +5 -5
  7. hishel/{beta/_core → _core}/_spec.py +89 -2
  8. hishel/_core/_sync/_storages/_sqlite.py +457 -0
  9. hishel/{beta/_core → _core}/models.py +1 -1
  10. hishel/{beta/_sync_cache.py → _sync_cache.py} +3 -3
  11. hishel/_utils.py +1 -241
  12. hishel/{beta/httpx.py → httpx.py} +15 -8
  13. hishel/{beta/requests.py → requests.py} +5 -5
  14. hishel-1.0.0.dev1.dist-info/METADATA +298 -0
  15. hishel-1.0.0.dev1.dist-info/RECORD +19 -0
  16. hishel/_async/__init__.py +0 -5
  17. hishel/_async/_client.py +0 -30
  18. hishel/_async/_mock.py +0 -43
  19. hishel/_async/_pool.py +0 -201
  20. hishel/_async/_storages.py +0 -768
  21. hishel/_async/_transports.py +0 -282
  22. hishel/_controller.py +0 -581
  23. hishel/_exceptions.py +0 -10
  24. hishel/_files.py +0 -54
  25. hishel/_headers.py +0 -215
  26. hishel/_lfu_cache.py +0 -71
  27. hishel/_lmdb_types_.pyi +0 -53
  28. hishel/_s3.py +0 -122
  29. hishel/_serializers.py +0 -329
  30. hishel/_sync/__init__.py +0 -5
  31. hishel/_sync/_client.py +0 -30
  32. hishel/_sync/_mock.py +0 -43
  33. hishel/_sync/_pool.py +0 -201
  34. hishel/_sync/_storages.py +0 -768
  35. hishel/_sync/_transports.py +0 -282
  36. hishel/_synchronization.py +0 -37
  37. hishel/beta/_core/__init__.py +0 -0
  38. hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
  39. hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
  40. hishel-0.1.5.dist-info/METADATA +0 -258
  41. hishel-0.1.5.dist-info/RECORD +0 -41
  42. /hishel/{beta/_core → _core}/_headers.py +0 -0
  43. {hishel-0.1.5.dist-info → hishel-1.0.0.dev1.dist-info}/WHEEL +0 -0
  44. {hishel-0.1.5.dist-info → hishel-1.0.0.dev1.dist-info}/licenses/LICENSE +0 -0
@@ -1,411 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import time
4
- import uuid
5
- from dataclasses import replace
6
- from typing import (
7
- Iterable,
8
- Iterator,
9
- Callable,
10
- List,
11
- Literal,
12
- Optional,
13
- Union,
14
- )
15
-
16
- import sqlite3
17
-
18
- from hishel.beta._core._base._storages._base import SyncBaseStorage, ensure_cache_dict
19
- from hishel.beta._core._base._storages._packing import pack, unpack
20
- from hishel.beta._core.models import (
21
- CompletePair,
22
- IncompletePair,
23
- Pair,
24
- PairMeta,
25
- Request,
26
- Response,
27
- )
28
-
29
-
30
class SyncSqliteStorage(SyncBaseStorage):
    """Synchronous SQLite-backed cache storage.

    Request/response pairs are serialized with ``pack``/``unpack`` and kept in
    the ``entries`` table; request and response bodies are stored chunk by
    chunk in the ``streams`` table so they can be written incrementally while
    being consumed and replayed lazily afterwards.
    """

    # Maps a stream kind to the integer discriminator stored in ``streams.kind``.
    _STREAM_KIND = {"request": 0, "response": 1}
    # Sentinel ``chunk_number`` row marking that a stream was fully written.
    _COMPLETE_CHUNK_NUMBER = -1

    def __init__(
        self,
        *,
        connection: Optional[sqlite3.Connection] = None,
        database_path: str = "hishel_cache.db",
        default_ttl: Optional[float] = None,
        refresh_ttl_on_access: bool = True,
    ) -> None:
        """
        Args:
            connection: Existing sqlite3 connection to reuse.  When ``None``,
                a connection is opened lazily on first use.
            database_path: Database file name, created inside the cache
                directory returned by ``ensure_cache_dict()``.
            default_ttl: Fallback time-to-live in seconds, used when a request
                carries no ``hishel_ttl`` metadata entry.
            refresh_ttl_on_access: Stored for callers; not consulted in this
                class.
        """
        base_path = ensure_cache_dict()

        self.connection = connection
        self.database_path = base_path / database_path
        self.default_ttl = default_ttl
        self.refresh_ttl_on_access = refresh_ttl_on_access
        self.last_cleanup = float("-inf")
        self._initialized = False

    def _ensure_connection(self) -> sqlite3.Connection:
        """Ensure connection is established and database is initialized."""
        if self.connection is None:
            self.connection = sqlite3.connect(str(self.database_path))
        if not self._initialized:
            self._initialize_database()
            self._initialized = True
        return self.connection

    def _initialize_database(self) -> None:
        """Initialize the database schema (idempotent: uses IF NOT EXISTS)."""
        assert self.connection is not None
        cursor = self.connection.cursor()

        # Table for storing request/response pairs
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS entries (
                id BLOB PRIMARY KEY,
                cache_key BLOB,
                data BLOB NOT NULL,
                created_at REAL NOT NULL,
                deleted_at REAL
            )
        """)

        # Table for storing stream chunks
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS streams (
                entry_id BLOB NOT NULL,
                kind INTEGER NOT NULL,
                chunk_number INTEGER NOT NULL,
                chunk_data BLOB NOT NULL,
                PRIMARY KEY (entry_id, kind, chunk_number),
                FOREIGN KEY (entry_id) REFERENCES entries(id) ON DELETE CASCADE
            )
        """)

        # Indexes for performance
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_deleted_at ON entries(deleted_at)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_entries_cache_key ON entries(cache_key)")
        # Note: PRIMARY KEY (entry_id, kind, chunk_number) already provides an index
        # for queries like: entry_id = ? AND kind = ? AND chunk_number = ?

        self.connection.commit()

    def create_pair(
        self,
        request: Request,
        id: uuid.UUID | None = None,
    ) -> IncompletePair:
        """Persist a new incomplete pair for *request* and return it.

        The returned pair's request stream is wrapped so the body is written
        to the ``streams`` table chunk by chunk as it is consumed.

        Args:
            request: The outgoing request to record.
            id: Optional explicit pair id; a random UUID4 is generated when
                omitted.
        """
        pair_id = id if id is not None else uuid.uuid4()
        pair_meta = PairMeta(
            created_at=time.time(),
        )

        pair = IncompletePair(id=pair_id, request=request, meta=pair_meta)

        packed_pair = pack(pair, kind="pair")

        connection = self._ensure_connection()
        cursor = connection.cursor()
        cursor.execute(
            "INSERT INTO entries (id, cache_key, data, created_at, deleted_at) VALUES (?, ?, ?, ?, ?)",
            (pair_id.bytes, None, packed_pair, pair_meta.created_at, None),
        )
        connection.commit()

        # This storage is synchronous, so an async stream cannot be consumed here.
        # (Fixed message: it previously read "an Iterable, not Iterable".)
        assert isinstance(request.stream, Iterable), "Request stream must be an Iterable, not an AsyncIterable"

        request = Request(
            method=request.method,
            url=request.url,
            headers=request.headers,
            metadata=request.metadata,
            stream=self._save_stream(request.stream, pair_id.bytes, "request"),
        )

        return replace(pair, request=request)

    def add_response(
        self,
        pair_id: uuid.UUID,
        response: Response,
        key: str | bytes,
    ) -> CompletePair:
        """Attach *response* to the stored pair *pair_id*, completing it.

        Args:
            pair_id: Id of a pair previously created with ``create_pair``.
            response: The response to record; its stream is wrapped so the
                body is persisted as it is consumed.
            key: Cache key under which the completed pair becomes retrievable.

        Raises:
            ValueError: If no entry with ``pair_id`` exists.
        """
        if isinstance(key, str):
            key = key.encode("utf-8")

        connection = self._ensure_connection()
        cursor = connection.cursor()

        # Get the existing pair
        cursor.execute("SELECT data FROM entries WHERE id = ?", (pair_id.bytes,))
        result = cursor.fetchone()

        if result is None:
            raise ValueError(f"Entry with ID {pair_id} not found.")

        pair = unpack(result[0], kind="pair")

        assert isinstance(response.stream, (Iterator, Iterable))
        response = replace(response, stream=self._save_stream(response.stream, pair_id.bytes, "response"))

        # Drop any partially written response stream from a previous attempt.
        self._delete_stream(pair.id.bytes, cursor, type="response")
        complete_pair = CompletePair(id=pair.id, request=pair.request, response=response, meta=pair.meta, cache_key=key)

        # Update the entry with the complete pair and set cache_key
        cursor.execute(
            "UPDATE entries SET data = ?, cache_key = ? WHERE id = ?",
            (pack(complete_pair, kind="pair"), key, pair_id.bytes),
        )
        connection.commit()

        return complete_pair

    def get_pairs(self, key: str) -> List[CompletePair]:
        """Return all complete pairs stored under *key*.

        Incomplete pairs are skipped.  Each returned pair has its request and
        response streams replaced by lazy readers over the ``streams`` table.

        NOTE(review): soft-deleted entries (deleted_at set) are still returned
        here; callers appear to be expected to filter them — confirm upstream.
        """
        final_pairs: List[CompletePair] = []

        connection = self._ensure_connection()
        cursor = connection.cursor()
        # Query entries directly by cache_key
        cursor.execute("SELECT id, data FROM entries WHERE cache_key = ?", (key.encode("utf-8"),))

        for row in cursor.fetchall():
            pair_data = unpack(row[1], kind="pair")

            if isinstance(pair_data, IncompletePair):
                continue

            final_pairs.append(pair_data)

        pairs_with_streams: List[CompletePair] = []

        for pair in final_pairs:
            pairs_with_streams.append(
                replace(
                    pair,
                    response=replace(
                        pair.response,
                        stream=self._stream_data_from_cache(pair.id.bytes, "response"),
                    ),
                    request=replace(
                        pair.request,
                        stream=self._stream_data_from_cache(pair.id.bytes, "request"),
                    ),
                )
            )
        return pairs_with_streams

    def update_pair(
        self,
        id: uuid.UUID,
        new_pair: Union[CompletePair, Callable[[CompletePair], CompletePair]],
    ) -> Optional[CompletePair]:
        """Replace the stored pair *id* with *new_pair*.

        Args:
            id: Id of the pair to update.
            new_pair: Either the replacement pair, or a callable that maps the
                current pair to its replacement.

        Returns:
            The stored replacement, or ``None`` if the entry does not exist or
            is still incomplete.

        Raises:
            ValueError: If the replacement's id differs from the stored one.
        """
        connection = self._ensure_connection()
        cursor = connection.cursor()
        cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
        result = cursor.fetchone()

        if result is None:
            return None

        pair = unpack(result[0], kind="pair")

        if isinstance(pair, IncompletePair):
            return None

        if isinstance(new_pair, CompletePair):
            complete_pair = new_pair
        else:
            complete_pair = new_pair(pair)

        if pair.id != complete_pair.id:
            raise ValueError("Pair ID mismatch")

        cursor.execute("UPDATE entries SET data = ? WHERE id = ?", (pack(complete_pair, kind="pair"), id.bytes))

        # Keep the indexed cache_key column in sync with the packed payload.
        if pair.cache_key != complete_pair.cache_key:
            cursor.execute(
                "UPDATE entries SET cache_key = ? WHERE id = ?",
                (complete_pair.cache_key, complete_pair.id.bytes),
            )

        connection.commit()

        return complete_pair

    def remove(self, id: uuid.UUID) -> None:
        """Soft-delete the pair *id* (no-op when the entry does not exist)."""
        connection = self._ensure_connection()
        cursor = connection.cursor()
        cursor.execute("SELECT data FROM entries WHERE id = ?", (id.bytes,))
        result = cursor.fetchone()

        if result is None:
            return None

        pair = unpack(result[0], kind="pair")
        self._soft_delete_pair(pair, cursor)
        connection.commit()

    def _is_stream_complete(
        self, kind: Literal["request", "response"], pair_id: uuid.UUID, cursor: sqlite3.Cursor
    ) -> bool:
        """Return True if the stream was fully written (its end marker exists)."""
        kind_id = self._STREAM_KIND[kind]
        # Check if there's a completion marker (chunk_number = -1)
        cursor.execute(
            "SELECT 1 FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ? LIMIT 1",
            (pair_id.bytes, kind_id, self._COMPLETE_CHUNK_NUMBER),
        )
        return cursor.fetchone() is not None

    def _soft_delete_pair(self, pair: Union[CompletePair, IncompletePair], cursor: sqlite3.Cursor) -> None:
        """
        Mark the pair as deleted by setting the deleted_at timestamp.
        """
        marked_pair = self.mark_pair_as_deleted(pair)
        cursor.execute(
            "UPDATE entries SET data = ?, deleted_at = ? WHERE id = ?",
            (pack(marked_pair, kind="pair"), marked_pair.meta.deleted_at, pair.id.bytes),
        )

    def _is_pair_expired(self, pair: Pair, cursor: sqlite3.Cursor) -> bool:
        """
        Check if the pair is expired.

        A per-request ``hishel_ttl`` metadata entry overrides ``default_ttl``;
        when neither is set the pair never expires.
        """
        ttl = pair.request.metadata["hishel_ttl"] if "hishel_ttl" in pair.request.metadata else self.default_ttl
        created_at = pair.meta.created_at
        if ttl is None:
            return False
        return created_at + ttl < time.time()

    def _batch_cleanup(
        self,
    ) -> None:
        """
        Cleanup expired pairs in the database.

        Expired live pairs are soft-deleted; pairs that are both soft-deleted
        and safe to remove, or corrupted, are hard-deleted together with
        their streams.
        """
        should_mark_as_deleted: List[Union[CompletePair, IncompletePair]] = []
        should_hard_delete: List[Union[CompletePair, IncompletePair]] = []

        connection = self._ensure_connection()
        cursor = connection.cursor()
        cursor.execute("SELECT id, data FROM entries")

        for row in cursor.fetchall():
            pair = unpack(row[1], kind="pair")
            if pair is None:
                continue
            if self._is_pair_expired(pair, cursor) and not self.is_soft_deleted(pair):
                should_mark_as_deleted.append(pair)

            if (self.is_soft_deleted(pair) and self.is_safe_to_hard_delete(pair)) or self._is_corrupted(
                pair, cursor
            ):
                should_hard_delete.append(pair)

        for pair in should_mark_as_deleted:
            self._soft_delete_pair(pair, cursor)

        for pair in should_hard_delete:
            self._hard_delete_pair(pair, cursor)

        connection.commit()

    def _is_corrupted(self, pair: IncompletePair | CompletePair, cursor: sqlite3.Cursor) -> bool:
        """Return True for pairs that can never become usable."""
        # if pair was created more than 1 hour ago and still not completed
        if pair.meta.created_at + 3600 < time.time() and isinstance(pair, IncompletePair):
            return True

        # A complete pair whose request stream lacks its end marker was
        # interrupted mid-write.
        if isinstance(pair, CompletePair) and not self._is_stream_complete("request", pair.id, cursor):
            return True
        return False

    def _hard_delete_pair(self, pair: CompletePair | IncompletePair, cursor: sqlite3.Cursor) -> None:
        """
        Permanently delete the pair from the database.
        """
        cursor.execute("DELETE FROM entries WHERE id = ?", (pair.id.bytes,))

        # Delete all streams (both request and response) for this entry
        self._delete_stream(pair.id.bytes, cursor)

    def _delete_stream(
        self,
        entry_id: bytes,
        cursor: sqlite3.Cursor,
        type: Literal["request", "response", "all"] = "all",
    ) -> None:
        """
        Delete the streams associated with the given entry ID, optionally
        restricted to the request or response stream.
        """
        if type == "request":
            cursor.execute(
                "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["request"])
            )
        elif type == "response":
            cursor.execute(
                "DELETE FROM streams WHERE entry_id = ? AND kind = ?", (entry_id, self._STREAM_KIND["response"])
            )
        elif type == "all":
            cursor.execute("DELETE FROM streams WHERE entry_id = ?", (entry_id,))

    def _save_stream(
        self,
        stream: Iterable[bytes],
        entry_id: bytes,
        kind: Literal["response", "request"],
    ) -> Iterator[bytes]:
        """
        Wrapper around an iterator that also saves the data to the cache in
        chunks.  Once the source is exhausted, an end-of-stream marker row
        (chunk_number = -1, empty payload) is written.
        """
        kind_id = self._STREAM_KIND[kind]
        chunk_number = 0
        for chunk in stream:
            connection = self._ensure_connection()
            cursor = connection.cursor()
            cursor.execute(
                "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
                (entry_id, kind_id, chunk_number, chunk),
            )
            connection.commit()
            chunk_number += 1
            yield chunk

        # Mark end of stream with chunk_number = -1
        connection = self._ensure_connection()
        cursor = connection.cursor()
        cursor.execute(
            "INSERT INTO streams (entry_id, kind, chunk_number, chunk_data) VALUES (?, ?, ?, ?)",
            (entry_id, kind_id, self._COMPLETE_CHUNK_NUMBER, b""),
        )
        connection.commit()

    def _stream_data_from_cache(
        self,
        entry_id: bytes,
        kind: Literal["response", "request"],
    ) -> Iterator[bytes]:
        """
        Get an iterator that yields the stream data from the cache.

        Chunks are read sequentially from chunk_number 0 until no row is
        found.  The end-of-stream marker sits at chunk_number = -1 and is
        therefore never reached by this walk; the previous ``chunk == b""``
        early exit wrongly truncated streams containing a legitimately empty
        chunk and has been removed.
        """
        kind_id = self._STREAM_KIND[kind]
        chunk_number = 0

        connection = self._ensure_connection()
        while True:
            cursor = connection.cursor()
            cursor.execute(
                "SELECT chunk_data FROM streams WHERE entry_id = ? AND kind = ? AND chunk_number = ?",
                (entry_id, kind_id, chunk_number),
            )
            result = cursor.fetchone()

            if result is None:
                break
            yield result[0]
            chunk_number += 1
@@ -1,258 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: hishel
3
- Version: 0.1.5
4
- Summary: Persistent cache implementation for httpx and httpcore
5
- Project-URL: Homepage, https://hishel.com
6
- Project-URL: Source, https://github.com/karpetrosyan/hishel
7
- Author-email: Kar Petrosyan <kar.petrosyanpy@gmail.com>
8
- License-Expression: BSD-3-Clause
9
- License-File: LICENSE
10
- Classifier: Development Status :: 3 - Alpha
11
- Classifier: Environment :: Web Environment
12
- Classifier: Framework :: AsyncIO
13
- Classifier: Framework :: Trio
14
- Classifier: Intended Audience :: Developers
15
- Classifier: License :: OSI Approved :: BSD License
16
- Classifier: Operating System :: OS Independent
17
- Classifier: Programming Language :: Python :: 3
18
- Classifier: Programming Language :: Python :: 3 :: Only
19
- Classifier: Programming Language :: Python :: 3.9
20
- Classifier: Programming Language :: Python :: 3.10
21
- Classifier: Programming Language :: Python :: 3.11
22
- Classifier: Programming Language :: Python :: 3.12
23
- Classifier: Programming Language :: Python :: 3.13
24
- Classifier: Programming Language :: Python :: 3.14
25
- Classifier: Topic :: Internet :: WWW/HTTP
26
- Requires-Python: >=3.9
27
- Requires-Dist: anyio>=4.9.0
28
- Requires-Dist: anysqlite>=0.0.5
29
- Requires-Dist: httpx>=0.28.0
30
- Requires-Dist: msgpack>=1.1.2
31
- Requires-Dist: typing-extensions>=4.14.1
32
- Provides-Extra: httpx
33
- Requires-Dist: httpx>=0.28.1; extra == 'httpx'
34
- Provides-Extra: redis
35
- Requires-Dist: redis==6.2.0; extra == 'redis'
36
- Provides-Extra: requests
37
- Requires-Dist: requests>=2.32.5; extra == 'requests'
38
- Provides-Extra: s3
39
- Requires-Dist: boto3<=1.15.3,>=1.15.0; (python_version < '3.12') and extra == 's3'
40
- Requires-Dist: boto3>=1.15.3; (python_version >= '3.12') and extra == 's3'
41
- Provides-Extra: sqlite
42
- Requires-Dist: anysqlite>=0.0.5; extra == 'sqlite'
43
- Provides-Extra: yaml
44
- Requires-Dist: pyyaml==6.0.2; extra == 'yaml'
45
- Description-Content-Type: text/markdown
46
-
47
- <p align="center" class="logo">
48
- <div align="center">
49
- <picture>
50
- <source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/karpetrosyan/hishel/master/docs/static/Shelkopryad_350x250_yellow.png">
51
- <source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/karpetrosyan/hishel/master/docs/static/Shelkopryad_350x250_black.png">
52
- <img alt="Logo" src="https://raw.githubusercontent.com/karpetrosyan/hishel/master/docs/static/Shelkopryad_350x250_yellow.png">
53
- </picture>
54
- </div>
55
- </p>
56
-
57
-
58
-
59
- <p align="center"><strong>Hishel</strong> <em>- An elegant HTTP Cache implementation for httpx and httpcore.</em></p>
60
-
61
- <p align="center">
62
-
63
- <a href="https://pypi.org/project/hishel">
64
- <img src="https://img.shields.io/pypi/v/hishel.svg" alt="pypi">
65
- </a>
66
-
67
- <a href="https://img.shields.io/pypi/l/hishel">
68
- <img src="https://img.shields.io/pypi/l/hishel" alt="license">
69
- </a>
70
-
71
- <a href="https://coveralls.io/github/karpetrosyan/hishel">
72
- <img src="https://img.shields.io/coverallsCoverage/github/karpetrosyan/hishel" alt="coverage">
73
- </a>
74
-
75
- <a href="https://github.com/karpetrosyan/hishel">
76
- <img src="https://img.shields.io/pypi/dm/hishel.svg" alt="Downloads">
77
- </a>
78
- </p>
79
-
80
- <p align="center">
81
- <a href="https://buymeacoffee.com/karpetrosyan" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
82
- </p>
83
-
84
- -----
85
-
86
- **Hishel (հիշել, remember)** is a library that implements HTTP Caching for [HTTPX](https://github.com/encode/httpx) and [HTTP Core](https://github.com/encode/httpcore) libraries in accordance with [**RFC 9111**](https://www.rfc-editor.org/rfc/rfc9111.html), the most recent caching specification.
87
-
88
- ## Features
89
-
90
- - 💾 **Persistence**: Responses are cached in the [**persistent memory**](https://en.m.wikipedia.org/wiki/Persistent_memory) for later use.
91
- - 🤲 **Compatibility**: It is completely compatible with your existing transports or connection pools, *whether they are default, custom, or provided by third-party libraries.*
92
- - 🤗 **Easy to use**: You continue to use httpx while also enabling [web cache](https://en.wikipedia.org/wiki/Web_cache).
93
- - 🧠 **Smart**: Attempts to clearly implement RFC 9111, understands `Vary`, `Etag`, `Last-Modified`, `Cache-Control`, and `Expires` headers, and *handles response re-validation automatically*.
94
- - ⚙️ **Configurable**: You have complete control over how the responses are stored and serialized.
95
- - 📦 **From the package**:
96
- - Built-in support for [File system](https://en.wikipedia.org/wiki/File_system), [Redis](https://en.wikipedia.org/wiki/Redis), [SQLite](https://en.wikipedia.org/wiki/SQLite), and [AWS S3](https://aws.amazon.com/s3/) backends.
97
- - Built-in support for [JSON](https://en.wikipedia.org/wiki/JSON), [YAML](https://en.wikipedia.org/wiki/YAML), and [pickle](https://docs.python.org/3/library/pickle.html) serializers.
98
- - 🚀 **Very fast**: Your requests will be even faster if there are *no IO operations*.
99
-
100
- ## Documentation
101
- Go through the [Hishel documentation](https://hishel.com).
102
-
103
- ## QuickStart
104
-
105
- Install `Hishel` using pip:
106
- ``` shell
107
- $ pip install hishel
108
- ```
109
-
110
- Let's begin with an example of a httpx client.
111
-
112
- ```python
113
- import hishel
114
-
115
- with hishel.CacheClient() as client:
116
- client.get("https://hishel.com") # 0.4749558370003797s
117
- client.get("https://hishel.com") # 0.002873589000046195s (~250x faster!)
118
- ```
119
-
120
- or in asynchronous context
121
-
122
- ```python
123
- import hishel
124
-
125
- async with hishel.AsyncCacheClient() as client:
126
- await client.get("https://hishel.com")
127
- await client.get("https://hishel.com") # takes from the cache
128
- ```
129
-
130
- ## Configurations
131
-
132
- Configure when and how you want to store your responses.
133
-
134
- ```python
135
- import hishel
136
-
137
- # All the specification configs
138
- controller = hishel.Controller(
139
- # Cache only GET and POST methods
140
- cacheable_methods=["GET", "POST"],
141
-
142
- # Cache only 200 status codes
143
- cacheable_status_codes=[200],
144
-
145
- # Use the stale response if there is a connection issue and the new response cannot be obtained.
146
- allow_stale=True,
147
-
148
- # First, revalidate the response and then utilize it.
149
- # If the response has not changed, do not download the
150
- # entire response data from the server; instead,
151
- # use the one you have because you know it has not been modified.
152
- always_revalidate=True,
153
- )
154
-
155
- # All the storage configs
156
- storage = hishel.S3Storage(
157
- bucket_name="my_bucket_name", # store my cache files in the `my_bucket_name` bucket
158
- ttl=3600, # delete the response if it is in the cache for more than an hour
159
- )
160
- client = hishel.CacheClient(controller=controller, storage=storage)
161
-
162
-
163
- # Ignore the fact that the server does not recommend you cache this request!
164
- client.post(
165
- "https://example.com",
166
- extensions={"force_cache": True}
167
- )
168
-
169
-
170
- # Return a regular response if it is in the cache; else, return a 504 status code. DO NOT SEND A REQUEST!
171
- client.post(
172
- "https://example.com",
173
- headers=[("Cache-Control", "only-if-cached")]
174
- )
175
-
176
-
177
- # Ignore cached responses and do not store incoming responses!
178
- response = client.post(
179
- "https://example.com",
180
- extensions={"cache_disabled": True}
181
- )
182
- ```
183
-
184
- ## How and where are the responses saved?
185
-
186
- The responses are stored by `Hishel` in [storages](https://hishel.com/userguide/#storages).
187
- You have complete control over them; you can change storage or even write your own if necessary.
188
-
189
-
190
- ## Support the project
191
-
192
- You can support the project by simply leaving a GitHub star ⭐ or by [contributing](https://hishel.com/contributing/).
193
- Help us grow and continue developing good software for you ❤️
194
-
195
- ## [0.1.5] - 2025-10-18
196
-
197
- ### 🚀 Features
198
-
199
- - *(perf)* Set chunk size to 128KB for httpx to reduce SQLite read/writes
200
- - Better cache-control parsing
201
- - Add close method to storages API (#384)
202
- - *(perf)* Increase requests buffer size to 128KB, disable charset detection
203
-
204
- ### 🐛 Bug Fixes
205
-
206
- - *(docs)* Fix some line breaks
207
-
208
- ### ⚙️ Miscellaneous Tasks
209
-
210
- - Remove some redundant files from repo
211
- ## [0.1.4] - 2025-10-14
212
-
213
- ### 🚀 Features
214
-
215
- - Add support for a sans-IO API (#366)
216
- - Allow already consumed streams with `CacheTransport` (#377)
217
- - Add sqlite storage for beta storages
218
- - Get rid of some locks from sqlite storage
219
- - Better async implemetation for sqlite storage
220
-
221
- ### 🐛 Bug Fixes
222
-
223
- - Create an sqlite file in a cache folder
224
- - Fix beta imports
225
-
226
- ### ⚙️ Miscellaneous Tasks
227
-
228
- - Improve CI (#369)
229
- - *(internal)* Remove src folder (#373)
230
- - *(internal)* Temporary remove python3.14 from CI
231
- - *(tests)* Add sqlite tests for new storage
232
- - *(tests)* Move some tests to beta
233
- ## [0.1.3] - 2025-07-06
234
-
235
- ### 🚀 Features
236
-
237
- - Support providing a path prefix to S3 storage (#342)
238
-
239
- ### 📚 Documentation
240
-
241
- - Update link to httpx transports page (#337)
242
- ## [0.1.2] - 2025-04-04
243
-
244
- ### 🐛 Bug Fixes
245
-
246
- - Requirements.txt to reduce vulnerabilities (#263)
247
- ## [0.0.30] - 2024-07-12
248
-
249
- ### 🐛 Bug Fixes
250
-
251
- - Requirements.txt to reduce vulnerabilities (#245)
252
- - Requirements.txt to reduce vulnerabilities (#255)
253
- ## [0.0.27] - 2024-05-31
254
-
255
- ### 🐛 Bug Fixes
256
-
257
- - *(redis)* Do not update metadata with negative ttl (#231)
258
- ## [0.0.1] - 2023-07-22