hishel 0.0.21__tar.gz → 0.0.22__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. {hishel-0.0.21 → hishel-0.0.22}/CHANGELOG.md +6 -0
  2. {hishel-0.0.21 → hishel-0.0.22}/PKG-INFO +11 -2
  3. {hishel-0.0.21 → hishel-0.0.22}/README.md +1 -1
  4. {hishel-0.0.21 → hishel-0.0.22}/hishel/__init__.py +1 -1
  5. {hishel-0.0.21 → hishel-0.0.22}/hishel/_async/_storages.py +110 -4
  6. hishel-0.0.22/hishel/_s3.py +54 -0
  7. {hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/_storages.py +110 -4
  8. {hishel-0.0.21 → hishel-0.0.22}/pyproject.toml +8 -2
  9. {hishel-0.0.21 → hishel-0.0.22}/.gitignore +0 -0
  10. {hishel-0.0.21 → hishel-0.0.22}/LICENSE +0 -0
  11. {hishel-0.0.21 → hishel-0.0.22}/hishel/_async/__init__.py +0 -0
  12. {hishel-0.0.21 → hishel-0.0.22}/hishel/_async/_client.py +0 -0
  13. {hishel-0.0.21 → hishel-0.0.22}/hishel/_async/_mock.py +0 -0
  14. {hishel-0.0.21 → hishel-0.0.22}/hishel/_async/_pool.py +0 -0
  15. {hishel-0.0.21 → hishel-0.0.22}/hishel/_async/_transports.py +0 -0
  16. {hishel-0.0.21 → hishel-0.0.22}/hishel/_controller.py +0 -0
  17. {hishel-0.0.21 → hishel-0.0.22}/hishel/_exceptions.py +0 -0
  18. {hishel-0.0.21 → hishel-0.0.22}/hishel/_files.py +0 -0
  19. {hishel-0.0.21 → hishel-0.0.22}/hishel/_headers.py +0 -0
  20. {hishel-0.0.21 → hishel-0.0.22}/hishel/_lfu_cache.py +0 -0
  21. {hishel-0.0.21 → hishel-0.0.22}/hishel/_serializers.py +0 -0
  22. {hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/__init__.py +0 -0
  23. {hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/_client.py +0 -0
  24. {hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/_mock.py +0 -0
  25. {hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/_pool.py +0 -0
  26. {hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/_transports.py +0 -0
  27. {hishel-0.0.21 → hishel-0.0.22}/hishel/_synchronization.py +0 -0
  28. {hishel-0.0.21 → hishel-0.0.22}/hishel/_utils.py +0 -0
  29. {hishel-0.0.21 → hishel-0.0.22}/hishel/py.typed +0 -0
{hishel-0.0.21 → hishel-0.0.22}/CHANGELOG.md

@@ -1,5 +1,11 @@
  # Changelog

+ ## 0.0.22 (31th January, 2024)
+
+ - Make `FileStorage` to check staleness of all cache files with set interval. (#169)
+ - Support AWS S3 storages. (#164)
+ - Move `typing_extensions` from requirements.txt to pyproject.toml. (#161)
+
  ## 0.0.21 (29th December, 2023)

  - Fix inner transport and connection pool instances closing. (#147)
{hishel-0.0.21 → hishel-0.0.22}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: hishel
- Version: 0.0.21
+ Version: 0.0.22
  Summary: Persistent cache implementation for httpx and httpcore
  Project-URL: Homepage, https://hishel.com
  Project-URL: Source, https://github.com/karpetrosyan/hishel
@@ -24,8 +24,11 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: Internet :: WWW/HTTP
  Requires-Python: >=3.8
  Requires-Dist: httpx>=0.22.0
+ Requires-Dist: typing-extensions>=4.8.0
  Provides-Extra: redis
  Requires-Dist: redis==5.0.1; extra == 'redis'
+ Provides-Extra: s3
+ Requires-Dist: boto3<=1.15.3,>=1.15.0; extra == 's3'
  Provides-Extra: sqlite
  Requires-Dist: anysqlite>=0.0.5; extra == 'sqlite'
  Provides-Extra: yaml
@@ -70,7 +73,7 @@ Description-Content-Type: text/markdown
  - 🧠 **Smart**: Attempts to clearly implement RFC 9111, understands `Vary`, `Etag`, `Last-Modified`, `Cache-Control`, and `Expires` headers, and *handles response re-validation automatically*.
  - ⚙️ **Configurable**: You have complete control over how the responses are stored and serialized.
  - 📦 **From the package**:
-     - Built-in support for [File system](https://en.wikipedia.org/wiki/File_system), [Redis](https://en.wikipedia.org/wiki/Redis), and [SQLite](https://en.wikipedia.org/wiki/SQLite) backends.
+     - Built-in support for [File system](https://en.wikipedia.org/wiki/File_system), [Redis](https://en.wikipedia.org/wiki/Redis), [SQLite](https://en.wikipedia.org/wiki/SQLite), and [AWS S3](https://aws.amazon.com/s3/) backends.
      - Built-in support for [JSON](https://en.wikipedia.org/wiki/JSON), [YAML](https://en.wikipedia.org/wiki/YAML), and [pickle](https://docs.python.org/3/library/pickle.html) serializers.
  - 🚀 **Very fast**: Your requests will be even faster if there are *no IO operations*.

@@ -153,6 +156,12 @@ Help us grow and continue developing good software for you ❤️

  # Changelog

+ ## 0.0.22 (31th January, 2024)
+
+ - Make `FileStorage` to check staleness of all cache files with set interval. (#169)
+ - Support AWS S3 storages. (#164)
+ - Move `typing_extensions` from requirements.txt to pyproject.toml. (#161)
+
  ## 0.0.21 (29th December, 2023)

  - Fix inner transport and connection pool instances closing. (#147)
{hishel-0.0.21 → hishel-0.0.22}/README.md

@@ -36,7 +36,7 @@
  - 🧠 **Smart**: Attempts to clearly implement RFC 9111, understands `Vary`, `Etag`, `Last-Modified`, `Cache-Control`, and `Expires` headers, and *handles response re-validation automatically*.
  - ⚙️ **Configurable**: You have complete control over how the responses are stored and serialized.
  - 📦 **From the package**:
-     - Built-in support for [File system](https://en.wikipedia.org/wiki/File_system), [Redis](https://en.wikipedia.org/wiki/Redis), and [SQLite](https://en.wikipedia.org/wiki/SQLite) backends.
+     - Built-in support for [File system](https://en.wikipedia.org/wiki/File_system), [Redis](https://en.wikipedia.org/wiki/Redis), [SQLite](https://en.wikipedia.org/wiki/SQLite), and [AWS S3](https://aws.amazon.com/s3/) backends.
      - Built-in support for [JSON](https://en.wikipedia.org/wiki/JSON), [YAML](https://en.wikipedia.org/wiki/YAML), and [pickle](https://docs.python.org/3/library/pickle.html) serializers.
  - 🚀 **Very fast**: Your requests will be even faster if there are *no IO operations*.

{hishel-0.0.21 → hishel-0.0.22}/hishel/__init__.py

@@ -14,4 +14,4 @@ def install_cache() -> None:  # pragma: no cover
      httpx.Client = CacheClient  # type: ignore


- __version__ = "0.0.21"
+ __version__ = "0.0.22"
{hishel-0.0.21 → hishel-0.0.22}/hishel/_async/_storages.py

@@ -5,6 +5,11 @@ import warnings
  from copy import deepcopy
  from pathlib import Path

+ try:
+     import boto3
+ except ImportError:  # pragma: no cover
+     boto3 = None  # type: ignore
+
  try:
      import anysqlite
  except ImportError:  # pragma: no cover
@@ -16,13 +21,14 @@ from typing_extensions import TypeAlias
  from hishel._serializers import BaseSerializer, clone_model

  from .._files import AsyncFileManager
+ from .._s3 import AsyncS3Manager
  from .._serializers import JSONSerializer, Metadata
  from .._synchronization import AsyncLock
  from .._utils import float_seconds_to_int_milliseconds

  logger = logging.getLogger("hishel.storages")

- __all__ = ("AsyncFileStorage", "AsyncRedisStorage", "AsyncSQLiteStorage", "AsyncInMemoryStorage")
+ __all__ = ("AsyncFileStorage", "AsyncRedisStorage", "AsyncSQLiteStorage", "AsyncInMemoryStorage", "AsyncS3Storage")

  StoredResponse: TypeAlias = tp.Tuple[Response, Request, Metadata]

@@ -61,6 +67,9 @@ class AsyncFileStorage(AsyncBaseStorage):
      :type base_path: tp.Optional[Path], optional
      :param ttl: Specifies the maximum number of seconds that the response can be cached, defaults to None
      :type ttl: tp.Optional[tp.Union[int, float]], optional
+     :param check_ttl_every: How often in seconds to check staleness of **all** cache files.
+         Makes sense only with set `ttl`, defaults to 60
+     :type check_ttl_every: tp.Union[int, float]
      """

      def __init__(
@@ -68,6 +77,7 @@ class AsyncFileStorage(AsyncBaseStorage):
          serializer: tp.Optional[BaseSerializer] = None,
          base_path: tp.Optional[Path] = None,
          ttl: tp.Optional[tp.Union[int, float]] = None,
+         check_ttl_every: tp.Union[int, float] = 60,
      ) -> None:
          super().__init__(serializer, ttl)

@@ -78,6 +88,8 @@ class AsyncFileStorage(AsyncBaseStorage):

          self._file_manager = AsyncFileManager(is_binary=self._serializer.is_binary)
          self._lock = AsyncLock()
+         self._check_ttl_every = check_ttl_every
+         self._last_cleaned = time.monotonic()

      async def store(self, key: str, response: Response, request: Request, metadata: Metadata) -> None:
          """
@@ -99,7 +111,7 @@ class AsyncFileStorage(AsyncBaseStorage):
                  str(response_path),
                  self._serializer.dumps(response=response, request=request, metadata=metadata),
              )
-         await self._remove_expired_caches()
+         await self._remove_expired_caches(response_path)

      async def retrieve(self, key: str) -> tp.Optional[StoredResponse]:
          """
@@ -113,7 +125,7 @@ class AsyncFileStorage(AsyncBaseStorage):

          response_path = self._base_path / key

-         await self._remove_expired_caches()
+         await self._remove_expired_caches(response_path)
          async with self._lock:
              if response_path.exists():
                  return self._serializer.loads(await self._file_manager.read_from(str(response_path)))
@@ -122,10 +134,18 @@ class AsyncFileStorage(AsyncBaseStorage):
      async def aclose(self) -> None:  # pragma: no cover
          return

-     async def _remove_expired_caches(self) -> None:
+     async def _remove_expired_caches(self, response_path: Path) -> None:
          if self._ttl is None:
              return

+         if time.monotonic() - self._last_cleaned < self._check_ttl_every:
+             if response_path.is_file():
+                 age = time.time() - response_path.stat().st_mtime
+                 if age > self._ttl:
+                     response_path.unlink()
+             return
+
+         self._last_cleaned = time.monotonic()
          async with self._lock:
              for file in self._base_path.iterdir():
                  if file.is_file():
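In the `AsyncFileStorage` hunks above, `_remove_expired_caches` now receives the path being touched: a full sweep of the cache directory runs at most once every `check_ttl_every` seconds, and between sweeps only that single file is compared against `ttl` and unlinked if stale. Below is a minimal usage sketch of the new parameter; the client name, URL, and values are illustrative and assume `AsyncCacheClient` accepts a `storage` argument as in 0.0.21. The remaining hunks of this module add the new `AsyncS3Storage` class.

```python
import asyncio

import hishel

# Cache responses on disk for 5 minutes, but sweep the whole cache
# directory for stale files at most once every 10 minutes.
storage = hishel.AsyncFileStorage(ttl=300, check_ttl_every=600)


async def main() -> None:
    # Assumes AsyncCacheClient accepts a `storage` argument, as in 0.0.21.
    async with hishel.AsyncCacheClient(storage=storage) as client:
        await client.get("https://hishel.com")


asyncio.run(main())
```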
@@ -402,3 +422,89 @@ class AsyncInMemoryStorage(AsyncBaseStorage):

          for key in keys_to_remove:
              self._cache.remove_key(key)
+
+
+ class AsyncS3Storage(AsyncBaseStorage):  # pragma: no cover
+     """
+     AWS S3 storage.
+
+     :param bucket_name: The name of the bucket to store the responses in
+     :type bucket_name: str
+     :param serializer: Serializer capable of serializing and de-serializing http responses, defaults to None
+     :type serializer: tp.Optional[BaseSerializer], optional
+     :param ttl: Specifies the maximum number of seconds that the response can be cached, defaults to None
+     :type ttl: tp.Optional[tp.Union[int, float]], optional
+     :param client: A client for S3, defaults to None
+     :type client: tp.Optional[tp.Any], optional
+     """
+
+     def __init__(
+         self,
+         bucket_name: str,
+         serializer: tp.Optional[BaseSerializer] = None,
+         ttl: tp.Optional[tp.Union[int, float]] = None,
+         client: tp.Optional[tp.Any] = None,
+     ) -> None:
+         super().__init__(serializer, ttl)
+
+         if boto3 is None:  # pragma: no cover
+             raise RuntimeError(
+                 (
+                     f"The `{type(self).__name__}` was used, but the required packages were not found. "
+                     "Check that you have `Hishel` installed with the `s3` extension as shown.\n"
+                     "```pip install hishel[s3]```"
+                 )
+             )
+
+         self._bucket_name = bucket_name
+         client = client or boto3.client("s3")
+         self._s3_manager = AsyncS3Manager(client=client, bucket_name=bucket_name, is_binary=self._serializer.is_binary)
+         self._lock = AsyncLock()
+
+     async def store(self, key: str, response: Response, request: Request, metadata: Metadata) -> None:
+         """
+         Stores the response in the cache.
+
+         :param key: Hashed value of concatenated HTTP method and URI
+         :type key: str
+         :param response: An HTTP response
+         :type response: httpcore.Response
+         :param request: An HTTP request
+         :type request: httpcore.Request
+         :param metadata: Additioal information about the stored response
+         :type metadata: Metadata`
+         """
+
+         async with self._lock:
+             serialized = self._serializer.dumps(response=response, request=request, metadata=metadata)
+             await self._s3_manager.write_to(path=key, data=serialized)
+
+         await self._remove_expired_caches()
+
+     async def retrieve(self, key: str) -> tp.Optional[StoredResponse]:
+         """
+         Retreives the response from the cache using his key.
+
+         :param key: Hashed value of concatenated HTTP method and URI
+         :type key: str
+         :return: An HTTP response and its HTTP request.
+         :rtype: tp.Optional[StoredResponse]
+         """
+
+         await self._remove_expired_caches()
+         async with self._lock:
+             try:
+                 return self._serializer.loads(await self._s3_manager.read_from(path=key))
+             except Exception:
+                 return None
+
+     async def aclose(self) -> None:  # pragma: no cover
+         return
+
+     async def _remove_expired_caches(self) -> None:
+         if self._ttl is None:
+             return
+
+         async with self._lock:
+             converted_ttl = float_seconds_to_int_milliseconds(self._ttl)
+             await self._s3_manager.remove_expired(ttl=converted_ttl)
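`AsyncS3Storage` above delegates all I/O to `AsyncS3Manager` (defined in the new `hishel/_s3.py` below) and falls back to `boto3.client("s3")` when no client is passed, so credentials come from the usual boto3 configuration. A hedged sketch of wiring it into a cached async client; the bucket name and region are placeholders, and it assumes the class is re-exported at the package top level like the other storages:

```python
import asyncio

import boto3
import hishel


async def main() -> None:
    s3_client = boto3.client("s3", region_name="us-east-1")  # placeholder region
    storage = hishel.AsyncS3Storage(
        bucket_name="my-hishel-cache",  # hypothetical bucket
        client=s3_client,
        ttl=3600,  # evict cached entries older than one hour
    )
    async with hishel.AsyncCacheClient(storage=storage) as client:
        await client.get("https://hishel.com")


asyncio.run(main())
```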
hishel-0.0.22/hishel/_s3.py

@@ -0,0 +1,54 @@
+ import typing as tp
+ from datetime import datetime, timedelta, timezone
+
+ from anyio import to_thread
+
+
+ class S3Manager:
+     def __init__(self, client: tp.Any, bucket_name: str, is_binary: bool = False):
+         self._client = client
+         self._bucket_name = bucket_name
+         self._is_binary = is_binary
+
+     def write_to(self, path: str, data: tp.Union[bytes, str]) -> None:
+         path = "hishel-" + path
+         if isinstance(data, str):
+             data = data.encode("utf-8")
+
+         self._client.put_object(Bucket=self._bucket_name, Key=path, Body=data)
+
+     def read_from(self, path: str) -> tp.Union[bytes, str]:
+         path = "hishel-" + path
+         response = self._client.get_object(
+             Bucket=self._bucket_name,
+             Key=path,
+         )
+
+         content = response["Body"].read()
+
+         if self._is_binary:  # pragma: no cover
+             return tp.cast(bytes, content)
+
+         return tp.cast(str, content.decode("utf-8"))
+
+     def remove_expired(self, ttl: int) -> None:
+         for obj in self._client.list_objects(Bucket=self._bucket_name).get("Contents", []):
+             if not obj["Key"].startswith("hishel-"):  # pragma: no cover
+                 continue
+
+             if datetime.now(timezone.utc) - obj["LastModified"] > timedelta(milliseconds=ttl):
+                 self._client.delete_object(Bucket=self._bucket_name, Key=obj["Key"])
+
+
+ class AsyncS3Manager:
+     def __init__(self, client: tp.Any, bucket_name: str, is_binary: bool = False):
+         self._sync_manager = S3Manager(client, bucket_name, is_binary)
+
+     async def write_to(self, path: str, data: tp.Union[bytes, str]) -> None:
+         return await to_thread.run_sync(self._sync_manager.write_to, path, data)
+
+     async def read_from(self, path: str) -> tp.Union[bytes, str]:
+         return await to_thread.run_sync(self._sync_manager.read_from, path)
+
+     async def remove_expired(self, ttl: int) -> None:
+         return await to_thread.run_sync(self._sync_manager.remove_expired, ttl)
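Two details of the new `S3Manager`: every object key is namespaced with a `hishel-` prefix, and `remove_expired` expects its `ttl` in milliseconds (the storages convert seconds with `float_seconds_to_int_milliseconds`) and compares it against each object's `LastModified` timestamp. A small self-contained check of that expiry arithmetic, with illustrative values:

```python
from datetime import datetime, timedelta, timezone

# One hour TTL, expressed in milliseconds as the storages pass it down.
ttl_ms = 3600 * 1000

# An object last modified two hours ago (illustrative timestamp).
last_modified = datetime.now(timezone.utc) - timedelta(hours=2)

# The same comparison remove_expired() performs before delete_object().
expired = datetime.now(timezone.utc) - last_modified > timedelta(milliseconds=ttl_ms)
print(expired)  # True: two hours exceeds a one-hour TTL
```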
{hishel-0.0.21 → hishel-0.0.22}/hishel/_sync/_storages.py

@@ -5,6 +5,11 @@ import warnings
  from copy import deepcopy
  from pathlib import Path

+ try:
+     import boto3
+ except ImportError:  # pragma: no cover
+     boto3 = None  # type: ignore
+
  try:
      import sqlite3
  except ImportError:  # pragma: no cover
@@ -16,13 +21,14 @@ from typing_extensions import TypeAlias
  from hishel._serializers import BaseSerializer, clone_model

  from .._files import FileManager
+ from .._s3 import S3Manager
  from .._serializers import JSONSerializer, Metadata
  from .._synchronization import Lock
  from .._utils import float_seconds_to_int_milliseconds

  logger = logging.getLogger("hishel.storages")

- __all__ = ("FileStorage", "RedisStorage", "SQLiteStorage", "InMemoryStorage")
+ __all__ = ("FileStorage", "RedisStorage", "SQLiteStorage", "InMemoryStorage", "S3Storage")

  StoredResponse: TypeAlias = tp.Tuple[Response, Request, Metadata]

@@ -61,6 +67,9 @@ class FileStorage(BaseStorage):
      :type base_path: tp.Optional[Path], optional
      :param ttl: Specifies the maximum number of seconds that the response can be cached, defaults to None
      :type ttl: tp.Optional[tp.Union[int, float]], optional
+     :param check_ttl_every: How often in seconds to check staleness of **all** cache files.
+         Makes sense only with set `ttl`, defaults to 60
+     :type check_ttl_every: tp.Union[int, float]
      """

      def __init__(
@@ -68,6 +77,7 @@ class FileStorage(BaseStorage):
          serializer: tp.Optional[BaseSerializer] = None,
          base_path: tp.Optional[Path] = None,
          ttl: tp.Optional[tp.Union[int, float]] = None,
+         check_ttl_every: tp.Union[int, float] = 60,
      ) -> None:
          super().__init__(serializer, ttl)

@@ -78,6 +88,8 @@ class FileStorage(BaseStorage):

          self._file_manager = FileManager(is_binary=self._serializer.is_binary)
          self._lock = Lock()
+         self._check_ttl_every = check_ttl_every
+         self._last_cleaned = time.monotonic()

      def store(self, key: str, response: Response, request: Request, metadata: Metadata) -> None:
          """
@@ -99,7 +111,7 @@ class FileStorage(BaseStorage):
                  str(response_path),
                  self._serializer.dumps(response=response, request=request, metadata=metadata),
              )
-         self._remove_expired_caches()
+         self._remove_expired_caches(response_path)

      def retrieve(self, key: str) -> tp.Optional[StoredResponse]:
          """
@@ -113,7 +125,7 @@ class FileStorage(BaseStorage):

          response_path = self._base_path / key

-         self._remove_expired_caches()
+         self._remove_expired_caches(response_path)
          with self._lock:
              if response_path.exists():
                  return self._serializer.loads(self._file_manager.read_from(str(response_path)))
@@ -122,10 +134,18 @@ class FileStorage(BaseStorage):
      def close(self) -> None:  # pragma: no cover
          return

-     def _remove_expired_caches(self) -> None:
+     def _remove_expired_caches(self, response_path: Path) -> None:
          if self._ttl is None:
              return

+         if time.monotonic() - self._last_cleaned < self._check_ttl_every:
+             if response_path.is_file():
+                 age = time.time() - response_path.stat().st_mtime
+                 if age > self._ttl:
+                     response_path.unlink()
+             return
+
+         self._last_cleaned = time.monotonic()
          with self._lock:
              for file in self._base_path.iterdir():
                  if file.is_file():
@@ -402,3 +422,89 @@ class InMemoryStorage(BaseStorage):

          for key in keys_to_remove:
              self._cache.remove_key(key)
+
+
+ class S3Storage(BaseStorage):  # pragma: no cover
+     """
+     AWS S3 storage.
+
+     :param bucket_name: The name of the bucket to store the responses in
+     :type bucket_name: str
+     :param serializer: Serializer capable of serializing and de-serializing http responses, defaults to None
+     :type serializer: tp.Optional[BaseSerializer], optional
+     :param ttl: Specifies the maximum number of seconds that the response can be cached, defaults to None
+     :type ttl: tp.Optional[tp.Union[int, float]], optional
+     :param client: A client for S3, defaults to None
+     :type client: tp.Optional[tp.Any], optional
+     """
+
+     def __init__(
+         self,
+         bucket_name: str,
+         serializer: tp.Optional[BaseSerializer] = None,
+         ttl: tp.Optional[tp.Union[int, float]] = None,
+         client: tp.Optional[tp.Any] = None,
+     ) -> None:
+         super().__init__(serializer, ttl)
+
+         if boto3 is None:  # pragma: no cover
+             raise RuntimeError(
+                 (
+                     f"The `{type(self).__name__}` was used, but the required packages were not found. "
+                     "Check that you have `Hishel` installed with the `s3` extension as shown.\n"
+                     "```pip install hishel[s3]```"
+                 )
+             )
+
+         self._bucket_name = bucket_name
+         client = client or boto3.client("s3")
+         self._s3_manager = S3Manager(client=client, bucket_name=bucket_name, is_binary=self._serializer.is_binary)
+         self._lock = Lock()
+
+     def store(self, key: str, response: Response, request: Request, metadata: Metadata) -> None:
+         """
+         Stores the response in the cache.
+
+         :param key: Hashed value of concatenated HTTP method and URI
+         :type key: str
+         :param response: An HTTP response
+         :type response: httpcore.Response
+         :param request: An HTTP request
+         :type request: httpcore.Request
+         :param metadata: Additioal information about the stored response
+         :type metadata: Metadata`
+         """
+
+         with self._lock:
+             serialized = self._serializer.dumps(response=response, request=request, metadata=metadata)
+             self._s3_manager.write_to(path=key, data=serialized)
+
+         self._remove_expired_caches()
+
+     def retrieve(self, key: str) -> tp.Optional[StoredResponse]:
+         """
+         Retreives the response from the cache using his key.
+
+         :param key: Hashed value of concatenated HTTP method and URI
+         :type key: str
+         :return: An HTTP response and its HTTP request.
+         :rtype: tp.Optional[StoredResponse]
+         """
+
+         self._remove_expired_caches()
+         with self._lock:
+             try:
+                 return self._serializer.loads(self._s3_manager.read_from(path=key))
+             except Exception:
+                 return None
+
+     def close(self) -> None:  # pragma: no cover
+         return
+
+     def _remove_expired_caches(self) -> None:
+         if self._ttl is None:
+             return
+
+         with self._lock:
+             converted_ttl = float_seconds_to_int_milliseconds(self._ttl)
+             self._s3_manager.remove_expired(ttl=converted_ttl)
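The synchronous `S3Storage` mirrors `AsyncS3Storage` above. A hedged usage sketch with the sync client; the bucket name is a placeholder, and it assumes `pip install hishel[s3]` and that `CacheClient` accepts a `storage` argument as in earlier releases:

```python
import hishel

# Hypothetical bucket; boto3 credentials come from the environment by default.
storage = hishel.S3Storage(bucket_name="my-hishel-cache", ttl=3600)

with hishel.CacheClient(storage=storage) as client:
    client.get("https://hishel.com")  # fetched from the origin and written to S3
    client.get("https://hishel.com")  # served from the S3-backed cache while fresh
```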
{hishel-0.0.21 → hishel-0.0.22}/pyproject.toml

@@ -29,7 +29,8 @@ classifiers = [
      "Topic :: Internet :: WWW/HTTP",
  ]
  dependencies = [
-     "httpx>=0.22.0"
+     "httpx>=0.22.0",
+     "typing_extensions>=4.8.0"
  ]

  [project.optional-dependencies]
@@ -46,6 +47,10 @@ sqlite = [
      "anysqlite>=0.0.5"
  ]

+ s3 = [
+     "boto3>=1.15.0,<=1.15.3"
+ ]
+
  [project.urls]
  Homepage = "https://hishel.com"
  Source = "https://github.com/karpetrosyan/hishel"
@@ -88,7 +93,8 @@ filterwarnings = []
  [tool.coverage.run]
  omit = [
      "venv/*",
-     "hishel/_sync/*"
+     "hishel/_sync/*",
+     "hishel/_s3.py"
  ]
  include = ["hishel/*", "tests/*"]
