hishel 0.1.5__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. hishel/__init__.py +63 -52
  2. hishel/_async_cache.py +213 -0
  3. hishel/_async_httpx.py +236 -0
  4. hishel/{beta/_core → _core}/_headers.py +11 -1
  5. hishel/{beta/_core → _core}/_spec.py +270 -136
  6. hishel/_core/_storages/_async_base.py +71 -0
  7. hishel/_core/_storages/_async_sqlite.py +420 -0
  8. hishel/_core/_storages/_packing.py +144 -0
  9. hishel/_core/_storages/_sync_base.py +71 -0
  10. hishel/_core/_storages/_sync_sqlite.py +420 -0
  11. hishel/{beta/_core → _core}/models.py +100 -37
  12. hishel/_policies.py +49 -0
  13. hishel/_sync_cache.py +213 -0
  14. hishel/_sync_httpx.py +236 -0
  15. hishel/_utils.py +37 -366
  16. hishel/asgi.py +400 -0
  17. hishel/fastapi.py +263 -0
  18. hishel/httpx.py +12 -0
  19. hishel/{beta/requests.py → requests.py} +31 -25
  20. hishel-1.0.0.dist-info/METADATA +513 -0
  21. hishel-1.0.0.dist-info/RECORD +24 -0
  22. hishel/_async/__init__.py +0 -5
  23. hishel/_async/_client.py +0 -30
  24. hishel/_async/_mock.py +0 -43
  25. hishel/_async/_pool.py +0 -201
  26. hishel/_async/_storages.py +0 -768
  27. hishel/_async/_transports.py +0 -282
  28. hishel/_controller.py +0 -581
  29. hishel/_exceptions.py +0 -10
  30. hishel/_files.py +0 -54
  31. hishel/_headers.py +0 -215
  32. hishel/_lfu_cache.py +0 -71
  33. hishel/_lmdb_types_.pyi +0 -53
  34. hishel/_s3.py +0 -122
  35. hishel/_serializers.py +0 -329
  36. hishel/_sync/__init__.py +0 -5
  37. hishel/_sync/_client.py +0 -30
  38. hishel/_sync/_mock.py +0 -43
  39. hishel/_sync/_pool.py +0 -201
  40. hishel/_sync/_storages.py +0 -768
  41. hishel/_sync/_transports.py +0 -282
  42. hishel/_synchronization.py +0 -37
  43. hishel/beta/__init__.py +0 -59
  44. hishel/beta/_async_cache.py +0 -167
  45. hishel/beta/_core/__init__.py +0 -0
  46. hishel/beta/_core/_async/_storages/_sqlite.py +0 -411
  47. hishel/beta/_core/_base/_storages/_base.py +0 -272
  48. hishel/beta/_core/_base/_storages/_packing.py +0 -165
  49. hishel/beta/_core/_sync/_storages/_sqlite.py +0 -411
  50. hishel/beta/_sync_cache.py +0 -167
  51. hishel/beta/httpx.py +0 -328
  52. hishel-0.1.5.dist-info/METADATA +0 -258
  53. hishel-0.1.5.dist-info/RECORD +0 -41
  54. {hishel-0.1.5.dist-info → hishel-1.0.0.dist-info}/WHEEL +0 -0
  55. {hishel-0.1.5.dist-info → hishel-1.0.0.dist-info}/licenses/LICENSE +0 -0
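The rename entries above show the experimental `hishel.beta` package being promoted to the top-level namespace for 1.0.0, alongside new integration modules (`hishel/asgi.py`, `hishel/fastapi.py`, `hishel/httpx.py`) and the removal of the old `_async`/`_sync` transport and storage layers. A rough illustration of the import-path shift implied by the renames — the exported names themselves are not visible in this diff, so treat these module paths as inferred, not as documented API:

```python
# hishel 0.1.5: experimental modules lived under hishel.beta
import hishel.beta.requests   # hishel/beta/requests.py

# hishel 1.0.0: the same modules move up one level (per the rename entries above)
import hishel.requests        # hishel/requests.py
import hishel.httpx           # hishel/httpx.py, new in 1.0.0 (+12 lines)
```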
hishel/_utils.py CHANGED
@@ -1,113 +1,17 @@
  from __future__ import annotations

  import calendar
- import hashlib
- import json
- import sqlite3
  import time
  import typing as tp
- from datetime import date
- from email.utils import parsedate_tz
- from typing import Any, AsyncIterator, Generator, Iterable, Iterator, TypeVar
-
- import anyio
- import anysqlite
- import httpcore
- import httpx
- from anyio import from_thread, to_thread
+ from email.utils import formatdate, parsedate_tz
+ from pathlib import Path
+ from typing import AsyncIterator, Iterable, Iterator

  HEADERS_ENCODING = "iso-8859-1"

  T = tp.TypeVar("T")


- class BaseClock:
-     def now(self) -> int:
-         raise NotImplementedError()
-
-
- class Clock(BaseClock):
-     def now(self) -> int:
-         return int(time.time())
-
-
- def normalized_url(url: tp.Union[httpcore.URL, str, bytes]) -> str:
-     if isinstance(url, str):  # pragma: no cover
-         return url
-
-     if isinstance(url, bytes):  # pragma: no cover
-         return url.decode("ascii")
-
-     if isinstance(url, httpcore.URL):
-         port = f":{url.port}" if url.port is not None else ""
-         return f"{url.scheme.decode('ascii')}://{url.host.decode('ascii')}{port}{url.target.decode('ascii')}"
-     assert False, "Invalid type for `normalized_url`"  # pragma: no cover
-
-
- def get_safe_url(url: httpcore.URL) -> str:
-     httpx_url = httpx.URL(bytes(url).decode("ascii"))
-
-     schema = httpx_url.scheme
-     host = httpx_url.host
-     path = httpx_url.path
-
-     return f"{schema}://{host}{path}"
-
-
- def generate_key(request: httpcore.Request, body: bytes = b"") -> str:
-     encoded_url = normalized_url(request.url).encode("ascii")
-
-     key_parts = [request.method, encoded_url, body]
-
-     # FIPs mode disables blake2 algorithm, use sha256 instead when not found.
-     blake2b_hasher = None
-     sha256_hasher = hashlib.sha256(usedforsecurity=False)
-     try:
-         blake2b_hasher = hashlib.blake2b(digest_size=16, usedforsecurity=False)
-     except (ValueError, TypeError, AttributeError):
-         pass
-
-     hexdigest: str
-     if blake2b_hasher:
-         for part in key_parts:
-             blake2b_hasher.update(part)
-
-         hexdigest = blake2b_hasher.hexdigest()
-     else:
-         for part in key_parts:
-             sha256_hasher.update(part)
-
-         hexdigest = sha256_hasher.hexdigest()
-     return hexdigest
-
-
- def extract_header_values(
-     headers: tp.List[tp.Tuple[bytes, bytes]],
-     header_key: tp.Union[bytes, str],
-     single: bool = False,
- ) -> tp.List[bytes]:
-     if isinstance(header_key, str):
-         header_key = header_key.encode(HEADERS_ENCODING)
-     extracted_headers = []
-     for key, value in headers:
-         if key.lower() == header_key.lower():
-             extracted_headers.append(value)
-             if single:
-                 break
-     return extracted_headers
-
-
- def extract_header_values_decoded(
-     headers: tp.List[tp.Tuple[bytes, bytes]], header_key: bytes, single: bool = False
- ) -> tp.List[str]:
-     values = extract_header_values(headers=headers, header_key=header_key, single=single)
-     return [value.decode(HEADERS_ENCODING) for value in values]
-
-
- def header_presents(headers: tp.List[tp.Tuple[bytes, bytes]], header_key: bytes) -> bool:
-     return bool(extract_header_values(headers, header_key, single=True))
-
-
  def parse_date(date: str) -> tp.Optional[int]:
      expires = parsedate_tz(date)
      if expires is None:
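The removed `generate_key` above keyed cache entries by hashing the request method, normalized URL, and body, preferring `blake2b` and falling back to `sha256` when FIPS mode disables blake2. A standalone sketch of that fallback pattern — the `cache_key` and `_fips_safe_hasher` names and the bytes-only signature are illustrative, not hishel API:

```python
import hashlib


def _fips_safe_hasher():
    # blake2b may be unavailable under FIPS mode; fall back to sha256,
    # mirroring the try/except in the removed generate_key() above.
    try:
        return hashlib.blake2b(digest_size=16, usedforsecurity=False)
    except (ValueError, TypeError, AttributeError):
        return hashlib.sha256(usedforsecurity=False)


def cache_key(method: bytes, url: bytes, body: bytes = b"") -> str:
    hasher = _fips_safe_hasher()
    for part in (method, url, body):
        hasher.update(part)
    return hasher.hexdigest()


print(cache_key(b"GET", b"https://example.com/"))  # 32-char blake2b hex digest on most builds
```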
@@ -116,18 +20,10 @@ def parse_date(date: str) -> tp.Optional[int]:
      return timestamp


- async def asleep(seconds: tp.Union[int, float]) -> None:
-     await anyio.sleep(seconds)
-
-
  def sleep(seconds: tp.Union[int, float]) -> None:
      time.sleep(seconds)


- def float_seconds_to_int_milliseconds(seconds: float) -> int:
-     return int(seconds * 1000)
-
-
  def partition(iterable: tp.Iterable[T], predicate: tp.Callable[[T], bool]) -> tp.Tuple[tp.List[T], tp.List[T]]:
      """
      Partition an iterable into two lists: one for matching items and one for non-matching items.
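`partition` survives the cleanup. Its body is elided by the hunk above, but the signature, the docstring, and the `return matching, non_matching` line at the top of the next hunk pin down its behavior; for example:

```python
from hishel._utils import partition

evens, odds = partition([1, 2, 3, 4, 5], lambda n: n % 2 == 0)
assert evens == [2, 4]    # items matching the predicate
assert odds == [1, 3, 5]  # everything else
```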
@@ -155,69 +51,33 @@ def partition(iterable: tp.Iterable[T], predicate: tp.Callable[[T], bool]) -> tp
      return matching, non_matching


- def async_iterator_to_sync(iterator: AsyncIterator[bytes]) -> Iterator[bytes]:
-     """
-     Convert an asynchronous byte iterator to a synchronous one.
-     This function takes an asynchronous iterator that yields bytes and converts it into
-     a synchronous iterator.
-
-     Args:
-         iterator (AsyncIterator[bytes]): The asynchronous byte iterator to be converted.
-     Returns:
-         Iterator[bytes]: A synchronous iterator that yields the same byte chunks as the input iterator.
-     Example:
-         ```python
-         async_iter = some_async_byte_stream()
-         sync_iter = async_iterator_to_sync(async_iter)
-         for chunk in sync_iter:
-             process_bytes(chunk)
-         ```
-     """
-
-     while True:
-         try:
-             chunk = from_thread.run(iterator.__anext__)
-         except StopAsyncIteration:
-             break
-         yield chunk
-
-
- def _call_next(iterator: Iterator[bytes]) -> bytes:
-     try:
-         return iterator.__next__()
-     except StopIteration:
-         raise StopAsyncIteration
+ async def make_async_iterator(
+     iterable: Iterable[bytes],
+ ) -> AsyncIterator[bytes]:
+     for item in iterable:
+         yield item


- async def sync_iterator_to_async(iterator: Iterator[bytes]) -> AsyncIterator[bytes]:
-     """
-     Converts a synchronous bytes iterator to an asynchronous one.
-     This function takes a synchronous iterator that yields bytes and converts it into an
-     asynchronous iterator, allowing it to be used in async contexts without blocking.
-     Args:
-         iterator (Iterator[bytes]): A synchronous iterator yielding bytes objects.
-     Returns:
-         AsyncIterator[bytes]: An asynchronous iterator yielding the same bytes objects.
-     Example:
-         ```
-         sync_iter = iter([b'data1', b'data2'])
-         async for chunk in sync_iterator_to_async(sync_iter):
-             await process_chunk(chunk)
-         ```
+ def filter_mapping(mapping: tp.Mapping[str, T], keys_to_exclude: tp.Iterable[str]) -> tp.Dict[str, T]:
      """
+     Filter out specified keys from a string-keyed mapping using case-insensitive comparison.

-     while True:
-         try:
-             chunk = await to_thread.run_sync(_call_next, iterator)
-         except StopAsyncIteration:
-             break
+     Args:
+         mapping: The input mapping with string keys to filter.
+         keys_to_exclude: An iterable of string keys to exclude (case-insensitive).

-         yield chunk
+     Returns:
+         A new dictionary with the specified keys excluded.

-
- async def make_async_iterator(iterable: Iterable[bytes]) -> AsyncIterator[bytes]:
-     for item in iterable:
-         yield item
+     Example:
+         ```python
+         original = {'a': 1, 'B': 2, 'c': 3}
+         filtered = filter_mapping(original, ['b'])
+         # filtered will be {'a': 1, 'c': 3}
+         ```
+     """
+     exclude_set = {k.lower() for k in keys_to_exclude}
+     return {k: v for k, v in mapping.items() if k.lower() not in exclude_set}


  def make_sync_iterator(iterable: Iterable[bytes]) -> Iterator[bytes]:
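Both replacements are small and fully visible above, so they can be exercised directly. A quick sketch — the header names are arbitrary illustration, not anything hishel mandates:

```python
import asyncio

from hishel._utils import filter_mapping, make_async_iterator


async def main() -> None:
    # Case-insensitive key exclusion, per the filter_mapping docstring above.
    headers = {"Content-Type": "text/html", "Set-Cookie": "a=1", "ETag": '"abc"'}
    assert filter_mapping(headers, ["set-cookie"]) == {
        "Content-Type": "text/html",
        "ETag": '"abc"',
    }

    # Wrap an in-memory body so it can be consumed with `async for`,
    # the way a streamed response body would be.
    chunks = [chunk async for chunk in make_async_iterator([b"hello, ", b"world"])]
    assert b"".join(chunks) == b"hello, world"


asyncio.run(main())
```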
@@ -247,212 +107,23 @@ def snake_to_header(text: str) -> str:
      return "X-" + "-".join(word.capitalize() for word in text.split("_"))


- _T = TypeVar("_T")
-
-
- class GeneratorWithReturnValue:
-     def __init__(
-         self, gen: Generator[None, bytes | None, bytes], stream: AsyncIterator[bytes] | Iterator[bytes]
-     ) -> None:
-         self.gen = gen
-         self.stream = stream
-         self.value: bytes | None = None
-
-     def __iter__(self) -> Iterator[bytes]:
-         return self
+ def ensure_cache_dict(base_path: str | None = None) -> Path:
+     _base_path = Path(base_path) if base_path is not None else Path(".cache/hishel")
+     _gitignore_file = _base_path / ".gitignore"

-     def __next__(self) -> bytes:
-         assert isinstance(self.stream, Iterator)
+     _base_path.mkdir(parents=True, exist_ok=True)

-         try:
-             chunk = next(self.stream)
-             self.gen.send(chunk)
-         except StopIteration as exc:
-             self.gen.send(None)
-             self.value = exc.value
-             raise
-         return chunk
+     if not _gitignore_file.is_file():
+         with open(_gitignore_file, "w", encoding="utf-8") as f:
+             f.write("# Automatically created by Hishel\n*")
+     return _base_path

-     def __aiter__(self) -> AsyncIterator[bytes]:
-         return self

-     async def __anext__(self) -> bytes:
-         assert isinstance(self.stream, AsyncIterator)
-         try:
-             chunk = await self.stream.__anext__()
-             self.gen.send(chunk)
-         except StopIteration as exc:
-             self.gen.send(None)
-             self.value = exc.value
-             raise
-         return chunk
-
-
- def print_sqlite_state(conn: sqlite3.Connection) -> str:
+ def generate_http_date() -> str:
      """
-     Print all tables and their rows in a pretty format suitable for inline snapshots.
+     Generate a Date header value for HTTP responses.
+     Returns date in RFC 1123 format (required by HTTP/1.1).

-     Args:
-         conn: SQLite database connection
-
-     Returns:
-         Formatted string representation of the database state
-     """
-     cursor = conn.cursor()
-
-     # Get all table names
-     cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
-     tables = [row[0] for row in cursor.fetchall()]
-
-     output_lines = []
-     output_lines.append("=" * 80)
-     output_lines.append("DATABASE SNAPSHOT")
-     output_lines.append("=" * 80)
-
-     for table_name in tables:
-         # Get column information
-         cursor.execute(f"PRAGMA table_info({table_name})")
-         columns = cursor.fetchall()
-         column_names = [col[1] for col in columns]
-         column_types = {col[1]: col[2] for col in columns}
-
-         # Get all rows
-         cursor.execute(f"SELECT * FROM {table_name}")
-         rows = cursor.fetchall()
-
-         output_lines.append("")
-         output_lines.append(f"TABLE: {table_name}")
-         output_lines.append("-" * 80)
-         output_lines.append(f"Rows: {len(rows)}")
-         output_lines.append("")
-
-         if not rows:
-             output_lines.append("  (empty)")
-             continue
-
-         # Format each row
-         for idx, row in enumerate(rows, 1):
-             output_lines.append(f"  Row {idx}:")
-
-             for col_name, value in zip(column_names, row):
-                 col_type = column_types[col_name]
-                 formatted_value = format_value(value, col_name, col_type)
-                 output_lines.append(f"    {col_name:15} = {formatted_value}")
-
-             if idx < len(rows):
-                 output_lines.append("")
-
-     output_lines.append("")
-     output_lines.append("=" * 80)
-
-     result = "\n".join(output_lines)
-     return result
-
-
- async def aprint_sqlite_state(conn: anysqlite.Connection) -> str:
-     """
-     Print all tables and their rows in a pretty format suitable for inline snapshots.
-
-     Args:
-         conn: SQLite database connection
-
-     Returns:
-         Formatted string representation of the database state
+     Example output: 'Sun, 26 Oct 2025 12:34:56 GMT'
      """
-     cursor = await conn.cursor()
-
-     # Get all table names
-     await cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
-     tables = [row[0] for row in await cursor.fetchall()]
-
-     output_lines = []
-     output_lines.append("=" * 80)
-     output_lines.append("DATABASE SNAPSHOT")
-     output_lines.append("=" * 80)
-
-     for table_name in tables:
-         # Get column information
-         await cursor.execute(f"PRAGMA table_info({table_name})")
-         columns = await cursor.fetchall()
-         column_names = [col[1] for col in columns]
-         column_types = {col[1]: col[2] for col in columns}
-
-         # Get all rows
-         await cursor.execute(f"SELECT * FROM {table_name}")
-         rows = await cursor.fetchall()
-
-         output_lines.append("")
-         output_lines.append(f"TABLE: {table_name}")
-         output_lines.append("-" * 80)
-         output_lines.append(f"Rows: {len(rows)}")
-         output_lines.append("")
-
-         if not rows:
-             output_lines.append("  (empty)")
-             continue
-
-         # Format each row
-         for idx, row in enumerate(rows, 1):
-             output_lines.append(f"  Row {idx}:")
-
-             for col_name, value in zip(column_names, row):
-                 col_type = column_types[col_name]
-                 formatted_value = format_value(value, col_name, col_type)
-                 output_lines.append(f"    {col_name:15} = {formatted_value}")
-
-             if idx < len(rows):
-                 output_lines.append("")
-
-     output_lines.append("")
-     output_lines.append("=" * 80)
-
-     result = "\n".join(output_lines)
-     return result
-
-
- def format_value(value: Any, col_name: str, col_type: str) -> str:
-     """Format a value for display based on its type and column name."""
-
-     if value is None:
-         return "NULL"
-
-     # Handle BLOB columns
-     if col_type.upper() == "BLOB":
-         if isinstance(value, bytes):
-             # Try to decode as UTF-8 string first
-             try:
-                 decoded = value.decode("utf-8")
-                 # Check if it looks like JSON
-                 if decoded.strip().startswith("{") or decoded.strip().startswith("["):
-                     try:
-                         parsed = json.loads(decoded)
-                         return f"(JSON) {json.dumps(parsed, indent=2)}"
-                     except json.JSONDecodeError:
-                         pass
-                 # Show string if it's printable
-                 if all(32 <= ord(c) <= 126 or c in "\n\r\t" for c in decoded):
-                     return f"(str) '{decoded}'"
-             except UnicodeDecodeError:
-                 pass
-
-             # Show hex representation for binary data
-             hex_str = value.hex()
-             if len(hex_str) > 64:
-                 return f"(bytes) 0x{hex_str[:60]}... ({len(value)} bytes)"
-             return f"(bytes) 0x{hex_str} ({len(value)} bytes)"
-         return repr(value)
-
-     # Handle timestamps - ONLY show date, not the raw timestamp
-     if col_name.endswith("_at") and isinstance(value, (int, float)):
-         try:
-             dt = date.fromtimestamp(value)
-             return dt.isoformat()  # Changed: removed the timestamp prefix
-         except (ValueError, OSError):
-             return str(value)
-
-     # Handle TEXT columns
-     if col_type.upper() == "TEXT":
-         return f"'{value}'"
-
-     # Handle other types
-     return str(value)
+     return formatdate(timeval=None, localtime=False, usegmt=True)
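The two additions that close the file are equally self-contained; a short usage sketch, with the default `.cache/hishel` location taken straight from the code above:

```python
from hishel._utils import ensure_cache_dict, generate_http_date

# Creates .cache/hishel/ and a .gitignore containing "*" on first use,
# then returns the directory as a Path on every call.
cache_dir = ensure_cache_dict()
print(cache_dir)                               # .cache/hishel
print((cache_dir / ".gitignore").read_text())  # "# Automatically created by Hishel" then "*"

# RFC 1123 date string, suitable for a Date response header.
print(generate_http_date())                    # e.g. 'Sun, 26 Oct 2025 12:34:56 GMT'
```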