PyperCache 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
PyperCache/__init__.py ADDED
@@ -0,0 +1,25 @@
1
+ """PyperCache — API response cache with pluggable storage backends.
2
+
3
+ Primary public surface::
4
+
5
+ from PyperCache import Cache, CacheRecord, RequestLogger
6
+ from PyperCache.query import JsonInjester
7
+
8
+ Utility sub-packages are importable directly when needed::
9
+
10
+ from PyperCache.utils import DataSerializer, PickleStore
11
+ from PyperCache.storage import get_storage_mechanism
12
+ """
13
+
14
+ from PyperCache.core.cache import Cache
15
+ from PyperCache.core.cache_record import CacheRecord
16
+ from PyperCache.core.request_logger import LogRecord, RequestLogger
17
+
18
+ __version__ = "0.1.0"
19
+
20
+ __all__ = [
21
+ "Cache",
22
+ "CacheRecord",
23
+ "LogRecord",
24
+ "RequestLogger",
25
+ ]
@@ -0,0 +1,7 @@
1
+ """PyperCache.core — Cache, CacheRecord, and RequestLogger."""
2
+
3
+ from PyperCache.core.cache import Cache
4
+ from PyperCache.core.cache_record import CacheRecord
5
+ from PyperCache.core.request_logger import LogRecord, RequestLogger
6
+
7
+ __all__ = ["Cache", "CacheRecord", "LogRecord", "RequestLogger"]
@@ -0,0 +1,126 @@
1
+ """Cache: persistent caching of API responses with optional TTL and type casting."""
2
+
3
+ import math
4
+ import time
5
+ from typing import Any, Optional
6
+
7
+ from PyperCache.core.cache_record import CacheRecord
8
+ from PyperCache.storage.factory import get_storage_mechanism
9
+ from PyperCache.utils.patterns import ClassRepository
10
+ from PyperCache.utils.sentinel import UNSET
11
+
12
+
13
+ CACHE_FILE = 'api-cache.pkl'
14
+
15
+
16
class Cache:
    """Manages persistent caching of API responses with optional TTL and type casting."""

    def __init__(self, filepath: Optional[str] = None):
        """
        Initialize the cache with a storage backend determined by file extension.

        Args:
            filepath: Path to the cache file. Defaults to CACHE_FILE ('api-cache.pkl').
        """
        self.classes = ClassRepository()

        filepath = filepath or CACHE_FILE
        # The concrete backend (pickle, JSON, ...) is selected by the
        # storage factory from the file extension.
        StorageClass = get_storage_mechanism(filepath)
        self.storage = StorageClass(filepath)

    @staticmethod
    def cached(cls):
        """Class decorator that registers a class as cache-compatible.

        Usage::

            @Cache.cached
            class MyAPIResponse:
                ...
        """
        ClassRepository().add_class(cls)
        return cls

    @staticmethod
    def _cast_name(cast: Optional[type]) -> Optional[str]:
        """Return the serialisable name for a cast type, or None.

        Builtins keep their short name (e.g. 'dict') for compatibility with
        earlier cache files; other classes are stored by fully-qualified
        name so they can be re-imported on load.
        """
        if not isinstance(cast, type):
            return None
        if getattr(cast, "__module__", None) == "builtins":
            return cast.__name__
        return f"{cast.__module__}.{cast.__name__}"

    def has(self, key: str) -> bool:
        """Return True if a cache record exists for the given key."""
        return key in self.storage.records

    def is_data_fresh(self, key: str) -> bool:
        """Return True if a non-stale cache record exists for the given key."""
        if not self.has(key):
            return False
        return not self.storage.get_record(key).is_data_stale

    def get(self, key: str) -> CacheRecord:
        """Retrieve the raw CacheRecord for the given key.

        Raises:
            KeyError: If no record exists for the key.
        """
        if not self.has(key):
            raise KeyError(f'No cache found for {key!r}!')
        return self.storage.get_record(key)

    def get_object(self, key: str, default_value: Any = UNSET) -> object:
        """Retrieve the cached value for a key, cast to its registered type.

        Args:
            key: Cache key to look up.
            default_value: Returned if the key is missing. Raises KeyError if omitted.

        Raises:
            KeyError: If the key is missing and no default was provided.
            AttributeError: If the record has no cast type registered.
        """
        if not self.has(key):
            if default_value is UNSET:
                raise KeyError(f'No cache found for {key!r}!')
            return default_value

        record = self.storage.get_record(key)
        if not record.should_convert_type:
            raise AttributeError(f'No cast type provided for {key!r}!')
        # Use the shared instantiation helper when converting types so
        # generics and apimodels are hydrated correctly. Imported locally
        # to avoid a circular import at module load time.
        from ..utils.typing_cast import instantiate_type

        return instantiate_type(record.cast, record.data)

    def update(self, key: str, data: dict):
        """Update the data payload of an existing cache record.

        Raises:
            KeyError: If no record exists for the key.
        """
        if not self.has(key):
            raise KeyError(f'No cache found for {key!r}!')
        self.storage.update_record(key, data)

    def store(self, key: str, data: dict, expiry: float = math.inf, cast: Optional[type] = None):
        """Create or overwrite a cache record.

        Args:
            key: Unique identifier for this cache entry.
            data: The payload to cache.
            expiry: Seconds until the record is considered stale. Defaults to no expiry.
            cast: Optional type to register for deserialising the cached data.
        """
        # math.inf is not JSON-serialisable, so encode it as a sentinel string.
        serialisable_expiry = 'math.inf' if expiry == math.inf else expiry

        new_record = {
            'cast': self._cast_name(cast),
            'expiry': serialisable_expiry,
            'timestamp': time.time(),
            'data': data,
        }
        self.storage.store_record(key, new_record)

    def completely_erase_cache(self):
        """Permanently delete all records from the cache storage."""
        self.storage.erase_everything()
@@ -0,0 +1,217 @@
1
+ """CacheRecord: a single cached API response with expiry and optional type casting."""
2
+
3
+ import inspect
4
+ import math
5
+ import time
6
+ from typing import Callable, Optional
7
+
8
+ from PyperCache.query import JsonInjester
9
+ from PyperCache.utils.patterns import ClassRepository
10
+ from PyperCache.utils.sentinel import UNSET
11
+
12
+
13
# Maps primitive type name strings to their corresponding Python types.
PRIMITIVE_TYPES_MAP = {
    'bool': bool,
    'bytearray': bytearray,
    'bytes': bytes,
    'complex': complex,
    'dict': dict,
    'float': float,
    'frozenset': frozenset,
    'int': int,
    'list': list,
    'object': object,
    'set': set,
    'str': str,
    'tuple': tuple,
    'type': type,
}


def look_up_class(class_name: str) -> type:
    """Resolve a class by name, checking primitives first then the class repository.

    Args:
        class_name: The name of the class to look up.

    Returns:
        The resolved class type.

    Raises:
        NameError: If the class name is not registered.
        TypeError: If the resolved object is not a class.
    """
    # Primitive types (short names) map directly to builtins.
    # (The original code performed this membership test twice; once suffices.)
    if class_name in PRIMITIVE_TYPES_MAP:
        return PRIMITIVE_TYPES_MAP[class_name]

    # Try resolving via the repository next (supports short and fqnames).
    classes = ClassRepository()
    cls = classes.get_class(class_name)
    if cls is not None and inspect.isclass(cls):
        return cls

    # If class_name looks like a fully-qualified path, try importing it.
    if '.' in class_name:
        module_name, _, attr = class_name.rpartition('.')
        try:
            module = __import__(module_name, fromlist=[attr])
            obj = getattr(module, attr)
            if inspect.isclass(obj):
                return obj
        except Exception:
            # Best-effort fallback: any import/attribute failure simply
            # drops through to the NameError below.
            pass

    raise NameError(f'{class_name!r} is not defined')
70
+
71
+
72
class CacheRecord:
    """Represents a single cached API response with expiry and optional type casting.

    Records are stored and serialized as plain dicts, with ``math.inf``
    represented as the string ``'math.inf'`` to support JSON-safe serialization.

    The :attr:`query` property exposes the cached data through a
    :class:`~PyperCache.query.JsonInjester`, enabling dotted-path access
    and filter queries without altering the underlying data::

        record = cache.get_record("org:acme")
        record.query.get("meta.total_users")
        record.query.get("users?role=admin")
        record.query.has("users")

    Args:
        record: Raw dict with keys ``timestamp``, ``expiry``, ``data``,
            and optionally ``cast``.
        class_resolver: Optional callable used to resolve the cast type name to
            an actual type. Defaults to :func:`look_up_class`, which
            consults :class:`ClassRepository`. Pass a custom resolver
            in tests to avoid touching the global registry.
    """

    def __init__(
        self,
        record: dict,
        class_resolver: Optional[Callable[[str], type]] = None,
    ) -> None:
        # Keep the raw dict so as_dict() can serialise without rebuilding,
        # and so update() can mutate it in place.
        self.__record_dict = record
        self.__class_resolver = class_resolver or look_up_class

        self.timestamp: float = record['timestamp']
        self.data: dict = record['data']
        # 'math.inf' is the JSON-safe string encoding of an infinite expiry.
        self.expiry: float = math.inf if record['expiry'] == 'math.inf' else record['expiry']
        self.cast_str: Optional[str] = record.get('cast')
        self.__cast: object = UNSET  # Unresolved until first access.
        self.__query: Optional[JsonInjester] = None  # Built lazily on first access.

    @staticmethod
    def from_data(data: dict, expiry: float = math.inf, cast: Optional[type] = None) -> 'CacheRecord':
        """Construct a new CacheRecord from raw data.

        Args:
            data: The payload to cache.
            expiry: Seconds until the record is considered stale. Defaults to never.
            cast: Optional type to cast the data to on retrieval.

        Returns:
            A new CacheRecord instance.
        """
        # Store short builtin names (e.g. 'dict') for primitive types to
        # preserve compatibility with earlier cache files; otherwise store
        # fully-qualified name for user classes.
        if isinstance(cast, type) and cast.__module__ == 'builtins':
            cast_str = cast.__name__
        else:
            cast_str = (f"{cast.__module__}.{cast.__name__}" if isinstance(cast, type) else None)

        record = {
            'cast': cast_str,
            'expiry': expiry,
            'timestamp': time.time(),
            'data': data,
        }
        return CacheRecord(record)

    # ------------------------------------------------------------------
    # Properties
    # ------------------------------------------------------------------

    @property
    def cast(self) -> Optional[type]:
        """Lazily resolve the cast type from its stored class name string.

        The resolved type (or None) is memoised in ``self.__cast`` so the
        resolver is consulted at most once per record.
        """
        if self.__cast is UNSET:
            self.__cast = (
                self.__class_resolver(self.cast_str)
                if isinstance(self.cast_str, str)
                else None
            )
        return self.__cast

    @property
    def query(self) -> JsonInjester:
        """A :class:`~PyperCache.query.JsonInjester` view over :attr:`data`.

        Built once on first access and reused. Supports dotted-path lookup,
        existence checks, filtered list queries, and default values — all
        without modifying the underlying cached data.

        Example::

            record.query.get("meta.total_users")  # nested key
            record.query.get("users?role=admin")  # filter list
            record.query.get("users?dept.name=Engineering")
            record.query.has("meta.total_users")  # existence check
            record.query.get("missing_key", default_value=0)

        Note: if :attr:`data` has been replaced via :meth:`update`, call
        ``record.query`` again — the injester is rebuilt automatically because
        :meth:`update` clears the cached instance.
        """
        if self.__query is None:
            self.__query = JsonInjester(self.data)
        return self.__query

    @property
    def should_convert_type(self) -> bool:
        """True if a valid cast type is set and data should be converted on retrieval."""
        return isinstance(self.cast, type)

    @property
    def is_data_stale(self) -> bool:
        """True if the record has lived past its expiry window."""
        return time.time() > self.timestamp + self.expiry

    # ------------------------------------------------------------------
    # Mutation
    # ------------------------------------------------------------------

    def update(self, data: dict):
        """Replace the cached data and refresh the timestamp.

        Also invalidates the cached :attr:`query` injester so the next access
        reflects the new data.
        """
        self.data = data
        self.timestamp = time.time()
        # Keep the backing dict in sync so as_dict() reflects the update.
        self.__record_dict['data'] = data
        self.__record_dict['timestamp'] = self.timestamp
        self.__query = None  # invalidate so next .query access wraps new data

    # ------------------------------------------------------------------
    # Serialisation
    # ------------------------------------------------------------------

    def as_dict(self) -> dict:
        """Serialize the record to a plain dict, encoding infinity as 'math.inf'."""
        return {
            k: ('math.inf' if v == math.inf else v)
            for k, v in self.__record_dict.items()
        }

    def __repr__(self) -> str:
        label = 'data_stale' if self.is_data_stale else 'data_fresh'
        return f'<{self.cast_str}::{label}>'
@@ -0,0 +1,107 @@
1
+ """RequestLogger: thread-safe API request log with JSONL append-mode writes."""
2
+
3
+ from datetime import datetime
4
+ import threading
5
+ from typing import List
6
+ from pathlib import Path
7
+ import time
8
+ import json
9
+
10
+ from PyperCache.utils.fs import ensure_dirs_exist
11
+
12
+
13
+ LOG_FILENAME = "api_logfile.log"
14
+
15
+
16
class LogRecord:
    """A single API request log entry wrapping a raw record dict."""

    def __init__(self, record: dict) -> None:
        # Keep the raw dict around so serialisation is lossless.
        self._record_dict = record
        self.timestamp: float = record["timestamp"]
        uri, status = record["uri"], int(record["status"])
        self.data: dict = {"uri": uri, "status": status}

    def as_dict(self) -> dict:
        """Return the original record dict unchanged."""
        return self._record_dict

    def __repr__(self) -> str:
        when = datetime.fromtimestamp(self.timestamp)
        stamp = when.strftime("%d-%m-%Y %I:%M:%S,%f %p")
        return f"{stamp} - {self.data!r}"
32
+
33
+
34
class RequestLogger:
    """Persists API request logs to a JSON Lines file and provides
    thread-safe read/write access.

    File format: one JSON object per line (JSONL). Each call to ``log()``
    appends a single line — an O(1) operation regardless of how many records
    the file already contains. Legacy files written as a JSON array are
    detected on load and migrated transparently.
    """

    def __init__(self, filepath: str | None = None) -> None:
        self.lock = threading.Lock()
        self.filepath: str = filepath or LOG_FILENAME
        ensure_dirs_exist(self.filepath)
        path = Path(self.filepath)
        path.touch(exist_ok=True)
        self.records: List[LogRecord] = self._load(path)

    def log(self, uri: str, status: int) -> None:
        """Append a new request record to the log file (O(1) per write)."""
        new_record = {"uri": uri, "status": status, "timestamp": time.time()}
        log_record = LogRecord(new_record)
        with self.lock:
            self.records.append(log_record)
            self._append(log_record)

    def get_logs_from_last_seconds(self, seconds: int = 60) -> List[LogRecord]:
        """Return records from the last *seconds* seconds, sorted oldest-first."""
        cutoff = time.time() - seconds
        # Snapshot under the lock so a concurrent log() cannot mutate the
        # list while we filter it (matches the thread-safety contract above).
        with self.lock:
            snapshot = list(self.records)
        recent = [log for log in snapshot if log.timestamp >= cutoff]
        return sorted(recent, key=lambda log: log.timestamp)

    def as_list(self) -> list[dict]:
        """Return all records as their raw dicts (thread-safe snapshot)."""
        with self.lock:
            return [r.as_dict() for r in self.records]

    def _append(self, record: LogRecord) -> None:
        """Write a single record as one JSON line (must be called under self.lock)."""
        # Explicit UTF-8 so non-ASCII URIs round-trip regardless of locale.
        with open(self.filepath, "a", encoding="utf-8") as fp:
            fp.write(json.dumps(record.as_dict()))
            fp.write("\n")

    @staticmethod
    def _load(path: Path) -> List[LogRecord]:
        """Parse records from *path*, handling both JSONL and legacy JSON-array format."""
        content = path.read_text(encoding="utf-8").strip()
        if not content:
            return []

        # Preferred path: one JSON object per non-empty line (JSONL).
        try:
            records: list[dict] = []
            for line in content.splitlines():
                line = line.strip()
                if line:
                    parsed = json.loads(line)
                    if not isinstance(parsed, dict):
                        raise ValueError("Expected a JSON object per line.")
                    records.append(parsed)
            return [LogRecord(r) for r in records]
        except (json.JSONDecodeError, ValueError):
            pass

        # Legacy path: whole file is a single JSON array; migrate to JSONL.
        try:
            records = json.loads(content)
            if isinstance(records, list):
                log_records = [LogRecord(r) for r in records if isinstance(r, dict)]
                with open(path, "w", encoding="utf-8") as fp:
                    for lr in log_records:
                        fp.write(json.dumps(lr.as_dict()))
                        fp.write("\n")
                return log_records
        except json.JSONDecodeError:
            pass

        # Unparseable content: treat as an empty log rather than crashing.
        return []
@@ -0,0 +1,49 @@
1
+ """Small `@apimodel` decorator for simple API models.
2
+
3
+ This module provides a light-weight decorator that:
4
+ - registers the class with `ClassRepository` (short name and fqname)
5
+ - injects a constructor that accepts a raw dict and hydrates annotated
6
+ fields (using `instantiate_type` for nested types)
7
+ - provides `from_dict` and `as_dict` helpers
8
+ """
9
+ from __future__ import annotations
10
+
11
+ from typing import Any
12
+
13
+ from ..utils.patterns import ClassRepository
14
+ from ..query.json_injester import JsonInjester
15
+ from ..utils.typing_cast import instantiate_type
16
+
17
+
18
def apimodel(cls: type) -> type:
    """Decorator that makes a simple model from annotated fields.

    The generated constructor accepts a single positional ``data`` dict.
    Registered classes expose ``from_dict`` and ``as_dict`` for symmetry
    with other parts of the codebase.
    """
    ClassRepository().add_class(cls)

    fields = getattr(cls, "__annotations__", {})

    def __init__(self, data: dict) -> None:
        injester = JsonInjester(data)
        # Stash the raw payload under a fixed name so as_dict() can
        # return it verbatim later.
        object.__setattr__(self, "_Initial__Data", data)
        for name, annotation in fields.items():
            raw_value = injester.get(name, default_value=None)
            setattr(self, name, instantiate_type(annotation, raw_value))

    def as_dict(self) -> dict:
        return getattr(self, "_Initial__Data")

    @classmethod
    def from_dict(klass, data: dict) -> Any:
        return klass(data)

    cls.__init__ = __init__
    cls.as_dict = as_dict
    cls.from_dict = from_dict
    return cls
PyperCache/py.typed ADDED
@@ -0,0 +1 @@
1
+ # Marker file for PEP 561
@@ -0,0 +1,10 @@
1
+ """
2
+ Query submodule: provides dictionary object searching utilities.
3
+ """
4
+
5
+
6
+ from .json_injester import JsonInjester
7
+
8
+ __all__ = [
9
+ "JsonInjester"
10
+ ]