hammad-python 0.0.14__py3-none-any.whl → 0.0.15__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- hammad_python-0.0.15.dist-info/METADATA +184 -0
- hammad_python-0.0.15.dist-info/RECORD +4 -0
- hammad/__init__.py +0 -1
- hammad/ai/__init__.py +0 -1
- hammad/ai/_utils.py +0 -142
- hammad/ai/completions/__init__.py +0 -45
- hammad/ai/completions/client.py +0 -684
- hammad/ai/completions/create.py +0 -710
- hammad/ai/completions/settings.py +0 -100
- hammad/ai/completions/types.py +0 -792
- hammad/ai/completions/utils.py +0 -486
- hammad/ai/embeddings/__init__.py +0 -35
- hammad/ai/embeddings/client/__init__.py +0 -1
- hammad/ai/embeddings/client/base_embeddings_client.py +0 -26
- hammad/ai/embeddings/client/fastembed_text_embeddings_client.py +0 -200
- hammad/ai/embeddings/client/litellm_embeddings_client.py +0 -288
- hammad/ai/embeddings/create.py +0 -159
- hammad/ai/embeddings/types.py +0 -69
- hammad/cache/__init__.py +0 -40
- hammad/cache/base_cache.py +0 -181
- hammad/cache/cache.py +0 -169
- hammad/cache/decorators.py +0 -261
- hammad/cache/file_cache.py +0 -80
- hammad/cache/ttl_cache.py +0 -74
- hammad/cli/__init__.py +0 -33
- hammad/cli/animations.py +0 -573
- hammad/cli/plugins.py +0 -781
- hammad/cli/styles/__init__.py +0 -55
- hammad/cli/styles/settings.py +0 -139
- hammad/cli/styles/types.py +0 -358
- hammad/cli/styles/utils.py +0 -480
- hammad/data/__init__.py +0 -56
- hammad/data/collections/__init__.py +0 -34
- hammad/data/collections/base_collection.py +0 -58
- hammad/data/collections/collection.py +0 -452
- hammad/data/collections/searchable_collection.py +0 -556
- hammad/data/collections/vector_collection.py +0 -596
- hammad/data/configurations/__init__.py +0 -35
- hammad/data/configurations/configuration.py +0 -564
- hammad/data/databases/__init__.py +0 -21
- hammad/data/databases/database.py +0 -902
- hammad/data/models/__init__.py +0 -44
- hammad/data/models/base/__init__.py +0 -35
- hammad/data/models/base/fields.py +0 -546
- hammad/data/models/base/model.py +0 -1078
- hammad/data/models/base/utils.py +0 -280
- hammad/data/models/pydantic/__init__.py +0 -55
- hammad/data/models/pydantic/converters.py +0 -632
- hammad/data/models/pydantic/models/__init__.py +0 -28
- hammad/data/models/pydantic/models/arbitrary_model.py +0 -46
- hammad/data/models/pydantic/models/cacheable_model.py +0 -79
- hammad/data/models/pydantic/models/fast_model.py +0 -318
- hammad/data/models/pydantic/models/function_model.py +0 -176
- hammad/data/models/pydantic/models/subscriptable_model.py +0 -63
- hammad/data/types/__init__.py +0 -41
- hammad/data/types/file.py +0 -358
- hammad/data/types/multimodal/__init__.py +0 -24
- hammad/data/types/multimodal/audio.py +0 -96
- hammad/data/types/multimodal/image.py +0 -80
- hammad/data/types/text.py +0 -1066
- hammad/formatting/__init__.py +0 -38
- hammad/formatting/json/__init__.py +0 -21
- hammad/formatting/json/converters.py +0 -152
- hammad/formatting/text/__init__.py +0 -63
- hammad/formatting/text/converters.py +0 -723
- hammad/formatting/text/markdown.py +0 -131
- hammad/formatting/yaml/__init__.py +0 -26
- hammad/formatting/yaml/converters.py +0 -5
- hammad/logging/__init__.py +0 -35
- hammad/logging/decorators.py +0 -834
- hammad/logging/logger.py +0 -954
- hammad/mcp/__init__.py +0 -50
- hammad/mcp/client/__init__.py +0 -1
- hammad/mcp/client/client.py +0 -523
- hammad/mcp/client/client_service.py +0 -393
- hammad/mcp/client/settings.py +0 -178
- hammad/mcp/servers/__init__.py +0 -1
- hammad/mcp/servers/launcher.py +0 -1161
- hammad/performance/__init__.py +0 -36
- hammad/performance/imports.py +0 -231
- hammad/performance/runtime/__init__.py +0 -32
- hammad/performance/runtime/decorators.py +0 -142
- hammad/performance/runtime/run.py +0 -299
- hammad/py.typed +0 -0
- hammad/service/__init__.py +0 -49
- hammad/service/create.py +0 -532
- hammad/service/decorators.py +0 -285
- hammad/typing/__init__.py +0 -407
- hammad/web/__init__.py +0 -43
- hammad/web/http/__init__.py +0 -1
- hammad/web/http/client.py +0 -944
- hammad/web/models.py +0 -245
- hammad/web/openapi/__init__.py +0 -1
- hammad/web/openapi/client.py +0 -740
- hammad/web/search/__init__.py +0 -1
- hammad/web/search/client.py +0 -988
- hammad/web/utils.py +0 -472
- hammad_python-0.0.14.dist-info/METADATA +0 -70
- hammad_python-0.0.14.dist-info/RECORD +0 -99
- {hammad_python-0.0.14.dist-info → hammad_python-0.0.15.dist-info}/WHEEL +0 -0
- {hammad_python-0.0.14.dist-info → hammad_python-0.0.15.dist-info}/licenses/LICENSE +0 -0
hammad/ai/embeddings/types.py
DELETED
@@ -1,69 +0,0 @@
-"""hammad.ai.embeddings.types"""
-
-from typing import List, Literal
-
-from pydantic import BaseModel
-
-__all__ = (
-    "Embedding",
-    "EmbeddingUsage",
-    "EmbeddingResponse",
-)
-
-
-class Embedding(BaseModel):
-    embedding: List[float]
-    """The embedding vector, which is a list of floats.
-
-    The length of vector depends on the model as listed in the
-    [embedding guide](https://platform.openai.com/docs/guides/embeddings).
-    """
-
-    index: int
-    """The index of the embedding in the list of embeddings."""
-
-    object: Literal["embedding"]
-    """The object type, which is always "embedding"."""
-
-    @property
-    def dimensions(self) -> int:
-        """The dimensions of the embedding."""
-        return len(self.embedding)
-
-
-class EmbeddingUsage(BaseModel):
-    """Usage statistics for embedding requests."""
-
-    prompt_tokens: int
-    """The number of tokens used by the prompt."""
-
-    total_tokens: int
-    """The total number of tokens used by the request."""
-
-
-class EmbeddingResponse(BaseModel):
-    data: List[Embedding]
-    """The list of embeddings generated by the model."""
-
-    model: str
-    """The name of the model used to generate the embedding."""
-
-    object: Literal["list"]
-    """The object type, which is always "list"."""
-
-    usage: EmbeddingUsage
-    """The usage information for the request."""
-
-    @property
-    def dimensions(self) -> int:
-        """The dimensions of the embedding."""
-        return len(self.data[0].embedding)
-
-    def __str__(self) -> str:
-        return (
-            "Embedding Response:\n"
-            f">>> Model: {self.model}\n"
-            f">>> Dimensions: {self.dimensions}\n"
-            f">>> Usage: {self.usage}\n"
-            f">>> Number of Generated Embeddings: {len(self.data)}\n"
-        )
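For reference, a minimal sketch of how the removed models compose, following the definitions in the hunk above. The import path only resolves against the 0.0.14 wheel (the module is gone in 0.0.15), and the vector values, model name, and token counts are illustrative placeholders.

    # Runs against hammad-python 0.0.14 only; 0.0.15 removes this module.
    from hammad.ai.embeddings.types import Embedding, EmbeddingResponse, EmbeddingUsage

    response = EmbeddingResponse(
        data=[Embedding(embedding=[0.1, 0.2, 0.3], index=0, object="embedding")],
        model="text-embedding-3-small",  # placeholder model name
        object="list",
        usage=EmbeddingUsage(prompt_tokens=5, total_tokens=5),  # placeholder counts
    )

    print(response.dimensions)  # 3, the length of the first embedding vector
    print(response)             # multi-line summary built by __str__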
hammad/cache/__init__.py
DELETED
@@ -1,40 +0,0 @@
-"""hammad.cache"""
-
-from typing import TYPE_CHECKING
-from ..performance.imports import create_getattr_importer
-
-
-if TYPE_CHECKING:
-    from .base_cache import BaseCache, CacheParams, CacheReturn, CacheType
-    from .file_cache import FileCache, FileCacheLocation
-    from .ttl_cache import TTLCache
-    from .cache import Cache, create_cache
-    from .decorators import cached, auto_cached, clear_decorator_cache
-
-
-__all__ = (
-    # hammad.performance.cache.base_cache
-    "BaseCache",
-    "CacheParams",
-    "CacheReturn",
-    "CacheType",
-    # hammad.performance.cache.file_cache
-    "FileCache",
-    "FileCacheLocation",
-    # hammad.performance.cache.ttl_cache
-    "TTLCache",
-    # hammad.performance.cache.cache
-    "Cache",
-    "create_cache",
-    # hammad.performance.cache.decorators
-    "cached",
-    "auto_cached",
-    "clear_decorator_cache",
-)
-
-
-__getattr__ = create_getattr_importer(__all__)
-
-
-def __dir__() -> list[str]:
-    return sorted(__all__)
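The deleted `__init__` defers every import until first attribute access: the concrete symbols are only imported under `TYPE_CHECKING`, and `create_getattr_importer` (defined in the also-removed hammad/performance/imports.py) installs a module-level `__getattr__`. A rough, hypothetical stand-in for that mechanism, in PEP 562 style, might look like the sketch below; the name-to-submodule mapping is assumed for illustration and is not taken from the real helper.

    # Hypothetical sketch of the lazy-export mechanism; not the real
    # create_getattr_importer, whose implementation lived in the (also deleted)
    # hammad/performance/imports.py.
    import importlib
    from typing import Any, Callable, Iterable

    def _lazy_importer(
        exports: Iterable[str], locations: dict[str, str], package: str
    ) -> Callable[[str], Any]:
        exported = set(exports)

        def __getattr__(name: str) -> Any:
            # Import the submodule that owns `name` only when it is first requested.
            if name in exported and name in locations:
                module = importlib.import_module(locations[name], package=package)
                return getattr(module, name)
            raise AttributeError(f"module {package!r} has no attribute {name!r}")

        return __getattr__

    # In an __init__.py one would then write, for example:
    # __getattr__ = _lazy_importer(__all__, {"TTLCache": ".ttl_cache"}, __name__)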
hammad/cache/base_cache.py
DELETED
@@ -1,181 +0,0 @@
-"""hammad.cache.base_cache"""
-
-from dataclasses import dataclass
-import hashlib
-import inspect
-from typing import Any, Literal, ParamSpec, TypeAlias, TypeVar, get_args
-
-__all__ = (
-    "BaseCache",
-    "CacheType",
-    "CacheParams",
-    "CacheReturn",
-)
-
-
-CacheType: TypeAlias = Literal["ttl", "file"]
-"""Type of caches that can be created using `hammad`.
-
-- `"ttl"`: Time-to-live cache.
-- `"file"`: File-based cache.
-"""
-
-CacheParams = ParamSpec("CacheParams")
-"""Parameter specification for cache functions."""
-
-CacheReturn = TypeVar("CacheReturn")
-"""Return type for cache functions."""
-
-
-@dataclass
-class BaseCache:
-    """Base class for all caches created using `hammad`."""
-
-    type: CacheType
-    """Type of cache."""
-
-    def __post_init__(self) -> None:
-        """Post-initialization hook."""
-        if self.type not in get_args(CacheType):
-            raise ValueError(f"Invalid cache type: {self.type}")
-
-    def __contains__(self, key: str) -> bool:
-        """Check if key exists in cache."""
-        raise NotImplementedError("Subclasses must implement __contains__")
-
-    def __getitem__(self, key: str) -> Any:
-        """Get value for key."""
-        raise NotImplementedError("Subclasses must implement __getitem__")
-
-    def __setitem__(self, key: str, value: Any) -> None:
-        """Set value for key."""
-        raise NotImplementedError("Subclasses must implement __setitem__")
-
-    def get(self, key: str, default: Any = None) -> Any:
-        """Get value with default if key doesn't exist."""
-        try:
-            return self[key]
-        except KeyError:
-            return default
-
-    def clear(self) -> None:
-        """Clear all cached items."""
-        raise NotImplementedError("Subclasses must implement clear")
-
-    def make_hashable(self, obj: Any) -> str:
-        """
-        Convert any object to a stable hash string.
-
-        Uses SHA-256 to generate consistent hash representations.
-        Handles nested structures recursively.
-
-        Args:
-            obj: Object to hash
-
-        Returns:
-            Hexadecimal hash string
-        """
-
-        def _hash_obj(data: Any) -> str:
-            """Internal recursive hashing function with memoization."""
-            # Handle None first
-            if data is None:
-                return "null"
-
-            if isinstance(data, bool):
-                return f"bool:{data}"
-            elif isinstance(data, int):
-                return f"int:{data}"
-            elif isinstance(data, float):
-                if data != data:  # NaN
-                    return "float:nan"
-                elif data == float("inf"):
-                    return "float:inf"
-                elif data == float("-inf"):
-                    return "float:-inf"
-                else:
-                    return f"float:{data}"
-            elif isinstance(data, str):
-                return f"str:{data}"
-            elif isinstance(data, bytes):
-                return f"bytes:{data.hex()}"
-
-            # Handle collections
-            elif isinstance(data, (list, tuple)):
-                collection_type = "list" if isinstance(data, list) else "tuple"
-                items = [_hash_obj(item) for item in data]
-                return f"{collection_type}:[{','.join(items)}]"
-
-            elif isinstance(data, set):
-                try:
-                    sorted_items = sorted(data, key=lambda x: str(x))
-                except TypeError:
-                    sorted_items = sorted(
-                        data, key=lambda x: (type(x).__name__, str(x))
-                    )
-                items = [_hash_obj(item) for item in sorted_items]
-                return f"set:{{{','.join(items)}}}"
-
-            elif isinstance(data, dict):
-                try:
-                    sorted_items = sorted(data.items(), key=lambda x: str(x[0]))
-                except TypeError:
-                    # Fallback for non-comparable keys
-                    sorted_items = sorted(
-                        data.items(), key=lambda x: (type(x[0]).__name__, str(x[0]))
-                    )
-                pairs = [f"{_hash_obj(k)}:{_hash_obj(v)}" for k, v in sorted_items]
-                return f"dict:{{{','.join(pairs)}}}"
-
-            elif isinstance(data, type):
-                module = getattr(data, "__module__", "builtins")
-                qualname = getattr(data, "__qualname__", data.__name__)
-                return f"type:{module}.{qualname}"
-
-            elif callable(data):
-                module = getattr(data, "__module__", "unknown")
-                qualname = getattr(
-                    data, "__qualname__", getattr(data, "__name__", "unknown_callable")
-                )
-
-                try:
-                    source = inspect.getsource(data)
-                    normalized_source = " ".join(source.split())
-                    return f"callable:{module}.{qualname}:{hash(normalized_source)}"
-                except (OSError, TypeError, IndentationError):
-                    return f"callable:{module}.{qualname}"
-
-            elif hasattr(data, "__dict__"):
-                class_info = (
-                    f"{data.__class__.__module__}.{data.__class__.__qualname__}"
-                )
-                obj_dict = {"__class__": class_info, **data.__dict__}
-                return f"object:{_hash_obj(obj_dict)}"
-
-            elif hasattr(data, "__slots__"):
-                class_info = (
-                    f"{data.__class__.__module__}.{data.__class__.__qualname__}"
-                )
-                slot_dict = {
-                    slot: getattr(data, slot, None)
-                    for slot in data.__slots__
-                    if hasattr(data, slot)
-                }
-                obj_dict = {"__class__": class_info, **slot_dict}
-                return f"slotted_object:{_hash_obj(obj_dict)}"
-
-            else:
-                try:
-                    repr_str = repr(data)
-                    return f"repr:{type(data).__name__}:{repr_str}"
-                except Exception:
-                    # Ultimate fallback
-                    return f"unknown:{type(data).__name__}:{id(data)}"
-
-        # Generate the hash representation
-        hash_representation = _hash_obj(obj)
-
-        # Create final SHA-256 hash
-        return hashlib.sha256(
-            hash_representation.encode("utf-8", errors="surrogatepass")
-        ).hexdigest()
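To show how the interface above is meant to be used, here is a toy dict-backed subclass paired with `make_hashable` for key generation. The package's actual concrete caches were `TTLCache` and `FileCache` (also deleted in this release); this class and its sample values are purely illustrative, and the import assumes the 0.0.14 wheel.

    # Illustrative only: a throwaway in-memory subclass of the deleted BaseCache.
    from dataclasses import dataclass, field
    from typing import Any

    from hammad.cache.base_cache import BaseCache, CacheType  # present in 0.0.14 only


    @dataclass
    class DictCache(BaseCache):
        type: CacheType = "ttl"  # any valid CacheType satisfies __post_init__
        _store: dict = field(default_factory=dict)

        def __contains__(self, key: str) -> bool:
            return key in self._store

        def __getitem__(self, key: str) -> Any:
            return self._store[key]

        def __setitem__(self, key: str, value: Any) -> None:
            self._store[key] = value

        def clear(self) -> None:
            self._store.clear()


    cache = DictCache()
    # make_hashable turns an arbitrary (even nested) object into a stable SHA-256 key.
    key = cache.make_hashable({"model": "some-model", "temperature": 0.2})
    cache[key] = "cached result"
    assert cache.get(key) == "cached result"
    assert cache.get("missing", default="fallback") == "fallback"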
hammad/cache/cache.py
DELETED
@@ -1,169 +0,0 @@
-"""hammad.cache.cache"""
-
-from typing import (
-    overload,
-    TYPE_CHECKING,
-    Literal,
-    Optional,
-    Any,
-    Union,
-    get_args,
-)
-from pathlib import Path
-
-from .base_cache import BaseCache, CacheType
-from .file_cache import FileCache, FileCacheLocation
-from .ttl_cache import TTLCache
-
-
-__all__ = ("Cache", "create_cache")
-
-
-class Cache:
-    """
-    Helper factory class for creating cache instances.
-
-    Example usage:
-        ttl_cache = Cache(type="ttl", maxsize=100, ttl=60)
-        file_cache = Cache(type="file", location="cache.pkl")
-    """
-
-    @overload
-    def __new__(
-        cls,
-        type: Literal["ttl"] = "ttl",
-        *,
-        maxsize: Optional[int] = None,
-        ttl: Optional[int] = None,
-    ) -> "TTLCache":
-        """
-        Create a new TTL (Time To Live) cache instance.
-
-        Args:
-            type: The type of cache to create.
-            maxsize: The maximum number of items to store in the cache.
-            ttl: The time to live for items in the cache.
-
-        Returns:
-            A new TTL cache instance.
-        """
-        ...
-
-    @overload
-    def __new__(
-        cls, type: Literal["file"], *, location: Optional["FileCacheLocation"] = None
-    ) -> "FileCache":
-        """
-        Create a new file cache instance.
-
-        Args:
-            type: The type of cache to create.
-            location: The directory to store the cache files.
-
-        Returns:
-            A new disk cache instance.
-        """
-        ...
-
-    def __new__(cls, type: "CacheType" = "ttl", **kwargs: Any) -> "BaseCache":
-        """
-        Create a new cache instance.
-        """
-        if type == "ttl":
-            from .ttl_cache import TTLCache
-
-            valid_ttl_params = {"maxsize", "ttl"}
-            ttl_constructor_kwargs = {
-                k: v
-                for k, v in kwargs.items()
-                if k in valid_ttl_params and v is not None
-            }
-            return TTLCache(type=type, **ttl_constructor_kwargs)
-        elif type == "file":
-            from .file_cache import FileCache
-
-            valid_file_params = {"location"}
-            file_constructor_kwargs = {
-                k: v
-                for k, v in kwargs.items()
-                if k in valid_file_params and v is not None
-            }
-            return FileCache(type=type, **file_constructor_kwargs)
-        else:
-            supported_types_tuple = get_args(CacheType)
-            raise ValueError(
-                f"Unsupported cache type: {type}. Supported types are: {supported_types_tuple}"
-            )
-
-
-# Factory
-
-
-@overload
-def create_cache(
-    type: Literal["ttl"], *, maxsize: int = 128, ttl: Optional[float] = None
-) -> "TTLCache": ...
-
-
-@overload
-def create_cache(
-    type: Literal["file"],
-    *,
-    location: Optional["FileCacheLocation"] = None,
-    maxsize: int = 128,
-) -> "FileCache": ...
-
-
-@overload
-def create_cache(type: "CacheType", **kwargs: Any) -> "BaseCache": ...
-
-
-def create_cache(type: "CacheType", **kwargs: Any) -> "BaseCache":
-    """
-    Factory function to create cache instances of different types.
-
-    Args:
-        type: The type of cache to create. Can be "ttl" or "file".
-        **kwargs: Additional keyword arguments specific to the cache type.
-
-    Returns:
-        A cache instance of the specified type.
-
-    Raises:
-        ValueError: If an unsupported cache type is provided.
-
-    Examples:
-        ```python
-        # Create a TTL cache with custom settings
-        ttl_cache = create_cache("ttl", maxsize=256, ttl=300)
-
-        # Create a file cache with custom location
-        file_cache = create_cache("file", location="/tmp/my_cache", maxsize=1000)
-        ```
-    """
-    if type == "ttl":
-        from .ttl_cache import TTLCache
-
-        maxsize = kwargs.pop("maxsize", 128)
-        ttl = kwargs.pop("ttl", None)
-        if kwargs:
-            raise TypeError(
-                f"Unexpected keyword arguments for TTL cache: {list(kwargs.keys())}"
-            )
-        return TTLCache(maxsize=maxsize, ttl=ttl)
-    elif type == "file":
-        from .file_cache import FileCache
-
-        location = kwargs.pop("location", None)
-        # FileCache doesn't support maxsize, so we just ignore it
-        kwargs.pop("maxsize", None)
-        if kwargs:
-            raise TypeError(
-                f"Unexpected keyword arguments for file cache: {list(kwargs.keys())}"
-            )
-        return FileCache(location=location, type=type)
-    else:
-        valid_types = get_args("CacheType")
-        raise ValueError(
-            f"Unsupported cache type: {type}. Valid types are: {valid_types}"
        )
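Taken from the factory's own docstring examples, the removed API was used roughly as follows. This only resolves against the 0.0.14 wheel, and it assumes `TTLCache` implements the `BaseCache` item protocol shown earlier.

    from hammad.cache import Cache, create_cache  # removed in 0.0.15

    ttl_cache = Cache(type="ttl", maxsize=100, ttl=60)       # dispatches to TTLCache
    file_cache = create_cache("file", location="cache.pkl")  # dispatches to FileCache

    ttl_cache["answer"] = 42
    assert ttl_cache.get("answer") == 42

One quirk in the deleted code: the final else branch of create_cache calls get_args("CacheType") on a string literal rather than on the CacheType alias, so the "Valid types are" message would render an empty tuple, whereas Cache.__new__ uses get_args(CacheType) and reports the real options.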