hammad-python 0.0.13__py3-none-any.whl → 0.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hammad_python-0.0.15.dist-info/METADATA +184 -0
- hammad_python-0.0.15.dist-info/RECORD +4 -0
- hammad/__init__.py +0 -180
- hammad/_core/__init__.py +0 -1
- hammad/_core/_utils/__init__.py +0 -4
- hammad/_core/_utils/_import_utils.py +0 -182
- hammad/ai/__init__.py +0 -59
- hammad/ai/_utils.py +0 -142
- hammad/ai/completions/__init__.py +0 -44
- hammad/ai/completions/client.py +0 -729
- hammad/ai/completions/create.py +0 -686
- hammad/ai/completions/types.py +0 -711
- hammad/ai/completions/utils.py +0 -374
- hammad/ai/embeddings/__init__.py +0 -35
- hammad/ai/embeddings/client/__init__.py +0 -1
- hammad/ai/embeddings/client/base_embeddings_client.py +0 -26
- hammad/ai/embeddings/client/fastembed_text_embeddings_client.py +0 -200
- hammad/ai/embeddings/client/litellm_embeddings_client.py +0 -288
- hammad/ai/embeddings/create.py +0 -159
- hammad/ai/embeddings/types.py +0 -69
- hammad/base/__init__.py +0 -35
- hammad/base/fields.py +0 -546
- hammad/base/model.py +0 -1078
- hammad/base/utils.py +0 -280
- hammad/cache/__init__.py +0 -48
- hammad/cache/base_cache.py +0 -181
- hammad/cache/cache.py +0 -169
- hammad/cache/decorators.py +0 -261
- hammad/cache/file_cache.py +0 -80
- hammad/cache/ttl_cache.py +0 -74
- hammad/cli/__init__.py +0 -33
- hammad/cli/animations.py +0 -604
- hammad/cli/plugins.py +0 -781
- hammad/cli/styles/__init__.py +0 -55
- hammad/cli/styles/settings.py +0 -139
- hammad/cli/styles/types.py +0 -358
- hammad/cli/styles/utils.py +0 -480
- hammad/configuration/__init__.py +0 -35
- hammad/configuration/configuration.py +0 -564
- hammad/data/__init__.py +0 -39
- hammad/data/collections/__init__.py +0 -34
- hammad/data/collections/base_collection.py +0 -58
- hammad/data/collections/collection.py +0 -452
- hammad/data/collections/searchable_collection.py +0 -556
- hammad/data/collections/vector_collection.py +0 -603
- hammad/data/databases/__init__.py +0 -21
- hammad/data/databases/database.py +0 -902
- hammad/json/__init__.py +0 -21
- hammad/json/converters.py +0 -152
- hammad/logging/__init__.py +0 -35
- hammad/logging/decorators.py +0 -834
- hammad/logging/logger.py +0 -954
- hammad/multimodal/__init__.py +0 -24
- hammad/multimodal/audio.py +0 -96
- hammad/multimodal/image.py +0 -80
- hammad/multithreading/__init__.py +0 -304
- hammad/py.typed +0 -0
- hammad/pydantic/__init__.py +0 -43
- hammad/pydantic/converters.py +0 -623
- hammad/pydantic/models/__init__.py +0 -28
- hammad/pydantic/models/arbitrary_model.py +0 -46
- hammad/pydantic/models/cacheable_model.py +0 -79
- hammad/pydantic/models/fast_model.py +0 -318
- hammad/pydantic/models/function_model.py +0 -176
- hammad/pydantic/models/subscriptable_model.py +0 -63
- hammad/text/__init__.py +0 -82
- hammad/text/converters.py +0 -723
- hammad/text/markdown.py +0 -131
- hammad/text/text.py +0 -1066
- hammad/types/__init__.py +0 -11
- hammad/types/file.py +0 -358
- hammad/typing/__init__.py +0 -407
- hammad/web/__init__.py +0 -43
- hammad/web/http/__init__.py +0 -1
- hammad/web/http/client.py +0 -944
- hammad/web/models.py +0 -245
- hammad/web/openapi/__init__.py +0 -0
- hammad/web/openapi/client.py +0 -740
- hammad/web/search/__init__.py +0 -1
- hammad/web/search/client.py +0 -988
- hammad/web/utils.py +0 -472
- hammad/yaml/__init__.py +0 -30
- hammad/yaml/converters.py +0 -19
- hammad_python-0.0.13.dist-info/METADATA +0 -38
- hammad_python-0.0.13.dist-info/RECORD +0 -85
- {hammad_python-0.0.13.dist-info → hammad_python-0.0.15.dist-info}/WHEEL +0 -0
- {hammad_python-0.0.13.dist-info → hammad_python-0.0.15.dist-info}/licenses/LICENSE +0 -0
hammad/base/utils.py
DELETED
@@ -1,280 +0,0 @@
|
|
1
|
-
"""hammad.base.utils"""
|
2
|
-
|
3
|
-
from functools import lru_cache
|
4
|
-
from typing import Any, Callable, Optional, Union, Tuple, Dict
|
5
|
-
|
6
|
-
from msgspec.structs import Struct
|
7
|
-
|
8
|
-
from .fields import FieldInfo, field, Field
|
9
|
-
from .model import Model
|
10
|
-
|
11
|
-
|
12
|
-
__all__ = (
|
13
|
-
"create_model",
|
14
|
-
"get_field_info",
|
15
|
-
"is_field",
|
16
|
-
"is_model",
|
17
|
-
"validator",
|
18
|
-
)
|
19
|
-
|
20
|
-
|
21
|
-
def create_model(
    __model_name: str,
    *,
    __base__: Optional[Union[type, Tuple[type, ...]]] = None,
    __module__: Optional[str] = None,
    __qualname__: Optional[str] = None,
    __doc__: Optional[str] = None,
    __validators__: Optional[Dict[str, Any]] = None,
    __config__: Optional[type] = None,
    **field_definitions: Any,
) -> type[Model]:
    """Create a Model dynamically with Pydantic-compatible interface.

    This function provides a drop-in replacement for pydantic.create_model()
    that creates Model classes instead of pydantic BaseModel classes.

    Args:
        __model_name: Name of the model class to create
        __base__: Base class(es) to inherit from. If None, uses Model.
            Can be a single class or tuple of classes.
        __module__: Module name for the created class
        __qualname__: Qualified name for the created class
        __doc__: Docstring for the created class
        __validators__: Dictionary of validators (for compatibility - not fully implemented)
        __config__: Configuration class (for compatibility - not fully implemented)
        **field_definitions: Field definitions as keyword arguments.
            Each can be:
            - A type annotation (e.g., str, int)
            - A tuple of (type, default_value)
            - A tuple of (type, Field(...))
            - A bare default value (the type is inferred from it)

    Returns:
        A new Model class with the specified fields

    Raises:
        ValueError: If both __base__ and __config__ are given, a base class
            is not a Model/msgspec.Struct subclass, a tuple definition is
            not a 2-tuple, or class creation itself fails.

    Examples:
        # Simple model with basic types
        User = create_model('User', name=str, age=int)

        # Model with defaults
        Config = create_model('Config',
                             host=(str, 'localhost'),
                             port=(int, 8080))

        # Model with field constraints
        Product = create_model('Product',
                              name=str,
                              price=(float, field(gt=0)),
                              tags=(List[str], field(default_factory=list)))

        # Model with custom base class
        class BaseEntity(Model):
            id: int
            created_at: str

        User = create_model('User',
                           name=str,
                           email=str,
                           __base__=BaseEntity)
    """
    # __base__ and __config__ are mutually exclusive (mirrors pydantic).
    if __base__ is not None and __config__ is not None:
        raise ValueError(
            "Cannot specify both '__base__' and '__config__' - "
            "use a base class with the desired configuration instead"
        )

    # Resolve the tuple of base classes, validating compatibility.
    if __base__ is None:
        bases: Tuple[type, ...] = (Model,)
    elif isinstance(__base__, tuple):
        for base in __base__:
            if not (issubclass(base, Model) or issubclass(base, Struct)):
                raise ValueError(
                    f"Base class {base} must be a subclass of Model or msgspec.Struct"
                )
        bases = __base__
    else:
        if not (issubclass(__base__, Model) or issubclass(__base__, Struct)):
            raise ValueError(
                f"Base class {__base__} must be a subclass of Model or msgspec.Struct"
            )
        bases = (__base__,)

    # Build class dictionary
    class_dict: Dict[str, Any] = {}
    annotations: Dict[str, Any] = {}

    # Set metadata
    if __doc__ is not None:
        class_dict["__doc__"] = __doc__
    if __module__ is not None:
        class_dict["__module__"] = __module__
    if __qualname__ is not None:
        class_dict["__qualname__"] = __qualname__

    # First pass: split definitions into required and optional fields so
    # required fields can be emitted first (msgspec ordering requirement).
    required_fields: Dict[str, Any] = {}
    optional_fields: Dict[str, Any] = {}

    for field_name, field_definition in field_definitions.items():
        if field_name.startswith("__") and field_name.endswith("__"):
            # Skip special attributes that were passed as field definitions
            continue

        if isinstance(field_definition, tuple):
            if len(field_definition) != 2:
                raise ValueError(
                    f"Field definition for '{field_name}' must be a 2-tuple of (type, default/Field)"
                )
            field_type, field_value = field_definition
            annotations[field_name] = field_type

            # Heuristic: treat the second element as a field descriptor when
            # it looks like one; otherwise wrap it as a plain default value.
            # NOTE(fix): the original also tested hasattr(field_value,
            # "__class__"), which is true for every object — removed as dead.
            if (
                "field" in field_value.__class__.__name__.lower()
                or hasattr(field_value, "default")
                or callable(getattr(field_value, "__call__", None))
            ):
                # It's a field descriptor
                optional_fields[field_name] = field_value
            else:
                # It's a default value - create a field with this default
                optional_fields[field_name] = field(default=field_value)
        elif isinstance(field_definition, type) or hasattr(
            field_definition, "__origin__"
        ):
            # A bare annotation (str, int, List[str], ...) -> required field.
            # NOTE(fix): the original tested hasattr(field_definition,
            # "__class__"), which is always true and made the default-value
            # branch below unreachable; plain defaults (e.g. port=8080) now
            # reach it as the docstring promises.
            annotations[field_name] = field_definition
            required_fields[field_name] = None
        else:
            # A bare default value without an annotation: infer its type.
            annotations[field_name] = type(field_definition)
            optional_fields[field_name] = field(default=field_definition)

    # Second pass: add fields in correct order (required first, then optional)
    # This ensures msgspec field ordering requirements are met
    for field_name, field_value in required_fields.items():
        if field_value is not None:
            class_dict[field_name] = field_value

    for field_name, field_value in optional_fields.items():
        class_dict[field_name] = field_value

    # Annotations must follow the same required-then-optional order.
    ordered_annotations: Dict[str, Any] = {}

    for field_name in required_fields:
        if field_name in annotations:
            ordered_annotations[field_name] = annotations[field_name]

    for field_name in optional_fields:
        if field_name in annotations:
            ordered_annotations[field_name] = annotations[field_name]

    class_dict["__annotations__"] = ordered_annotations

    # Handle validators (basic implementation for compatibility)
    if __validators__:
        # Store validators for potential future use
        class_dict["_validators"] = __validators__
        # Note: Full validator implementation would require more complex integration

    # Create the dynamic class
    try:
        DynamicModel = type(__model_name, bases, class_dict)
    except Exception as e:
        raise ValueError(f"Failed to create model '{__model_name}': {e}") from e

    return DynamicModel
|
202
|
-
|
203
|
-
|
204
|
-
def _extract_field_info(field: Any) -> Optional[FieldInfo]:
    """Uncached extraction of FieldInfo from a field descriptor."""
    if isinstance(field, tuple) and len(field) == 2:
        # (type, field_info) pair — only valid when the second element
        # really is a FieldInfo; otherwise fall through to None.
        _, field_info = field
        if isinstance(field_info, FieldInfo):
            return field_info
    elif hasattr(field, "_field_info"):
        return field._field_info
    elif hasattr(field, "field_info"):
        return field.field_info
    elif isinstance(field, Field):
        return field.field_info
    elif hasattr(field, "__class__") and field.__class__.__name__ == "FieldDescriptor":
        return field.field_info
    return None


@lru_cache(maxsize=None)
def _extract_field_info_cached(field: Any) -> Optional[FieldInfo]:
    """Cached variant; only reachable for hashable inputs."""
    return _extract_field_info(field)


def get_field_info(field: Any) -> Optional[FieldInfo]:
    """Extract FieldInfo from a field descriptor with caching.

    BUG FIX: the original applied @lru_cache directly to this function, so
    any unhashable input (e.g. a (type, dict) tuple, or a descriptor with a
    mutable default) raised TypeError instead of returning its FieldInfo.
    Hashable inputs still hit the cache; unhashable ones fall back to an
    uncached extraction.
    """
    try:
        return _extract_field_info_cached(field)
    except TypeError:
        # Unhashable argument — extract without caching.
        return _extract_field_info(field)
|
220
|
-
|
221
|
-
|
222
|
-
def is_field(field: Any) -> bool:
    """Return True when *field* carries extractable field metadata."""
    info = get_field_info(field)
    return info is not None
|
225
|
-
|
226
|
-
|
227
|
-
def is_model(model: Any) -> bool:
    """Check whether *model* is a Model class/instance, or duck-types as one.

    Accepts, in order of preference:
      1. an instance of Model,
      2. a Model subclass (the class object itself),
      3. anything exposing the msgspec struct surface plus the
         Model-specific API (model_copy/model_validate/model_to_pydantic),
      4. any msgspec Struct instance that also exposes model_dump.
    """
    # Direct instance of Model.
    if isinstance(model, Model):
        return True

    # A Model class (not instance).
    if isinstance(model, type) and issubclass(model, Model):
        return True

    # Duck typing: msgspec struct fields plus Model-specific methods.
    if (
        hasattr(model, "__struct_fields__")
        and hasattr(model, "model_dump")
        and hasattr(model, "model_copy")
        and hasattr(model, "model_validate")
        and hasattr(model, "model_to_pydantic")
    ):
        return True

    # Any msgspec Struct that also exposes model_dump counts.
    # NOTE(fix): the original wrapped this check in `except ImportError` —
    # dead code, since isinstance cannot raise ImportError and Struct is
    # imported unconditionally at module load. Removed the misleading handler.
    if isinstance(model, Struct) and hasattr(model, "model_dump"):
        return True

    return False
|
256
|
-
|
257
|
-
|
258
|
-
def validator(
    *fields: str, pre: bool = False, post: bool = False, always: bool = False
):
    """Mark a function as a validator for the named fields.

    The returned decorator attaches the validation metadata to the function
    as ``_validator_*`` attributes and hands the function back unchanged.

    Args:
        *fields: Field names to validate
        pre: Whether this is a pre-validator
        post: Whether this is a post-validator
        always: Whether to run even if the value is not set

    Returns:
        Decorator function
    """

    def _mark(func: Callable) -> Callable:
        # Stash the configuration on the function object for later discovery.
        func._validator_fields = fields
        func._validator_pre = pre
        func._validator_post = post
        func._validator_always = always
        return func

    return _mark
|
hammad/cache/__init__.py
DELETED
@@ -1,48 +0,0 @@
|
|
1
|
-
"""hammad.cache
|
2
|
-
|
3
|
-
Contains helpful resources for creating simple cache systems, and
|
4
|
-
decorators that implement "automatic" hashing & caching of function calls.
|
5
|
-
"""
|
6
|
-
|
7
|
-
from typing import TYPE_CHECKING
|
8
|
-
from .._core._utils._import_utils import _auto_create_getattr_loader
|
9
|
-
|
10
|
-
if TYPE_CHECKING:
|
11
|
-
from .base_cache import BaseCache, CacheParams, CacheReturn, CacheType
|
12
|
-
from .file_cache import FileCache
|
13
|
-
from .ttl_cache import TTLCache
|
14
|
-
from .cache import Cache, create_cache
|
15
|
-
from .decorators import (
|
16
|
-
cached,
|
17
|
-
auto_cached,
|
18
|
-
get_decorator_cache,
|
19
|
-
clear_decorator_cache,
|
20
|
-
)
|
21
|
-
|
22
|
-
|
23
|
-
__all__ = (
|
24
|
-
# hammad.cache.base_cache
|
25
|
-
"BaseCache",
|
26
|
-
"CacheParams",
|
27
|
-
"CacheReturn",
|
28
|
-
"CacheType",
|
29
|
-
# hammad.cache.file_cache
|
30
|
-
"FileCache",
|
31
|
-
# hammad.cache.ttl_cache
|
32
|
-
"TTLCache",
|
33
|
-
# hammad.cache.cache
|
34
|
-
"Cache",
|
35
|
-
"create_cache",
|
36
|
-
# hammad.cache.decorators
|
37
|
-
"cached",
|
38
|
-
"auto_cached",
|
39
|
-
"get_decorator_cache",
|
40
|
-
"clear_decorator_cache",
|
41
|
-
)
|
42
|
-
|
43
|
-
|
44
|
-
__getattr__ = _auto_create_getattr_loader(__all__)
|
45
|
-
|
46
|
-
|
47
|
-
def __dir__() -> list[str]:
|
48
|
-
return list(__all__)
|
hammad/cache/base_cache.py
DELETED
@@ -1,181 +0,0 @@
|
|
1
|
-
"""hammad.cache.base_cache"""
|
2
|
-
|
3
|
-
from dataclasses import dataclass
|
4
|
-
import hashlib
|
5
|
-
import inspect
|
6
|
-
from typing import Any, Literal, ParamSpec, TypeAlias, TypeVar, get_args
|
7
|
-
|
8
|
-
__all__ = (
|
9
|
-
"BaseCache",
|
10
|
-
"CacheType",
|
11
|
-
"CacheParams",
|
12
|
-
"CacheReturn",
|
13
|
-
)
|
14
|
-
|
15
|
-
|
16
|
-
CacheType: TypeAlias = Literal["ttl", "file"]
|
17
|
-
"""Type of caches that can be created using `hammad`.
|
18
|
-
|
19
|
-
- `"ttl"`: Time-to-live cache.
|
20
|
-
- `"file"`: File-based cache.
|
21
|
-
"""
|
22
|
-
|
23
|
-
CacheParams = ParamSpec("CacheParams")
|
24
|
-
"""Parameter specification for cache functions."""
|
25
|
-
|
26
|
-
CacheReturn = TypeVar("CacheReturn")
|
27
|
-
"""Return type for cache functions."""
|
28
|
-
|
29
|
-
|
30
|
-
@dataclass
|
31
|
-
class BaseCache:
|
32
|
-
"""Base class for all caches created using `hammad`."""
|
33
|
-
|
34
|
-
type: CacheType
|
35
|
-
"""Type of cache."""
|
36
|
-
|
37
|
-
def __post_init__(self) -> None:
|
38
|
-
"""Post-initialization hook."""
|
39
|
-
if self.type not in get_args(CacheType):
|
40
|
-
raise ValueError(f"Invalid cache type: {self.type}")
|
41
|
-
|
42
|
-
def __contains__(self, key: str) -> bool:
|
43
|
-
"""Check if key exists in cache."""
|
44
|
-
raise NotImplementedError("Subclasses must implement __contains__")
|
45
|
-
|
46
|
-
def __getitem__(self, key: str) -> Any:
|
47
|
-
"""Get value for key."""
|
48
|
-
raise NotImplementedError("Subclasses must implement __getitem__")
|
49
|
-
|
50
|
-
def __setitem__(self, key: str, value: Any) -> None:
|
51
|
-
"""Set value for key."""
|
52
|
-
raise NotImplementedError("Subclasses must implement __setitem__")
|
53
|
-
|
54
|
-
def get(self, key: str, default: Any = None) -> Any:
|
55
|
-
"""Get value with default if key doesn't exist."""
|
56
|
-
try:
|
57
|
-
return self[key]
|
58
|
-
except KeyError:
|
59
|
-
return default
|
60
|
-
|
61
|
-
def clear(self) -> None:
|
62
|
-
"""Clear all cached items."""
|
63
|
-
raise NotImplementedError("Subclasses must implement clear")
|
64
|
-
|
65
|
-
def make_hashable(self, obj: Any) -> str:
|
66
|
-
"""
|
67
|
-
Convert any object to a stable hash string.
|
68
|
-
|
69
|
-
Uses SHA-256 to generate consistent hash representations.
|
70
|
-
Handles nested structures recursively.
|
71
|
-
|
72
|
-
Args:
|
73
|
-
obj: Object to hash
|
74
|
-
|
75
|
-
Returns:
|
76
|
-
Hexadecimal hash string
|
77
|
-
"""
|
78
|
-
|
79
|
-
def _hash_obj(data: Any) -> str:
|
80
|
-
"""Internal recursive hashing function with memoization."""
|
81
|
-
# Handle None first
|
82
|
-
if data is None:
|
83
|
-
return "null"
|
84
|
-
|
85
|
-
if isinstance(data, bool):
|
86
|
-
return f"bool:{data}"
|
87
|
-
elif isinstance(data, int):
|
88
|
-
return f"int:{data}"
|
89
|
-
elif isinstance(data, float):
|
90
|
-
if data != data: # NaN
|
91
|
-
return "float:nan"
|
92
|
-
elif data == float("inf"):
|
93
|
-
return "float:inf"
|
94
|
-
elif data == float("-inf"):
|
95
|
-
return "float:-inf"
|
96
|
-
else:
|
97
|
-
return f"float:{data}"
|
98
|
-
elif isinstance(data, str):
|
99
|
-
return f"str:{data}"
|
100
|
-
elif isinstance(data, bytes):
|
101
|
-
return f"bytes:{data.hex()}"
|
102
|
-
|
103
|
-
# Handle collections
|
104
|
-
elif isinstance(data, (list, tuple)):
|
105
|
-
collection_type = "list" if isinstance(data, list) else "tuple"
|
106
|
-
items = [_hash_obj(item) for item in data]
|
107
|
-
return f"{collection_type}:[{','.join(items)}]"
|
108
|
-
|
109
|
-
elif isinstance(data, set):
|
110
|
-
try:
|
111
|
-
sorted_items = sorted(data, key=lambda x: str(x))
|
112
|
-
except TypeError:
|
113
|
-
sorted_items = sorted(
|
114
|
-
data, key=lambda x: (type(x).__name__, str(x))
|
115
|
-
)
|
116
|
-
items = [_hash_obj(item) for item in sorted_items]
|
117
|
-
return f"set:{{{','.join(items)}}}"
|
118
|
-
|
119
|
-
elif isinstance(data, dict):
|
120
|
-
try:
|
121
|
-
sorted_items = sorted(data.items(), key=lambda x: str(x[0]))
|
122
|
-
except TypeError:
|
123
|
-
# Fallback for non-comparable keys
|
124
|
-
sorted_items = sorted(
|
125
|
-
data.items(), key=lambda x: (type(x[0]).__name__, str(x[0]))
|
126
|
-
)
|
127
|
-
pairs = [f"{_hash_obj(k)}:{_hash_obj(v)}" for k, v in sorted_items]
|
128
|
-
return f"dict:{{{','.join(pairs)}}}"
|
129
|
-
|
130
|
-
elif isinstance(data, type):
|
131
|
-
module = getattr(data, "__module__", "builtins")
|
132
|
-
qualname = getattr(data, "__qualname__", data.__name__)
|
133
|
-
return f"type:{module}.{qualname}"
|
134
|
-
|
135
|
-
elif callable(data):
|
136
|
-
module = getattr(data, "__module__", "unknown")
|
137
|
-
qualname = getattr(
|
138
|
-
data, "__qualname__", getattr(data, "__name__", "unknown_callable")
|
139
|
-
)
|
140
|
-
|
141
|
-
try:
|
142
|
-
source = inspect.getsource(data)
|
143
|
-
normalized_source = " ".join(source.split())
|
144
|
-
return f"callable:{module}.{qualname}:{hash(normalized_source)}"
|
145
|
-
except (OSError, TypeError, IndentationError):
|
146
|
-
return f"callable:{module}.{qualname}"
|
147
|
-
|
148
|
-
elif hasattr(data, "__dict__"):
|
149
|
-
class_info = (
|
150
|
-
f"{data.__class__.__module__}.{data.__class__.__qualname__}"
|
151
|
-
)
|
152
|
-
obj_dict = {"__class__": class_info, **data.__dict__}
|
153
|
-
return f"object:{_hash_obj(obj_dict)}"
|
154
|
-
|
155
|
-
elif hasattr(data, "__slots__"):
|
156
|
-
class_info = (
|
157
|
-
f"{data.__class__.__module__}.{data.__class__.__qualname__}"
|
158
|
-
)
|
159
|
-
slot_dict = {
|
160
|
-
slot: getattr(data, slot, None)
|
161
|
-
for slot in data.__slots__
|
162
|
-
if hasattr(data, slot)
|
163
|
-
}
|
164
|
-
obj_dict = {"__class__": class_info, **slot_dict}
|
165
|
-
return f"slotted_object:{_hash_obj(obj_dict)}"
|
166
|
-
|
167
|
-
else:
|
168
|
-
try:
|
169
|
-
repr_str = repr(data)
|
170
|
-
return f"repr:{type(data).__name__}:{repr_str}"
|
171
|
-
except Exception:
|
172
|
-
# Ultimate fallback
|
173
|
-
return f"unknown:{type(data).__name__}:{id(data)}"
|
174
|
-
|
175
|
-
# Generate the hash representation
|
176
|
-
hash_representation = _hash_obj(obj)
|
177
|
-
|
178
|
-
# Create final SHA-256 hash
|
179
|
-
return hashlib.sha256(
|
180
|
-
hash_representation.encode("utf-8", errors="surrogatepass")
|
181
|
-
).hexdigest()
|
hammad/cache/cache.py
DELETED
@@ -1,169 +0,0 @@
|
|
1
|
-
"""hammad.cache.cache"""
|
2
|
-
|
3
|
-
from typing import (
|
4
|
-
overload,
|
5
|
-
TYPE_CHECKING,
|
6
|
-
Literal,
|
7
|
-
Optional,
|
8
|
-
Any,
|
9
|
-
Union,
|
10
|
-
get_args,
|
11
|
-
)
|
12
|
-
from pathlib import Path
|
13
|
-
|
14
|
-
from .base_cache import BaseCache, CacheType
|
15
|
-
from .file_cache import FileCache, FileCacheLocation
|
16
|
-
from .ttl_cache import TTLCache
|
17
|
-
|
18
|
-
|
19
|
-
__all__ = ("Cache", "create_cache")
|
20
|
-
|
21
|
-
|
22
|
-
class Cache:
    """
    Helper factory class for creating cache instances.

    Example usage:
        ttl_cache = Cache(type="ttl", maxsize=100, ttl=60)
        file_cache = Cache(type="file", location="cache.pkl")
    """

    @overload
    def __new__(
        cls,
        type: Literal["ttl"] = "ttl",
        *,
        maxsize: Optional[int] = None,
        ttl: Optional[int] = None,
    ) -> "TTLCache":
        """
        Create a new TTL (Time To Live) cache instance.

        Args:
            type: The type of cache to create.
            maxsize: The maximum number of items to store in the cache.
            ttl: The time to live for items in the cache.

        Returns:
            A new TTL cache instance.
        """
        ...

    @overload
    def __new__(
        cls, type: Literal["file"], *, location: Optional["FileCacheLocation"] = None
    ) -> "FileCache":
        """
        Create a new file cache instance.

        Args:
            type: The type of cache to create.
            location: The directory to store the cache files.

        Returns:
            A new disk cache instance.
        """
        ...

    def __new__(cls, type: "CacheType" = "ttl", **kwargs: Any) -> "BaseCache":
        """
        Create a new cache instance.
        """
        # Dispatch on the requested cache type, forwarding only the keyword
        # arguments that the target constructor accepts (None values dropped).
        if type == "ttl":
            from .ttl_cache import TTLCache

            allowed = {"maxsize", "ttl"}
            params = {
                name: value
                for name, value in kwargs.items()
                if name in allowed and value is not None
            }
            return TTLCache(type=type, **params)

        if type == "file":
            from .file_cache import FileCache

            allowed = {"location"}
            params = {
                name: value
                for name, value in kwargs.items()
                if name in allowed and value is not None
            }
            return FileCache(type=type, **params)

        supported_types_tuple = get_args(CacheType)
        raise ValueError(
            f"Unsupported cache type: {type}. Supported types are: {supported_types_tuple}"
        )
|
97
|
-
|
98
|
-
|
99
|
-
# Factory
|
100
|
-
|
101
|
-
|
102
|
-
@overload
def create_cache(
    type: Literal["ttl"], *, maxsize: int = 128, ttl: Optional[float] = None
) -> "TTLCache": ...


@overload
def create_cache(
    type: Literal["file"],
    *,
    location: Optional["FileCacheLocation"] = None,
    maxsize: int = 128,
) -> "FileCache": ...


@overload
def create_cache(type: "CacheType", **kwargs: Any) -> "BaseCache": ...


def create_cache(type: "CacheType", **kwargs: Any) -> "BaseCache":
    """
    Factory function to create cache instances of different types.

    Args:
        type: The type of cache to create. Can be "ttl" or "file".
        **kwargs: Additional keyword arguments specific to the cache type.

    Returns:
        A cache instance of the specified type.

    Raises:
        ValueError: If an unsupported cache type is provided.
        TypeError: If unexpected keyword arguments are passed for the
            selected cache type.

    Examples:
        ```python
        # Create a TTL cache with custom settings
        ttl_cache = create_cache("ttl", maxsize=256, ttl=300)

        # Create a file cache with custom location
        file_cache = create_cache("file", location="/tmp/my_cache", maxsize=1000)
        ```
    """
    if type == "ttl":
        from .ttl_cache import TTLCache

        maxsize = kwargs.pop("maxsize", 128)
        ttl = kwargs.pop("ttl", None)
        if kwargs:
            raise TypeError(
                f"Unexpected keyword arguments for TTL cache: {list(kwargs.keys())}"
            )
        return TTLCache(maxsize=maxsize, ttl=ttl)
    elif type == "file":
        from .file_cache import FileCache

        location = kwargs.pop("location", None)
        # FileCache doesn't support maxsize, so we just ignore it
        kwargs.pop("maxsize", None)
        if kwargs:
            raise TypeError(
                f"Unexpected keyword arguments for file cache: {list(kwargs.keys())}"
            )
        return FileCache(location=location, type=type)
    else:
        # BUG FIX: the original called get_args("CacheType") with a string
        # literal, which always returns an empty tuple; pass the alias itself
        # so the error message actually lists the valid types.
        valid_types = get_args(CacheType)
        raise ValueError(
            f"Unsupported cache type: {type}. Valid types are: {valid_types}"
        )
|