hammad-python 0.0.13__py3-none-any.whl → 0.0.15__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- hammad_python-0.0.15.dist-info/METADATA +184 -0
- hammad_python-0.0.15.dist-info/RECORD +4 -0
- hammad/__init__.py +0 -180
- hammad/_core/__init__.py +0 -1
- hammad/_core/_utils/__init__.py +0 -4
- hammad/_core/_utils/_import_utils.py +0 -182
- hammad/ai/__init__.py +0 -59
- hammad/ai/_utils.py +0 -142
- hammad/ai/completions/__init__.py +0 -44
- hammad/ai/completions/client.py +0 -729
- hammad/ai/completions/create.py +0 -686
- hammad/ai/completions/types.py +0 -711
- hammad/ai/completions/utils.py +0 -374
- hammad/ai/embeddings/__init__.py +0 -35
- hammad/ai/embeddings/client/__init__.py +0 -1
- hammad/ai/embeddings/client/base_embeddings_client.py +0 -26
- hammad/ai/embeddings/client/fastembed_text_embeddings_client.py +0 -200
- hammad/ai/embeddings/client/litellm_embeddings_client.py +0 -288
- hammad/ai/embeddings/create.py +0 -159
- hammad/ai/embeddings/types.py +0 -69
- hammad/base/__init__.py +0 -35
- hammad/base/fields.py +0 -546
- hammad/base/model.py +0 -1078
- hammad/base/utils.py +0 -280
- hammad/cache/__init__.py +0 -48
- hammad/cache/base_cache.py +0 -181
- hammad/cache/cache.py +0 -169
- hammad/cache/decorators.py +0 -261
- hammad/cache/file_cache.py +0 -80
- hammad/cache/ttl_cache.py +0 -74
- hammad/cli/__init__.py +0 -33
- hammad/cli/animations.py +0 -604
- hammad/cli/plugins.py +0 -781
- hammad/cli/styles/__init__.py +0 -55
- hammad/cli/styles/settings.py +0 -139
- hammad/cli/styles/types.py +0 -358
- hammad/cli/styles/utils.py +0 -480
- hammad/configuration/__init__.py +0 -35
- hammad/configuration/configuration.py +0 -564
- hammad/data/__init__.py +0 -39
- hammad/data/collections/__init__.py +0 -34
- hammad/data/collections/base_collection.py +0 -58
- hammad/data/collections/collection.py +0 -452
- hammad/data/collections/searchable_collection.py +0 -556
- hammad/data/collections/vector_collection.py +0 -603
- hammad/data/databases/__init__.py +0 -21
- hammad/data/databases/database.py +0 -902
- hammad/json/__init__.py +0 -21
- hammad/json/converters.py +0 -152
- hammad/logging/__init__.py +0 -35
- hammad/logging/decorators.py +0 -834
- hammad/logging/logger.py +0 -954
- hammad/multimodal/__init__.py +0 -24
- hammad/multimodal/audio.py +0 -96
- hammad/multimodal/image.py +0 -80
- hammad/multithreading/__init__.py +0 -304
- hammad/py.typed +0 -0
- hammad/pydantic/__init__.py +0 -43
- hammad/pydantic/converters.py +0 -623
- hammad/pydantic/models/__init__.py +0 -28
- hammad/pydantic/models/arbitrary_model.py +0 -46
- hammad/pydantic/models/cacheable_model.py +0 -79
- hammad/pydantic/models/fast_model.py +0 -318
- hammad/pydantic/models/function_model.py +0 -176
- hammad/pydantic/models/subscriptable_model.py +0 -63
- hammad/text/__init__.py +0 -82
- hammad/text/converters.py +0 -723
- hammad/text/markdown.py +0 -131
- hammad/text/text.py +0 -1066
- hammad/types/__init__.py +0 -11
- hammad/types/file.py +0 -358
- hammad/typing/__init__.py +0 -407
- hammad/web/__init__.py +0 -43
- hammad/web/http/__init__.py +0 -1
- hammad/web/http/client.py +0 -944
- hammad/web/models.py +0 -245
- hammad/web/openapi/__init__.py +0 -0
- hammad/web/openapi/client.py +0 -740
- hammad/web/search/__init__.py +0 -1
- hammad/web/search/client.py +0 -988
- hammad/web/utils.py +0 -472
- hammad/yaml/__init__.py +0 -30
- hammad/yaml/converters.py +0 -19
- hammad_python-0.0.13.dist-info/METADATA +0 -38
- hammad_python-0.0.13.dist-info/RECORD +0 -85
- {hammad_python-0.0.13.dist-info → hammad_python-0.0.15.dist-info}/WHEEL +0 -0
- {hammad_python-0.0.13.dist-info → hammad_python-0.0.15.dist-info}/licenses/LICENSE +0 -0
hammad/ai/_utils.py
DELETED
@@ -1,142 +0,0 @@
-"""hammad.ai._utils
-
-Shared internal utilities for the `hammad.ai` extension."""
-
-from typing import Any, Optional, Sequence
-
-__all__ = (
-    "get_instructor",
-    "get_litellm",
-    "get_fastembed",
-    "get_fastembed_text_embedding_model",
-)
-
-
-# ------------------------------------------------------------
-# INSTRUCTOR
-# ------------------------------------------------------------
-
-
-INSTRUCTOR_MODULE = None
-"""Library level singleton for the `instructor` module."""
-
-
-def get_instructor():
-    """Get the instructor module."""
-    global INSTRUCTOR_MODULE
-
-    if INSTRUCTOR_MODULE is None:
-        try:
-            import instructor
-
-            INSTRUCTOR_MODULE = instructor
-        except ImportError:
-            raise ImportError(
-                "instructor is not installed. Please install it with `pip install hammad-python[ai]`"
-            )
-
-    return INSTRUCTOR_MODULE
-
-
-# ------------------------------------------------------------
-# LITELLM
-# ------------------------------------------------------------
-
-
-LITELLM_MODULE = None
-"""Library level singleton for the `litellm` module."""
-
-
-def get_litellm():
-    """Get the litellm module."""
-    global LITELLM_MODULE
-    if LITELLM_MODULE is None:
-        try:
-            import litellm
-
-            litellm.drop_params = True
-            litellm.modify_params = True
-            LITELLM_MODULE = litellm
-        except ImportError:
-            raise ImportError(
-                "litellm is not installed. Please install it with `pip install hammad-python[ai]`"
-            )
-
-    return LITELLM_MODULE
-
-
-# ------------------------------------------------------------
-# FASTEMBED
-# ------------------------------------------------------------
-
-
-FASTEMBED_MODULE = None
-"""Library level singleton for the `fastembed` module."""
-
-
-def get_fastembed():
-    """Get the fastembed module."""
-    global FASTEMBED_MODULE
-    if FASTEMBED_MODULE is None:
-        try:
-            import fastembed
-
-            FASTEMBED_MODULE = fastembed
-        except ImportError:
-            raise ImportError(
-                "fastembed is not installed. Please install it with `pip install hammad-python[ai]`"
-            )
-
-    return FASTEMBED_MODULE
-
-
-FASTEMBED_LOADED_TEXT_EMBEDDING_MODELS: dict = {}
-
-
-def get_fastembed_text_embedding_model(
-    model: str,
-    cache_dir: Optional[str] = None,
-    threads: Optional[int] = None,
-    providers: Optional[Sequence[Any]] = None,
-    cuda: bool = False,
-    device_ids: Optional[list[int]] = None,
-    lazy_load: bool = False,
-):
-    """Initializes a fastembed model instance for a given
-    model name using a global library level singleton.
-
-    NOTE: Custom models are not supported yet.
-
-    Args:
-        model (str) : The model name to load.
-        cache_dir (Optional[str]) : The directory to cache the model in.
-        threads (Optional[int]) : The number of threads to use for the model.
-        providers (Optional[Sequence[Any]]) : The ONNX providers to use for the model.
-        cuda (bool) : Whether to use CUDA for the model.
-        device_ids (Optional[list[int]]) : The device IDs to use for the model.
-        lazy_load (bool) : Whether to lazy load the model.
-
-    Returns:
-        fastembed.TextEmbedding : The loaded fastembed model instance.
-    """
-    global FASTEMBED_LOADED_TEXT_EMBEDDING_MODELS
-
-    if model not in FASTEMBED_LOADED_TEXT_EMBEDDING_MODELS:
-        fastembed_module = get_fastembed()
-
-        try:
-            embedding_model = fastembed_module.TextEmbedding(
-                model_name=model,
-                cache_dir=cache_dir,
-                threads=threads,
-                providers=providers,
-                cuda=cuda,
-                device_ids=device_ids,
-                lazy_load=lazy_load,
-            )
-        except Exception as e:
-            raise e
-
-        FASTEMBED_LOADED_TEXT_EMBEDDING_MODELS[model] = embedding_model
-
-    return FASTEMBED_LOADED_TEXT_EMBEDDING_MODELS[model]
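
The deleted module above wraps its optional dependencies (`instructor`, `litellm`, `fastembed`) in lazy-import singleton getters: the heavy import only runs on first use, and a missing extra fails with an install hint for `hammad-python[ai]`. Below is a minimal, self-contained sketch of that pattern using `litellm` as the example; the names are illustrative, not the hammad-python API.

```python
# Sketch of the lazy-import singleton pattern used by the deleted helpers.
# `get_litellm` here is illustrative only; it is not the hammad-python API.
from types import ModuleType
from typing import Optional

_LITELLM: Optional[ModuleType] = None  # module-level cache, populated on first call


def get_litellm() -> ModuleType:
    """Import `litellm` on first use and return the cached module afterwards."""
    global _LITELLM
    if _LITELLM is None:
        try:
            import litellm  # deferred so importing the base package stays cheap
        except ImportError as exc:
            raise ImportError(
                "litellm is not installed. Install it with `pip install hammad-python[ai]`"
            ) from exc
        litellm.drop_params = True    # drop params a provider does not support instead of erroring
        litellm.modify_params = True  # let litellm adapt params per provider
        _LITELLM = litellm
    return _LITELLM
```

The same structure repeats for each optional dependency, and `get_fastembed_text_embedding_model` adds a second layer of caching keyed by model name so a `TextEmbedding` instance is only constructed once per process.
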
hammad/ai/completions/__init__.py
DELETED
@@ -1,44 +0,0 @@
-"""hammad.ai.completions
-
-Contains types and model like objects for working with language model
-completions."""
-
-from typing import TYPE_CHECKING
-from ..._core._utils._import_utils import _auto_create_getattr_loader
-
-if TYPE_CHECKING:
-    from .client import CompletionsClient
-    from .types import (
-        Completion,
-        CompletionChunk,
-        CompletionStream,
-        AsyncCompletionStream,
-        CompletionsInputParam,
-        CompletionsModelName,
-        CompletionsOutputType,
-    )
-    from .create import create_completion, async_create_completion
-
-
-__all__ = (
-    # hammad.ai.completions.client
-    "CompletionsClient",
-    # hammad.ai.completions.types
-    "Completion",
-    "CompletionChunk",
-    "CompletionStream",
-    "AsyncCompletionStream",
-    "CompletionsInputParam",
-    "CompletionsModelName",
-    "CompletionsOutputType",
-    # hammad.ai.completions.create
-    "create_completion",
-    "async_create_completion",
-)
-
-
-__getattr__ = _auto_create_getattr_loader(__all__)
-
-
-def __dir__() -> list[str]:
-    return list(__all__)
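
This deleted `__init__.py` delegates attribute access to `_auto_create_getattr_loader` (defined in the also-removed `hammad/_core/_utils/_import_utils.py`, whose body is not part of this hunk), so the names in `__all__` are only imported when first accessed via a module-level `__getattr__` (PEP 562). The sketch below is only a guess at the general shape of such a loader: it uses an explicit name-to-submodule map rather than whatever inference the real helper performs from `__all__` alone, and every name in it is hypothetical.

```python
# Hypothetical sketch of a PEP 562-style lazy attribute loader; the real
# _auto_create_getattr_loader is not shown in this diff, so the signature and
# the explicit export map below are assumptions.
from importlib import import_module
from typing import Any, Callable, Dict


def make_lazy_getattr(package: str, exports: Dict[str, str]) -> Callable[[str], Any]:
    """Build a module-level __getattr__ that imports submodules on first access.

    `exports` maps a public name to the submodule (relative to `package`) that
    defines it, e.g. {"CompletionsClient": ".client"}.
    """
    cache: Dict[str, Any] = {}

    def __getattr__(name: str) -> Any:
        if name in cache:
            return cache[name]
        if name not in exports:
            raise AttributeError(f"module {package!r} has no attribute {name!r}")
        module = import_module(exports[name], package)  # import the submodule lazily
        cache[name] = getattr(module, name)             # memoize the resolved attribute
        return cache[name]

    return __getattr__


# Usage inside an __init__.py (hypothetical):
# __getattr__ = make_lazy_getattr(__name__, {"CompletionsClient": ".client"})
```

Combined with the `if TYPE_CHECKING:` imports above, this keeps `import hammad.ai.completions` lightweight while still giving type checkers and `dir()` the full public surface.
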