hammad-python 0.0.11__py3-none-any.whl → 0.0.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hammad/__init__.py +169 -56
- hammad/_core/__init__.py +1 -0
- hammad/_core/_utils/__init__.py +4 -0
- hammad/_core/_utils/_import_utils.py +182 -0
- hammad/ai/__init__.py +59 -0
- hammad/ai/_utils.py +142 -0
- hammad/ai/completions/__init__.py +44 -0
- hammad/ai/completions/client.py +729 -0
- hammad/ai/completions/create.py +686 -0
- hammad/ai/completions/types.py +711 -0
- hammad/ai/completions/utils.py +374 -0
- hammad/ai/embeddings/__init__.py +35 -0
- hammad/ai/embeddings/client/__init__.py +1 -0
- hammad/ai/embeddings/client/base_embeddings_client.py +26 -0
- hammad/ai/embeddings/client/fastembed_text_embeddings_client.py +200 -0
- hammad/ai/embeddings/client/litellm_embeddings_client.py +288 -0
- hammad/ai/embeddings/create.py +159 -0
- hammad/ai/embeddings/types.py +69 -0
- hammad/base/__init__.py +35 -0
- hammad/{based → base}/fields.py +23 -23
- hammad/{based → base}/model.py +124 -14
- hammad/base/utils.py +280 -0
- hammad/cache/__init__.py +30 -12
- hammad/cache/base_cache.py +181 -0
- hammad/cache/cache.py +169 -0
- hammad/cache/decorators.py +261 -0
- hammad/cache/file_cache.py +80 -0
- hammad/cache/ttl_cache.py +74 -0
- hammad/cli/__init__.py +10 -2
- hammad/cli/{styles/animations.py → animations.py} +79 -23
- hammad/cli/{plugins/__init__.py → plugins.py} +85 -90
- hammad/cli/styles/__init__.py +50 -0
- hammad/cli/styles/settings.py +4 -0
- hammad/configuration/__init__.py +35 -0
- hammad/{data/types/files → configuration}/configuration.py +96 -7
- hammad/data/__init__.py +14 -26
- hammad/data/collections/__init__.py +4 -2
- hammad/data/collections/collection.py +300 -75
- hammad/data/collections/vector_collection.py +118 -12
- hammad/data/databases/__init__.py +2 -2
- hammad/data/databases/database.py +383 -32
- hammad/json/__init__.py +2 -2
- hammad/logging/__init__.py +13 -5
- hammad/logging/decorators.py +404 -2
- hammad/logging/logger.py +442 -22
- hammad/multimodal/__init__.py +24 -0
- hammad/{data/types/files → multimodal}/audio.py +21 -6
- hammad/{data/types/files → multimodal}/image.py +5 -5
- hammad/multithreading/__init__.py +304 -0
- hammad/pydantic/__init__.py +2 -2
- hammad/pydantic/converters.py +1 -1
- hammad/pydantic/models/__init__.py +2 -2
- hammad/text/__init__.py +59 -14
- hammad/text/converters.py +723 -0
- hammad/text/{utils/markdown/formatting.py → markdown.py} +25 -23
- hammad/text/text.py +12 -14
- hammad/types/__init__.py +11 -0
- hammad/{data/types/files → types}/file.py +18 -18
- hammad/typing/__init__.py +138 -84
- hammad/web/__init__.py +3 -2
- hammad/web/models.py +245 -0
- hammad/web/search/client.py +75 -23
- hammad/web/utils.py +14 -5
- hammad/yaml/__init__.py +2 -2
- hammad/yaml/converters.py +1 -1
- {hammad_python-0.0.11.dist-info → hammad_python-0.0.13.dist-info}/METADATA +4 -1
- hammad_python-0.0.13.dist-info/RECORD +85 -0
- hammad/based/__init__.py +0 -52
- hammad/based/utils.py +0 -455
- hammad/cache/_cache.py +0 -746
- hammad/data/types/__init__.py +0 -33
- hammad/data/types/files/__init__.py +0 -1
- hammad/data/types/files/document.py +0 -195
- hammad/text/utils/__init__.py +0 -1
- hammad/text/utils/converters.py +0 -229
- hammad/text/utils/markdown/__init__.py +0 -1
- hammad/text/utils/markdown/converters.py +0 -506
- hammad_python-0.0.11.dist-info/RECORD +0 -65
- {hammad_python-0.0.11.dist-info → hammad_python-0.0.13.dist-info}/WHEEL +0 -0
- {hammad_python-0.0.11.dist-info → hammad_python-0.0.13.dist-info}/licenses/LICENSE +0 -0
hammad/multithreading/__init__.py
ADDED
@@ -0,0 +1,304 @@
+"""hammad.multithreading"""
+
+import concurrent.futures
+import functools
+import time
+from typing import (
+    Callable,
+    Iterable,
+    List,
+    Any,
+    TypeVar,
+    Tuple,
+    Optional,
+    Union,
+    Type,
+    cast,
+    overload,
+)
+from tenacity import (
+    retry,
+    wait_exponential,
+    stop_after_attempt,
+    retry_if_exception_type,
+    retry_if_exception_message,
+    retry_if_exception_type,
+)
+
+T_Arg = TypeVar("T_Arg")
+R_Out = TypeVar("R_Out")
+
+SingleTaskArgs = Union[T_Arg, Tuple[Any, ...]]
+
+
+__all__ = (
+    "run_sequentially",
+    "run_parallel",
+    "sequentialize",
+    "parallelize",
+    "typed_batch",
+    "run_with_retry",
+    "retry",
+    "wait_exponential",
+    "stop_after_attempt",
+    "retry_if_exception_type",
+    "retry_if_exception_message",
+    "retry_if_exception_type",
+)
+
+
+def run_sequentially(
+    func: Callable[..., R_Out], args_list: Iterable[SingleTaskArgs]
+) -> List[R_Out]:
+    """
+    Executes a function sequentially for each set of arguments in args_list.
+    If the function raises an exception during any call, the execution stops
+    and the exception is propagated.
+
+    Args:
+        func: The function to execute.
+        args_list: An iterable of arguments (or argument tuples) to pass to func.
+            - If func takes multiple arguments (e.g., func(a, b)),
+              each item in args_list should be a tuple (e.g., [(val1_a, val1_b), (val2_a, val2_b)]).
+            - If func takes one argument (e.g., func(a)),
+              each item can be the argument itself (e.g., [val1, val2]).
+            - If func takes no arguments (e.g., func()),
+              each item should be an empty tuple (e.g., [(), ()]).
+
+    Returns:
+        A list of results from the sequential execution.
+    """
+    results: List[R_Out] = []
+    for args_item in args_list:
+        if isinstance(args_item, tuple):
+            results.append(func(*args_item))
+        else:
+            # This branch handles single arguments.
+            # If func expects no arguments, args_item should be `()` and caught by `isinstance(tuple)`.
+            # If func expects one argument, args_item is that argument.
+            results.append(func(args_item))
+    return results
+
+
+def run_parallel(
+    func: Callable[..., R_Out],
+    args_list: Iterable[SingleTaskArgs],
+    max_workers: Optional[int] = None,
+    task_timeout: Optional[float] = None,
+) -> List[Union[R_Out, Exception]]:
+    """
+    Executes a function in parallel for each set of arguments in args_list
+    using a ThreadPoolExecutor. Results are returned in the same order as the input args_list.
+
+    Args:
+        func: The function to execute.
+        args_list: An iterable of arguments (or argument tuples) to pass to func.
+            (See `run_sequentially` for formatting details).
+        max_workers: The maximum number of worker threads. If None, it defaults
+            to ThreadPoolExecutor's default (typically based on CPU cores).
+        task_timeout: The maximum number of seconds to wait for each individual task
+            to complete. If a task exceeds this timeout, a
+            concurrent.futures.TimeoutError will be stored as its result.
+            If None, tasks will wait indefinitely for completion.
+
+    Returns:
+        A list where each element corresponds to the respective item in args_list.
+        - If a task executed successfully, its return value (R_Out) is stored.
+        - If a task raised an exception (including TimeoutError due to task_timeout),
+          the exception object itself is stored.
+    """
+    # Materialize args_list to ensure consistent ordering and count, especially if it's a generator.
+    materialized_args_list = list(args_list)
+    if not materialized_args_list:
+        return []
+
+    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+        futures: List[concurrent.futures.Future] = []
+        for args_item in materialized_args_list:
+            if isinstance(args_item, tuple):
+                future = executor.submit(func, *args_item)
+            else:
+                future = executor.submit(func, args_item)
+            futures.append(future)
+
+        # Initialize results list. Type ignore is used because None is a placeholder.
+        results: List[Union[R_Out, Exception]] = [None] * len(futures)  # type: ignore
+        for i, future in enumerate(futures):
+            try:
+                results[i] = future.result(timeout=task_timeout)
+            except Exception as e:  # Catches TimeoutError from future.result and any exception from func
+                results[i] = e
+    return results
+
+
+def sequentialize():
+    """
+    Decorator to make a function that processes a single item (or argument set)
+    able to process an iterable of items (or argument sets) sequentially.
+
+    The decorated function will expect an iterable of argument sets as its
+    primary argument and will return a list of results. If the underlying
+    function raises an error, execution stops and the error propagates.
+
+    Example:
+        @sequentialize()
+        def process_single(data, factor):
+            return data * factor
+
+        # Now call it with a list of argument tuples
+        results = process_single([(1, 2), (3, 4)])
+        # results will be [2, 12]
+    """
+
+    def decorator(
+        func_to_process_single_item: Callable[..., R_Out],
+    ) -> Callable[[Iterable[SingleTaskArgs]], List[R_Out]]:
+        @functools.wraps(func_to_process_single_item)
+        def wrapper(args_list_for_func: Iterable[SingleTaskArgs]) -> List[R_Out]:
+            return run_sequentially(func_to_process_single_item, args_list_for_func)
+
+        return wrapper
+
+    return decorator
+
+
+def parallelize(
+    max_workers: Optional[int] = None, task_timeout: Optional[float] = None
+):
+    """
+    Decorator to make a function that processes a single item (or argument set)
+    able to process an iterable of items (or argument sets) in parallel.
+
+    The decorated function will expect an iterable of argument sets as its
+    primary argument and will return a list of results or exceptions,
+    maintaining the original order.
+
+    Args:
+        max_workers (Optional[int]): Max worker threads for parallel execution.
+        task_timeout (Optional[float]): Timeout for each individual task.
+
+    Example:
+        @parallelize(max_workers=4, task_timeout=5.0)
+        def fetch_url_content(url: str) -> str:
+            # ... implementation to fetch url ...
+            return "content"
+
+        # Now call it with a list of URLs
+        results = fetch_url_content(["http://example.com", "http://example.org"])
+        # results will be a list of contents or Exception objects.
+    """
+
+    def decorator(
+        func_to_process_single_item: Callable[..., R_Out],
+    ) -> Callable[[Iterable[SingleTaskArgs]], List[Union[R_Out, Exception]]]:
+        @functools.wraps(func_to_process_single_item)
+        def wrapper(
+            args_list_for_func: Iterable[SingleTaskArgs],
+        ) -> List[Union[R_Out, Exception]]:
+            return run_parallel(
+                func_to_process_single_item,
+                args_list_for_func,
+                max_workers=max_workers,
+                task_timeout=task_timeout,
+            )
+
+        return wrapper
+
+    return decorator
+
+
+def typed_batch():
+    """
+    Decorator that provides better IDE type hinting for functions converted from
+    single-item to batch processing. This helps IDEs understand the transformation
+    and provide accurate autocomplete and type checking.
+
+    The decorated function maintains proper type information showing it transforms
+    from Callable[[T], R] to Callable[[Iterable[T]], List[R]].
+
+    Example:
+        @typed_batch()
+        def process_url(url: str) -> dict:
+            return {"url": url, "status": "ok"}
+
+        # IDE will now correctly understand:
+        # process_url: (Iterable[str]) -> List[dict]
+        results = process_url(["http://example.com", "http://test.com"])
+    """
+
+    def decorator(
+        func: Callable[..., R_Out],
+    ) -> Callable[[Iterable[SingleTaskArgs]], List[R_Out]]:
+        @functools.wraps(func)
+        def wrapper(args_list: Iterable[SingleTaskArgs]) -> List[R_Out]:
+            return run_sequentially(func, args_list)
+
+        # Preserve original function's type info while updating signature
+        wrapper.__annotations__ = {
+            "args_list": Iterable[SingleTaskArgs],
+            "return": List[R_Out],
+        }
+
+        return cast(Callable[[Iterable[SingleTaskArgs]], List[R_Out]], wrapper)
+
+    return decorator
+
+
+def run_with_retry(
+    max_attempts: int = 3,
+    delay: float = 1.0,
+    backoff: float = 2.0,
+    exceptions: Optional[Tuple[Type[Exception], ...]] = None,
+):
+    """
+    Decorator that adds retry logic to functions. Essential for robust parallel
+    processing when dealing with network calls, database operations, or other
+    operations that might fail transiently.
+
+    Args:
+        max_attempts: Maximum number of attempts (including the first try).
+        delay: Initial delay between retries in seconds.
+        backoff: Multiplier for delay after each failed attempt.
+        exceptions: Tuple of exception types to retry on. If None, retries on all exceptions.
+
+    Example:
+        @with_retry(max_attempts=3, delay=0.5, backoff=2.0, exceptions=(ConnectionError, TimeoutError))
+        def fetch_data(url: str) -> dict:
+            # This will retry up to 3 times with exponential backoff
+            # only for ConnectionError and TimeoutError
+            return requests.get(url).json()
+
+        @parallelize(max_workers=10)
+        @with_retry(max_attempts=2)
+        def robust_fetch(url: str) -> str:
+            return fetch_url_content(url)
+    """
+    if exceptions is None:
+        exceptions = (Exception,)
+
+    def decorator(func: Callable[..., R_Out]) -> Callable[..., R_Out]:
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs) -> R_Out:
+            last_exception = None
+            current_delay = delay
+
+            for attempt in range(max_attempts):
+                try:
+                    return func(*args, **kwargs)
+                except exceptions as e:
+                    last_exception = e
+                    if attempt == max_attempts - 1:  # Last attempt
+                        break
+
+                    print(
+                        f"Attempt {attempt + 1} failed for {func.__name__}: {e}. Retrying in {current_delay:.2f}s..."
+                    )
+                    time.sleep(current_delay)
+                    current_delay *= backoff
+
+            # If we get here, all attempts failed
+            raise last_exception
+
+        return wrapper
+
+    return decorator
hammad/pydantic/__init__.py
CHANGED
@@ -4,7 +4,7 @@ Contains both models and pydantic **specific** utiltiies / resources
 meant for general case usage."""
 
 from typing import TYPE_CHECKING
-from ..
+from .._core._utils._import_utils import _auto_create_getattr_loader
 
 if TYPE_CHECKING:
     from .converters import (
@@ -35,7 +35,7 @@ __all__ = (
 )
 
 
-__getattr__ =
+__getattr__ = _auto_create_getattr_loader(__all__)
 
 
 def __dir__() -> list[str]:
hammad/pydantic/models/__init__.py
CHANGED
@@ -1,7 +1,7 @@
 """hammad.pydantic.models"""
 
 from typing import TYPE_CHECKING
-from ...
+from ..._core._utils._import_utils import _auto_create_getattr_loader
 
 if TYPE_CHECKING:
     from .arbitrary_model import ArbitraryModel
@@ -20,7 +20,7 @@ __all__ = (
 )
 
 
-__getattr__ =
+__getattr__ = _auto_create_getattr_loader(__all__)
 
 
 def __dir__() -> list[str]:
hammad/text/__init__.py
CHANGED
@@ -1,37 +1,82 @@
 """hammad.text"""
 
 from typing import TYPE_CHECKING
-from ..
+from .._core._utils._import_utils import _auto_create_getattr_loader
 
 if TYPE_CHECKING:
+    from .converters import (
+        convert_collection_to_text,
+        convert_dataclass_to_text,
+        convert_dict_to_text,
+        convert_docstring_to_text,
+        convert_function_to_text,
+        convert_pydantic_to_text,
+        convert_type_to_text,
+        convert_to_text,
+    )
+    from .markdown import (
+        markdown_blockquote,
+        markdown_bold,
+        markdown_code,
+        markdown_code_block,
+        markdown_heading,
+        markdown_horizontal_rule,
+        markdown_italic,
+        markdown_link,
+        markdown_list_item,
+        markdown_table,
+        markdown_table_row,
+    )
     from .text import (
+        BaseText,
         Text,
+        OutputText,
+        OutputFormat,
+        HeadingStyle,
         CodeSection,
-        SchemaSection,
         SimpleText,
-    )
-
-    from .utils.converters import convert_docstring_to_text, convert_type_to_text
-    from .utils.markdown.converters import (
-        convert_to_markdown as convert_to_text,
+        SchemaSection,
+        UserResponse,
     )
 
 
 __all__ = (
-
-    "
-    "
-    "
-    "OutputText",
+    # hammad.text.converters
+    "convert_collection_to_text",
+    "convert_dataclass_to_text",
+    "convert_dict_to_text",
     "convert_docstring_to_text",
+    "convert_function_to_text",
+    "convert_pydantic_to_text",
     "convert_type_to_text",
     "convert_to_text",
+    # hammad.text.markdown
+    "markdown_blockquote",
+    "markdown_bold",
+    "markdown_code",
+    "markdown_code_block",
+    "markdown_heading",
+    "markdown_horizontal_rule",
+    "markdown_italic",
+    "markdown_link",
+    "markdown_list_item",
+    "markdown_table",
+    "markdown_table_row",
+    # hammad.text.text
+    "BaseText",
+    "Text",
+    "OutputText",
+    "OutputFormat",
+    "HeadingStyle",
+    "CodeSection",
+    "SimpleText",
+    "SchemaSection",
+    "UserResponse",
 )
 
 
-__getattr__ =
+__getattr__ = _auto_create_getattr_loader(__all__)
 
 
 def __dir__() -> list[str]:
-    """Get the attributes of the text module."""
     return list(__all__)