hammad-python 0.0.12__py3-none-any.whl → 0.0.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. hammad/__init__.py +1 -180
  2. hammad/ai/__init__.py +0 -58
  3. hammad/ai/completions/__init__.py +3 -2
  4. hammad/ai/completions/client.py +84 -129
  5. hammad/ai/completions/create.py +33 -9
  6. hammad/ai/completions/settings.py +100 -0
  7. hammad/ai/completions/types.py +86 -5
  8. hammad/ai/completions/utils.py +112 -0
  9. hammad/ai/embeddings/__init__.py +2 -2
  10. hammad/ai/embeddings/client/fastembed_text_embeddings_client.py +1 -1
  11. hammad/ai/embeddings/client/litellm_embeddings_client.py +1 -1
  12. hammad/ai/embeddings/types.py +4 -4
  13. hammad/cache/__init__.py +13 -21
  14. hammad/cli/__init__.py +2 -2
  15. hammad/cli/animations.py +8 -39
  16. hammad/cli/styles/__init__.py +2 -2
  17. hammad/data/__init__.py +19 -2
  18. hammad/data/collections/__init__.py +2 -2
  19. hammad/data/collections/vector_collection.py +0 -7
  20. hammad/{configuration → data/configurations}/__init__.py +2 -2
  21. hammad/{configuration → data/configurations}/configuration.py +1 -1
  22. hammad/data/databases/__init__.py +2 -2
  23. hammad/data/models/__init__.py +44 -0
  24. hammad/{base → data/models/base}/__init__.py +3 -3
  25. hammad/{pydantic → data/models/pydantic}/__init__.py +28 -16
  26. hammad/{pydantic → data/models/pydantic}/converters.py +11 -2
  27. hammad/{pydantic → data/models/pydantic}/models/__init__.py +3 -3
  28. hammad/{pydantic → data/models/pydantic}/models/arbitrary_model.py +1 -1
  29. hammad/{pydantic → data/models/pydantic}/models/cacheable_model.py +1 -1
  30. hammad/{pydantic → data/models/pydantic}/models/fast_model.py +1 -1
  31. hammad/{pydantic → data/models/pydantic}/models/function_model.py +1 -1
  32. hammad/{pydantic → data/models/pydantic}/models/subscriptable_model.py +1 -1
  33. hammad/data/types/__init__.py +41 -0
  34. hammad/{types → data/types}/file.py +2 -2
  35. hammad/{multimodal → data/types/multimodal}/__init__.py +2 -2
  36. hammad/{multimodal → data/types/multimodal}/audio.py +2 -2
  37. hammad/{multimodal → data/types/multimodal}/image.py +2 -2
  38. hammad/{text → data/types}/text.py +4 -4
  39. hammad/formatting/__init__.py +38 -0
  40. hammad/{json → formatting/json}/__init__.py +3 -3
  41. hammad/{json → formatting/json}/converters.py +2 -2
  42. hammad/{text → formatting/text}/__init__.py +5 -24
  43. hammad/{text → formatting/text}/converters.py +2 -2
  44. hammad/{text → formatting/text}/markdown.py +1 -1
  45. hammad/{yaml → formatting/yaml}/__init__.py +3 -7
  46. hammad/formatting/yaml/converters.py +5 -0
  47. hammad/logging/__init__.py +2 -2
  48. hammad/mcp/__init__.py +50 -0
  49. hammad/mcp/client/__init__.py +1 -0
  50. hammad/mcp/client/client.py +523 -0
  51. hammad/mcp/client/client_service.py +393 -0
  52. hammad/mcp/client/settings.py +178 -0
  53. hammad/mcp/servers/__init__.py +1 -0
  54. hammad/mcp/servers/launcher.py +1161 -0
  55. hammad/performance/__init__.py +36 -0
  56. hammad/{_core/_utils/_import_utils.py → performance/imports.py} +125 -76
  57. hammad/performance/runtime/__init__.py +32 -0
  58. hammad/performance/runtime/decorators.py +142 -0
  59. hammad/performance/runtime/run.py +299 -0
  60. hammad/service/__init__.py +49 -0
  61. hammad/service/create.py +532 -0
  62. hammad/service/decorators.py +285 -0
  63. hammad/web/__init__.py +2 -2
  64. hammad/web/http/client.py +1 -1
  65. hammad/web/openapi/__init__.py +1 -0
  66. {hammad_python-0.0.12.dist-info → hammad_python-0.0.14.dist-info}/METADATA +35 -3
  67. hammad_python-0.0.14.dist-info/RECORD +99 -0
  68. hammad/_core/__init__.py +0 -1
  69. hammad/_core/_utils/__init__.py +0 -4
  70. hammad/multithreading/__init__.py +0 -304
  71. hammad/types/__init__.py +0 -11
  72. hammad/yaml/converters.py +0 -19
  73. hammad_python-0.0.12.dist-info/RECORD +0 -85
  74. /hammad/{base → data/models/base}/fields.py +0 -0
  75. /hammad/{base → data/models/base}/model.py +0 -0
  76. /hammad/{base → data/models/base}/utils.py +0 -0
  77. {hammad_python-0.0.12.dist-info → hammad_python-0.0.14.dist-info}/WHEEL +0 -0
  78. {hammad_python-0.0.12.dist-info → hammad_python-0.0.14.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,36 @@
+"""hammad.performance
+
+Contains a collection of various utilities and resources for 'accelerating' or
+optimizing different objects and operations in general Python development."""
+
+from typing import TYPE_CHECKING
+from .imports import create_getattr_importer
+
+
+if TYPE_CHECKING:
+    from .runtime import (
+        sequentialize_function,
+        parallelize_function,
+        update_batch_type_hints,
+        run_sequentially,
+        run_parallel,
+        run_with_retry,
+    )
+
+
+__all__ = (
+    # hammad.performance.runtime
+    "sequentialize_function",
+    "parallelize_function",
+    "update_batch_type_hints",
+    "run_sequentially",
+    "run_parallel",
+    "run_with_retry",
+)
+
+
+__getattr__ = create_getattr_importer(__all__)
+
+
+def __dir__() -> list[str]:
+    return sorted(__all__)
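
The module above never imports `.runtime` at definition time: it assigns the callable returned by `create_getattr_importer(__all__)` to the module-level `__getattr__` hook (PEP 562), so the heavy imports only happen on first attribute access. A minimal, library-independent sketch of that mechanism (the `heavy` submodule and `expensive_attr` names are placeholders, not part of hammad):

```python
# Minimal PEP 562 sketch of what such a generated __getattr__ does; `heavy`
# and `expensive_attr` are hypothetical names used only for illustration.
from importlib import import_module
from typing import Any

__all__ = ("expensive_attr",)


def __getattr__(name: str) -> Any:
    # Called only when `name` is not already a real attribute of this module.
    if name in __all__:
        module = import_module(".heavy", __package__)  # deferred import
        value = getattr(module, name)
        globals()[name] = value  # cache so the hook is not hit again
        return value
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


def __dir__() -> list[str]:
    return sorted(__all__)
```
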
@@ -1,15 +1,21 @@
-"""hammad._core._utils._import_utils"""
+"""hammad.performance.runtime.imports"""
 
-from typing import Any, Callable
+from typing import Any, Callable, List, Tuple, Union
 import inspect
 import ast
 import hashlib
 
-__all__ = ("_auto_create_getattr_loader",)
+__all__ = ("create_getattr_importer",)
 
 
-class _ModuleCache:
-    """Minimal cache implementation for internal use only."""
+class GetAttrImporterError(Exception):
+    """An error that occurs when the `create_getattr_importer` function
+    fails to create a lazy loader function."""
+
+
+class GetAttrImporterCache:
+    """Minimal cache implementation for internal use only
+    within the `create_getattr_importer` function."""
 
     def __init__(self, maxsize: int = 128):
         self.maxsize = maxsize
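
The hunk above cuts off before the body of `GetAttrImporterCache`; only its `maxsize` argument and the `cached_call` / `return wrapper` lines are visible in this diff. Purely as an assumed shape (not the package's actual implementation), a bounded, hash-keyed memoizing decorator of that kind could look like this:

```python
# Hypothetical sketch of a bounded, hash-keyed memo cache exposing a
# `cached_call` decorator; the real GetAttrImporterCache body is not shown here.
import hashlib
from typing import Any, Callable


class TinyCache:
    def __init__(self, maxsize: int = 128):
        self.maxsize = maxsize
        self._store: dict[str, Any] = {}

    def cached_call(self, func: Callable[[str], Any]) -> Callable[[str], Any]:
        def wrapper(source: str) -> Any:
            key = hashlib.md5(source.encode()).hexdigest()
            if key not in self._store:
                if len(self._store) >= self.maxsize:
                    # Evict the oldest entry (dicts keep insertion order).
                    self._store.pop(next(iter(self._store)))
                self._store[key] = func(source)
            return self._store[key]

        return wrapper
```
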
@@ -45,52 +51,142 @@ class _ModuleCache:
         return wrapper
 
 
-# Global cache instance for parse function
-_parse_cache = _ModuleCache(maxsize=64)
+# NOTE:
+# SINGLETON
+GETATTR_IMPORTER_PARSE_CACHE = GetAttrImporterCache(maxsize=64)
+"""Library-wide singleton instance providing caching for the
+`_parse_type_checking_imports` function."""
+
+
+GETATTR_IMPORTER_TYPE_CHECKING_CACHE = {}
+"""Cache for the `_parse_type_checking_imports` function."""
+
+
+def _parse_type_checking_imports(source_code: str) -> dict[str, tuple[str, str]]:
+    """Parses the TYPE_CHECKING imports from a source code file, to create
+    a dictionary of local names to (module_path, original_name) tuples.
+
+    This is used to create the mapping used within the `_create_getattr_importer_from_import_dict`
+    function.
+
+    Args:
+        source_code : The source code to parse
+
+    Returns:
+        A dictionary mapping local names to (module_path, original_name) tuples
+    """
+
+    @GETATTR_IMPORTER_PARSE_CACHE.cached_call
+    def _exec(source_code: str) -> dict[str, tuple[str, str]]:
+        tree = ast.parse(source_code)
+        imports = {}
+
+        # Walk through the AST and find TYPE_CHECKING blocks
+        for node in ast.walk(tree):
+            if isinstance(node, ast.If):
+                # Check if this is a TYPE_CHECKING block
+                is_type_checking = False
+
+                if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING":
+                    is_type_checking = True
+                elif isinstance(node.test, ast.Attribute):
+                    if (
+                        isinstance(node.test.value, ast.Name)
+                        and node.test.value.id == "typing"
+                        and node.test.attr == "TYPE_CHECKING"
+                    ):
+                        is_type_checking = True
+
+                if is_type_checking:
+                    # Process imports in this block
+                    for stmt in node.body:
+                        if isinstance(stmt, ast.ImportFrom) and stmt.module:
+                            module_path = f".{stmt.module}"
+                            for alias in stmt.names:
+                                original_name = alias.name
+                                local_name = alias.asname or original_name
+                                imports[local_name] = (module_path, original_name)
+
+        return imports
+
+    return _exec(source_code)
 
 
-def _create_getattr_loader(
-    imports_dict: dict[str, tuple[str, str]], package: str
+def _create_getattr_importer_from_import_dict(
+    imports_dict: dict[str, tuple[str, str]],
+    package: str,
+    all_attrs: Union[Tuple[str, ...], List[str]],
 ) -> Callable[[str], Any]:
-    """Create a lazy loader function for __getattr__.
+    """Creates a lazy loader function for the `__getattr__` method
+    within `__init__.py` modules in Python packages.
 
     Args:
-        imports_dict: Dictionary mapping attribute names to (module_path, original_name) tuples
-        package: The package name for import_module
+        imports_dict : Dictionary mapping attribute names to (module_path, original_name) tuples
+        package : The package name for import_module
+        all_attrs : List of all valid attributes for this module
 
     Returns:
         A __getattr__ function that lazily imports modules
     """
     from importlib import import_module
 
-    _cache = {}
+    cache = {}
 
     def __getattr__(name: str) -> Any:
-        if name in _cache:
-            return _cache[name]
+        if name in cache:
+            return cache[name]
 
         if name in imports_dict:
             module_path, original_name = imports_dict[name]
             module = import_module(module_path, package)
             result = getattr(module, original_name)
-            _cache[name] = result
+            cache[name] = result
             return result
-        raise AttributeError(f"module '{package}' has no attribute '{name}'")
+
+        # Try to import as a submodule
+        try:
+            module_path = f".{name}"
+            module = import_module(module_path, package)
+            cache[name] = module
+            return module
+        except ImportError:
+            pass
+
+        raise GetAttrImporterError(f"module '{package}' has no attribute '{name}'")
 
     return __getattr__
 
 
-_type_checking_cache = {}
+def create_getattr_importer(
+    all: Union[Tuple[str, ...], List[str]],
+) -> Callable[[str], Any]:
+    """Loader used internally within the `hammad` package to create lazy
+    loaders within `__init__.py` modules using the `TYPE_CHECKING` and
+    `all` source code within files.
+
+    This function is meant to be set as the `__getattr__` method / var
+    within modules to allow for direct lazy loading of attributes.
+
+    Example:
+
+    ```
+    # Create a module that contains some imports and TYPE_CHECKING
+    from typing import TYPE_CHECKING
+    from hammad.performance.imports import create_getattr_importer
+
+    if TYPE_CHECKING:
+        from functools import wraps
 
+    all = ("wraps")
 
-def _auto_create_getattr_loader(all_exports: tuple[str, ...]) -> Callable[[str], Any]:
-    """Automatically create a lazy loader by inspecting the calling module.
+    __getattr__ = create_getattr_importer(all)
 
-    This function inspects the calling module's source code to extract
-    TYPE_CHECKING imports and automatically builds the import map.
+    # Now, when you import this module, the `wraps` attribute will be
+    # lazily loaded when it is first accessed.
+    ```
 
     Args:
-        all_exports: The __all__ tuple from the calling module
+        all : The `all` tuple from the calling module
 
     Returns:
         A __getattr__ function that lazily imports modules
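
To make the AST walk in `_parse_type_checking_imports` concrete, here is the same traversal run against a small inline snippet (a standalone example, not part of the package):

```python
# Worked example of the TYPE_CHECKING parsing shown above, applied to an
# inline source string instead of a module file read from disk.
import ast

source = """
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .run import run_parallel as parallel, run_sequentially
"""

imports: dict[str, tuple[str, str]] = {}
for node in ast.walk(ast.parse(source)):
    if isinstance(node, ast.If) and isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING":
        for stmt in node.body:
            if isinstance(stmt, ast.ImportFrom) and stmt.module:
                for alias in stmt.names:
                    imports[alias.asname or alias.name] = (f".{stmt.module}", alias.name)

print(imports)
# {'parallel': ('.run', 'run_parallel'), 'run_sequentially': ('.run', 'run_sequentially')}
```
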
@@ -106,9 +202,9 @@ def _auto_create_getattr_loader(all_exports: tuple[str, ...]) -> Callable[[str],
     filename = calling_frame.f_globals.get("__file__", "")
 
     # Check cache first
-    cache_key = (filename, tuple(all_exports))
-    if cache_key in _type_checking_cache:
-        return _type_checking_cache[cache_key]
+    cache_key = (filename, tuple(all))
+    if cache_key in GETATTR_IMPORTER_TYPE_CHECKING_CACHE:
+        return GETATTR_IMPORTER_TYPE_CHECKING_CACHE[cache_key]
 
     # Read the source file
     try:
@@ -128,55 +224,8 @@ def _auto_create_getattr_loader(all_exports: tuple[str, ...]) -> Callable[[str],
     imports_map = _parse_type_checking_imports(source_code)
 
     # Filter to only include exports that are in __all__
-    filtered_map = {
-        name: path for name, path in imports_map.items() if name in all_exports
-    }
+    filtered_map = {name: path for name, path in imports_map.items() if name in all}
 
-    loader = _create_getattr_loader(filtered_map, package)
-    _type_checking_cache[cache_key] = loader
+    loader = _create_getattr_importer_from_import_dict(filtered_map, package, all)
+    GETATTR_IMPORTER_TYPE_CHECKING_CACHE[cache_key] = loader
     return loader
-
-
-def _parse_type_checking_imports(source_code: str) -> dict[str, tuple[str, str]]:
-    """Parse TYPE_CHECKING imports from source code to build import map.
-
-    Args:
-        source_code: The source code containing TYPE_CHECKING imports
-
-    Returns:
-        Dictionary mapping local names to (module_path, original_name) tuples
-    """
-
-    @_parse_cache.cached_call
-    def _exec(source_code: str) -> dict[str, tuple[str, str]]:
-        tree = ast.parse(source_code)
-        imports = {}
-
-        for node in ast.walk(tree):
-            if isinstance(node, ast.If):
-                # Check if this is a TYPE_CHECKING block
-                is_type_checking = False
-
-                if isinstance(node.test, ast.Name) and node.test.id == "TYPE_CHECKING":
-                    is_type_checking = True
-                elif isinstance(node.test, ast.Attribute):
-                    if (
-                        isinstance(node.test.value, ast.Name)
-                        and node.test.value.id == "typing"
-                        and node.test.attr == "TYPE_CHECKING"
-                    ):
-                        is_type_checking = True
-
-                if is_type_checking:
-                    # Process imports in this block
-                    for stmt in node.body:
-                        if isinstance(stmt, ast.ImportFrom) and stmt.module:
-                            module_path = f".{stmt.module}"
-                            for alias in stmt.names:
-                                original_name = alias.name
-                                local_name = alias.asname or original_name
-                                imports[local_name] = (module_path, original_name)
-
-        return imports
-
-    return _exec(source_code)
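
From the consumer side, the effect of wiring `create_getattr_importer` into these `__init__.py` modules is that submodules are imported only when first touched. A rough check of that behaviour (assumes the published 0.0.14 wheel is installed; the exact import side effects of `hammad/__init__.py` are not guaranteed here):

```python
# Consumer-side sketch; the expected output is inferred from the lazy-loading
# code above, not verified against the published wheel.
import sys

import hammad.performance as perf

print("hammad.performance.runtime" in sys.modules)  # expected: False, nothing loaded yet
run_parallel = perf.run_parallel                     # first access triggers __getattr__
print("hammad.performance.runtime" in sys.modules)  # expected: True, submodule now imported
```
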
@@ -0,0 +1,32 @@
+"""hammad.performance.runtime"""
+
+from typing import TYPE_CHECKING
+from ..imports import create_getattr_importer
+
+
+if TYPE_CHECKING:
+    from .decorators import (
+        sequentialize_function,
+        parallelize_function,
+        update_batch_type_hints,
+    )
+    from .run import run_sequentially, run_parallel, run_with_retry
+
+
+__all__ = (
+    # hammad.performance.decorators
+    "sequentialize_function",
+    "parallelize_function",
+    "update_batch_type_hints",
+    # hammad.performance.run
+    "run_sequentially",
+    "run_parallel",
+    "run_with_retry",
+)
+
+
+__getattr__ = create_getattr_importer(__all__)
+
+
+def __dir__() -> list[str]:
+    return list(__all__)
@@ -0,0 +1,142 @@
+"""hammad.performance.runtime.decorators"""
+
+import functools
+from typing import (
+    Callable,
+    Iterable,
+    List,
+    Any,
+    TypeVar,
+    Optional,
+    Union,
+    cast,
+)
+
+
+__all__ = (
+    "sequentialize_function",
+    "parallelize_function",
+    "update_batch_type_hints",
+)
+
+
+Parameters = TypeVar("Parameters", bound=dict[str, Any])
+Return = TypeVar("Return")
+
+TaskParameters = TypeVar("TaskParameters", bound=dict[str, Any])
+
+
+def sequentialize_function():
+    """
+    Decorator to make a function that processes a single item (or argument set)
+    able to process an iterable of items (or argument sets) sequentially.
+
+    The decorated function will expect an iterable of argument sets as its
+    primary argument and will return a list of results. If the underlying
+    function raises an error, execution stops and the error propagates.
+
+    Example:
+        @sequentialize_function()
+        def process_single(data, factor):
+            return data * factor
+
+        # Now call it with a list of argument tuples
+        results = process_single([(1, 2), (3, 4)])
+        # results will be [2, 12]
+    """
+    from .run import run_sequentially
+
+    def decorator(
+        func_to_process_single_item: Callable[..., Return],
+    ) -> Callable[[Iterable[TaskParameters]], List[Return]]:
+        @functools.wraps(func_to_process_single_item)
+        def wrapper(args_list_for_func: Iterable[TaskParameters]) -> List[Return]:
+            return run_sequentially(func_to_process_single_item, args_list_for_func)
+
+        return wrapper
+
+    return decorator
+
+
+def parallelize_function(
+    max_workers: Optional[int] = None, timeout: Optional[float] = None
+):
+    """
+    Decorator to make a function that processes a single item (or argument set)
+    able to process an iterable of items (or argument sets) in parallel.
+
+    The decorated function will expect an iterable of argument sets as its
+    primary argument and will return a list of results or exceptions,
+    maintaining the original order.
+
+    Args:
+        max_workers (Optional[int]): Max worker threads for parallel execution.
+        timeout (Optional[float]): Timeout for each individual task.
+
+    Example:
+        @parallelize_function(max_workers=4, timeout=5.0)
+        def fetch_url_content(url: str) -> str:
+            # ... implementation to fetch url ...
+            return "content"
+
+        # Now call it with a list of URLs
+        results = fetch_url_content(["http://example.com", "http://example.org"])
+        # results will be a list of contents or Exception objects.
+    """
+    from .run import run_parallel
+
+    def decorator(
+        func_to_process_single_item: Callable[..., Return],
+    ) -> Callable[[Iterable[TaskParameters]], List[Union[Return, Exception]]]:
+        @functools.wraps(func_to_process_single_item)
+        def wrapper(
+            args_list_for_func: Iterable[TaskParameters],
+        ) -> List[Union[Return, Exception]]:
+            return run_parallel(
+                func_to_process_single_item,
+                args_list_for_func,
+                max_workers=max_workers,
+                timeout=timeout,
+            )
+
+        return wrapper
+
+    return decorator
+
+
+def update_batch_type_hints():
+    """
+    Decorator that provides better IDE type hinting for functions converted from
+    single-item to batch processing. This helps IDEs understand the transformation
+    and provide accurate autocomplete and type checking.
+
+    The decorated function maintains proper type information showing it transforms
+    from Callable[[T], R] to Callable[[Iterable[T]], List[R]].
+
+    Example:
+        @typed_batch()
+        def process_url(url: str) -> dict:
+            return {"url": url, "status": "ok"}
+
+        # IDE will now correctly understand:
+        # process_url: (Iterable[str]) -> List[dict]
+        results = process_url(["http://example.com", "http://test.com"])
+    """
+    from .run import run_sequentially
+
+    def decorator(
+        func: Callable[..., Return],
+    ) -> Callable[[Iterable[TaskParameters]], List[Return]]:
+        @functools.wraps(func)
+        def wrapper(args_list: Iterable[TaskParameters]) -> List[Return]:
+            return run_sequentially(func, args_list)
+
+        # Preserve original function's type info while updating signature
+        wrapper.__annotations__ = {
+            "args_list": Iterable[TaskParameters],
+            "return": List[Return],
+        }
+
+        return cast(Callable[[Iterable[TaskParameters]], List[Return]], wrapper)
+
+    return decorator
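
A short usage sketch for the two main decorators above, mirroring their docstring examples (assumes the 0.0.14 wheel is installed and that `run.py` handles argument sets as those docstrings describe):

```python
# Usage sketch based on the docstrings above; not an authoritative API reference.
from hammad.performance.runtime import parallelize_function, sequentialize_function


@sequentialize_function()
def scale(value: int, factor: int) -> int:
    return value * factor


@parallelize_function(max_workers=4, timeout=5.0)
def shout(word: str) -> str:
    return word.upper()


# Each argument tuple is applied to the single-item function, in order.
print(scale([(1, 2), (3, 4)]))    # [2, 12]

# Parallel variant returns results (or Exception objects) in input order.
print(shout(["alpha", "beta"]))   # ['ALPHA', 'BETA']
```
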