nexaai-1.0.12-cp310-cp310-macosx_13_0_x86_64.whl → nexaai-1.0.13rc1-cp310-cp310-macosx_13_0_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of nexaai might be problematic.
- nexaai/_stub.cpython-310-darwin.so +0 -0
- nexaai/_version.py +1 -1
- nexaai/binds/__init__.py +1 -0
- nexaai/binds/libnexa_bridge.dylib +0 -0
- nexaai/binds/vlm_bind.cpython-310-darwin.so +0 -0
- nexaai/utils/model_manager.py +107 -49
- nexaai/vlm_impl/pybind_vlm_impl.py +6 -17
- {nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/METADATA +1 -1
- {nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/RECORD +11 -10
- {nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/WHEEL +0 -0
- {nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/top_level.txt +0 -0
nexaai/_stub.cpython-310-darwin.so
Binary file changed
nexaai/_version.py
CHANGED
nexaai/binds/__init__.py
CHANGED
nexaai/binds/libnexa_bridge.dylib
Binary file changed
nexaai/binds/vlm_bind.cpython-310-darwin.so
Binary file changed
nexaai/utils/model_manager.py
CHANGED
@@ -1124,15 +1124,64 @@ def download_from_huggingface(
 ##########################################################################
 
 
+def _download_model_if_needed(
+    model_path: str,
+    param_name: str,
+    progress_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
+    token: Union[bool, str, None] = None
+) -> str:
+    """
+    Helper function to download a model from HuggingFace if it doesn't exist locally.
+
+    Args:
+        model_path: The model path that may be local or remote
+        param_name: Name of the parameter (for error messages)
+        progress_callback: Callback function for download progress updates
+        token: HuggingFace authentication token for private repositories
+
+    Returns:
+        str: Local path to the model (either existing or downloaded)
+
+    Raises:
+        RuntimeError: If download fails
+    """
+    # Check if model_path exists locally (file or directory)
+    if os.path.exists(model_path):
+        # Local path exists, return as-is
+        return model_path
+
+    # Model path doesn't exist locally, try to download from HuggingFace
+    try:
+        # Parse model_path to extract repo_id and filename
+        repo_id, file_name = _parse_model_path(model_path)
+
+        # Download the model
+        downloaded_path = download_from_huggingface(
+            repo_id=repo_id,
+            file_name=file_name,
+            local_dir=None,  # Use default cache directory
+            enable_transfer=True,
+            progress_callback=progress_callback,
+            show_progress=True,
+            token=token
+        )
+
+        return downloaded_path
+
+    except Exception as e:
+        # Only handle download-related errors
+        raise RuntimeError(f"Could not load model from '{param_name}={model_path}': {e}")
+
+
 def auto_download_model(func: Callable) -> Callable:
     """
     Decorator that automatically downloads models from HuggingFace if they don't exist locally.
 
-    This decorator should be applied to __init__ methods that take a name_or_path parameter
-    If
-    it from HuggingFace Hub using the download_from_huggingface function.
+    This decorator should be applied to __init__ methods that take a name_or_path parameter
+    and optionally an mmproj_path parameter. If these paths don't exist as local files/directories,
+    it will attempt to download them from HuggingFace Hub using the download_from_huggingface function.
 
-    The name_or_path can be in formats like:
+    The name_or_path and mmproj_path can be in formats like:
     - "microsoft/DialoGPT-small" (downloads entire repo)
    - "microsoft/DialoGPT-small/pytorch_model.bin" (downloads specific file)
    - "Qwen/Qwen3-4B-GGUF/Qwen3-4B-Q4_K_M.gguf" (downloads specific file)
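For orientation, the new helper can be exercised on its own roughly as follows. This is an illustrative sketch only: _download_model_if_needed is a private helper normally invoked by the decorator below, the repo/file reference is taken from the docstring examples above, and _parse_model_path plus the cache location are internal details not shown in this diff.

from nexaai.utils.model_manager import _download_model_if_needed

# Resolves to a local path: returned unchanged if it already exists on disk,
# otherwise downloaded from HuggingFace into the default cache directory.
local_path = _download_model_if_needed(
    "Qwen/Qwen3-4B-GGUF/Qwen3-4B-Q4_K_M.gguf",  # repo/file reference from the docstring examples
    param_name="name_or_path",
    progress_callback=None,  # or a callable receiving progress dicts
    token=None,              # or a HuggingFace token string for private repos
)
print(local_path)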
@@ -1149,21 +1198,6 @@ def auto_download_model(func: Callable) -> Callable:
     """
     @functools.wraps(func)
     def wrapper(*args, **kwargs):
-        # Find name_or_path in arguments
-        # Assuming name_or_path is the first argument after self
-        if len(args) >= 2:
-            name_or_path = args[1]
-            args_list = list(args)
-            path_index = 1
-            is_positional = True
-        elif 'name_or_path' in kwargs:
-            name_or_path = kwargs['name_or_path']
-            path_index = None
-            is_positional = False
-        else:
-            # No name_or_path found, call original function
-            return func(*args, **kwargs)
-
         # Extract progress_callback and token from arguments
         progress_callback = None
         if 'progress_callback' in kwargs:
@@ -1173,39 +1207,63 @@
         if 'token' in kwargs:
             token = kwargs.pop('token')  # Remove from kwargs to avoid passing to original func
 
-        #
-        [old lines 1177-1178 removed; text not captured in this diff view]
+        # Handle name_or_path parameter
+        name_or_path = None
+        name_path_index = None
+        is_name_positional = False
+
+        # Find name_or_path in arguments
+        # Assuming name_or_path is the first argument after self
+        if len(args) >= 2:
+            name_or_path = args[1]
+            args_list = list(args)
+            name_path_index = 1
+            is_name_positional = True
+        elif 'name_or_path' in kwargs:
+            name_or_path = kwargs['name_or_path']
+            is_name_positional = False
+
+        # Handle mmproj_path parameter
+        mmproj_path = None
+        if 'mmproj_path' in kwargs:
+            mmproj_path = kwargs['mmproj_path']
+
+        # If neither parameter is found, call original function
+        if name_or_path is None and mmproj_path is None:
             return func(*args, **kwargs)
 
-        #
-        [old lines 1182-1206 removed; text not captured in this diff view]
+        # Download name_or_path if needed
+        if name_or_path is not None:
+            try:
+                downloaded_name_path = _download_model_if_needed(
+                    name_or_path, 'name_or_path', progress_callback, token
+                )
+
+                # Replace name_or_path with downloaded path
+                if is_name_positional:
+                    if name_path_index is not None:
+                        args_list[name_path_index] = downloaded_name_path
+                        args = tuple(args_list)
+                else:
+                    kwargs['name_or_path'] = downloaded_name_path
+
+            except Exception as e:
+                raise e  # Re-raise the error from _download_model_if_needed
+
+        # Download mmproj_path if needed
+        if mmproj_path is not None:
+            try:
+                downloaded_mmproj_path = _download_model_if_needed(
+                    mmproj_path, 'mmproj_path', progress_callback, token
+                )
+
+                # Replace mmproj_path with downloaded path
+                kwargs['mmproj_path'] = downloaded_mmproj_path
+
+            except Exception as e:
+                raise e  # Re-raise the error from _download_model_if_needed
 
-        # Call original function with updated
+        # Call original function with updated paths (outside try-catch to let model creation errors bubble up)
         return func(*args, **kwargs)
 
     return wrapper
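In practice the decorator wraps an __init__ so a constructor can accept either a local path or a HuggingFace-style reference. A minimal sketch, assuming a hypothetical class; the decorator, the positional handling of name_or_path, the keyword-only handling of mmproj_path, and the popping of token all come from the hunks above.

from typing import Optional

from nexaai.utils.model_manager import auto_download_model


class ExampleVLM:  # hypothetical class for illustration
    @auto_download_model
    def __init__(self, name_or_path: str, mmproj_path: Optional[str] = None):
        # By the time __init__ runs, both references have been resolved to local
        # paths by the decorator (downloading via _download_model_if_needed if necessary).
        self.model_path = name_or_path
        self.mmproj_path = mmproj_path


# name_or_path is picked up positionally (args[1]); mmproj_path only as a keyword.
# 'token' is popped from kwargs before the wrapped __init__ is called.
vlm = ExampleVLM(
    "Qwen/Qwen3-4B-GGUF/Qwen3-4B-Q4_K_M.gguf",
    mmproj_path="org/repo/mmproj-F16.gguf",  # hypothetical repo/file reference
    token=None,
)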
nexaai/vlm_impl/pybind_vlm_impl.py
CHANGED
@@ -1,7 +1,6 @@
 from typing import Generator, Optional, List, Dict, Any, Union
 import queue
 import threading
-import base64
 from pathlib import Path
 
 from nexaai.common import ModelConfig, GenerationConfig, MultiModalMessage, PluginID
@@ -102,26 +101,16 @@ class PyBindVLMImpl(VLM):
                 t = c["type"]
                 if t == "text":
                     blocks.append({"type": "text", "text": c.get("text","") or ""})
+                elif t == "image":
+                    # Pass through the original structure - let vlm-bind.cpp handle field extraction
+                    blocks.append(c)
                 else:
-                    [removed line; text not captured in this diff view]
-                    src = c.get("url") or c.get("path")
-                    if not src:
-                        raise ValueError(f"No url/path for {t}")
-                    # read local file or strip data URI
-                    if Path(src).exists():
-                        raw = Path(src).read_bytes()
-                        b64 = base64.b64encode(raw).decode("ascii")
-                        blocks.append({"type": t, "text": b64})
-                    elif src.startswith("data:"):
-                        b64 = src.split(",",1)[1]
-                        blocks.append({"type": t, "text": b64})
-                    else:
-                        # remote URL
-                        blocks.append({"type": t, "text": src})
+                    raise ValueError(f"Unsupported content type: {t}. Use 'text' or 'image' to match the golden reference in vlm.cpp")
 
             payload.append({"role": role, "content": blocks})
 
-        [removed line; text not captured in this diff view]
+        result = vlm_bind.ml_vlm_apply_chat_template(self._handle, payload, tools)
+        return result
 
     def generate_stream(self, prompt: str, g_cfg: GenerationConfig = GenerationConfig()) -> Generator[str, None, None]:
         """Generate text with streaming."""
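The net effect of this hunk is that the Python layer no longer base64-encodes media itself: text blocks are still normalized, image blocks are forwarded untouched for the bridge (vlm-bind.cpp) to interpret, and any other block type raises ValueError. A sketch of the message shape this implies; the exact field names inside an image block (such as "path") are an assumption for illustration.

messages = [
    {
        "role": "user",
        "content": [
            {"type": "text", "text": "Describe this picture."},  # normalized by the Python layer
            {"type": "image", "path": "/tmp/example.png"},        # passed through as-is to vlm-bind.cpp
        ],
    }
]
# Any other content type (e.g. "audio") now raises ValueError instead of being
# base64-encoded, matching the golden reference in vlm.cpp.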
{nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
 nexaai/__init__.py,sha256=jXdC4vv6DBK1fVewYTYSUhOOYfvf_Mk81UIeMGGIKUg,2029
-nexaai/_stub.cpython-310-darwin.so,sha256=
-nexaai/_version.py,sha256=
+nexaai/_stub.cpython-310-darwin.so,sha256=R8fF_ouYUW419bQTnDSU8pu9M2bskkgYjx2DFG5qSsQ,49832
+nexaai/_version.py,sha256=akSnDABX0T874oWowjXB3MrX7aTHCZHWA6-X80hJ7bY,143
 nexaai/asr.py,sha256=NljMXDErwPNMOPaRkJZMEDka9Nk8xyur7L8i924TStY,2054
 nexaai/base.py,sha256=N8PRgDFA-XPku2vWnQIofQ7ipz3pPlO6f8YZGnuhquE,982
 nexaai/common.py,sha256=yBnIbqYaQYnfrl7IczOBh6MDibYZVxwaRJEglYcKgGs,3422
@@ -15,13 +15,14 @@ nexaai/vlm.py,sha256=3voXmAVnGlXnOiwA3wcX4p0Lvmp0X1VKkQVPObJdwBY,4649
 nexaai/asr_impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nexaai/asr_impl/mlx_asr_impl.py,sha256=eosd8-TIWAOwV0HltmoFrLwzXHcU4jyxtncvuZE9pgA,3257
 nexaai/asr_impl/pybind_asr_impl.py,sha256=pE9Hb_hMi5yAc4MF83bLVOb8zDtreCkB3_u7XED9YpA,1516
-nexaai/binds/__init__.py,sha256=
+nexaai/binds/__init__.py,sha256=eYuay_8DDXeOUWz2_R9HFSabohxs6hvZn391t2L0Po0,104
 nexaai/binds/common_bind.cpython-310-darwin.so,sha256=7xH5yNkK6Ot-2f9vxmultr9NIeTemlmN5tHIYp8SZHs,233960
 nexaai/binds/embedder_bind.cpython-310-darwin.so,sha256=eQwf9m58Yrl_ZZzWlstBHfHD-s3UMFi0JNPty-Pc524,202032
 nexaai/binds/libcrypto.dylib,sha256=ysW8ydmDPnnNRy3AHESjJwMTFfmGDKU9eLIaiR37ca0,5091432
-nexaai/binds/libnexa_bridge.dylib,sha256=
+nexaai/binds/libnexa_bridge.dylib,sha256=fAc-fbOkwd_5rPLHJ-c01dEG5YeFjM25iqRslIIeBIw,250592
 nexaai/binds/libssl.dylib,sha256=JHPTSbRFnImmoWDO9rFdiKb0lJMT3q78VEsx-5-S0sk,889520
 nexaai/binds/llm_bind.cpython-310-darwin.so,sha256=08DuSv91LIVCS5BSfSx3Nuz088h62lm3MV2yDTSIKt4,183008
+nexaai/binds/vlm_bind.cpython-310-darwin.so,sha256=x1hxuCx09P69swsVj46FXHKUH5Ln39ISBjBscyJWESM,183000
 nexaai/binds/nexa_llama_cpp/libggml-base.dylib,sha256=GyOkHOM-5uHp7NUZ4Sr9BWak6BYpcc9aqI9A-zPnQp4,629528
 nexaai/binds/nexa_llama_cpp/libggml-cpu.so,sha256=cnLUQ7WdX-5iiDaH8v45u1kX1NUmK8DanpzSMGCpXPE,1039800
 nexaai/binds/nexa_llama_cpp/libggml-metal.so,sha256=Xhhl_tLg1xmCIQVrKjqPFaLHAlx_2wUFiwDyUk0wJ-E,713680
@@ -359,14 +360,14 @@ nexaai/tts_impl/pybind_tts_impl.py,sha256=mpn44r6pfYLIl-NrEy2dXHjGtWtNCmM7HRyxiA
 nexaai/utils/avatar_fetcher.py,sha256=bWy8ujgbOiTHFCjFxTwkn3uXbZ84PgEGUkXkR3MH4bI,3821
 nexaai/utils/decode.py,sha256=61n4Zf6c5QLyqGoctEitlI9BX3tPlP2a5aaKNHbw3T4,404
 nexaai/utils/manifest_utils.py,sha256=2waOuQErodNHhoAETQqlQgXdVes-T5A4HMb8pUIN9hg,9765
-nexaai/utils/model_manager.py,sha256=
+nexaai/utils/model_manager.py,sha256=ELmF2gVw7c0f_oGbbx0ldXrmCuJVxTZ9X-xsW59zWks,52300
 nexaai/utils/model_types.py,sha256=-DER8L4lAUR_iLS99F0r57avwqWtuN21ug5pX2p24_E,1369
 nexaai/utils/progress_tracker.py,sha256=jdUqtmPqyhwC9uSKvQcJEYETwSt-OhP4oitdJ94614o,15394
 nexaai/utils/quantization_utils.py,sha256=4gvp6UQfSO9G1FYBwnFtQspTzH9sDbi1PBXw2t1N69M,7650
 nexaai/vlm_impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nexaai/vlm_impl/mlx_vlm_impl.py,sha256=od1R1mRoIgPG3NHC7JiDlcB_YJY8aklX8Em3ZkeHNpE,10734
-nexaai/vlm_impl/pybind_vlm_impl.py,sha256=
-nexaai-1.0.
-nexaai-1.0.
-nexaai-1.0.
-nexaai-1.0.
+nexaai/vlm_impl/pybind_vlm_impl.py,sha256=n4lBTeDA9i07Fz1sq4694XkA2XQtnJnR9RaaKK0CrcM,8458
+nexaai-1.0.13rc1.dist-info/METADATA,sha256=2ZDKnf6R77BIkVCNLzAdpGXwoXEoY-GL6R3oHUJPcns,1201
+nexaai-1.0.13rc1.dist-info/WHEEL,sha256=0KYp5feZ1CMUhsfFXKpSQTbSmQbXy4mv6yPPVBXg2EM,110
+nexaai-1.0.13rc1.dist-info/top_level.txt,sha256=LRE2YERlrZk2vfuygnSzsEeqSknnZbz3Z1MHyNmBU4w,7
+nexaai-1.0.13rc1.dist-info/RECORD,,
{nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/WHEEL
File without changes
{nexaai-1.0.12.dist-info → nexaai-1.0.13rc1.dist-info}/top_level.txt
File without changes