nexaai 1.0.19rc4-cp310-cp310-macosx_14_0_universal2.whl → 1.0.19rc6-cp310-cp310-macosx_14_0_universal2.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nexaai might be problematic.

Binary file
nexaai/_version.py CHANGED
@@ -1,4 +1,4 @@
  # This file is generated by CMake from _version.py.in
  # Do not modify this file manually - it will be overwritten
 
- __version__ = "1.0.19-rc4"
+ __version__ = "1.0.19-rc6"
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
@@ -22,11 +22,6 @@ from .model_types import (
  MODEL_TYPE_TO_PIPELINE
  )
 
- MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING = {
- 'npu': 'npu',
- 'mlx': 'mlx',
- 'gguf': 'llama_cpp'
- }
 
  def process_manifest_metadata(manifest: Dict[str, Any], repo_id: str) -> Dict[str, Any]:
  """Process manifest metadata to handle null/missing fields."""
@@ -99,20 +94,12 @@ def save_download_metadata(directory_path: str, metadata: Dict[str, Any]) -> Non
  pass
 
 
- def _get_plugin_id_from_model_file_type(model_file_type: Optional[str], default: str = "llama_cpp") -> str:
- """Map model file type to PluginId."""
- return MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING.get(model_file_type, default)
-
-
- def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> Dict[str, Any]:
+ def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> Dict[str, Any]:
  """Create GGUF format manifest."""
 
  # Load existing manifest to merge GGUF files if it exists
  existing_manifest = load_nexa_manifest(directory_path)
 
- # Check if there's a downloaded nexa.manifest from the repo
- downloaded_manifest = old_metadata.get('downloaded_manifest', {})
-
  model_files = {}
  if existing_manifest and "ModelFile" in existing_manifest:
  model_files = existing_manifest["ModelFile"].copy()
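The two hunks above delete rc4's file-type-to-plugin lookup. A minimal sketch of the removed behaviour, reconstructed only from the deleted lines shown here:

    from typing import Optional

    # Reconstructed from the rc4 lines removed above.
    MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING = {
        'npu': 'npu',
        'mlx': 'mlx',
        'gguf': 'llama_cpp',
    }

    def _get_plugin_id_from_model_file_type(model_file_type: Optional[str], default: str = "llama_cpp") -> str:
        """Map a repo's model file type to a PluginId, falling back to the default."""
        return MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING.get(model_file_type, default)

    # rc4: _get_plugin_id_from_model_file_type('npu') returned 'npu'.
    # rc6: the lookup is gone; as the hunks below show, GGUF manifests always get
    # PluginId "llama_cpp" and MLX manifests always get PluginId "mlx".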
@@ -164,41 +151,10 @@ def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, ol
  "Size": file_size
  }
 
- # Determine PluginId with priority: kwargs > downloaded_manifest > model_file_type > default
- plugin_id = kwargs.get('plugin_id')
- if not plugin_id:
- model_file_type = old_metadata.get('model_file_type')
- if downloaded_manifest.get('PluginId'):
- plugin_id = downloaded_manifest.get('PluginId')
- elif model_file_type:
- plugin_id = _get_plugin_id_from_model_file_type(model_file_type)
- else:
- plugin_id = "llama_cpp"
-
- # Determine ModelType with priority: kwargs > downloaded_manifest > pipeline_tag mapping
- model_type = kwargs.get('model_type')
- if not model_type:
- if downloaded_manifest.get('ModelType'):
- model_type = downloaded_manifest.get('ModelType')
- else:
- model_type = PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other")
-
- # Determine ModelName with priority: kwargs > downloaded_manifest > empty string
- model_name = kwargs.get('model_name')
- if not model_name:
- model_name = downloaded_manifest.get('ModelName', '')
-
- # Get DeviceId and MinSDKVersion from kwargs or default to empty string
- device_id = kwargs.get('device_id', '')
- min_sdk_version = kwargs.get('min_sdk_version', '')
-
  manifest = {
  "Name": repo_id,
- "ModelName": model_name,
- "ModelType": model_type,
- "PluginId": plugin_id,
- "DeviceId": device_id,
- "MinSDKVersion": min_sdk_version,
+ "ModelType": PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other"),
+ "PluginId": "llama_cpp",
  "ModelFile": model_files,
  "MMProjFile": mmproj_file,
  "TokenizerFile": {
@@ -216,15 +172,12 @@ def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, ol
  return manifest
 
 
- def create_mlx_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> Dict[str, Any]:
+ def create_mlx_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> Dict[str, Any]:
  """Create MLX format manifest."""
 
  # Load existing manifest to merge MLX files if it exists
  existing_manifest = load_nexa_manifest(directory_path)
 
- # Check if there's a downloaded nexa.manifest from the repo
- downloaded_manifest = old_metadata.get('downloaded_manifest', {})
-
  model_files = {}
  extra_files = []
 
@@ -280,41 +233,10 @@ def create_mlx_manifest(repo_id: str, files: List[str], directory_path: str, old
  "Size": file_size
  })
 
- # Determine PluginId with priority: kwargs > downloaded_manifest > model_file_type > default
- plugin_id = kwargs.get('plugin_id')
- if not plugin_id:
- model_file_type = old_metadata.get('model_file_type')
- if downloaded_manifest.get('PluginId'):
- plugin_id = downloaded_manifest.get('PluginId')
- elif model_file_type:
- plugin_id = _get_plugin_id_from_model_file_type(model_file_type)
- else:
- plugin_id = "mlx"
-
- # Determine ModelType with priority: kwargs > downloaded_manifest > pipeline_tag mapping
- model_type = kwargs.get('model_type')
- if not model_type:
- if downloaded_manifest.get('ModelType'):
- model_type = downloaded_manifest.get('ModelType')
- else:
- model_type = PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other")
-
- # Determine ModelName with priority: kwargs > downloaded_manifest > empty string
- model_name = kwargs.get('model_name')
- if not model_name:
- model_name = downloaded_manifest.get('ModelName', '')
-
- # Get DeviceId and MinSDKVersion from kwargs or default to empty string
- device_id = kwargs.get('device_id', '')
- min_sdk_version = kwargs.get('min_sdk_version', '')
-
  manifest = {
  "Name": repo_id,
- "ModelName": model_name,
- "ModelType": model_type,
- "PluginId": plugin_id,
- "DeviceId": device_id,
- "MinSDKVersion": min_sdk_version,
+ "ModelType": PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other"),
+ "PluginId": "mlx",
  "ModelFile": model_files,
  "MMProjFile": mmproj_file,
  "TokenizerFile": {
@@ -346,7 +268,7 @@ def detect_model_type(files: List[str]) -> str:
  return "mlx"
 
 
- def create_manifest_from_files(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> Dict[str, Any]:
+ def create_manifest_from_files(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> Dict[str, Any]:
  """
  Create appropriate manifest format based on detected model type.
 
@@ -357,7 +279,6 @@ def create_manifest_from_files(repo_id: str, files: List[str], directory_path: s
  old_metadata: Existing metadata (pipeline_tag, download_time, avatar_url)
  is_mmproj: Whether the downloaded file is an mmproj file
  file_name: The specific file(s) that were downloaded (None if entire repo was downloaded)
- **kwargs: Additional metadata including plugin_id, model_name, model_type, device_id, min_sdk_version
 
  Returns:
  Dict containing the appropriate manifest format
@@ -365,12 +286,12 @@ def create_manifest_from_files(repo_id: str, files: List[str], directory_path: s
  model_type = detect_model_type(files)
 
  if model_type == "gguf":
- return create_gguf_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name, **kwargs)
+ return create_gguf_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name)
  else: # mlx or other
- return create_mlx_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name, **kwargs)
+ return create_mlx_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name)
 
 
- def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> None:
+ def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> None:
  """
  Create and save manifest based on files found in the directory.
 
@@ -380,7 +301,6 @@ def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata
  old_metadata: Existing metadata to preserve
  is_mmproj: Whether the downloaded file is an mmproj file
  file_name: The specific file(s) that were downloaded (None if entire repo was downloaded)
- **kwargs: Additional metadata including plugin_id, model_name, model_type, device_id, min_sdk_version
  """
  # Get list of files in the directory
  files = []
@@ -394,7 +314,7 @@ def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata
  pass
 
  # Create appropriate manifest
- manifest = create_manifest_from_files(repo_id, files, local_dir, old_metadata, is_mmproj, file_name, **kwargs)
+ manifest = create_manifest_from_files(repo_id, files, local_dir, old_metadata, is_mmproj, file_name)
 
  # Save manifest
  save_download_metadata(local_dir, manifest)
@@ -595,7 +595,6 @@ class HuggingFaceDownloader:
  self.enable_transfer = enable_transfer
  self.original_hf_transfer = None
  self.endpoint = endpoint # Store endpoint for avatar fetching
- self._model_info_cache: Dict[str, Any] = {} # Cache for model_info results
 
  def _create_repo_directory(self, local_dir: str, repo_id: str) -> str:
  """Create a directory structure for the repository following HF convention."""
@@ -619,32 +618,6 @@ class HuggingFaceDownloader:
  os.makedirs(local_dir, exist_ok=True)
  return local_dir
 
- def _get_model_info_cached(self, repo_id: str, files_metadata: bool = False):
- """Get model info with caching to avoid rate limiting.
-
- Args:
- repo_id: Repository ID
- files_metadata: Whether to include files metadata
-
- Returns:
- Model info object from HuggingFace API
- """
- # Create cache key based on repo_id and files_metadata flag
- cache_key = f"{repo_id}:files={files_metadata}"
-
- # Return cached result if available
- if cache_key in self._model_info_cache:
- return self._model_info_cache[cache_key]
-
- # Fetch from API and cache the result
- try:
- info = self.api.model_info(repo_id, files_metadata=files_metadata, token=self.token)
- self._model_info_cache[cache_key] = info
- return info
- except Exception:
- # Don't cache errors, re-raise
- raise
-
  def _get_repo_info_for_progress(
  self,
  repo_id: str,
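The _get_model_info_cached method removed above memoized model_info results per repository (its docstring cites rate limiting); the hunks that follow switch every call site back to a direct self.api.model_info(...) call. A minimal sketch of that memoization pattern, reconstructed from the removed lines (the class name here is illustrative):

    from typing import Any, Dict

    class ModelInfoCacheSketch:
        """Illustration of the caching idea the removed rc4 method implemented."""

        def __init__(self, api, token=None):
            self.api = api            # expected to expose model_info(repo_id, files_metadata=..., token=...)
            self.token = token
            self._model_info_cache: Dict[str, Any] = {}

        def get_model_info(self, repo_id: str, files_metadata: bool = False):
            # Cache key combines repo_id and the files_metadata flag, as in the removed code.
            cache_key = f"{repo_id}:files={files_metadata}"
            if cache_key in self._model_info_cache:
                return self._model_info_cache[cache_key]
            # Errors are not cached; they propagate to the caller.
            info = self.api.model_info(repo_id, files_metadata=files_metadata, token=self.token)
            self._model_info_cache[cache_key] = info
            return info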
@@ -652,7 +625,7 @@ class HuggingFaceDownloader:
  ) -> tuple[int, int]:
  """Get total repository size and file count for progress tracking."""
  try:
- info = self._get_model_info_cached(repo_id, files_metadata=True)
+ info = self.api.model_info(repo_id, files_metadata=True, token=self.token)
 
  total_size = 0
  file_count = 0
@@ -747,7 +720,7 @@ class HuggingFaceDownloader:
  ):
  """Validate repository exists and get info."""
  try:
- info = self._get_model_info_cached(repo_id, files_metadata=False)
+ info = self.api.model_info(repo_id, token=self.token)
  return info
  except RepositoryNotFoundError:
  error_msg = f"Repository '{repo_id}' not found. Please check the repository ID."
@@ -816,36 +789,6 @@ class HuggingFaceDownloader:
  # If no expected size, just check that file is not empty
  return os.path.getsize(file_path) > 0
 
- def _extract_model_file_type_from_tags(self, repo_id: str) -> Optional[str]:
- """Extract model file type from repo tags with priority: NPU > MLX > GGUF."""
- try:
- info = self._get_model_info_cached(repo_id, files_metadata=False)
- if hasattr(info, 'tags') and info.tags:
- # Convert tags to lowercase for case-insensitive matching
- tags_lower = [tag.lower() for tag in info.tags]
-
- # Check with priority: NPU > MLX > GGUF
- if 'npu' in tags_lower:
- return 'npu'
- elif 'mlx' in tags_lower:
- return 'mlx'
- elif 'gguf' in tags_lower:
- return 'gguf'
- except Exception:
- pass
- return None
-
- def _load_downloaded_manifest(self, local_dir: str) -> Dict[str, Any]:
- """Load nexa.manifest from the downloaded repository if it exists."""
- manifest_path = os.path.join(local_dir, 'nexa.manifest')
- if os.path.exists(manifest_path):
- try:
- with open(manifest_path, 'r', encoding='utf-8') as f:
- return json.load(f)
- except (json.JSONDecodeError, IOError):
- pass
- return {}
-
  def _fetch_and_save_metadata(self, repo_id: str, local_dir: str, is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> None:
  """Fetch model info and save metadata after successful download."""
  # Initialize metadata with defaults to ensure manifest is always created
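The two helpers removed above fed the fields that rc4 resolved by priority: one classified a repository by its Hub tags (NPU before MLX before GGUF, case-insensitive), the other reused a nexa.manifest file shipped inside the downloaded repo. A compact sketch of the tag classification, reconstructed from the removed lines (the function name is illustrative):

    from typing import List, Optional

    def classify_repo_by_tags(tags: List[str]) -> Optional[str]:
        # rc4 priority: 'npu' > 'mlx' > 'gguf'; None when no tag matches.
        tags_lower = [tag.lower() for tag in tags]
        for file_type in ('npu', 'mlx', 'gguf'):
            if file_type in tags_lower:
                return file_type
        return None

    print(classify_repo_by_tags(['MLX', 'GGUF']))  # 'mlx' (hypothetical tag list)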
@@ -857,8 +800,8 @@ class HuggingFaceDownloader:
 
  # Try to fetch additional metadata, but don't let failures prevent manifest creation
  try:
- # Fetch model info to get pipeline_tag (using cache)
- info = self._get_model_info_cached(repo_id, files_metadata=False)
+ # Fetch model info to get pipeline_tag
+ info = self.api.model_info(repo_id, token=self.token)
  if hasattr(info, 'pipeline_tag') and info.pipeline_tag:
  old_metadata['pipeline_tag'] = info.pipeline_tag
  except Exception as e:
@@ -867,21 +810,11 @@ class HuggingFaceDownloader:
  # Use input avater url if provided
  old_metadata['avatar_url'] = kwargs.get('avatar_url')
-
- # Extract model file type from tags
- model_file_type = self._extract_model_file_type_from_tags(repo_id)
- if model_file_type:
- old_metadata['model_file_type'] = model_file_type
-
- # Load existing nexa.manifest from downloaded repo (if exists)
- downloaded_manifest = self._load_downloaded_manifest(local_dir)
- if downloaded_manifest:
- old_metadata['downloaded_manifest'] = downloaded_manifest
-
 
 
  # CRITICAL: Always create the manifest file, regardless of metadata fetch failures
  try:
- save_manifest_with_files_metadata(repo_id, local_dir, old_metadata, is_mmproj, file_name, **kwargs)
+ save_manifest_with_files_metadata(repo_id, local_dir, old_metadata, is_mmproj, file_name)
  print(f"[OK] Successfully created nexa.manifest for {repo_id}")
  except Exception as e:
  # This is critical - if manifest creation fails, we should know about it
@@ -890,11 +823,8 @@ class HuggingFaceDownloader:
  try:
  minimal_manifest = {
  "Name": repo_id,
- "ModelName": kwargs.get('model_name', ''),
- "ModelType": kwargs.get('model_type', 'other'),
- "PluginId": kwargs.get('plugin_id', 'unknown'),
- "DeviceId": kwargs.get('device_id', ''),
- "MinSDKVersion": kwargs.get('min_sdk_version', ''),
+ "ModelType": "other",
+ "PluginId": "unknown",
  "ModelFile": {},
  "MMProjFile": {"Name": "", "Downloaded": False, "Size": 0},
  "TokenizerFile": {"Name": "", "Downloaded": False, "Size": 0},
@@ -1206,12 +1136,6 @@ def download_from_huggingface(
  is_mmproj (bool, optional): Whether the file being downloaded is an mmproj file. Only used when
  file_name is not None. If None, defaults to True if 'mmproj' is in
  the filename, False otherwise.
- **kwargs: Additional parameters including:
- - plugin_id (str): Override PluginId in nexa.manifest (highest priority)
- - model_name (str): Override ModelName in nexa.manifest (highest priority)
- - model_type (str): Override ModelType in nexa.manifest (highest priority)
- - device_id (str): Set DeviceId in nexa.manifest (highest priority)
- - min_sdk_version (str): Set MinSDKVersion in nexa.manifest (highest priority)
 
  Returns:
  str: Path to the downloaded file or directory
@@ -13,8 +13,6 @@ class ModelTypeMapping(Enum):
  """Enum for mapping HuggingFace pipeline_tag to our ModelType."""
  TEXT_GENERATION = ("text-generation", "llm")
  IMAGE_TEXT_TO_TEXT = ("image-text-to-text", "vlm")
- ANY_TO_ANY = ("any-to-any", "a2a")
- AUTOMATIC_SPEECH_RECOGNITION = ("automatic-speech-recognition", "asr")
 
  def __init__(self, pipeline_tag: str, model_type: str):
  self.pipeline_tag = pipeline_tag
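Dropping ANY_TO_ANY and AUTOMATIC_SPEECH_RECOGNITION shrinks the pipeline-tag mapping the manifest code consults. A small sketch of the consequence, assuming PIPELINE_TO_MODEL_TYPE is built from this enum's (pipeline_tag, model_type) pairs as the remaining members suggest:

    PIPELINE_TO_MODEL_TYPE_RC4 = {
        'text-generation': 'llm',
        'image-text-to-text': 'vlm',
        'any-to-any': 'a2a',
        'automatic-speech-recognition': 'asr',
    }
    PIPELINE_TO_MODEL_TYPE_RC6 = {
        'text-generation': 'llm',
        'image-text-to-text': 'vlm',
    }

    # The manifest builders fall back to "other" for unmapped pipeline tags:
    tag = 'automatic-speech-recognition'
    print(PIPELINE_TO_MODEL_TYPE_RC4.get(tag, 'other'))  # 'asr' under rc4
    print(PIPELINE_TO_MODEL_TYPE_RC6.get(tag, 'other'))  # 'other' under rc6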
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nexaai
- Version: 1.0.19rc4
+ Version: 1.0.19rc6
  Summary: Python bindings for NexaSDK C-lib backend
  Author-email: "Nexa AI, Inc." <dev@nexa.ai>
  Project-URL: Homepage, https://github.com/NexaAI/nexasdk-bridge
@@ -1,6 +1,6 @@
  nexaai/__init__.py,sha256=L8oB7GFZZMGnUpCg0PecDbI_ycKuQak-ZEJ4Y12_QIw,2184
- nexaai/_stub.cpython-310-darwin.so,sha256=sbcbxdZLxGsZlBpq8K-jwZ7oA-dPXiGma9GJ9DKu6nE,66768
- nexaai/_version.py,sha256=bdMExnVp4tRCOyAvrkR7ukGCvEqFczr_8CNR5v7QpyM,143
+ nexaai/_stub.cpython-310-darwin.so,sha256=QLOXPLVTc4PYcr7qk-P-XQoXtrI9t49n-e7JRvi2zFs,66768
+ nexaai/_version.py,sha256=bLummKnPZ9cq490e2dV77gOzlVFq4FgDMlTOequMSCU,143
  nexaai/asr.py,sha256=NljMXDErwPNMOPaRkJZMEDka9Nk8xyur7L8i924TStY,2054
  nexaai/base.py,sha256=N8PRgDFA-XPku2vWnQIofQ7ipz3pPlO6f8YZGnuhquE,982
  nexaai/common.py,sha256=Y0NJNLTi4Nq4x1WL6PQsSvGUto0eGmWhjpsC6jcekfA,3444
@@ -19,16 +19,16 @@ nexaai/asr_impl/pybind_asr_impl.py,sha256=pE9Hb_hMi5yAc4MF83bLVOb8zDtreCkB3_u7XE
  nexaai/binds/__init__.py,sha256=eYuay_8DDXeOUWz2_R9HFSabohxs6hvZn391t2L0Po0,104
  nexaai/binds/common_bind.cpython-310-darwin.so,sha256=zxJuD0nSV--VZKxBfWZUavU7_bHj_JTi0FhkjvG4VJw,235264
  nexaai/binds/embedder_bind.cpython-310-darwin.so,sha256=tPa0c0Dv_GiW66fgmAGWGCHXRGNApznqoQS0eQx9GFM,202064
- nexaai/binds/libnexa_bridge.dylib,sha256=z6Yys2ObV_NQ5kqPGZ_Qyg2hYlTrQLEwC5uMvwRF3wg,251192
+ nexaai/binds/libnexa_bridge.dylib,sha256=uLl4C-hd0fpjFuebXwYbMxbTvvnTFg-xFVCk8emle-s,251192
  nexaai/binds/llm_bind.cpython-310-darwin.so,sha256=TAWfa1Hzq00TjtC1xVsiAeLp6hv2LrL5afDz4omUghc,182784
  nexaai/binds/vlm_bind.cpython-310-darwin.so,sha256=nd6eG_m2EiPthzkSZ97hlXWUOZQir4cQfFJZ4p6eR2U,182704
- nexaai/binds/nexa_llama_cpp/libggml-base.dylib,sha256=O4FCoEzwl90vT9FCmZKr-KyakflGYqo0ztmAgCdUzcM,650944
- nexaai/binds/nexa_llama_cpp/libggml-cpu.so,sha256=h9HVt-TibwTN-QzzKNj69KFz9uGfSVg62pHQMwF0bWY,694288
- nexaai/binds/nexa_llama_cpp/libggml-metal.so,sha256=QZ-ArPROGNribtgQ-LiQOFoEzUgQ3IVF2X_LXqNcu3o,675088
- nexaai/binds/nexa_llama_cpp/libggml.dylib,sha256=AUoIx9MkSAjbeVmkUIUCyJlg7zGKlpc9K8Y0BAfORqE,58640
- nexaai/binds/nexa_llama_cpp/libllama.dylib,sha256=oB-x254o2vSQefwUwjF9iGkgCT9EszhGiAO_w50gVpw,1823792
- nexaai/binds/nexa_llama_cpp/libmtmd.dylib,sha256=JBTkkr1lDX8SN0-MSL4FIYtPKZcR96sUnayEnXYSk4U,606192
- nexaai/binds/nexa_llama_cpp/libnexa_plugin.dylib,sha256=2CWwI1EzrvK2ZYiGMJD5GCtvYqLw2b4UWKghxee4QcY,1903240
+ nexaai/binds/nexa_llama_cpp/libggml-base.dylib,sha256=JM4oOkie1su0ES5hMdtILeQHlRukRzH1vTleTupUXhg,650736
+ nexaai/binds/nexa_llama_cpp/libggml-cpu.so,sha256=qiYxbTe4Nt7n36zJVvq3zovgSZEmrN2is6gzTern7UI,677728
+ nexaai/binds/nexa_llama_cpp/libggml-metal.so,sha256=zfaX7rIBYQazH2lf-vza007BMhPTK1ASd2T0HLLIA4E,673104
+ nexaai/binds/nexa_llama_cpp/libggml.dylib,sha256=aOTj_6RrAMkfDO0ZI28_3nfcC-l4Y3dRCiS3C0d0_eI,58592
+ nexaai/binds/nexa_llama_cpp/libllama.dylib,sha256=RkBd5usb8RvEIOamvxCW3UvMauI5bC66G_n6hw83NpY,1786128
+ nexaai/binds/nexa_llama_cpp/libmtmd.dylib,sha256=o6mQqefzQNF0CS4j6odwJKj0gkXm15hIxwlNt88FOn4,605248
+ nexaai/binds/nexa_llama_cpp/libnexa_plugin.dylib,sha256=f-ctmVHSL5x1LKLr0StYM5WscvUW_5olJ7al3ciVmmQ,1863000
  nexaai/binds/nexa_mlx/libnexa_plugin.dylib,sha256=KKM4j2JmHQrCQ1LyqEDRxwADRXKi_HM0IY5I5KFl2e0,659288
  nexaai/binds/nexa_mlx/py-lib/ml.py,sha256=DKXVOAfh8cg7KTKljh7jpcPwfQFNigc6uv_ZXF6lse8,23977
  nexaai/binds/nexa_mlx/py-lib/profiling.py,sha256=Dc-mybFwBdCIKFWL7CbSHjkOJGAoYHG7r_e_XPhzwBU,9361
@@ -184,10 +184,10 @@ nexaai/binds/nexa_mlx/py-lib/mlx_audio/tts/tests/test_interpolate.py,sha256=9dNm
  nexaai/binds/nexa_mlx/py-lib/mlx_audio/tts/tests/test_models.py,sha256=12RiOfPtSZQj5g5JM-yCJk3uGQfM3OdmRiPt5uUDE4E,35096
  nexaai/binds/nexa_nexaml/libfftw3.3.dylib,sha256=Ul6NlZv0UhmXnnqAyFxCRCw-8pOsV5e4rc_9-wxrRJg,693424
  nexaai/binds/nexa_nexaml/libfftw3f.3.dylib,sha256=SKZE35Ly9R_nbMt7oWpObydvpK3HIo9-UhUA2KkeQyk,693920
- nexaai/binds/nexa_nexaml/libggml-base.dylib,sha256=O4FCoEzwl90vT9FCmZKr-KyakflGYqo0ztmAgCdUzcM,650944
- nexaai/binds/nexa_nexaml/libggml-cpu.so,sha256=h9HVt-TibwTN-QzzKNj69KFz9uGfSVg62pHQMwF0bWY,694288
- nexaai/binds/nexa_nexaml/libggml-metal.so,sha256=QZ-ArPROGNribtgQ-LiQOFoEzUgQ3IVF2X_LXqNcu3o,675088
- nexaai/binds/nexa_nexaml/libggml.dylib,sha256=AUoIx9MkSAjbeVmkUIUCyJlg7zGKlpc9K8Y0BAfORqE,58640
+ nexaai/binds/nexa_nexaml/libggml-base.dylib,sha256=JM4oOkie1su0ES5hMdtILeQHlRukRzH1vTleTupUXhg,650736
+ nexaai/binds/nexa_nexaml/libggml-cpu.so,sha256=qiYxbTe4Nt7n36zJVvq3zovgSZEmrN2is6gzTern7UI,677728
+ nexaai/binds/nexa_nexaml/libggml-metal.so,sha256=zfaX7rIBYQazH2lf-vza007BMhPTK1ASd2T0HLLIA4E,673104
+ nexaai/binds/nexa_nexaml/libggml.dylib,sha256=aOTj_6RrAMkfDO0ZI28_3nfcC-l4Y3dRCiS3C0d0_eI,58592
  nexaai/binds/nexa_nexaml/libmp3lame.0.dylib,sha256=G-21u3MAZ2hiv1fbPEpOUGdToZfLtz2XI6BwW9xIqIA,305136
  nexaai/binds/nexa_nexaml/libmpg123.0.dylib,sha256=L4AgLcdcjGOQkEovUGDCNlH959500i0GkKBhqiHxBY4,306464
  nexaai/binds/nexa_nexaml/libnexa-mm-process.dylib,sha256=a_63nw3Fmdlw1KoxsfwS76ZXZgqQhw4sQ8rE42hpBwo,10205704
@@ -547,15 +547,15 @@ nexaai/tts_impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nexaai/tts_impl/mlx_tts_impl.py,sha256=i_uNPdvlXYtL3e01oKjDlP9jgkWCRt1bBHsExaaiJi8,3101
  nexaai/tts_impl/pybind_tts_impl.py,sha256=mpn44r6pfYLIl-NrEy2dXHjGtWtNCmM7HRyxiANxUI4,1444
  nexaai/utils/decode.py,sha256=61n4Zf6c5QLyqGoctEitlI9BX3tPlP2a5aaKNHbw3T4,404
- nexaai/utils/manifest_utils.py,sha256=Teji_i_ZG0IkiKWNKKwCq1j_a2k406db7bnd_MC4nu0,16195
- nexaai/utils/model_manager.py,sha256=NnbPv1iuwo6T523gLsWjnff-gGvPGUjez-rFg8-ffpE,59568
- nexaai/utils/model_types.py,sha256=9Q95icWn22jIUoY2eIEdIiXBBGJSGZlXXSbox0xXWNQ,1483
+ nexaai/utils/manifest_utils.py,sha256=PA84obFP7W1dlneURlIHIzJjWIF5dbDHGdNeHouUy68,12659
+ nexaai/utils/model_manager.py,sha256=_WKJP7YVk7q587OoOWwDNWVR-8tbKZkmHKjcCZN8Q4M,55979
+ nexaai/utils/model_types.py,sha256=-DER8L4lAUR_iLS99F0r57avwqWtuN21ug5pX2p24_E,1369
  nexaai/utils/progress_tracker.py,sha256=jdUqtmPqyhwC9uSKvQcJEYETwSt-OhP4oitdJ94614o,15394
  nexaai/utils/quantization_utils.py,sha256=FYcNSAKGlBqFDUTx3jSKOr2lnq4nyiyC0ZG8oSxFwiU,7825
  nexaai/vlm_impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nexaai/vlm_impl/mlx_vlm_impl.py,sha256=pLtWm_ckz8a0U-AtAOMVseFDO4OVPvHyYO2KlfBaGYk,10833
  nexaai/vlm_impl/pybind_vlm_impl.py,sha256=FAbhpRJzHgI78r0mUvKybO97R1szvNhH0aTn_I52oT4,8597
- nexaai-1.0.19rc4.dist-info/METADATA,sha256=spQ_-5okX3qza8-7RbMnQHaO--NZI7ywA75F82fz3kY,1201
- nexaai-1.0.19rc4.dist-info/WHEEL,sha256=T2p57lol9__xkoU6aJTyN1Pm43ZpRU3q6km7mIbrAMs,114
- nexaai-1.0.19rc4.dist-info/top_level.txt,sha256=LRE2YERlrZk2vfuygnSzsEeqSknnZbz3Z1MHyNmBU4w,7
- nexaai-1.0.19rc4.dist-info/RECORD,,
+ nexaai-1.0.19rc6.dist-info/METADATA,sha256=4fTTBbrhX1jkcaC-ZGt7DbDp0zA96DHJ1ZpNHmMlAoM,1201
+ nexaai-1.0.19rc6.dist-info/WHEEL,sha256=T2p57lol9__xkoU6aJTyN1Pm43ZpRU3q6km7mIbrAMs,114
+ nexaai-1.0.19rc6.dist-info/top_level.txt,sha256=LRE2YERlrZk2vfuygnSzsEeqSknnZbz3Z1MHyNmBU4w,7
+ nexaai-1.0.19rc6.dist-info/RECORD,,