nexaai 1.0.19rc2__cp310-cp310-macosx_14_0_universal2.whl → 1.0.19rc3__cp310-cp310-macosx_14_0_universal2.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Binary file
nexaai/_version.py CHANGED
@@ -1,4 +1,4 @@
  # This file is generated by CMake from _version.py.in
  # Do not modify this file manually - it will be overwritten

- __version__ = "1.0.19-rc2"
+ __version__ = "1.0.19-rc3"
Binary file
@@ -22,6 +22,11 @@ from .model_types import (
      MODEL_TYPE_TO_PIPELINE
  )

+ MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING = {
+     'npu': 'npu',
+     'mlx': 'mlx',
+     'gguf': 'llama_cpp'
+ }

  def process_manifest_metadata(manifest: Dict[str, Any], repo_id: str) -> Dict[str, Any]:
      """Process manifest metadata to handle null/missing fields."""
@@ -94,12 +99,20 @@ def save_download_metadata(directory_path: str, metadata: Dict[str, Any]) -> Non
          pass


- def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> Dict[str, Any]:
+ def _get_plugin_id_from_model_file_type(model_file_type: Optional[str], default: str = "llama_cpp") -> str:
+     """Map model file type to PluginId."""
+     return MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING.get(model_file_type, default)
+
+
+ def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> Dict[str, Any]:
      """Create GGUF format manifest."""

      # Load existing manifest to merge GGUF files if it exists
      existing_manifest = load_nexa_manifest(directory_path)

+     # Check if there's a downloaded nexa.manifest from the repo
+     downloaded_manifest = old_metadata.get('downloaded_manifest', {})
+
      model_files = {}
      if existing_manifest and "ModelFile" in existing_manifest:
          model_files = existing_manifest["ModelFile"].copy()
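Taken together, the new module-level mapping and _get_plugin_id_from_model_file_type amount to a dictionary lookup with a fallback default. A minimal standalone sketch (names copied from the diff above; the expected results in the asserts are inferred from the mapping itself, not from package documentation):

    from typing import Optional

    MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING = {
        'npu': 'npu',
        'mlx': 'mlx',
        'gguf': 'llama_cpp'
    }

    def _get_plugin_id_from_model_file_type(model_file_type: Optional[str], default: str = "llama_cpp") -> str:
        """Map a model file type ('npu' / 'mlx' / 'gguf') to a PluginId, falling back to `default`."""
        return MODEL_FILE_TYPE_TO_PLUGIN_ID_MAPPING.get(model_file_type, default)

    # Illustrative lookups:
    assert _get_plugin_id_from_model_file_type('gguf') == 'llama_cpp'
    assert _get_plugin_id_from_model_file_type(None) == 'llama_cpp'   # unknown/missing type -> default
    assert _get_plugin_id_from_model_file_type('mlx', default='mlx') == 'mlx'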
@@ -151,10 +164,41 @@ def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, ol
              "Size": file_size
          }

+     # Determine PluginId with priority: kwargs > downloaded_manifest > model_file_type > default
+     plugin_id = kwargs.get('plugin_id')
+     if not plugin_id:
+         model_file_type = old_metadata.get('model_file_type')
+         if downloaded_manifest.get('PluginId'):
+             plugin_id = downloaded_manifest.get('PluginId')
+         elif model_file_type:
+             plugin_id = _get_plugin_id_from_model_file_type(model_file_type)
+         else:
+             plugin_id = "llama_cpp"
+
+     # Determine ModelType with priority: kwargs > downloaded_manifest > pipeline_tag mapping
+     model_type = kwargs.get('model_type')
+     if not model_type:
+         if downloaded_manifest.get('ModelType'):
+             model_type = downloaded_manifest.get('ModelType')
+         else:
+             model_type = PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other")
+
+     # Determine ModelName with priority: kwargs > downloaded_manifest > empty string
+     model_name = kwargs.get('model_name')
+     if not model_name:
+         model_name = downloaded_manifest.get('ModelName', '')
+
+     # Get DeviceId and MinSDKVersion from kwargs or default to empty string
+     device_id = kwargs.get('device_id', '')
+     min_sdk_version = kwargs.get('min_sdk_version', '')
+
      manifest = {
          "Name": repo_id,
-         "ModelType": PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other"),
-         "PluginId": "llama_cpp",
+         "ModelName": model_name,
+         "ModelType": model_type,
+         "PluginId": plugin_id,
+         "DeviceId": device_id,
+         "MinSDKVersion": min_sdk_version,
          "ModelFile": model_files,
          "MMProjFile": mmproj_file,
          "TokenizerFile": {
@@ -172,12 +216,15 @@ def create_gguf_manifest(repo_id: str, files: List[str], directory_path: str, old
      return manifest


- def create_mlx_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> Dict[str, Any]:
+ def create_mlx_manifest(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> Dict[str, Any]:
      """Create MLX format manifest."""

      # Load existing manifest to merge MLX files if it exists
      existing_manifest = load_nexa_manifest(directory_path)

+     # Check if there's a downloaded nexa.manifest from the repo
+     downloaded_manifest = old_metadata.get('downloaded_manifest', {})
+
      model_files = {}
      extra_files = []

@@ -233,10 +280,41 @@ def create_mlx_manifest(repo_id: str, files: List[str], directory_path: str, old
              "Size": file_size
          })

+     # Determine PluginId with priority: kwargs > downloaded_manifest > model_file_type > default
+     plugin_id = kwargs.get('plugin_id')
+     if not plugin_id:
+         model_file_type = old_metadata.get('model_file_type')
+         if downloaded_manifest.get('PluginId'):
+             plugin_id = downloaded_manifest.get('PluginId')
+         elif model_file_type:
+             plugin_id = _get_plugin_id_from_model_file_type(model_file_type)
+         else:
+             plugin_id = "mlx"
+
+     # Determine ModelType with priority: kwargs > downloaded_manifest > pipeline_tag mapping
+     model_type = kwargs.get('model_type')
+     if not model_type:
+         if downloaded_manifest.get('ModelType'):
+             model_type = downloaded_manifest.get('ModelType')
+         else:
+             model_type = PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other")
+
+     # Determine ModelName with priority: kwargs > downloaded_manifest > empty string
+     model_name = kwargs.get('model_name')
+     if not model_name:
+         model_name = downloaded_manifest.get('ModelName', '')
+
+     # Get DeviceId and MinSDKVersion from kwargs or default to empty string
+     device_id = kwargs.get('device_id', '')
+     min_sdk_version = kwargs.get('min_sdk_version', '')
+
      manifest = {
          "Name": repo_id,
-         "ModelType": PIPELINE_TO_MODEL_TYPE.get(old_metadata.get('pipeline_tag'), "other"),
-         "PluginId": "mlx",
+         "ModelName": model_name,
+         "ModelType": model_type,
+         "PluginId": plugin_id,
+         "DeviceId": device_id,
+         "MinSDKVersion": min_sdk_version,
          "ModelFile": model_files,
          "MMProjFile": mmproj_file,
          "TokenizerFile": {
@@ -268,7 +346,7 @@ def detect_model_type(files: List[str]) -> str:
          return "mlx"


- def create_manifest_from_files(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> Dict[str, Any]:
+ def create_manifest_from_files(repo_id: str, files: List[str], directory_path: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> Dict[str, Any]:
      """
      Create appropriate manifest format based on detected model type.

@@ -279,6 +357,7 @@ def create_manifest_from_files(repo_id: str, files: List[str], directory_path: s
          old_metadata: Existing metadata (pipeline_tag, download_time, avatar_url)
          is_mmproj: Whether the downloaded file is an mmproj file
          file_name: The specific file(s) that were downloaded (None if entire repo was downloaded)
+         **kwargs: Additional metadata including plugin_id, model_name, model_type, device_id, min_sdk_version

      Returns:
          Dict containing the appropriate manifest format
@@ -286,12 +365,12 @@ def create_manifest_from_files(repo_id: str, files: List[str], directory_path: s
      model_type = detect_model_type(files)

      if model_type == "gguf":
-         return create_gguf_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name)
+         return create_gguf_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name, **kwargs)
      else: # mlx or other
-         return create_mlx_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name)
+         return create_mlx_manifest(repo_id, files, directory_path, old_metadata, is_mmproj, file_name, **kwargs)


- def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None) -> None:
+ def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata: Dict[str, Any], is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> None:
      """
      Create and save manifest based on files found in the directory.

@@ -301,6 +380,7 @@ def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata
          old_metadata: Existing metadata to preserve
          is_mmproj: Whether the downloaded file is an mmproj file
          file_name: The specific file(s) that were downloaded (None if entire repo was downloaded)
+         **kwargs: Additional metadata including plugin_id, model_name, model_type, device_id, min_sdk_version
      """
      # Get list of files in the directory
      files = []
@@ -314,7 +394,7 @@ def save_manifest_with_files_metadata(repo_id: str, local_dir: str, old_metadata
          pass

      # Create appropriate manifest
-     manifest = create_manifest_from_files(repo_id, files, local_dir, old_metadata, is_mmproj, file_name)
+     manifest = create_manifest_from_files(repo_id, files, local_dir, old_metadata, is_mmproj, file_name, **kwargs)

      # Save manifest
      save_download_metadata(local_dir, manifest)
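With the signatures above, the extra fields only need to be supplied at the top-level call; they are forwarded unchanged through create_manifest_from_files into whichever builder matches the detected format. A hypothetical call site (all argument values here are placeholders, not taken from the package):

    save_manifest_with_files_metadata(
        repo_id="org/example-model-GGUF",                # placeholder repo id
        local_dir="/tmp/models/org/example-model-GGUF",  # placeholder path
        old_metadata={"pipeline_tag": "text-generation"},
        is_mmproj=False,
        file_name=None,                                  # None -> the whole repo was downloaded
        # Optional overrides forwarded via **kwargs into the generated nexa.manifest:
        plugin_id="llama_cpp",
        model_name="Example Model",
        model_type="llm",
        device_id="",
        min_sdk_version="",
    )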
@@ -595,6 +595,7 @@ class HuggingFaceDownloader:
          self.enable_transfer = enable_transfer
          self.original_hf_transfer = None
          self.endpoint = endpoint # Store endpoint for avatar fetching
+         self._model_info_cache: Dict[str, Any] = {} # Cache for model_info results

      def _create_repo_directory(self, local_dir: str, repo_id: str) -> str:
          """Create a directory structure for the repository following HF convention."""
@@ -618,6 +619,32 @@ class HuggingFaceDownloader:
          os.makedirs(local_dir, exist_ok=True)
          return local_dir

+     def _get_model_info_cached(self, repo_id: str, files_metadata: bool = False):
+         """Get model info with caching to avoid rate limiting.
+
+         Args:
+             repo_id: Repository ID
+             files_metadata: Whether to include files metadata
+
+         Returns:
+             Model info object from HuggingFace API
+         """
+         # Create cache key based on repo_id and files_metadata flag
+         cache_key = f"{repo_id}:files={files_metadata}"
+
+         # Return cached result if available
+         if cache_key in self._model_info_cache:
+             return self._model_info_cache[cache_key]
+
+         # Fetch from API and cache the result
+         try:
+             info = self.api.model_info(repo_id, files_metadata=files_metadata, token=self.token)
+             self._model_info_cache[cache_key] = info
+             return info
+         except Exception:
+             # Don't cache errors, re-raise
+             raise
+
      def _get_repo_info_for_progress(
          self,
          repo_id: str,
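The cache key combines the repository id with the files_metadata flag, so the lightweight lookup used for validation and the heavier per-file lookup used for progress tracking are memoized independently, and repeated downloads of the same repo only hit the Hub API once per variant. A standalone sketch of the same pattern (the class and the api.model_info interface here are stand-ins, not the package's real classes):

    from typing import Any, Dict

    class ModelInfoCacheSketch:
        """Illustrative memoization keyed on (repo_id, files_metadata)."""

        def __init__(self, api, token=None):
            self.api = api
            self.token = token
            self._model_info_cache: Dict[str, Any] = {}

        def get(self, repo_id: str, files_metadata: bool = False):
            cache_key = f"{repo_id}:files={files_metadata}"
            if cache_key in self._model_info_cache:
                return self._model_info_cache[cache_key]      # cache hit: no network call
            info = self.api.model_info(repo_id, files_metadata=files_metadata, token=self.token)
            self._model_info_cache[cache_key] = info          # only successful results are cached
            return info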
@@ -625,7 +652,7 @@
      ) -> tuple[int, int]:
          """Get total repository size and file count for progress tracking."""
          try:
-             info = self.api.model_info(repo_id, files_metadata=True, token=self.token)
+             info = self._get_model_info_cached(repo_id, files_metadata=True)

              total_size = 0
              file_count = 0
@@ -720,7 +747,7 @@
      ):
          """Validate repository exists and get info."""
          try:
-             info = self.api.model_info(repo_id, token=self.token)
+             info = self._get_model_info_cached(repo_id, files_metadata=False)
              return info
          except RepositoryNotFoundError:
              error_msg = f"Repository '{repo_id}' not found. Please check the repository ID."
@@ -789,6 +816,36 @@
          # If no expected size, just check that file is not empty
          return os.path.getsize(file_path) > 0

+     def _extract_model_file_type_from_tags(self, repo_id: str) -> Optional[str]:
+         """Extract model file type from repo tags with priority: NPU > MLX > GGUF."""
+         try:
+             info = self._get_model_info_cached(repo_id, files_metadata=False)
+             if hasattr(info, 'tags') and info.tags:
+                 # Convert tags to lowercase for case-insensitive matching
+                 tags_lower = [tag.lower() for tag in info.tags]
+
+                 # Check with priority: NPU > MLX > GGUF
+                 if 'npu' in tags_lower:
+                     return 'npu'
+                 elif 'mlx' in tags_lower:
+                     return 'mlx'
+                 elif 'gguf' in tags_lower:
+                     return 'gguf'
+         except Exception:
+             pass
+         return None
+
+     def _load_downloaded_manifest(self, local_dir: str) -> Dict[str, Any]:
+         """Load nexa.manifest from the downloaded repository if it exists."""
+         manifest_path = os.path.join(local_dir, 'nexa.manifest')
+         if os.path.exists(manifest_path):
+             try:
+                 with open(manifest_path, 'r', encoding='utf-8') as f:
+                     return json.load(f)
+             except (json.JSONDecodeError, IOError):
+                 pass
+         return {}
+
      def _fetch_and_save_metadata(self, repo_id: str, local_dir: str, is_mmproj: bool = False, file_name: Optional[Union[str, List[str]]] = None, **kwargs) -> None:
          """Fetch model info and save metadata after successful download."""
          # Initialize metadata with defaults to ensure manifest is always created
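_load_downloaded_manifest simply parses a nexa.manifest JSON file that some repos ship alongside the weights; the builders above only consult three of its fields. A hedged illustration of the shape being read (field names are the ones referenced in this diff; the values are placeholders):

    # Placeholder contents of <local_dir>/nexa.manifest as consumed by the manifest builders:
    downloaded_manifest = {
        "PluginId": "npu",       # overrides the tag-derived plugin id (unless a plugin_id kwarg is given)
        "ModelType": "llm",      # overrides the pipeline_tag-based guess
        "ModelName": "Example",  # used when no model_name kwarg is supplied
        # ...any other fields present in the file are ignored by these code paths
    }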
@@ -800,8 +857,8 @@

          # Try to fetch additional metadata, but don't let failures prevent manifest creation
          try:
-             # Fetch model info to get pipeline_tag
-             info = self.api.model_info(repo_id, token=self.token)
+             # Fetch model info to get pipeline_tag (using cache)
+             info = self._get_model_info_cached(repo_id, files_metadata=False)
              if hasattr(info, 'pipeline_tag') and info.pipeline_tag:
                  old_metadata['pipeline_tag'] = info.pipeline_tag
          except Exception as e:
@@ -810,11 +867,21 @@

          # Use input avater url if provided
          old_metadata['avatar_url'] = kwargs.get('avatar_url')
+
+         # Extract model file type from tags
+         model_file_type = self._extract_model_file_type_from_tags(repo_id)
+         if model_file_type:
+             old_metadata['model_file_type'] = model_file_type
+
+         # Load existing nexa.manifest from downloaded repo (if exists)
+         downloaded_manifest = self._load_downloaded_manifest(local_dir)
+         if downloaded_manifest:
+             old_metadata['downloaded_manifest'] = downloaded_manifest


          # CRITICAL: Always create the manifest file, regardless of metadata fetch failures
          try:
-             save_manifest_with_files_metadata(repo_id, local_dir, old_metadata, is_mmproj, file_name)
+             save_manifest_with_files_metadata(repo_id, local_dir, old_metadata, is_mmproj, file_name, **kwargs)
              print(f"[OK] Successfully created nexa.manifest for {repo_id}")
          except Exception as e:
              # This is critical - if manifest creation fails, we should know about it
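By the time save_manifest_with_files_metadata is called, old_metadata therefore carries the repo-derived hints alongside whatever the method initialized earlier. A hedged illustration of its shape at this point (only the keys touched in this diff are shown; values are placeholders):

    old_metadata = {
        "pipeline_tag": "text-generation",                # from the cached model_info call
        "avatar_url": None,                               # forwarded from kwargs if provided
        "model_file_type": "gguf",                        # from repo tags, priority npu > mlx > gguf
        "downloaded_manifest": {"PluginId": "llama_cpp"}, # nexa.manifest found in the repo, if any
        # ...plus the defaults initialized at the top of _fetch_and_save_metadata (not shown in the diff)
    }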
@@ -823,8 +890,11 @@
          try:
              minimal_manifest = {
                  "Name": repo_id,
-                 "ModelType": "other",
-                 "PluginId": "unknown",
+                 "ModelName": kwargs.get('model_name', ''),
+                 "ModelType": kwargs.get('model_type', 'other'),
+                 "PluginId": kwargs.get('plugin_id', 'unknown'),
+                 "DeviceId": kwargs.get('device_id', ''),
+                 "MinSDKVersion": kwargs.get('min_sdk_version', ''),
                  "ModelFile": {},
                  "MMProjFile": {"Name": "", "Downloaded": False, "Size": 0},
                  "TokenizerFile": {"Name": "", "Downloaded": False, "Size": 0},
@@ -1136,6 +1206,12 @@ def download_from_huggingface(
          is_mmproj (bool, optional): Whether the file being downloaded is an mmproj file. Only used when
                                      file_name is not None. If None, defaults to True if 'mmproj' is in
                                      the filename, False otherwise.
+         **kwargs: Additional parameters including:
+             - plugin_id (str): Override PluginId in nexa.manifest (highest priority)
+             - model_name (str): Override ModelName in nexa.manifest (highest priority)
+             - model_type (str): Override ModelType in nexa.manifest (highest priority)
+             - device_id (str): Set DeviceId in nexa.manifest (highest priority)
+             - min_sdk_version (str): Set MinSDKVersion in nexa.manifest (highest priority)

      Returns:
          str: Path to the downloaded file or directory
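A hypothetical call using the new keyword overrides (the repo id is a placeholder and the rest of the function's signature is not shown in this diff; only the **kwargs names below are taken from the docstring above):

    download_from_huggingface(
        "org/example-model-GGUF",   # placeholder repo id
        plugin_id="llama_cpp",      # highest-priority PluginId for the generated nexa.manifest
        model_name="Example Model",
        model_type="llm",
        device_id="",
        min_sdk_version="",
    )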
@@ -13,6 +13,8 @@ class ModelTypeMapping(Enum):
      """Enum for mapping HuggingFace pipeline_tag to our ModelType."""
      TEXT_GENERATION = ("text-generation", "llm")
      IMAGE_TEXT_TO_TEXT = ("image-text-to-text", "vlm")
+     ANY_TO_ANY = ("any-to-any", "a2a")
+     AUTOMATIC_SPEECH_RECOGNITION = ("automatic-speech-recognition", "asr")

      def __init__(self, pipeline_tag: str, model_type: str):
          self.pipeline_tag = pipeline_tag
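For reference, assuming the module's pipeline-tag lookup table (PIPELINE_TO_MODEL_TYPE, imported in manifest_utils but not shown in this diff) is derived from this enum, the two new members add "any-to-any" -> "a2a" and "automatic-speech-recognition" -> "asr". A minimal sketch of that assumed derivation:

    from enum import Enum

    class ModelTypeMapping(Enum):
        TEXT_GENERATION = ("text-generation", "llm")
        IMAGE_TEXT_TO_TEXT = ("image-text-to-text", "vlm")
        ANY_TO_ANY = ("any-to-any", "a2a")
        AUTOMATIC_SPEECH_RECOGNITION = ("automatic-speech-recognition", "asr")

        def __init__(self, pipeline_tag: str, model_type: str):
            self.pipeline_tag = pipeline_tag
            self.model_type = model_type

    # Assumed construction of the lookup table used by the manifest builders:
    PIPELINE_TO_MODEL_TYPE = {m.pipeline_tag: m.model_type for m in ModelTypeMapping}
    assert PIPELINE_TO_MODEL_TYPE["any-to-any"] == "a2a"
    assert PIPELINE_TO_MODEL_TYPE["automatic-speech-recognition"] == "asr"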
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nexaai
- Version: 1.0.19rc2
+ Version: 1.0.19rc3
  Summary: Python bindings for NexaSDK C-lib backend
  Author-email: "Nexa AI, Inc." <dev@nexa.ai>
  Project-URL: Homepage, https://github.com/NexaAI/nexasdk-bridge
@@ -1,6 +1,6 @@
  nexaai/__init__.py,sha256=L8oB7GFZZMGnUpCg0PecDbI_ycKuQak-ZEJ4Y12_QIw,2184
- nexaai/_stub.cpython-310-darwin.so,sha256=_YOzSEZoJ3x4rM9Wvoy8x7sUuxhgjUFmDYSP-5ummGo,66768
- nexaai/_version.py,sha256=0TlZt3jq-18bi9jwUBhTowzL5Fos7vn4Gd98k3B1AkY,143
+ nexaai/_stub.cpython-310-darwin.so,sha256=gU1-7T8bFDL1yl7mKFSMwcriX0RN_h9P4UptbbbIOKQ,66768
+ nexaai/_version.py,sha256=biSqFffhgVgZ7_N05gIols4ugoQJvVDrVGL8JTj6o5c,143
  nexaai/asr.py,sha256=NljMXDErwPNMOPaRkJZMEDka9Nk8xyur7L8i924TStY,2054
  nexaai/base.py,sha256=N8PRgDFA-XPku2vWnQIofQ7ipz3pPlO6f8YZGnuhquE,982
  nexaai/common.py,sha256=Y0NJNLTi4Nq4x1WL6PQsSvGUto0eGmWhjpsC6jcekfA,3444
@@ -19,7 +19,7 @@ nexaai/asr_impl/pybind_asr_impl.py,sha256=pE9Hb_hMi5yAc4MF83bLVOb8zDtreCkB3_u7XE
  nexaai/binds/__init__.py,sha256=eYuay_8DDXeOUWz2_R9HFSabohxs6hvZn391t2L0Po0,104
  nexaai/binds/common_bind.cpython-310-darwin.so,sha256=zxJuD0nSV--VZKxBfWZUavU7_bHj_JTi0FhkjvG4VJw,235264
  nexaai/binds/embedder_bind.cpython-310-darwin.so,sha256=tPa0c0Dv_GiW66fgmAGWGCHXRGNApznqoQS0eQx9GFM,202064
- nexaai/binds/libnexa_bridge.dylib,sha256=FLsNUU0MAVGC4OM9VIDZzP80CguslCw2ood3I2Wzsd4,251192
+ nexaai/binds/libnexa_bridge.dylib,sha256=xJ72KZaM0M1fd9-GgkzrgShsdViSDqfThDZ_nujw-Sw,251192
  nexaai/binds/llm_bind.cpython-310-darwin.so,sha256=TAWfa1Hzq00TjtC1xVsiAeLp6hv2LrL5afDz4omUghc,182784
  nexaai/binds/vlm_bind.cpython-310-darwin.so,sha256=nd6eG_m2EiPthzkSZ97hlXWUOZQir4cQfFJZ4p6eR2U,182704
  nexaai/binds/nexa_llama_cpp/libggml-base.dylib,sha256=JM4oOkie1su0ES5hMdtILeQHlRukRzH1vTleTupUXhg,650736
@@ -547,15 +547,15 @@ nexaai/tts_impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nexaai/tts_impl/mlx_tts_impl.py,sha256=i_uNPdvlXYtL3e01oKjDlP9jgkWCRt1bBHsExaaiJi8,3101
  nexaai/tts_impl/pybind_tts_impl.py,sha256=mpn44r6pfYLIl-NrEy2dXHjGtWtNCmM7HRyxiANxUI4,1444
  nexaai/utils/decode.py,sha256=61n4Zf6c5QLyqGoctEitlI9BX3tPlP2a5aaKNHbw3T4,404
- nexaai/utils/manifest_utils.py,sha256=PA84obFP7W1dlneURlIHIzJjWIF5dbDHGdNeHouUy68,12659
- nexaai/utils/model_manager.py,sha256=_WKJP7YVk7q587OoOWwDNWVR-8tbKZkmHKjcCZN8Q4M,55979
- nexaai/utils/model_types.py,sha256=-DER8L4lAUR_iLS99F0r57avwqWtuN21ug5pX2p24_E,1369
+ nexaai/utils/manifest_utils.py,sha256=Teji_i_ZG0IkiKWNKKwCq1j_a2k406db7bnd_MC4nu0,16195
+ nexaai/utils/model_manager.py,sha256=NnbPv1iuwo6T523gLsWjnff-gGvPGUjez-rFg8-ffpE,59568
+ nexaai/utils/model_types.py,sha256=9Q95icWn22jIUoY2eIEdIiXBBGJSGZlXXSbox0xXWNQ,1483
  nexaai/utils/progress_tracker.py,sha256=jdUqtmPqyhwC9uSKvQcJEYETwSt-OhP4oitdJ94614o,15394
  nexaai/utils/quantization_utils.py,sha256=FYcNSAKGlBqFDUTx3jSKOr2lnq4nyiyC0ZG8oSxFwiU,7825
  nexaai/vlm_impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  nexaai/vlm_impl/mlx_vlm_impl.py,sha256=pLtWm_ckz8a0U-AtAOMVseFDO4OVPvHyYO2KlfBaGYk,10833
  nexaai/vlm_impl/pybind_vlm_impl.py,sha256=FAbhpRJzHgI78r0mUvKybO97R1szvNhH0aTn_I52oT4,8597
- nexaai-1.0.19rc2.dist-info/METADATA,sha256=XCIDnhvcXLzMm1s9ir50CI2Zgqn-r4aaUHMRmlpcDK4,1201
- nexaai-1.0.19rc2.dist-info/WHEEL,sha256=T2p57lol9__xkoU6aJTyN1Pm43ZpRU3q6km7mIbrAMs,114
- nexaai-1.0.19rc2.dist-info/top_level.txt,sha256=LRE2YERlrZk2vfuygnSzsEeqSknnZbz3Z1MHyNmBU4w,7
- nexaai-1.0.19rc2.dist-info/RECORD,,
+ nexaai-1.0.19rc3.dist-info/METADATA,sha256=mK5fFMBrvvErJcvUuUkSXWqJ8kBemVHKYKzYDGZVhfs,1201
+ nexaai-1.0.19rc3.dist-info/WHEEL,sha256=T2p57lol9__xkoU6aJTyN1Pm43ZpRU3q6km7mIbrAMs,114
+ nexaai-1.0.19rc3.dist-info/top_level.txt,sha256=LRE2YERlrZk2vfuygnSzsEeqSknnZbz3Z1MHyNmBU4w,7
+ nexaai-1.0.19rc3.dist-info/RECORD,,