nexaai-1.0.4rc13-cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of nexaai might be problematic.

Files changed (59)
  1. nexaai/__init__.py +71 -0
  2. nexaai/_stub.cp310-win_amd64.pyd +0 -0
  3. nexaai/_version.py +4 -0
  4. nexaai/asr.py +60 -0
  5. nexaai/asr_impl/__init__.py +0 -0
  6. nexaai/asr_impl/mlx_asr_impl.py +91 -0
  7. nexaai/asr_impl/pybind_asr_impl.py +43 -0
  8. nexaai/base.py +39 -0
  9. nexaai/binds/__init__.py +3 -0
  10. nexaai/binds/common_bind.cp310-win_amd64.pyd +0 -0
  11. nexaai/binds/embedder_bind.cp310-win_amd64.pyd +0 -0
  12. nexaai/binds/llm_bind.cp310-win_amd64.pyd +0 -0
  13. nexaai/binds/nexa_bridge.dll +0 -0
  14. nexaai/binds/nexa_llama_cpp/ggml-base.dll +0 -0
  15. nexaai/binds/nexa_llama_cpp/ggml-cpu.dll +0 -0
  16. nexaai/binds/nexa_llama_cpp/ggml-cuda.dll +0 -0
  17. nexaai/binds/nexa_llama_cpp/ggml-vulkan.dll +0 -0
  18. nexaai/binds/nexa_llama_cpp/ggml.dll +0 -0
  19. nexaai/binds/nexa_llama_cpp/llama.dll +0 -0
  20. nexaai/binds/nexa_llama_cpp/mtmd.dll +0 -0
  21. nexaai/binds/nexa_llama_cpp/nexa_plugin.dll +0 -0
  22. nexaai/common.py +61 -0
  23. nexaai/cv.py +87 -0
  24. nexaai/cv_impl/__init__.py +0 -0
  25. nexaai/cv_impl/mlx_cv_impl.py +88 -0
  26. nexaai/cv_impl/pybind_cv_impl.py +31 -0
  27. nexaai/embedder.py +68 -0
  28. nexaai/embedder_impl/__init__.py +0 -0
  29. nexaai/embedder_impl/mlx_embedder_impl.py +114 -0
  30. nexaai/embedder_impl/pybind_embedder_impl.py +91 -0
  31. nexaai/image_gen.py +136 -0
  32. nexaai/image_gen_impl/__init__.py +0 -0
  33. nexaai/image_gen_impl/mlx_image_gen_impl.py +291 -0
  34. nexaai/image_gen_impl/pybind_image_gen_impl.py +84 -0
  35. nexaai/llm.py +89 -0
  36. nexaai/llm_impl/__init__.py +0 -0
  37. nexaai/llm_impl/mlx_llm_impl.py +249 -0
  38. nexaai/llm_impl/pybind_llm_impl.py +207 -0
  39. nexaai/rerank.py +51 -0
  40. nexaai/rerank_impl/__init__.py +0 -0
  41. nexaai/rerank_impl/mlx_rerank_impl.py +91 -0
  42. nexaai/rerank_impl/pybind_rerank_impl.py +42 -0
  43. nexaai/runtime.py +64 -0
  44. nexaai/tts.py +70 -0
  45. nexaai/tts_impl/__init__.py +0 -0
  46. nexaai/tts_impl/mlx_tts_impl.py +93 -0
  47. nexaai/tts_impl/pybind_tts_impl.py +42 -0
  48. nexaai/utils/avatar_fetcher.py +104 -0
  49. nexaai/utils/decode.py +18 -0
  50. nexaai/utils/model_manager.py +1195 -0
  51. nexaai/utils/progress_tracker.py +372 -0
  52. nexaai/vlm.py +120 -0
  53. nexaai/vlm_impl/__init__.py +0 -0
  54. nexaai/vlm_impl/mlx_vlm_impl.py +205 -0
  55. nexaai/vlm_impl/pybind_vlm_impl.py +228 -0
  56. nexaai-1.0.4rc13.dist-info/METADATA +26 -0
  57. nexaai-1.0.4rc13.dist-info/RECORD +59 -0
  58. nexaai-1.0.4rc13.dist-info/WHEEL +5 -0
  59. nexaai-1.0.4rc13.dist-info/top_level.txt +1 -0
nexaai/embedder.py ADDED
@@ -0,0 +1,68 @@
+ from typing import List, Union
+ from dataclasses import dataclass
+ from abc import abstractmethod
+ import numpy as np
+
+ from nexaai.base import BaseModel
+
+
+ @dataclass
+ class EmbeddingConfig:
+     batch_size: int = 32
+     normalize: bool = True
+     normalize_method: str = "l2"
+
+
+ class Embedder(BaseModel):
+     def __init__(self):
+         """
+         Internal initializer
+         """
+         pass
+
+     @classmethod
+     def _load_from(cls, model_path: str, tokenizer_file: str = "tokenizer.json", plugin_id: str = "llama_cpp"):
+         """
+         Load an embedder from model files, routing to the appropriate implementation.
+
+         Args:
+             model_path: Path to the model file
+             tokenizer_file: Path to the tokenizer file (default: "tokenizer.json")
+             plugin_id: Plugin ID to use for the model (default: "llama_cpp")
+
+         Returns:
+             Embedder instance
+         """
+         if plugin_id == "mlx":
+             from nexaai.embedder_impl.mlx_embedder_impl import MLXEmbedderImpl
+             return MLXEmbedderImpl._load_from(model_path, tokenizer_file, plugin_id)
+         else:
+             from nexaai.embedder_impl.pybind_embedder_impl import PyBindEmbedderImpl
+             return PyBindEmbedderImpl._load_from(model_path, tokenizer_file, plugin_id)
+
+     @abstractmethod
+     def generate(self, texts: Union[List[str], str] = None, config: EmbeddingConfig = EmbeddingConfig(), input_ids: Union[List[int], List[List[int]]] = None) -> np.ndarray:
+         """
+         Generate embeddings for the given texts or input_ids.
+
+         Args:
+             texts: List of strings or a single string to embed
+             input_ids: Pre-tokenized input as:
+                 - Single sequence: list of integers [1, 2, 3, 4]
+                 - Multiple sequences: list of lists [[1, 2, 3], [4, 5, 6]]
+             config: Configuration for embedding generation
+
+         Returns:
+             numpy array of embeddings with shape (num_sequences, embedding_dim)
+         """
+         pass
+
+     @abstractmethod
+     def get_embedding_dim(self) -> int:
+         """
+         Get the embedding dimension of the model.
+
+         Returns:
+             The embedding dimension as an int
+         """
+         pass
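For orientation, a minimal usage sketch of this facade, assuming a llama_cpp-served GGUF model (the model file name below is hypothetical):

    from nexaai.embedder import Embedder, EmbeddingConfig

    # Hypothetical local checkpoint; _load_from routes on plugin_id.
    embedder = Embedder._load_from("models/embedder.Q4_K_M.gguf", plugin_id="llama_cpp")

    config = EmbeddingConfig(batch_size=16, normalize=True, normalize_method="l2")
    vectors = embedder.generate(texts=["hello world", "goodbye"], config=config)
    print(vectors.shape)  # (2, embedder.get_embedding_dim())
    embedder.eject()      # both backend implementations expose eject() to free the model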
nexaai/embedder_impl/__init__.py ADDED
File without changes
nexaai/embedder_impl/mlx_embedder_impl.py ADDED
@@ -0,0 +1,114 @@
+ from typing import List, Union
+ import numpy as np
+
+ from nexaai.embedder import Embedder, EmbeddingConfig
+ from nexaai.mlx_backend.embedding.interface import Embedder as MLXEmbedderInterface
+ from nexaai.mlx_backend.ml import ModelConfig as MLXModelConfig, SamplerConfig as MLXSamplerConfig, GenerationConfig as MLXGenerationConfig, EmbeddingConfig as MLXEmbeddingConfig
+
+
+ class MLXEmbedderImpl(Embedder):
+     def __init__(self):
+         """Initialize MLX Embedder implementation."""
+         super().__init__()
+         self._mlx_embedder = None
+
+     @classmethod
+     def _load_from(cls, model_path: str, tokenizer_file: str = "tokenizer.json", plugin_id: str = "mlx"):
+         """
+         Load an embedder from model files using the MLX backend.
+
+         Args:
+             model_path: Path to the model file
+             tokenizer_file: Path to the tokenizer file (default: "tokenizer.json")
+             plugin_id: Plugin ID to use for the model (default: "mlx")
+
+         Returns:
+             MLXEmbedderImpl instance
+         """
+         try:
+             # MLX interface is already imported
+
+             # Create instance and load MLX embedder
+             instance = cls()
+             instance._mlx_embedder = MLXEmbedderInterface(
+                 model_path=model_path,
+                 tokenizer_path=tokenizer_file
+             )
+
+             # Load the model
+             success = instance._mlx_embedder.load_model(model_path)
+             if not success:
+                 raise RuntimeError("Failed to load MLX embedder model")
+
+             return instance
+         except Exception as e:
+             raise RuntimeError(f"Failed to load MLX Embedder: {str(e)}")
+
+     def eject(self):
+         """
+         Clean up resources and destroy the embedder
+         """
+         if self._mlx_embedder:
+             self._mlx_embedder.destroy()
+             self._mlx_embedder = None
+
+     def generate(self, texts: Union[List[str], str] = None, config: EmbeddingConfig = EmbeddingConfig(), input_ids: Union[List[int], List[List[int]]] = None) -> np.ndarray:
+         """
+         Generate embeddings for the given texts or input_ids.
+
+         Args:
+             texts: List of strings or a single string to embed
+             input_ids: Pre-tokenized input as:
+                 - Single sequence: list of integers [1, 2, 3, 4]
+                 - Multiple sequences: list of lists [[1, 2, 3], [4, 5, 6]]
+             config: Configuration for embedding generation
+
+         Returns:
+             numpy array of embeddings with shape (num_sequences, embedding_dim)
+         """
+         if not self._mlx_embedder:
+             raise RuntimeError("MLX Embedder not loaded")
+
+         if texts is None and input_ids is None:
+             raise ValueError("Either texts or input_ids must be provided")
+
+         # MLX embedder currently only supports text input, not pre-tokenized input_ids
+         if input_ids is not None:
+             raise NotImplementedError("MLX embedder does not support input_ids, only text input")
+
+         try:
+             # Convert single string to list if needed
+             if isinstance(texts, str):
+                 texts = [texts]
+
+             # Aliased import avoids shadowing the base-class EmbeddingConfig
+
+             # Convert our config to MLX config
+             mlx_config = MLXEmbeddingConfig()
+             mlx_config.batch_size = config.batch_size
+             mlx_config.normalize = config.normalize
+             mlx_config.normalize_method = config.normalize_method
+
+             # Generate embeddings using MLX
+             embeddings = self._mlx_embedder.embed(texts, mlx_config)
+
+             # Convert to numpy array
+             return np.array(embeddings, dtype=np.float32)
+
+         except Exception as e:
+             raise RuntimeError(f"Failed to generate embeddings: {str(e)}")
+
+     def get_embedding_dim(self) -> int:
+         """
+         Get the embedding dimension of the model.
+
+         Returns:
+             The embedding dimension as an int
+         """
+         if not self._mlx_embedder:
+             raise RuntimeError("MLX Embedder not loaded")
+
+         try:
+             return self._mlx_embedder.embedding_dim()
+         except Exception as e:
+             raise RuntimeError(f"Failed to get embedding dimension: {str(e)}")
nexaai/embedder_impl/pybind_embedder_impl.py ADDED
@@ -0,0 +1,91 @@
+ from typing import List, Union
+ import numpy as np
+
+ from nexaai.embedder import Embedder, EmbeddingConfig
+ from nexaai.binds import embedder_bind
+ from nexaai.runtime import _ensure_runtime
+
+
+ class PyBindEmbedderImpl(Embedder):
+     def __init__(self, _handle_ptr):
+         """
+         Internal initializer
+         """
+         super().__init__()
+         self._handle = _handle_ptr
+
+     @classmethod
+     def _load_from(cls, model_path: str, tokenizer_file: str = "tokenizer.json", plugin_id: str = "llama_cpp"):
+         """
+         Load an embedder from model files.
+
+         Args:
+             model_path: Path to the model file
+             tokenizer_file: Path to the tokenizer file (default: "tokenizer.json")
+             plugin_id: Plugin ID to use for the model (default: "llama_cpp")
+
+         Returns:
+             PyBindEmbedderImpl instance
+         """
+         _ensure_runtime()
+         handle = embedder_bind.ml_embedder_create(model_path, tokenizer_file, plugin_id)
+         return cls(handle)
+
+     def eject(self):
+         """
+         Clean up resources and destroy the embedder
+         """
+         # Destructor of the handle will unload the model correctly
+         del self._handle
+         self._handle = None
+
+     def generate(self, texts: Union[List[str], str] = None, config: EmbeddingConfig = EmbeddingConfig(), input_ids: Union[List[int], List[List[int]]] = None) -> np.ndarray:
+         """
+         Generate embeddings for the given texts or input_ids.
+
+         Args:
+             texts: List of strings or a single string to embed
+             input_ids: Pre-tokenized input as:
+                 - Single sequence: list of integers [1, 2, 3, 4]
+                 - Multiple sequences: list of lists [[1, 2, 3], [4, 5, 6]]
+             config: Configuration for embedding generation
+
+         Returns:
+             numpy array of embeddings with shape (num_sequences, embedding_dim)
+         """
+         if texts is None and input_ids is None:
+             raise ValueError("Either texts or input_ids must be provided")
+
+         # Create bind config
+         bind_config = embedder_bind.EmbeddingConfig()
+         bind_config.batch_size = config.batch_size
+         bind_config.normalize = config.normalize
+         bind_config.normalize_method = config.normalize_method
+
+         # Convert single string to list if needed
+         if isinstance(texts, str):
+             texts = [texts]
+
+         # Convert input_ids to 2D format if needed
+         processed_input_ids = None
+         if input_ids is not None:
+             if len(input_ids) > 0 and isinstance(input_ids[0], int):
+                 # Single sequence: convert [1, 2, 3] to [[1, 2, 3]]
+                 processed_input_ids = [input_ids]
+             else:
+                 # Multiple sequences: already in correct format [[1, 2], [3, 4]]
+                 processed_input_ids = input_ids
+
+         # Pass both parameters, let the ABI handle validation
+         embeddings = embedder_bind.ml_embedder_embed(self._handle, bind_config, texts, processed_input_ids)
+
+         return embeddings
+
+     def get_embedding_dim(self) -> int:
+         """
+         Get the embedding dimension of the model.
+
+         Returns:
+             The embedding dimension as an int
+         """
+         return embedder_bind.ml_embedder_embedding_dim(self._handle)
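Unlike the MLX implementation, this backend does forward pre-tokenized input across the pybind layer, normalizing a single sequence into a batch of one first. Both call forms, assuming a loaded instance named embedder:

    # Single sequence: wrapped to [[101, 2023, 102]] before the bind call.
    one = embedder.generate(input_ids=[101, 2023, 102])

    # Batch of sequences: passed through unchanged.
    many = embedder.generate(input_ids=[[101, 2023, 102], [101, 2054, 102]])

    # Both return arrays shaped (num_sequences, embedding_dim).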
nexaai/image_gen.py ADDED
@@ -0,0 +1,136 @@
+ from typing import List, Optional, Union
+ from abc import abstractmethod
+ from dataclasses import dataclass
+
+ from nexaai.base import BaseModel
+
+
+ @dataclass
+ class Image:
+     """Image data structure."""
+     data: List[float]  # width × height × channels
+     width: int
+     height: int
+     channels: int  # 3 = RGB, 4 = RGBA
+
+
+ @dataclass
+ class ImageSamplerConfig:
+     """Configuration for image sampling."""
+     method: str = "ddim"
+     steps: int = 20
+     guidance_scale: float = 7.5
+     eta: float = 0.0
+     seed: int = -1  # -1 for random
+
+
+ @dataclass
+ class ImageGenerationConfig:
+     """Configuration for image generation."""
+     prompts: Union[str, List[str]]
+     negative_prompts: Optional[Union[str, List[str]]] = None
+     height: int = 512
+     width: int = 512
+     sampler_config: Optional[ImageSamplerConfig] = None
+     lora_id: int = -1  # -1 for none
+     init_image: Optional[Image] = None
+     strength: float = 1.0
+     n_images: int = 1
+     n_rows: int = 1
+     decoding_batch_size: int = 1
+
+
+ @dataclass
+ class SchedulerConfig:
+     """Configuration for diffusion scheduler."""
+     type: str = "ddim"
+     num_train_timesteps: int = 1000
+     steps_offset: int = 0  # An offset added to the inference steps
+     beta_start: float = 0.00085
+     beta_end: float = 0.012
+     beta_schedule: str = "scaled_linear"
+     prediction_type: str = "epsilon"
+     timestep_type: str = "discrete"
+     timestep_spacing: str = "linspace"
+     interpolation_type: str = "linear"
+     config_path: Optional[str] = None
+
+
+ class ImageGen(BaseModel):
+     """Abstract base class for image generation models."""
+
+     def __init__(self):
+         """Initialize base image generation class."""
+         pass
+
+     @classmethod
+     def _load_from(cls,
+                    model_path: str,
+                    scheduler_config_path: str = "",
+                    plugin_id: str = "llama_cpp",
+                    device_id: Optional[str] = None,
+                    float16: bool = True,
+                    quantize: bool = False
+                    ) -> 'ImageGen':
+         """Load image generation model from local path, routing to the appropriate implementation."""
+         if plugin_id == "mlx":
+             from nexaai.image_gen_impl.mlx_image_gen_impl import MLXImageGenImpl
+             return MLXImageGenImpl._load_from(model_path, scheduler_config_path, plugin_id, device_id, float16, quantize)
+         else:
+             from nexaai.image_gen_impl.pybind_image_gen_impl import PyBindImageGenImpl
+             return PyBindImageGenImpl._load_from(model_path, scheduler_config_path, plugin_id, device_id, float16, quantize)
+
+     @abstractmethod
+     def load_model(self, model_path: str, extra_data: Optional[str] = None) -> bool:
+         """Load model from path."""
+         pass
+
+     @abstractmethod
+     def set_scheduler(self, config: SchedulerConfig) -> None:
+         """Set scheduler configuration."""
+         pass
+
+     @abstractmethod
+     def set_sampler(self, config: ImageSamplerConfig) -> None:
+         """Set sampler configuration."""
+         pass
+
+     @abstractmethod
+     def reset_sampler(self) -> None:
+         """Reset sampler to default configuration."""
+         pass
+
+     @abstractmethod
+     def txt2img(self, prompt: str, config: ImageGenerationConfig) -> Image:
+         """Generate image from text prompt."""
+         pass
+
+     @abstractmethod
+     def img2img(self, init_image: Image, prompt: str, config: ImageGenerationConfig) -> Image:
+         """Generate image from initial image and text prompt."""
+         pass
+
+     @abstractmethod
+     def generate(self, config: ImageGenerationConfig) -> Image:
+         """Generate image from configuration."""
+         pass
+
+     @abstractmethod
+     def set_lora(self, lora_id: int) -> None:
+         """Set active LoRA adapter."""
+         pass
+
+     @abstractmethod
+     def add_lora(self, lora_path: str) -> int:
+         """Add LoRA adapter and return its ID."""
+         pass
+
+     @abstractmethod
+     def remove_lora(self, lora_id: int) -> None:
+         """Remove LoRA adapter."""
+         pass
+
+     @abstractmethod
+     def list_loras(self) -> List[int]:
+         """List available LoRA adapters."""
+         pass
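A minimal text-to-image sketch against this interface; the checkpoint path is hypothetical, and unspecified fields keep the dataclass defaults:

    from nexaai.image_gen import ImageGen, ImageGenerationConfig, ImageSamplerConfig

    gen = ImageGen._load_from("models/sd-1.5", plugin_id="mlx")  # hypothetical path

    config = ImageGenerationConfig(
        prompts="a lighthouse at dusk, oil painting",
        negative_prompts="blurry, low quality",
        sampler_config=ImageSamplerConfig(steps=30, guidance_scale=7.5, seed=42),
    )
    image = gen.txt2img("a lighthouse at dusk, oil painting", config)
    print(image.width, image.height, image.channels)  # e.g. 512 512 3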
nexaai/image_gen_impl/__init__.py ADDED
File without changes