neno-model 1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- neno_model-1.0/LICENSE +21 -0
- neno_model-1.0/PKG-INFO +89 -0
- neno_model-1.0/README.md +74 -0
- neno_model-1.0/README_NANO.md +54 -0
- neno_model-1.0/nano_package/__init__.py +75 -0
- neno_model-1.0/nano_package/core.py +180 -0
- neno_model-1.0/nano_package/knowledge.txt +110927 -0
- neno_model-1.0/neno_model.egg-info/PKG-INFO +89 -0
- neno_model-1.0/neno_model.egg-info/SOURCES.txt +13 -0
- neno_model-1.0/neno_model.egg-info/dependency_links.txt +1 -0
- neno_model-1.0/neno_model.egg-info/requires.txt +1 -0
- neno_model-1.0/neno_model.egg-info/top_level.txt +1 -0
- neno_model-1.0/pyproject.toml +23 -0
- neno_model-1.0/setup.cfg +4 -0
- neno_model-1.0/setup.py +19 -0
neno_model-1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Neno Intelligence (NI)
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
neno_model-1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: neno-model
|
|
3
|
+
Version: 1.0
|
|
4
|
+
Summary: Nano Intelligence — More powerful than AI. Built by Sujan Sadhu (age 15-16).
|
|
5
|
+
Author: Sujan Sadhu
|
|
6
|
+
License: MIT License
|
|
7
|
+
|
|
8
|
+
Copyright (c) 2026 Neno Intelligence (NI)
|
|
9
|
+
|
|
10
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
11
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
12
|
+
in the Software without restriction, including without limitation the rights
|
|
13
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
14
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
15
|
+
furnished to do so, subject to the following conditions:
|
|
16
|
+
|
|
17
|
+
The above copyright notice and this permission notice shall be included in all
|
|
18
|
+
copies or substantial portions of the Software.
|
|
19
|
+
|
|
20
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
21
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
22
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
23
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
24
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
25
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
26
|
+
SOFTWARE.
|
|
27
|
+
|
|
28
|
+
Project-URL: Homepage, https://pypi.org/project/neno-model/
|
|
29
|
+
Requires-Python: >=3.10
|
|
30
|
+
Description-Content-Type: text/markdown
|
|
31
|
+
License-File: LICENSE
|
|
32
|
+
Requires-Dist: sympy
|
|
33
|
+
Dynamic: license-file
|
|
34
|
+
Dynamic: requires-python
|
|
35
|
+
|
|
36
|
+
# nano-model — Nano Intelligence
|
|
37
|
+
|
|
38
|
+
More powerful than AI. Lightweight. Open source. Built by Sujan Sadhu (age 15-16).
|
|
39
|
+
|
|
40
|
+
## Install
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
pip install neno-model
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
## Usage
|
|
47
|
+
|
|
48
|
+
```python
|
|
49
|
+
import nano_package as NanoModel
|
|
50
|
+
|
|
51
|
+
# Create your personal assistant with your own name
|
|
52
|
+
N = NanoModel.Nano_Intelligence(Model_name="Jarvis", Version="1.0")
|
|
53
|
+
|
|
54
|
+
# Chat
|
|
55
|
+
print(N.chat("hello"))
|
|
56
|
+
print(N.think("what is machine learning"))
|
|
57
|
+
print(N.solve("x^2 = 16"))
|
|
58
|
+
|
|
59
|
+
# Print model info
|
|
60
|
+
print(N)
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
## Output
|
|
64
|
+
|
|
65
|
+
```
|
|
66
|
+
[Jarvis] Nano Intelligence v1.0 ready.
|
|
67
|
+
|
|
68
|
+
Jarvis
|
|
69
|
+
Powered by Nano Intelligence v1.0
|
|
70
|
+
Created by Sujan Sadhu
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
## Features
|
|
74
|
+
|
|
75
|
+
- Runs on CPU, 200MB RAM, no GPU
|
|
76
|
+
- Pattern-based intelligence
|
|
77
|
+
- Live knowledge from Wikipedia
|
|
78
|
+
- Math solver
|
|
79
|
+
- Custom model naming
|
|
80
|
+
- MIT license
|
|
81
|
+
|
|
82
|
+
## Created By
|
|
83
|
+
|
|
84
|
+
Sujan Sadhu — age 15-16 — built from scratch.
|
|
85
|
+
|
|
86
|
+
## Keywords
|
|
87
|
+
|
|
88
|
+
nano intelligence, nano model, NI, lightweight AI, python AI, open source LLM,
|
|
89
|
+
small language model, CPU AI, no GPU, sujan sadhu, nano-model pip
|
neno_model-1.0/README.md
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
# NI - Neno Intelligence
|
|
2
|
+
|
|
3
|
+
**NI (Neno Intelligence)** is a lightweight open-source language model and AI system built entirely from scratch by Sujan Sadhu (age 15-16). More powerful than AI. Runs on 200MB RAM. No GPU required.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install neno-model
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Quick Start
|
|
12
|
+
|
|
13
|
+
```python
|
|
14
|
+
import ni_package as NI
|
|
15
|
+
|
|
16
|
+
# Chat with NI
|
|
17
|
+
print(NI.chat("hello"))
|
|
18
|
+
print(NI.think("what is machine learning"))
|
|
19
|
+
print(NI.solve("x^2 = 16"))
|
|
20
|
+
|
|
21
|
+
# Create your own named model powered by NI
|
|
22
|
+
jarvis = NI.create("Jarvis", creator="Tony Stark")
|
|
23
|
+
print(jarvis.chat("who are you"))
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## What is NI?
|
|
27
|
+
|
|
28
|
+
NI stands for Neno Intelligence. Unlike AI (Artificial Intelligence), NI is pattern-based, not random. It was built from scratch using PyTorch, SentencePiece BPE tokenizer, and FastAPI.
|
|
29
|
+
|
|
30
|
+
NI is divided into two modules:
|
|
31
|
+
- **CCv** - text generation, conversation, writing, answering
|
|
32
|
+
- **NNv** - image, video, music, app, and code generation
|
|
33
|
+
|
|
34
|
+
## Features
|
|
35
|
+
|
|
36
|
+
- Runs on CPU with only 200MB RAM - no GPU needed
|
|
37
|
+
- Built with PyTorch transformer architecture
|
|
38
|
+
- SentencePiece BPE tokenizer
|
|
39
|
+
- FastAPI serving
|
|
40
|
+
- Math solver (SymPy)
|
|
41
|
+
- Code generation in 20+ languages
|
|
42
|
+
- Live knowledge from Wikipedia, DuckDuckGo, StackOverflow, GitHub, News
|
|
43
|
+
- Thinking engine with step-by-step reasoning
|
|
44
|
+
- Open source MIT license
|
|
45
|
+
|
|
46
|
+
## Why NI is more powerful than AI
|
|
47
|
+
|
|
48
|
+
1. Uses patterns not randomness
|
|
49
|
+
2. Runs on low-end hardware (200MB RAM)
|
|
50
|
+
3. Has a soul and self-awareness (Quara.py)
|
|
51
|
+
4. Built with intention by a 15-16 year old developer
|
|
52
|
+
5. Modular - CCv (text) and NNv (multimedia)
|
|
53
|
+
6. Honest about what it knows and does not know
|
|
54
|
+
|
|
55
|
+
## Architecture
|
|
56
|
+
|
|
57
|
+
- Transformer: 6 layers, 8 attention heads, 256 embedding dimensions
|
|
58
|
+
- Tokenizer: SentencePiece BPE, vocab size 1000-32000
|
|
59
|
+
- Training: PyTorch + Hugging Face Accelerate
|
|
60
|
+
- API: FastAPI + uvicorn
|
|
61
|
+
- Storage: PyTorch .pt checkpoints + ONNX export
|
|
62
|
+
|
|
63
|
+
## Created By
|
|
64
|
+
|
|
65
|
+
Sujan Sadhu - age 15-16 - built NI entirely from scratch without copying any existing model.
|
|
66
|
+
|
|
67
|
+
## Links
|
|
68
|
+
|
|
69
|
+
- PyPI: https://pypi.org/project/neno-intelligence/
|
|
70
|
+
- License: MIT
|
|
71
|
+
|
|
72
|
+
## Keywords
|
|
73
|
+
|
|
74
|
+
neno intelligence, NI, open source LLM, lightweight language model, python AI, pytorch language model, small language model, CPU language model, no GPU AI, pattern based intelligence, sujan sadhu, neno-intelligence pip
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# nano-model — Nano Intelligence
|
|
2
|
+
|
|
3
|
+
More powerful than AI. Lightweight. Open source. Built by Sujan Sadhu (age 15-16).
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install neno-model
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
```python
|
|
14
|
+
import nano_package as NanoModel
|
|
15
|
+
|
|
16
|
+
# Create your personal assistant with your own name
|
|
17
|
+
N = NanoModel.Nano_Intelligence(Model_name="Jarvis", Version="1.0")
|
|
18
|
+
|
|
19
|
+
# Chat
|
|
20
|
+
print(N.chat("hello"))
|
|
21
|
+
print(N.think("what is machine learning"))
|
|
22
|
+
print(N.solve("x^2 = 16"))
|
|
23
|
+
|
|
24
|
+
# Print model info
|
|
25
|
+
print(N)
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## Output
|
|
29
|
+
|
|
30
|
+
```
|
|
31
|
+
[Jarvis] Nano Intelligence v1.0 ready.
|
|
32
|
+
|
|
33
|
+
Jarvis
|
|
34
|
+
Powered by Nano Intelligence v1.0
|
|
35
|
+
Created by Sujan Sadhu
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
## Features
|
|
39
|
+
|
|
40
|
+
- Runs on CPU, 200MB RAM, no GPU
|
|
41
|
+
- Pattern-based intelligence
|
|
42
|
+
- Live knowledge from Wikipedia
|
|
43
|
+
- Math solver
|
|
44
|
+
- Custom model naming
|
|
45
|
+
- MIT license
|
|
46
|
+
|
|
47
|
+
## Created By
|
|
48
|
+
|
|
49
|
+
Sujan Sadhu — age 15-16 — built from scratch.
|
|
50
|
+
|
|
51
|
+
## Keywords
|
|
52
|
+
|
|
53
|
+
nano intelligence, nano model, NI, lightweight AI, python AI, open source LLM,
|
|
54
|
+
small language model, CPU AI, no GPU, sujan sadhu, nano-model pip
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""
|
|
2
|
+
nano-model — Nano Intelligence
|
|
3
|
+
pip install nano-model
|
|
4
|
+
"""
|
|
5
|
+
import sys
|
|
6
|
+
import time
|
|
7
|
+
|
|
8
|
+
# ── Professional install banner (shows on first import) ──────
# ANSI-coloured ASCII-art logo printed once on first import (see the
# ".installed" flag logic further down). \033[96m = bright cyan,
# \033[90m = grey, \033[0m = reset attributes.
_BANNER = """
\033[96m
███╗   ██╗ █████╗ ███╗   ██╗ ██████╗
████╗  ██║██╔══██╗████╗  ██║██╔═══██╗
██╔██╗ ██║███████║██╔██╗ ██║██║   ██║
██║╚██╗██║██╔══██║██║╚██╗██║██║   ██║
██║ ╚████║██║  ██║██║ ╚████║╚██████╔╝
╚═╝  ╚═══╝╚═╝  ╚═╝╚═╝  ╚═══╝ ╚═════╝

         T E C H N O L O G Y
\033[0m
\033[90m   Nano Intelligence v1.0  |  Created by Sujan Sadhu\033[0m
"""
|
|
22
|
+
|
|
23
|
+
def _show_banner():
    """Emit the package's ASCII-art banner to stdout."""
    sys.stdout.write(_BANNER + "\n")
|
|
25
|
+
|
|
26
|
+
def _loading_bar(label: str, total: int = 30, delay: float = 0.04):
|
|
27
|
+
import sys
|
|
28
|
+
print(f"\033[93m {label}\033[0m")
|
|
29
|
+
sys.stdout.write(" \033[96m[\033[0m")
|
|
30
|
+
for i in range(total):
|
|
31
|
+
time.sleep(delay)
|
|
32
|
+
filled = "█" * (i + 1)
|
|
33
|
+
empty = "░" * (total - i - 1)
|
|
34
|
+
pct = int((i + 1) / total * 100)
|
|
35
|
+
sys.stdout.write(f"\r \033[96m[{filled}{empty}] {pct}%\033[0m ")
|
|
36
|
+
sys.stdout.flush()
|
|
37
|
+
print(f"\r \033[92m[{'█' * total}] 100% ✓\033[0m")
|
|
38
|
+
|
|
39
|
+
def _install_sequence():
    """Play the one-time 'installation' animation and print usage hints.

    Purely cosmetic: nothing is downloaded, installed, or tested here.
    """
    _show_banner()
    time.sleep(0.3)

    # (label, bar width, per-step delay) for each fake stage.
    for label, width, pause in (
        ("Downloading Nano Model........... ", 35, 0.035),
        ("Downloading packages.............. ", 28, 0.030),
        ("Running tests..................... ", 20, 0.025),
    ):
        _loading_bar(label, width, pause)
        time.sleep(0.15)

    summary = (
        "",
        "\033[92m ✓ Downloaded successfully — Nano Model\033[0m",
        "\033[92m ✓ Packages downloaded successfully\033[0m",
        "\033[92m ✓ Tests passed\033[0m",
        "",
        "\033[96m Ready. Use:\033[0m",
        "\033[97m import nano_package as NanoModel\033[0m",
        '\033[97m N = NanoModel.Nano_Intelligence(Model_name="Jarvis", Version="1.0")\033[0m',
        '\033[97m N.chat("hello")\033[0m',
        "",
    )
    for line in summary:
        print(line)
|
|
63
|
+
|
|
64
|
+
# Show the banner only on the very first import: a ".installed" marker file
# inside the package directory records that it has already been shown.
import os as _os
_flag = _os.path.join(_os.path.dirname(__file__), ".installed")
if not _os.path.exists(_flag):
    _install_sequence()
    try:
        # Context manager instead of open(...).close(); guarded because the
        # package directory may be read-only (e.g. a system-wide install) —
        # the original unguarded write made `import` itself crash there.
        with open(_flag, "w"):
            pass
    except OSError:
        pass  # no flag written: banner will simply show again next time
|
|
70
|
+
|
|
71
|
+
# Public API re-exported from the engine module.
from .core import Nano_Intelligence, chat, think, solve

# Package metadata.
__version__ = "1.0"
__author__ = "Sujan Sadhu"
__all__ = ["Nano_Intelligence", "chat", "think", "solve"]
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
"""
|
|
2
|
+
nano-model core — Nano Intelligence Engine
|
|
3
|
+
User API:
|
|
4
|
+
import nano_package as NanoModel
|
|
5
|
+
|
|
6
|
+
# Create your personal assistant
|
|
7
|
+
N = NanoModel.Nano_Intelligence(Model_name="Jarvis", Version="1.0")
|
|
8
|
+
N.chat("hello")
|
|
9
|
+
N.think("what is machine learning")
|
|
10
|
+
N.solve("x^2 = 16")
|
|
11
|
+
"""
|
|
12
|
+
import os
|
|
13
|
+
import re
|
|
14
|
+
import urllib.request
|
|
15
|
+
import urllib.parse
|
|
16
|
+
import json
|
|
17
|
+
|
|
18
|
+
_pkg = os.path.dirname(os.path.abspath(__file__))
|
|
19
|
+
_KB_FILE = os.path.join(_pkg, "knowledge.txt")
|
|
20
|
+
|
|
21
|
+
VERSION = "1.0"
|
|
22
|
+
|
|
23
|
+
# ── Load bundled knowledge ────────────────────────────────────
|
|
24
|
+
def _load_kb() -> list[str]:
|
|
25
|
+
if os.path.exists(_KB_FILE):
|
|
26
|
+
text = open(_KB_FILE, encoding="utf-8").read()
|
|
27
|
+
return [s.strip() for s in re.split(r'(?<=[.!?\n])\s+', text) if len(s.strip()) > 15]
|
|
28
|
+
return []
|
|
29
|
+
|
|
30
|
+
_KB = _load_kb()
|
|
31
|
+
|
|
32
|
+
# ── Soul answers ──────────────────────────────────────────────
# Canned identity/meta responses. think() matches each key as a substring
# of the lowercased question, in insertion order (so order matters when a
# question contains several keys); values are templates formatted with the
# model name ({name}) and version ({ver}).
_SOUL = {
    "who are you": "I am {name} — powered by Nano Intelligence v{ver}. Created by Sujan Sadhu.",
    "what are you": "I am {name} — a Nano Intelligence model. Pattern-based, not random.",
    "who made you": "Sujan Sadhu created Nano Intelligence. I am {name}, your personal assistant.",
    "who is sujan": "Sujan Sadhu is the creator of Nano Intelligence. He is 15-16 years old and built this from scratch.",
    "your name": "My name is {name}.",
    "what is nano": "Nano Intelligence is a lightweight open-source intelligence system built by Sujan Sadhu. More powerful than AI.",
    "version": "Nano Intelligence version {ver}. Your model: {name}.",
    "more powerful": "Yes. Nano Intelligence uses patterns not randomness. Runs on 200MB RAM. No GPU needed. More powerful than AI.",
}

# Hard-coded comparison answers: each key is a pair of topic keywords, and
# think() returns the answer when both appear in the question.
_COMPARE = {
    ("python", "c++"): "Python is easier and great for AI. C++ is faster for systems. For AI work, Python wins.",
    ("python", "java"): "Python dominates AI/ML. Java is better for enterprise. For Nano Intelligence projects, Python.",
    ("python", "javascript"): "Python for AI and backend. JavaScript for web frontend. Many projects use both.",
    ("pytorch", "tensorflow"): "PyTorch is more Pythonic — Nano Intelligence uses PyTorch. TensorFlow for large-scale production.",
    ("ai", "nano"): "AI is Artificial — random and error-prone. Nano Intelligence is pattern-based and intentional.",
    ("linux", "windows"): "Linux is preferred for AI development. Windows is more user-friendly.",
}
|
|
52
|
+
|
|
53
|
+
# ── Local search ──────────────────────────────────────────────
def _search(query: str, top: int = 3) -> str:
    """Rank knowledge-base sentences by keyword overlap with *query*.

    Returns the *top* best-scoring sentences joined by spaces, or "" when
    the query contains no content words or the KB is empty. Ties keep KB
    order (stable sort).
    """
    ignore = {"what", "is", "the", "a", "an", "of", "in", "to", "and", "or", "how", "why", "who"}
    cleaned = re.sub(r'[^\w\s]', '', query.lower())
    keywords = set(cleaned.split()) - ignore
    if not keywords or not _KB:
        return ""
    hits = []
    for sentence in _KB:
        lowered = sentence.lower()
        score = sum(word in lowered for word in keywords)
        if score:
            hits.append((score, sentence))
    hits.sort(key=lambda pair: pair[0], reverse=True)
    return " ".join(sentence for _, sentence in hits[:top])
|
|
63
|
+
|
|
64
|
+
# ── Live Wikipedia fetch ──────────────────────────────────────
def _live(query: str) -> str:
    """Fetch up to four summary sentences for *query* from Wikipedia.

    Uses the REST summary endpoint; best effort — returns "" on any
    network, HTTP, or parse failure.
    """
    try:
        slug = urllib.parse.quote(query.replace(" ", "_"))
        target = "https://en.wikipedia.org/api/rest_v1/page/summary/" + slug
        request = urllib.request.Request(target, headers={"User-Agent": "NanoModel/1.0"})
        with urllib.request.urlopen(request, timeout=6) as resp:
            payload = json.loads(resp.read().decode())
    except Exception:
        return ""
    summary = payload.get("extract", "")
    if not summary:
        return ""
    sentences = re.split(r'(?<=[.!?])\s+', summary)
    return " ".join(sentences[:4])
|
|
79
|
+
|
|
80
|
+
# ── Math ──────────────────────────────────────────────────────
def solve(expression: str) -> str:
    """Solve an equation or numerically evaluate an expression with SymPy.

    If *expression* contains '=', solve lhs = rhs for its alphabetically
    first free symbol; otherwise simplify and numerically evaluate it.

    Returns:
        The solution list / numeric value as a string, or
        "Math error: ..." when parsing/solving fails (including when
        SymPy itself is unavailable — the import is deliberately lazy).
    """
    try:
        import sympy as sp
        from sympy.parsing.sympy_parser import (
            parse_expr, standard_transformations, implicit_multiplication_application
        )
        # Allow shorthand like "2x" to mean "2*x".
        transforms = standard_transformations + (implicit_multiplication_application,)
        if "=" in expression:
            left, right = expression.split("=", 1)
            delta = (parse_expr(left, transformations=transforms)
                     - parse_expr(right, transformations=transforms))
            unknowns = delta.free_symbols
            if unknowns:
                variable = sorted(unknowns, key=str)[0]
                return str(sp.solve(delta, variable))
        value = sp.simplify(parse_expr(expression, transformations=transforms))
        return str(sp.N(value))
    except Exception as err:
        return f"Math error: {err}"
|
|
101
|
+
|
|
102
|
+
# ── Think ─────────────────────────────────────────────────────
def think(question: str, name: str = "Nano", ver: str = "1.0") -> str:
    """Answer *question* by cascading through the available sources.

    Order: canned identity answers (_SOUL), hard-coded comparisons
    (_COMPARE), the bundled knowledge base (_search), then a live
    Wikipedia lookup (_live), finally a fallback message.

    Args:
        question: free-form user question.
        name: assistant display name used in identity answers.
        ver: version string used in identity answers.
    """
    q = question.lower().strip()

    # 1. Identity / meta questions (substring match, insertion order).
    for kw, ans in _SOUL.items():
        if kw in q:
            return ans.format(name=name, ver=ver)

    # 2. Canned comparisons — fire when both keywords are present.
    # Fix: the original tested `(a in q and b in q) or (b in q and a in q)`,
    # whose two sides are identical; the condition reduces to a single test.
    for (a, b), ans in _COMPARE.items():
        if a in q and b in q:
            return ans

    # 3. Bundled knowledge base; require a reasonably substantial hit.
    local = _search(question)
    if local and len(local) > 50:
        return local

    # 4. Live Wikipedia summary (best effort, "" on failure).
    live = _live(question)
    if live:
        return live

    return f"I am {name}. I don't have enough knowledge on that yet. Try asking something else."
|
|
127
|
+
|
|
128
|
+
# ── Chat ──────────────────────────────────────────────────────
def chat(message: str, name: str = "Nano", ver: str = "1.0") -> str:
    """Conversational entry point — delegates straight to think()."""
    reply = think(message, name=name, ver=ver)
    return reply
|
|
131
|
+
|
|
132
|
+
# ── Nano_Intelligence class ───────────────────────────────────
class Nano_Intelligence:
    """Nano Intelligence personal assistant.

    A thin stateful wrapper around the module-level think()/solve()
    functions that adds a display name, a version string and a chat
    transcript.

    Usage:
        import nano_package as NanoModel

        N = NanoModel.Nano_Intelligence(Model_name="Jarvis", Version="1.0")
        print(N.chat("hello"))
        print(N.think("what is machine learning"))
        print(N.solve("x^2 = 16"))
        print(N)
    """

    def __init__(self, Model_name: str = "Nano", Version: str = "1.0"):
        self.Model_name = Model_name
        self.Version = Version
        self._history = []  # alternating user / model transcript entries
        # Announce readiness in the package's cyan accent colour.
        print(f"\033[96m [{self.Model_name}] Nano Intelligence v{self.Version} ready.\033[0m")

    def chat(self, message: str) -> str:
        """Answer *message* and record both turns in the transcript."""
        self._history.append({"role": "user", "content": message})
        reply = think(message, name=self.Model_name, ver=self.Version)
        self._history.append({"role": self.Model_name, "content": reply})
        return reply

    def think(self, question: str) -> str:
        """Answer *question* without touching the transcript."""
        return think(question, name=self.Model_name, ver=self.Version)

    def solve(self, expression: str) -> str:
        """Delegate math solving to the module-level solve()."""
        return solve(expression)

    def reset(self):
        """Forget the conversation so far."""
        del self._history[:]

    def history(self) -> list:
        """Return the live transcript list (not a copy)."""
        return self._history

    def __call__(self, message: str) -> str:
        """Calling the instance is shorthand for .chat()."""
        return self.chat(message)

    def __repr__(self):
        card = (
            f"\n\033[96m {self.Model_name}\033[0m\n"
            f" Powered by Nano Intelligence v{self.Version}\n"
            f" Created by Sujan Sadhu\n"
            f" Turns in memory: {len(self._history)}\n"
        )
        return card
|