oikan-0.0.1.2-py3-none-any.whl → oikan-0.0.1.4-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- oikan/symbolic.py +127 -16
- oikan-0.0.1.4.dist-info/METADATA +81 -0
- {oikan-0.0.1.2.dist-info → oikan-0.0.1.4.dist-info}/RECORD +5 -5
- oikan-0.0.1.2.dist-info/METADATA +0 -65
- {oikan-0.0.1.2.dist-info → oikan-0.0.1.4.dist-info}/WHEEL +0 -0
- {oikan-0.0.1.2.dist-info → oikan-0.0.1.4.dist-info}/top_level.txt +0 -0
oikan/symbolic.py
CHANGED
@@ -1,21 +1,132 @@
 import torch
 import numpy as np
+import networkx as nx
+import matplotlib.pyplot as plt
 
-… (3 removed lines; contents not preserved in the rendered diff)
+ADVANCED_LIB = {
+    'x': lambda x: x,
+    'x^2': lambda x: x**2,
+    'x^3': lambda x: x**3,
+    'x^4': lambda x: x**4,
+    'x^5': lambda x: x**5,
+    'exp': lambda x: np.exp(x),
+    'log': lambda x: np.log(np.abs(x) + 1e-8),
+    'sqrt': lambda x: np.sqrt(np.abs(x)),
+    'tanh': lambda x: np.tanh(x),
+    'sin': lambda x: np.sin(x),
+    'abs': lambda x: np.abs(x)
+}
+
+# STEP-1: Helper functions
+def get_model_predictions(model, X, mode):
+    """Compute model predictions and return target values (and raw preds for classification)."""
+    X_tensor = torch.FloatTensor(X)
     with torch.no_grad():
-… (10 removed lines; contents not preserved in the rendered diff)
+        preds = model(X_tensor)
+    if mode == 'regression':
+        return preds.detach().cpu().numpy().flatten(), None
+    elif mode == 'classification':
+        out = preds.detach().cpu().numpy()
+        target = (out[:, 0] - out[:, 1]).flatten() if (out.ndim > 1 and out.shape[1] > 1) else out.flatten()
+        return target, out
+    else:
+        raise ValueError("Unknown mode")
+
+def build_design_matrix(X, return_names=False):
+    """Build the design matrix using the advanced nonlinear bases."""
+    X_np = np.array(X)
+    n_samples, d = X_np.shape
+    F_parts = [np.ones((n_samples, 1))]
+    names = ['1'] if return_names else None
+    for j in range(d):
+        xj = X_np[:, j:j+1]
+        for key, func in ADVANCED_LIB.items():
+            F_parts.append(func(xj))
+            if return_names:
+                names.append(f"{key}(x{j+1})")
+    return (np.hstack(F_parts), names) if return_names else np.hstack(F_parts)
+
+# STEP-2: Main functions using helpers
+def extract_symbolic_formula(model, X, mode='regression'):
+    """
+    Approximate a symbolic formula from the model using advanced nonlinear bases.
+    """
+    n_samples = np.array(X).shape[0]
+    y_target, _ = get_model_predictions(model, X, mode)
+    F, func_names = build_design_matrix(X, return_names=True)
+    beta, _, _, _ = np.linalg.lstsq(F, y_target, rcond=None)
+    terms = [f"({c:.2f}*{name})" for c, name in zip(beta, func_names) if abs(c) > 1e-4]
+    return " + ".join(terms)
+
+def test_symbolic_formula(model, X, mode='regression'):
+    """
+    Evaluate the extracted symbolic formula against model outputs.
+    """
+    n_samples = np.array(X).shape[0]
+    y_target, out = get_model_predictions(model, X, mode)
+    F = build_design_matrix(X, return_names=False)
+    beta, _, _, _ = np.linalg.lstsq(F, y_target, rcond=None)
+    symbolic_vals = F.dot(beta)
+    if mode == 'regression':
+        mse = np.mean((symbolic_vals - y_target) ** 2)
+        mae = np.mean(np.abs(symbolic_vals - y_target))
+        rmse = np.sqrt(mse)
+        print(f"(Advanced) MSE: {mse:.4f}, MAE: {mae:.4f}, RMSE: {rmse:.4f}")
+        return mse, mae, rmse
+    elif mode == 'classification':
+        sym_preds = np.where(symbolic_vals >= 0, 0, 1)
+        model_classes = np.argmax(out, axis=1) if (out.ndim > 1) else (out >= 0.5).astype(int)
+        if model_classes.shape[0] != sym_preds.shape[0]:
+            raise ValueError("Shape mismatch between symbolic and model predictions.")
+        accuracy = np.mean(sym_preds == model_classes)
+        print(f"(Advanced) Accuracy: {accuracy:.4f}")
+        return accuracy
+
+def plot_symbolic_formula(model, X, mode='regression'):
+    """
+    Plot a graph representation of the extracted symbolic formula.
+    """
+    formula = extract_symbolic_formula(model, X, mode)
+    G = nx.DiGraph()
+    G.add_node("Output")
+    terms = formula.split(" + ")
+    for term in terms:
+        expr = term.strip("()")
+        coeff_str, basis = expr.split("*", 1) if "*" in expr else (expr, "unknown")
+        node_label = f"{basis}\n({float(coeff_str):.2f})"
+        G.add_node(node_label)
+        G.add_edge(node_label, "Output", weight=float(coeff_str))
+    left_nodes = [n for n in G.nodes() if n != "Output"]
+    pos = {}
+    n_left = len(left_nodes)
+    for i, node in enumerate(sorted(left_nodes)):
+        pos[node] = (0, 1 - (i / max(n_left - 1, 1)))
+    pos["Output"] = (1, 0.5)
+    plt.figure(figsize=(12, 8))
+    nx.draw(G, pos, with_labels=True, node_color="skyblue", node_size=2500, font_size=10,
+            arrows=True, arrowstyle='->', arrowsize=20)
+    edge_labels = {(u, v): f"{d['weight']:.2f}" for u, v, d in G.edges(data=True)}
+    nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels, font_color='red', font_size=10)
+    plt.title("OIKAN Symbolic Formula Graph")
+    plt.axis("off")
+    plt.show()
 
-def …
-"""…
-…
+def extract_latex_formula(model, X, mode='regression'):
+    """
+    Return the extracted symbolic formula as LaTeX code.
+    """
+    formula = extract_symbolic_formula(model, X, mode)
+    terms = formula.split(" + ")
+    latex_terms = []
+    for term in terms:
+        expr = term.strip("()")
+        coeff_str, basis = expr.split("*", 1) if "*" in expr else (expr, "")
+        coeff = float(coeff_str)
+        missing = basis.count("(") - basis.count(")")
+        if missing > 0:
+            basis = basis + ")" * missing
+        coeff_latex = f"{abs(coeff):.2f}".rstrip("0").rstrip(".")
+        term_latex = coeff_latex if basis.strip() == "1" else f"{coeff_latex} \\cdot {basis.strip()}"
+        latex_terms.append(f"- {term_latex}" if coeff < 0 else f"+ {term_latex}")
+    latex_formula = " ".join(latex_terms).lstrip("+ ").strip()
+    return f"$$ {latex_formula} $$"
oikan-0.0.1.4.dist-info/METADATA
ADDED
@@ -0,0 +1,81 @@
+Metadata-Version: 2.2
+Name: oikan
+Version: 0.0.1.4
+Summary: OIKAN: Optimized Interpretable Kolmogorov-Arnold Networks
+Author: Arman Zhalgasbayev
+License: MIT
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+Requires-Dist: torch
+Requires-Dist: numpy
+Requires-Dist: sympy
+Requires-Dist: scipy
+Requires-Dist: matplotlib
+
+# OIKAN
+
+Optimized Interpretable Kolmogorov-Arnold Networks (OIKAN)
+A deep learning framework for interpretable neural networks using advanced basis functions.
+
+[](https://badge.fury.io/py/oikan)
+[](https://pypistats.org/packages/oikan)
+
+## Key Features
+- EfficientKAN layer implementation
+- Built-in visualization tools
+- Support for both regression and classification tasks
+- Symbolic formula extraction
+- Easy-to-use training interface
+- LaTeX-formatted formula extraction
+
+## Installation
+
+```bash
+git clone https://github.com/silvermete0r/OIKAN.git
+cd OIKAN
+pip install -e . # Install in development mode
+```
+
+## Quick Start
+
+### Regression Example
+```python
+from oikan.model import OIKAN
+from oikan.trainer import train
+from oikan.symbolic import extract_symbolic_formula
+
+model = OIKAN(input_dim=2, output_dim=1)
+train(model, (X_train, y_train))
+formula = extract_symbolic_formula(model, X_test, mode='regression')
+print("Extracted formula:", formula)
+```
+
+### Classification Example
+```python
+from oikan.model import OIKAN
+from oikan.trainer import train_classification
+from oikan.visualize import visualize_classification
+from oikan.symbolic import extract_symbolic_formula, plot_symbolic_formula, extract_latex_formula
+
+model = OIKAN(input_dim=2, output_dim=2)
+train_classification(model, (X_train, y_train))
+visualize_classification(model, X_test, y_test)
+formula = extract_symbolic_formula(model, X_test, mode='classification')
+print("Extracted formula:", formula)
+plot_symbolic_formula(model, X_test, mode='classification')
+latex_formula = extract_latex_formula(model, X_test, mode='classification')
+print("LaTeX:", latex_formula)
+```
+
+## Usage
+- Explore the `oikan/` folder for model architectures, training routines, and symbolic extraction.
+- Check the `examples/` directory for complete usage examples for both regression and classification.
+
+## Contributing
+Contributions are welcome! Submit a Pull Request with your improvements.
+
+## License
+This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
{oikan-0.0.1.2.dist-info → oikan-0.0.1.4.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
 oikan/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 oikan/model.py,sha256=9_U3jh1YwASbLOgHpFm4F80J3QGEhzIgQHNkqbZCPJs,2920
 oikan/regularization.py,sha256=D0Xc2lr5X5ORdA5ltvWDbNDuN8z0hkyoGzFo7pum2XE,1033
-oikan/symbolic.py,sha256=…
+oikan/symbolic.py,sha256=K1aI5JEPgKFu8dyjXxWDA-UZm8Gvfp0lU1M7c2NAPLY,5517
 oikan/trainer.py,sha256=itFCHSR_T6KHqa0D5RLRCmqFHa4lUIamsFGWKHmUZuI,1258
 oikan/utils.py,sha256=XwY6pgAgfYlUI9SOjdop91wh0_t6LfPLCiHretlw2Wg,1754
 oikan/visualize.py,sha256=8Dlk-tsqGZb63NyZBpZsLSlcsC51m2nXblQaS2Jf1y0,1142
-oikan-0.0.1.…
-oikan-0.0.1.…
-oikan-0.0.1.…
-oikan-0.0.1.…
+oikan-0.0.1.4.dist-info/METADATA,sha256=G-HWX8CkuSzPWDLDIdn_NYVehCfVrEfuLbnaSuQbsxw,2726
+oikan-0.0.1.4.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+oikan-0.0.1.4.dist-info/top_level.txt,sha256=XwnwKwTJddZwIvtrUsAz-l-58BJRj6HjAGWrfYi_3QY,6
+oikan-0.0.1.4.dist-info/RECORD,,
oikan-0.0.1.2.dist-info/METADATA
DELETED
@@ -1,65 +0,0 @@
-Metadata-Version: 2.2
-Name: oikan
-Version: 0.0.1.2
-Summary: OIKAN: Optimized Interpretable Kolmogorov-Arnold Networks
-Author: Arman Zhalgasbayev
-License: MIT
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-Requires-Dist: torch
-Requires-Dist: numpy
-Requires-Dist: sympy
-Requires-Dist: scipy
-Requires-Dist: matplotlib
-
-# OIKAN Library
-
-[](https://badge.fury.io/py/oikan)
-[](https://pypistats.org/packages/oikan)
-
-OIKAN (Optimized Implementation of Kolmogorov-Arnold Networks) is a PyTorch-based library for creating interpretable neural networks. It implements the KAN architecture to provide both accurate predictions and interpretable results.
-
-## Key Features
-
-- EfficientKAN layer implementation
-- Built-in visualization tools
-- Support for both regression and classification tasks
-- Symbolic formula extraction
-- Easy-to-use training interface
-
-## Installation
-
-```bash
-git clone https://github.com/silvermete0r/OIKAN.git
-cd OIKAN
-pip install -e . # Install in development mode
-```
-
-## Quick Start
-
-### Regression Example
-```python
-from oikan.model import OIKAN
-from oikan.trainer import train
-
-# Create and train model
-model = OIKAN(input_dim=2, output_dim=1)
-train(model, train_loader)
-
-# Extract interpretable formula
-formula = extract_symbolic_formula_regression(model, X)
-```
-
-### Classification Example
-```python
-model = OIKAN(input_dim=2, output_dim=2)
-train_classification(model, train_loader)
-visualize_classification(model, X, y)
-```
-
-## Contributing
-
-Contributions are welcome! Please feel free to submit a Pull Request.
{oikan-0.0.1.2.dist-info → oikan-0.0.1.4.dist-info}/WHEEL
File without changes

{oikan-0.0.1.2.dist-info → oikan-0.0.1.4.dist-info}/top_level.txt
File without changes