sklearnforpython312 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21
+ DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,79 @@
1
+ Metadata-Version: 2.4
2
+ Name: sklearnforpython312
3
+ Version: 0.1.0
4
+ Summary: Machine learning examples and algorithms using scikit-learn for Python 3.12
5
+ Author-email: Your Name <your.email@example.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/yourusername/sklearnforpython312
8
+ Project-URL: Documentation, https://github.com/yourusername/sklearnforpython312
9
+ Project-URL: Repository, https://github.com/yourusername/sklearnforpython312.git
10
+ Project-URL: Issues, https://github.com/yourusername/sklearnforpython312/issues
11
+ Keywords: machine-learning,scikit-learn,sklearn,algorithms,examples
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Intended Audience :: Education
15
+ Classifier: License :: OSI Approved :: MIT License
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.8
18
+ Classifier: Programming Language :: Python :: 3.9
19
+ Classifier: Programming Language :: Python :: 3.10
20
+ Classifier: Programming Language :: Python :: 3.11
21
+ Classifier: Programming Language :: Python :: 3.12
22
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
23
+ Requires-Python: >=3.8
24
+ Description-Content-Type: text/markdown
25
+ License-File: LICENSE
26
+ Requires-Dist: scikit-learn>=1.0.0
27
+ Requires-Dist: numpy>=1.20.0
28
+ Requires-Dist: pandas>=1.3.0
29
+ Requires-Dist: matplotlib>=3.4.0
30
+ Dynamic: license-file
31
+
32
+ # sklearnforpython312
33
+
34
+ A comprehensive collection of machine learning algorithms and examples using scikit-learn for Python 3.12.
35
+
36
+ ## About
37
+
38
+ This package contains implementations and demonstrations of various machine learning techniques including:
39
+
40
+ - **Clustering Methods** (em.py, id.py)
41
+ - **Classification Algorithms** (knn.py, knn2.py, lr.py, lr1.py, nb.py, svm2.py, svm3.py)
42
+ - **Dimensionality Reduction** (pca.py)
43
+ - **Neural Networks** (slp.py, slp2.py, mlp.py, mlp2.py, mlpag.py)
44
+ - **Regression** (lwr.py)
45
+ - **Utilities** (cm.py, nl.py, sp.py, dt.py, dt1.py)
46
+
47
+ ## Installation
48
+
49
+ ```bash
50
+ pip install sklearnforpython312
51
+ ```
52
+
53
+ ## Requirements
54
+
55
+ - Python 3.8+
56
+ - scikit-learn >= 1.0.0
57
+ - numpy >= 1.20.0
58
+ - pandas >= 1.3.0
59
+ - matplotlib >= 3.4.0
60
+
61
+ ## Usage
62
+
63
+ Each module in this package demonstrates different ML techniques. Import them individually:
64
+
65
+ ```python
66
+ import sklearnforpython312
67
+ ```
68
+
69
+ ## License
70
+
71
+ MIT License
72
+
73
+ ## Contributing
74
+
75
+ Contributions are welcome! Please feel free to submit pull requests.
76
+
77
+ ## Author
78
+
79
+ Your Name
@@ -0,0 +1,48 @@
1
+ # sklearnforpython312
2
+
3
+ A comprehensive collection of machine learning algorithms and examples using scikit-learn for Python 3.12.
4
+
5
+ ## About
6
+
7
+ This package contains implementations and demonstrations of various machine learning techniques including:
8
+
9
+ - **Clustering Methods** (em.py, id.py)
10
+ - **Classification Algorithms** (knn.py, knn2.py, lr.py, lr1.py, nb.py, svm2.py, svm3.py)
11
+ - **Dimensionality Reduction** (pca.py)
12
+ - **Neural Networks** (slp.py, slp2.py, mlp.py, mlp2.py, mlpag.py)
13
+ - **Regression** (lwr.py)
14
+ - **Utilities** (cm.py, nl.py, sp.py, dt.py, dt1.py)
15
+
16
+ ## Installation
17
+
18
+ ```bash
19
+ pip install sklearnforpython312
20
+ ```
21
+
22
+ ## Requirements
23
+
24
+ - Python 3.8+
25
+ - scikit-learn >= 1.0.0
26
+ - numpy >= 1.20.0
27
+ - pandas >= 1.3.0
28
+ - matplotlib >= 3.4.0
29
+
30
+ ## Usage
31
+
32
+ Each module in this package demonstrates different ML techniques. Import them individually:
33
+
34
+ ```python
35
+ import sklearnforpython312
36
+ ```
37
+
38
+ ## License
39
+
40
+ MIT License
41
+
42
+ ## Contributing
43
+
44
+ Contributions are welcome! Please feel free to submit pull requests.
45
+
46
+ ## Author
47
+
48
+ Your Name
@@ -0,0 +1,19 @@
1
# Train a logistic-regression classifier on a synthetic binary dataset
# and visualise its confusion matrix on the held-out split.
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import ConfusionMatrixDisplay
from sklearn.model_selection import train_test_split

# Synthetic 200-sample binary classification problem (fixed seed).
X, y = make_classification(n_samples=200, random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

clf = LogisticRegression().fit(X_train, y_train)

# Confusion matrix computed directly from the fitted estimator on the
# test split.
ConfusionMatrixDisplay.from_estimator(clf, X_test, y_test, cmap="Blues")
plt.show()
@@ -0,0 +1,22 @@
1
# Decision-tree classification of the Iris dataset: report hold-out
# accuracy, then classify one hand-written flower measurement.
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

iris = load_iris()
X, y = iris.data, iris.target

# 70/30 train/test split, seeded for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42
)

clf = DecisionTreeClassifier()
clf.fit(X_train, y_train)

y_pred = clf.predict(X_test)
accuracy = accuracy_score(y_test, y_pred)
print("Accuracy of Decision Tree classifier:", accuracy)

# Predict the species of a single unseen sample
# (sepal length/width, petal length/width).
sample = [[5.1, 3.5, 1.4, 0.2]]
prediction = clf.predict(sample)[0]
print(f"Predicted species for sample is: {iris.target_names[prediction]}")
@@ -0,0 +1,11 @@
1
# Fit a decision tree on the full Iris dataset and render the tree diagram.
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier, plot_tree

iris = load_iris()
X, y = iris.data, iris.target

# NOTE: trained on the complete dataset — the plot illustrates the tree's
# structure, not generalisation performance.
tree_clf = DecisionTreeClassifier().fit(X, y)

plt.figure(figsize=(10, 8))
plot_tree(
    tree_clf,
    filled=True,
    feature_names=iris.feature_names,
    class_names=iris.target_names,
)
plt.show()
@@ -0,0 +1,30 @@
1
# Compare soft clustering via EM (Gaussian mixture) with hard K-Means
# clustering on the feature matrix loaded from data.csv.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.mixture import GaussianMixture
from sklearn.preprocessing import StandardScaler

# Load the raw feature matrix and standardise each column to zero mean /
# unit variance so both algorithms see comparable feature scales.
frame = pd.read_csv("data.csv")
X = frame.values
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

k = 2  # number of clusters / mixture components

# EM: fit a full-covariance Gaussian mixture, then take the MAP component
# assignment for each row.
gmm = GaussianMixture(n_components=k, covariance_type='full', random_state=42)
gmm.fit(X_scaled)
em_labels = gmm.predict(X_scaled)
print("EM Algorithm cluster labels:")
print(em_labels)

# K-Means baseline with the same k and seed.
kmeans = KMeans(n_clusters=k, random_state=42, n_init=10)
kmeans.fit(X_scaled)
kmeans_labels = kmeans.labels_
print("\nK-Means cluster labels:")
print(kmeans_labels)
@@ -0,0 +1,31 @@
1
import math
from collections import Counter


def entropy(data):
    """Shannon entropy (base 2) of a list of class labels.

    Generalised from the original yes/no-only version: works for any
    number of distinct labels (binary yes/no lists give identical
    results), and returns 0 for an empty list instead of dividing by
    zero.
    """
    total = len(data)
    if total == 0:
        return 0
    return -sum(
        (count / total) * math.log2(count / total)
        for count in Counter(data).values()
    )


def info_gain(data, attr_data):
    """Information gain from splitting `data` on the parallel list `attr_data`.

    gain = H(data) - sum over attribute values of (|subset|/|data|) * H(subset)
    """
    gain = entropy(data)
    for value in set(attr_data):
        subset = [label for label, attr in zip(data, attr_data) if attr == value]
        gain -= (len(subset) / len(data)) * entropy(subset)
    return gain


# Toy play-tennis style dataset (4 samples).
play = ['no', 'no', 'yes', 'yes']
outlook = ['sunny', 'sunny', 'overcast', 'rain']
temp = ['hot', 'hot', 'hot', 'mild']
wind = ['weak', 'strong', 'weak', 'weak']  # fixed 'weakj' typo in the data
humidity = ['high', 'high', 'high', 'high']

print("entropy:", entropy(play))
print("information gain(outlook):", info_gain(play, outlook))
print("information gain(temp):", info_gain(play, temp))
print("information gain(wind):", info_gain(play, wind))
print("information gain(humidity):", info_gain(play, humidity))
@@ -0,0 +1,18 @@
1
# 3-nearest-neighbour classifier fitted and evaluated on the full Iris
# dataset (resubstitution check: list which samples come out right/wrong).
from sklearn.datasets import load_iris
from sklearn.neighbors import KNeighborsClassifier

iris = load_iris()
X = iris.data
y = iris.target

knn = KNeighborsClassifier(n_neighbors=3)
knn.fit(X, y)
pred = knn.predict(X)

print("Correct predictions:")
for idx, (actual, guessed) in enumerate(zip(y, pred)):
    if actual == guessed:
        print(idx, actual, guessed)

print("\nWrong predictions:")
for idx, (actual, guessed) in enumerate(zip(y, pred)):
    if actual != guessed:
        print(idx, actual, guessed)
@@ -0,0 +1,22 @@
1
# 3-NN on Iris with a 70/30 split; prints each correctly classified test
# sample together with its actual and predicted species names.
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

iris = load_iris()
X = iris.data
y = iris.target
target_names = iris.target_names

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42
)

knn = KNeighborsClassifier(n_neighbors=3)
knn.fit(X_train, y_train)
y_pred = knn.predict(X_test)

print("correct prediction:")
for i, (actual, predicted) in enumerate(zip(y_test, y_pred)):
    if actual == predicted:
        print(f"sample{i}:actual={target_names[actual]},"
              f"predicted={target_names[predicted]}")
@@ -0,0 +1,25 @@
1
# Simple linear regression: predict ice-cream sales from temperature,
# print the fitted line parameters, and plot the fit.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression

# Training data: temperature (deg C) vs. ice-cream sales.
tem = np.array([20, 22, 24, 26, 28, 30, 32]).reshape(-1, 1)
sal = np.array([100, 120, 150, 170, 200, 230, 260])

model = LinearRegression()
model.fit(tem, sal)
print("slope(m):", model.coef_[0])
print("intercept(c):", model.intercept_)

# Predict sales at an unseen temperature; pass an ndarray rather than a
# bare nested list to match the training input type.
new_tem = 27
predicted_sales = model.predict(np.array([[new_tem]]))
# Fixed typo in the original user-facing message ("prdicted").
print(f"predicted ice-cream sales at {new_tem}:", predicted_sales)

# Visualise the data and the fitted regression line.
plt.scatter(tem, sal, label="data points")
plt.plot(tem, model.predict(tem), label="best fit lines")
plt.xlabel("temperature")
plt.ylabel("ice-cream sales")
plt.title("linear regression:temp vs sales")
plt.legend()
plt.show()
@@ -0,0 +1,15 @@
1
# Logistic regression: classify whether a day's ice-cream sales are
# "high" (strictly above 200) from its temperature.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression

tem = np.array([20, 22, 24, 26, 28, 30, 32]).reshape(-1, 1)
sal = np.array([100, 120, 150, 170, 200, 230, 260])

# Binary target: 1 when sales exceed 200, else 0.
y = (sal > 200).astype(int)

model = LogisticRegression()
model.fit(tem, y)

new_tem = 27
pred = model.predict([[new_tem]])[0]
print("temp:", new_tem)
print("high sales:", pred)
@@ -0,0 +1,17 @@
1
import numpy as np
import math


def lwr(x, y, x0, tau):
    """Locally weighted (kernel-smoothed) estimate of y at query point x0.

    Each sample (x[i], y[i]) contributes with a Gaussian weight
    exp(-(x[i] - x0)^2 / (2 * tau^2)); tau is the kernel bandwidth.

    Fix: the original `for` line was missing its trailing colon, which
    made the whole module a SyntaxError.
    """
    numerator = 0.0
    denominator = 0.0
    for xi, yi in zip(x, y):
        wi = math.exp(-((xi - x0) ** 2) / (2 * tau ** 2))
        numerator += wi * yi
        denominator += wi
    return numerator / denominator


x = np.array([1, 2, 8])
y = np.array([2, 4, 6])
x0 = 2.5
tau = 2.5
y_pred = lwr(x, y, x0, tau)
print("Predicted value at x =", x0, "is", y_pred)
@@ -0,0 +1,13 @@
1
import numpy as np

# XOR inputs and targets.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

# Fixed seed so the random weights (and hence the output) are reproducible.
np.random.seed(1)
w1 = np.random.rand(2, 2)  # input -> hidden weights
w2 = np.random.rand(2, 1)  # hidden -> output weights


def sigmoid(x):
    """Logistic activation function."""
    return 1 / (1 + np.exp(-x))


# Single forward pass. The original looped 10000 times but never updated
# the weights, so every iteration recomputed the exact same result; that
# loop was pure wasted work and is removed here (output is unchanged).
# NOTE(review): there is no training step at all, so the "prediction" is
# just the output of the random initial weights.
h = sigmoid(np.dot(X, w1))
output = sigmoid(np.dot(h, w2))

print("predicted output:")
print(np.round(output))
@@ -0,0 +1,24 @@
1
import numpy as np

# XOR truth table.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

# Reproducible random initialisation of a bias-free 2-2-1 network.
np.random.seed(1)
w1 = np.random.rand(2, 2)  # input -> hidden
w2 = np.random.rand(2, 1)  # hidden -> output
lr = 0.1


def sigmoid(x):
    """Logistic activation function."""
    return 1 / (1 + np.exp(-x))


def sigmoid_derivative(x):
    """Derivative of the logistic, expressed via its *output* value x."""
    return x * (1 - x)


# Plain batch backpropagation (no bias terms, matching the original).
for _ in range(10000):
    # Forward pass.
    h = sigmoid(X @ w1)
    output = sigmoid(h @ w2)
    # Backward pass: propagate the output error to both layers.
    error = y - output
    d_output = error * sigmoid_derivative(output)
    error_hidden = d_output @ w2.T
    d_hidden = error_hidden * sigmoid_derivative(h)
    # Gradient step (the error term already carries the sign).
    w2 += h.T @ d_output * lr
    w1 += X.T @ d_hidden * lr

print("predicted output:")
print(np.round(output))
@@ -0,0 +1,30 @@
1
import numpy as np


def sigmoid(x):
    """Logistic activation function."""
    return 1 / (1 + np.exp(-x))


def sigmoid_derivative(x):
    """Derivative of the logistic, expressed via its *output* value x."""
    return x * (1 - x)


# AND-gate truth table.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [0], [0], [1]])

# Reproducible random initialisation of a 2-2-1 network with biases.
np.random.seed(1)
w1 = np.random.rand(2, 2)
b1 = np.random.rand(1, 2)
w2 = np.random.rand(2, 1)
b2 = np.random.rand(1, 1)

lr = 0.5
epochs = 100000

for _ in range(epochs):
    # Forward pass through hidden and output layers.
    h = sigmoid(X @ w1 + b1)
    out = sigmoid(h @ w2 + b2)
    # Backpropagate the output error through both layers.
    error = y - out
    delta2 = error * sigmoid_derivative(out)
    delta1 = (delta2 @ w2.T) * sigmoid_derivative(h)
    # Batch gradient updates for weights and biases.
    w2 += h.T @ delta2 * lr
    b2 += np.sum(delta2, axis=0, keepdims=True) * lr
    w1 += X.T @ delta1 * lr
    b1 += np.sum(delta1, axis=0, keepdims=True) * lr

print("Input :", X)
print("Predicted Output :", np.round(out))
print("Actual Output :", y)
@@ -0,0 +1,18 @@
1
# Gaussian Naive Bayes on Iris with a 60/40 split; prints overall
# accuracy and the per-class precision/recall report.
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score, classification_report
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB

iris = load_iris()
x = iris.data
y = iris.target

x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.4, random_state=42
)

nb = GaussianNB()
nb.fit(x_train, y_train)
y_pred = nb.predict(x_test)

acc = accuracy_score(y_test, y_pred)
print("accuracy:", acc * 100)
print(classification_report(y_test, y_pred))
@@ -0,0 +1,28 @@
1
# RBF-kernel SVM on a small 2-D toy set, with the learned (non-linear)
# decision boundary drawn as the zero level set of the decision function.
import numpy as np
import matplotlib.pyplot as p
from sklearn.svm import SVC

X = np.array([
    [1, 2], [2, 3], [3, 1], [3, 3], [6, 6], [7, 8], [8, 6], [9, 7]
])
y = np.array([0, 0, 0, 0, 1, 1, 1, 1])

model = SVC(kernel='rbf', gamma='scale', C=10)
model.fit(X, y)

p.scatter(X[:, 0], X[:, 1], c=y, cmap='coolwarm', s=80)
p.xlabel("feature 1:")
p.ylabel("feature 2:")
p.title("non linear svm:")

ax = p.gca()
xlim = ax.get_xlim()
ylim = ax.get_ylim()
xx = np.linspace(xlim[0], xlim[1], 200)
# BUG FIX: the original built the vertical grid from xlim, so the
# decision function was evaluated over the wrong y-range; use ylim.
yy = np.linspace(ylim[0], ylim[1], 200)
YY, XX = np.meshgrid(yy, xx)
xy = np.vstack([XX.ravel(), YY.ravel()]).T
Z = model.decision_function(xy).reshape(XX.shape)
ax.contour(XX, YY, Z, levels=[0], linewidths=2)
p.show()
@@ -0,0 +1,22 @@
1
# Hand-rolled PCA: project the data from data.csv onto its first
# principal component and plot the 1-D embedding.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

data = pd.read_csv("data.csv")
X = data.values

# Centre each feature; PCA is defined on mean-centred data.
X_meaned = X - np.mean(X, axis=0)
cov_matrix = np.cov(X_meaned.T)

# BUG FIX: the covariance matrix is symmetric, so use eigh instead of
# eig — eig can return complex eigenvalues/eigenvectors with tiny
# imaginary parts due to round-off, which breaks sorting and plotting.
eigen_values, eigen_vectors = np.linalg.eigh(cov_matrix)

# Sort components by decreasing explained variance.
idx = np.argsort(eigen_values)[::-1]
eigen_vectors = eigen_vectors[:, idx]
eigen_values = eigen_values[idx]

k = 1  # number of principal components to keep
principal_components = eigen_vectors[:, :k]
# Note: eigenvector sign is arbitrary in PCA, so the projection may be
# mirrored relative to another eigensolver's output.
X_pca = X_meaned @ principal_components

print("Reduced Data after PCA:")
print(X_pca)

plt.scatter(X_pca, np.zeros_like(X_pca))
plt.xlabel("Principal Component 1")
plt.title("PCA Reduced Data")
plt.tight_layout()
plt.show()
@@ -0,0 +1,44 @@
1
+ [build-system]
2
+ requires = ["setuptools>=65.0", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "sklearnforpython312"
7
+ version = "0.1.0"
8
+ description = "Machine learning examples and algorithms using scikit-learn for Python 3.12"
9
+ readme = "README.md"
10
+ requires-python = ">=3.8"
11
+ license = {text = "MIT"}
12
+ authors = [
13
+ {name = "Your Name", email = "your.email@example.com"}
14
+ ]
15
+ keywords = ["machine-learning", "scikit-learn", "sklearn", "algorithms", "examples"]
16
+ classifiers = [
17
+ "Development Status :: 3 - Alpha",
18
+ "Intended Audience :: Developers",
19
+ "Intended Audience :: Education",
20
+ "License :: OSI Approved :: MIT License",
21
+ "Programming Language :: Python :: 3",
22
+ "Programming Language :: Python :: 3.8",
23
+ "Programming Language :: Python :: 3.9",
24
+ "Programming Language :: Python :: 3.10",
25
+ "Programming Language :: Python :: 3.11",
26
+ "Programming Language :: Python :: 3.12",
27
+ "Topic :: Scientific/Engineering :: Artificial Intelligence",
28
+ ]
29
+
30
+ dependencies = [
31
+ "scikit-learn>=1.0.0",
32
+ "numpy>=1.20.0",
33
+ "pandas>=1.3.0",
34
+ "matplotlib>=3.4.0",
35
+ ]
36
+
37
+ [project.urls]
38
+ Homepage = "https://github.com/yourusername/sklearnforpython312"
39
+ Documentation = "https://github.com/yourusername/sklearnforpython312"
40
+ Repository = "https://github.com/yourusername/sklearnforpython312.git"
41
+ Issues = "https://github.com/yourusername/sklearnforpython312/issues"
42
+
43
+ [tool.setuptools]
44
+ py-modules = ["cm", "dt", "dt1", "em", "id", "knn", "knn2", "lr", "lr1", "lwr", "mlp", "mlp2", "mlpag", "nb", "nl", "pca", "slp", "slp2", "sp", "svm2", "svm3"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,79 @@
1
+ Metadata-Version: 2.4
2
+ Name: sklearnforpython312
3
+ Version: 0.1.0
4
+ Summary: Machine learning examples and algorithms using scikit-learn for Python 3.12
5
+ Author-email: Your Name <your.email@example.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/yourusername/sklearnforpython312
8
+ Project-URL: Documentation, https://github.com/yourusername/sklearnforpython312
9
+ Project-URL: Repository, https://github.com/yourusername/sklearnforpython312.git
10
+ Project-URL: Issues, https://github.com/yourusername/sklearnforpython312/issues
11
+ Keywords: machine-learning,scikit-learn,sklearn,algorithms,examples
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Intended Audience :: Education
15
+ Classifier: License :: OSI Approved :: MIT License
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.8
18
+ Classifier: Programming Language :: Python :: 3.9
19
+ Classifier: Programming Language :: Python :: 3.10
20
+ Classifier: Programming Language :: Python :: 3.11
21
+ Classifier: Programming Language :: Python :: 3.12
22
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
23
+ Requires-Python: >=3.8
24
+ Description-Content-Type: text/markdown
25
+ License-File: LICENSE
26
+ Requires-Dist: scikit-learn>=1.0.0
27
+ Requires-Dist: numpy>=1.20.0
28
+ Requires-Dist: pandas>=1.3.0
29
+ Requires-Dist: matplotlib>=3.4.0
30
+ Dynamic: license-file
31
+
32
+ # sklearnforpython312
33
+
34
+ A comprehensive collection of machine learning algorithms and examples using scikit-learn for Python 3.12.
35
+
36
+ ## About
37
+
38
+ This package contains implementations and demonstrations of various machine learning techniques including:
39
+
40
+ - **Clustering Methods** (em.py, id.py)
41
+ - **Classification Algorithms** (knn.py, knn2.py, lr.py, lr1.py, nb.py, svm2.py, svm3.py)
42
+ - **Dimensionality Reduction** (pca.py)
43
+ - **Neural Networks** (slp.py, slp2.py, mlp.py, mlp2.py, mlpag.py)
44
+ - **Regression** (lwr.py)
45
+ - **Utilities** (cm.py, nl.py, sp.py, dt.py, dt1.py)
46
+
47
+ ## Installation
48
+
49
+ ```bash
50
+ pip install sklearnforpython312
51
+ ```
52
+
53
+ ## Requirements
54
+
55
+ - Python 3.8+
56
+ - scikit-learn >= 1.0.0
57
+ - numpy >= 1.20.0
58
+ - pandas >= 1.3.0
59
+ - matplotlib >= 3.4.0
60
+
61
+ ## Usage
62
+
63
+ Each module in this package demonstrates different ML techniques. Import them individually:
64
+
65
+ ```python
66
+ import sklearnforpython312
67
+ ```
68
+
69
+ ## License
70
+
71
+ MIT License
72
+
73
+ ## Contributing
74
+
75
+ Contributions are welcome! Please feel free to submit pull requests.
76
+
77
+ ## Author
78
+
79
+ Your Name
@@ -0,0 +1,29 @@
1
+ LICENSE
2
+ README.md
3
+ cm.py
4
+ dt.py
5
+ dt1.py
6
+ em.py
7
+ id.py
8
+ knn.py
9
+ knn2.py
10
+ lr.py
11
+ lr1.py
12
+ lwr.py
13
+ mlp.py
14
+ mlp2.py
15
+ mlpag.py
16
+ nb.py
17
+ nl.py
18
+ pca.py
19
+ pyproject.toml
20
+ slp.py
21
+ slp2.py
22
+ sp.py
23
+ svm2.py
24
+ svm3.py
25
+ sklearnforpython312.egg-info/PKG-INFO
26
+ sklearnforpython312.egg-info/SOURCES.txt
27
+ sklearnforpython312.egg-info/dependency_links.txt
28
+ sklearnforpython312.egg-info/requires.txt
29
+ sklearnforpython312.egg-info/top_level.txt
@@ -0,0 +1,4 @@
1
+ scikit-learn>=1.0.0
2
+ numpy>=1.20.0
3
+ pandas>=1.3.0
4
+ matplotlib>=3.4.0
@@ -0,0 +1,21 @@
1
+ cm
2
+ dt
3
+ dt1
4
+ em
5
+ id
6
+ knn
7
+ knn2
8
+ lr
9
+ lr1
10
+ lwr
11
+ mlp
12
+ mlp2
13
+ mlpag
14
+ nb
15
+ nl
16
+ pca
17
+ slp
18
+ slp2
19
+ sp
20
+ svm2
21
+ svm3
@@ -0,0 +1,14 @@
1
# Single-layer perceptron: one pass over the AND-gate truth table.
X = [(0, 0), (0, 1), (1, 0), (1, 1)]
y = [0, 0, 0, 1]

w = [0, 0]  # one weight per input
b = 0       # bias
lr = 1      # learning rate

# NOTE: exactly one pass over the data (no epoch loop), matching the
# original — the weights are not guaranteed to have converged.
for inputs, target in zip(X, y):
    x1, x2 = inputs
    activation = x1 * w[0] + x2 * w[1] + b
    predicted = 1 if activation >= 0 else 0
    delta = target - predicted
    # Standard perceptron update rule.
    w[0] += lr * delta * x1
    w[1] += lr * delta * x2
    b += lr * delta

print("weights:", w)
print("bias:", b)
@@ -0,0 +1,19 @@
1
+ X=[(0,0),(0,1),(1,0),(1,1)]
2
+ y=[0,0,0,1]
3
+ w1,w2=0,0
4
+ b=0
5
+ lr=0.1
6
+ for epoch in range(10):
7
+ print("epoch:",epoch+1)
8
+ for i in range (len(X)):
9
+ x1,x2=X[i]
10
+ target=y[i]
11
+ net=x1*w1+x2*w2+b
12
+ out=1 if net >=0 else 0
13
+ error=target-out
14
+ w1+=lr*error*x1
15
+ w2+=lr*error*x2
16
+ b+=lr*error
17
+ print(X[i],"output:",out,"target:",target)
18
+ print("final weight:",w1,w2)
19
+ print("final bias:",b)
@@ -0,0 +1,16 @@
1
# Tiny spam detector: bag-of-words features + multinomial naive Bayes.
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB

# Training corpus, 0 = ham, 1 = spam. (Typos are part of the original data.)
emails = ["hello,how are you today?",
          "click here for a prize",
          "meeing tomorrow at 10 am",
          "urgent:your account is been suspend,click the link to fix"]
labels = [0, 1, 0, 1]
new_email = "get a huge discount and amzing offer toady!"

# BUG FIX: the original constructed and fitted a *second* CountVectorizer
# just to transform the new email; reuse one vectorizer so the vocabulary
# is fitted exactly once and is guaranteed consistent with the model.
vectorizer = CountVectorizer()
x = vectorizer.fit_transform(emails)
model = MultinomialNB().fit(x, labels)
pred = model.predict(vectorizer.transform([new_email]))
print(pred)
@@ -0,0 +1,20 @@
1
+ import numpy as np
2
+ import matplotlib.pyplot as p
3
+ from sklearn.svm import SVC
4
+ X =
5
+ np.array([[2,400],[3,450],[5,300],[6,520],[
6
+ 7,580],[8,220]])
7
+ y = np.array([0,0,0,0,1,1])
8
+ model = SVC(kernel='linear')
9
+ model.fit(X, y)
10
+ p.scatter(X[:,0], X[:,1], c=y,
11
+ cmap='coolwarm', s=80)
12
+ p.xlabel("annual income (in lakhs)")
13
+ p.ylabel("credit score")
14
+ p.title("svm for loan approval")
15
+ w = model.coef_[0]
16
+ b = model.intercept_[0]
17
+ x_vals = np.linspace(1, 10, 100)
18
+ y_vals = -(w[0]*x_vals + b) / w[1]
19
+ p.plot(x_vals, y_vals, 'k--', linewidth=2)
20
+ p.show()
@@ -0,0 +1,25 @@
1
# Linear SVM on three clinical features, visualised as a 3-D scatter plot
# of the two classes.
import numpy as np
import matplotlib.pyplot as p
from sklearn.svm import SVC
from mpl_toolkits.mplot3d import axes3d

x = np.array([[30, 120, 180], [35, 125, 190], [40, 130, 200],
              [45, 135, 210], [50, 150, 240], [55, 155, 240]])
y = np.array([0, 0, 0, 0, 1, 1])

model = SVC(kernel='linear')
model.fit(x, y)

# Hyperplane parameters (computed but not drawn, as in the original).
w = model.coef_[0]
b = model.intercept_[0]

fig = p.figure()
ax = fig.add_subplot(111, projection="3d")
ax.scatter(x[y == 0, 0], x[y == 0, 1], x[y == 0, 2], label='no disease')
ax.scatter(x[y == 1, 0], x[y == 1, 1], x[y == 1, 2], label='disease')
ax.set_xlabel('age')
ax.set_ylabel('bp')
ax.set_zlabel('ch')
ax.set_title("3 feature svm")
# BUG FIX: the original called p.legend() *after* p.show(), so the legend
# never appeared on the displayed figure; attach it before showing.
ax.legend()
p.show()