noshot-11.0.0-py3-none-any.whl → noshot-13.0.0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
- noshot/data/DLE FSD BDA/DLE/DLE 1 (Json)/1. DNN (Image Classification).ipynb +389 -0
- noshot/data/DLE FSD BDA/DLE/DLE 1 (Json)/2. DNN vs CNN.ipynb +516 -0
- noshot/data/DLE FSD BDA/DLE/DLE 1 (Json)/3. CNN (Object Detecrion).ipynb +259 -0
- noshot/data/DLE FSD BDA/DLE/DLE 1 (Json)/4. FCN (Image Segmentaion).ipynb +274 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/1.1 DNN (Pytorch).ipynb +164 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/1.2 DNN (Tensorflow).ipynb +94 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/1.3 DNN (Image Classification).ipynb +134 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/2.1 DNN vs CNN.ipynb +127 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/2.2 DNN vs CNN.ipynb +123 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/4. FCNN (Image Segmentation).ipynb +108 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/Lab Excercise (Training DNN).ipynb +646 -0
- noshot/data/DLE FSD BDA/DLE/DLE 2 (tim stan s)/Load-Images.ipynb +553 -0
- noshot/data/DLE FSD BDA/DLE/DLE 3 (sonic boy)/Ex1.ipynb +216 -0
- noshot/data/DLE FSD BDA/DLE/DLE 3 (sonic boy)/Ex2.ipynb +195 -0
- noshot/data/DLE FSD BDA/DLE/DLE 3 (sonic boy)/Ex3.ipynb +427 -0
- noshot/data/DLE FSD BDA/DLE/DLE 3 (sonic boy)/Ex4.ipynb +186 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp01/DNN Ex No 1.ipynb +398 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp01/Ex No 1 Build in dataset.ipynb +171 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp01/Exp1-Short-DL_ANN_ImageClassification.ipynb +401 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp01/OR GATE .ipynb +8511 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp02/Exp2-Short-DL_CNN_ImageClassification.ipynb +737 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp03/DL-Ex3-RNN.ipynb +591 -0
- noshot/data/DLE FSD BDA/DLE/DLE 4 (senior)/Exp04/Ex no 4.ipynb +551 -0
- noshot/main.py +3 -3
- {noshot-11.0.0.dist-info → noshot-13.0.0.dist-info}/METADATA +1 -1
- noshot-13.0.0.dist-info/RECORD +32 -0
- noshot/data/ML TS XAI/ML/CNN(Image_for_Folders_5).ipynb +0 -201
- noshot/data/ML TS XAI/ML/CNN(Image_form_Folder_2).ipynb +0 -201
- noshot/data/ML TS XAI/ML/Json Codes/ML LAB CIA 2.ipynb +0 -409
- noshot/data/ML TS XAI/ML/ML 1/1. EDA-PCA (Balance Scale Dataset).ipynb +0 -147
- noshot/data/ML TS XAI/ML/ML 1/1. EDA-PCA (Rice Dataset).ipynb +0 -181
- noshot/data/ML TS XAI/ML/ML 1/10. HMM Veterbi.ipynb +0 -152
- noshot/data/ML TS XAI/ML/ML 1/2. KNN (Balance Scale Dataset).ipynb +0 -117
- noshot/data/ML TS XAI/ML/ML 1/2. KNN (Iris Dataset).ipynb +0 -156
- noshot/data/ML TS XAI/ML/ML 1/2. KNN (Sobar-72 Dataset).ipynb +0 -215
- noshot/data/ML TS XAI/ML/ML 1/3. LDA (Balance Scale Dataset).ipynb +0 -78
- noshot/data/ML TS XAI/ML/ML 1/3. LDA (NPHA Doctor Visits Dataset).ipynb +0 -114
- noshot/data/ML TS XAI/ML/ML 1/4. Linear Regression (Machine Dataset).ipynb +0 -115
- noshot/data/ML TS XAI/ML/ML 1/4. Linear Regression (Real Estate Dataset).ipynb +0 -146
- noshot/data/ML TS XAI/ML/ML 1/5. Logistic Regression (Magic04 Dataset).ipynb +0 -130
- noshot/data/ML TS XAI/ML/ML 1/5. Logistic Regression (Wine Dataset).ipynb +0 -112
- noshot/data/ML TS XAI/ML/ML 1/6. Naive Bayes Classifier (Agaricus Lepiota Dataset).ipynb +0 -118
- noshot/data/ML TS XAI/ML/ML 1/6. Naive Bayes Classifier (Wine Dataset).ipynb +0 -89
- noshot/data/ML TS XAI/ML/ML 1/7. SVM (Rice Dataset).ipynb +0 -120
- noshot/data/ML TS XAI/ML/ML 1/8. FeedForward NN (Sobar72 Dataset).ipynb +0 -262
- noshot/data/ML TS XAI/ML/ML 1/9. CNN (Cifar10 Dataset).ipynb +0 -156
- noshot/data/ML TS XAI/ML/ML 2/1. PCA.ipynb +0 -162
- noshot/data/ML TS XAI/ML/ML 2/10. CNN.ipynb +0 -100
- noshot/data/ML TS XAI/ML/ML 2/11. HMM.ipynb +0 -336
- noshot/data/ML TS XAI/ML/ML 2/2. KNN.ipynb +0 -149
- noshot/data/ML TS XAI/ML/ML 2/3. LDA.ipynb +0 -132
- noshot/data/ML TS XAI/ML/ML 2/4. Linear Regression.ipynb +0 -86
- noshot/data/ML TS XAI/ML/ML 2/5. Logistic Regression.ipynb +0 -115
- noshot/data/ML TS XAI/ML/ML 2/6. Naive Bayes (Titanic).ipynb +0 -196
- noshot/data/ML TS XAI/ML/ML 2/6. Naive Bayes (Wine).ipynb +0 -98
- noshot/data/ML TS XAI/ML/ML 2/7. SVM Linear.ipynb +0 -109
- noshot/data/ML TS XAI/ML/ML 2/8. SVM Non-Linear.ipynb +0 -195
- noshot/data/ML TS XAI/ML/ML 2/9. FNN With Regularization.ipynb +0 -189
- noshot/data/ML TS XAI/ML/ML 2/9. FNN Without Regularization.ipynb +0 -197
- noshot/data/ML TS XAI/ML/ML 2/All in One Lab CIA 1 Q.ipynb +0 -1087
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/1. PCA EDA.ipynb +0 -274
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/10. CNN.ipynb +0 -170
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 2.ipynb +0 -1087
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 3.ipynb +0 -178
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 4.ipynb +0 -185
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM.ipynb +0 -106
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/2. KNN.ipynb +0 -177
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/3. LDA.ipynb +0 -195
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/4. Linear Regression.ipynb +0 -267
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/5. Logistic Regression.ipynb +0 -104
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/6. Bayesian Classifier.ipynb +0 -109
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/7. SVM.ipynb +0 -220
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/8. MLP.ipynb +0 -99
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge - Lasso.ipynb +0 -211
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge Lasso 2.ipynb +0 -99
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/Image Load Example.ipynb +0 -118
- noshot/data/ML TS XAI/ML/ML 3 (Latest)/Updated_Untitled.ipynb +0 -603
- noshot/data/ML TS XAI/ML/ML Lab AllinOne.ipynb +0 -961
- noshot/data/ML TS XAI/ML/ML Lab H Sec/1. Iris Dataset (Softmax vs Sigmoid).ipynb +0 -231
- noshot/data/ML TS XAI/ML/ML Lab H Sec/2. Student Dataset (Overfit vs Regularized).ipynb +0 -269
- noshot/data/ML TS XAI/ML/ML Lab H Sec/3. Insurance Target Categorical (Overfit vs Regularized).ipynb +0 -274
- noshot/data/ML TS XAI/ML/ML Lab H Sec/3. Insurance Target Numerical (Overfit vs Regularized).ipynb +0 -263
- noshot/data/ML TS XAI/ML/ML Lab H Sec/4. Smart House System HMM.ipynb +0 -198
- noshot/data/ML TS XAI/ML/ML Lab H Sec/5. Fraud Detection System HMM.ipynb +0 -201
- noshot/data/ML TS XAI/ML/ML Lab H Sec/insurance.csv +0 -1339
- noshot/data/ML TS XAI/ML/ML Lab H Sec/iris1.data +0 -151
- noshot/data/ML TS XAI/ML/ML Lab H Sec/student-mat.csv +0 -396
- noshot/data/ML TS XAI/ML/ML Lab H Sec/student-por.csv +0 -650
- noshot/data/ML TS XAI/ML/Rolls Royce AllinOne.ipynb +0 -691
- noshot-11.0.0.dist-info/RECORD +0 -72
- {noshot-11.0.0.dist-info → noshot-13.0.0.dist-info}/WHEEL +0 -0
- {noshot-11.0.0.dist-info → noshot-13.0.0.dist-info}/licenses/LICENSE.txt +0 -0
- {noshot-11.0.0.dist-info → noshot-13.0.0.dist-info}/top_level.txt +0 -0
noshot/data/ML TS XAI/ML/ML 3 (Latest)/Updated_Untitled.ipynb
@@ -1,603 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a246eeb3-2fc3-4dea-9d6f-f9742c74b917",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Import libraries\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "from sklearn.datasets import load_iris, fetch_california_housing\n",
-    "from sklearn.model_selection import train_test_split\n",
-    "from sklearn.preprocessing import StandardScaler\n",
-    "from sklearn.metrics import accuracy_score, mean_squared_error\n",
-    "import tensorflow as tf\n",
-    "from tensorflow.keras.models import Sequential\n",
-    "from tensorflow.keras.layers import Dense\n",
-    "\n",
-    "# Set random seed for reproducibility\n",
-    "np.random.seed(42)\n",
-    "tf.random.set_seed(42)\n",
-    "\n",
-    "# --------- Classification: IRIS Dataset ---------\n",
-    "# Load dataset\n",
-    "iris = load_iris()\n",
-    "X_cls = iris.data\n",
-    "y_cls = iris.target\n",
-    "\n",
-    "# Split\n",
-    "X_train_cls, X_test_cls, y_train_cls, y_test_cls = train_test_split(X_cls, y_cls, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Standardize features\n",
-    "scaler_cls = StandardScaler()\n",
-    "X_train_cls = scaler_cls.fit_transform(X_train_cls)\n",
-    "X_test_cls = scaler_cls.transform(X_test_cls)\n",
-    "\n",
-    "# Build MLP model\n",
-    "model_cls = Sequential([\n",
-    "    Dense(16, activation='relu', input_shape=(X_train_cls.shape[1],)),\n",
-    "    Dense(8, activation='relu'),\n",
-    "    Dense(3, activation='softmax')\n",
-    "])\n",
-    "\n",
-    "# Compile\n",
-    "model_cls.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n",
-    "\n",
-    "# Train\n",
-    "history_cls = model_cls.fit(X_train_cls, y_train_cls, epochs=50, validation_split=0.2, verbose=0)\n",
-    "\n",
-    "# Evaluate\n",
-    "y_pred_cls = np.argmax(model_cls.predict(X_test_cls), axis=1)\n",
-    "acc = accuracy_score(y_test_cls, y_pred_cls)\n",
-    "print(f\"Classification Accuracy on Iris dataset: {acc:.4f}\")\n",
-    "\n",
-    "# Plot training history\n",
-    "plt.figure(figsize=(12,5))\n",
-    "plt.subplot(1,2,1)\n",
-    "plt.plot(history_cls.history['accuracy'], label='Train Accuracy')\n",
-    "plt.plot(history_cls.history['val_accuracy'], label='Val Accuracy')\n",
-    "plt.title('Classification Accuracy')\n",
-    "plt.xlabel('Epochs')\n",
-    "plt.ylabel('Accuracy')\n",
-    "plt.legend()\n",
-    "\n",
-    "# --------- Regression: California Housing Dataset ---------\n",
-    "# Load dataset\n",
-    "housing = fetch_california_housing()\n",
-    "X_reg = housing.data\n",
-    "y_reg = housing.target\n",
-    "\n",
-    "# Split\n",
-    "X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Standardize features\n",
-    "scaler_reg = StandardScaler()\n",
-    "X_train_reg = scaler_reg.fit_transform(X_train_reg)\n",
-    "X_test_reg = scaler_reg.transform(X_test_reg)\n",
-    "\n",
-    "# Build MLP model\n",
-    "model_reg = Sequential([\n",
-    "    Dense(32, activation='relu', input_shape=(X_train_reg.shape[1],)),\n",
-    "    Dense(16, activation='relu'),\n",
-    "    Dense(1)\n",
-    "])\n",
-    "\n",
-    "# Compile\n",
-    "model_reg.compile(optimizer='adam', loss='mse')\n",
-    "\n",
-    "# Train\n",
-    "history_reg = model_reg.fit(X_train_reg, y_train_reg, epochs=50, validation_split=0.2, verbose=0)\n",
-    "\n",
-    "# Evaluate\n",
-    "y_pred_reg = model_reg.predict(X_test_reg).flatten()\n",
-    "mse = mean_squared_error(y_test_reg, y_pred_reg)\n",
-    "print(f\"Regression MSE on California Housing dataset: {mse:.4f}\")\n",
-    "\n",
-    "# Plot training history\n",
-    "plt.subplot(1,2,2)\n",
-    "plt.plot(history_reg.history['loss'], label='Train Loss')\n",
-    "plt.plot(history_reg.history['val_loss'], label='Val Loss')\n",
-    "plt.title('Regression Loss')\n",
-    "plt.xlabel('Epochs')\n",
-    "plt.ylabel('MSE Loss')\n",
-    "plt.legend()\n",
-    "\n",
-    "plt.tight_layout()\n",
-    "plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c97be2ce-bb9d-4a5c-be94-a9ac5ba31d8a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Import libraries\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "from sklearn.datasets import load_iris, fetch_california_housing\n",
-    "from sklearn.model_selection import train_test_split\n",
-    "from sklearn.preprocessing import StandardScaler\n",
-    "from sklearn.metrics import accuracy_score, mean_squared_error\n",
-    "from sklearn.neural_network import MLPClassifier, MLPRegressor\n",
-    "\n",
-    "# Set random seed\n",
-    "np.random.seed(42)\n",
-    "\n",
-    "# --------- Classification: IRIS Dataset ---------\n",
-    "# Load dataset\n",
-    "iris = load_iris()\n",
-    "X_cls = iris.data\n",
-    "y_cls = iris.target\n",
-    "\n",
-    "# Split\n",
-    "X_train_cls, X_test_cls, y_train_cls, y_test_cls = train_test_split(X_cls, y_cls, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Standardize\n",
-    "scaler_cls = StandardScaler()\n",
-    "X_train_cls = scaler_cls.fit_transform(X_train_cls)\n",
-    "X_test_cls = scaler_cls.transform(X_test_cls)\n",
-    "\n",
-    "# Create and train MLP Classifier\n",
-    "mlp_cls = MLPClassifier(hidden_layer_sizes=(16,8), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
-    "mlp_cls.fit(X_train_cls, y_train_cls)\n",
-    "\n",
-    "# Predict and evaluate\n",
-    "y_pred_cls = mlp_cls.predict(X_test_cls)\n",
-    "acc = accuracy_score(y_test_cls, y_pred_cls)\n",
-    "print(f\"Classification Accuracy on Iris dataset: {acc:.4f}\")\n",
-    "\n",
-    "# Plot loss curve\n",
-    "plt.figure(figsize=(12,5))\n",
-    "plt.subplot(1,2,1)\n",
-    "plt.plot(mlp_cls.loss_curve_)\n",
-    "plt.title('Classification Loss Curve (IRIS)')\n",
-    "plt.xlabel('Iterations')\n",
-    "plt.ylabel('Loss')\n",
-    "\n",
-    "# --------- Regression: California Housing Dataset ---------\n",
-    "# Load dataset\n",
-    "housing = fetch_california_housing()\n",
-    "X_reg = housing.data\n",
-    "y_reg = housing.target\n",
-    "\n",
-    "# Split\n",
-    "X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Standardize\n",
-    "scaler_reg = StandardScaler()\n",
-    "X_train_reg = scaler_reg.fit_transform(X_train_reg)\n",
-    "X_test_reg = scaler_reg.transform(X_test_reg)\n",
-    "\n",
-    "# Create and train MLP Regressor\n",
-    "mlp_reg = MLPRegressor(hidden_layer_sizes=(32,16), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
-    "mlp_reg.fit(X_train_reg, y_train_reg)\n",
-    "\n",
-    "# Predict and evaluate\n",
-    "y_pred_reg = mlp_reg.predict(X_test_reg)\n",
-    "mse = mean_squared_error(y_test_reg, y_pred_reg)\n",
-    "print(f\"Regression MSE on California Housing dataset: {mse:.4f}\")\n",
-    "\n",
-    "# Plot loss curve\n",
-    "plt.subplot(1,2,2)\n",
-    "plt.plot(mlp_reg.loss_curve_)\n",
-    "plt.title('Regression Loss Curve (California Housing)')\n",
-    "plt.xlabel('Iterations')\n",
-    "plt.ylabel('Loss')\n",
-    "\n",
-    "plt.tight_layout()\n",
-    "plt.show()\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "16df4a42-e6b7-417e-83a6-032e15d42f8a",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "# Import libraries\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "import seaborn as sns\n",
-    "from sklearn.datasets import load_iris, fetch_california_housing\n",
-    "from sklearn.model_selection import train_test_split, learning_curve\n",
-    "from sklearn.preprocessing import StandardScaler\n",
-    "from sklearn.metrics import accuracy_score, mean_squared_error, confusion_matrix\n",
-    "from sklearn.neural_network import MLPClassifier, MLPRegressor\n",
-    "\n",
-    "# Set random seed\n",
-    "np.random.seed(42)\n",
-    "\n",
-    "# --------- Classification: IRIS Dataset ---------\n",
-    "# Load dataset\n",
-    "iris = load_iris()\n",
-    "X_cls = iris.data\n",
-    "y_cls = iris.target\n",
-    "\n",
-    "# Split\n",
-    "X_train_cls, X_test_cls, y_train_cls, y_test_cls = train_test_split(X_cls, y_cls, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Standardize\n",
-    "scaler_cls = StandardScaler()\n",
-    "X_train_cls = scaler_cls.fit_transform(X_train_cls)\n",
-    "X_test_cls = scaler_cls.transform(X_test_cls)\n",
-    "\n",
-    "# Create and train MLP Classifier\n",
-    "mlp_cls = MLPClassifier(hidden_layer_sizes=(16,8), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
-    "mlp_cls.fit(X_train_cls, y_train_cls)\n",
-    "\n",
-    "# Predict and evaluate\n",
-    "y_pred_cls = mlp_cls.predict(X_test_cls)\n",
-    "acc = accuracy_score(y_test_cls, y_pred_cls)\n",
-    "print(f\"Classification Accuracy on Iris dataset: {acc:.4f}\")\n",
-    "\n",
-    "# Confusion Matrix\n",
-    "conf_mat = confusion_matrix(y_test_cls, y_pred_cls)\n",
-    "\n",
-    "# --------- Regression: California Housing Dataset ---------\n",
-    "# Load dataset\n",
-    "housing = fetch_california_housing()\n",
-    "X_reg = housing.data\n",
-    "y_reg = housing.target\n",
-    "\n",
-    "# Split\n",
-    "X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Standardize\n",
-    "scaler_reg = StandardScaler()\n",
-    "X_train_reg = scaler_reg.fit_transform(X_train_reg)\n",
-    "X_test_reg = scaler_reg.transform(X_test_reg)\n",
-    "\n",
-    "# Create and train MLP Regressor\n",
-    "mlp_reg = MLPRegressor(hidden_layer_sizes=(32,16), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
-    "mlp_reg.fit(X_train_reg, y_train_reg)\n",
-    "\n",
-    "# Predict and evaluate\n",
-    "y_pred_reg = mlp_reg.predict(X_test_reg)\n",
-    "mse = mean_squared_error(y_test_reg, y_pred_reg)\n",
-    "print(f\"Regression MSE on California Housing dataset: {mse:.4f}\")\n",
-    "\n",
-    "# --------- PLOTS ---------\n",
-    "\n",
-    "plt.figure(figsize=(18,10))\n",
-    "\n",
-    "# 1. Classification Loss Curve\n",
-    "plt.subplot(2,3,1)\n",
-    "plt.plot(mlp_cls.loss_curve_)\n",
-    "plt.title('Classification Loss Curve (IRIS)')\n",
-    "plt.xlabel('Iterations')\n",
-    "plt.ylabel('Loss')\n",
-    "\n",
-    "# 2. Confusion Matrix\n",
-    "plt.subplot(2,3,2)\n",
-    "sns.heatmap(conf_mat, annot=True, fmt='d', cmap='Blues', xticklabels=iris.target_names, yticklabels=iris.target_names)\n",
-    "plt.title('Confusion Matrix (IRIS)')\n",
-    "plt.xlabel('Predicted Label')\n",
-    "plt.ylabel('True Label')\n",
-    "\n",
-    "# 3. Learning Curve for Classification\n",
-    "train_sizes_cls, train_scores_cls, val_scores_cls = learning_curve(mlp_cls, X_cls, y_cls, cv=5, scoring='accuracy', train_sizes=np.linspace(0.1,1.0,5))\n",
-    "\n",
-    "plt.subplot(2,3,3)\n",
-    "plt.plot(train_sizes_cls, np.mean(train_scores_cls, axis=1), label='Train Accuracy')\n",
-    "plt.plot(train_sizes_cls, np.mean(val_scores_cls, axis=1), label='Validation Accuracy')\n",
-    "plt.title('Learning Curve (IRIS Classification)')\n",
-    "plt.xlabel('Training Size')\n",
-    "plt.ylabel('Accuracy')\n",
-    "plt.legend()\n",
-    "\n",
-    "# 4. Regression Loss Curve\n",
-    "plt.subplot(2,3,4)\n",
-    "plt.plot(mlp_reg.loss_curve_)\n",
-    "plt.title('Regression Loss Curve (California Housing)')\n",
-    "plt.xlabel('Iterations')\n",
-    "plt.ylabel('Loss')\n",
-    "\n",
-    "# 5. Prediction vs Ground Truth Scatter Plot\n",
-    "plt.subplot(2,3,5)\n",
-    "plt.scatter(y_test_reg, y_pred_reg, alpha=0.7)\n",
-    "plt.plot([y_test_reg.min(), y_test_reg.max()], [y_test_reg.min(), y_test_reg.max()], 'r--')\n",
-    "plt.title('Regression Prediction vs Ground Truth')\n",
-    "plt.xlabel('True Prices')\n",
-    "plt.ylabel('Predicted Prices')\n",
-    "\n",
-    "# 6. Learning Curve for Regression\n",
-    "train_sizes_reg, train_scores_reg, val_scores_reg = learning_curve(mlp_reg, X_reg, y_reg, cv=5, scoring='neg_mean_squared_error', train_sizes=np.linspace(0.1,1.0,5))\n",
-    "\n",
-    "plt.subplot(2,3,6)\n",
-    "plt.plot(train_sizes_reg, -np.mean(train_scores_reg, axis=1), label='Train MSE')\n",
-    "plt.plot(train_sizes_reg, -np.mean(val_scores_reg, axis=1), label='Validation MSE')\n",
-    "plt.title('Learning Curve (Housing Regression)')\n",
-    "plt.xlabel('Training Size')\n",
-    "plt.ylabel('MSE')\n",
-    "plt.legend()\n",
-    "\n",
-    "plt.tight_layout()\n",
-    "plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "35e1b0bb-70b8-4b29-80e6-2b678c17b0a5",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "# --------- PLOTS ---------\n",
-    "\n",
-    "plt.figure(figsize=(18,10))\n",
-    "\n",
-    "# 1. Classification Loss Curve\n",
-    "plt.subplot(2,3,1)\n",
-    "plt.plot(mlp_cls.loss_curve_)\n",
-    "plt.title('Classification Loss Curve (IRIS)')\n",
-    "plt.xlabel('Iterations')\n",
-    "plt.ylabel('Loss')\n",
-    "\n",
-    "# 2. Confusion Matrix\n",
-    "plt.subplot(2,3,2)\n",
-    "sns.heatmap(conf_mat, annot=True, fmt='d', cmap='Blues', xticklabels=iris.target_names, yticklabels=iris.target_names)\n",
-    "plt.title('Confusion Matrix (IRIS)')\n",
-    "plt.xlabel('Predicted Label')\n",
-    "plt.ylabel('True Label')\n",
-    "\n",
-    "# 3. Learning Curve for Classification\n",
-    "train_sizes_cls, train_scores_cls, val_scores_cls = learning_curve(mlp_cls, X_cls, y_cls, cv=5, scoring='accuracy', train_sizes=np.linspace(0.1,1.0,5))\n",
-    "\n",
-    "plt.subplot(2,3,3)\n",
-    "plt.plot(train_sizes_cls, np.mean(train_scores_cls, axis=1), label='Train Accuracy')\n",
-    "plt.plot(train_sizes_cls, np.mean(val_scores_cls, axis=1), label='Validation Accuracy')\n",
-    "plt.title('Learning Curve (IRIS Classification)')\n",
-    "plt.xlabel('Training Size')\n",
-    "plt.ylabel('Accuracy')\n",
-    "plt.legend()\n",
-    "\n",
-    "# 4. Regression Loss Curve\n",
-    "plt.subplot(2,3,4)\n",
-    "plt.plot(mlp_reg.loss_curve_)\n",
-    "plt.title('Regression Loss Curve (California Housing)')\n",
-    "plt.xlabel('Iterations')\n",
-    "plt.ylabel('Loss')\n",
-    "\n",
-    "# 5. Prediction vs Ground Truth Scatter Plot\n",
-    "plt.subplot(2,3,5)\n",
-    "plt.scatter(y_test_reg, y_pred_reg, alpha=0.7)\n",
-    "plt.plot([y_test_reg.min(), y_test_reg.max()], [y_test_reg.min(), y_test_reg.max()], 'r--')\n",
-    "plt.title('Regression Prediction vs Ground Truth')\n",
-    "plt.xlabel('True Prices')\n",
-    "plt.ylabel('Predicted Prices')\n",
-    "\n",
-    "# 6. Learning Curve for Regression\n",
-    "train_sizes_reg, train_scores_reg, val_scores_reg = learning_curve(mlp_reg, X_reg, y_reg, cv=5, scoring='neg_mean_squared_error', train_sizes=np.linspace(0.1,1.0,5))\n",
-    "\n",
-    "plt.subplot(2,3,6)\n",
-    "plt.plot(train_sizes_reg, -np.mean(train_scores_reg, axis=1), label='Train MSE')\n",
-    "plt.plot(train_sizes_reg, -np.mean(val_scores_reg, axis=1), label='Validation MSE')\n",
-    "plt.title('Learning Curve (Housing Regression)')\n",
-    "plt.xlabel('Training Size')\n",
-    "plt.ylabel('MSE')\n",
-    "plt.legend()\n",
-    "\n",
-    "plt.tight_layout()\n",
-    "plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5aabf3a9-aa7f-4fc4-887a-4a4648d02559",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Imports\n",
-    "import os\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "import tensorflow as tf\n",
-    "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
-    "from tensorflow.keras import Sequential\n",
-    "from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout\n",
-    "from sklearn.metrics import confusion_matrix, classification_report\n",
-    "import seaborn as sns\n",
-    "\n",
-    "# Correct path\n",
-    "data_dir = r\"E:\\seg_train\\seg_train\" # NOT just seg_train\n",
-    "\n",
-    "# Image parameters\n",
-    "img_size = (128, 128)\n",
-    "batch_size = 32\n",
-    "\n",
-    "# Data Generators\n",
-    "train_datagen = ImageDataGenerator(rescale=1./255, validation_split=0.2)\n",
-    "\n",
-    "train_generator = train_datagen.flow_from_directory(\n",
-    "    data_dir,\n",
-    "    target_size=img_size,\n",
-    "    batch_size=batch_size,\n",
-    "    class_mode='categorical', # MULTICLASS\n",
-    "    subset='training',\n",
-    "    shuffle=True,\n",
-    "    seed=42\n",
-    ")\n",
-    "\n",
-    "val_generator = train_datagen.flow_from_directory(\n",
-    "    data_dir,\n",
-    "    target_size=img_size,\n",
-    "    batch_size=batch_size,\n",
-    "    class_mode='categorical',\n",
-    "    subset='validation',\n",
-    "    shuffle=False,\n",
-    "    seed=42\n",
-    ")\n",
-    "\n",
-    "# CNN Model\n",
-    "model = Sequential([\n",
-    "    Conv2D(32, (3,3), activation='relu', input_shape=(img_size[0], img_size[1], 3)),\n",
-    "    MaxPooling2D(2,2),\n",
-    "    Conv2D(64, (3,3), activation='relu'),\n",
-    "    MaxPooling2D(2,2),\n",
-    "    Conv2D(128, (3,3), activation='relu'),\n",
-    "    MaxPooling2D(2,2),\n",
-    "    Flatten(),\n",
-    "    Dense(128, activation='relu'),\n",
-    "    Dropout(0.5),\n",
-    "    Dense(train_generator.num_classes, activation='softmax') # MULTI CLASS\n",
-    "])\n",
-    "\n",
-    "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
-    "\n",
-    "# Train\n",
-    "history = model.fit(\n",
-    "    train_generator,\n",
-    "    validation_data=val_generator,\n",
-    "    epochs=10\n",
-    ")\n",
-    "\n",
-    "# Plotting Accuracy & Loss\n",
-    "plt.figure(figsize=(12,5))\n",
-    "\n",
-    "plt.subplot(1,2,1)\n",
-    "plt.plot(history.history['accuracy'], label='Train Acc')\n",
-    "plt.plot(history.history['val_accuracy'], label='Val Acc')\n",
-    "plt.title('Accuracy over Epochs')\n",
-    "plt.legend()\n",
-    "\n",
-    "plt.subplot(1,2,2)\n",
-    "plt.plot(history.history['loss'], label='Train Loss')\n",
-    "plt.plot(history.history['val_loss'], label='Val Loss')\n",
-    "plt.title('Loss over Epochs')\n",
-    "plt.legend()\n",
-    "\n",
-    "plt.tight_layout()\n",
-    "plt.show()\n",
-    "\n",
-    "# Confusion Matrix\n",
-    "val_generator.reset()\n",
-    "y_pred = model.predict(val_generator)\n",
-    "y_pred_classes = np.argmax(y_pred, axis=1)\n",
-    "y_true = val_generator.classes\n",
-    "\n",
-    "cm = confusion_matrix(y_true, y_pred_classes)\n",
-    "plt.figure(figsize=(8,6))\n",
-    "sns.heatmap(cm, annot=True, fmt='d', cmap='Blues', xticklabels=val_generator.class_indices.keys(), yticklabels=val_generator.class_indices.keys())\n",
-    "plt.title('Confusion Matrix')\n",
-    "plt.xlabel('Predicted')\n",
-    "plt.ylabel('True')\n",
-    "plt.show()\n",
-    "\n",
-    "# Classification Report\n",
-    "print(classification_report(y_true, y_pred_classes, target_names=val_generator.class_indices.keys()))\n",
-    "\n",
-    "# Sample predictions\n",
-    "class_labels = list(val_generator.class_indices.keys())\n",
-    "\n",
-    "plt.figure(figsize=(15,10))\n",
-    "for i in range(12):\n",
-    "    img, label = next(val_generator) # <-- CORRECT way (not .next())\n",
-    "    pred = model.predict(img)\n",
-    "    plt.subplot(3,4,i+1)\n",
-    "    plt.imshow(img[0])\n",
-    "    plt.axis('off')\n",
-    "    true_label = class_labels[np.argmax(label[0])]\n",
-    "    pred_label = class_labels[np.argmax(pred[0])]\n",
-    "    plt.title(f\"True: {true_label}\\nPred: {pred_label}\")\n",
-    "plt.tight_layout()\n",
-    "plt.show()\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d8e3e2ce-4b14-494d-88b6-221c679f3ea9",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Imports\n",
-    "import pandas as pd\n",
-    "from sklearn.model_selection import train_test_split\n",
-    "from sklearn.preprocessing import StandardScaler\n",
-    "from sklearn.neural_network import MLPClassifier\n",
-    "from sklearn.metrics import classification_report, confusion_matrix\n",
-    "import matplotlib.pyplot as plt\n",
-    "import seaborn as sns\n",
-    "\n",
-    "# Load the data\n",
-    "data = pd.read_csv(r\"E:\\heart_failure_clinical_records_dataset.csv\")\n",
-    "\n",
-    "# Features and Labels\n",
-    "X = data.drop('DEATH_EVENT', axis=1)\n",
-    "y = data['DEATH_EVENT']\n",
-    "\n",
-    "# (Optional) Group the classes to simplify\n",
-    "#y = y.apply(lambda q: 0 if q <= 5 else (1 if q == 6 else 2)) # 0: bad, 1: average, 2: good\n",
-    "\n",
-    "# Train-test split\n",
-    "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n",
-    "\n",
-    "# Feature scaling\n",
-    "scaler = StandardScaler()\n",
-    "X_train = scaler.fit_transform(X_train)\n",
-    "X_test = scaler.transform(X_test)\n",
-    "\n",
-    "# Create MLP Classifier\n",
-    "mlp = MLPClassifier(hidden_layer_sizes=(64, 32), activation='relu', solver='adam', max_iter=300, random_state=42)\n",
-    "\n",
-    "# Train the model\n",
-    "mlp.fit(X_train, y_train)\n",
-    "\n",
-    "# Predict\n",
-    "y_pred = mlp.predict(X_test)\n",
-    "\n",
-    "# Evaluate\n",
-    "print(classification_report(y_test, y_pred))\n",
-    "\n",
-    "# Confusion matrix\n",
-    "cm = confusion_matrix(y_test, y_pred)\n",
-    "plt.figure(figsize=(6,5))\n",
-    "sns.heatmap(cm, annot=True, fmt='d', cmap='Oranges', cbar=False)\n",
-    "plt.xlabel('Predicted')\n",
-    "plt.ylabel('True')\n",
-    "plt.title('Confusion Matrix')\n",
-    "plt.show()\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d75e12f4-7262-415b-abc2-cd2bdaa91f9e",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.12.4"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}