noshot 7.0.0-py3-none-any.whl → 8.0.0-py3-none-any.whl

This diff shows the changes between publicly released versions of the package as published to a supported registry, and is provided for informational purposes only.
Files changed (56)
  1. noshot/data/ML TS XAI/ML/CNN(Image_for_Folders_5).ipynb +201 -0
  2. noshot/data/ML TS XAI/ML/CNN(Image_form_Folder_2).ipynb +201 -0
  3. noshot/data/ML TS XAI/ML/ML 3 (Latest)/1. PCA EDA.ipynb +274 -0
  4. noshot/data/ML TS XAI/ML/ML 3 (Latest)/10. CNN.ipynb +170 -0
  5. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 2.ipynb +1087 -0
  6. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 3.ipynb +178 -0
  7. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 4.ipynb +185 -0
  8. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM.ipynb +106 -0
  9. noshot/data/ML TS XAI/ML/ML 3 (Latest)/2. KNN.ipynb +177 -0
  10. noshot/data/ML TS XAI/ML/ML 3 (Latest)/3. LDA.ipynb +195 -0
  11. noshot/data/ML TS XAI/ML/ML 3 (Latest)/4. Linear Regression.ipynb +267 -0
  12. noshot/data/ML TS XAI/ML/ML 3 (Latest)/5. Logistic Regression.ipynb +104 -0
  13. noshot/data/ML TS XAI/ML/ML 3 (Latest)/6. Bayesian Classifier.ipynb +109 -0
  14. noshot/data/ML TS XAI/ML/ML 3 (Latest)/7. SVM.ipynb +220 -0
  15. noshot/data/ML TS XAI/ML/ML 3 (Latest)/8. MLP.ipynb +99 -0
  16. noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge - Lasso.ipynb +211 -0
  17. noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge Lasso 2.ipynb +99 -0
  18. noshot/data/ML TS XAI/ML/ML 3 (Latest)/Image Load Example.ipynb +118 -0
  19. noshot/data/ML TS XAI/ML/ML 3 (Latest)/Updated_Untitled.ipynb +603 -0
  20. {noshot-7.0.0.dist-info → noshot-8.0.0.dist-info}/METADATA +1 -1
  21. noshot-8.0.0.dist-info/RECORD +60 -0
  22. noshot-7.0.0.dist-info/RECORD +0 -41
  23. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/1. EDA-PCA (Balance Scale Dataset).ipynb +0 -0
  24. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/1. EDA-PCA (Rice Dataset).ipynb +0 -0
  25. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/10. HMM Veterbi.ipynb +0 -0
  26. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/2. KNN (Balance Scale Dataset).ipynb +0 -0
  27. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/2. KNN (Iris Dataset).ipynb +0 -0
  28. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/2. KNN (Sobar-72 Dataset).ipynb +0 -0
  29. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/3. LDA (Balance Scale Dataset).ipynb +0 -0
  30. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/3. LDA (NPHA Doctor Visits Dataset).ipynb +0 -0
  31. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/4. Linear Regression (Machine Dataset).ipynb +0 -0
  32. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/4. Linear Regression (Real Estate Dataset).ipynb +0 -0
  33. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/5. Logistic Regression (Magic04 Dataset).ipynb +0 -0
  34. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/5. Logistic Regression (Wine Dataset).ipynb +0 -0
  35. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/6. Naive Bayes Classifier (Agaricus Lepiota Dataset).ipynb +0 -0
  36. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/6. Naive Bayes Classifier (Wine Dataset).ipynb +0 -0
  37. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/7. SVM (Rice Dataset).ipynb +0 -0
  38. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/8. FeedForward NN (Sobar72 Dataset).ipynb +0 -0
  39. /noshot/data/ML TS XAI/ML/{Tamilan Code → ML 1}/9. CNN (Cifar10 Dataset).ipynb +0 -0
  40. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/1. PCA.ipynb +0 -0
  41. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/10. CNN.ipynb +0 -0
  42. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/11. HMM.ipynb +0 -0
  43. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/2. KNN.ipynb +0 -0
  44. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/3. LDA.ipynb +0 -0
  45. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/4. Linear Regression.ipynb +0 -0
  46. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/5. Logistic Regression.ipynb +0 -0
  47. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/6. Naive Bayes (Titanic).ipynb +0 -0
  48. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/6. Naive Bayes (Wine).ipynb +0 -0
  49. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/7. SVM Linear.ipynb +0 -0
  50. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/8. SVM Non-Linear.ipynb +0 -0
  51. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/9. FNN With Regularization.ipynb +0 -0
  52. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/9. FNN Without Regularization.ipynb +0 -0
  53. /noshot/data/ML TS XAI/ML/{Whitefang Code → ML 2}/All in One Lab CIA 1 Q.ipynb +0 -0
  54. {noshot-7.0.0.dist-info → noshot-8.0.0.dist-info}/WHEEL +0 -0
  55. {noshot-7.0.0.dist-info → noshot-8.0.0.dist-info}/licenses/LICENSE.txt +0 -0
  56. {noshot-7.0.0.dist-info → noshot-8.0.0.dist-info}/top_level.txt +0 -0
noshot/data/ML TS XAI/ML/ML 3 (Latest)/Updated_Untitled.ipynb
@@ -0,0 +1,603 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a246eeb3-2fc3-4dea-9d6f-f9742c74b917",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Import libraries\n",
+ "import numpy as np\n",
+ "import matplotlib.pyplot as plt\n",
+ "from sklearn.datasets import load_iris, fetch_california_housing\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.preprocessing import StandardScaler\n",
+ "from sklearn.metrics import accuracy_score, mean_squared_error\n",
+ "import tensorflow as tf\n",
+ "from tensorflow.keras.models import Sequential\n",
+ "from tensorflow.keras.layers import Dense\n",
+ "\n",
+ "# Set random seed for reproducibility\n",
+ "np.random.seed(42)\n",
+ "tf.random.set_seed(42)\n",
+ "\n",
+ "# --------- Classification: IRIS Dataset ---------\n",
+ "# Load dataset\n",
+ "iris = load_iris()\n",
+ "X_cls = iris.data\n",
+ "y_cls = iris.target\n",
+ "\n",
+ "# Split\n",
+ "X_train_cls, X_test_cls, y_train_cls, y_test_cls = train_test_split(X_cls, y_cls, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Standardize features\n",
+ "scaler_cls = StandardScaler()\n",
+ "X_train_cls = scaler_cls.fit_transform(X_train_cls)\n",
+ "X_test_cls = scaler_cls.transform(X_test_cls)\n",
+ "\n",
+ "# Build MLP model\n",
+ "model_cls = Sequential([\n",
+ " Dense(16, activation='relu', input_shape=(X_train_cls.shape[1],)),\n",
+ " Dense(8, activation='relu'),\n",
+ " Dense(3, activation='softmax')\n",
+ "])\n",
+ "\n",
+ "# Compile\n",
+ "model_cls.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n",
+ "\n",
+ "# Train\n",
+ "history_cls = model_cls.fit(X_train_cls, y_train_cls, epochs=50, validation_split=0.2, verbose=0)\n",
+ "\n",
+ "# Evaluate\n",
+ "y_pred_cls = np.argmax(model_cls.predict(X_test_cls), axis=1)\n",
+ "acc = accuracy_score(y_test_cls, y_pred_cls)\n",
+ "print(f\"Classification Accuracy on Iris dataset: {acc:.4f}\")\n",
+ "\n",
+ "# Plot training history\n",
+ "plt.figure(figsize=(12,5))\n",
+ "plt.subplot(1,2,1)\n",
+ "plt.plot(history_cls.history['accuracy'], label='Train Accuracy')\n",
+ "plt.plot(history_cls.history['val_accuracy'], label='Val Accuracy')\n",
+ "plt.title('Classification Accuracy')\n",
+ "plt.xlabel('Epochs')\n",
+ "plt.ylabel('Accuracy')\n",
+ "plt.legend()\n",
+ "\n",
+ "# --------- Regression: California Housing Dataset ---------\n",
+ "# Load dataset\n",
+ "housing = fetch_california_housing()\n",
+ "X_reg = housing.data\n",
+ "y_reg = housing.target\n",
+ "\n",
+ "# Split\n",
+ "X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Standardize features\n",
+ "scaler_reg = StandardScaler()\n",
+ "X_train_reg = scaler_reg.fit_transform(X_train_reg)\n",
+ "X_test_reg = scaler_reg.transform(X_test_reg)\n",
+ "\n",
+ "# Build MLP model\n",
+ "model_reg = Sequential([\n",
+ " Dense(32, activation='relu', input_shape=(X_train_reg.shape[1],)),\n",
+ " Dense(16, activation='relu'),\n",
+ " Dense(1)\n",
+ "])\n",
+ "\n",
+ "# Compile\n",
+ "model_reg.compile(optimizer='adam', loss='mse')\n",
+ "\n",
+ "# Train\n",
+ "history_reg = model_reg.fit(X_train_reg, y_train_reg, epochs=50, validation_split=0.2, verbose=0)\n",
+ "\n",
+ "# Evaluate\n",
+ "y_pred_reg = model_reg.predict(X_test_reg).flatten()\n",
+ "mse = mean_squared_error(y_test_reg, y_pred_reg)\n",
+ "print(f\"Regression MSE on California Housing dataset: {mse:.4f}\")\n",
+ "\n",
+ "# Plot training history\n",
+ "plt.subplot(1,2,2)\n",
+ "plt.plot(history_reg.history['loss'], label='Train Loss')\n",
+ "plt.plot(history_reg.history['val_loss'], label='Val Loss')\n",
+ "plt.title('Regression Loss')\n",
+ "plt.xlabel('Epochs')\n",
+ "plt.ylabel('MSE Loss')\n",
+ "plt.legend()\n",
+ "\n",
+ "plt.tight_layout()\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c97be2ce-bb9d-4a5c-be94-a9ac5ba31d8a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Import libraries\n",
+ "import numpy as np\n",
+ "import matplotlib.pyplot as plt\n",
+ "from sklearn.datasets import load_iris, fetch_california_housing\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.preprocessing import StandardScaler\n",
+ "from sklearn.metrics import accuracy_score, mean_squared_error\n",
+ "from sklearn.neural_network import MLPClassifier, MLPRegressor\n",
+ "\n",
+ "# Set random seed\n",
+ "np.random.seed(42)\n",
+ "\n",
+ "# --------- Classification: IRIS Dataset ---------\n",
+ "# Load dataset\n",
+ "iris = load_iris()\n",
+ "X_cls = iris.data\n",
+ "y_cls = iris.target\n",
+ "\n",
+ "# Split\n",
+ "X_train_cls, X_test_cls, y_train_cls, y_test_cls = train_test_split(X_cls, y_cls, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Standardize\n",
+ "scaler_cls = StandardScaler()\n",
+ "X_train_cls = scaler_cls.fit_transform(X_train_cls)\n",
+ "X_test_cls = scaler_cls.transform(X_test_cls)\n",
+ "\n",
+ "# Create and train MLP Classifier\n",
+ "mlp_cls = MLPClassifier(hidden_layer_sizes=(16,8), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
+ "mlp_cls.fit(X_train_cls, y_train_cls)\n",
+ "\n",
+ "# Predict and evaluate\n",
+ "y_pred_cls = mlp_cls.predict(X_test_cls)\n",
+ "acc = accuracy_score(y_test_cls, y_pred_cls)\n",
+ "print(f\"Classification Accuracy on Iris dataset: {acc:.4f}\")\n",
+ "\n",
+ "# Plot loss curve\n",
+ "plt.figure(figsize=(12,5))\n",
+ "plt.subplot(1,2,1)\n",
+ "plt.plot(mlp_cls.loss_curve_)\n",
+ "plt.title('Classification Loss Curve (IRIS)')\n",
+ "plt.xlabel('Iterations')\n",
+ "plt.ylabel('Loss')\n",
+ "\n",
+ "# --------- Regression: California Housing Dataset ---------\n",
+ "# Load dataset\n",
+ "housing = fetch_california_housing()\n",
+ "X_reg = housing.data\n",
+ "y_reg = housing.target\n",
+ "\n",
+ "# Split\n",
+ "X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Standardize\n",
+ "scaler_reg = StandardScaler()\n",
+ "X_train_reg = scaler_reg.fit_transform(X_train_reg)\n",
+ "X_test_reg = scaler_reg.transform(X_test_reg)\n",
+ "\n",
+ "# Create and train MLP Regressor\n",
+ "mlp_reg = MLPRegressor(hidden_layer_sizes=(32,16), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
+ "mlp_reg.fit(X_train_reg, y_train_reg)\n",
+ "\n",
+ "# Predict and evaluate\n",
+ "y_pred_reg = mlp_reg.predict(X_test_reg)\n",
+ "mse = mean_squared_error(y_test_reg, y_pred_reg)\n",
+ "print(f\"Regression MSE on California Housing dataset: {mse:.4f}\")\n",
+ "\n",
+ "# Plot loss curve\n",
+ "plt.subplot(1,2,2)\n",
+ "plt.plot(mlp_reg.loss_curve_)\n",
+ "plt.title('Regression Loss Curve (California Housing)')\n",
+ "plt.xlabel('Iterations')\n",
+ "plt.ylabel('Loss')\n",
+ "\n",
+ "plt.tight_layout()\n",
+ "plt.show()\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "16df4a42-e6b7-417e-83a6-032e15d42f8a",
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "# Import libraries\n",
+ "import numpy as np\n",
+ "import matplotlib.pyplot as plt\n",
+ "import seaborn as sns\n",
+ "from sklearn.datasets import load_iris, fetch_california_housing\n",
+ "from sklearn.model_selection import train_test_split, learning_curve\n",
+ "from sklearn.preprocessing import StandardScaler\n",
+ "from sklearn.metrics import accuracy_score, mean_squared_error, confusion_matrix\n",
+ "from sklearn.neural_network import MLPClassifier, MLPRegressor\n",
+ "\n",
+ "# Set random seed\n",
+ "np.random.seed(42)\n",
+ "\n",
+ "# --------- Classification: IRIS Dataset ---------\n",
+ "# Load dataset\n",
+ "iris = load_iris()\n",
+ "X_cls = iris.data\n",
+ "y_cls = iris.target\n",
+ "\n",
+ "# Split\n",
+ "X_train_cls, X_test_cls, y_train_cls, y_test_cls = train_test_split(X_cls, y_cls, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Standardize\n",
+ "scaler_cls = StandardScaler()\n",
+ "X_train_cls = scaler_cls.fit_transform(X_train_cls)\n",
+ "X_test_cls = scaler_cls.transform(X_test_cls)\n",
+ "\n",
+ "# Create and train MLP Classifier\n",
+ "mlp_cls = MLPClassifier(hidden_layer_sizes=(16,8), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
+ "mlp_cls.fit(X_train_cls, y_train_cls)\n",
+ "\n",
+ "# Predict and evaluate\n",
+ "y_pred_cls = mlp_cls.predict(X_test_cls)\n",
+ "acc = accuracy_score(y_test_cls, y_pred_cls)\n",
+ "print(f\"Classification Accuracy on Iris dataset: {acc:.4f}\")\n",
+ "\n",
+ "# Confusion Matrix\n",
+ "conf_mat = confusion_matrix(y_test_cls, y_pred_cls)\n",
+ "\n",
+ "# --------- Regression: California Housing Dataset ---------\n",
+ "# Load dataset\n",
+ "housing = fetch_california_housing()\n",
+ "X_reg = housing.data\n",
+ "y_reg = housing.target\n",
+ "\n",
+ "# Split\n",
+ "X_train_reg, X_test_reg, y_train_reg, y_test_reg = train_test_split(X_reg, y_reg, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Standardize\n",
+ "scaler_reg = StandardScaler()\n",
+ "X_train_reg = scaler_reg.fit_transform(X_train_reg)\n",
+ "X_test_reg = scaler_reg.transform(X_test_reg)\n",
+ "\n",
+ "# Create and train MLP Regressor\n",
+ "mlp_reg = MLPRegressor(hidden_layer_sizes=(32,16), activation='relu', solver='adam', max_iter=300, random_state=42, verbose=True)\n",
+ "mlp_reg.fit(X_train_reg, y_train_reg)\n",
+ "\n",
+ "# Predict and evaluate\n",
+ "y_pred_reg = mlp_reg.predict(X_test_reg)\n",
+ "mse = mean_squared_error(y_test_reg, y_pred_reg)\n",
+ "print(f\"Regression MSE on California Housing dataset: {mse:.4f}\")\n",
+ "\n",
+ "# --------- PLOTS ---------\n",
+ "\n",
+ "plt.figure(figsize=(18,10))\n",
+ "\n",
+ "# 1. Classification Loss Curve\n",
+ "plt.subplot(2,3,1)\n",
+ "plt.plot(mlp_cls.loss_curve_)\n",
+ "plt.title('Classification Loss Curve (IRIS)')\n",
+ "plt.xlabel('Iterations')\n",
+ "plt.ylabel('Loss')\n",
+ "\n",
+ "# 2. Confusion Matrix\n",
+ "plt.subplot(2,3,2)\n",
+ "sns.heatmap(conf_mat, annot=True, fmt='d', cmap='Blues', xticklabels=iris.target_names, yticklabels=iris.target_names)\n",
+ "plt.title('Confusion Matrix (IRIS)')\n",
+ "plt.xlabel('Predicted Label')\n",
+ "plt.ylabel('True Label')\n",
+ "\n",
+ "# 3. Learning Curve for Classification\n",
+ "train_sizes_cls, train_scores_cls, val_scores_cls = learning_curve(mlp_cls, X_cls, y_cls, cv=5, scoring='accuracy', train_sizes=np.linspace(0.1,1.0,5))\n",
+ "\n",
+ "plt.subplot(2,3,3)\n",
+ "plt.plot(train_sizes_cls, np.mean(train_scores_cls, axis=1), label='Train Accuracy')\n",
+ "plt.plot(train_sizes_cls, np.mean(val_scores_cls, axis=1), label='Validation Accuracy')\n",
+ "plt.title('Learning Curve (IRIS Classification)')\n",
+ "plt.xlabel('Training Size')\n",
+ "plt.ylabel('Accuracy')\n",
+ "plt.legend()\n",
+ "\n",
+ "# 4. Regression Loss Curve\n",
+ "plt.subplot(2,3,4)\n",
+ "plt.plot(mlp_reg.loss_curve_)\n",
+ "plt.title('Regression Loss Curve (California Housing)')\n",
+ "plt.xlabel('Iterations')\n",
+ "plt.ylabel('Loss')\n",
+ "\n",
+ "# 5. Prediction vs Ground Truth Scatter Plot\n",
+ "plt.subplot(2,3,5)\n",
+ "plt.scatter(y_test_reg, y_pred_reg, alpha=0.7)\n",
+ "plt.plot([y_test_reg.min(), y_test_reg.max()], [y_test_reg.min(), y_test_reg.max()], 'r--')\n",
+ "plt.title('Regression Prediction vs Ground Truth')\n",
+ "plt.xlabel('True Prices')\n",
+ "plt.ylabel('Predicted Prices')\n",
+ "\n",
+ "# 6. Learning Curve for Regression\n",
+ "train_sizes_reg, train_scores_reg, val_scores_reg = learning_curve(mlp_reg, X_reg, y_reg, cv=5, scoring='neg_mean_squared_error', train_sizes=np.linspace(0.1,1.0,5))\n",
+ "\n",
+ "plt.subplot(2,3,6)\n",
+ "plt.plot(train_sizes_reg, -np.mean(train_scores_reg, axis=1), label='Train MSE')\n",
+ "plt.plot(train_sizes_reg, -np.mean(val_scores_reg, axis=1), label='Validation MSE')\n",
+ "plt.title('Learning Curve (Housing Regression)')\n",
+ "plt.xlabel('Training Size')\n",
+ "plt.ylabel('MSE')\n",
+ "plt.legend()\n",
+ "\n",
+ "plt.tight_layout()\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "35e1b0bb-70b8-4b29-80e6-2b678c17b0a5",
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "# --------- PLOTS ---------\n",
+ "\n",
+ "plt.figure(figsize=(18,10))\n",
+ "\n",
+ "# 1. Classification Loss Curve\n",
+ "plt.subplot(2,3,1)\n",
+ "plt.plot(mlp_cls.loss_curve_)\n",
+ "plt.title('Classification Loss Curve (IRIS)')\n",
+ "plt.xlabel('Iterations')\n",
+ "plt.ylabel('Loss')\n",
+ "\n",
+ "# 2. Confusion Matrix\n",
+ "plt.subplot(2,3,2)\n",
+ "sns.heatmap(conf_mat, annot=True, fmt='d', cmap='Blues', xticklabels=iris.target_names, yticklabels=iris.target_names)\n",
+ "plt.title('Confusion Matrix (IRIS)')\n",
+ "plt.xlabel('Predicted Label')\n",
+ "plt.ylabel('True Label')\n",
+ "\n",
+ "# 3. Learning Curve for Classification\n",
+ "train_sizes_cls, train_scores_cls, val_scores_cls = learning_curve(mlp_cls, X_cls, y_cls, cv=5, scoring='accuracy', train_sizes=np.linspace(0.1,1.0,5))\n",
+ "\n",
+ "plt.subplot(2,3,3)\n",
+ "plt.plot(train_sizes_cls, np.mean(train_scores_cls, axis=1), label='Train Accuracy')\n",
+ "plt.plot(train_sizes_cls, np.mean(val_scores_cls, axis=1), label='Validation Accuracy')\n",
+ "plt.title('Learning Curve (IRIS Classification)')\n",
+ "plt.xlabel('Training Size')\n",
+ "plt.ylabel('Accuracy')\n",
+ "plt.legend()\n",
+ "\n",
+ "# 4. Regression Loss Curve\n",
+ "plt.subplot(2,3,4)\n",
+ "plt.plot(mlp_reg.loss_curve_)\n",
+ "plt.title('Regression Loss Curve (California Housing)')\n",
+ "plt.xlabel('Iterations')\n",
+ "plt.ylabel('Loss')\n",
+ "\n",
+ "# 5. Prediction vs Ground Truth Scatter Plot\n",
+ "plt.subplot(2,3,5)\n",
+ "plt.scatter(y_test_reg, y_pred_reg, alpha=0.7)\n",
+ "plt.plot([y_test_reg.min(), y_test_reg.max()], [y_test_reg.min(), y_test_reg.max()], 'r--')\n",
+ "plt.title('Regression Prediction vs Ground Truth')\n",
+ "plt.xlabel('True Prices')\n",
+ "plt.ylabel('Predicted Prices')\n",
+ "\n",
+ "# 6. Learning Curve for Regression\n",
+ "train_sizes_reg, train_scores_reg, val_scores_reg = learning_curve(mlp_reg, X_reg, y_reg, cv=5, scoring='neg_mean_squared_error', train_sizes=np.linspace(0.1,1.0,5))\n",
+ "\n",
+ "plt.subplot(2,3,6)\n",
+ "plt.plot(train_sizes_reg, -np.mean(train_scores_reg, axis=1), label='Train MSE')\n",
+ "plt.plot(train_sizes_reg, -np.mean(val_scores_reg, axis=1), label='Validation MSE')\n",
+ "plt.title('Learning Curve (Housing Regression)')\n",
+ "plt.xlabel('Training Size')\n",
+ "plt.ylabel('MSE')\n",
+ "plt.legend()\n",
+ "\n",
+ "plt.tight_layout()\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5aabf3a9-aa7f-4fc4-887a-4a4648d02559",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Imports\n",
+ "import os\n",
+ "import numpy as np\n",
+ "import matplotlib.pyplot as plt\n",
+ "import tensorflow as tf\n",
+ "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
+ "from tensorflow.keras import Sequential\n",
+ "from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout\n",
+ "from sklearn.metrics import confusion_matrix, classification_report\n",
+ "import seaborn as sns\n",
+ "\n",
+ "# Correct path\n",
+ "data_dir = r\"E:\\seg_train\\seg_train\" # NOT just seg_train\n",
+ "\n",
+ "# Image parameters\n",
+ "img_size = (128, 128)\n",
+ "batch_size = 32\n",
+ "\n",
+ "# Data Generators\n",
+ "train_datagen = ImageDataGenerator(rescale=1./255, validation_split=0.2)\n",
+ "\n",
+ "train_generator = train_datagen.flow_from_directory(\n",
+ " data_dir,\n",
+ " target_size=img_size,\n",
+ " batch_size=batch_size,\n",
+ " class_mode='categorical', # MULTICLASS\n",
+ " subset='training',\n",
+ " shuffle=True,\n",
+ " seed=42\n",
+ ")\n",
+ "\n",
+ "val_generator = train_datagen.flow_from_directory(\n",
+ " data_dir,\n",
+ " target_size=img_size,\n",
+ " batch_size=batch_size,\n",
+ " class_mode='categorical',\n",
+ " subset='validation',\n",
+ " shuffle=False,\n",
+ " seed=42\n",
+ ")\n",
+ "\n",
+ "# CNN Model\n",
+ "model = Sequential([\n",
+ " Conv2D(32, (3,3), activation='relu', input_shape=(img_size[0], img_size[1], 3)),\n",
+ " MaxPooling2D(2,2),\n",
+ " Conv2D(64, (3,3), activation='relu'),\n",
+ " MaxPooling2D(2,2),\n",
+ " Conv2D(128, (3,3), activation='relu'),\n",
+ " MaxPooling2D(2,2),\n",
+ " Flatten(),\n",
+ " Dense(128, activation='relu'),\n",
+ " Dropout(0.5),\n",
+ " Dense(train_generator.num_classes, activation='softmax') # MULTI CLASS\n",
+ "])\n",
+ "\n",
+ "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
+ "\n",
+ "# Train\n",
+ "history = model.fit(\n",
+ " train_generator,\n",
+ " validation_data=val_generator,\n",
+ " epochs=10\n",
+ ")\n",
+ "\n",
+ "# Plotting Accuracy & Loss\n",
+ "plt.figure(figsize=(12,5))\n",
+ "\n",
+ "plt.subplot(1,2,1)\n",
+ "plt.plot(history.history['accuracy'], label='Train Acc')\n",
+ "plt.plot(history.history['val_accuracy'], label='Val Acc')\n",
+ "plt.title('Accuracy over Epochs')\n",
+ "plt.legend()\n",
+ "\n",
+ "plt.subplot(1,2,2)\n",
+ "plt.plot(history.history['loss'], label='Train Loss')\n",
+ "plt.plot(history.history['val_loss'], label='Val Loss')\n",
+ "plt.title('Loss over Epochs')\n",
+ "plt.legend()\n",
+ "\n",
+ "plt.tight_layout()\n",
+ "plt.show()\n",
+ "\n",
+ "# Confusion Matrix\n",
+ "val_generator.reset()\n",
+ "y_pred = model.predict(val_generator)\n",
+ "y_pred_classes = np.argmax(y_pred, axis=1)\n",
+ "y_true = val_generator.classes\n",
+ "\n",
+ "cm = confusion_matrix(y_true, y_pred_classes)\n",
+ "plt.figure(figsize=(8,6))\n",
+ "sns.heatmap(cm, annot=True, fmt='d', cmap='Blues', xticklabels=val_generator.class_indices.keys(), yticklabels=val_generator.class_indices.keys())\n",
+ "plt.title('Confusion Matrix')\n",
+ "plt.xlabel('Predicted')\n",
+ "plt.ylabel('True')\n",
+ "plt.show()\n",
+ "\n",
+ "# Classification Report\n",
+ "print(classification_report(y_true, y_pred_classes, target_names=val_generator.class_indices.keys()))\n",
+ "\n",
+ "# Sample predictions\n",
+ "class_labels = list(val_generator.class_indices.keys())\n",
+ "\n",
+ "plt.figure(figsize=(15,10))\n",
+ "for i in range(12):\n",
+ " img, label = next(val_generator) # <-- CORRECT way (not .next())\n",
+ " pred = model.predict(img)\n",
+ " plt.subplot(3,4,i+1)\n",
+ " plt.imshow(img[0])\n",
+ " plt.axis('off')\n",
+ " true_label = class_labels[np.argmax(label[0])]\n",
+ " pred_label = class_labels[np.argmax(pred[0])]\n",
+ " plt.title(f\"True: {true_label}\\nPred: {pred_label}\")\n",
+ "plt.tight_layout()\n",
+ "plt.show()\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d8e3e2ce-4b14-494d-88b6-221c679f3ea9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Imports\n",
+ "import pandas as pd\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.preprocessing import StandardScaler\n",
+ "from sklearn.neural_network import MLPClassifier\n",
+ "from sklearn.metrics import classification_report, confusion_matrix\n",
+ "import matplotlib.pyplot as plt\n",
+ "import seaborn as sns\n",
+ "\n",
+ "# Load the data\n",
+ "data = pd.read_csv(r\"E:\\heart_failure_clinical_records_dataset.csv\")\n",
+ "\n",
+ "# Features and Labels\n",
+ "X = data.drop('DEATH_EVENT', axis=1)\n",
+ "y = data['DEATH_EVENT']\n",
+ "\n",
+ "# (Optional) Group the classes to simplify\n",
+ "#y = y.apply(lambda q: 0 if q <= 5 else (1 if q == 6 else 2)) # 0: bad, 1: average, 2: good\n",
+ "\n",
+ "# Train-test split\n",
+ "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n",
+ "\n",
+ "# Feature scaling\n",
+ "scaler = StandardScaler()\n",
+ "X_train = scaler.fit_transform(X_train)\n",
+ "X_test = scaler.transform(X_test)\n",
+ "\n",
+ "# Create MLP Classifier\n",
+ "mlp = MLPClassifier(hidden_layer_sizes=(64, 32), activation='relu', solver='adam', max_iter=300, random_state=42)\n",
+ "\n",
+ "# Train the model\n",
+ "mlp.fit(X_train, y_train)\n",
+ "\n",
+ "# Predict\n",
+ "y_pred = mlp.predict(X_test)\n",
+ "\n",
+ "# Evaluate\n",
+ "print(classification_report(y_test, y_pred))\n",
+ "\n",
+ "# Confusion matrix\n",
+ "cm = confusion_matrix(y_test, y_pred)\n",
+ "plt.figure(figsize=(6,5))\n",
+ "sns.heatmap(cm, annot=True, fmt='d', cmap='Oranges', cbar=False)\n",
+ "plt.xlabel('Predicted')\n",
+ "plt.ylabel('True')\n",
+ "plt.title('Confusion Matrix')\n",
+ "plt.show()\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d75e12f4-7262-415b-abc2-cd2bdaa91f9e",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.4"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+ }
{noshot-7.0.0.dist-info → noshot-8.0.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: noshot
- Version: 7.0.0
+ Version: 8.0.0
  Summary: Support library for Artificial Intelligence, Machine Learning and Data Science tools
  Author: Tim Stan S
  License: MIT
noshot-8.0.0.dist-info/RECORD
@@ -0,0 +1,60 @@
+ noshot/__init__.py,sha256=000R40tii8lDFU8C1fBaD3SOnxD0PWRNWZU-km49YrU,21
+ noshot/main.py,sha256=zXegIqjJPARlPnQMS-B2dAENcvyaZkNwmue63Gm8lHU,663
+ noshot/data/ML TS XAI/ML/CNN(Image_for_Folders_5).ipynb,sha256=VAk1gwoDTBMSdXJxiOLJRvWnzJs84kdNr8Tn_1LaGZw,8802
+ noshot/data/ML TS XAI/ML/CNN(Image_form_Folder_2).ipynb,sha256=o3Ho3f1CcYhzNW5yB8PEt5WuxFvgc04_bT73wMmpx14,8772
+ noshot/data/ML TS XAI/ML/Rolls Royce AllinOne.ipynb,sha256=dQ3HgLix6HLqPltFiPrElmEdYAsvR6flDpHEIjcngp4,24774
+ noshot/data/ML TS XAI/ML/ML 1/1. EDA-PCA (Balance Scale Dataset).ipynb,sha256=1QYmUb1QZ4FtmdwoWhTbF9divKNMOxS8AMOy56At0xg,3625
+ noshot/data/ML TS XAI/ML/ML 1/1. EDA-PCA (Rice Dataset).ipynb,sha256=1rp60fJyQl0bxzFWeJb6XR8VRtlQeonv9Yw5_9pvIH8,4133
+ noshot/data/ML TS XAI/ML/ML 1/10. HMM Veterbi.ipynb,sha256=0ESvYG9FT7wgcL2JUzMH2ChpSzevz2eez0X53a9wK20,4986
+ noshot/data/ML TS XAI/ML/ML 1/2. KNN (Balance Scale Dataset).ipynb,sha256=tbkkRm6xHnmM-K8cRpnK8LH1pUmQl30bdyo0dFSNFcw,2988
+ noshot/data/ML TS XAI/ML/ML 1/2. KNN (Iris Dataset).ipynb,sha256=9vxuGgpq2poMGb_AOJY_rpvUCzHwd-iCVYSXxseYVRs,4287
+ noshot/data/ML TS XAI/ML/ML 1/2. KNN (Sobar-72 Dataset).ipynb,sha256=oEHLzQlc0aD1HiardgHPbTL2F-uXcm2_htA_dSmM68M,5840
+ noshot/data/ML TS XAI/ML/ML 1/3. LDA (Balance Scale Dataset).ipynb,sha256=Z3zwZQKJmvCEgzTWN1OqgiOAF9Lw5oLIY1A63SRJ5tg,2101
+ noshot/data/ML TS XAI/ML/ML 1/3. LDA (NPHA Doctor Visits Dataset).ipynb,sha256=N_IFGBAckF8vJI0lPPbZ1soG50B1_IVyACCyU7jvo3U,2651
+ noshot/data/ML TS XAI/ML/ML 1/4. Linear Regression (Machine Dataset).ipynb,sha256=PxFEgyFi6n5nURhtjeT__OP5T-UsggOI9RfBKfpDNBo,3081
+ noshot/data/ML TS XAI/ML/ML 1/4. Linear Regression (Real Estate Dataset).ipynb,sha256=avtEqkS38VccYJrQa91kjpmYG43dsDYiMcYtp70SbpA,3895
+ noshot/data/ML TS XAI/ML/ML 1/5. Logistic Regression (Magic04 Dataset).ipynb,sha256=sSujtrR8C9GGjpIR4v6YN6gTF1cYMIxz5Ufnv_Fp5-I,3376
+ noshot/data/ML TS XAI/ML/ML 1/5. Logistic Regression (Wine Dataset).ipynb,sha256=YphX35eCBBWu5sCSLS6bw__Em4gbwAzOW49z_Zv-tRs,2668
+ noshot/data/ML TS XAI/ML/ML 1/6. Naive Bayes Classifier (Agaricus Lepiota Dataset).ipynb,sha256=gHvmS1w__3JxhdsxjcSstgrCfoBWfxp8e738O1rVlew,3077
+ noshot/data/ML TS XAI/ML/ML 1/6. Naive Bayes Classifier (Wine Dataset).ipynb,sha256=Ile_WuRAt8Is1HbKdDXu-ogHvQRNBGyxpd8OWauEEek,2058
+ noshot/data/ML TS XAI/ML/ML 1/7. SVM (Rice Dataset).ipynb,sha256=zJ4GGRSwNY73DQCEeAP8ladl6H_WB54B1C_nSyKb9q8,3762
+ noshot/data/ML TS XAI/ML/ML 1/8. FeedForward NN (Sobar72 Dataset).ipynb,sha256=JaXAnYDa1AViE2WErFX8QzExbNyGvDYTsf3Vdlie8rs,7122
+ noshot/data/ML TS XAI/ML/ML 1/9. CNN (Cifar10 Dataset).ipynb,sha256=Jt_x0JTXNM1KqbYQ8afLtj0qIHysN63UUzFnmZfCE3c,3996
+ noshot/data/ML TS XAI/ML/ML 2/1. PCA.ipynb,sha256=QiJKjyYDWetwngiOwTi4fzuDIorkNLilAFV47V56kO4,3907
+ noshot/data/ML TS XAI/ML/ML 2/10. CNN.ipynb,sha256=zraQfH-LW-CYMMawfVX--jaejlcTB2SE92wscb_eb50,3329
+ noshot/data/ML TS XAI/ML/ML 2/11. HMM.ipynb,sha256=RvE_6vM5OWlFKVvGG9-K9sQfz9AtC_fRP5lgRgQrndo,11203
+ noshot/data/ML TS XAI/ML/ML 2/2. KNN.ipynb,sha256=CP1tuMZoL6MyMIZXn7PL_Epof_0l5EWhKz6ySg3u_W4,4049
+ noshot/data/ML TS XAI/ML/ML 2/3. LDA.ipynb,sha256=-VyjQ_i6r-1KaGagT3Aoq8UQ_1xYxcDPhmORxuu5eBg,3183
+ noshot/data/ML TS XAI/ML/ML 2/4. Linear Regression.ipynb,sha256=e6qdlsdkQn-2D8s55C5ekZrd8oClxIglwsJoyW624GQ,2630
+ noshot/data/ML TS XAI/ML/ML 2/5. Logistic Regression.ipynb,sha256=yC-rMnCgSjKyY7iVeuoIVlXq6ge8xYLKUijL2gAMuMo,3074
+ noshot/data/ML TS XAI/ML/ML 2/6. Naive Bayes (Titanic).ipynb,sha256=EItNyvs2EHMY42SBEHlKxJ8_y6Oi4qlJOjsEMcOGCWg,4572
+ noshot/data/ML TS XAI/ML/ML 2/6. Naive Bayes (Wine).ipynb,sha256=iW3yRzgGRgkhG-VkIGNU5LJuk-ef4ZlxmPx4Vl_PCSQ,2278
+ noshot/data/ML TS XAI/ML/ML 2/7. SVM Linear.ipynb,sha256=ZUd1r_W94BdAOMhpXfL6gCylrAgU7E2NOI3xkW4vnHM,3526
+ noshot/data/ML TS XAI/ML/ML 2/8. SVM Non-Linear.ipynb,sha256=E4psLvzD8XzKGTFyd2759CRjhUa-7WO8Ow577nDLIWo,6351
+ noshot/data/ML TS XAI/ML/ML 2/9. FNN With Regularization.ipynb,sha256=SKdyms9nCdr3e0O3Os6Om3kFz9ebahv0OueqhJ4Psc4,6980
+ noshot/data/ML TS XAI/ML/ML 2/9. FNN Without Regularization.ipynb,sha256=ZsdOcoPzaXM8bQV2ct5uOjRj6wF9Km0cc9iR1zRdXXQ,7520
+ noshot/data/ML TS XAI/ML/ML 2/All in One Lab CIA 1 Q.ipynb,sha256=wJLu6e0vgrXxH_J1pVM8wB6Wg-o3lPcuzZ45hId1g2o,27364
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/1. PCA EDA.ipynb,sha256=CnC8HU8x8GQOc6O_bA5YTtfKRJbH_J_agZTbonLwno4,6060
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/10. CNN.ipynb,sha256=OsPCe4ZLzed96tBpQ1H0KPtROT462pGHTbpecmT13n4,5282
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 2.ipynb,sha256=QtVv-mPZ3bG_AqnnzSSMXXHPHm94N_M5zZJAVQx_cEg,46615
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 3.ipynb,sha256=kl7Vcq3x7joA0I_n0MRTwDd2y5nQPfu8Oc729xQwRLA,5733
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 4.ipynb,sha256=ducKJvoZXPbQ8KMlU8Vl_VtkkziW0Evc0wJs2YGPKKA,5495
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM.ipynb,sha256=GNdyfNAvkBtDQqSNfWD9VMcwIpcpE4LWhi-4KjES8jQ,2768
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/2. KNN.ipynb,sha256=-7Qa-bGgYZJtuZclr00_TkmVcH7zgMxKsjNN9PQTGko,5284
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/3. LDA.ipynb,sha256=5PZVV-mWca3CtSSRGavzp-LNwFKWJHn-SBGY2pwsjcg,4846
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/4. Linear Regression.ipynb,sha256=pSQVwAmyP4z3g_xcgk_EjsX21Qk1Rnyv-K8MSZlbTE8,5691
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/5. Logistic Regression.ipynb,sha256=fHtNkZjbbAra8an4hLcSX92KuRt9pbma4GlPBH26OcY,3210
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/6. Bayesian Classifier.ipynb,sha256=91UbAZ41vK6q_K08IeXVZRDpDmQ3Xz8ZKVq0os0Eo0Y,2711
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/7. SVM.ipynb,sha256=QjQ2aT8HF8mg8bGDzLExJWimtKBfvcRKOhwoQts5bHw,4850
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/8. MLP.ipynb,sha256=OX9i_Pk-j-vswnwjAHxGfCtdvn5wcv4WrkXy6gLF6-c,3154
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge - Lasso.ipynb,sha256=wzI5UoSCQvExOhWRpKzhEl21s-rbe7R3oE0AeIbN7fk,8056
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge Lasso 2.ipynb,sha256=rDY1cJzA1MXQWM7fA-T72c5RR68KZTzbSdYYiX-J-yU,2813
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/Image Load Example.ipynb,sha256=4vCJo_ODnUrqz8WUrk-Dtvt0BIWz6gfGbc43LASV62o,4806
+ noshot/data/ML TS XAI/ML/ML 3 (Latest)/Updated_Untitled.ipynb,sha256=hc__yVZbRoSVy9sur02kfTCNE_TenLdHjYxG4iosT5U,22230
+ noshot/utils/__init__.py,sha256=QVrN1ZpzPXxZqDOqot5-t_ulFjZXVx7Cvr-Is9AK0po,110
+ noshot/utils/shell_utils.py,sha256=-XfgYlNQlULa_rRJ3vsfTns4m_jiueGEj396J_y0Gus,2611
+ noshot-8.0.0.dist-info/licenses/LICENSE.txt,sha256=fgCruaVm5cUjFGOeEoGIimT6nnUunBqcNZHpGzK8TSw,1086
+ noshot-8.0.0.dist-info/METADATA,sha256=ita4VgzBMbnF-39frifuXCXpM35yGt-c4Kgw0MV7gi8,2573
+ noshot-8.0.0.dist-info/WHEEL,sha256=ck4Vq1_RXyvS4Jt6SI0Vz6fyVs4GWg7AINwpsaGEgPE,91
+ noshot-8.0.0.dist-info/top_level.txt,sha256=UL-c0HffdRwohz-y9icY_rnY48pQDdxGcBsgyCKh2Q8,7
+ noshot-8.0.0.dist-info/RECORD,,