noshot 10.0.0-py3-none-any.whl → 12.0.0-py3-none-any.whl

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (74)
  1. noshot/data/DLE FSD BDA/DLE/1. DNN (Image Classification).ipynb +389 -0
  2. noshot/data/DLE FSD BDA/DLE/2. DNN vs CNN.ipynb +516 -0
  3. noshot/data/DLE FSD BDA/DLE/3. CNN (Object Detecrion).ipynb +259 -0
  4. noshot/data/DLE FSD BDA/DLE/4. FCN (Image Segmentaion).ipynb +274 -0
  5. noshot/main.py +3 -3
  6. {noshot-10.0.0.dist-info → noshot-12.0.0.dist-info}/METADATA +1 -1
  7. noshot-12.0.0.dist-info/RECORD +13 -0
  8. {noshot-10.0.0.dist-info → noshot-12.0.0.dist-info}/WHEEL +1 -1
  9. noshot/data/ML TS XAI/ML/CNN(Image_for_Folders_5).ipynb +0 -201
  10. noshot/data/ML TS XAI/ML/CNN(Image_form_Folder_2).ipynb +0 -201
  11. noshot/data/ML TS XAI/ML/Json Codes/ML LAB CIA 2.ipynb +0 -409
  12. noshot/data/ML TS XAI/ML/ML 1/1. EDA-PCA (Balance Scale Dataset).ipynb +0 -147
  13. noshot/data/ML TS XAI/ML/ML 1/1. EDA-PCA (Rice Dataset).ipynb +0 -181
  14. noshot/data/ML TS XAI/ML/ML 1/10. HMM Veterbi.ipynb +0 -152
  15. noshot/data/ML TS XAI/ML/ML 1/2. KNN (Balance Scale Dataset).ipynb +0 -117
  16. noshot/data/ML TS XAI/ML/ML 1/2. KNN (Iris Dataset).ipynb +0 -156
  17. noshot/data/ML TS XAI/ML/ML 1/2. KNN (Sobar-72 Dataset).ipynb +0 -215
  18. noshot/data/ML TS XAI/ML/ML 1/3. LDA (Balance Scale Dataset).ipynb +0 -78
  19. noshot/data/ML TS XAI/ML/ML 1/3. LDA (NPHA Doctor Visits Dataset).ipynb +0 -114
  20. noshot/data/ML TS XAI/ML/ML 1/4. Linear Regression (Machine Dataset).ipynb +0 -115
  21. noshot/data/ML TS XAI/ML/ML 1/4. Linear Regression (Real Estate Dataset).ipynb +0 -146
  22. noshot/data/ML TS XAI/ML/ML 1/5. Logistic Regression (Magic04 Dataset).ipynb +0 -130
  23. noshot/data/ML TS XAI/ML/ML 1/5. Logistic Regression (Wine Dataset).ipynb +0 -112
  24. noshot/data/ML TS XAI/ML/ML 1/6. Naive Bayes Classifier (Agaricus Lepiota Dataset).ipynb +0 -118
  25. noshot/data/ML TS XAI/ML/ML 1/6. Naive Bayes Classifier (Wine Dataset).ipynb +0 -89
  26. noshot/data/ML TS XAI/ML/ML 1/7. SVM (Rice Dataset).ipynb +0 -120
  27. noshot/data/ML TS XAI/ML/ML 1/8. FeedForward NN (Sobar72 Dataset).ipynb +0 -262
  28. noshot/data/ML TS XAI/ML/ML 1/9. CNN (Cifar10 Dataset).ipynb +0 -156
  29. noshot/data/ML TS XAI/ML/ML 2/1. PCA.ipynb +0 -162
  30. noshot/data/ML TS XAI/ML/ML 2/10. CNN.ipynb +0 -100
  31. noshot/data/ML TS XAI/ML/ML 2/11. HMM.ipynb +0 -336
  32. noshot/data/ML TS XAI/ML/ML 2/2. KNN.ipynb +0 -149
  33. noshot/data/ML TS XAI/ML/ML 2/3. LDA.ipynb +0 -132
  34. noshot/data/ML TS XAI/ML/ML 2/4. Linear Regression.ipynb +0 -86
  35. noshot/data/ML TS XAI/ML/ML 2/5. Logistic Regression.ipynb +0 -115
  36. noshot/data/ML TS XAI/ML/ML 2/6. Naive Bayes (Titanic).ipynb +0 -196
  37. noshot/data/ML TS XAI/ML/ML 2/6. Naive Bayes (Wine).ipynb +0 -98
  38. noshot/data/ML TS XAI/ML/ML 2/7. SVM Linear.ipynb +0 -109
  39. noshot/data/ML TS XAI/ML/ML 2/8. SVM Non-Linear.ipynb +0 -195
  40. noshot/data/ML TS XAI/ML/ML 2/9. FNN With Regularization.ipynb +0 -189
  41. noshot/data/ML TS XAI/ML/ML 2/9. FNN Without Regularization.ipynb +0 -197
  42. noshot/data/ML TS XAI/ML/ML 2/All in One Lab CIA 1 Q.ipynb +0 -1087
  43. noshot/data/ML TS XAI/ML/ML 3 (Latest)/1. PCA EDA.ipynb +0 -274
  44. noshot/data/ML TS XAI/ML/ML 3 (Latest)/10. CNN.ipynb +0 -170
  45. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 2.ipynb +0 -1087
  46. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 3.ipynb +0 -178
  47. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM 4.ipynb +0 -185
  48. noshot/data/ML TS XAI/ML/ML 3 (Latest)/11. HMM.ipynb +0 -106
  49. noshot/data/ML TS XAI/ML/ML 3 (Latest)/2. KNN.ipynb +0 -177
  50. noshot/data/ML TS XAI/ML/ML 3 (Latest)/3. LDA.ipynb +0 -195
  51. noshot/data/ML TS XAI/ML/ML 3 (Latest)/4. Linear Regression.ipynb +0 -267
  52. noshot/data/ML TS XAI/ML/ML 3 (Latest)/5. Logistic Regression.ipynb +0 -104
  53. noshot/data/ML TS XAI/ML/ML 3 (Latest)/6. Bayesian Classifier.ipynb +0 -109
  54. noshot/data/ML TS XAI/ML/ML 3 (Latest)/7. SVM.ipynb +0 -220
  55. noshot/data/ML TS XAI/ML/ML 3 (Latest)/8. MLP.ipynb +0 -99
  56. noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge - Lasso.ipynb +0 -211
  57. noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge Lasso 2.ipynb +0 -99
  58. noshot/data/ML TS XAI/ML/ML 3 (Latest)/Image Load Example.ipynb +0 -118
  59. noshot/data/ML TS XAI/ML/ML 3 (Latest)/Updated_Untitled.ipynb +0 -603
  60. noshot/data/ML TS XAI/ML/ML Lab AllinOne.ipynb +0 -961
  61. noshot/data/ML TS XAI/ML/ML Lab H Sec/1. Iris Dataset (Softmax vs Sigmoid).ipynb +0 -231
  62. noshot/data/ML TS XAI/ML/ML Lab H Sec/2. Student Dataset (Overfit vs Regularized).ipynb +0 -269
  63. noshot/data/ML TS XAI/ML/ML Lab H Sec/3. Insurance Target Categorical (Overfit vs Regularized).ipynb +0 -274
  64. noshot/data/ML TS XAI/ML/ML Lab H Sec/3. Insurance Target Numerical (Overfit vs Regularized).ipynb +0 -263
  65. noshot/data/ML TS XAI/ML/ML Lab H Sec/4. Smart House System HMM.ipynb +0 -198
  66. noshot/data/ML TS XAI/ML/ML Lab H Sec/5. Fraud Detection System HMM.ipynb +0 -201
  67. noshot/data/ML TS XAI/ML/ML Lab H Sec/insurance.csv +0 -1339
  68. noshot/data/ML TS XAI/ML/ML Lab H Sec/iris1.data +0 -151
  69. noshot/data/ML TS XAI/ML/ML Lab H Sec/student-mat.csv +0 -396
  70. noshot/data/ML TS XAI/ML/ML Lab H Sec/student-por.csv +0 -650
  71. noshot/data/ML TS XAI/ML/Rolls Royce AllinOne.ipynb +0 -691
  72. noshot-10.0.0.dist-info/RECORD +0 -72
  73. {noshot-10.0.0.dist-info → noshot-12.0.0.dist-info}/licenses/LICENSE.txt +0 -0
  74. {noshot-10.0.0.dist-info → noshot-12.0.0.dist-info}/top_level.txt +0 -0
@@ -1,220 +0,0 @@ noshot/data/ML TS XAI/ML/ML 3 (Latest)/7. SVM.ipynb
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a2ef0ba6",
- "metadata": {},
- "outputs": [],
- "source": [
- "import pandas as pd\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn.model_selection import train_test_split\n",
- "from sklearn.preprocessing import StandardScaler\n",
- "from sklearn import svm\n",
- "import seaborn as sns\n",
- "from sklearn.metrics import confusion_matrix,classification_report"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "00a3a522",
- "metadata": {},
- "outputs": [],
- "source": [
- "df = pd.read_csv(r'heart.csv')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "585147d0",
- "metadata": {},
- "outputs": [],
- "source": [
- "df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "37e34b42",
- "metadata": {},
- "outputs": [],
- "source": [
- "plt.figure(figsize=(12,12))\n",
- "sns.heatmap(df.corr(),annot = True)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "3cfbbed7",
- "metadata": {},
- "outputs": [],
- "source": [
- "X = pd.DataFrame(df, columns = ['cp','oldpeak']) #pd.DataFrame(df, columns = ['cp','thalach','exang','oldpeak'])\n",
- "X\n",
- "feature_names = X.columns"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "376a937f",
- "metadata": {},
- "outputs": [],
- "source": [
- "y = pd.DataFrame(df, columns = ['target'])\n",
- "y"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "e36bd8e4",
- "metadata": {},
- "outputs": [],
- "source": [
- "scaler = StandardScaler()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d44d0fc8",
- "metadata": {},
- "outputs": [],
- "source": [
- "X = scaler.fit_transform(X)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f4134da1",
- "metadata": {},
- "outputs": [],
- "source": [
- "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a51186d3",
- "metadata": {},
- "outputs": [],
- "source": [
- "model = svm.SVC(kernel='linear', C=0.01)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "ea6edc2d",
- "metadata": {},
- "outputs": [],
- "source": [
- "model.fit(X_train, y_train)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "19476831",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_train_pred = model.predict(X_train)\n",
- "y_test_pred = model.predict(X_test)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2a02f42a",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_test_pred"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f73bfed0",
- "metadata": {},
- "outputs": [],
- "source": [
- "x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1\n",
- "y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1\n",
- "xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),\n",
- "                     np.arange(y_min, y_max, 0.02))\n",
- " \n",
- "# Predict the decision boundary\n",
- "Z = model.predict(np.c_[xx.ravel(), yy.ravel()])\n",
- "Z = Z.reshape(xx.shape)\n",
- "\n",
- "# Plot the decision boundary\n",
- "plt.contourf(xx, yy, Z, alpha=0.8)\n",
- "plt.scatter(X[:, 0], X[:, 1], c=np.array(y), edgecolors='k', marker='o')\n",
- "plt.title('SVM Decision Boundary')\n",
- "plt.xlabel('Feature 1')\n",
- "plt.ylabel('Feature 2')\n",
- "plt.show()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "19439ba8",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_pred = model.predict(X)\n",
- "\n",
- "# Create the confusion matrix\n",
- "cm = confusion_matrix(y_test, y_test_pred)\n",
- "class_report = classification_report(y_test, y_test_pred)\n",
- "sns.heatmap(cm, annot=True, fmt='d', cmap='Blues')\n",
- "plt.xlabel('Predicted')\n",
- "plt.ylabel('True')\n",
- "plt.title('Confusion Matrix')\n",
- "plt.show()\n",
- "print(class_report)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6625955c",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
@@ -1,99 +0,0 @@ noshot/data/ML TS XAI/ML/ML 3 (Latest)/8. MLP.ipynb
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "17a00ba2-2808-4921-9df4-79a2911bc670",
- "metadata": {},
- "outputs": [],
- "source": [
- "import pandas as pd\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn.model_selection import train_test_split\n",
- "from sklearn.metrics import classification_report, confusion_matrix\n",
- "from sklearn.neural_network import MLPClassifier\n",
- "\n",
- "# Upload and read dataset\n",
- "df = pd.read_csv('heart.csv')\n",
- "print(f\"Dataset shape: {df.shape}\")\n",
- "\n",
- "# Normalize predictors\n",
- "target_column = 'target'\n",
- "predictors = [col for col in df.columns if col != target_column]\n",
- "df[predictors] = df[predictors] / df[predictors].max()\n",
- "\n",
- "# Split data\n",
- "X = df[predictors].values\n",
- "y = df[target_column].values\n",
- "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=40)\n",
- "print(f\"Training shape: {X_train.shape} | Testing shape: {X_test.shape}\")\n",
- "\n",
- "# Function to train and evaluate MLPClassifier\n",
- "def train_and_evaluate(activation, hidden_layers=(8,8,8), max_iter=500):\n",
- "    print(f\"\\nTraining with activation='{activation}' and hidden_layers={hidden_layers}\")\n",
- "    mlp = MLPClassifier(hidden_layer_sizes=hidden_layers, activation=activation,\n",
- "                        solver='adam', max_iter=max_iter, random_state=42)\n",
- "    mlp.fit(X_train, y_train)\n",
- "\n",
- "    predict_train = mlp.predict(X_train)\n",
- "    predict_test = mlp.predict(X_test)\n",
- "\n",
- "    print(\"\\nTrain Results:\")\n",
- "    print(confusion_matrix(y_train, predict_train))\n",
- "    print(classification_report(y_train, predict_train, zero_division=0))\n",
- "\n",
- "    print(\"\\nTest Results:\")\n",
- "    print(confusion_matrix(y_test, predict_test))\n",
- "    print(classification_report(y_test, predict_test, zero_division=0))\n",
- "\n",
- "\n",
- "# Try different activation functions\n",
- "for activation in ['relu', 'identity', 'tanh', 'logistic']:\n",
- "    train_and_evaluate(activation)\n",
- "\n",
- "# Try different hidden layer configuration\n",
- "train_and_evaluate('relu', hidden_layers=(10,10,10))\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "171896d2-bf99-4d87-9cde-212df4c49cc1",
- "metadata": {},
- "outputs": [],
- "source": [
- "X_train"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "59dd64e2-cca8-4b35-93d8-dbe7b30da280",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
@@ -1,211 +0,0 @@ noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge - Lasso.ipynb
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "80023338-3fbf-47f2-adf2-0692417a70fe",
- "metadata": {},
- "outputs": [],
- "source": [
- "import numpy as np\n",
- "import pandas as pd\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn.linear_model import Ridge, Lasso, LinearRegression, SGDRegressor\n",
- "from sklearn.pipeline import make_pipeline\n",
- "from sklearn.preprocessing import PolynomialFeatures, StandardScaler\n",
- "\n",
- "# --------------------------\n",
- "# Data Preparation\n",
- "# --------------------------\n",
- "np.random.seed(42)\n",
- "m = 20\n",
- "x = 3 * np.random.rand(m, 1)\n",
- "y = 1 + 0.5 * x + np.random.randn(m, 1) / 1.5\n",
- "x_new = np.linspace(0, 3, 100).reshape(100, 1)\n",
- "\n",
- "# --------------------------\n",
- "# Helper Functions\n",
- "# --------------------------\n",
- "def plot_data(x, y):\n",
- "    plt.figure(figsize=(6, 4))\n",
- "    plt.plot(x, y, \"b.\")\n",
- "    plt.xlabel(\"x1\")\n",
- "    plt.ylabel(\"y\")\n",
- "    plt.axis([0, 3, 0, 3.5])\n",
- "    plt.grid()\n",
- "    plt.show()\n",
- "\n",
- "def plot_model(model_class, polynomial, alphas, **model_kwargs):\n",
- "    plt.plot(x, y, \"b.\", linewidth=3)\n",
- "    for alpha, style in zip(alphas, (\"b:\", \"g--\", \"r-\")):\n",
- "        model = model_class(alpha, **model_kwargs) if alpha > 0 else LinearRegression()\n",
- "        if polynomial:\n",
- "            model = make_pipeline(\n",
- "                PolynomialFeatures(degree=10, include_bias=False),\n",
- "                StandardScaler(),\n",
- "                model\n",
- "            )\n",
- "        model.fit(x, y)\n",
- "        y_pred = model.predict(x_new)\n",
- "        plt.plot(x_new, y_pred, style, linewidth=2, label=fr\"$\\alpha = {alpha}$\")\n",
- "    plt.legend(loc=\"upper left\")\n",
- "    plt.xlabel(\"$x_1$\")\n",
- "    plt.axis([0, 3, 0, 3.5])\n",
- "    plt.grid()\n",
- "\n",
- "# --------------------------\n",
- "# Initial Data Plot\n",
- "# --------------------------\n",
- "plot_data(x, y)\n",
- "\n",
- "# --------------------------\n",
- "# Ridge Regression\n",
- "# --------------------------\n",
- "ridge = Ridge(alpha=0.1, solver=\"cholesky\")\n",
- "ridge.fit(x, y)\n",
- "print(\"Ridge prediction at x=1.5:\", ridge.predict([[1.5]]))\n",
- "\n",
- "plt.figure(figsize=(9, 3.5))\n",
- "plt.subplot(121)\n",
- "plot_model(Ridge, polynomial=False, alphas=(0, 10, 100), random_state=42)\n",
- "plt.ylabel(\"$y$\", rotation=0)\n",
- "plt.subplot(122)\n",
- "plot_model(Ridge, polynomial=True, alphas=(0, 1e-5, 1), random_state=42)\n",
- "plt.gca().axes.yaxis.set_ticklabels([])\n",
- "plt.show()\n",
- "\n",
- "# --------------------------\n",
- "# Stochastic Gradient Descent Ridge\n",
- "# --------------------------\n",
- "sgd_reg = SGDRegressor(penalty=\"l2\", alpha=0.1/m, tol=None,\n",
- "                       max_iter=1000, eta0=0.01, random_state=42)\n",
- "sgd_reg.fit(x, y.ravel())\n",
- "print(\"SGD prediction at x=1.5:\", sgd_reg.predict([[1.5]]))\n",
- "\n",
- "# Ridge with SAG solver\n",
- "ridge_reg = Ridge(alpha=0.1, solver=\"sag\", random_state=42)\n",
- "ridge_reg.fit(x, y)\n",
- "print(\"Ridge (sag) prediction at x=1.5:\", ridge_reg.predict([[1.5]]))\n",
- "\n",
- "# Closed form Ridge solution\n",
- "alpha = 0.1\n",
- "A = np.array([[0., 0.], [0., 1.]])\n",
- "X_b = np.c_[np.ones((m, 1)), x]\n",
- "theta_ridge = np.linalg.inv(X_b.T @ X_b + alpha * A) @ X_b.T @ y\n",
- "print(\"Closed-form Ridge solution:\\n\", theta_ridge)\n",
- "print(\"Model intercept and coefficients:\", ridge_reg.intercept_, ridge_reg.coef_)\n",
- "\n",
- "# --------------------------\n",
- "# Lasso Regression\n",
- "# --------------------------\n",
- "lasso_reg = Lasso(alpha=0.1)\n",
- "lasso_reg.fit(x, y)\n",
- "print(\"Lasso prediction at x=1.5:\", lasso_reg.predict([[1.5]]))\n",
- "\n",
- "plt.figure(figsize=(9, 3.5))\n",
- "plt.subplot(121)\n",
- "plot_model(Lasso, polynomial=False, alphas=(0, 0.1, 1), random_state=42)\n",
- "plt.ylabel(\"$y$\", rotation=0)\n",
- "plt.subplot(122)\n",
- "plot_model(Lasso, polynomial=True, alphas=(0, 1e-2, 1), random_state=42)\n",
- "plt.gca().axes.yaxis.set_ticklabels([])\n",
- "plt.show()\n",
- "\n",
- "# --------------------------\n",
- "# Contour Plot: L1 (Lasso) vs L2 (Ridge)\n",
- "# --------------------------\n",
- "def bgd_path(theta, X, y, l1, l2, core=1, eta=0.05, n_iterations=200):\n",
- "    path = [theta]\n",
- "    for _ in range(n_iterations):\n",
- "        gradients = (core * 2 / len(X) * X.T @ (X @ theta - y)\n",
- "                     + l1 * np.sign(theta) + l2 * theta)\n",
- "        theta = theta - eta * gradients\n",
- "        path.append(theta)\n",
- "    return np.array(path)\n",
- "\n",
- "# Prepare data for contour plots\n",
- "t1s = np.linspace(-1, 3, 500)\n",
- "t2s = np.linspace(-1.5, 1.5, 500)\n",
- "t1, t2 = np.meshgrid(t1s, t2s)\n",
- "T = np.c_[t1.ravel(), t2.ravel()]\n",
- "Xr = np.array([[1, 1], [1, -1], [1, 0.5]])\n",
- "yr = 2 * Xr[:, :1] + 0.5 * Xr[:, 1:]\n",
- "J = (1 / len(Xr) * ((T @ Xr.T - yr.T) ** 2).sum(axis=1)).reshape(t1.shape)\n",
- "N1 = np.linalg.norm(T, ord=1, axis=1).reshape(t1.shape)\n",
- "N2 = np.linalg.norm(T, ord=2, axis=1).reshape(t1.shape)\n",
- "\n",
- "# Plot contours\n",
- "fig, axes = plt.subplots(2, 2, sharex=True, sharey=True, figsize=(10.1, 8))\n",
- "titles = [\"Lasso\", \"Ridge\"]\n",
- "\n",
- "for i, (N, l1, l2) in enumerate([(N1, 2.0, 0), (N2, 0, 2.0)]):\n",
- "    JR = J + l1 * N1 + l2 * 0.5 * N2**2\n",
- "    t_min_idx = np.unravel_index(J.argmin(), J.shape)\n",
- "    t1_min, t2_min = t1[t_min_idx], t2[t_min_idx]\n",
- "    tr_min_idx = np.unravel_index(JR.argmin(), JR.shape)\n",
- "    t1r_min, t2r_min = t1[tr_min_idx], t2[tr_min_idx]\n",
- "\n",
- "    levelsJ = np.exp(np.linspace(0, 1, 20)) - 1\n",
- "    levelsJR = levelsJ * (JR.max() - JR.min()) + JR.min()\n",
- "    levelsN = np.linspace(0, N.max(), 10)\n",
- "\n",
- "    path_J = bgd_path(np.array([[0.25], [-1]]), Xr, yr, l1=0, l2=0)\n",
- "    path_JR = bgd_path(np.array([[0.25], [-1]]), Xr, yr, l1, l2)\n",
- "    path_N = bgd_path(np.array([[2.0], [0.5]]), Xr, yr, l1=np.sign(l1)/3, l2=np.sign(l2), core=0)\n",
- "\n",
- "    ax = axes[i, 0]\n",
- "    ax.contourf(t1, t2, N/2, levels=levelsN)\n",
- "    ax.plot(path_N[:, 0], path_N[:, 1], \"y--\")\n",
- "    ax.plot(0, 0, \"ys\")\n",
- "    ax.plot(t1_min, t2_min, \"ys\")\n",
- "    ax.set_title(fr\"$\\ell_{i + 1}$ penalty\")\n",
- "    ax.grid()\n",
- "    if i == 1:\n",
- "        ax.set_xlabel(r\"$\\theta_1$\")\n",
- "    ax.set_ylabel(r\"$\\theta_2$\", rotation=0)\n",
- "\n",
- "    ax = axes[i, 1]\n",
- "    ax.contourf(t1, t2, JR, levels=levelsJR, alpha=0.9)\n",
- "    ax.plot(path_JR[:, 0], path_JR[:, 1], \"w-o\")\n",
- "    ax.plot(path_N[:, 0], path_N[:, 1], \"y--\")\n",
- "    ax.plot(0, 0, \"ys\")\n",
- "    ax.plot(t1_min, t2_min, \"ys\")\n",
- "    ax.plot(t1r_min, t2r_min, \"rs\")\n",
- "    ax.set_title(titles[i])\n",
- "    ax.grid()\n",
- "    if i == 1:\n",
- "        ax.set_xlabel(r\"$\\theta_1$\")\n",
- "plt.show()\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6b3caebd-8202-4f65-8b66-0a6e09d81b3a",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
@@ -1,99 +0,0 @@ noshot/data/ML TS XAI/ML/ML 3 (Latest)/9. Ridge Lasso 2.ipynb
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d239aa84-e577-42a1-98d3-127530ffb38e",
- "metadata": {},
- "outputs": [],
- "source": [
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn.linear_model import Ridge, Lasso\n",
- "from sklearn.datasets import make_regression\n",
- "\n",
- "# Create a sample dataset\n",
- "X, y = make_regression(n_samples=100, n_features=20, noise=0.1, random_state=42)\n",
- "\n",
- "# Range of alpha values (regularization strength)\n",
- "alphas = np.logspace(-3, 3, 100)\n",
- "\n",
- "ridge_coefs = []\n",
- "lasso_coefs = []\n",
- "\n",
- "for alpha in alphas:\n",
- "    # Ridge\n",
- "    ridge = Ridge(alpha=alpha)\n",
- "    ridge.fit(X, y)\n",
- "    ridge_coefs.append(ridge.coef_)\n",
- "    \n",
- "    # Lasso\n",
- "    lasso = Lasso(alpha=alpha, max_iter=10000)\n",
- "    lasso.fit(X, y)\n",
- "    lasso_coefs.append(lasso.coef_)\n",
- "\n",
- "ridge_coefs = np.array(ridge_coefs)\n",
- "lasso_coefs = np.array(lasso_coefs)\n",
- "\n",
- "# ----------------------\n",
- "# Ridge plot\n",
- "# ----------------------\n",
- "plt.figure(figsize=(10, 6))\n",
- "for i in range(X.shape[1]):\n",
- "    plt.plot(alphas, ridge_coefs[:, i], label=f'Feature {i}')\n",
- "plt.xscale('log')\n",
- "plt.title('Ridge Coefficients vs Alpha')\n",
- "plt.xlabel('Alpha (log scale)')\n",
- "plt.ylabel('Coefficient value')\n",
- "plt.grid(True)\n",
- "plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=8)\n",
- "plt.tight_layout()\n",
- "plt.show()\n",
- "\n",
- "# ----------------------\n",
- "# Lasso plot\n",
- "# ----------------------\n",
- "plt.figure(figsize=(10, 6))\n",
- "for i in range(X.shape[1]):\n",
- "    plt.plot(alphas, lasso_coefs[:, i], label=f'Feature {i}')\n",
- "plt.xscale('log')\n",
- "plt.title('Lasso Coefficients vs Alpha')\n",
- "plt.xlabel('Alpha (log scale)')\n",
- "plt.ylabel('Coefficient value')\n",
- "plt.grid(True)\n",
- "plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=8)\n",
- "plt.tight_layout()\n",
- "plt.show()\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "afdde643-5984-485e-aa6d-508602cc8f51",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }