noshot 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. noshot/main.py +18 -18
  2. noshot/utils/__init__.py +2 -2
  3. noshot/utils/shell_utils.py +56 -56
  4. {noshot-0.2.4.dist-info → noshot-0.2.6.dist-info}/LICENSE.txt +20 -20
  5. {noshot-0.2.4.dist-info → noshot-0.2.6.dist-info}/METADATA +55 -55
  6. noshot-0.2.6.dist-info/RECORD +9 -0
  7. noshot/data/ML TS XAI/ML/1. PCA - EDA/PCA-EDA.ipynb +0 -207
  8. noshot/data/ML TS XAI/ML/1. PCA - EDA/balance-scale.csv +0 -626
  9. noshot/data/ML TS XAI/ML/1. PCA - EDA/input.txt +0 -625
  10. noshot/data/ML TS XAI/ML/2. KNN Classifier/KNN.ipynb +0 -287
  11. noshot/data/ML TS XAI/ML/2. KNN Classifier/balance-scale.csv +0 -626
  12. noshot/data/ML TS XAI/ML/2. KNN Classifier/input.txt +0 -625
  13. noshot/data/ML TS XAI/ML/3. Linear Discriminant Analysis/LDA.ipynb +0 -83
  14. noshot/data/ML TS XAI/ML/3. Linear Discriminant Analysis/balance-scale.csv +0 -626
  15. noshot/data/ML TS XAI/ML/3. Linear Discriminant Analysis/input.txt +0 -625
  16. noshot/data/ML TS XAI/ML/4. Linear Regression/Linear-Regression.ipynb +0 -117
  17. noshot/data/ML TS XAI/ML/4. Linear Regression/machine-data.csv +0 -210
  18. noshot/data/ML TS XAI/ML/5. Logistic Regression/Logistic-Regression.ipynb +0 -137
  19. noshot/data/ML TS XAI/ML/5. Logistic Regression/wine-dataset.csv +0 -179
  20. noshot/data/ML TS XAI/ML/6. Bayesian Classifier/Bayesian.ipynb +0 -87
  21. noshot/data/ML TS XAI/ML/6. Bayesian Classifier/wine-dataset.csv +0 -179
  22. noshot/data/ML TS XAI/TS/1. EDA - Handling Time Series Data/Handling TS Data.ipynb +0 -247
  23. noshot/data/ML TS XAI/TS/1. EDA - Handling Time Series Data/raw_sales.csv +0 -29581
  24. noshot/data/ML TS XAI/TS/2. Feature Engineering/Feature Engineering-.ipynb +0 -183
  25. noshot/data/ML TS XAI/TS/3. Temporal Relationships/Exploring Temporal Relationships.ipynb +0 -172
  26. noshot/data/ML TS XAI/TS/4. Up-Down-Sampling and Interploation/Up-Down-Sampling.ipynb +0 -146
  27. noshot/data/ML TS XAI/TS/4. Up-Down-Sampling and Interploation/shampoo_sales.csv +0 -37
  28. noshot/data/ML TS XAI/TS/5. Stationarity - Trend - Seasonality/Stationarity-Trend-Seasonality.ipynb +0 -173
  29. noshot/data/ML TS XAI/TS/5. Stationarity - Trend - Seasonality/daily-min-temperatures.csv +0 -3651
  30. noshot/data/ML TS XAI/TS/5. Stationarity - Trend - Seasonality/daily-total-female-births.csv +0 -366
  31. noshot/data/ML TS XAI/TS/6. Autocorrelation - Partial Autocorrelation/ACF-PACF.ipynb +0 -77
  32. noshot/data/ML TS XAI/TS/6. Autocorrelation - Partial Autocorrelation/daily-min-temperatures.csv +0 -3651
  33. noshot/data/ML TS XAI/TS/AllinOne.ipynb +0 -12676
  34. noshot-0.2.4.dist-info/RECORD +0 -36
  35. {noshot-0.2.4.dist-info → noshot-0.2.6.dist-info}/WHEEL +0 -0
  36. {noshot-0.2.4.dist-info → noshot-0.2.6.dist-info}/top_level.txt +0 -0
noshot/data/ML TS XAI/ML/2. KNN Classifier/KNN.ipynb
@@ -1,287 +0,0 @@
- {
- "cells": [
- {
- "cell_type": "markdown",
- "id": "def24f4a",
- "metadata": {},
- "source": [
- "##### __Balance Scale Dataset__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b4a8b5dc",
- "metadata": {},
- "outputs": [],
- "source": [
- "from sklearn.neighbors import KNeighborsClassifier\n",
- "from sklearn.datasets import load_iris\n",
- "from sklearn.model_selection import train_test_split\n",
- "from sklearn import metrics\n",
- "from sklearn.preprocessing import StandardScaler\n",
- "import sklearn\n",
- "import pandas as pd\n",
- "import numpy as np"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "1c308767",
- "metadata": {},
- "outputs": [],
- "source": [
- "df = pd.read_csv('input.txt', delimiter = ',', names=['class name','left-weight','left-distance','right-weight','right-distance'])\n",
- "#df = pd.read_csv('balance-scale.csv')\n",
- "df.head()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "23d0288e",
- "metadata": {},
- "outputs": [],
- "source": [
- "feature = ['left-weight','left-distance','right-weight','right-distance']\n",
- "x = df.loc[:,feature]\n",
- "y = df.loc[:,'class name']\n",
- "x = StandardScaler().fit_transform(x)\n",
- "X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.4,\n",
- "random_state = 4)\n",
- "print (X_train.shape)\n",
- "print (X_test.shape)\n",
- "knn = KNeighborsClassifier(n_neighbors = 15)\n",
- "knn.fit(X_train, y_train) "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "366c003d",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_pred = knn.predict(X_test)\n",
- "print (metrics.accuracy_score(y_test, y_pred))"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "6702687e",
- "metadata": {},
- "source": [
- "##### __class for [1,1,1,1] = R (predicted)__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "22e96c2a",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_pred = knn.predict(np.array([1,1,1,1]).reshape(1, -1))[0]\n",
- "print(\"Class Predicted:\", y_pred)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "13d70944",
- "metadata": {},
- "source": [
- "##### __Iris Dataset__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "3192e255",
- "metadata": {},
- "outputs": [],
- "source": [
- "def to_category(val):\n",
- " match val:\n",
- " case 0: return \"setosa\"\n",
- " case 1: return \"versicolor\"\n",
- " case 2: return \"virginica\"\n",
- "iris = load_iris()\n",
- "df2 = pd.DataFrame(data=iris.data, columns=iris.feature_names)\n",
- "df2['class'] = iris.target\n",
- "df2['class'] = df2['class'].apply(to_category)\n",
- "print(df2.shape)\n",
- "df2.head()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "4115986d",
- "metadata": {
- "scrolled": true
- },
- "outputs": [],
- "source": [
- "feature = ['sepal length (cm)','sepal width (cm)','petal length (cm)','petal width (cm)']\n",
- "x = df2.loc[:,feature]\n",
- "y = df2.loc[:,'class']\n",
- "x = StandardScaler().fit_transform(x)\n",
- "X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.4,\n",
- "random_state = 4)\n",
- "print (X_train.shape)\n",
- "print (X_test.shape)\n",
- "knn = KNeighborsClassifier(n_neighbors = 15)\n",
- "knn.fit(X_train, y_train) "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "8252b0f1",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_pred = knn.predict(X_test)\n",
- "print (metrics.accuracy_score(y_test, y_pred))"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "06559281",
- "metadata": {},
- "source": [
- "##### __class for [5.2,3.5,1.1,0.2] = virginica (predicted)__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "085896ef",
- "metadata": {},
- "outputs": [],
- "source": [
- "y_pred = knn.predict(np.array([5.2, 3.5, 1.1, 0.2]).reshape(1, -1))[0]\n",
- "print(\"Class Predicted:\", y_pred)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "cdd56944",
- "metadata": {},
- "source": [
- "##### __Iris Dataset Visualization__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a549df51",
- "metadata": {},
- "outputs": [],
- "source": [
- "from sklearn.svm import SVC\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn import svm, datasets\n",
- "\n",
- "iris = load_iris()\n",
- "X = iris.data[:, :2]\n",
- "y = iris.target\n",
- "\n",
- "def make_meshgrid(x, y, h=.02):\n",
- " x_min, x_max = x.min() - 1, x.max() + 1\n",
- " y_min, y_max = y.min() - 1, y.max() + 1\n",
- " xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n",
- " return xx, yy\n",
- "\n",
- "def plot_contours(ax, clf, xx, yy, **params):\n",
- " Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n",
- " Z = Z.reshape(xx.shape)\n",
- " out = ax.contourf(xx, yy, Z, **params)\n",
- " return out\n",
- "\n",
- "model = svm.SVC(kernel='linear')\n",
- "clf = model.fit(X, y)\n",
- "\n",
- "fig, ax = plt.subplots()\n",
- "# title for the plots\n",
- "title = ('Decision surface of linear SVC ')\n",
- "# Set-up grid for plotting.\n",
- "X0, X1 = X[:, 0], X[:, 1]\n",
- "xx, yy = make_meshgrid(X0, X1)\n",
- "\n",
- "plot_contours(ax, clf, xx, yy, cmap=plt.cm.coolwarm, alpha=0.8)\n",
- "ax.scatter(X0, X1, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k')\n",
- "ax.set_ylabel('y label here')\n",
- "ax.set_xlabel('x label here')\n",
- "ax.set_xticks(())\n",
- "ax.set_yticks(())\n",
- "ax.set_title(title)\n",
- "#ax.legend()\n",
- "plt.show()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "01719650",
- "metadata": {},
- "outputs": [],
- "source": [
- "from sklearn.svm import SVC\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn import svm, datasets\n",
- "from mpl_toolkits.mplot3d import Axes3D\n",
- "\n",
- "iris = datasets.load_iris()\n",
- "X = iris.data[:, :3] # we only take the first three features.\n",
- "Y = iris.target\n",
- "\n",
- "#make it binary classification problem\n",
- "X = X[np.logical_or(Y==0,Y==1)]\n",
- "Y = Y[np.logical_or(Y==0,Y==1)]\n",
- "\n",
- "model = svm.SVC(kernel='linear')\n",
- "clf = model.fit(X, Y)\n",
- "\n",
- "# The equation of the separating plane is given by all x so that np.dot(svc.coef_[0], x) + b = 0.\n",
- "# Solve for w3 (z)\n",
- "z = lambda x,y: (-clf.intercept_[0]-clf.coef_[0][0]*x -clf.coef_[0][1]*y) / clf.coef_[0][2]\n",
- "\n",
- "tmp = np.linspace(-5,5,30)\n",
- "x,y = np.meshgrid(tmp,tmp)\n",
- "\n",
- "fig = plt.figure()\n",
- "ax = fig.add_subplot(111, projection='3d')\n",
- "ax.plot3D(X[Y==0,0], X[Y==0,1], X[Y==0,2],'ob')\n",
- "ax.plot3D(X[Y==1,0], X[Y==1,1], X[Y==1,2],'sr')\n",
- "ax.plot_surface(x, y, z(x,y))\n",
- "ax.view_init(30, 60)\n",
- "plt.show()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }