noshot 0.4.0-py3-none-any.whl → 0.9.0-py3-none-any.whl

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Files changed (54)
  1. noshot/data/ML TS XAI/TS/10. Seasonal ARIMA Forecasting.ipynb +246 -0
  2. noshot/data/ML TS XAI/TS/11. Multivariate ARIMA Forecasting.ipynb +228 -0
  3. noshot/data/ML TS XAI/TS/6. ACF PACF.ipynb +77 -0
  4. noshot/data/ML TS XAI/TS/7. Differencing.ipynb +167 -0
  5. noshot/data/ML TS XAI/TS/8. ARMA Forecasting.ipynb +197 -0
  6. noshot/data/ML TS XAI/TS/9. ARIMA Forecasting.ipynb +220 -0
  7. noshot/main.py +18 -18
  8. noshot/utils/__init__.py +2 -2
  9. noshot/utils/shell_utils.py +56 -56
  10. {noshot-0.4.0.dist-info → noshot-0.9.0.dist-info}/METADATA +58 -55
  11. noshot-0.9.0.dist-info/RECORD +15 -0
  12. {noshot-0.4.0.dist-info → noshot-0.9.0.dist-info}/WHEEL +1 -1
  13. {noshot-0.4.0.dist-info → noshot-0.9.0.dist-info/licenses}/LICENSE.txt +20 -20
  14. noshot/data/ML TS XAI/CIA-1-Delhi Boy.ipynb +0 -535
  15. noshot/data/ML TS XAI/Football Player/4.ipynb +0 -395
  16. noshot/data/ML TS XAI/ML/1. PCA - EDA.ipynb +0 -207
  17. noshot/data/ML TS XAI/ML/2. KNN Classifier.ipynb +0 -287
  18. noshot/data/ML TS XAI/ML/3. Linear Discriminant Analysis.ipynb +0 -83
  19. noshot/data/ML TS XAI/ML/4. Linear Regression.ipynb +0 -117
  20. noshot/data/ML TS XAI/ML/5. Logistic Regression.ipynb +0 -151
  21. noshot/data/ML TS XAI/ML/6. Bayesian Classifier.ipynb +0 -89
  22. noshot/data/ML TS XAI/ML/data/balance-scale.csv +0 -626
  23. noshot/data/ML TS XAI/ML/data/balance-scale.txt +0 -625
  24. noshot/data/ML TS XAI/ML/data/machine-data.csv +0 -210
  25. noshot/data/ML TS XAI/ML/data/wine-dataset.csv +0 -179
  26. noshot/data/ML TS XAI/ML Additional/Bank.ipynb +0 -74
  27. noshot/data/ML TS XAI/ML Additional/LR.ipynb +0 -69
  28. noshot/data/ML TS XAI/ML Additional/ObesityDataSet_raw_and_data_sinthetic.csv +0 -2112
  29. noshot/data/ML TS XAI/ML Additional/Q4 LR.csv +0 -206
  30. noshot/data/ML TS XAI/ML Additional/Q7 BANK DETAILS.csv +0 -41189
  31. noshot/data/ML TS XAI/ML Additional/airfoil.ipynb +0 -69
  32. noshot/data/ML TS XAI/ML Additional/airfoil_self_noise.dat +0 -1503
  33. noshot/data/ML TS XAI/ML Additional/obesity.ipynb +0 -78
  34. noshot/data/ML TS XAI/ML Additional/voronoicode.ipynb +0 -81
  35. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/1/1.ipynb +0 -133
  36. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/1/Question.txt +0 -12
  37. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/1/airfoil_self_noise.dat +0 -1503
  38. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/2/2.ipynb +0 -139
  39. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/2/Question.txt +0 -12
  40. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/2/pop_failures.dat +0 -143
  41. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/3/3.ipynb +0 -130
  42. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/3/Qu.txt +0 -1
  43. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/3/go_track_tracks.csv +0 -164
  44. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/4/4.ipynb +0 -141
  45. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/4/Wilt.csv +0 -4340
  46. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/4/qu.txt +0 -1
  47. noshot/data/ML TS XAI/ML Lab CIA - Our Class/AllinOne.ipynb +0 -1
  48. noshot/data/ML TS XAI/ML Lab CIA - Our Class/Heart-Disease-UCI-0.ipynb +0 -886
  49. noshot/data/ML TS XAI/ML Lab CIA - Our Class/Housing-0.ipynb +0 -292
  50. noshot/data/ML TS XAI/ML Lab CIA - Our Class/Lab Code Ex 1-4.ipynb +0 -1
  51. noshot/data/ML TS XAI/ML Lab CIA - Our Class/data/Housing.csv +0 -546
  52. noshot/data/ML TS XAI/ML Lab CIA - Our Class/data/heart_disease_uci.csv +0 -921
  53. noshot-0.4.0.dist-info/RECORD +0 -48
  54. {noshot-0.4.0.dist-info → noshot-0.9.0.dist-info}/top_level.txt +0 -0
noshot/data/ML TS XAI/ML Additional/obesity.ipynb
@@ -1,78 +0,0 @@
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "0654f3b1-de71-409d-b69e-bca199d6e851",
- "metadata": {},
- "outputs": [],
- "source": [
- "import pandas as pd\n",
- "import numpy as np\n",
- "from sklearn.model_selection import train_test_split\n",
- "from sklearn.preprocessing import StandardScaler, LabelEncoder\n",
- "from sklearn.neighbors import KNeighborsClassifier\n",
- "from sklearn.decomposition import PCA\n",
- "from sklearn.metrics import accuracy_score\n",
- "\n",
- "# Load the dataset\n",
- "data = pd.read_csv('ObesityDataSet_raw_and_data_sinthetic.csv')\n",
- "\n",
- "# Encode categorical columns if any\n",
- "for col in data.select_dtypes(include=['object']).columns:\n",
- " data[col] = LabelEncoder().fit_transform(data[col])\n",
- "\n",
- "# Assuming the last column is the target variable\n",
- "y = data.iloc[:, -1]\n",
- "X = data.iloc[:, :-1]\n",
- "\n",
- "# Standardize the features\n",
- "scaler = StandardScaler()\n",
- "X_scaled = scaler.fit_transform(X)\n",
- "\n",
- "# Split into training and testing sets\n",
- "X_train, X_test, y_train, y_test = train_test_split(X_scaled, y, test_size=0.2, random_state=42)\n",
- "\n",
- "# Apply KNN without PCA\n",
- "knn = KNeighborsClassifier(n_neighbors=5)\n",
- "knn.fit(X_train, y_train)\n",
- "y_pred_knn = knn.predict(X_test)\n",
- "knn_accuracy = accuracy_score(y_test, y_pred_knn)\n",
- "print(f'KNN Accuracy without PCA: {knn_accuracy}')\n",
- "\n",
- "# Applying PCA (reducing to 2 principal components for visualization purposes)\n",
- "pca = PCA(n_components=2)\n",
- "X_train_pca = pca.fit_transform(X_train)\n",
- "X_test_pca = pca.transform(X_test)\n",
- "\n",
- "# KNN with PCA-transformed data\n",
- "knn_pca = KNeighborsClassifier(n_neighbors=5)\n",
- "knn_pca.fit(X_train_pca, y_train)\n",
- "y_pred_pca = knn_pca.predict(X_test_pca)\n",
- "knn_pca_accuracy = accuracy_score(y_test, y_pred_pca)\n",
- "print(f'KNN Accuracy with PCA: {knn_pca_accuracy}')\n"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
noshot/data/ML TS XAI/ML Additional/voronoicode.ipynb
@@ -1,81 +0,0 @@
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "88e226f6-c463-4f5d-a469-a521debfb377",
- "metadata": {},
- "outputs": [],
- "source": [
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "from sklearn.neighbors import KNeighborsClassifier\n",
- "from sklearn.datasets import make_classification\n",
- "from scipy.spatial import Voronoi, voronoi_plot_2d\n",
- "\n",
- "# Generate synthetic dataset\n",
- "X, y = make_classification(\n",
- " n_samples=100,\n",
- " n_features=2,\n",
- " n_classes=3,\n",
- " n_clusters_per_class=1,\n",
- " n_redundant=0,\n",
- " n_informative=2,\n",
- " random_state=42,\n",
- ")\n",
- "\n",
- "# Fit KNN classifier\n",
- "knn = KNeighborsClassifier(n_neighbors=3)\n",
- "knn.fit(X, y)\n",
- "\n",
- "# Create a grid for decision boundary\n",
- "x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n",
- "y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n",
- "xx, yy = np.meshgrid(np.linspace(x_min, x_max, 500), np.linspace(y_min, y_max, 500))\n",
- "grid_points = np.c_[xx.ravel(), yy.ravel()]\n",
- "\n",
- "# Predict on the grid\n",
- "Z = knn.predict(grid_points)\n",
- "Z = Z.reshape(xx.shape)\n",
- "\n",
- "# Plot decision boundaries\n",
- "plt.figure(figsize=(10, 7))\n",
- "plt.contourf(xx, yy, Z, alpha=0.3, cmap=plt.cm.Paired)\n",
- "plt.scatter(X[:, 0], X[:, 1], c=y, edgecolor='k', cmap=plt.cm.Paired, label='Data Points')\n",
- "\n",
- "# Compute Voronoi regions\n",
- "vor = Voronoi(X)\n",
- "voronoi_plot_2d(vor, ax=plt.gca(), show_points=False, show_vertices=False, line_colors='black', line_width=0.8)\n",
- "\n",
- "# Display plot\n",
- "plt.title(\"KNN Classification with Voronoi Diagram (k=3)\")\n",
- "plt.xlabel(\"Feature 1\")\n",
- "plt.ylabel(\"Feature 2\")\n",
- "plt.legend(loc=\"upper right\")\n",
- "plt.grid()\n",
- "plt.show()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/1/1.ipynb
@@ -1,133 +0,0 @@
- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "31067fce-1168-4c6e-97c2-bfc4fb40904b",
- "metadata": {},
- "outputs": [],
- "source": [
- "import pandas as pd\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
- "import seaborn as sns\n",
- "from sklearn.decomposition import PCA\n",
- "from sklearn.linear_model import LinearRegression\n",
- "from sklearn.model_selection import train_test_split\n",
- "from sklearn.preprocessing import StandardScaler\n",
- "from sklearn.metrics import r2_score, mean_squared_error"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "30e4ba93-9e95-4b51-a3e4-89931c193a3a",
- "metadata": {},
- "outputs": [],
- "source": [
- "file_path = \"airfoil_self_noise.dat\"\n",
- "columns = [\"Frequency\", \"Angle of Attack\", \"Chord Length\", \"Free-stream Velocity\", \"Suction Side Thickness\", \"Scaled SPL\"]\n",
- "df = pd.read_csv(file_path, sep=\"\\t\", header=None, names=columns)\n",
- "df.head()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c99f7732-9da4-4f2e-8ad2-16722962c435",
- "metadata": {},
- "outputs": [],
- "source": [
- "df.columns = df.columns.str.strip()\n",
- "X = df.iloc[:, :-1].values # Features\n",
- "y = df.iloc[:, -1].values # Target"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "15940be7-1bdd-497e-81b4-eccd14424881",
- "metadata": {},
- "outputs": [],
- "source": [
- "scaler = StandardScaler()\n",
- "X_scaled = scaler.fit_transform(X)\n",
- "\n",
- "pca = PCA(n_components=2)\n",
- "X_pca = pca.fit_transform(X_scaled)\n",
- "\n",
- "X_train, X_test, y_train, y_test = train_test_split(X_scaled, y, test_size=0.2, random_state=42)\n",
- "X_pca_train, X_pca_test, _, _ = train_test_split(X_pca, y, test_size=0.2, random_state=42)\n",
- "\n",
- "lr_original = LinearRegression()\n",
- "lr_original.fit(X_train, y_train)\n",
- "y_pred_original = lr_original.predict(X_test)\n",
- "\n",
- "lr_pca = LinearRegression()\n",
- "lr_pca.fit(X_pca_train, y_train)\n",
- "y_pred_pca = lr_pca.predict(X_pca_test)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "617f4fdf-6722-4caf-bef3-66240c3cbc0e",
- "metadata": {},
- "outputs": [],
- "source": [
- "print(\"R2 Original:\", r2_score(y_test, y_pred_original))\n",
- "print(\"RMSE Original:\", np.sqrt(mean_squared_error(y_test, y_pred_original)))\n",
- "print(\"R2 PCA:\", r2_score(y_test, y_pred_pca))\n",
- "print(\"RMSE PCA:\", np.sqrt(mean_squared_error(y_test, y_pred_pca)))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "83ed2bce-0dfe-4bc4-b24b-356113eb6be3",
- "metadata": {},
- "outputs": [],
- "source": [
- "plt.figure(figsize=(12, 5))\n",
- "\n",
- "plt.subplot(1, 2, 1)\n",
- "sns.scatterplot(x=y_test, y=y_pred_original, alpha=0.5)\n",
- "plt.plot([min(y_test), max(y_test)], [min(y_test), max(y_test)], '--', color='red')\n",
- "plt.xlabel(\"Actual\")\n",
- "plt.ylabel(\"Predicted\")\n",
- "plt.title(\"Linear Regression on Original Data\")\n",
- "\n",
- "plt.subplot(1, 2, 2)\n",
- "sns.scatterplot(x=y_test, y=y_pred_pca, alpha=0.5)\n",
- "plt.plot([min(y_test), max(y_test)], [min(y_test), max(y_test)], '--', color='red')\n",
- "plt.xlabel(\"Actual\")\n",
- "plt.ylabel(\"Predicted\")\n",
- "plt.title(\"Linear Regression on PCA-Reduced Data\")\n",
- "\n",
- "plt.tight_layout()\n",
- "plt.show()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/1/Question.txt
@@ -1,12 +0,0 @@
- Apply PCA and apply linear regression on original dataset and reduced dataset
- Attribute Information:
-
- This problem has the following inputs:
- 1. Frequency, in Hertzs.
- 2. Angle of attack, in degrees.
- 3. Chord length, in meters.
- 4. Free-stream velocity, in meters per second.
- 5. Suction side displacement thickness, in meters.
-
- The only output is:
- 6. Scaled sound pressure level, in decibels.
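
For context, the sketch below is not part of the package; it is a minimal, self-contained take on the exercise described in the deleted Question.txt: fit a linear regression on the raw airfoil features and on a PCA-reduced version, then compare test R². The column names and the whitespace-delimited read of airfoil_self_noise.dat are assumptions for illustration only.

# Minimal sketch (assumed file layout): PCA + linear regression on the
# NASA airfoil self-noise data, original features vs. 2 principal components.
import pandas as pd
from sklearn.decomposition import PCA
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Illustrative column names matching the six attributes in Question.txt
cols = ["frequency", "angle_of_attack", "chord_length",
        "velocity", "displacement_thickness", "scaled_spl"]
df = pd.read_csv("airfoil_self_noise.dat", sep=r"\s+", header=None, names=cols)

X = df[cols[:-1]].values   # the five inputs
y = df["scaled_spl"].values  # scaled sound pressure level, in decibels

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=0)

# Fit the scaler on training data only, then transform both splits
scaler = StandardScaler().fit(X_train)
X_train_s, X_test_s = scaler.transform(X_train), scaler.transform(X_test)

# Baseline: linear regression on all five standardized features
baseline = LinearRegression().fit(X_train_s, y_train)
print("R2 (original):", r2_score(y_test, baseline.predict(X_test_s)))

# Reduced: project onto 2 principal components, then regress
pca = PCA(n_components=2).fit(X_train_s)
reduced = LinearRegression().fit(pca.transform(X_train_s), y_train)
print("R2 (PCA, 2 components):",
      r2_score(y_test, reduced.predict(pca.transform(X_test_s))))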