noshot 0.3.8__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. noshot/data/ML TS XAI/CIA-1-Delhi Boy.ipynb +535 -0
  2. noshot/data/ML TS XAI/Football Player/4.ipynb +395 -0
  3. noshot/data/ML TS XAI/ML Additional/Bank.ipynb +74 -0
  4. noshot/data/ML TS XAI/ML Additional/LR.ipynb +69 -0
  5. noshot/data/ML TS XAI/ML Additional/ObesityDataSet_raw_and_data_sinthetic.csv +2112 -0
  6. noshot/data/ML TS XAI/ML Additional/Q4 LR.csv +206 -0
  7. noshot/data/ML TS XAI/ML Additional/Q7 BANK DETAILS.csv +41189 -0
  8. noshot/data/ML TS XAI/ML Additional/airfoil.ipynb +69 -0
  9. noshot/data/ML TS XAI/ML Additional/obesity.ipynb +78 -0
  10. noshot/data/ML TS XAI/ML Additional/voronoicode.ipynb +81 -0
  11. noshot/data/ML TS XAI/ML Lab CIA - Healthy Class/1/airfoil_self_noise.dat +1503 -0
  12. noshot/data/ML TS XAI/ML Lab CIA - Our Class/AllinOne.ipynb +1 -0
  13. noshot/data/ML TS XAI/ML Lab CIA - Our Class/Heart-Disease-UCI-0.ipynb +886 -0
  14. noshot/data/ML TS XAI/ML Lab CIA - Our Class/Housing-0.ipynb +292 -0
  15. noshot/data/ML TS XAI/ML Lab CIA - Our Class/Lab Code Ex 1-4.ipynb +1 -0
  16. noshot/data/ML TS XAI/ML Lab CIA - Our Class/data/Housing.csv +546 -0
  17. noshot/data/ML TS XAI/ML Lab CIA - Our Class/data/heart_disease_uci.csv +921 -0
  18. {noshot-0.3.8.dist-info → noshot-0.4.0.dist-info}/METADATA +1 -1
  19. noshot-0.4.0.dist-info/RECORD +48 -0
  20. noshot/data/ML TS XAI/TS/1. EDA - Handling Time Series Data.ipynb +0 -247
  21. noshot/data/ML TS XAI/TS/2. Feature Engineering.ipynb +0 -183
  22. noshot/data/ML TS XAI/TS/3. Temporal Relationships.ipynb +0 -172
  23. noshot/data/ML TS XAI/TS/4. Up-Down-Sampling and Interpolation.ipynb +0 -146
  24. noshot/data/ML TS XAI/TS/5. Stationarity - Trend - Seasonality.ipynb +0 -173
  25. noshot/data/ML TS XAI/TS/6. Autocorrelation - Partial Autocorrelation.ipynb +0 -77
  26. noshot/data/ML TS XAI/TS/AllinOne.ipynb +0 -1416
  27. noshot/data/ML TS XAI/TS/data/daily-min-temperatures.csv +0 -3651
  28. noshot/data/ML TS XAI/TS/data/daily-total-female-births.csv +0 -366
  29. noshot/data/ML TS XAI/TS/data/raw_sales.csv +0 -29581
  30. noshot/data/ML TS XAI/TS/data/shampoo_sales.csv +0 -37
  31. noshot/data/ML TS XAI/TS Lab CIA/1 - AirPassengers/1 - AirPassengers.ipynb +0 -198
  32. noshot/data/ML TS XAI/TS Lab CIA/1 - AirPassengers/AirPassengers.csv +0 -145
  33. noshot/data/ML TS XAI/TS Lab CIA/2 - Daily-total-female-births/2 - daily-total-female-births.ipynb +0 -209
  34. noshot/data/ML TS XAI/TS Lab CIA/2 - Daily-total-female-births/daily-total-female-births.csv +0 -366
  35. noshot/data/ML TS XAI/TS Lab CIA/3 - Bill Charge/3 - Bill Charge.ipynb +0 -169
  36. noshot/data/ML TS XAI/TS Lab CIA/3 - Bill Charge/bill charge.csv +0 -21
  37. noshot/data/ML TS XAI/TS Lab CIA/4 - Daily min temperatures/4 - daily-min-temperatures.ipynb +0 -181
  38. noshot/data/ML TS XAI/TS Lab CIA/4 - Daily min temperatures/daily-min-temperatures.csv +0 -3651
  39. noshot/data/ML TS XAI/TS Lab CIA/5 - shampoo sales/5 - Shampoo sales.ipynb +0 -213
  40. noshot/data/ML TS XAI/TS Lab CIA/5 - shampoo sales/shampoo_sales.csv +0 -37
  41. noshot/data/ML TS XAI/TS Lab CIA/Questions TMS 27 Feb 25.pdf +0 -0
  42. noshot-0.3.8.dist-info/RECORD +0 -53
  43. /noshot/data/ML TS XAI/{ML Lab CIA/1 → ML Additional}/airfoil_self_noise.dat +0 -0
  44. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/1/1.ipynb +0 -0
  45. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/1/Question.txt +0 -0
  46. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/2/2.ipynb +0 -0
  47. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/2/Question.txt +0 -0
  48. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/2/pop_failures.dat +0 -0
  49. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/3/3.ipynb +0 -0
  50. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/3/Qu.txt +0 -0
  51. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/3/go_track_tracks.csv +0 -0
  52. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/4/4.ipynb +0 -0
  53. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/4/Wilt.csv +0 -0
  54. /noshot/data/ML TS XAI/{ML Lab CIA → ML Lab CIA - Healthy Class}/4/qu.txt +0 -0
  55. {noshot-0.3.8.dist-info → noshot-0.4.0.dist-info}/LICENSE.txt +0 -0
  56. {noshot-0.3.8.dist-info → noshot-0.4.0.dist-info}/WHEEL +0 -0
  57. {noshot-0.3.8.dist-info → noshot-0.4.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,69 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "44e9a29a-54f1-4757-b4e7-4f5e3370de5e",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
"""Compare linear-regression MSE on the raw airfoil features vs. a 2-component PCA projection."""
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.decomposition import PCA
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error

# Load the airfoil self-noise dataset (tab-separated, no header row).
# FIX: the original read from '/mnt/data/airfoil_self_noise.dat' — a sandbox-only
# absolute path that does not exist for users of this package. The .dat file ships
# alongside this notebook, so load it by relative path instead.
data = pd.read_csv('airfoil_self_noise.dat', sep='\t', header=None)

# Last column is the target variable; all preceding columns are features.
y = data.iloc[:, -1]
X = data.iloc[:, :-1]

# Hold out 20% for testing; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Baseline: ordinary least squares on the original feature set.
lr = LinearRegression()
lr.fit(X_train, y_train)
y_pred = lr.predict(X_test)
mse_original = mean_squared_error(y_test, y_pred)
print(f'MSE on original dataset: {mse_original}')

# PCA reduced to 2 components for simplicity; fitted on the training split only
# and applied (transform, not fit_transform) to the test split.
pca = PCA(n_components=2)
X_train_pca = pca.fit_transform(X_train)
X_test_pca = pca.transform(X_test)

# Same model class retrained on the PCA-reduced features for a like-for-like MSE comparison.
lr_pca = LinearRegression()
lr_pca.fit(X_train_pca, y_train)
y_pred_pca = lr_pca.predict(X_test_pca)
mse_pca = mean_squared_error(y_test, y_pred_pca)
print(f'MSE on PCA-reduced dataset: {mse_pca}')
45
+ ]
46
+ }
47
+ ],
48
+ "metadata": {
49
+ "kernelspec": {
50
+ "display_name": "Python 3 (ipykernel)",
51
+ "language": "python",
52
+ "name": "python3"
53
+ },
54
+ "language_info": {
55
+ "codemirror_mode": {
56
+ "name": "ipython",
57
+ "version": 3
58
+ },
59
+ "file_extension": ".py",
60
+ "mimetype": "text/x-python",
61
+ "name": "python",
62
+ "nbconvert_exporter": "python",
63
+ "pygments_lexer": "ipython3",
64
+ "version": "3.12.4"
65
+ }
66
+ },
67
+ "nbformat": 4,
68
+ "nbformat_minor": 5
69
+ }
@@ -0,0 +1,78 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "0654f3b1-de71-409d-b69e-bca199d6e851",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
"""Compare KNN accuracy on the obesity dataset with and without PCA dimensionality reduction."""
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler, LabelEncoder
from sklearn.neighbors import KNeighborsClassifier
from sklearn.decomposition import PCA
from sklearn.metrics import accuracy_score

# Load the obesity dataset (mix of numeric and string columns).
data = pd.read_csv('ObesityDataSet_raw_and_data_sinthetic.csv')

# Label-encode every object (string) column, including the class label in the last column.
for col in data.select_dtypes(include=['object']).columns:
    data[col] = LabelEncoder().fit_transform(data[col])

# Last column is the target variable; all preceding columns are features.
y = data.iloc[:, -1]
X = data.iloc[:, :-1]

# Split BEFORE scaling so no test-set information reaches the preprocessing step.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# FIX: the original called scaler.fit_transform on the FULL dataset before splitting,
# leaking test-set means/variances into training. Fit on the training split only and
# apply the fitted transform to the test split.
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Baseline: 5-nearest-neighbours on the standardized features.
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X_train, y_train)
y_pred_knn = knn.predict(X_test)
knn_accuracy = accuracy_score(y_test, y_pred_knn)
print(f'KNN Accuracy without PCA: {knn_accuracy}')

# PCA to 2 components (visualization-friendly), fitted on the training split only.
pca = PCA(n_components=2)
X_train_pca = pca.fit_transform(X_train)
X_test_pca = pca.transform(X_test)

# Same classifier retrained on the PCA-reduced features for comparison.
knn_pca = KNeighborsClassifier(n_neighbors=5)
knn_pca.fit(X_train_pca, y_train)
y_pred_pca = knn_pca.predict(X_test_pca)
knn_pca_accuracy = accuracy_score(y_test, y_pred_pca)
print(f'KNN Accuracy with PCA: {knn_pca_accuracy}')
54
+ ]
55
+ }
56
+ ],
57
+ "metadata": {
58
+ "kernelspec": {
59
+ "display_name": "Python 3 (ipykernel)",
60
+ "language": "python",
61
+ "name": "python3"
62
+ },
63
+ "language_info": {
64
+ "codemirror_mode": {
65
+ "name": "ipython",
66
+ "version": 3
67
+ },
68
+ "file_extension": ".py",
69
+ "mimetype": "text/x-python",
70
+ "name": "python",
71
+ "nbconvert_exporter": "python",
72
+ "pygments_lexer": "ipython3",
73
+ "version": "3.12.4"
74
+ }
75
+ },
76
+ "nbformat": 4,
77
+ "nbformat_minor": 5
78
+ }
@@ -0,0 +1,81 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "88e226f6-c463-4f5d-a469-a521debfb377",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
"""Visualize a k=3 KNN decision surface on synthetic 2-D data with a Voronoi overlay."""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import KNeighborsClassifier
from sklearn.datasets import make_classification
from scipy.spatial import Voronoi, voronoi_plot_2d

# Synthetic 2-feature, 3-class dataset; fixed seed for reproducibility.
features, labels = make_classification(
    n_samples=100,
    n_features=2,
    n_classes=3,
    n_clusters_per_class=1,
    n_redundant=0,
    n_informative=2,
    random_state=42,
)

# 3-nearest-neighbours classifier fitted on the whole dataset.
classifier = KNeighborsClassifier(n_neighbors=3)
classifier.fit(features, labels)

# Dense 500x500 grid covering the data with a 1-unit margin on every side.
margin = 1
x_axis = np.linspace(features[:, 0].min() - margin, features[:, 0].max() + margin, 500)
y_axis = np.linspace(features[:, 1].min() - margin, features[:, 1].max() + margin, 500)
grid_x, grid_y = np.meshgrid(x_axis, y_axis)
mesh_points = np.c_[grid_x.ravel(), grid_y.ravel()]

# Classify every grid point, then reshape back to grid form for contour plotting.
surface = classifier.predict(mesh_points).reshape(grid_x.shape)

# Shaded decision regions with the training points on top.
plt.figure(figsize=(10, 7))
plt.contourf(grid_x, grid_y, surface, alpha=0.3, cmap=plt.cm.Paired)
plt.scatter(features[:, 0], features[:, 1], c=labels, edgecolor='k', cmap=plt.cm.Paired, label='Data Points')

# Overlay the Voronoi tessellation of the training points on the current axes.
diagram = Voronoi(features)
voronoi_plot_2d(diagram, ax=plt.gca(), show_points=False, show_vertices=False, line_colors='black', line_width=0.8)

# Titles, labels, legend, then render.
plt.title("KNN Classification with Voronoi Diagram (k=3)")
plt.xlabel("Feature 1")
plt.ylabel("Feature 2")
plt.legend(loc="upper right")
plt.grid()
plt.show()
57
+ ]
58
+ }
59
+ ],
60
+ "metadata": {
61
+ "kernelspec": {
62
+ "display_name": "Python 3 (ipykernel)",
63
+ "language": "python",
64
+ "name": "python3"
65
+ },
66
+ "language_info": {
67
+ "codemirror_mode": {
68
+ "name": "ipython",
69
+ "version": 3
70
+ },
71
+ "file_extension": ".py",
72
+ "mimetype": "text/x-python",
73
+ "name": "python",
74
+ "nbconvert_exporter": "python",
75
+ "pygments_lexer": "ipython3",
76
+ "version": "3.12.4"
77
+ }
78
+ },
79
+ "nbformat": 4,
80
+ "nbformat_minor": 5
81
+ }