noshot-0.3.2-py3-none-any.whl → noshot-0.3.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. noshot/data/ML TS XAI/ML/{1. PCA - EDA/PCA-EDA.ipynb → 1. PCA - EDA.ipynb } +2 -2
  2. noshot/data/ML TS XAI/ML/{2. KNN Classifier/KNN.ipynb → 2. KNN Classifier.ipynb } +2 -2
  3. noshot/data/ML TS XAI/ML/{3. Linear Discriminant Analysis/LDA.ipynb → 3. Linear Discriminant Analysis.ipynb } +2 -2
  4. noshot/data/ML TS XAI/ML/{4. Linear Regression/Linear-Regression.ipynb → 4. Linear Regression.ipynb } +1 -1
  5. noshot/data/ML TS XAI/ML/{5. Logistic Regression/Logistic-Regression.ipynb → 5. Logistic Regression.ipynb } +28 -14
  6. noshot/data/ML TS XAI/ML/{6. Bayesian Classifier/Bayesian.ipynb → 6. Bayesian Classifier.ipynb } +7 -5
  7. noshot/data/ML TS XAI/ML/{1. PCA - EDA → data}/balance-scale.csv +626 -626
  8. noshot/data/ML TS XAI/ML/{3. Linear Discriminant Analysis/input.txt → data/balance-scale.txt } +624 -624
  9. noshot/data/ML TS XAI/ML/{4. Linear Regression → data}/machine-data.csv +210 -210
  10. noshot/data/ML TS XAI/ML/{6. Bayesian Classifier → data}/wine-dataset.csv +179 -179
  11. noshot/data/ML TS XAI/TS/{1. EDA - Handling Time Series Data/Handling TS Data.ipynb → 1. EDA - Handling Time Series Data.ipynb } +1 -1
  12. noshot/data/ML TS XAI/TS/{5. Stationarity - Trend - Seasonality/Stationarity-Trend-Seasonality.ipynb → 5. Stationarity - Trend - Seasonality.ipynb } +1 -1
  13. noshot/data/ML TS XAI/TS/{6. Autocorrelation - Partial Autocorrelation/ACF-PACF.ipynb → 6. Autocorrelation - Partial Autocorrelation.ipynb } +1 -1
  14. noshot/data/ML TS XAI/TS/AllinOne.ipynb +140 -11400
  15. noshot/data/ML TS XAI/TS/{5. Stationarity - Trend - Seasonality → data}/daily-min-temperatures.csv +3650 -3650
  16. noshot/data/ML TS XAI/TS/{5. Stationarity - Trend - Seasonality → data}/daily-total-female-births.csv +365 -365
  17. noshot/data/ML TS XAI/TS/{1. EDA - Handling Time Series Data → data}/raw_sales.csv +29580 -29580
  18. noshot/data/ML TS XAI/TS/{4. Up-Down-Sampling and Interploation → data}/shampoo_sales.csv +36 -36
  19. noshot/main.py +18 -18
  20. noshot/utils/__init__.py +2 -2
  21. noshot/utils/shell_utils.py +56 -56
  22. {noshot-0.3.2.dist-info → noshot-0.3.4.dist-info}/LICENSE.txt +20 -20
  23. {noshot-0.3.2.dist-info → noshot-0.3.4.dist-info}/METADATA +55 -55
  24. noshot-0.3.4.dist-info/RECORD +30 -0
  25. noshot/data/ML TS XAI/ML/1. PCA - EDA/input.txt +0 -625
  26. noshot/data/ML TS XAI/ML/2. KNN Classifier/balance-scale.csv +0 -626
  27. noshot/data/ML TS XAI/ML/2. KNN Classifier/input.txt +0 -625
  28. noshot/data/ML TS XAI/ML/3. Linear Discriminant Analysis/balance-scale.csv +0 -626
  29. noshot/data/ML TS XAI/ML/5. Logistic Regression/wine-dataset.csv +0 -179
  30. noshot/data/ML TS XAI/TS/6. Autocorrelation - Partial Autocorrelation/daily-min-temperatures.csv +0 -3651
  31. noshot-0.3.2.dist-info/RECORD +0 -36
  32. noshot/data/ML TS XAI/TS/{2. Feature Engineering/Feature Engineering-.ipynb → 2. Feature Engineering.ipynb} +0 -0
  33. noshot/data/ML TS XAI/TS/{3. Temporal Relationships/Exploring Temporal Relationships.ipynb → 3. Temporal Relationships.ipynb} +0 -0
  34. noshot/data/ML TS XAI/TS/{4. Up-Down-Sampling and Interploation/Up-Down-Sampling.ipynb → 4. Up-Down-Sampling and Interpolation.ipynb} +0 -0
  35. {noshot-0.3.2.dist-info → noshot-0.3.4.dist-info}/WHEEL +0 -0
  36. {noshot-0.3.2.dist-info → noshot-0.3.4.dist-info}/top_level.txt +0 -0
noshot/data/ML TS XAI/ML/{1. PCA - EDA/PCA-EDA.ipynb → 1. PCA - EDA.ipynb}
@@ -31,8 +31,8 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "df = pd.read_table('input.txt', delimiter = ',', names = ['class name', 'left-weight', 'left-distance', 'right-weight', 'right-distance'])\n",
- "#df = pd.read_csv('balance-scale.csv')\n",
+ "df = pd.read_table('data/balance-scale.txt', delimiter = ',', names = ['class name', 'left-weight', 'left-distance', 'right-weight', 'right-distance'])\n",
+ "#df = pd.read_csv('data/balance-scale.csv')\n",
  "df.head()"
  ]
  },
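This hunk, and the matching ones in the KNN and LDA notebooks below, point the loader at the shared data/ copy of the dataset. A minimal standalone sketch of the updated pattern (column names and paths taken from the diff; it assumes the working directory is the notebook's folder so the relative data/ path resolves):

import pandas as pd

# Headerless text copy in the consolidated data/ folder; column names supplied explicitly
cols = ['class name', 'left-weight', 'left-distance', 'right-weight', 'right-distance']
df = pd.read_table('data/balance-scale.txt', delimiter=',', names=cols)

# CSV copy of the same dataset, left commented out in the notebooks
# df = pd.read_csv('data/balance-scale.csv')

print(df.head())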
noshot/data/ML TS XAI/ML/{2. KNN Classifier/KNN.ipynb → 2. KNN Classifier.ipynb}
@@ -32,8 +32,8 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "df = pd.read_csv('input.txt', delimiter = ',', names=['class name','left-weight','left-distance','right-weight','right-distance'])\n",
- "#df = pd.read_csv('balance-scale.csv')\n",
+ "df = pd.read_csv('data/balance-scale.txt', delimiter = ',', names=['class name','left-weight','left-distance','right-weight','right-distance'])\n",
+ "#df = pd.read_csv('data/balance-scale.csv')\n",
  "df.head()"
  ]
  },
noshot/data/ML TS XAI/ML/{3. Linear Discriminant Analysis/LDA.ipynb → 3. Linear Discriminant Analysis.ipynb}
@@ -22,8 +22,8 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "df = pd.read_table('input.txt', delimiter = \",\", names=['class name','left-weight','left-distance','right-weight','right-distance'])\n",
- "df = pd.read_csv('balance-scale.csv')\n",
+ "df = pd.read_table('data/balance-scale.txt', delimiter = \",\", names=['class name','left-weight','left-distance','right-weight','right-distance'])\n",
+ "#df = pd.read_csv('data/balance-scale.csv')\n",
  "df.head()"
  ]
  },
noshot/data/ML TS XAI/ML/{4. Linear Regression/Linear-Regression.ipynb → 4. Linear Regression.ipynb}
@@ -21,7 +21,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "df = pd.read_csv('machine-data.csv')\n",
+ "df = pd.read_csv('data/machine-data.csv')\n",
  "df.head()"
  ]
  },
noshot/data/ML TS XAI/ML/{5. Logistic Regression/Logistic-Regression.ipynb → 5. Logistic Regression.ipynb}
@@ -25,8 +25,18 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "wine = datasets.load_wine()\n",
- "type(wine)"
+ "wine = pd.read_csv('data/wine-dataset.csv')\n",
+ "print(wine.shape)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c4e953da-6941-43f2-a9ce-aab907876d45",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "wine.columns"
  ]
  },
  {
@@ -36,18 +46,19 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "wine.data[:5,:]"
+ "X = wine.iloc[:, :13]\n",
+ "X.head()"
  ]
  },
  {
  "cell_type": "code",
  "execution_count": null,
- "id": "3eed721d-7956-40fb-9831-1a79f73cb906",
+ "id": "5cfd2fe6-3825-4d95-b606-3b3e2ef685b2",
  "metadata": {},
  "outputs": [],
  "source": [
- "print(type(wine.feature_names))\n",
- "wine.feature_names"
+ "y = wine.iloc[:, 13]\n",
+ "y"
  ]
  },
  {
@@ -57,7 +68,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "X_train,X_test,y_train,y_test = train_test_split(wine.data, wine.target, test_size=0.30, random_state=7)\n",
+ "X_train,X_test,y_train,y_test = train_test_split(X, y, test_size=0.30, random_state=7)\n",
  "\n",
  "log_reg_model = linear_model.LogisticRegression()\n",
  "log_reg_model.fit(X_train,y_train)"
@@ -88,21 +99,24 @@
  {
  "cell_type": "code",
  "execution_count": null,
- "id": "2fcd6449-feca-4b90-828f-420ba5bb8bcf",
- "metadata": {},
+ "id": "600ec8f2-34e1-4be7-8ef5-fe53ff673f41",
+ "metadata": {
+ "scrolled": true
+ },
  "outputs": [],
  "source": [
- "X = wine.data[:,:2]\n",
- "Y = wine.target\n",
+ "X = X.iloc[:, :2]\n",
+ "Y = y\n",
+ "\n",
  "log_reg_model.fit(X,Y)\n",
- "x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5\n",
- "y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5\n",
+ "x_min, x_max = X.iloc[:, 0].min() - .5, X.iloc[:, 0].max() + .5\n",
+ "y_min, y_max = X.iloc[:, 1].min() - .5, X.iloc[:, 1].max() + .5\n",
  "xx, yy = np.meshgrid(np.arange(x_min, x_max, .01), np.arange(y_min, y_max, .01))\n",
  "Z = log_reg_model.predict(np.c_[xx.ravel(), yy.ravel()])\n",
  "Z = Z.reshape(xx.shape)\n",
  "plt.figure(1, figsize = (4, 3))\n",
  "plt.pcolormesh(xx, yy, Z, cmap = plt.cm.Paired)\n",
- "plt.scatter(X[:, 0], X[:, 1], c = Y, edgecolors = 'k', cmap = plt.cm.Paired)\n",
+ "plt.scatter(X.iloc[:, 0], X.iloc[:, 1], c = Y, edgecolors = 'k', cmap = plt.cm.Paired)\n",
  "plt.xlabel('X')\n",
  "plt.ylabel('Y')\n",
  "plt.xlim(xx.min(), xx.max())\n",
noshot/data/ML TS XAI/ML/{6. Bayesian Classifier/Bayesian.ipynb → 6. Bayesian Classifier.ipynb}
@@ -10,7 +10,8 @@
  "from sklearn import datasets\n",
  "from sklearn.metrics import confusion_matrix\n",
  "from sklearn.model_selection import train_test_split\n",
- "from sklearn.naive_bayes import GaussianNB"
+ "from sklearn.naive_bayes import GaussianNB\n",
+ "import pandas as pd"
  ]
  },
  {
@@ -20,7 +21,8 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "wine = datasets.load_wine()"
+ "wine = pd.read_csv('data/wine-dataset.csv')\n",
+ "print(wine.shape)"
  ]
  },
  {
@@ -30,8 +32,8 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "X = wine.data\n",
- "X"
+ "X = wine.iloc[:, :13]\n",
+ "X.head()"
  ]
  },
  {
@@ -41,7 +43,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "y = wine.target\n",
+ "y = wine.iloc[:, 13]\n",
  "y"
  ]
  },
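The Bayesian Classifier hunks make the same move to the packaged CSV and DataFrame slicing. A minimal sketch under the same column-layout assumption; the split parameters below are illustrative, since the notebook's own train_test_split call is not part of these hunks:

import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB

wine = pd.read_csv('data/wine-dataset.csv')
X = wine.iloc[:, :13]   # feature columns (assumed layout)
y = wine.iloc[:, 13]    # class label, assumed to be the 14th column

# Illustrative split values; not taken from the diff
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.30, random_state=7)

model = GaussianNB()
model.fit(X_train, y_train)
print(confusion_matrix(y_test, model.predict(X_test)))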