noshot-4.0.0-py3-none-any.whl → noshot-5.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. noshot/data/ML TS XAI/XAI/Q1.ipynb +535 -0
  2. noshot/data/ML TS XAI/XAI/Q2.ipynb +38129 -0
  3. noshot/data/ML TS XAI/XAI/Q3.ipynb +1340 -0
  4. noshot/data/ML TS XAI/XAI/Q4.ipynb +246 -0
  5. noshot/data/ML TS XAI/XAI/Q5.ipynb +2450 -0
  6. {noshot-4.0.0.dist-info → noshot-5.0.0.dist-info}/METADATA +1 -1
  7. noshot-5.0.0.dist-info/RECORD +14 -0
  8. noshot/data/ML TS XAI/ML/ML Lab CIA 2 (I Found Only This Check)/Copy_of_Pistachio_csv.ipynb +0 -269
  9. noshot/data/ML TS XAI/ML/ML Lab CIA 2 (I Found Only This Check)/weatherAUS.ipynb +0 -155
  10. noshot/data/ML TS XAI/ML/Main/1. EDA-PCA (Balance Scale Dataset).ipynb +0 -139
  11. noshot/data/ML TS XAI/ML/Main/1. EDA-PCA (Rice Dataset).ipynb +0 -181
  12. noshot/data/ML TS XAI/ML/Main/10. HMM Veterbi.ipynb +0 -228
  13. noshot/data/ML TS XAI/ML/Main/2. KNN (Balance Scale Dataset).ipynb +0 -117
  14. noshot/data/ML TS XAI/ML/Main/2. KNN (Iris Dataset).ipynb +0 -165
  15. noshot/data/ML TS XAI/ML/Main/2. KNN (Sobar-72 Dataset).ipynb +0 -251
  16. noshot/data/ML TS XAI/ML/Main/3. LDA (Balance Scale Dataset).ipynb +0 -78
  17. noshot/data/ML TS XAI/ML/Main/3. LDA (NPHA Doctor Visits Dataset).ipynb +0 -114
  18. noshot/data/ML TS XAI/ML/Main/4. Linear Regression (Machine Dataset).ipynb +0 -115
  19. noshot/data/ML TS XAI/ML/Main/4. Linear Regression (Real Estate Dataset).ipynb +0 -159
  20. noshot/data/ML TS XAI/ML/Main/5. Logistic Regression (Magic04 Dataset).ipynb +0 -200
  21. noshot/data/ML TS XAI/ML/Main/5. Logistic Regression (Wine Dataset).ipynb +0 -112
  22. noshot/data/ML TS XAI/ML/Main/6. Naive Bayes Classifier (Agaricus Lepiota Dataset).ipynb +0 -153
  23. noshot/data/ML TS XAI/ML/Main/6. Naive Bayes Classifier (Wine Dataset).ipynb +0 -89
  24. noshot/data/ML TS XAI/ML/Main/7. SVM (Rice Dataset).ipynb +0 -208
  25. noshot/data/ML TS XAI/ML/Main/8. FeedForward NN (Sobar72 Dataset).ipynb +0 -260
  26. noshot/data/ML TS XAI/ML/Main/9. CNN (Cifar10 Dataset).ipynb +0 -238
  27. noshot/data/ML TS XAI/ML/Main/data/agaricus-lepiota.data +0 -8124
  28. noshot/data/ML TS XAI/ML/Main/data/balance-scale.txt +0 -625
  29. noshot/data/ML TS XAI/ML/Main/data/doctor-visits.csv +0 -715
  30. noshot/data/ML TS XAI/ML/Main/data/iris.csv +0 -151
  31. noshot/data/ML TS XAI/ML/Main/data/machine-data.csv +0 -210
  32. noshot/data/ML TS XAI/ML/Main/data/magic04.data +0 -19020
  33. noshot/data/ML TS XAI/ML/Main/data/real-estate.xlsx +0 -0
  34. noshot/data/ML TS XAI/ML/Main/data/rice.arff +0 -3826
  35. noshot/data/ML TS XAI/ML/Main/data/sobar-72.csv +0 -73
  36. noshot/data/ML TS XAI/ML/Main/data/wine-dataset.csv +0 -179
  37. noshot/data/ML TS XAI/ML/Other Codes.ipynb +0 -158
  38. noshot/data/ML TS XAI/ML/Rolls Royce AllinOne.ipynb +0 -691
  39. noshot-4.0.0.dist-info/RECORD +0 -40
  40. {noshot-4.0.0.dist-info → noshot-5.0.0.dist-info}/WHEEL +0 -0
  41. {noshot-4.0.0.dist-info → noshot-5.0.0.dist-info}/licenses/LICENSE.txt +0 -0
  42. {noshot-4.0.0.dist-info → noshot-5.0.0.dist-info}/top_level.txt +0 -0
@@ -1,73 +0,0 @@ noshot/data/ML TS XAI/ML/Main/data/sobar-72.csv
- behavior_sexualRisk,behavior_eating,behavior_personalHygine,intention_aggregation,intention_commitment,attitude_consistency,attitude_spontaneity,norm_significantPerson,norm_fulfillment,perception_vulnerability,perception_severity,motivation_strength,motivation_willingness,socialSupport_emotionality,socialSupport_appreciation,socialSupport_instrumental,empowerment_knowledge,empowerment_abilities,empowerment_desires,ca_cervix
… (the remaining 72 deleted data rows of this CSV are omitted here)
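For reference, a minimal loading sketch for the removed behaviour-risk data is shown below. The file path is hypothetical (the CSV is no longer shipped in noshot 5.0.0 and would have to be a local copy), and the column names are taken from the header row in the hunk above.

```python
# Minimal sketch: load a local copy of the removed sobar-72.csv and split it for modelling.
# The path is an assumption; column names come from the deleted file's header row.
import pandas as pd
from sklearn.model_selection import train_test_split

df = pd.read_csv("sobar-72.csv")        # 1 header row + 72 data rows
X = df.drop(columns=["ca_cervix"])      # 19 behaviour/attitude feature columns
y = df["ca_cervix"]                     # binary target column
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, stratify=y, random_state=42
)
print(X_train.shape, X_test.shape)
```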
@@ -1,179 +0,0 @@ noshot/data/ML TS XAI/ML/Main/data/wine-dataset.csv
- alcohol,malic_acid,ash,alcalinity_of_ash,magnesium,total_phenols,flavanoids,nonflavanoid_phenols,proanthocyanins,color_intensity,hue,od280/od315_of_diluted_wines,proline,target
… (the remaining 178 deleted data rows of this CSV are omitted here)
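The removed wine-dataset.csv appears to mirror scikit-learn's bundled wine dataset (same column names and 0/1/2 target coding), so an equivalent file can be regenerated locally rather than recovered from the old wheel. A small sketch, assuming scikit-learn is installed:

```python
# Regenerate a CSV equivalent to the deleted wine-dataset.csv from scikit-learn's
# bundled copy of the UCI wine data (assumption: the two are the same dataset).
from sklearn.datasets import load_wine

frame = load_wine(as_frame=True).frame   # 178 rows, 13 feature columns + 'target'
frame.to_csv("wine-dataset.csv", index=False)
print(frame.shape)                       # expected: (178, 14)
```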
@@ -1,158 +0,0 @@ noshot/data/ML TS XAI/ML/Other Codes.ipynb
- {
- "cells": [
- {
- "cell_type": "raw",
- "id": "bd639ea6-be49-4b3a-bebd-632493381a46",
- "metadata": {},
- "source": [
- "1. Design a custom 4-layer feed forward neural network for the weather prediction dataset. Modify your model to include L2 regularization on all hidden layers."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f4c79e75-428d-47dc-91a3-2edadee82436",
- "metadata": {},
- "outputs": [],
- "source": [
- "from tensorflow.keras.models import Sequential\n",
- "from tensorflow.keras.layers import Dense\n",
- "from tensorflow.keras.regularizers import l2\n",
- "\n",
- "model = Sequential([\n",
- " Dense(128, input_shape=(num_features,), activation='relu', kernel_regularizer=l2(0.01)),\n",
- " Dense(64, activation='relu', kernel_regularizer=l2(0.01)),\n",
- " Dense(32, activation='relu', kernel_regularizer=l2(0.01)),\n",
- " Dense(1, activation='linear') # Assuming a regression task\n",
- "])\n",
- "\n",
- "model.compile(optimizer='adam', loss='mse', metrics=['mae'])\n",
- "model.summary()\n"
- ]
- },
- {
- "cell_type": "raw",
- "id": "6afa7495-6f81-42cb-97d1-33bb475a3e58",
- "metadata": {},
- "source": [
- "2. Design customized 7-layer CNN architecture to classify the sign images."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "84acb2a8-4a5a-4ce4-ac57-3890940b43be",
- "metadata": {},
- "outputs": [],
- "source": [
- "from tensorflow.keras.models import Sequential\n",
- "from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout\n",
- "\n",
- "model = Sequential([\n",
- " Conv2D(32, (3,3), activation='relu', input_shape=(64, 64, 3)), # adjust input shape as needed\n",
- " Conv2D(32, (3,3), activation='relu'),\n",
- " MaxPooling2D(pool_size=(2,2)),\n",
- "\n",
- " Conv2D(64, (3,3), activation='relu'),\n",
- " Conv2D(64, (3,3), activation='relu'),\n",
- " MaxPooling2D(pool_size=(2,2)),\n",
- "\n",
- " Flatten(),\n",
- " Dense(128, activation='relu'),\n",
- " Dense(num_classes, activation='softmax')\n",
- "])\n",
- "\n",
- "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
- "model.summary()\n"
- ]
- },
- {
- "cell_type": "raw",
- "id": "eea88c65-1ab3-42fe-9a68-ddb4a8577b3b",
- "metadata": {},
- "source": [
- "3. Build a Feed Forward Neural Network using Keras to classify pistachio types. Your model should include dropout layers to prevent overfitting."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "e96669fb-25ef-408f-9b00-60f497e33341",
- "metadata": {},
- "outputs": [],
- "source": [
- "from tensorflow.keras.models import Sequential\n",
- "from tensorflow.keras.layers import Dense, Dropout\n",
- "\n",
- "model = Sequential([\n",
- " Dense(128, input_shape=(num_features,), activation='relu'),\n",
- " Dropout(0.3),\n",
- " Dense(64, activation='relu'),\n",
- " Dropout(0.3),\n",
- " Dense(1, activation='sigmoid') # or 'softmax' if more than 2 types\n",
- "])\n",
- "\n",
- "model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n",
- "model.summary()\n"
- ]
- },
- {
- "cell_type": "raw",
- "id": "8b5fd045-bb5d-449c-8dbe-f1702efd369b",
- "metadata": {},
- "source": [
- "4. Design customized 10-layer CNN architecture to classify the pistachio images."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "ea1e8b99-13ce-4c79-972d-349189b590ae",
- "metadata": {},
- "outputs": [],
- "source": [
- "model = Sequential([\n",
- " Conv2D(32, (3,3), activation='relu', input_shape=(64, 64, 3)),\n",
- " Conv2D(32, (3,3), activation='relu'),\n",
- " MaxPooling2D(pool_size=(2,2)),\n",
- "\n",
- " Conv2D(64, (3,3), activation='relu'),\n",
- " Conv2D(64, (3,3), activation='relu'),\n",
- " MaxPooling2D(pool_size=(2,2)),\n",
- "\n",
- " Conv2D(128, (3,3), activation='relu'),\n",
- " Conv2D(128, (3,3), activation='relu'),\n",
- " MaxPooling2D(pool_size=(2,2)),\n",
- "\n",
- " Flatten(),\n",
- " Dense(256, activation='relu'),\n",
- " Dense(num_classes, activation='softmax')\n",
- "])\n",
- "\n",
- "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
- "model.summary()\n"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
- },
- "nbformat": 4,
- "nbformat_minor": 5
- }
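The cells in the removed notebook reference names that are never defined (num_features, num_classes) and omit any data preparation, so they do not run on their own. Below is a self-contained sketch of the first exercise (a 4-layer feed-forward network with L2 regularization); the feature count and the synthetic arrays are placeholders, not part of the original code, and would be replaced by the real weather data. The same pattern (define num_classes and supply image arrays) applies to the CNN cells.

```python
# Self-contained sketch of the removed "4-layer FFNN with L2 regularization" exercise.
# num_features and the random arrays are assumptions used only so the snippet runs end to end.
import numpy as np
from tensorflow.keras import Input
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.regularizers import l2

num_features = 10                                   # placeholder: set to the real feature count
X = np.random.rand(200, num_features).astype("float32")
y = np.random.rand(200, 1).astype("float32")        # synthetic regression target

model = Sequential([
    Input(shape=(num_features,)),
    Dense(128, activation="relu", kernel_regularizer=l2(0.01)),
    Dense(64, activation="relu", kernel_regularizer=l2(0.01)),
    Dense(32, activation="relu", kernel_regularizer=l2(0.01)),
    Dense(1, activation="linear"),                  # regression head, unregularized as in the original
])
model.compile(optimizer="adam", loss="mse", metrics=["mae"])
model.fit(X, y, epochs=2, batch_size=32, verbose=0)
model.summary()
```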