Code2Text 0.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- code2text-0.2.1/Code2Text/__init__.py +4 -0
- code2text-0.2.1/Code2Text/fdsa.py +598 -0
- code2text-0.2.1/Code2Text/utils.py +462 -0
- code2text-0.2.1/Code2Text.egg-info/PKG-INFO +7 -0
- code2text-0.2.1/Code2Text.egg-info/SOURCES.txt +9 -0
- code2text-0.2.1/Code2Text.egg-info/dependency_links.txt +1 -0
- code2text-0.2.1/Code2Text.egg-info/top_level.txt +1 -0
- code2text-0.2.1/PKG-INFO +7 -0
- code2text-0.2.1/pyproject.toml +11 -0
- code2text-0.2.1/setup.cfg +4 -0
- code2text-0.2.1/setup.py +8 -0
|
@@ -0,0 +1,598 @@
|
|
|
1
|
+
"""
|
|
2
|
+
✅ Class fdsa - Contains all programs from fds.py as text
|
|
3
|
+
Each method returns the program code as a string
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class fdsa:
    """Class containing all data science and ML programs as text.

    Each ``program_N`` static method returns one self-contained example
    script as a plain string — nothing is executed here.  The scripts that
    load "Iris.csv" / "diabetes.csv" expect those files on disk; matching
    CSV text is available from ``fdsa_data`` in this module.
    NOTE(review): in-string indentation was reconstructed from a
    whitespace-stripped rendering — confirm against the released sdist.
    """

    @staticmethod
    def program_1():
        """EX NO: 1 – NumPy Arrays"""
        return """# ========== PROGRAM 1: NumPy Arrays ==========

import numpy as np

arr = np.array([[1,2,3],[4,2,5]])

print("Type:", type(arr))
print("Dimensions:", arr.ndim)
print("Shape:", arr.shape)
print("Size:", arr.size)
print("Datatype:", arr.dtype)"""

    @staticmethod
    def program_2():
        """EX NO: 2 – Array Slicing"""
        return """# ========== PROGRAM 2: Array Slicing ==========

import numpy as np

a = np.array([[1,2,3],[3,4,5],[4,5,6]])

print(a)
print("Second column:", a[:,1])
print("Second row:", a[1,:])
print("Column 1 onwards:\\n", a[:,1:])"""

    @staticmethod
    def program_3():
        """EX NO: 3 – Pandas DataFrame"""
        return """# ========== PROGRAM 3: Pandas DataFrame ==========

import numpy as np
import pandas as pd

data = np.array([['','Col1','Col2'],
                 ['Row1',1,2],
                 ['Row2',3,4]])

df = pd.DataFrame(data=data[1:,1:], index=data[1:,0], columns=data[0,1:])
print(df)

arr = np.array([[1,2,3],[4,5,6]])
print(pd.DataFrame(arr))

d = {1:['1','3'],2:['1','2'],3:['2','4']}
print(pd.DataFrame(d))"""

    @staticmethod
    def program_4():
        """EX NO: 4 – Iris Dataset (CSV)"""
        return """# ========== PROGRAM 4: Iris Dataset (CSV) ==========

import pandas as pd

df = pd.read_csv("Iris.csv")

print(df.head())
print("Shape:", df.shape)

print(df.info())
print(df.describe())

print("Missing values:\\n", df.isnull().sum())

# Remove duplicates
data = df.drop_duplicates(subset="Species")
print(data)

print(df["Species"].value_counts())"""

    @staticmethod
    def program_5():
        """EX NO: 5 – Univariate Analysis"""
        return """# ========== PROGRAM 5: Univariate Analysis ==========

import pandas as pd
import numpy as np

df = pd.read_csv("diabetes.csv")

def analysis(df):
    for col in df.columns:
        print(f"\\n--- {col} ---")
        print("Mean:", df[col].mean())
        print("Median:", df[col].median())
        print("Mode:", df[col].mode()[0])
        print("Variance:", df[col].var())
        print("Std Dev:", df[col].std())
        print("Skewness:", df[col].skew())

analysis(df)"""

    @staticmethod
    def program_6():
        """EX NO: 6 – Logistic Regression"""
        return """# ========== PROGRAM 6: Logistic Regression ==========

import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

df = pd.read_csv("diabetes.csv")

df.replace(0, np.nan, inplace=True)
df.fillna(df.mean(numeric_only=True), inplace=True)

X = df.drop("Outcome", axis=1)
y = df["Outcome"]

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

model = LogisticRegression(max_iter=1000)
model.fit(X_train, y_train)

y_pred = model.predict(X_test)

print("Accuracy:", accuracy_score(y_test, y_pred))"""

    @staticmethod
    def program_7():
        """EX NO: 7 – Histogram + Normal Curve"""
        return """# ========== PROGRAM 7: Histogram + Normal Curve ==========

import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from scipy.stats import norm

df = pd.read_csv("Iris.csv")

for col in df.columns[:-1]:
    sns.histplot(df[col], kde=True, stat="density")

    mean, std = norm.fit(df[col])
    xmin, xmax = plt.xlim()
    x = np.linspace(xmin, xmax, 100)
    p = norm.pdf(x, mean, std)

    plt.plot(x, p, 'k', linewidth=2)
    plt.title(col)
    plt.show()"""

    @staticmethod
    def program_8():
        """EX NO: 8 – Density + Contour"""
        return """# ========== PROGRAM 8: Density + Contour ==========

import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

df = pd.read_csv("Iris.csv")

# Density
for col in df.columns[:-1]:
    sns.kdeplot(data=df, x=col, hue="Species", fill=True)
    plt.show()

# Contour
sns.pairplot(df, hue="Species", kind="kde")
plt.show()"""

    @staticmethod
    def program_9():
        """EX NO: 9 – Correlation + Scatter"""
        return """# ========== PROGRAM 9: Correlation + Scatter ==========

import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

df = pd.read_csv("diabetes.csv")

corr = df.corr(numeric_only=True)
sns.heatmap(corr, annot=True)
plt.show()

plt.scatter(df["Glucose"], df["Insulin"])
plt.show()

plt.scatter(df["BMI"], df["Age"])
plt.show()"""

    @staticmethod
    def program_10():
        """EX NO: 10 – Histogram + 3D Plot"""
        return """# ========== PROGRAM 10: Histogram + 3D Plot ==========

import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

df = pd.read_csv("diabetes.csv")

# Histogram
df.hist()
plt.show()

# 3D plot
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

ax.scatter(df["Glucose"], df["BMI"], df["Age"])

ax.set_xlabel("Glucose")
ax.set_ylabel("BMI")
ax.set_zlabel("Age")

plt.show()"""
|
|
225
|
+
|
|
226
|
+
class fdsa_data:
    """Raw CSV dataset contents, returned as plain strings.

    Column headers match the files read by the ``fdsa`` program scripts
    ("Iris.csv" and "diabetes.csv"); callers presumably write these strings
    to disk before running those scripts — TODO confirm intended usage.
    """

    def iris(self):
        """Return the Iris dataset (150 rows) as CSV text, header included."""
        return '''Id,SepalLengthCm,SepalWidthCm,PetalLengthCm,PetalWidthCm,Species
1,5.1,3.5,1.4,0.2,setosa
2,4.9,3.0,1.4,0.2,setosa
3,4.7,3.2,1.3,0.2,setosa
4,4.6,3.1,1.5,0.2,setosa
5,5.0,3.6,1.4,0.2,setosa
6,5.4,3.9,1.7,0.4,setosa
7,4.6,3.4,1.4,0.3,setosa
8,5.0,3.4,1.5,0.2,setosa
9,4.4,2.9,1.4,0.2,setosa
10,4.9,3.1,1.5,0.1,setosa
11,5.4,3.7,1.5,0.2,setosa
12,4.8,3.4,1.6,0.2,setosa
13,4.8,3.0,1.4,0.1,setosa
14,4.3,3.0,1.1,0.1,setosa
15,5.8,4.0,1.2,0.2,setosa
16,5.7,4.4,1.5,0.4,setosa
17,5.4,3.9,1.3,0.4,setosa
18,5.1,3.5,1.4,0.3,setosa
19,5.7,3.8,1.7,0.3,setosa
20,5.1,3.8,1.5,0.3,setosa
21,5.4,3.4,1.7,0.2,setosa
22,5.1,3.7,1.5,0.4,setosa
23,4.6,3.6,1.0,0.2,setosa
24,5.1,3.3,1.7,0.5,setosa
25,4.8,3.4,1.9,0.2,setosa
26,5.0,3.0,1.6,0.2,setosa
27,5.0,3.4,1.6,0.4,setosa
28,5.2,3.5,1.5,0.2,setosa
29,5.2,3.4,1.4,0.2,setosa
30,4.7,3.2,1.6,0.2,setosa
31,4.8,3.1,1.6,0.2,setosa
32,5.4,3.4,1.5,0.4,setosa
33,5.2,4.1,1.5,0.1,setosa
34,5.5,4.2,1.4,0.2,setosa
35,4.9,3.1,1.5,0.2,setosa
36,5.0,3.2,1.2,0.2,setosa
37,5.5,3.5,1.3,0.2,setosa
38,4.9,3.6,1.4,0.1,setosa
39,4.4,3.0,1.3,0.2,setosa
40,5.1,3.4,1.5,0.2,setosa
41,5.0,3.5,1.3,0.3,setosa
42,4.5,2.3,1.3,0.3,setosa
43,4.4,3.2,1.3,0.2,setosa
44,5.0,3.5,1.6,0.6,setosa
45,5.1,3.8,1.9,0.4,setosa
46,4.8,3.0,1.4,0.3,setosa
47,5.1,3.8,1.6,0.2,setosa
48,4.6,3.2,1.4,0.2,setosa
49,5.3,3.7,1.5,0.2,setosa
50,5.0,3.3,1.4,0.2,setosa
51,7.0,3.2,4.7,1.4,versicolor
52,6.4,3.2,4.5,1.5,versicolor
53,6.9,3.1,4.9,1.5,versicolor
54,5.5,2.3,4.0,1.3,versicolor
55,6.5,2.8,4.6,1.5,versicolor
56,5.7,2.8,4.5,1.3,versicolor
57,6.3,3.3,4.7,1.6,versicolor
58,4.9,2.4,3.3,1.0,versicolor
59,6.6,2.9,4.6,1.3,versicolor
60,5.2,2.7,3.9,1.4,versicolor
61,5.0,2.0,3.5,1.0,versicolor
62,5.9,3.0,4.2,1.5,versicolor
63,6.0,2.2,4.0,1.0,versicolor
64,6.1,2.9,4.7,1.4,versicolor
65,5.6,2.9,3.6,1.3,versicolor
66,6.7,3.1,4.4,1.4,versicolor
67,5.6,3.0,4.5,1.5,versicolor
68,5.8,2.7,4.1,1.0,versicolor
69,6.2,2.2,4.5,1.5,versicolor
70,5.6,2.5,3.9,1.1,versicolor
71,5.9,3.2,4.8,1.8,versicolor
72,6.1,2.8,4.0,1.3,versicolor
73,6.3,2.5,4.9,1.5,versicolor
74,6.1,2.8,4.7,1.2,versicolor
75,6.4,2.9,4.3,1.3,versicolor
76,6.6,3.0,4.4,1.4,versicolor
77,6.8,2.8,4.8,1.4,versicolor
78,6.7,3.0,5.0,1.7,versicolor
79,6.0,2.9,4.5,1.5,versicolor
80,5.7,2.6,3.5,1.0,versicolor
81,5.5,2.4,3.8,1.1,versicolor
82,5.5,2.4,3.7,1.0,versicolor
83,5.8,2.7,3.9,1.2,versicolor
84,6.0,2.7,5.1,1.6,versicolor
85,5.4,3.0,4.5,1.5,versicolor
86,6.0,3.4,4.5,1.6,versicolor
87,6.7,3.1,4.7,1.5,versicolor
88,6.3,2.3,4.4,1.3,versicolor
89,5.6,3.0,4.1,1.3,versicolor
90,5.5,2.5,4.0,1.3,versicolor
91,5.5,2.6,4.4,1.2,versicolor
92,6.1,3.0,4.6,1.4,versicolor
93,5.8,2.6,4.0,1.2,versicolor
94,5.0,2.3,3.3,1.0,versicolor
95,5.6,2.7,4.2,1.3,versicolor
96,5.7,3.0,4.2,1.2,versicolor
97,5.7,2.9,4.2,1.3,versicolor
98,6.2,2.9,4.3,1.3,versicolor
99,5.1,2.5,3.0,1.1,versicolor
100,5.7,2.8,4.1,1.3,versicolor
101,6.3,3.3,6.0,2.5,virginica
102,5.8,2.7,5.1,1.9,virginica
103,7.1,3.0,5.9,2.1,virginica
104,6.3,2.9,5.6,1.8,virginica
105,6.5,3.0,5.8,2.2,virginica
106,7.6,3.0,6.6,2.1,virginica
107,4.9,2.5,4.5,1.7,virginica
108,7.3,2.9,6.3,1.8,virginica
109,6.7,2.5,5.8,1.8,virginica
110,7.2,3.6,6.1,2.5,virginica
111,6.5,3.2,5.1,2.0,virginica
112,6.4,2.7,5.3,1.9,virginica
113,6.8,3.0,5.5,2.1,virginica
114,5.7,2.5,5.0,2.0,virginica
115,5.8,2.8,5.1,2.4,virginica
116,6.4,3.2,5.3,2.3,virginica
117,6.5,3.0,5.5,1.8,virginica
118,7.7,3.8,6.7,2.2,virginica
119,7.7,2.6,6.9,2.3,virginica
120,6.0,2.2,5.0,1.5,virginica
121,6.9,3.2,5.7,2.3,virginica
122,5.6,2.8,4.9,2.0,virginica
123,7.7,2.8,6.7,2.0,virginica
124,6.3,2.7,4.9,1.8,virginica
125,6.7,3.3,5.7,2.1,virginica
126,7.2,3.2,6.0,1.8,virginica
127,6.2,2.8,4.8,1.8,virginica
128,6.1,3.0,4.9,1.8,virginica
129,6.4,2.8,5.6,2.1,virginica
130,7.2,3.0,5.8,1.6,virginica
131,7.4,2.8,6.1,1.9,virginica
132,7.9,3.8,6.4,2.0,virginica
133,6.4,2.8,5.6,2.2,virginica
134,6.3,2.8,5.1,1.5,virginica
135,6.1,2.6,5.6,1.4,virginica
136,7.7,3.0,6.1,2.3,virginica
137,6.3,3.4,5.6,2.4,virginica
138,6.4,3.1,5.5,1.8,virginica
139,6.0,3.0,4.8,1.8,virginica
140,6.9,3.1,5.4,2.1,virginica
141,6.7,3.1,5.6,2.4,virginica
142,6.9,3.1,5.1,2.3,virginica
143,5.8,2.7,5.1,1.9,virginica
144,6.8,3.2,5.9,2.3,virginica
145,6.7,3.3,5.7,2.5,virginica
146,6.7,3.0,5.2,2.3,virginica
147,6.3,2.5,5.0,1.9,virginica
148,6.5,3.0,5.2,2.0,virginica
149,6.2,3.4,5.4,2.3,virginica
150,5.9,3.0,5.1,1.8,virginica
'''

    def diabetes(self):
        """Return a diabetes-style dataset (200 rows) as CSV text, header included."""
        return '''Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
6,98,69,25,362,43.0,2.138,43,1
3,105,58,30,507,30.2,0.699,51,1
12,82,56,20,273,21.1,1.275,63,0
14,140,102,46,505,44.6,0.631,51,1
10,155,58,45,14,40.7,2.47,46,0
7,97,97,44,191,21.4,2.366,21,1
12,135,94,28,484,42.9,0.195,34,1
4,114,101,29,27,41.5,1.793,52,0
6,131,62,27,38,32.0,2.321,30,0
9,97,48,23,272,34.0,0.533,58,0
2,97,51,24,230,28.8,1.463,56,1
6,177,40,40,85,19.5,2.297,32,1
10,113,97,10,473,27.1,0.181,46,0
10,153,40,12,125,39.7,1.774,46,0
7,99,73,25,536,18.1,0.814,28,0
4,144,87,32,144,27.0,2.319,69,0
3,197,40,20,524,28.8,2.431,49,1
7,161,55,21,211,32.5,2.366,19,0
7,198,100,19,24,42.8,1.238,64,0
2,190,103,41,579,27.4,2.169,31,0
5,96,102,25,393,27.4,2.127,37,0
4,190,108,17,66,37.9,0.866,39,1
1,185,61,47,232,30.2,2.089,28,0
7,72,106,21,108,24.1,0.189,44,1
11,172,115,33,145,30.2,1.531,20,0
13,131,65,37,110,21.8,0.652,34,0
5,120,55,17,227,22.8,0.389,61,1
1,128,90,37,110,31.5,0.285,45,0
11,187,96,45,469,29.3,1.771,46,1
4,165,68,35,33,42.7,0.916,38,1
0,182,117,17,368,27.8,1.839,64,0
11,131,108,37,594,33.7,0.257,33,1
9,121,86,37,553,35.1,0.857,54,1
5,81,101,46,424,18.4,1.395,37,1
12,108,108,45,517,35.9,1.998,23,1
11,199,115,36,281,22.8,0.865,36,1
8,182,55,26,319,43.9,1.602,22,1
0,170,87,18,442,22.0,2.226,20,1
10,182,78,42,314,29.2,1.578,18,1
10,150,72,29,376,20.3,0.659,55,0
14,182,62,22,288,44.9,0.159,32,0
9,71,49,37,436,31.6,2.188,51,1
11,199,108,38,276,34.1,0.151,52,0
11,123,73,22,581,19.8,2.199,66,1
14,156,91,44,502,38.2,1.369,42,1
13,198,49,15,515,23.7,2.354,41,0
13,195,58,27,349,42.2,2.017,29,1
14,199,97,14,445,23.5,2.495,35,1
13,122,40,34,349,23.1,0.942,32,1
2,137,108,11,136,19.0,1.941,61,1
11,192,43,19,242,30.7,1.065,33,0
6,107,55,39,542,33.3,1.252,44,1
3,93,63,14,39,19.8,1.606,25,1
8,138,119,42,228,38.9,2.197,63,1
2,185,41,10,547,30.2,2.462,42,0
4,167,71,27,222,32.2,1.944,34,1
2,166,63,41,5,29.9,1.103,46,0
6,193,51,20,321,28.8,1.111,25,0
4,139,89,30,467,33.1,1.87,32,1
8,162,74,35,74,22.2,0.673,44,1
6,72,72,34,3,22.9,0.365,63,0
1,159,72,31,117,41.3,0.951,67,1
3,165,100,36,503,43.5,0.789,69,0
8,121,90,22,573,28.1,0.811,24,0
11,197,82,42,440,25.3,0.661,20,1
13,108,51,43,193,35.4,0.201,46,1
1,151,106,44,391,29.0,0.143,61,0
9,173,104,10,25,18.7,2.471,41,0
8,198,72,30,434,22.2,1.127,20,0
9,80,79,15,172,37.3,1.022,37,0
4,111,113,37,299,35.8,1.731,63,1
1,168,82,26,325,18.7,0.624,32,1
3,76,83,14,409,24.0,2.38,42,1
11,159,68,40,117,24.2,1.987,47,1
14,181,52,14,224,36.1,0.315,31,1
11,129,51,47,531,18.5,1.102,24,0
6,182,85,12,112,20.8,2.21,34,1
11,71,41,32,139,39.6,2.367,61,1
12,198,74,46,558,22.8,1.222,37,1
7,117,47,46,0,35.6,1.572,53,1
14,106,65,19,89,24.4,0.501,26,1
2,78,113,19,319,20.7,2.479,55,0
13,168,73,28,549,24.6,0.656,65,0
0,117,46,26,138,37.5,2.363,39,0
3,123,107,30,510,41.1,1.659,19,1
1,189,97,23,258,40.4,1.559,41,0
7,185,114,18,9,28.7,1.33,25,0
3,144,68,10,260,36.0,0.654,28,1
1,182,75,22,171,23.5,0.524,30,1
13,173,60,13,12,25.9,0.629,34,1
5,153,75,10,167,42.2,0.547,25,0
5,181,49,49,595,18.4,1.971,38,0
9,168,112,41,320,20.3,0.94,35,1
3,162,63,43,318,23.6,0.239,47,1
5,197,103,37,328,18.7,2.426,35,0
12,179,88,40,549,22.9,2.221,63,0
14,151,75,17,418,33.7,2.327,64,1
1,123,63,48,94,29.4,2.488,48,1
9,137,62,35,580,42.1,0.517,55,0
11,102,101,43,317,40.1,1.051,49,1
1,90,76,12,235,27.2,1.92,28,0
9,117,51,21,333,25.0,1.77,62,0
13,197,94,10,375,28.3,0.469,42,0
3,197,52,14,458,33.9,2.058,50,0
13,102,62,39,136,25.2,0.639,55,1
14,184,69,39,587,34.9,0.637,58,1
14,188,56,26,546,29.1,1.389,25,0
7,91,101,32,512,32.9,1.523,28,0
13,107,52,24,295,29.8,1.492,39,0
6,178,98,46,319,26.0,0.32,68,0
11,120,58,30,571,43.6,2.206,41,0
8,77,88,23,348,38.6,0.737,58,1
13,96,51,11,228,21.8,0.411,34,1
7,96,100,20,269,41.4,2.233,29,1
4,90,58,48,290,31.2,2.394,63,1
12,99,115,47,164,42.2,2.169,49,0
1,166,48,43,4,39.6,2.043,32,0
14,97,110,47,378,29.5,1.673,33,0
4,180,67,43,77,18.6,1.422,39,0
7,130,117,27,409,25.3,0.309,45,0
9,117,91,39,573,32.6,1.08,37,1
8,73,55,24,131,35.1,0.994,50,0
11,104,108,36,41,25.0,0.723,27,1
11,118,51,43,88,21.8,1.836,57,1
11,86,64,47,401,40.5,1.29,49,1
12,115,91,42,295,44.6,0.295,67,1
8,186,92,33,294,32.2,0.628,50,0
12,75,62,24,525,22.6,1.74,19,1
14,168,55,39,421,25.4,0.283,50,1
12,193,96,26,22,18.5,2.143,50,1
0,106,78,14,574,42.7,1.288,58,0
8,93,92,38,142,21.2,1.253,30,0
6,162,81,13,144,33.6,1.522,47,0
8,115,97,19,224,25.4,2.079,50,1
7,164,78,26,502,33.0,0.935,30,1
0,168,53,19,103,35.6,1.727,69,0
11,185,44,26,38,40.4,1.458,43,1
7,136,74,29,197,23.6,0.741,39,0
7,197,114,33,578,18.3,2.209,56,0
14,87,57,14,390,21.7,2.014,19,1
10,94,115,43,178,42.3,1.68,55,1
2,123,48,15,553,41.6,2.141,56,1
0,127,113,11,447,34.1,2.182,43,1
7,136,97,22,499,34.2,1.8,63,0
2,173,56,20,14,36.0,2.109,67,0
2,93,46,32,288,22.7,1.774,32,0
0,183,85,25,477,42.7,1.732,51,0
10,101,52,40,230,29.3,1.585,59,0
4,155,79,20,419,28.3,1.907,61,1
9,196,81,25,156,32.0,0.481,66,0
6,199,48,17,549,19.3,2.214,57,0
9,86,89,13,440,22.5,2.192,28,0
8,173,66,49,96,37.9,0.17,20,1
11,112,105,13,410,20.2,2.082,23,1
6,108,44,34,288,34.3,0.409,26,1
8,95,68,12,253,24.6,0.904,23,1
7,168,76,41,195,28.5,1.884,26,0
11,119,77,12,597,25.8,0.486,56,0
1,82,47,36,521,27.6,2.063,48,1
0,129,104,38,516,37.4,2.097,49,1
6,126,56,41,117,26.0,1.318,58,0
6,105,110,28,158,33.3,0.115,67,0
13,89,84,30,227,30.9,0.789,29,1
7,134,43,14,215,35.9,1.581,25,1
4,77,75,27,14,43.3,2.455,25,1
2,184,109,37,156,37.8,1.616,67,1
11,161,70,31,260,23.8,0.724,45,0
7,167,58,30,156,18.8,1.622,37,0
5,135,100,15,46,25.1,1.396,64,0
10,101,93,10,323,34.1,1.972,24,1
2,155,78,14,129,19.4,0.357,28,1
0,120,113,21,410,31.4,1.926,37,0
2,132,58,35,94,34.1,1.399,33,0
4,194,78,43,291,27.0,2.411,21,0
14,127,106,23,282,38.8,0.92,23,0
13,127,84,35,68,20.9,1.618,39,1
2,155,52,36,10,20.0,2.337,43,0
0,118,97,18,253,37.7,0.346,20,0
4,139,59,35,549,31.4,2.349,58,0
9,84,111,31,133,36.6,1.751,59,0
6,123,100,39,22,29.7,0.263,31,0
14,170,78,26,345,24.7,0.822,28,0
13,77,40,35,301,40.1,1.8,35,0
6,122,42,45,362,39.6,0.262,29,1
10,129,116,10,395,36.8,1.497,40,1
8,177,101,17,217,25.3,0.93,29,1
14,74,102,44,140,33.9,1.59,30,1
14,172,64,24,209,27.7,0.21,42,0
9,75,95,31,459,20.5,2.192,62,1
9,178,72,23,355,42.8,2.436,68,1
11,185,77,35,323,21.7,2.425,36,0
12,163,45,37,132,43.7,1.899,63,0
2,116,97,32,126,30.0,0.412,53,1
14,168,83,23,105,23.0,1.92,62,1
6,124,84,33,377,32.6,0.159,25,0
0,121,71,11,50,41.6,0.153,46,1
3,82,84,35,28,37.8,0.877,49,0
12,183,100,23,461,39.8,1.273,68,0
3,193,86,16,295,35.8,1.949,61,0
13,175,60,12,296,36.7,1.74,35,0
'''
|
|
583
|
+
|
|
584
|
+
# Usage examples:
if __name__ == "__main__":
    # Demo: print the first three stored programs, with a divider line
    # between consecutive programs (same output as three manual stanzas).
    for number in (1, 2, 3):
        if number > 1:
            print("\n" + "="*50 + "\n")
        # Get program as text
        print(f"Program {number}:")
        print(getattr(fdsa, f"program_{number}")())
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
programs={"ml":{
|
|
2
|
+
1:'''import pandas as pd
|
|
3
|
+
from sklearn.model_selection import train_test_split
|
|
4
|
+
from sklearn.tree import DecisionTreeClassifier, plot_tree
|
|
5
|
+
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
|
|
6
|
+
import matplotlib.pyplot as plt
|
|
7
|
+
|
|
8
|
+
# Load dataset
|
|
9
|
+
data = pd.read_csv("iris.csv")
|
|
10
|
+
|
|
11
|
+
print("Dataset preview:\n", data.head(), "\n")
|
|
12
|
+
|
|
13
|
+
# Features & target
|
|
14
|
+
X = data.drop("species", axis=1)
|
|
15
|
+
y, labels = pd.factorize(data["species"])
|
|
16
|
+
|
|
17
|
+
# Split data
|
|
18
|
+
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
|
|
19
|
+
|
|
20
|
+
# Model
|
|
21
|
+
clf = DecisionTreeClassifier(criterion="gini", max_depth=5, random_state=42)
|
|
22
|
+
clf.fit(X_train, y_train)
|
|
23
|
+
|
|
24
|
+
# Prediction
|
|
25
|
+
y_pred = clf.predict(X_test)
|
|
26
|
+
|
|
27
|
+
# Evaluation
|
|
28
|
+
print("Accuracy:", accuracy_score(y_test, y_pred))
|
|
29
|
+
print("\nClassification Report:\n", classification_report(y_test, y_pred))
|
|
30
|
+
print("\nConfusion Matrix:\n", confusion_matrix(y_test, y_pred))
|
|
31
|
+
|
|
32
|
+
# New samples prediction
|
|
33
|
+
new_samples = pd.DataFrame([
|
|
34
|
+
[5.1, 3.5, 1.4, 0.2],
|
|
35
|
+
[6.7, 3.1, 4.7, 1.5],
|
|
36
|
+
[7.5, 3.6, 6.1, 2.5]
|
|
37
|
+
], columns=X.columns)
|
|
38
|
+
|
|
39
|
+
predictions = clf.predict(new_samples)
|
|
40
|
+
species_predictions = [labels[p] for p in predictions]
|
|
41
|
+
|
|
42
|
+
print("\nPredictions:", species_predictions)
|
|
43
|
+
|
|
44
|
+
# Tree Visualization
|
|
45
|
+
plt.figure(figsize=(20, 10))
|
|
46
|
+
plot_tree(clf, feature_names=X.columns, class_names=labels, filled=True)
|
|
47
|
+
plt.title("Decision Tree")
|
|
48
|
+
plt.show()''',
|
|
49
|
+
2:'''#Linearity Visualization
|
|
50
|
+
import numpy as np
|
|
51
|
+
import matplotlib.pyplot as plt
|
|
52
|
+
from sklearn import datasets
|
|
53
|
+
|
|
54
|
+
iris = datasets.load_iris()
|
|
55
|
+
X = iris.data
|
|
56
|
+
y = iris.target
|
|
57
|
+
|
|
58
|
+
# Plot
|
|
59
|
+
plt.scatter(X[:50, 0], X[:50, 2], color='black', marker='x', label='setosa')
|
|
60
|
+
plt.scatter(X[50:100, 0], X[50:100, 2], color='green', marker='s', label='versicolor')
|
|
61
|
+
|
|
62
|
+
plt.xlabel('Sepal Length')
|
|
63
|
+
plt.ylabel('Petal Length')
|
|
64
|
+
plt.legend()
|
|
65
|
+
plt.show()''',
|
|
66
|
+
3:'''#Non-Linearity
|
|
67
|
+
import numpy as np
|
|
68
|
+
import matplotlib.pyplot as plt
|
|
69
|
+
from sklearn import datasets
|
|
70
|
+
|
|
71
|
+
iris = datasets.load_iris()
|
|
72
|
+
X = iris.data
|
|
73
|
+
y = iris.target
|
|
74
|
+
|
|
75
|
+
plt.scatter(X[50:100, 0], X[50:100, 2], color='black', marker='x', label='versicolor')
|
|
76
|
+
plt.scatter(X[100:150, 0], X[100:150, 2], color='red', marker='o', label='virginica')
|
|
77
|
+
|
|
78
|
+
plt.xlabel('Sepal Length')
|
|
79
|
+
plt.ylabel('Petal Length')
|
|
80
|
+
plt.legend()
|
|
81
|
+
plt.show()''',
|
|
82
|
+
4:'''#Spam Classification (SVM)
|
|
83
|
+
import pandas as pd
|
|
84
|
+
from sklearn.feature_extraction.text import TfidfVectorizer
|
|
85
|
+
from sklearn.model_selection import train_test_split
|
|
86
|
+
from sklearn.svm import SVC
|
|
87
|
+
from sklearn.metrics import classification_report, accuracy_score
|
|
88
|
+
|
|
89
|
+
# Load dataset
|
|
90
|
+
data = pd.read_csv("spam.csv", delimiter='\t', encoding='latin-1')
|
|
91
|
+
|
|
92
|
+
# Keep required columns
|
|
93
|
+
data = data[['Type', 'Message']]
|
|
94
|
+
data.columns = ['label', 'email']
|
|
95
|
+
|
|
96
|
+
# Encode labels
|
|
97
|
+
data['label'] = data['label'].map({'ham': 0, 'spam': 1})
|
|
98
|
+
|
|
99
|
+
# Vectorization
|
|
100
|
+
vectorizer = TfidfVectorizer(stop_words='english')
|
|
101
|
+
X = vectorizer.fit_transform(data['email'])
|
|
102
|
+
y = data['label']
|
|
103
|
+
|
|
104
|
+
# Split
|
|
105
|
+
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
|
|
106
|
+
|
|
107
|
+
# Model
|
|
108
|
+
model = SVC(kernel='linear')
|
|
109
|
+
model.fit(X_train, y_train)
|
|
110
|
+
|
|
111
|
+
# Prediction
|
|
112
|
+
y_pred = model.predict(X_test)
|
|
113
|
+
|
|
114
|
+
# Evaluation
|
|
115
|
+
print(classification_report(y_test, y_pred))
|
|
116
|
+
print("Accuracy:", accuracy_score(y_test, y_pred))''',
|
|
117
|
+
5:'''#Face Detection
|
|
118
|
+
import cv2
|
|
119
|
+
|
|
120
|
+
face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades +
|
|
121
|
+
'haarcascade_frontalface_default.xml')
|
|
122
|
+
|
|
123
|
+
image = cv2.imread("image.jpg")
|
|
124
|
+
|
|
125
|
+
if image is None:
|
|
126
|
+
print("Image not found")
|
|
127
|
+
else:
|
|
128
|
+
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
|
|
129
|
+
|
|
130
|
+
faces = face_cascade.detectMultiScale(gray, 1.1, 5)
|
|
131
|
+
|
|
132
|
+
for (x, y, w, h) in faces:
|
|
133
|
+
cv2.rectangle(image, (x, y), (x+w, y+h), (255, 0, 0), 2)
|
|
134
|
+
|
|
135
|
+
cv2.imshow("Face Detection", image)
|
|
136
|
+
cv2.waitKey(0)
|
|
137
|
+
cv2.destroyAllWindows()
|
|
138
|
+
''',
|
|
139
|
+
6:'''#KNN
|
|
140
|
+
import pandas as pd
|
|
141
|
+
from sklearn.model_selection import train_test_split
|
|
142
|
+
from sklearn.preprocessing import LabelEncoder
|
|
143
|
+
from sklearn.neighbors import KNeighborsClassifier
|
|
144
|
+
from sklearn.metrics import accuracy_score
|
|
145
|
+
|
|
146
|
+
df = pd.read_csv("iris.csv")
|
|
147
|
+
|
|
148
|
+
le = LabelEncoder()
|
|
149
|
+
df["species"] = le.fit_transform(df["species"])
|
|
150
|
+
|
|
151
|
+
X = df.iloc[:, :-1]
|
|
152
|
+
y = df.iloc[:, -1]
|
|
153
|
+
|
|
154
|
+
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
|
|
155
|
+
|
|
156
|
+
model = KNeighborsClassifier(n_neighbors=5)
|
|
157
|
+
model.fit(X_train, y_train)
|
|
158
|
+
|
|
159
|
+
y_pred = model.predict(X_test)
|
|
160
|
+
|
|
161
|
+
print("Accuracy:", accuracy_score(y_test, y_pred))''',
|
|
162
|
+
7:'''#MNIST Neural Network
|
|
163
|
+
import tensorflow as tf
|
|
164
|
+
from tensorflow import keras
|
|
165
|
+
import numpy as np
|
|
166
|
+
import matplotlib.pyplot as plt
|
|
167
|
+
|
|
168
|
+
# Load dataset
|
|
169
|
+
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
|
|
170
|
+
|
|
171
|
+
# Normalize
|
|
172
|
+
x_train = x_train / 255.0
|
|
173
|
+
x_test = x_test / 255.0
|
|
174
|
+
|
|
175
|
+
# Flatten
|
|
176
|
+
x_train = x_train.reshape(-1, 28*28)
|
|
177
|
+
x_test = x_test.reshape(-1, 28*28)
|
|
178
|
+
|
|
179
|
+
# Model
|
|
180
|
+
model = keras.Sequential([
|
|
181
|
+
keras.layers.Dense(128, activation='relu', input_shape=(784,)),
|
|
182
|
+
keras.layers.Dense(64, activation='relu'),
|
|
183
|
+
keras.layers.Dense(10, activation='softmax')
|
|
184
|
+
])
|
|
185
|
+
|
|
186
|
+
# Compile
|
|
187
|
+
model.compile(optimizer='adam',
|
|
188
|
+
loss='sparse_categorical_crossentropy',
|
|
189
|
+
metrics=['accuracy'])
|
|
190
|
+
|
|
191
|
+
# Train
|
|
192
|
+
history = model.fit(x_train, y_train, epochs=10, batch_size=32,
|
|
193
|
+
validation_data=(x_test, y_test))
|
|
194
|
+
|
|
195
|
+
# Evaluate
|
|
196
|
+
loss, acc = model.evaluate(x_test, y_test)
|
|
197
|
+
print("Test Accuracy:", acc)
|
|
198
|
+
|
|
199
|
+
# Plot accuracy
|
|
200
|
+
plt.plot(history.history['accuracy'], label='train')
|
|
201
|
+
plt.plot(history.history['val_accuracy'], label='validation')
|
|
202
|
+
plt.legend()
|
|
203
|
+
plt.show()
|
|
204
|
+
|
|
205
|
+
# Prediction example
|
|
206
|
+
predictions = model.predict(x_test)
|
|
207
|
+
|
|
208
|
+
plt.imshow(x_test[0].reshape(28, 28), cmap='gray')
|
|
209
|
+
plt.title(f"Predicted: {np.argmax(predictions[0])}")
|
|
210
|
+
plt.show()''',
|
|
211
|
+
8:'''#K-Means Clustering
|
|
212
|
+
import pandas as pd
|
|
213
|
+
import numpy as np
|
|
214
|
+
from sklearn.cluster import KMeans
|
|
215
|
+
from sklearn.preprocessing import LabelEncoder
|
|
216
|
+
from sklearn.decomposition import PCA
|
|
217
|
+
from sklearn.metrics import accuracy_score
|
|
218
|
+
import matplotlib.pyplot as plt
|
|
219
|
+
|
|
220
|
+
# Load dataset
|
|
221
|
+
df = pd.read_csv('adult.csv')
|
|
222
|
+
|
|
223
|
+
# Clean data
|
|
224
|
+
df.replace(' ?', np.nan, inplace=True)
|
|
225
|
+
df.dropna(inplace=True)
|
|
226
|
+
|
|
227
|
+
# Encode categorical columns
|
|
228
|
+
le = LabelEncoder()
|
|
229
|
+
for col in df.select_dtypes(include=['object']).columns:
|
|
230
|
+
df[col] = le.fit_transform(df[col])
|
|
231
|
+
|
|
232
|
+
# Features & target
|
|
233
|
+
X = df.drop('income', axis=1)
|
|
234
|
+
y = df['income']
|
|
235
|
+
|
|
236
|
+
# Model
|
|
237
|
+
kmeans = KMeans(n_clusters=2, random_state=42)
|
|
238
|
+
df['predicted'] = kmeans.fit_predict(X)
|
|
239
|
+
|
|
240
|
+
# PCA for visualization
|
|
241
|
+
pca = PCA(n_components=2)
|
|
242
|
+
X_reduced = pca.fit_transform(X)
|
|
243
|
+
|
|
244
|
+
# Plot
|
|
245
|
+
plt.scatter(X_reduced[:, 0], X_reduced[:, 1], c=df['predicted'])
|
|
246
|
+
plt.title("K-Means Clustering")
|
|
247
|
+
plt.show()''',
|
|
248
|
+
9:'''#Random Forest
|
|
249
|
+
import pandas as pd
|
|
250
|
+
from sklearn.model_selection import train_test_split, GridSearchCV
|
|
251
|
+
from sklearn.feature_extraction.text import TfidfVectorizer
|
|
252
|
+
from sklearn.ensemble import RandomForestClassifier
|
|
253
|
+
from sklearn.metrics import accuracy_score, classification_report
|
|
254
|
+
|
|
255
|
+
# Load dataset
|
|
256
|
+
df = pd.read_csv('test.csv', encoding='ISO-8859-1')
|
|
257
|
+
|
|
258
|
+
# Encode sentiment
|
|
259
|
+
df['sentiment'] = df['sentiment'].apply(lambda x: 1 if x == 'positive' else 0)
|
|
260
|
+
|
|
261
|
+
X = df['text']
|
|
262
|
+
y = df['sentiment']
|
|
263
|
+
|
|
264
|
+
# Split
|
|
265
|
+
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
|
|
266
|
+
|
|
267
|
+
# Vectorization
|
|
268
|
+
vectorizer = TfidfVectorizer(max_features=5000)
|
|
269
|
+
X_train = vectorizer.fit_transform(X_train)
|
|
270
|
+
X_test = vectorizer.transform(X_test)
|
|
271
|
+
|
|
272
|
+
# Model
|
|
273
|
+
model = RandomForestClassifier()
|
|
274
|
+
|
|
275
|
+
# Grid Search
|
|
276
|
+
params = {
|
|
277
|
+
'n_estimators': [50, 100],
|
|
278
|
+
'max_depth': [None, 10],
|
|
279
|
+
'min_samples_split': [2, 5]
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
grid = GridSearchCV(model, params, cv=3)
|
|
283
|
+
grid.fit(X_train, y_train)
|
|
284
|
+
|
|
285
|
+
# Prediction
|
|
286
|
+
y_pred = grid.predict(X_test)
|
|
287
|
+
|
|
288
|
+
print("Accuracy:", accuracy_score(y_test, y_pred))
|
|
289
|
+
print(classification_report(y_test, y_pred))
|
|
290
|
+
print("Best Params:", grid.best_params_)''',
|
|
291
|
+
10:'''#Locally Weighted Regression vs Linear Regression
|
|
292
|
+
import numpy as np
|
|
293
|
+
import pandas as pd
|
|
294
|
+
import matplotlib.pyplot as plt
|
|
295
|
+
from sklearn.linear_model import LinearRegression
|
|
296
|
+
from sklearn.metrics import mean_squared_error
|
|
297
|
+
|
|
298
|
+
# Gaussian weights
|
|
299
|
+
def gaussian_weights(X, x_query, tau):
|
|
300
|
+
return np.exp(-((X - x_query) ** 2) / (2 * tau ** 2))
|
|
301
|
+
|
|
302
|
+
# LWR
|
|
303
|
+
def lwr(X, y, x_query, tau):
|
|
304
|
+
W = np.diag(gaussian_weights(X, x_query, tau))
|
|
305
|
+
X_b = np.c_[np.ones(len(X)), X]
|
|
306
|
+
theta = np.linalg.inv(X_b.T @ W @ X_b) @ (X_b.T @ W @ y)
|
|
307
|
+
return np.array([1, x_query]) @ theta
|
|
308
|
+
|
|
309
|
+
# Load dataset
|
|
310
|
+
data = pd.read_csv('tips.csv')
|
|
311
|
+
X = data['total_bill'].values
|
|
312
|
+
y = data['tip'].values
|
|
313
|
+
|
|
314
|
+
# Predictions
|
|
315
|
+
X_test = np.linspace(min(X), max(X), 100)
|
|
316
|
+
y_lwr = np.array([lwr(X, y, x, 10) for x in X_test])
|
|
317
|
+
|
|
318
|
+
# Linear Regression
|
|
319
|
+
lr = LinearRegression()
|
|
320
|
+
lr.fit(X.reshape(-1,1), y)
|
|
321
|
+
y_lr = lr.predict(X_test.reshape(-1,1))
|
|
322
|
+
|
|
323
|
+
# Plot
|
|
324
|
+
plt.scatter(X, y)
|
|
325
|
+
plt.plot(X_test, y_lwr, color='red', label='LWR')
|
|
326
|
+
plt.plot(X_test, y_lr, linestyle='dashed', label='Linear')
|
|
327
|
+
plt.legend()
|
|
328
|
+
plt.show()''',
|
|
329
|
+
11:'''#Bayesian Network
|
|
330
|
+
import pandas as pd
|
|
331
|
+
from pgmpy.models import BayesianNetwork
|
|
332
|
+
from pgmpy.estimators import BayesianEstimator
|
|
333
|
+
from pgmpy.inference import VariableElimination
|
|
334
|
+
|
|
335
|
+
# Load dataset
|
|
336
|
+
df = pd.read_csv('heart.csv')
|
|
337
|
+
df.dropna(inplace=True)
|
|
338
|
+
|
|
339
|
+
# Model
|
|
340
|
+
model = BayesianNetwork([
|
|
341
|
+
('age', 'target'),
|
|
342
|
+
('chol', 'target')
|
|
343
|
+
])
|
|
344
|
+
|
|
345
|
+
# Train
|
|
346
|
+
model.fit(df, estimator=BayesianEstimator)
|
|
347
|
+
|
|
348
|
+
# Inference
|
|
349
|
+
infer = VariableElimination(model)
|
|
350
|
+
|
|
351
|
+
result = infer.query(variables=['target'], evidence={
|
|
352
|
+
'age': 63,
|
|
353
|
+
'chol': 233
|
|
354
|
+
})
|
|
355
|
+
|
|
356
|
+
print(result)''',
|
|
357
|
+
12:'''#Credit Card Fraud Detection
|
|
358
|
+
import pandas as pd
|
|
359
|
+
import tensorflow as tf
|
|
360
|
+
from tensorflow import keras
|
|
361
|
+
from sklearn.model_selection import train_test_split
|
|
362
|
+
from sklearn.preprocessing import StandardScaler
|
|
363
|
+
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
|
|
364
|
+
|
|
365
|
+
# Load dataset
|
|
366
|
+
df = pd.read_csv("credit.csv")
|
|
367
|
+
df.dropna(inplace=True)
|
|
368
|
+
|
|
369
|
+
X = df.drop("Class", axis=1)
|
|
370
|
+
y = df["Class"]
|
|
371
|
+
|
|
372
|
+
# Scale
|
|
373
|
+
scaler = StandardScaler()
|
|
374
|
+
X = scaler.fit_transform(X)
|
|
375
|
+
|
|
376
|
+
# Split
|
|
377
|
+
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
|
|
378
|
+
|
|
379
|
+
# Model
|
|
380
|
+
model = keras.Sequential([
|
|
381
|
+
keras.layers.Dense(64, activation='relu', input_shape=(X.shape[1],)),
|
|
382
|
+
keras.layers.Dense(32, activation='relu'),
|
|
383
|
+
keras.layers.Dense(16, activation='relu'),
|
|
384
|
+
keras.layers.Dense(1, activation='sigmoid')
|
|
385
|
+
])
|
|
386
|
+
|
|
387
|
+
# Compile
|
|
388
|
+
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
|
|
389
|
+
|
|
390
|
+
# Train
|
|
391
|
+
model.fit(X_train, y_train, epochs=2, batch_size=32)
|
|
392
|
+
|
|
393
|
+
# Predict
|
|
394
|
+
y_pred = (model.predict(X_test) > 0.5).astype(int)
|
|
395
|
+
|
|
396
|
+
# Metrics
|
|
397
|
+
print("Accuracy:", accuracy_score(y_test, y_pred))
|
|
398
|
+
print("Precision:", precision_score(y_test, y_pred))
|
|
399
|
+
print("Recall:", recall_score(y_test, y_pred))
|
|
400
|
+
print("F1:", f1_score(y_test, y_pred))'''
|
|
401
|
+
}}
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
datasets = {
|
|
405
|
+
"iris":'''sepal_length,sepal_width,petal_length,petal_width,species
|
|
406
|
+
5.1,3.5,1.4,0.2,setosa
|
|
407
|
+
4.9,3.0,1.4,0.2,setosa
|
|
408
|
+
5.0,3.6,1.4,0.2,setosa
|
|
409
|
+
6.0,2.2,4.0,1.0,versicolor
|
|
410
|
+
5.5,2.3,4.0,1.3,versicolor
|
|
411
|
+
6.5,2.8,4.6,1.5,versicolor
|
|
412
|
+
6.3,3.3,6.0,2.5,virginica
|
|
413
|
+
5.8,2.7,5.1,1.9,virginica
|
|
414
|
+
7.1,3.0,5.9,2.1,virginica''',
|
|
415
|
+
"spam":'''Type Message
|
|
416
|
+
ham Hello how are you
|
|
417
|
+
spam Win money now!!!
|
|
418
|
+
ham Are you coming today?
|
|
419
|
+
spam Claim your free prize
|
|
420
|
+
ham Let's meet tomorrow
|
|
421
|
+
spam Free entry in contest''',
|
|
422
|
+
"adult":'''age,workclass,education,marital-status,occupation,race,gender,hours-per-week,income
|
|
423
|
+
39,State-gov,Bachelors,Never-married,Adm-clerical,White,Male,40,<=50K
|
|
424
|
+
50,Self-emp,HS-grad,Married,Exec-managerial,White,Male,60,>50K
|
|
425
|
+
38,Private,HS-grad,Divorced,Handlers-cleaners,White,Male,40,<=50K
|
|
426
|
+
53,Private,11th,Married,Handlers-cleaners,Black,Male,40,<=50K
|
|
427
|
+
28,Private,Bachelors,Married,Prof-specialty,Black,Female,40,>50K''',
|
|
428
|
+
"test":'''text,sentiment
|
|
429
|
+
I love this product,positive
|
|
430
|
+
This is very bad,negative
|
|
431
|
+
Amazing experience,positive
|
|
432
|
+
Worst service ever,negative
|
|
433
|
+
Very happy with this,positive
|
|
434
|
+
Not good at all,negative''',
|
|
435
|
+
"tips":'''total_bill,tip
|
|
436
|
+
10.34,1.66
|
|
437
|
+
20.45,3.50
|
|
438
|
+
15.20,2.50
|
|
439
|
+
25.00,4.00
|
|
440
|
+
30.50,5.50
|
|
441
|
+
18.75,3.00''',
|
|
442
|
+
"heart":'''age,chol,target
|
|
443
|
+
63,233,1
|
|
444
|
+
37,250,1
|
|
445
|
+
41,204,1
|
|
446
|
+
56,236,0
|
|
447
|
+
57,354,1
|
|
448
|
+
44,263,0''',
|
|
449
|
+
"credit":'''Time,V1,V2,V3,Amount,Class
|
|
450
|
+
0,-1.35,-0.07,2.53,149.62,0
|
|
451
|
+
1,1.19,0.26,0.16,2.69,0
|
|
452
|
+
2,-1.36,-1.34,1.77,378.66,0
|
|
453
|
+
3,-0.97,-0.18,1.79,123.50,1
|
|
454
|
+
4,1.23,0.14,0.05,69.99,0'''
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
def get_code(sub,no):
|
|
458
|
+
return programs[sub][no]
|
|
459
|
+
|
|
460
|
+
def get_data(title):
|
|
461
|
+
return datasets[title]
|
|
462
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
Code2Text
|
code2text-0.2.1/PKG-INFO
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "Code2Text"
|
|
3
|
+
version = "0.2.1"
|
|
4
|
+
description = "My reusable python module"
|
|
5
|
+
authors = [{name = "Unknown"}]
|
|
6
|
+
readme = "README.md"
|
|
7
|
+
requires-python = ">=3.7"
|
|
8
|
+
|
|
9
|
+
[build-system]
|
|
10
|
+
requires = ["setuptools"]
|
|
11
|
+
build-backend = "setuptools.build_meta"
|