pyerualjetwork 2.2.3__py3-none-any.whl → 2.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plan_bi/__init__.py +1 -1
- plan_bi/plan_bi.py +11 -160
- plan_di/__init__.py +1 -1
- plan_di/plan_di.py +6 -118
- {pyerualjetwork-2.2.3.dist-info → pyerualjetwork-2.2.5.dist-info}/METADATA +1 -1
- pyerualjetwork-2.2.5.dist-info/RECORD +8 -0
- pyerualjetwork-2.2.3.dist-info/RECORD +0 -8
- {pyerualjetwork-2.2.3.dist-info → pyerualjetwork-2.2.5.dist-info}/WHEEL +0 -0
- {pyerualjetwork-2.2.3.dist-info → pyerualjetwork-2.2.5.dist-info}/top_level.txt +0 -0
plan_bi/__init__.py
CHANGED
@@ -2,4 +2,4 @@
 
 # Bu dosya, plan modülünün ana giriş noktasıdır.
 
-from .plan_bi import auto_balancer, normalization, Softmax, Sigmoid, Relu,
+from .plan_bi import auto_balancer, normalization, Softmax, Sigmoid, Relu, weight_identification, fex, fit, evaluate, save_model, load_model, predict_model_ssd, predict_model_ram, get_weights, get_df, get_preds, get_acc, get_pot, synthetic_augmentation, standard_scaler, multiple_evaluate, encode_one_hot, split, metrics, decode_one_hot, roc_curve, confusion_matrix, plot_evaluate
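A minimal illustrative snippet, assuming the 2.2.5 wheel is installed, showing that the names in the export line above can be pulled straight from the plan_bi package rather than from the plan_bi.plan_bi module (only names from that export line are used):

    from plan_bi import fit, evaluate, normalization, standard_scaler, encode_one_hot, get_acc

    # Each re-exported name resolves to the same object as plan_bi.plan_bi.<name>.
    print(fit.__module__)   # plan_bi.plan_bi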
plan_bi/plan_bi.py
CHANGED
@@ -59,7 +59,6 @@ def fit(
     x_train_size = len(x_train[0])
 
     W = weight_identification(len(layers) - 1,len(class_count),neurons,x_train_size)
-    Divides, Piece = synaptic_dividing(len(class_count),W)
     trained_W = [1] * len(W)
     print(Fore.GREEN + "Train Started with 0 ERROR" + Style.RESET_ALL,)
     start_time = time.time()
@@ -72,19 +71,6 @@ def fit(
             print(Fore.RED +"ERROR304: All input matrices or vectors in x_train list, must be same size. from: fit",infoPLAN + Style.RESET_ALL)
             return 'e'
 
-
-        for Ulindex, Ul in enumerate(class_count):
-
-            if Ul == y_train[index]:
-                for Windex, w in enumerate(W):
-                    for i, ul in enumerate(Ul):
-                        if ul == 1.0:
-                            k = i
-
-                    cs = Divides[int(k)][Windex][0]
-
-
-                    W[Windex] = synaptic_pruning(w, cs, 'row', int(k), len(class_count), Piece[Windex], True)
 
         neural_layer = inp
 
@@ -94,7 +80,7 @@ def fit(
             neural_layer = normalization(neural_layer)
 
             if Layer == 'fex':
-                W[Lindex] = fex(neural_layer, W[Lindex], activation_potential,
+                W[Lindex] = fex(neural_layer, W[Lindex], activation_potential, True, y)
 
 
         for i, w in enumerate(W):
@@ -173,114 +159,12 @@ def weight_identification(
     W[len(W) - 1] = np.eye(class_count)
 
     return W
-
-def synaptic_pruning(
-    w, # list[num]: Weight matrix of the neural network.
-    cs, # int: cs = cut_start, Synaptic connections between neurons.
-    key, # int: key for identifying synaptic connections.
-    Class, # int: Class label for the current training instance.
-    class_count, # int: Total number of classes in the dataset.
-    piece, # int: Which set of neurons will information be transferred to?
-    is_training # bool: Flag indicating if the function is called during training (True or False).
-
-) -> str:
-    infoPruning = """
-    Performs synaptic pruning in a neural network model.
-
-    Args:
-        w (list[num]): Weight matrix of the neural network.
-        cs (int): Synaptic connections between neurons.
-        key (str): key for identifying synaptic row or col connections.
-        Class (int): Class label for the current training instance.
-        class_count (int): Total number of classes in the dataset.
-        piece (int): Which set of neurons will information be transferred to?
-        is_training (bool): Flag indicating if the function is called during training (True or False).
-
-    Returns:
-        numpy array: Weight matrix.
-    """
-
-
-    Class += 1 # because index start 0
-
-    if Class != 1:
-
-        ce = cs / Class # ce(cut_end) = cs(cut_start) / current_class
-
-        if is_training == True:
-
-            p = piece
-
-            for i in range(Class - 3):
-
-                piece+=p
-
-            if Class!= 2:
-                ce += piece
-
-            w[int(ce)-1::-1,:] = 0
-
-        w[cs:,:] = 0
-
-    else:
-
-        if key == 'row':
-
-            w[cs:,:] = 0
-
-        elif key == 'col':
-
-            w[:,cs] = 0
-
-        else:
-            print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning)
-            return 'e'
-
-    return w
-
-def synaptic_dividing(
-    class_count, # int: Total number of classes in the dataset.
-    W # list[num]: Weight matrix list of the neural network.
-) -> str:
-    """
-    Divides the synaptic weights of a neural network model based on class count.
-
-    Args:
-        class_count (int): Total number of classes in the dataset.
-        W (list[num]): Weight matrix of the neural network.
-
-    Returns:
-        list: a 3D list holds informations of divided net and list of neuron groups separated by classes.
-    """
-
-
-    Piece = [1] * len(W)
-    Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]
-
-
-    for i in range(len(W)):
-
-        Piece[i] = int(math.floor(W[i].shape[0] / class_count))
-
-    cs = 0
-    # j = Classes, i = Weights, [0] = CutStart.
-
-    for i in range(len(W)):
-        for j in range(class_count):
-            cs = cs + Piece[i]
-            Divides[j][i][0] = cs
-
-        j = 0
-        cs = 0
-
-    return Divides, Piece
 
 
 def fex(
     Input, # list[num]: Input data.
     w, # list[num]: Weight matrix of the neural network.
     activation_potential, # float: Threshold value for comparison.
-    piece, # int: Which set of neurons will information be transferred to?
     is_training, # bool: Flag indicating if the function is called during training (True or False).
     Class # if is during training then which class(label) ? is isnt then put None.
 ) -> tuple:
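For orientation, the per-class row partitioning that the deleted synaptic_dividing computed can be replayed in isolation. A minimal sketch, using a made-up single-layer 9x4 weight matrix and three classes; the loop body mirrors the removed code:

    import math
    import numpy as np

    W = [np.zeros((9, 4))]          # one layer with 9 neuron rows (toy shape)
    class_count = 3

    # Rows per class in each layer.
    Piece = [int(math.floor(w.shape[0] / class_count)) for w in W]

    # Cut-start row offset for every (class, layer) pair.
    Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]
    cs = 0
    for i in range(len(W)):
        for j in range(class_count):
            cs = cs + Piece[i]
            Divides[j][i][0] = cs   # class j owns rows up to this offset in layer i
        cs = 0

    print(Piece)    # [3]
    print(Divides)  # [[[3]], [[6]], [[9]]]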
@@ -300,55 +184,22 @@ def fex(
 
     if is_training == True:
 
-
-
-
-
-        w[Class,input_features_index] = Input[input_features_index]
+        Input[Input < activation_potential] = 0
+        Input[Input > activation_potential] = 1
+
+        w[Class,:] = Input
 
         return w
 
     else:
 
-
-
+        Input[Input < activation_potential] = 0
+        Input[Input > activation_potential] = 1
 
         neural_layer = np.dot(w, Input)
 
         return neural_layer
-
-def cat(
-    Input, # list[num]: Input data.
-    w, # list[num]: Weight matrix of the neural network.
-    is_training, # (bool): Flag indicating if the function is called during training (True or False).
-    Class # (int): if is during training then which class(label) ? is isnt then put None.
-) -> tuple:
-    """
-    Applies categorization process to the input data using synaptic pruning if specified.
-
-    Args:
-        Input (list[num]): Input data.
-        w (list[num]): Weight matrix of the neural network.
-        is_training (bool): Flag indicating if the function is called during training (True or False).
-        Class (int): if is during training then which class(label) ? is isnt then put None.
-    Returns:
-        tuple: A tuple containing the neural layer (vector) result and the possibly updated weight matrix.
-    """
-
-
-
-    if is_training == True:
-
-        w[Class,Class] += 1
-
-        return w
-
-    else:
-
-        neural_layer = np.dot(w, Input)
 
-        return neural_layer
-
 
 def normalization(
     Input # num: Input data to be normalized.
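To make the new branches concrete, here is a small self-contained sketch of the thresholding that 2.2.5's fex applies; fex_sketch and the toy data are illustrative, not part of the package, and values exactly equal to the activation potential are left untouched, mirroring the strict comparisons above:

    import numpy as np

    def fex_sketch(Input, w, activation_potential, is_training, Class):
        # Binarize the input against the activation potential.
        Input = np.asarray(Input, dtype=float).copy()
        Input[Input < activation_potential] = 0
        Input[Input > activation_potential] = 1

        if is_training:
            # During training the binarized sample is written into its class row.
            w[Class, :] = Input
            return w

        # At inference the binarized input is projected through the weights.
        return np.dot(w, Input)

    # Toy usage: 3 classes, 4 input features, activation potential 0.5.
    W = np.zeros((3, 4))
    W = fex_sketch([0.9, 0.1, 0.7, 0.2], W, 0.5, True, Class=0)
    scores = fex_sketch([0.8, 0.05, 0.6, 0.3], W, 0.5, False, Class=None)
    print(W[0])    # [1. 0. 1. 0.]
    print(scores)  # [2. 0. 0.]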
@@ -469,7 +320,7 @@ def evaluate(
             neural_layer = normalization(neural_layer)
 
             if layers[index] == 'fex':
-                neural_layer = fex(neural_layer, W[index], activation_potential,
+                neural_layer = fex(neural_layer, W[index], activation_potential, False, None)
             if layers[index] == 'cat':
                 neural_layer = np.dot(W[index], neural_layer)
 
@@ -912,7 +763,7 @@ def predict_model_ssd(Input, model_name, model_path):
             neural_layer = normalization(neural_layer)
 
             if layers[index] == 'fex':
-                neural_layer = fex(neural_layer, W[index], activation_potential,
+                neural_layer = fex(neural_layer, W[index], activation_potential, False, None)
             if layers[index] == 'cat':
                 neural_layer = np.dot(W[index], neural_layer)
    except:
@@ -956,7 +807,7 @@ def predict_model_ram(Input, activation_potential, scaler, W):
             neural_layer = normalization(neural_layer)
 
             if layers[index] == 'fex':
-                neural_layer = fex(neural_layer, W[index], activation_potential,
+                neural_layer = fex(neural_layer, W[index], activation_potential, False, None)
             if layers[index] == 'cat':
                 neural_layer = np.dot(W[index], neural_layer)
 
@@ -1492,4 +1343,4 @@ def get_acc():
 
 def get_pot():
 
-    return 1
+    return 1
plan_di/__init__.py
CHANGED
@@ -2,4 +2,4 @@
 
 # Bu dosya, plan modülünün ana giriş noktasıdır.
 
-from .plan_di import auto_balancer, normalization, Softmax, Sigmoid, Relu,
+from .plan_di import auto_balancer, normalization, Softmax, Sigmoid, Relu, weight_identification, fex, fit, evaluate, save_model, load_model, predict_model_ssd, predict_model_ram, get_weights, get_df, get_preds, get_acc, synthetic_augmentation, standard_scaler, multiple_evaluate, encode_one_hot, split, metrics, decode_one_hot, roc_curve, confusion_matrix, plot_evaluate
plan_di/plan_di.py
CHANGED
@@ -1,3 +1,6 @@
+
+import time
+from colorama import Fore
 """
 Created on Thu Jun 12 00:00:00 2024
 
@@ -59,7 +62,6 @@ def fit(
     W = weight_identification(
         len(layers) - 1, len(class_count), neurons, x_train_size)
 
-    #Divides, Piece = synaptic_dividing(len(class_count), W)
     trained_W = [1] * len(W)
    print(Fore.GREEN + "Train Started with 0 ERROR" + Style.RESET_ALL)
     start_time = time.time()
@@ -72,20 +74,7 @@ def fit(
             print(Fore.RED + "ERROR304: All input matrices or vectors in x_train list, must be same size. from: fit",
                   infoPLAN + Style.RESET_ALL)
             return 'e'
-        """
-        for Ulindex, Ul in enumerate(class_count):
-
-            if Ul == y_train[index]:
-                for Windex, w in enumerate(W):
-                    for i, ul in enumerate(Ul):
-                        if ul == 1.0:
-                            k = i
 
-            cs = Divides[int(k)][Windex][0]
-
-            W[Windex] = synaptic_pruning(w, cs, 'row', int(
-                k), len(class_count), Piece[Windex], True)
-        """
         neural_layer = inp
 
         for Lindex, Layer in enumerate(layers):
@@ -169,112 +158,10 @@ def weight_identification(
     return W
 
 
-def synaptic_pruning(
-    w, # num: Weight matrix of the neural network.
-    cs, # int: cs = cut_start, Synaptic connections between neurons.
-    key, # int: key for identifying synaptic connections.
-    Class, # int: Class label for the current training instance.
-    class_count, # int: Total number of classes in the dataset.
-    piece, # int: Which set of neurons will information be transferred to?
-    # bool: Flag indicating if the function is called during training (True or False).
-    is_training
-
-) -> str:
-    infoPruning = """
-    Performs synaptic pruning in a neural network model.
-
-    Args:
-        w (list[num]): Weight matrix of the neural network.
-        cs (list[num]): Synaptic connections between neurons.
-        key (str): key for identifying synaptic row or col connections.
-        Class (int): Class label for the current training instance.
-        class_count (int): Total number of classes in the dataset.
-        piece (int): Which set of neurons will information be transferred to?
-        is_training (bool): Flag indicating if the function is called during training (True or False).
-
-    Returns:
-        numpy array: Weight matrix.
-    """
-
-    Class += 1 # because index start 0
-
-    if Class != 1:
-
-        ce = cs / Class # ce(cut_end) = cs(cut_start) / current_class
-
-        if is_training == True:
-
-            p = piece
-
-            for i in range(Class - 3):
-
-                piece += p
-
-            if Class != 2:
-                ce += piece
-
-            w[int(ce)-1::-1, :] = 0
-
-        w[cs:, :] = 0
-
-    else:
-
-        if key == 'row':
-
-            w[cs:, :] = 0
-
-        elif key == 'col':
-
-            w[:, cs] = 0
-
-        else:
-            print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning)
-            return 'e'
-
-    return w
-
-
-def synaptic_dividing(
-    class_count, # int: Total number of classes in the dataset.
-    W # list[num]: Weight matrix of the neural network.
-) -> str:
-    """
-    Divides the synaptic weights of a neural network model based on class count.
-
-    Args:
-        class_count (int): Total number of classes in the dataset.
-        W (list[num]): Weight matrix of the neural network.
-
-    Returns:
-        list: a 3D list holds informations of divided net and list of neuron groups separated by classes.
-    """
-
-    Piece = [1] * len(W)
-
-    Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]
-
-    for i in range(len(W)):
-
-        Piece[i] = int(math.floor(W[i].shape[0] / class_count))
-
-    cs = 0
-
-    for i in range(len(W)):
-        for j in range(class_count):
-            cs = cs + Piece[i]
-            Divides[j][i][0] = cs
-
-        j = 0
-        cs = 0
-
-    return Divides, Piece
-
-
 def fex(
     Input, # list[num]: Input data.
     w, # num: Weight matrix of the neural network.
-    # bool: Flag indicating if the function is called during training (True or False).
-    is_training,
+    is_training, # bool: Flag indicating if the function is called during training (True or False).
     Class # int: Which class is, if training.
 ) -> tuple:
     """
@@ -1428,4 +1315,5 @@ def get_preds():
 
 def get_acc():
 
-    return 2
+    return 2
+
pyerualjetwork-2.2.5.dist-info/RECORD
ADDED
@@ -0,0 +1,8 @@
+plan_bi/__init__.py,sha256=rzDe7yWvNlwDVE6xSw8Qk51itcxT6EDBj7iiOeycxMw,475
+plan_bi/plan_bi.py,sha256=oUEZIGG8Y1nTNED33kAy2RrlW5LQIFPnwxwM8_O3QOQ,45419
+plan_di/__init__.py,sha256=Omxc07PXPQZOrXBD3PJQT6sPdni6NMykyiQgKVL_IZ0,466
+plan_di/plan_di.py,sha256=HQqc_jheT9x8NkJ9_Fvqa8EN4ehkExVy3RqvqAwPwgU,42874
+pyerualjetwork-2.2.5.dist-info/METADATA,sha256=X7c4sZ142CbFtk_c7juhftpO1yjzwHCa9Jt4PiQuwmQ,325
+pyerualjetwork-2.2.5.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+pyerualjetwork-2.2.5.dist-info/top_level.txt,sha256=aaXSOcnD62fbXG1x7tw4nV50Qxx9g9zDNLK7OD4BdPE,16
+pyerualjetwork-2.2.5.dist-info/RECORD,,
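The middle field of each RECORD row is an urlsafe-base64, unpadded SHA-256 digest of the file and the last field is its size in bytes, per the standard wheel RECORD format. A small sketch of recomputing such an entry from an unpacked wheel, for checking against the rows above (the path is illustrative):

    import base64
    import hashlib
    from pathlib import Path

    def record_entry(path):
        # Wheel RECORD digest: urlsafe base64 of the SHA-256 digest,
        # with the trailing '=' padding stripped.
        data = Path(path).read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
        return f"{path},sha256={digest},{len(data)}"

    # Against an unpacked 2.2.5 wheel this should reproduce the row
    # plan_bi/plan_bi.py,sha256=oUEZIGG8Y1nTNED33kAy2RrlW5LQIFPnwxwM8_O3QOQ,45419
    print(record_entry("plan_bi/plan_bi.py"))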
pyerualjetwork-2.2.3.dist-info/RECORD
REMOVED
@@ -1,8 +0,0 @@
-plan_bi/__init__.py,sha256=82q8bWRYqzwMrFuViQzBg7P19i6EqdV7VYBVxuQ-LV0,517
-plan_bi/plan_bi.py,sha256=fNJK07y9JDVLU0GORD_RHKBjTUnVzb5ciYcQFpyiSvc,50490
-plan_di/__init__.py,sha256=5BCC6Wut4J5lTadds5q3P10p01FKPD4pEeNwzkpL6qo,503
-plan_di/plan_di.py,sha256=mzTfunaEqbgQtBOVtd9tCRXT5ujupHwhilwWuk6fVGg,46270
-pyerualjetwork-2.2.3.dist-info/METADATA,sha256=iltDA-AwbGfBBSYtL3Tpxw8YMxUTHlzibrvPxZ0xPLA,325
-pyerualjetwork-2.2.3.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-pyerualjetwork-2.2.3.dist-info/top_level.txt,sha256=aaXSOcnD62fbXG1x7tw4nV50Qxx9g9zDNLK7OD4BdPE,16
-pyerualjetwork-2.2.3.dist-info/RECORD,,
{pyerualjetwork-2.2.3.dist-info → pyerualjetwork-2.2.5.dist-info}/WHEEL
File without changes
{pyerualjetwork-2.2.3.dist-info → pyerualjetwork-2.2.5.dist-info}/top_level.txt
File without changes