pyerualjetwork 2.0.8__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plan_bi/plan_bi.py +58 -86
- plan_di/plan_di.py +49 -70
- pyerualjetwork-2.1.0.dist-info/METADATA +8 -0
- pyerualjetwork-2.1.0.dist-info/RECORD +8 -0
- pyerualjetwork-2.0.8.dist-info/METADATA +0 -8
- pyerualjetwork-2.0.8.dist-info/RECORD +0 -8
- {pyerualjetwork-2.0.8.dist-info → pyerualjetwork-2.1.0.dist-info}/WHEEL +0 -0
- {pyerualjetwork-2.0.8.dist-info → pyerualjetwork-2.1.0.dist-info}/top_level.txt +0 -0
plan_bi/plan_bi.py
CHANGED
@@ -192,12 +192,12 @@ def weight_identification(
     return W
 
 def synaptic_pruning(
-    w, # list[
-    cs, #
+    w, # list[num]: Weight matrix of the neural network.
+    cs, # int: cs = cut_start, Synaptic connections between neurons.
     key, # int: key for identifying synaptic connections.
     Class, # int: Class label for the current training instance.
     class_count, # int: Total number of classes in the dataset.
-    piece, #
+    piece, # int: Which set of neurons will information be transferred to?
     is_training # int: 1 or 0
 
 ) -> str:
@@ -205,8 +205,8 @@ def synaptic_pruning(
     Performs synaptic pruning in a neural network model.
 
     Args:
-        w (list[
-        cs (
+        w (list[num]): Weight matrix of the neural network.
+        cs (int): Synaptic connections between neurons.
         key (str): key for identifying synaptic row or col connections.
         Class (int): Class label for the current training instance.
         class_count (int): Total number of classes in the dataset.
@@ -219,10 +219,8 @@ def synaptic_pruning(
     Class += 1 # because index start 0
 
     if Class != 1:
-
-
-
-        ce = cs / Class
+
+        ce = cs / Class # ce(cut_end) = cs(cut_start) / current_class
 
         if is_training == 1:
 
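A small worked illustration of the cut computation documented above, with hypothetical numbers that are not taken from the package:

    # cut_start cs and the current class index determine cut_end ce
    cs = 100          # hypothetical cut start
    Class = 4         # hypothetical current class (after the index shift above)
    ce = cs / Class   # ce = 25.0: this class keeps the rows between ce and cs
    # during training, the next hunk zeroes the rows outside that band:
    #   w[int(ce)-1::-1, :] = 0   and   w[cs:, :] = 0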
@@ -237,66 +235,46 @@ def synaptic_pruning(
 
             w[int(ce)-1::-1,:] = 0
 
-
             w[cs:,:] = 0
 
         else:
 
-            if
-
-
-
-
-
-
-                w[:,cs] = 0
+            if key == 'row':
+
+                w[cs:,:] = 0
+
+            elif key == 'col':
+
+                w[:,cs] = 0
 
-            else:
-                print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning)
-                return 'e'
     else:
-
-
-
-
-        ce = int(round(w.shape[0] - cs / class_count))
-        w[ce-1::-1,:] = 0
-
-        elif key == 'col':
-
-            w[:,cs] = 0
-
-        else:
-            print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning + Style.RESET_ALL)
-            return 'e'
+        print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning)
+        return 'e'
+
     return w
 
 def synaptic_dividing(
     class_count, # int: Total number of classes in the dataset.
-    W # list[
+    W # list[num]: Weight matrix list of the neural network.
 ) -> str:
     """
     Divides the synaptic weights of a neural network model based on class count.
 
     Args:
         class_count (int): Total number of classes in the dataset.
-        W (list[
+        W (list[num]): Weight matrix of the neural network.
 
     Returns:
-        list: a 3D list holds informations of divided net.
+        list: a 3D list holds informations of divided net and list of neuron groups separated by classes.
     """
 
 
     Piece = [1] * len(W)
-    #print('Piece:' + Piece)
-    #input()
-    # Creating an empty three-dimensional list
     Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]
 
 
     for i in range(len(W)):
 
-
         Piece[i] = int(math.floor(W[i].shape[0] / class_count))
 
     cs = 0
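The simplified inference branch above reduces to zeroing whole rows or columns of the weight matrix. A minimal standalone NumPy sketch of those two operations, with toy shapes rather than the library's API:

    import numpy as np

    w = np.ones((6, 4))
    cs = 3  # cut start

    w_row = w.copy()
    w_row[cs:, :] = 0   # key == 'row': rows cs..end are cleared

    w_col = w.copy()
    w_col[:, cs] = 0    # key == 'col': column cs is cleared
                        # (in cat() below, cs is an index array from np.where)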
@@ -306,9 +284,6 @@ def synaptic_dividing(
         for j in range(class_count):
             cs = cs + Piece[i]
             Divides[j][i][0] = cs
-            #pruning_param[i] = cs
-            #print('Divides: ' + j + i + ' = ' + Divides[j][i][0])
-            #input()
 
     j = 0
     cs = 0
@@ -318,20 +293,19 @@ def synaptic_dividing(
 
 def fex(
     Input, # list[num]: Input data.
-    w, # list[
-    activation_potential, #
-    piece, #
-    is_training #
+    w, # list[num]: Weight matrix of the neural network.,
+    activation_potential, # float: Threshold value for comparison.
+    piece, # int: Which set of neurons will information be transferred to?
+    is_training # int: 1 or 0
 ) -> tuple:
     """
     Applies feature extraction process to the input data using synaptic pruning.
 
     Args:
         Input (list[num]): Input data.
-        w (list[
-
-
-
+        w (list[num]): Weight matrix of the neural network.
+        activation_potential (float): Threshold value for comparison.
+        piece (int): Which set of neurons will information be transferred to?
     Returns:
         tuple: A tuple (vector) containing the neural layer result and the updated weight matrix.
     """
@@ -346,30 +320,29 @@ def fex(
 
 def cat(
     Input, # list[num]: Input data.
-    w, # list[
-    activation_potential, #
-
-    piece # int
+    w, # list[num]: Weight matrix of the neural network.
+    activation_potential, # (float): Threshold value for comparison.
+    is_training, # (int): Flag indicating if the function is called during training (1 for training, 0 otherwise).
+    piece # (int) Which set of neurons will information be transferred to?
 ) -> tuple:
     """
     Applies categorization process to the input data using synaptic pruning if specified.
 
     Args:
         Input (list[num]): Input data.
-        w (list[
-
-
-
-
+        w (list[num]): Weight matrix of the neural network.
+        activation_potential (float): Threshold value for comparison.
+        is_training (int): Flag indicating if the function is called during training (1 for training, 0 otherwise).
+        piece (int): Which set of neurons will information be transferred to?
     Returns:
         tuple: A tuple containing the neural layer (vector) result and the possibly updated weight matrix.
     """
 
     PruneIndex = np.where(Input == 0)
 
-    if
+    if is_training == 1:
 
-        w = synaptic_pruning(w, PruneIndex, 'col', 0, 0, piece,
+        w = synaptic_pruning(w, PruneIndex, 'col', 0, 0, piece, is_training)
 
 
     neural_layer = np.dot(w, Input)
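With is_training now forwarded, the pruning call in cat() only fires during training. A self-contained sketch of the idea the call expresses, zeroing the weight columns that correspond to zero-valued inputs (toy code under that assumption, not the package's implementation):

    import numpy as np

    def prune_zero_input_columns(w, Input):
        # Columns whose input is 0 contribute nothing, so clear them.
        PruneIndex = np.where(Input == 0)[0]
        w = w.copy()
        w[:, PruneIndex] = 0
        return w

    w = np.arange(15, dtype=float).reshape(3, 5)
    Input = np.array([1.0, 0.0, 2.0, 0.0, 1.0])
    w = prune_zero_input_columns(w, Input)
    neural_layer = np.dot(w, Input)  # forward step, as in cat()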
@@ -378,16 +351,16 @@ def cat(
 
 
 def normalization(
-    Input #
+    Input # num: Input data to be normalized.
 ):
     """
     Normalizes the input data using maximum absolute scaling.
 
     Args:
-        Input
+        Input num: Input data to be normalized.
 
     Returns:
-
+        num: Scaled input data after normalization.
     """
 
 
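normalization() is documented as maximum absolute scaling; the function body is not shown in this hunk, so the one-liner below is an equivalent sketch assuming a NumPy array input:

    import numpy as np

    def max_abs_scale(Input):
        # Divide by the largest absolute value, mapping data into [-1, 1].
        return Input / np.max(np.abs(Input))

    print(max_abs_scale(np.array([-4.0, 2.0, 8.0])))  # [-0.5   0.25  1.  ]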
@@ -401,16 +374,16 @@ def normalization(
 
 
 def Softmax(
-    x #
+    x # num: Input data to be transformed using softmax function.
 ):
     """
     Applies the softmax function to the input data.
 
     Args:
-
+        num: Input data to be transformed using softmax function.
 
     Returns:
-
+        num: Transformed data after applying softmax function.
     """
 
     return softmax(x)
@@ -423,10 +396,10 @@ def Sigmoid(
     Applies the sigmoid function to the input data.
 
     Args:
-
+        num: Input data to be transformed using sigmoid function.
 
     Returns:
-
+        num: Transformed data after applying sigmoid function.
     """
     return expit(x)
 
@@ -438,10 +411,10 @@ def Relu(
     Applies the Rectified Linear Unit (ReLU) function to the input data.
 
     Args:
-
+        num: Input data to be transformed using ReLU function.
 
     Returns:
-
+        num: Transformed data after applying ReLU function.
     """
 
 
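Softmax and Sigmoid visibly delegate to SciPy (softmax and expit appear in the context lines); the Relu body is not shown in this diff, so the NumPy form below is an assumption. A compact sketch of all three wrappers:

    import numpy as np
    from scipy.special import softmax, expit

    def Softmax(x):
        return softmax(x)        # matches the context line: return softmax(x)

    def Sigmoid(x):
        return expit(x)          # matches the context line: return expit(x)

    def Relu(x):
        return np.maximum(0, x)  # assumed: the usual elementwise ReLU

    x = np.array([-1.0, 0.0, 2.0])
    print(Softmax(x), Sigmoid(x), Relu(x))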
@@ -451,21 +424,21 @@ def Relu(
 
 
 def evaluate(
-    x_test, # list[
+    x_test, # list[num]: Test input data.
     y_test, # list[num]: Test labels.
-    activation_potential, #
-    visualize, # visualize Testing procces or not visualize ('y' or 'n')
-    W # list[
+    activation_potential, # float: Threshold value for comparison.
+    visualize, # str: visualize Testing procces or not visualize ('y' or 'n')
+    W # list[num]: Weight matrix of the neural network.
 ) -> tuple:
     infoTestModel = """
     Tests the neural network model with the given test data.
 
     Args:
-        x_test (list[
+        x_test (list[num]): Test input data.
         y_test (list[num]): Test labels.
         activation_potential (float): Input activation potential
         visualize (str): Visualize test progress ? ('y' or 'n')
-        W (list[
+        W (list[num]): Weight matrix list of the neural network.
 
     Returns:
         tuple: A tuple containing the predicted labels and the accuracy of the model.
@@ -475,7 +448,7 @@ def evaluate(
 
 
     try:
-        Wc = [0] * len(W)
+        Wc = [0] * len(W) # Wc = weight copy
         true = 0
         TestPredictions = [None] * len(y_test)
         for i, w in enumerate(W):
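Only the head of evaluate() appears here: Wc as a scratch copy of the weights, a running true counter, and one prediction slot per test label. A rough sketch of the bookkeeping this sets up, assuming predictions are scored by argmax against one-hot labels (hypothetical helper, not the package's code):

    import numpy as np

    def accuracy_sketch(outputs, y_test):
        true = 0
        TestPredictions = [None] * len(y_test)
        for i, (out, label) in enumerate(zip(outputs, y_test)):
            TestPredictions[i] = np.argmax(out)
            if TestPredictions[i] == np.argmax(label):
                true += 1
        return TestPredictions, true / len(y_test)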
@@ -602,7 +575,7 @@ def save_model(model_name,
         weights_type (str): Type of weights to save (options: 'txt', 'npy', 'mat').
         WeightFormat (str): Format of the weights (options: 'd', 'f', 'raw').
         model_path (str): Path where the model will be saved. For example: C:/Users/beydili/Desktop/denemePLAN/
-        W: Weights of the model.
+        W: Weights list of the model.
 
     Returns:
         str: Message indicating if the model was saved successfully or encountered an error.
@@ -743,10 +716,9 @@ def load_model(model_name,
     Arguments:
         model_name (str): Name of the model.
         model_path (str): Path where the model is saved.
-        log_type (str): Type of log to load (options: 'csv', 'txt', 'hdf5').
 
     Returns:
-        lists: W(list[num]), activation_potential,
+        lists: W(list[num]), activation_potential, DataFrame of the model
     """
     pass
 
@@ -797,7 +769,7 @@ def predict_model_ssd(Input,model_name,model_path):
     Function to make a prediction using a divided pruning learning artificial neural network (PLAN).
 
     Arguments:
-        Input (
+        Input (num): Input data for the model (single vector or single matrix).
         model_name (str): Name of the model.
         model_path (str): Path where the model is saved.
     Returns:
@@ -1001,4 +973,4 @@ def get_acc():
 
 def get_pot():
 
-    return 1
+    return 1
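The two modules changed in this release are near twins; the 2.0.8 summary quoted at the end of this diff gives the selection rule: inputs containing values smaller than 0 use plan_bi, otherwise plan_di. A sketch of that dispatch (the helper name is hypothetical):

    import numpy as np

    def pick_plan_module(x_train):
        # Rule quoted from the 2.0.8 package summary: negative inputs -> plan_bi,
        # non-negative inputs -> plan_di.
        if np.min(x_train) < 0:
            import plan_bi as plan
        else:
            import plan_di as plan
        return plan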
plan_di/plan_di.py
CHANGED
@@ -174,7 +174,7 @@ def weight_identification(
         x_train_size (int): Size of the input data.
 
     Returns:
-        list([numpy_arrays],[...]):
+        list([numpy_arrays],[...]): pretrained weight matices of the model. .
     """
 
 
@@ -188,12 +188,12 @@ def weight_identification(
     return W
 
 def synaptic_pruning(
-    w, #
-    cs, #
+    w, # num: Weight matrix of the neural network.
+    cs, # int: cs = cut_start, Synaptic connections between neurons.
     key, # int: key for identifying synaptic connections.
     Class, # int: Class label for the current training instance.
     class_count, # int: Total number of classes in the dataset.
-    piece, #
+    piece, # int: Which set of neurons will information be transferred to?
     is_training # int: 1 or 0
 
 ) -> str:
@@ -201,11 +201,13 @@ def synaptic_pruning(
     Performs synaptic pruning in a neural network model.
 
     Args:
-        w (list[
-        cs (list[
+        w (list[num]): Weight matrix of the neural network.
+        cs (list[num]): Synaptic connections between neurons.
         key (str): key for identifying synaptic row or col connections.
         Class (int): Class label for the current training instance.
         class_count (int): Total number of classes in the dataset.
+        piece (int): Which set of neurons will information be transferred to?
+        is_training (int): 1 or 0
 
     Returns:
         numpy array: Weight matrix.
@@ -218,7 +220,7 @@ def synaptic_pruning(
 
 
 
-        ce = cs / Class
+        ce = cs / Class # ce(cut_end) = cs(cut_start) / current_class
 
         if is_training == 1:
 
@@ -238,7 +240,6 @@ def synaptic_pruning(
 
     else:
 
-        if Class == 1:
         if key == 'row':
 
             w[cs:,:] = 0
@@ -250,43 +251,27 @@ def synaptic_pruning(
         else:
             print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning)
             return 'e'
-
-        if key == 'row':
-
-            w[cs:,:] = 0
-
-            ce = int(round(w.shape[0] - cs / class_count))
-            w[ce-1::-1,:] = 0
-
-        elif key == 'col':
-
-            w[:,cs] = 0
-
-        else:
-            print(Fore.RED + "ERROR103: synaptic_pruning func's key parameter must be 'row' or 'col' from: synaptic_pruning" + infoPruning + Style.RESET_ALL)
-            return 'e'
+
     return w
 
 def synaptic_dividing(
     class_count, # int: Total number of classes in the dataset.
-    W # list[
+    W # list[num]: Weight matrix of the neural network.
 ) -> str:
     """
     Divides the synaptic weights of a neural network model based on class count.
 
     Args:
         class_count (int): Total number of classes in the dataset.
-        W (list[
+        W (list[num]): Weight matrix of the neural network.
 
     Returns:
-        list: a 3D list holds informations of divided net.
+        list: a 3D list holds informations of divided net and list of neuron groups separated by classes.
     """
 
 
     Piece = [1] * len(W)
-
-    #input()
-    # Creating an empty three-dimensional list
+
     Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]
 
 
@@ -296,15 +281,11 @@ def synaptic_dividing(
         Piece[i] = int(math.floor(W[i].shape[0] / class_count))
 
     cs = 0
-    # j = Classes, i = Weights, [0] = CutStart.
 
     for i in range(len(W)):
         for j in range(class_count):
             cs = cs + Piece[i]
             Divides[j][i][0] = cs
-            #pruning_param[i] = cs
-            #print('Divides: ' + j + i + ' = ' + Divides[j][i][0])
-            #input()
 
     j = 0
     cs = 0
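The surviving synaptic_dividing code allots each class an equal floor(rows / class_count) slice per weight matrix and records cumulative cut points. The same arithmetic, isolated with toy shapes and mirroring the loop exactly as it appears in the diff:

    import math

    class_count = 3
    row_counts = [9, 6]  # stand-ins for W[i].shape[0]

    Piece = [int(math.floor(n / class_count)) for n in row_counts]  # [3, 2]
    Divides = [[[0] for _ in range(len(row_counts))] for _ in range(class_count)]

    cs = 0
    for i in range(len(row_counts)):
        for j in range(class_count):
            cs = cs + Piece[i]           # cumulative cut start
            Divides[j][i][0] = cs        # cut point for class j, matrix i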
@@ -314,18 +295,18 @@ def synaptic_dividing(
 
 def fex(
     Input, # list[num]: Input data.
-    w, #
-    Class, # Which class is, if training.
-    is_training #
+    w, # num: Weight matrix of the neural network.,
+    Class, # int: Which class is, if training.
+    is_training # int: 1 or 0
 ) -> tuple:
     """
     Applies feature extraction process to the input data using synaptic pruning.
 
     Args:
-        Input (
-        w (
-
-
+        Input (num): Input data.
+        w (num): Weight matrix of the neural network.
+        Class (int): Which class is, if training.
+        is_training (int): Flag indicating if the function is called during training (1 for training, 0 otherwise).
 
     Returns:
         tuple: A tuple (vector) containing the neural layer result and the updated weight matrix.
@@ -341,29 +322,28 @@ def fex(
 
 def cat(
     Input, # list[num]: Input data.
-    w, # list[
-
-    piece # int
+    w, # list[num]: Weight matrix of the neural network.
+    is_training, # int: Flag indicating if the function is called during training (1 for training, 0 otherwise).
+    piece # int Which set of neurons will information be transferred to?
 ) -> tuple:
     """
     Applies categorization process to the input data using synaptic pruning if specified.
 
     Args:
         Input (list[num]): Input data.
-        w (
-
-
-
-
+        w (num): Weight matrix of the neural network.
+        is_training (int): Flag indicating if the function is called during training (1 for training, 0 otherwise).
+        piece (int): Which set of neurons will information be transferred to?
+        ) -> tuple:
     Returns:
         tuple: A tuple containing the neural layer (vector) result and the possibly updated weight matrix.
     """
 
     PruneIndex = np.where(Input == 0)
 
-    if
+    if is_training == 1:
 
-        w = synaptic_pruning(w, PruneIndex, 'col', 0, 0, piece,
+        w = synaptic_pruning(w, PruneIndex, 'col', 0, 0, piece, is_training)
 
 
     neural_layer = np.dot(w, Input)
@@ -372,16 +352,16 @@ def cat(
 
 
 def normalization(
-    Input #
+    Input # num: Input data to be normalized.
 ):
     """
     Normalizes the input data using maximum absolute scaling.
 
     Args:
-        Input (
+        Input (num): Input data to be normalized.
 
     Returns:
-
+        (num) Scaled input data after normalization.
     """
 
 
@@ -395,47 +375,47 @@ def normalization(
 
 
 def Softmax(
-    x #
+    x # num: Input data to be transformed using softmax function.
 ):
     """
     Applies the softmax function to the input data.
 
     Args:
-
+        (num): Input data to be transformed using softmax function.
 
     Returns:
-
+        (num): Transformed data after applying softmax function.
     """
 
     return softmax(x)
 
 
 def Sigmoid(
-    x #
+    x # num: Input data to be transformed using sigmoid function.
 ):
     """
     Applies the sigmoid function to the input data.
 
     Args:
-
+        (num): Input data to be transformed using sigmoid function.
 
     Returns:
-
+        (num): Transformed data after applying sigmoid function.
     """
     return expit(x)
 
 
 def Relu(
-    x #
+    x # num: Input data to be transformed using ReLU function.
 ):
     """
     Applies the Rectified Linear Unit (ReLU) function to the input data.
 
     Args:
-
+        (num): Input data to be transformed using ReLU function.
 
     Returns:
-
+        (num): Transformed data after applying ReLU function.
     """
 
 
@@ -445,20 +425,20 @@ def Relu(
 
 
 def evaluate(
-    x_test, # list[
+    x_test, # list[num]: Test input data.
     y_test, # list[num]: Test labels.
-    visualize, # visualize Testing procces or not visualize ('y' or 'n')
-    W # list[
+    visualize, # str: visualize Testing procces or not visualize ('y' or 'n')
+    W # list[num]: Weight matrix list of the neural network.
 ) -> tuple:
     infoTestModel = """
     Tests the neural network model with the given test data.
 
     Args:
-        x_test (list[
+        x_test (list[num]): Test input data.
         y_test (list[num]): Test labels.
         activation_potential (float): Input activation potential
         visualize (str): Visualize test progress ? ('y' or 'n')
-        W (list[
+        W (list[num]): Weight matrix list of the neural network.
 
     Returns:
         tuple: A tuple containing the predicted labels and the accuracy of the model.
@@ -468,7 +448,7 @@ def evaluate(
 
 
     try:
-        Wc = [0] * len(W)
+        Wc = [0] * len(W) # Wc = Weight copy
         true = 0
         TestPredictions = [None] * len(y_test)
         for i, w in enumerate(W):
@@ -734,10 +714,9 @@ def load_model(model_name,
     Arguments:
         model_name (str): Name of the model.
         model_path (str): Path where the model is saved.
-        log_type (str): Type of log to load (options: 'csv', 'txt', 'hdf5').
 
     Returns:
-        lists: W(list[num]), activation_potential,
+        lists: W(list[num]), activation_potential, DataFrame of the model
     """
     pass
 
pyerualjetwork-2.1.0.dist-info/METADATA
ADDED
@@ -0,0 +1,8 @@
+Metadata-Version: 2.1
+Name: pyerualjetwork
+Version: 2.1.0
+Summary: Code improvements (Documentation in desc. Examples in GİTHUB: https://github.com/HCB06/PyerualJetwork)
+Author: Hasan Can Beydili
+Author-email: tchasancan@gmail.com
+Keywords: model evaluation,classifcation,pruning learning artficial neural networks
+
pyerualjetwork-2.1.0.dist-info/RECORD
ADDED
@@ -0,0 +1,8 @@
+plan_bi/__init__.py,sha256=itKrgkPwaA0VKwo2etQkiXv6m4pUwV7N06tRxZOVhuU,397
+plan_bi/plan_bi.py,sha256=ZbSUejhr_onrLhQXTvkY2Y2vLEaobaAqLTwkVPpwO58,33183
+plan_di/__init__.py,sha256=F4PPBzkY-_HXDjzA0Xyx2cMlcHtNBs7OHb-AxStZjP4,397
+plan_di/plan_di.py,sha256=06XsMuNUeDV7HDYni4-wk-B4MzRt8cW90kx-QyhR-Us,32205
+pyerualjetwork-2.1.0.dist-info/METADATA,sha256=loYTjaQ1BtdWwmo2bKxG-f_6oTCS_LEQcDeyhsiwf2A,325
+pyerualjetwork-2.1.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+pyerualjetwork-2.1.0.dist-info/top_level.txt,sha256=aaXSOcnD62fbXG1x7tw4nV50Qxx9g9zDNLK7OD4BdPE,16
+pyerualjetwork-2.1.0.dist-info/RECORD,,
pyerualjetwork-2.0.8.dist-info/METADATA
REMOVED
@@ -1,8 +0,0 @@
-Metadata-Version: 2.1
-Name: pyerualjetwork
-Version: 2.0.8
-Summary: Advanced python deep learning library. New features: BINARY INJECTION (OLD) NOW ADDED NEW DIRECT FEATURE INJECTION. AND 'standard_scaler' func. Important Note: If there are any data smaller than 0 among the input data of the entry model, import plan_bi; otherwise, import plan_di. (Documentation in desc. Examples in GİTHUB: https://github.com/HCB06/PyerualJetwork)
-Author: Hasan Can Beydili
-Author-email: tchasancan@gmail.com
-Keywords: model evaluation,classifcation,pruning learning artficial neural networks
-
pyerualjetwork-2.0.8.dist-info/RECORD
REMOVED
@@ -1,8 +0,0 @@
-plan_bi/__init__.py,sha256=itKrgkPwaA0VKwo2etQkiXv6m4pUwV7N06tRxZOVhuU,397
-plan_bi/plan_bi.py,sha256=RL2Yu2NN3KoXU5OEnijkSEXabo2fsJt-ZpCvV_WiHFI,33909
-plan_di/__init__.py,sha256=F4PPBzkY-_HXDjzA0Xyx2cMlcHtNBs7OHb-AxStZjP4,397
-plan_di/plan_di.py,sha256=fmPuSIDl-x3W51aETEhXNx1W2DSFCLRJF3-1rDARgSA,32873
-pyerualjetwork-2.0.8.dist-info/METADATA,sha256=IwLoEHpItc5cWg94uuONijwxwuOTS5RQ43oS6rjsBJw,588
-pyerualjetwork-2.0.8.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-pyerualjetwork-2.0.8.dist-info/top_level.txt,sha256=aaXSOcnD62fbXG1x7tw4nV50Qxx9g9zDNLK7OD4BdPE,16
-pyerualjetwork-2.0.8.dist-info/RECORD,,
{pyerualjetwork-2.0.8.dist-info → pyerualjetwork-2.1.0.dist-info}/WHEEL
File without changes
{pyerualjetwork-2.0.8.dist-info → pyerualjetwork-2.1.0.dist-info}/top_level.txt
File without changes