pyerualjetwork 1.3.6__py3-none-any.whl → 1.3.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
plan/plan.py CHANGED
@@ -1,3 +1,8 @@
+ """
+ Created on Thu May 30 22:12:49 2024
+
+ @author: hasan can beydili
+ """
  import numpy as np
  import time
  from colorama import Fore,Style
@@ -148,7 +153,7 @@ def TrainPLAN(
  x_train_size = len(x_train[0])

  W = WeightIdentification(len(layers) - 1,class_count,neurons,x_train_size)
- Divides = SynapticDividing(class_count,W)
+ Divides, Piece = SynapticDividing(class_count,W)
  trained_W = [1] * len(W)
  print(Fore.GREEN + "Train Started with 0 ERROR" + Style.RESET_ALL,)
  train_predictions = [None] * len(y_train)
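Note that SynapticDividing now returns a pair, so the unpacking shown above is required; any caller that still expects a single return value needs the same update. The second value, Piece, holds the per-class row chunk for each weight matrix and is threaded into SynapticPruning, Fex and Cat in the hunks below.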
@@ -171,9 +176,11 @@ def TrainPLAN(
  for i, ul in enumerate(Ul):
  if ul == 1.0:
  k = i
+
  cs = Divides[int(k)][Windex][0]
+

- W[Windex] = SynapticPruning(w, cs, 'row', int(k),class_count)
+ W[Windex] = SynapticPruning(w, cs, 'row', int(k),class_count,Piece[Windex],1)

  neural_layer = inp

@@ -190,9 +197,9 @@ def TrainPLAN(
  neural_layer = Softmax(neural_layer)

  if Layer == 'fex':
- neural_layer,W[Lindex] = Fex(neural_layer, W[Lindex], membran_thresholds[Lindex], membran_potentials[Lindex])
+ neural_layer,W[Lindex] = Fex(neural_layer, W[Lindex], membran_thresholds[Lindex], membran_potentials[Lindex], Piece[Windex],1)
  elif Layer == 'cat':
- neural_layer,W[Lindex] = Cat(neural_layer, W[Lindex], membran_thresholds[Lindex], membran_potentials[Lindex],1)
+ neural_layer,W[Lindex] = Cat(neural_layer, W[Lindex], membran_thresholds[Lindex], membran_potentials[Lindex],1, Piece[Windex])

  RealOutput = np.argmax(y_train[index])
  PredictedOutput = np.argmax(neural_layer)
@@ -312,7 +319,9 @@ def SynapticPruning(
  cs, # list[list[num]]: Synaptic connections between neurons.
  key, # int: key for identifying synaptic connections.
  Class, # int: Class label for the current training instance.
- class_count # int: Total number of classes in the dataset.
+ class_count, # int: Total number of classes in the dataset.
+ piece, # ???
+ is_training # int: 1 or 0

  ) -> str:
  infoPruning = """
@@ -328,18 +337,31 @@ def SynapticPruning(
  Returns:
  numpy array: Weight matrix.
  """
-
+

  Class += 1 # because index start 0

- if Class != class_count and Class != 1:
-
- ce = cs / Class
+ if Class != 1:
+

- w[int(ce)-1::-1,:] = 0
-
- w[cs:,:] = 0
-
+
+ ce = cs / Class
+
+ if is_training == 1:
+
+ p = piece
+
+ for i in range(Class - 3):
+
+ piece+=p
+
+ if Class!= 2:
+ ce += piece
+
+ w[int(ce)-1::-1,:] = 0
+
+
+ w[cs:,:] = 0

  else:

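Read off the added lines above: for classes beyond the second, training now shifts the cut index ce upward by one extra per-class chunk per additional class before zeroing the rows outside the kept band. A runnable sketch of that arithmetic under stated assumptions (the helper name is made up, and the Class == 1 else-branch is not part of this hunk, so it is only stubbed):

```python
def pruned_band(cs, class_label, piece, is_training):
    """Toy re-derivation of the row window kept by the new SynapticPruning logic.
    Returns (keep_start, keep_stop): rows [0, keep_start) and [cs, end) are zeroed."""
    Class = class_label + 1                # the diff shifts the 0-based label by one
    if Class == 1:
        return None                        # handled by the else-branch, not shown in this hunk
    ce = cs / Class                        # base cut index, unchanged from 1.3.6
    if is_training == 1 and Class > 2:
        # `p = piece; for i in range(Class - 3): piece += p` followed by `ce += piece`
        # amounts to shifting the cut by piece * (Class - 2)
        ce += piece * (Class - 2)
    return int(ce), cs

# Example: third class (label 2), cut start 30, 10 rows per class
print(pruned_band(cs=30, class_label=2, piece=10, is_training=1))   # -> (20, 30)
```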
@@ -394,32 +416,36 @@ def SynapticDividing(
  # Boş bir üç boyutlu liste oluşturma (create an empty three-dimensional list)
  Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]

+
  for i in range(len(W)):


  Piece[i] = int(math.floor(W[i].shape[0] / class_count))

- cs = 0
+ cs = 0
  # j = Classes, i = Weights, [0] = CutStart.

  for i in range(len(W)):
  for j in range(class_count):
  cs = cs + Piece[i]
  Divides[j][i][0] = cs
+ #pruning_param[i] = cs
  #print('Divides: ' + j + i + ' = ' + Divides[j][i][0])
  #input()
-
+
  j = 0
  cs = 0

- return Divides
+ return Divides, Piece


  def Fex(
  Input, # list[num]: Input data.
  w, # list[list[num]]: Weight matrix of the neural network.
  membran_threshold, # str: Sign for threshold comparison ('<', '>', '==', '!=').
- membran_potential # num: Threshold value for comparison.
+ membran_potential, # num: Threshold value for comparison.
+ piece, # ???
+ is_training # num: 1 or 0
  ) -> tuple:
  """
  Applies feature extraction process to the input data using synaptic pruning.
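To make the two returned structures concrete, here is a self-contained toy that mirrors the arithmetic in this hunk: Piece[i] is the whole number of weight rows per class in layer i, and Divides[j][i][0] is the running cut start consumed by SynapticPruning above. The layer shapes are made up; everything else follows the diff, including the fact that cs is not reset between layers:

```python
import math
import numpy as np

class_count = 3
W = [np.zeros((30, 8)), np.zeros((9, 30))]        # made-up layer shapes

Piece = [0] * len(W)
for i in range(len(W)):
    Piece[i] = int(math.floor(W[i].shape[0] / class_count))   # rows per class: [10, 3]

Divides = [[[0] for _ in range(len(W))] for _ in range(class_count)]
cs = 0                                             # as in the diff, never reset per layer
for i in range(len(W)):
    for j in range(class_count):
        cs = cs + Piece[i]
        Divides[j][i][0] = cs                      # cut start for class j in layer i

print(Piece)                                       # [10, 3]
print([[Divides[j][i][0] for j in range(class_count)] for i in range(len(W))])
# layer 0: [10, 20, 30]; layer 1 keeps accumulating: [33, 36, 39]
```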
@@ -443,7 +469,7 @@ def Fex(
  elif membran_threshold == '!=':
  PruneIndex = np.where(Input != membran_potential)

- w = SynapticPruning(w, PruneIndex, 'col', 0, 0)
+ w = SynapticPruning(w, PruneIndex, 'col', 0, 0, piece, is_training)

  neural_layer = np.dot(w, Input)
  return neural_layer,w
@@ -453,7 +479,8 @@ def Cat(
  w, # list[list[num]]: Weight matrix of the neural network.
  membran_threshold, # str: Sign for threshold comparison ('<', '>', '==', '!=').
  membran_potential, # num: Threshold value for comparison.
- isTrain # int: Flag indicating if the function is called during training (1 for training, 0 otherwise).
+ isTrain,
+ piece # int: Flag indicating if the function is called during training (1 for training, 0 otherwise).
  ) -> tuple:
  """
  Applies categorization process to the input data using synaptic pruning if specified.
@@ -479,7 +506,7 @@ def Cat(
  PruneIndex = np.where(Input != membran_potential)
  if isTrain == 1 and membran_threshold != 'none':

- w = SynapticPruning(w, PruneIndex, 'col', 0, 0)
+ w = SynapticPruning(w, PruneIndex, 'col', 0, 0, piece, isTrain)


  neural_layer = np.dot(w, Input)
@@ -557,6 +584,8 @@ def Relu(
  return np.maximum(0, x)


+
+
  def TestPLAN(
  x_test, # list[list[num]]: Test input data.
  y_test, # list[num]: Test labels.
@@ -613,9 +642,9 @@ def TestPLAN(
  neural_layer = Softmax(neural_layer)

  if layers[index] == 'fex':
- neural_layer,useless = Fex(neural_layer, W[index], membran_thresholds[index], membran_potentials[index])
+ neural_layer,useless = Fex(neural_layer, W[index], membran_thresholds[index], membran_potentials[index],0,0)
  if layers[index] == 'cat':
- neural_layer,useless = Cat(neural_layer, W[index], membran_thresholds[index], membran_potentials[index],0)
+ neural_layer,useless = Cat(neural_layer, W[index], membran_thresholds[index], membran_potentials[index],0,0)
  for i, w in enumerate(Wc):
  W[i] = np.copy(w)
  RealOutput = np.argmax(y_test[inpIndex])
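Together with the TrainPLAN hunks above, the new trailing arguments follow a single convention: training-side calls pass the per-class chunk and an enabled flag (Fex(..., Piece[Windex], 1) and Cat(..., 1, Piece[Windex])), while every test and prediction path in this release passes 0, 0, so the shifted-cut logic added to SynapticPruning stays inactive at inference time.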
@@ -683,13 +712,29 @@ def TestPLAN(
  print(Fore.MAGENTA + '\nTotal Test accuracy: ' ,acc, '\n' + Style.RESET_ALL)

  elif acc <= 0.6:
- print(Fore.RED+ '\nTotal Test accuracy: ' ,acc, '\n' + Style.RESET_ALL)
+ print(Fore.RED+ '\nTotal Test accuracy: ' ,acc, '\n' + Style.RESET_ALL)
+
+
+ y_testVisual = np.copy(y_test)
+ y_testVisual = np.argmax(y_testVisual, axis=1)
+
+ plt.figure(figsize=(12, 6))
+ sns.kdeplot(y_testVisual, label='Real Outputs', fill=True)
+ sns.kdeplot(TestPredictions, label='Predictions', fill=True)
+ plt.legend()
+ plt.xlabel('Class')
+ plt.ylabel('Data size')
+ plt.title('Predictions and Real Outputs for Testing KDE Plot')
+ plt.show()
+

  except:

  print(Fore.RED + "ERROR: Testing model parameters like 'layers' 'MembranCounts' must be same as trained model. Check parameters. Are you sure weights are loaded ? from: TestPLAN" + infoTestModel + Style.RESET_ALL)
  return 'e'

+
+
  return W,TestPredictions,acc

  def SavePLAN(model_name,
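TestPLAN now finishes by drawing a KDE comparison of real and predicted class distributions. The new lines call plt and sns, neither of which appears in the import hunk at the top of this diff, so matplotlib.pyplot and seaborn are presumably imported elsewhere in plan.py. A self-contained sketch of the same plot with stand-in data (the arrays here are random placeholders, not library output):

```python
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

y_true = np.random.randint(0, 3, size=200)   # stand-in for np.argmax(y_test, axis=1)
y_pred = np.random.randint(0, 3, size=200)   # stand-in for TestPredictions

plt.figure(figsize=(12, 6))
sns.kdeplot(y_true, label='Real Outputs', fill=True)
sns.kdeplot(y_pred, label='Predictions', fill=True)
plt.legend()
plt.xlabel('Class')
plt.ylabel('Data size')
plt.title('Predictions and Real Outputs for Testing KDE Plot')
plt.show()
```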
@@ -972,12 +1017,12 @@ def PredictFromDiscPLAN(Input,model_name,model_path,log_type):
  if layers[index] == 'fex':
  neural_layer,useless = Fex(neural_layer, W[index],
  membran_thresholds[index],
- membran_potentials[index])
+ membran_potentials[index],0,0)
  if layers[index] == 'cat':
  neural_layer,useless = Cat(neural_layer, W[index],
  membran_thresholds[index],
  membran_potentials[index],
- 0)
+ 0,0)
  except:
  print(Fore.RED + "ERROR: The input was probably entered incorrectly. from: PredictFromDiscPLAN" + infoPredictFromDİscPLAN + Style.RESET_ALL)
  return 'e'
@@ -1024,11 +1069,12 @@ def PredictFromRamPLAN(Input,layers,membran_thresholds,membran_potentials,normal
  if layers[index] == 'fex':
  neural_layer,useless = Fex(neural_layer, W[index],
  membran_thresholds[index],
- membran_potentials[index])
+ membran_potentials[index],0,0)
  if layers[index] == 'cat':
  neural_layer,useless = Cat(neural_layer, W[index],
  membran_thresholds[index],
- membran_potentials[index],0)
+ membran_potentials[index],
+ 0,0)
  except:
  print(Fore.RED + "ERROR: Unexpected input or wrong model parameters from: PredictFromRamPLAN." + infoPredictFromRamPLAN + Style.RESET_ALL)
  return 'e'
pyerualjetwork-1.3.8.dist-info/METADATA ADDED
@@ -0,0 +1,8 @@
+ Metadata-Version: 2.1
+ Name: pyerualjetwork
+ Version: 1.3.8
+ Summary: Advanced python deep learning library. MASSIVE Technic Update, unlocked class limits. (Documentation in desc. Examples in GİTHUB: https://github.com/HCB06/PyerualJetwork)
+ Author: Hasan Can Beydili
+ Author-email: tchasancan@gmail.com
+ Keywords: model evaluation,classifcation,pruning learning artficial neural networks
+
pyerualjetwork-1.3.8.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+ plan/__init__.py,sha256=LQbg-AnTUz7KA1E77-mg7X-zRM-7IiK7c3zK-j063rc,375
+ plan/plan.py,sha256=y8wVs2cA8G_XdTVCdYsTPmZYsJ2xu68L8uU-bbfkmu4,45348
+ pyerualjetwork-1.3.8.dist-info/METADATA,sha256=iNBBKRs72NOtRCe0dDT08Swkg08iwJ0-f1Uyf8gHcro,393
+ pyerualjetwork-1.3.8.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ pyerualjetwork-1.3.8.dist-info/top_level.txt,sha256=G0Al3HuNJ88434XneyDtRKAIUaLCizOFYFYNhd7e2OM,5
+ pyerualjetwork-1.3.8.dist-info/RECORD,,
pyerualjetwork-1.3.6.dist-info/METADATA DELETED
@@ -1,8 +0,0 @@
- Metadata-Version: 2.1
- Name: pyerualjetwork
- Version: 1.3.6
- Summary: Advanced python deep learning library. New Features: 'SyntheticAugmentation' function added for unbalanced datasets. Changes for variable names to snake_case (Function names are still PascalCase). (Documentation in desc. Examples in GİTHUB: https://github.com/HCB06/PyerualJetwork)
- Author: Hasan Can Beydili
- Author-email: tchasancan@gmail.com
- Keywords: model evaluation,classifcation,pruning learning artficial neural networks
-
pyerualjetwork-1.3.6.dist-info/RECORD DELETED
@@ -1,6 +0,0 @@
- plan/__init__.py,sha256=LQbg-AnTUz7KA1E77-mg7X-zRM-7IiK7c3zK-j063rc,375
- plan/plan.py,sha256=XIFGPk2NFYQcuKcPik-FMe-i-w3ezn60QFfWSkjXUU0,44211
- pyerualjetwork-1.3.6.dist-info/METADATA,sha256=FOvXyHLZLEWAfQCeSZ4Z7IHwzRC8Z3rSGWQg-CRffDw,504
- pyerualjetwork-1.3.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- pyerualjetwork-1.3.6.dist-info/top_level.txt,sha256=G0Al3HuNJ88434XneyDtRKAIUaLCizOFYFYNhd7e2OM,5
- pyerualjetwork-1.3.6.dist-info/RECORD,,