pyerualjetwork-2.7.4-py3-none-any.whl → pyerualjetwork-2.7.6-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
plan/plan.py CHANGED
@@ -142,8 +142,6 @@ def fit(
 
         for Lindex, Layer in enumerate(layers):
 
-            neural_layer = normalization(neural_layer)
-
             if Layer == 'fex':
                 STPW[Lindex] = fex(neural_layer, STPW[Lindex], True, y[index], activation_potentiation)
 
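Note that dropping the `normalization(neural_layer)` call means `fit` no longer rescales each sample internally, so any scaling now has to happen before training. A minimal sketch of that pre-processing step, assuming the `plan.fit(x_train, y_train)` entry point with its remaining parameters left at their defaults (the scaling formula here is an illustrative assumption, not the library's own):

    import numpy as np
    from plan import plan  # package layout as shipped in this wheel

    # Dummy data purely for illustration.
    x_train = np.random.rand(8, 4)
    y_train = np.eye(2)[np.random.randint(0, 2, size=8)]  # one-hot labels

    # fit() no longer normalizes each sample itself (see the hunk above),
    # so scale the inputs yourself if they are not already normalized.
    x_train = x_train / (np.abs(x_train).max() or 1.0)

    W = plan.fit(x_train, y_train)  # other parameters assumed to keep their defaults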
@@ -493,64 +491,66 @@ def evaluate(
     Returns:
         tuple: A tuple containing the predicted labels and the accuracy of the model.
     """
+    try:
+        layers = ['fex']
 
-    layers = ['fex']
-
-    Wc = [0] * len(W) # Wc = Weight copy
-    true = 0
-    y_preds = [-1] * len(y_test)
-    acc_list = []
-
-    for i, w in enumerate(W):
-        Wc[i] = np.copy(w)
-
+        Wc = [0] * len(W) # Wc = Weight copy
+        true = 0
+        y_preds = [-1] * len(y_test)
+        acc_list = []
 
-    if bar_status == True:
+        for i, w in enumerate(W):
+            Wc[i] = np.copy(w)
+
+
+        if bar_status == True:
 
-        test_progress = tqdm(total=len(x_test),leave=False, desc='Testing',ncols=120)
-        acc_bar = tqdm(total=1, desc="Test Accuracy", ncols=120)
+            test_progress = tqdm(total=len(x_test),leave=False, desc='Testing',ncols=120)
+            acc_bar = tqdm(total=1, desc="Test Accuracy", ncols=120)
+
 
-
-    for inpIndex, Input in enumerate(x_test):
-        Input = np.array(Input)
-        Input = Input.ravel()
-        neural_layer = Input
+        for inpIndex, Input in enumerate(x_test):
+            Input = np.array(Input)
+            Input = Input.ravel()
+            neural_layer = Input
 
-        for index, Layer in enumerate(layers):
+            for index, Layer in enumerate(layers):
 
-            neural_layer = normalization(neural_layer)
+                if Layer == 'fex':
+                    neural_layer = fex(neural_layer, W[index], False, None, activation_potentiation)
 
-            if Layer == 'fex':
-                neural_layer = fex(neural_layer, W[index], False, None, activation_potentiation)
 
+            for i, w in enumerate(Wc):
+                W[i] = np.copy(w)
+            RealOutput = np.argmax(y_test[inpIndex])
+            PredictedOutput = np.argmax(neural_layer)
+            if RealOutput == PredictedOutput:
+                true += 1
+            acc = true / len(y_test)
 
-        for i, w in enumerate(Wc):
-            W[i] = np.copy(w)
-        RealOutput = np.argmax(y_test[inpIndex])
-        PredictedOutput = np.argmax(neural_layer)
-        if RealOutput == PredictedOutput:
-            true += 1
-        acc = true / len(y_test)
 
+            acc_list.append(acc)
+            y_preds[inpIndex] = PredictedOutput
+
+            if bar_status == True:
+                test_progress.update(1)
+                if inpIndex == 0:
+                    acc_bar.update(acc)
+
+                else:
+                    acc = acc - acc_list[inpIndex - 1]
+                    acc_bar.update(acc)
 
-        acc_list.append(acc)
-        y_preds[inpIndex] = PredictedOutput
+        if show_metrices == True:
+            plot_evaluate(y_test, y_preds, acc_list)
 
-        if bar_status == True:
-            test_progress.update(1)
-            if inpIndex == 0:
-                acc_bar.update(acc)
-
-            else:
-                acc = acc - acc_list[inpIndex - 1]
-                acc_bar.update(acc)
-
-        if show_metrices == True:
-            plot_evaluate(y_test, y_preds, acc_list)
+
+        for i, w in enumerate(Wc):
+            W[i] = np.copy(w)
 
+    except:
 
-    for i, w in enumerate(Wc):
-        W[i] = np.copy(w)
+        print(Fore.RED + 'ERROR:' + infoTestModel + Style.RESET_ALL)
 
     return W, y_preds, acc
 
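As the rewritten hunk shows, the body of `evaluate` now sits inside a `try`/`except` that prints a red error message via colorama instead of letting the exception propagate; the trailing `return W, y_preds, acc` is unchanged, so if the `except` branch is reached before `acc` is assigned, the return itself will still fail. A hedged usage sketch, reusing the parameter names visible in the diff (`x_test`, `y_test`, `W`, `bar_status`, `show_metrices`, `activation_potentiation`); their exact order in the signature is an assumption:

    # W comes from a previous plan.fit(...) call; x_test / y_test are the held-out split.
    W, y_preds, acc = plan.evaluate(x_test, y_test,
                                    W=W,
                                    bar_status=True,
                                    show_metrices=False,
                                    activation_potentiation=None)
    print(f"test accuracy: {acc:.3f}")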
@@ -936,9 +936,7 @@ def predict_model_ssd(Input, model_name, model_path):
     neural_layer = np.array(neural_layer)
     neural_layer = neural_layer.ravel()
     for index, Layer in enumerate(layers):
-
-        neural_layer = normalization(neural_layer)
-
+
         if Layer == 'fex':
             neural_layer = fex(neural_layer, W[index], False, None, activation_potentiation)
         elif Layer == 'cat':
@@ -985,8 +983,6 @@ def predict_model_ram(Input, W, scaler_params=None, activation_potentiation=None
     neural_layer = neural_layer.ravel()
     for index, Layer in enumerate(layers):
 
-        neural_layer = normalization(neural_layer)
-
         if Layer == 'fex':
             neural_layer = fex(neural_layer, W[index], False, None, activation_potentiation)
         elif Layer == 'cat':
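Both `predict_model_ssd` and `predict_model_ram` lose the same per-layer `normalization()` call, so inference now runs on the input exactly as it is passed in. A small sketch of predicting a single sample after scaling it externally, reusing the parameter names from the two hunk headers above (the scaling line and the model name and path values are illustrative assumptions):

    # Continuing from the fit sketch above: W holds the trained weights.
    sample = np.random.rand(4).ravel()
    sample = sample / (np.abs(sample).max() or 1.0)  # assumed external normalization

    out_ram = plan.predict_model_ram(sample, W)  # weights kept in memory
    out_ssd = plan.predict_model_ssd(sample, model_name='my_model',
                                     model_path='')  # weights previously saved to disk
    predicted_class = np.argmax(out_ram)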
@@ -1099,7 +1095,7 @@ def synthetic_augmentation(x_train, y_train):
     return np.array(x_balanced), np.array(y_balanced)
 
 
-def standard_scaler(x_train, x_test, scaler_params=None):
+def standard_scaler(x_train, x_test=None, scaler_params=None):
     info_standard_scaler = """
     Standardizes training and test datasets. x_test may be None.
 
@@ -1107,7 +1103,7 @@ def standard_scaler(x_train, x_test, scaler_params=None):
     train_data: numpy.ndarray
         Training data
     test_data: numpy.ndarray
-        Test data
+        Test data (optional)
 
     Returns:
         list:
@@ -1161,7 +1157,7 @@ def standard_scaler(x_train, x_test, scaler_params=None):
 
     except:
         print(
-            Fore.RED + "ERROR: x_train and x_test must be list[numpyarray] from standard_scaler" + info_standard_scaler)
+            Fore.RED + "ERROR: x_train and x_test must be list[numpyarray] from standard_scaler" + info_standard_scaler + Style.RESET_ALL)
 
 
 def encode_one_hot(y_train, y_test):
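Taken together, the three `standard_scaler` hunks carry the headline change of 2.7.6: `x_test` now defaults to `None`, so the scaler can be applied to the training data alone, and the error message finally resets the colorama styling. A hedged sketch of both call patterns; how the returned list is unpacked (the docstring above only says it contains the standardized data) depends on the installed version:

    # Pre-2.7.6 style: scale the train and test splits together.
    scaled_both = plan.standard_scaler(x_train, x_test)

    # New in 2.7.6: x_test is optional, so training data can be standardized on its own.
    scaled_train_only = plan.standard_scaler(x_train)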
pyerualjetwork-{2.7.4 → 2.7.6}.dist-info/METADATA RENAMED
@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: pyerualjetwork
-Version: 2.7.4
-Summary: Optimized for Vs Code
+Version: 2.7.6
+Summary: Changes in standard_scaler function paramter: x_test default value=None. Now x_test parameter is optional
 Author: Hasan Can Beydili
 Author-email: tchasancan@gmail.com
 Keywords: model evaluation,classifcation,potentiation learning artficial neural networks
pyerualjetwork-2.7.6.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+plan/__init__.py,sha256=gmaz8lnQfl18MbOQwabBUPmShajK5S99jfyY-hQe8tc,502
+plan/plan.py,sha256=W313lck6zriETEMWyeC431ixdZQunOgque9iVPtjLks,52869
+pyerualjetwork-2.7.6.dist-info/METADATA,sha256=6BjTKUIx_jU1kacIXqY9nuDAHRWeAwzZbAO5M152tis,332
+pyerualjetwork-2.7.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+pyerualjetwork-2.7.6.dist-info/top_level.txt,sha256=G0Al3HuNJ88434XneyDtRKAIUaLCizOFYFYNhd7e2OM,5
+pyerualjetwork-2.7.6.dist-info/RECORD,,
pyerualjetwork-2.7.4.dist-info/RECORD REMOVED
@@ -1,6 +0,0 @@
-plan/__init__.py,sha256=gmaz8lnQfl18MbOQwabBUPmShajK5S99jfyY-hQe8tc,502
-plan/plan.py,sha256=t4Vghx1DcMYYzdzzdnd3G_fJobkJvNXVYMKAORJW8is,52772
-pyerualjetwork-2.7.4.dist-info/METADATA,sha256=RJUQ5bKgoOSby1F9unDSKicejquxNMMZFRfdV1meZxY,248
-pyerualjetwork-2.7.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-pyerualjetwork-2.7.4.dist-info/top_level.txt,sha256=G0Al3HuNJ88434XneyDtRKAIUaLCizOFYFYNhd7e2OM,5
-pyerualjetwork-2.7.4.dist-info/RECORD,,