pyerualjetwork 2.5.5__py3-none-any.whl → 2.5.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
plan/plan.py CHANGED
@@ -1,4 +1,20 @@
1
1
  # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Tue Jun 18 23:32:16 2024
4
+
5
+ @author: hasan
6
+ """
7
+
8
+ # optic
9
+
10
+ # -*- coding: utf-8 -*-
11
+ """
12
+ Created on Fri Jun 21 05:21:35 2024
13
+
14
+ @author: hasan
15
+ """
16
+
17
+ # -*- coding: utf-8 -*-
2
18
 
3
19
 
4
20
 
@@ -23,8 +39,13 @@ import seaborn as sns
23
39
 
24
40
  def fit(
25
41
  x_train: List[Union[int, float]],
26
- y_train: List[Union[int, float, str]], # At least two.. and one hot encoded
42
+ y_train: List[Union[int, float]], # At least two.. and one hot encoded
27
43
  show_training,
44
+ show_count= None,
45
+ val_count= None,
46
+ val= None,
47
+ x_val= None,
48
+ y_val= None,
28
49
  activation_potential=None # (float): Input activation potential (optional)
29
50
  ) -> str:
30
51
 
@@ -33,26 +54,50 @@ def fit(
33
54
 
34
55
  Args:
35
56
  x_train (list[num]): List of input data.
36
- y_train (list[num]): List of y_train. (one hot encoded)
37
- show_training (bool, str): True, None or 'final'
38
- activation_potential (float): Input activation potential (optional)
57
+ y_train (list[num]): List of target labels. (one hot encoded)
58
+ show_training (bool, str): True, None or 'final'
59
+ show_count (None, int): How many learning steps in total will be displayed in a single figure? (Adjust according to your hardware) Default: 10 (optional)
60
+ val_count (None, int): After how many examples learned will an accuracy test be performed? Default: 0.1 (%10) (optional)
61
+ x_val (list[num]): List of validation data. (optional) Default: x_train
62
+ y_val (list[num]): List of validation target labels. (one hot encoded) (optional) Default: y_train
63
+ activation_potential (float): Input activation potential (for binary injection) (optional) in range: -1, 1
39
64
  Returns:
40
65
  list([num]): (Weight matrices list, train_predictions list, Train_acc).
41
66
  error handled ?: Process status ('e')
42
67
  """
43
-
44
- if activation_potential != None:
45
-
46
- if activation_potential < 0 or activation_potential > 1:
47
-
48
- print(Fore.RED + "ERROR101: ACTIVATION potential value must be in range 0-1. from: fit",infoPLAN)
49
- return 'e'
68
+
50
69
 
51
70
  if len(x_train) != len(y_train):
71
+
52
72
  print(Fore.RED + "ERROR301: x_train list and y_train list must be same length. from: fit", infoPLAN)
53
73
  return 'e'
74
+
75
+ if x_val == None and y_val == None:
76
+
77
+ x_val = x_train
78
+ y_val = y_train
79
+
80
+ if val == True and val_count == None:
81
+
82
+ val_count = 0.1
83
+ val_count_copy = val_count
54
84
 
85
+ if val == True:
86
+
87
+ val_count = int(len(x_train) * val_count)
88
+
89
+ val_count_copy = val_count
90
+
91
+ if show_training == True or show_training == 'final' and show_count == None:
92
+
93
+ show_count = 10
94
+
95
+ if show_training == True or show_training == 'final':
96
+
97
+ row, col = shape_control(x_train)
98
+
55
99
  class_count = set()
100
+
56
101
  for sublist in y_train:
57
102
 
58
103
  class_count.add(tuple(sublist))
@@ -63,6 +108,7 @@ def fit(
63
108
 
64
109
  neurons = [len(class_count), len(class_count)]
65
110
  layers = ['fex']
111
+ val_list = [None]
66
112
 
67
113
  x_train[0] = np.array(x_train[0])
68
114
  x_train[0] = x_train[0].ravel()
@@ -77,6 +123,9 @@ def fit(
77
123
  y = decode_one_hot(y_train)
78
124
 
79
125
  for index, inp in enumerate(x_train):
126
+
127
+ progress = index / len(x_train) * 100
128
+
80
129
  uni_start_time = time.time()
81
130
  inp = np.array(inp)
82
131
  inp = inp.ravel()
@@ -98,43 +147,78 @@ def fit(
98
147
  for i, w in enumerate(W):
99
148
  trained_W[i] = trained_W[i] + w
100
149
 
101
- if show_training == True or show_training == 'final':
102
-
103
- try:
104
- row = x_train[1].shape[0]
105
- col = x_train[1].shape[1]
150
+
151
+ if val == True:
152
+
153
+ if index == val_count:
154
+
155
+ val_count += val_count_copy
156
+
157
+ layers.append('cat')
158
+ trained_W.append(np.eye(len(class_count)))
159
+
160
+ validation_model = evaluate(x_val, y_val, None, trained_W)
161
+
162
+ layers.pop()
163
+ trained_W.pop()
164
+
165
+ val_acc = validation_model[get_acc()]
166
+
167
+ val_list.append(val_acc)
168
+
169
+
170
+ plt.plot(val_list, linestyle='-',
171
+ color='r')
172
+
173
+ progress_status = f"{progress:.1f}"
174
+ plt.title('Validation accuracy graph. Amount of data learned by the model: % ' + progress_status)
175
+ plt.xlabel('Learning Progress')
176
+ plt.ylabel('Accuracy')
177
+ plt.ylim(0, 1)
178
+ plt.draw()
179
+ plt.pause(0.1)
180
+
181
+ if show_training == True:
182
+
183
+ if index %show_count == 0:
184
+
185
+
186
+ if index != 0:
187
+ plt.close(fig)
188
+
189
+ fig, ax = plt.subplots(1, len(class_count), figsize=(18, 14))
106
190
 
107
- except:
191
+
108
192
 
109
- print(Fore.MAGENTA + 'WARNING: You trying show_training but inputs is raveled. x_train inputs should be reshaped for show_training.' + Style.RESET_ALL)
110
-
111
- try:
112
- row, col = find_numbers(len(x_train[0]))
113
-
114
- except:
115
-
116
- print(Fore.RED + 'ERROR: Change show_training to None. Input length cannot be reshaped', infoPLAN + Style.RESET_ALL)
117
- return 'e'
118
-
119
-
120
- if show_training == True:
193
+ for j in range(len(class_count)):
194
+
195
+
196
+ if row != 0:
197
+
198
+ mat = trained_W[0][j,:].reshape(row, col)
199
+ suptitle_info = 'Neurons Learning Progress: % '
200
+ title_info = f'{j+1}. Neuron'
201
+
202
+ else:
203
+
204
+ mat = trained_W[0]
205
+ suptitle_info = 'Weight Learning Progress: % '
206
+ j = 0
207
+ title_info = 'Weight Matrix Of Fex Layer'
121
208
 
122
- fig, ax = plt.subplots(1, len(class_count), figsize=(18, 14))
209
+ ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
210
+ ax[j].set_aspect('equal')
211
+
212
+ ax[j].set_xticks([])
213
+ ax[j].set_yticks([])
214
+ ax[j].set_title(title_info)
215
+
216
+ progress_status = f"{progress:.1f}"
217
+ fig.suptitle(suptitle_info + progress_status)
218
+ plt.draw()
219
+ plt.pause(0.1)
220
+
123
221
 
124
- for j in range(len(class_count)):
125
-
126
- mat = trained_W[0][j,:].reshape(row, col)
127
-
128
- ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
129
- ax[j].set_aspect('equal')
130
-
131
- ax[j].set_xticks([])
132
- ax[j].set_yticks([])
133
- ax[j].set_title(f'{j+1}. Neuron')
134
-
135
-
136
- plt.show()
137
-
138
222
  W = weight_identification(
139
223
  len(layers) - 1, len(class_count), neurons, x_train_size)
140
224
 
@@ -156,32 +240,24 @@ def fit(
156
240
 
157
241
  if show_training == 'final':
158
242
 
159
- fig, ax = plt.subplots(1, len(class_count), figsize=(18, 14))
160
-
161
- try:
162
-
163
- row = x_train[1].shape[0]
164
- col = x_train[1].shape[1]
165
-
166
- except:
167
-
168
- print(Fore.MAGENTA + 'WARNING: You try train showing but inputs is raveled. x_train inputs should be reshaped for training_show.', infoPLAN + Style.RESET_ALL)
243
+ fig, ax = plt.subplots(1, len(class_count), figsize=(18, 14))
169
244
 
170
- row, col = find_numbers(len(x_train[0]))
245
+ for j in range(len(class_count)):
171
246
 
172
- for j in range(len(class_count)):
247
+ mat = trained_W[0][j,:].reshape(row, col)
173
248
 
174
- mat = trained_W[0][j,:].reshape(row, col)
249
+ ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
250
+ ax[j].set_aspect('equal')
251
+
252
+ ax[j].set_xticks([])
253
+ ax[j].set_yticks([])
254
+ ax[j].set_title(f'{j+1}. Neuron')
175
255
 
176
- ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
177
- ax[j].set_aspect('equal')
178
-
179
- ax[j].set_xticks([])
180
- ax[j].set_yticks([])
181
- ax[j].set_title(f'{j+1}. Neuron')
256
+ progress_status = f"{progress:.1f}"
257
+ fig.suptitle('Neurons Learning Progress: % ' + progress_status)
258
+ plt.draw()
259
+ plt.pause(0.1)
182
260
 
183
-
184
- plt.show()
185
261
 
186
262
  EndTime = time.time()
187
263
 
@@ -206,6 +282,26 @@ def fit(
206
282
 
207
283
  # FUNCTIONS -----
208
284
 
285
+ def shape_control(x_train):
286
+
287
+ try:
288
+ row = x_train[1].shape[0]
289
+ col = x_train[1].shape[1]
290
+
291
+ except:
292
+
293
+ print(Fore.MAGENTA + 'WARNING: You trying show_training but inputs is raveled. x_train inputs should be reshaped for show_training.' + Style.RESET_ALL)
294
+
295
+ try:
296
+ row, col = find_numbers(len(x_train[0]))
297
+
298
+ except:
299
+
300
+ print(Fore.RED + 'ERROR: Change show_training to None. Input length cannot be reshaped' + Style.RESET_ALL)
301
+ return [0, 0]
302
+
303
+ return row, col
304
+
209
305
  def find_numbers(n):
210
306
  if n <= 1:
211
307
  raise ValueError("Parameter 'n' must be greater than 1.")
@@ -1589,3 +1685,4 @@ def get_preds():
1589
1685
  def get_acc():
1590
1686
 
1591
1687
  return 2
1688
+
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: pyerualjetwork
3
- Version: 2.5.5
4
- Summary: standard_scaler function improved. Changes possibily nan values to 0
3
+ Version: 2.5.7
4
+ Summary: New optional parameters added for fit function, x_val, y_val show_count, val_count, val. For more information please read user document
5
5
  Author: Hasan Can Beydili
6
6
  Author-email: tchasancan@gmail.com
7
7
 Keywords: model evaluation,classification,pruning learning artificial neural networks
@@ -0,0 +1,6 @@
1
+ plan/__init__.py,sha256=gmaz8lnQfl18MbOQwabBUPmShajK5S99jfyY-hQe8tc,502
2
+ plan/plan.py,sha256=PEmoyvZidNWiNQTanaTbFBP9dlrAD0ZqU6-83wJXPV0,55726
3
+ pyerualjetwork-2.5.7.dist-info/METADATA,sha256=BjDzrhxLKzSODDlgEr8IFeRoypwj8tmBaN3GyNot3Ww,357
4
+ pyerualjetwork-2.5.7.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
5
+ pyerualjetwork-2.5.7.dist-info/top_level.txt,sha256=G0Al3HuNJ88434XneyDtRKAIUaLCizOFYFYNhd7e2OM,5
6
+ pyerualjetwork-2.5.7.dist-info/RECORD,,
@@ -1,6 +0,0 @@
1
- plan/__init__.py,sha256=gmaz8lnQfl18MbOQwabBUPmShajK5S99jfyY-hQe8tc,502
2
- plan/plan.py,sha256=Dt_PXXMLx0J4e-gqyRAN4YqPZA2UX0YlCaKrd5Rqd5Q,53491
3
- pyerualjetwork-2.5.5.dist-info/METADATA,sha256=9320HpScX-TASWWJNe3uruu1-8kOZe-pBAQKDXkF6ag,290
4
- pyerualjetwork-2.5.5.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
5
- pyerualjetwork-2.5.5.dist-info/top_level.txt,sha256=G0Al3HuNJ88434XneyDtRKAIUaLCizOFYFYNhd7e2OM,5
6
- pyerualjetwork-2.5.5.dist-info/RECORD,,