pyerualjetwork 2.4.0__py3-none-any.whl → 2.4.2__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- plan_bi/plan_bi.py +45 -22
- plan_di/plan_di.py +45 -17
- {pyerualjetwork-2.4.0.dist-info → pyerualjetwork-2.4.2.dist-info}/METADATA +1 -1
- pyerualjetwork-2.4.2.dist-info/RECORD +8 -0
- pyerualjetwork-2.4.0.dist-info/RECORD +0 -8
- {pyerualjetwork-2.4.0.dist-info → pyerualjetwork-2.4.2.dist-info}/WHEEL +0 -0
- {pyerualjetwork-2.4.0.dist-info → pyerualjetwork-2.4.2.dist-info}/top_level.txt +0 -0
plan_bi/plan_bi.py
CHANGED
@@ -88,35 +88,33 @@ def fit(
         for i, w in enumerate(W):
             trained_W[i] = trained_W[i] + w
 
-
-
-        fig, ax = plt.subplots(1, 10, figsize=(18, 14))
-
-        try:
-            row = x_train[1].shape[0]
-            col = x_train[1].shape[1]
-        except:
-            print(Fore.RED + 'ERROR: You try train showing but inputs is raveled. x_train inputs to must be reshape for training_show.', infoPLAN + Style.RESET_ALL)
-            return 'e'
+        if show_training == True:
 
-
-
+            fig, ax = plt.subplots(1, 10, figsize=(18, 14))
 
-
-
+            try:
+                row = x_train[1].shape[0]
+                col = x_train[1].shape[1]
+            except:
+                print(Fore.MAGENTA + 'WARNING: You try train showing but inputs is raveled. x_train inputs to should be reshape for training_show.', infoPLAN + Style.RESET_ALL)
+
+                row, col = find_factors(len(x_train[0]))
+
+            for j in range(len(class_count)):
+
+                mat = trained_W[0][j,:].reshape(row, col)
 
+                ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
+                ax[j].set_aspect('equal')
 
-
-
-
-        ax[j].set_xticks([])
-        ax[j].set_yticks([])
-        ax[j].set_title(f'{j+1}. Neuron')
-
+                ax[j].set_xticks([])
+                ax[j].set_yticks([])
+                ax[j].set_title(f'{j+1}. Neuron')
 
+
             plt.show()
 
-
+
         W = weight_identification(len(layers) - 1, len(class_count), neurons, x_train_size)
 
 
@@ -136,6 +134,31 @@ def fit(
         print('\rTraining: ' , index, "/", len(x_train),"\n", end="")
 
 
+    if show_training == 'final':
+
+        fig, ax = plt.subplots(1, 10, figsize=(18, 14))
+
+        try:
+            row = x_train[1].shape[0]
+            col = x_train[1].shape[1]
+        except:
+            print(Fore.MAGENTA + 'WARNING: You try train showing but inputs is raveled. x_train inputs to should be reshape for training_show.', infoPLAN + Style.RESET_ALL)
+
+            row, col = find_factors(len(x_train[0]))
+
+        for j in range(len(class_count)):
+
+            mat = trained_W[0][j,:].reshape(row, col)
+
+            ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
+            ax[j].set_aspect('equal')
+
+            ax[j].set_xticks([])
+            ax[j].set_yticks([])
+            ax[j].set_title(f'{j+1}. Neuron')
+
+
+        plt.show()
 
     EndTime = time.time()
 
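The two hunks above change how plan_bi's fit() visualizes the per-neuron weights. In 2.4.0 the plot setup ran unconditionally and aborted with an ERROR and return 'e' when the x_train samples were raveled (1-D); in 2.4.2 the plotting is gated on show_training, the message is downgraded to a WARNING, and a (row, col) grid is derived from the flattened sample length via the new find_factors helper. Below is a minimal standalone sketch of that fallback logic, assuming NumPy inputs; the names grid_shape and _fallback_grid are illustrative stand-ins, not part of the package's API.

import numpy as np

def _fallback_grid(n):
    # Illustrative stand-in for the find_factors() helper added in this release:
    # split n into its smallest divisor and the matching cofactor.
    for d in range(2, int(n ** 0.5) + 1):
        if n % d == 0:
            return d, n // d
    return None, None

def grid_shape(sample):
    # Mirrors the 2.4.2 fallback: use the sample's own 2-D shape when it has one,
    # otherwise warn and derive a (row, col) pair from the flattened length.
    try:
        return sample.shape[0], sample.shape[1]
    except IndexError:
        print('WARNING: input is raveled; deriving a grid shape from its length.')
        return _fallback_grid(sample.size)

print(grid_shape(np.zeros((28, 28))))   # (28, 28): sample is already 2-D
print(grid_shape(np.zeros(784)))        # (2, 392): raveled, smallest factor first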
plan_di/plan_di.py
CHANGED
@@ -64,7 +64,7 @@ def fit(
     trained_W = [1] * len(W)
     print(Fore.GREEN + "Train Started with 0 ERROR" + Style.RESET_ALL)
     start_time = time.time()
-    y =
+    y = decode_one_hot(y_train)
 
     for index, inp in enumerate(x_train):
         uni_start_time = time.time()
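The only change in this hunk is that fit() now decodes the one-hot y_train into integer class indices once, up front, via decode_one_hot(y_train). The package defines decode_one_hot elsewhere in plan_di.py; the sketch below only illustrates what such a decoder typically does, assuming NumPy one-hot rows, and may differ from the package's own implementation.

import numpy as np

def decode_one_hot_sketch(y_one_hot):
    # Each row is a one-hot vector; argmax recovers the class index per sample.
    return np.argmax(np.asarray(y_one_hot), axis=1)

y_train = np.array([[0, 1, 0],
                    [1, 0, 0],
                    [0, 0, 1]])
print(decode_one_hot_sketch(y_train))   # [1 0 2]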
@@ -89,17 +89,18 @@ def fit(
             trained_W[i] = trained_W[i] + w
 
         if show_training == True:
-
+
             fig, ax = plt.subplots(1, 10, figsize=(18, 14))
 
             try:
                 row = x_train[1].shape[0]
                 col = x_train[1].shape[1]
             except:
-                print(Fore.
-
+                print(Fore.MAGENTA + 'WARNING: You try train showing but inputs is raveled. x_train inputs to should be reshape for training_show.', infoPLAN + Style.RESET_ALL)
+
+                row, col = find_factors(len(x_train[0]))
 
-            for j in range(
+            for j in range(len(class_count)):
 
                 mat = trained_W[0][j,:].reshape(row, col)
 
@@ -111,8 +112,8 @@ def fit(
                 ax[j].set_title(f'{j+1}. Neuron')
 
 
-
-
+            plt.show()
+
         W = weight_identification(
             len(layers) - 1, len(class_count), neurons, x_train_size)
 
@@ -133,29 +134,30 @@ def fit(
         print('\rTraining: ', index, "/", len(x_train), "\n", end="")
 
     if show_training == 'final':
-
+
        fig, ax = plt.subplots(1, 10, figsize=(18, 14))
 
        try:
            row = x_train[1].shape[0]
            col = x_train[1].shape[1]
        except:
-           print(Fore.
-           return 'e'
+           print(Fore.MAGENTA + 'WARNING: You try train showing but inputs is raveled. x_train inputs to should be reshape for training_show.', infoPLAN + Style.RESET_ALL)
 
-
+           row, col = find_factors(len(x_train[0]))
 
+       for j in range(len(class_count)):
+
            mat = trained_W[0][j,:].reshape(row, col)
 
            ax[j].imshow(mat, interpolation='sinc', cmap='viridis')
-           ax[j].set_aspect('equal')
-
-           ax[j].set_xticks([])
-           ax[j].set_yticks([])
-           ax[j].set_title(f'{j+1}. Neuron')
+           ax[j].set_aspect('equal')
 
+           ax[j].set_xticks([])
+           ax[j].set_yticks([])
+           ax[j].set_title(f'{j+1}. Neuron')
 
-
+
+       plt.show()
 
     EndTime = time.time()
 
@@ -180,6 +182,32 @@ def fit(
 
 # FUNCTIONS -----
 
+def prime_factors(n):
+    factors = []
+    divisor = 2
+
+    while divisor <= n:
+        if n % divisor == 0:
+            factors.append(divisor)
+            n //= divisor
+        else:
+            divisor += 1
+
+    return factors
+
+def find_factors(n):
+    factors = prime_factors(n)
+
+    if len(factors) < 2:
+        return None, None
+
+    a = factors[0]
+    b = 1
+    for factor in factors[1:]:
+        b *= factor
+
+    return a, b
+
 def weight_normalization(
     W,
     class_count
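This hunk adds the two module-level helpers that the show_training fallback relies on: prime_factors() decomposes the flattened input length, and find_factors() pairs its smallest prime factor with the product of the remaining factors, or returns (None, None) when the length is prime. The pair is valid for reshape (a * b == n) but not necessarily close to square; a flattened 28x28 image, for example, comes back as 2 x 392 rather than 28 x 28. A short demo follows; the helpers are copied verbatim from the hunk above so the example runs on its own, and the sample values are illustrative only.

def prime_factors(n):
    factors = []
    divisor = 2
    while divisor <= n:
        if n % divisor == 0:
            factors.append(divisor)
            n //= divisor
        else:
            divisor += 1
    return factors

def find_factors(n):
    factors = prime_factors(n)
    if len(factors) < 2:
        return None, None          # n is prime (or 1): no factor pair available
    a = factors[0]                 # smallest prime factor
    b = 1
    for factor in factors[1:]:
        b *= factor                # product of the rest, so a * b == n
    return a, b

print(find_factors(784))   # (2, 392): 784 = 2*2*2*2*7*7
print(find_factors(13))    # (None, None): prime length, so the later reshape would fail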
pyerualjetwork-2.4.2.dist-info/RECORD
ADDED
@@ -0,0 +1,8 @@
+plan_bi/__init__.py,sha256=kHnuGDOKyMHQqeX49ToUUsdZckh9RPuyADhYw0SrmIo,514
+plan_bi/plan_bi.py,sha256=31YsVfHNxIwnn7q6tAA4v2nyfSanfxBM7Wt7Z2ptyhI,52589
+plan_di/__init__.py,sha256=DJzUsYj-tgbeewoGz-K9nfGsKqrRFUxIr_z-NgqySBk,505
+plan_di/plan_di.py,sha256=izbP2BuixM2bUumACPXYE66Wj6LnVbuiH3NXLMrN2EY,50514
+pyerualjetwork-2.4.2.dist-info/METADATA,sha256=Ege0t_siyKePfxhYLb3vhdic5wjxkD3QriA2XbttbiY,309
+pyerualjetwork-2.4.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+pyerualjetwork-2.4.2.dist-info/top_level.txt,sha256=aaXSOcnD62fbXG1x7tw4nV50Qxx9g9zDNLK7OD4BdPE,16
+pyerualjetwork-2.4.2.dist-info/RECORD,,
pyerualjetwork-2.4.0.dist-info/RECORD
DELETED
@@ -1,8 +0,0 @@
-plan_bi/__init__.py,sha256=kHnuGDOKyMHQqeX49ToUUsdZckh9RPuyADhYw0SrmIo,514
-plan_bi/plan_bi.py,sha256=qGk5ukjsDeefW4oEDK4QfJ46rIkLzJitxQ8PhPGBPIA,51828
-plan_di/__init__.py,sha256=DJzUsYj-tgbeewoGz-K9nfGsKqrRFUxIr_z-NgqySBk,505
-plan_di/plan_di.py,sha256=nPEGeNce2UW89HJqsCvvhrKfy9d6MlEG6wpFmaxw6_M,49952
-pyerualjetwork-2.4.0.dist-info/METADATA,sha256=SRy0uGHL-qbnm28e1-tm33cqI2WMxacbqHHGeCwJ-Io,309
-pyerualjetwork-2.4.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-pyerualjetwork-2.4.0.dist-info/top_level.txt,sha256=aaXSOcnD62fbXG1x7tw4nV50Qxx9g9zDNLK7OD4BdPE,16
-pyerualjetwork-2.4.0.dist-info/RECORD,,
{pyerualjetwork-2.4.0.dist-info → pyerualjetwork-2.4.2.dist-info}/WHEEL
File without changes
{pyerualjetwork-2.4.0.dist-info → pyerualjetwork-2.4.2.dist-info}/top_level.txt
File without changes