pyerualjetwork 5.0.3__py3-none-any.whl → 5.1__py3-none-any.whl
This diff compares the contents of two package versions as published to a supported public registry. It is provided for informational purposes only.
- pyerualjetwork/__init__.py +3 -3
- pyerualjetwork/activation_functions_cpu.py +3 -71
- pyerualjetwork/activation_functions_cuda.py +2 -74
- pyerualjetwork/data_operations_cpu.py +3 -5
- pyerualjetwork/ene_cuda.py +5 -5
- pyerualjetwork/issue_solver.py +1 -1
- pyerualjetwork/model_operations_cpu.py +123 -55
- pyerualjetwork/model_operations_cuda.py +120 -51
- pyerualjetwork/neu_cpu.py +169 -52
- pyerualjetwork/neu_cuda.py +170 -55
- {pyerualjetwork-5.0.3.dist-info → pyerualjetwork-5.1.dist-info}/METADATA +3 -3
- pyerualjetwork-5.1.dist-info/RECORD +26 -0
- pyerualjetwork-5.0.3.dist-info/RECORD +0 -26
- {pyerualjetwork-5.0.3.dist-info → pyerualjetwork-5.1.dist-info}/WHEEL +0 -0
- {pyerualjetwork-5.0.3.dist-info → pyerualjetwork-5.1.dist-info}/top_level.txt +0 -0
pyerualjetwork/__init__.py
CHANGED
@@ -2,8 +2,8 @@
 
 PyerualJetwork
 ==============
-PyereualJetwork is a large
-It features PLAN, MLP
+PyereualJetwork is a large wide GPU-accelerated machine learning library in Python designed for professionals and researchers.
+It features PLAN, MLP Deep Learning and PTNN training, as well as ENE (Eugenic NeuroEvolution) for genetic optimization,
 which can also be applied to genetic algorithms or Reinforcement Learning (RL) problems.
 The library includes functions for data pre-processing, visualizations, model saving and loading, prediction and evaluation,
 training, and both detailed and simplified memory management.
@@ -42,7 +42,7 @@ PyerualJetwork document: https://github.com/HCB06/PyerualJetwork/blob/main/Welco
 - Contact: tchasancan@gmail.com
 """
 
-__version__ = "5.
+__version__ = "5.1"
 __update__ = """* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES
 * PyerualJetwork Homepage: https://github.com/HCB06/PyerualJetwork/tree/main
 * PyerualJetwork document: https://github.com/HCB06/PyerualJetwork/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf
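A quick way to confirm that an installed copy matches this release is to read the two module-level strings touched above (a minimal sketch; it relies only on the __version__ and __update__ assignments shown in this diff):

import pyerualjetwork

# __version__ is assigned "5.1" in pyerualjetwork/__init__.py as of this release.
assert pyerualjetwork.__version__ == "5.1"

# __update__ holds the changelog and documentation links defined right below it.
print(pyerualjetwork.__update__)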
pyerualjetwork/activation_functions_cpu.py
CHANGED
@@ -6,9 +6,9 @@ import warnings
 # ACTIVATION FUNCTIONS -----
 
 def all_activations():
-
-    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', '
-
+    # softplus, cubic, square_quartic, cubic_quadratic, scaled_cubic out.
+    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'gelu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'acos', 'isra', 'waveakt', 'arctan', 'bent_identity', 'softsign', 'pwl', 'sine', 'tanh_square', 'sine_square', 'logarithmic', 'sine_offset']
+    # suggest: tanh, arctan, selu, elu, dlrelu, isra, bent_identity
     return activations_list
 
 def spiral_activation(x):
@@ -76,9 +76,6 @@ def Relu(
 def tanh(x):
     return np.tanh(x)
 
-def swish(x):
-    return x * (1 / (1 + np.exp(-x)))
-
 def sin_plus(x):
     return (np.sin(x) + 1) / 2
 
@@ -91,18 +88,9 @@ def tanh_circular_activation(x):
 def leaky_relu(x, alpha=0.01):
     return np.where(x > 0, x, alpha * x)
 
-def softplus(x):
-    return np.log(1 + np.exp(x))
-
-def elu(x, alpha=1.0):
-    return np.where(x > 0, x, alpha * (np.exp(x) - 1))
-
 def gelu(x):
     return 0.5 * x * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))
 
-def selu(x, lambda_=1.0507, alpha=1.6733):
-    return lambda_ * np.where(x > 0, x, alpha * (np.exp(x) - 1))
-
 def sinakt(x):
     return np.sin(x) + np.cos(x)
 
@@ -116,26 +104,10 @@ def sglu(x, alpha=1.0):
 def dlrelu(x):
     return np.maximum(0.01 * x, x) + np.minimum(0.01 * x, 0.1 * x)
 
-# 5. Exponential Sigmoid (ExSig)
-def exsig(x):
-    return 1 / (1 + np.exp(-x**2))
-
 # 6. Adaptive Cosine Activation (ACos)
 def acos(x, alpha=1.0, beta=0.0):
     return np.cos(alpha * x + beta)
 
-# 7. Gaussian-like Activation (GLA)
-def gla(x, alpha=1.0, mu=0.0):
-    return np.exp(-alpha * (x - mu)**2)
-
-# 8. Swish ReLU (SReLU)
-def srelu(x):
-    return x * (1 / (1 + np.exp(-x))) + np.maximum(0, x)
-
-# 9. Quadratic Exponential Linear Unit (QELU)
-def qelu(x):
-    return x**2 * np.exp(x) - 1
-
 # 10. Inverse Square Root Activation (ISRA)
 def isra(x):
     return x / np.sqrt(np.abs(x) + 1)
@@ -170,48 +142,24 @@ def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
 
     return circular_output
 
-def sech(x):
-    return 2 / (np.exp(x) + np.exp(-x))
-
 def softsign(x):
     return x / (1 + np.abs(x))
 
 def pwl(x, alpha=0.5, beta=1.5):
     return np.where(x <= 0, alpha * x, beta * x)
 
-def cubic(x):
-    return x**3
-
-def gaussian(x, alpha=1.0, mu=0.0):
-    return np.exp(-alpha * (x - mu)**2)
-
 def sine(x, alpha=1.0):
     return np.sin(alpha * x)
 
 def tanh_square(x):
     return np.tanh(x)**2
 
-def mod_sigmoid(x, alpha=1.0, beta=0.0):
-    return 1 / (1 + np.exp(-alpha * x + beta))
-
-def quartic(x):
-    return x**4
-
-def square_quartic(x):
-    return (x**2)**2
-
-def cubic_quadratic(x):
-    return x**3 * (x**2)
-
 def sine_square(x):
     return np.sin(x)**2
 
 def logarithmic(x):
     return np.log(x**2 + 1)
 
-def scaled_cubic(x, alpha=1.0):
-    return alpha * x**3
-
 def sine_offset(x, beta=0.0):
     return np.sin(x + beta)
 
@@ -228,45 +176,29 @@ def apply_activation(Input, activation_list):
 
     activation_functions = {
         'sigmoid': Sigmoid,
-        'swish': swish,
         'mod_circular': modular_circular_activation,
         'tanh_circular': tanh_circular_activation,
         'leaky_relu': leaky_relu,
         'relu': Relu,
-        'softplus': softplus,
-        'elu': elu,
         'gelu': gelu,
-        'selu': selu,
         'tanh': tanh,
         'sinakt': sinakt,
         'p_squared': p_squared,
         'sglu': lambda x: sglu(x, alpha=1.0),
         'dlrelu': dlrelu,
-        'exsig': exsig,
         'sin_plus': sin_plus,
         'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
-        'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
-        'srelu': srelu,
-        'qelu': qelu,
         'isra': isra,
         'waveakt': waveakt,
         'arctan': arctan,
         'bent_identity': bent_identity,
-        'sech': sech,
         'softsign': softsign,
         'pwl': pwl,
-        'cubic': cubic,
-        'gaussian': gaussian,
         'sine': sine,
         'tanh_square': tanh_square,
-        'mod_sigmoid': mod_sigmoid,
         'linear': lambda x: x,
-        'quartic': quartic,
-        'square_quartic': square_quartic,
-        'cubic_quadratic': cubic_quadratic,
         'sine_square': sine_square,
         'logarithmic': logarithmic,
-        'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
         'sine_offset': lambda x: sine_offset(x, 1.0),
         'spiral': spiral_activation,
         'circular': circular_activation
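To illustrate the pruned CPU activation API, here is a minimal sketch; the import path pyerualjetwork.activation_functions_cpu and the call below are assumptions based only on the signatures shown in this diff, not on the project's documentation:

import numpy as np
from pyerualjetwork.activation_functions_cpu import all_activations, apply_activation

x = np.linspace(-2.0, 2.0, 5)

# all_activations() now lists 27 names; 'swish', 'softplus', 'elu', 'selu', 'sech',
# 'gaussian' and the other pruned names above are gone from both the list and the dict.
print(all_activations())

# apply_activation dispatches each requested name through the dict shown above.
out = apply_activation(x, ['tanh', 'gelu'])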
pyerualjetwork/activation_functions_cuda.py
CHANGED
@@ -6,7 +6,7 @@ import warnings
 
 def all_activations():
 
-    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', '
+    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'gelu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'acos', 'isra', 'waveakt', 'arctan', 'bent_identity', 'softsign', 'pwl', 'sine', 'tanh_square', 'sine_square', 'logarithmic', 'sine_offset']
 
     return activations_list
 
@@ -75,9 +75,6 @@ def Relu(
 def tanh(x):
     return cp.tanh(x)
 
-def swish(x):
-    return x * (1 / (1 + cp.exp(-x)))
-
 def sin_plus(x):
     return (cp.sin(x) + 1) / 2
 
@@ -90,18 +87,6 @@ def tanh_circular_activation(x):
 def leaky_relu(x, alpha=0.01):
     return cp.where(x > 0, x, alpha * x)
 
-def softplus(x):
-    return cp.log(1 + cp.exp(x))
-
-def elu(x, alpha=1.0):
-    return cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
-
-def gelu(x):
-    return 0.5 * x * (1 + cp.tanh(cp.sqrt(2 / cp.pi) * (x + 0.044715 * cp.power(x, 3))))
-
-def selu(x, lambda_=1.0507, alpha=1.6733):
-    return lambda_ * cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
-
 def sinakt(x):
     return cp.sin(x) + cp.cos(x)
 
@@ -115,26 +100,10 @@ def sglu(x, alpha=1.0):
 def dlrelu(x):
     return cp.maximum(0.01 * x, x) + cp.minimum(0.01 * x, 0.1 * x)
 
-# 5. Exponential Sigmoid (ExSig)
-def exsig(x):
-    return 1 / (1 + cp.exp(-x**2))
-
 # 6. Adaptive Cosine Activation (ACos)
 def acos(x, alpha=1.0, beta=0.0):
     return cp.cos(alpha * x + beta)
 
-# 7. Gaussian-like Activation (GLA)
-def gla(x, alpha=1.0, mu=0.0):
-    return cp.exp(-alpha * (x - mu)**2)
-
-# 8. Swish ReLU (SReLU)
-def srelu(x):
-    return x * (1 / (1 + cp.exp(-x))) + cp.maximum(0, x)
-
-# 9. Quadratic Exponential Linear Unit (QELU)
-def qelu(x):
-    return x**2 * cp.exp(x) - 1
-
 # 10. Inverse Square Root Activation (ISRA)
 def isra(x):
     return x / cp.sqrt(cp.abs(x) + 1)
@@ -169,48 +138,24 @@ def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
 
     return circular_output
 
-def sech(x):
-    return 2 / (cp.exp(x) + cp.exp(-x))
-
 def softsign(x):
     return x / (1 + cp.abs(x))
 
 def pwl(x, alpha=0.5, beta=1.5):
     return cp.where(x <= 0, alpha * x, beta * x)
 
-def cubic(x):
-    return x**3
-
-def gaussian(x, alpha=1.0, mu=0.0):
-    return cp.exp(-alpha * (x - mu)**2)
-
 def sine(x, alpha=1.0):
     return cp.sin(alpha * x)
 
 def tanh_square(x):
     return cp.tanh(x)**2
 
-def mod_sigmoid(x, alpha=1.0, beta=0.0):
-    return 1 / (1 + cp.exp(-alpha * x + beta))
-
-def quartic(x):
-    return x**4
-
-def square_quartic(x):
-    return (x**2)**2
-
-def cubic_quadratic(x):
-    return x**3 * (x**2)
-
 def sine_square(x):
     return cp.sin(x)**2
 
 def logarithmic(x):
     return cp.log(x**2 + 1)
 
-def scaled_cubic(x, alpha=1.0):
-    return alpha * x**3
-
 def sine_offset(x, beta=0.0):
     return cp.sin(x + beta)
 
@@ -227,45 +172,28 @@ def apply_activation(Input, activation_list):
 
     activation_functions = {
         'sigmoid': Sigmoid,
-        'swish': swish,
         'mod_circular': modular_circular_activation,
         'tanh_circular': tanh_circular_activation,
         'leaky_relu': leaky_relu,
         'relu': Relu,
-        'softplus': softplus,
-        'elu': elu,
-        'gelu': gelu,
-        'selu': selu,
         'tanh': tanh,
         'sinakt': sinakt,
         'p_squared': p_squared,
         'sglu': lambda x: sglu(x, alpha=1.0),
         'dlrelu': dlrelu,
-        'exsig': exsig,
         'sin_plus': sin_plus,
         'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
-        'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
-        'srelu': srelu,
-        'qelu': qelu,
         'isra': isra,
         'waveakt': waveakt,
         'arctan': arctan,
         'bent_identity': bent_identity,
-        'sech': sech,
         'softsign': softsign,
         'pwl': pwl,
-        'cubic': cubic,
-        'gaussian': gaussian,
         'sine': sine,
         'tanh_square': tanh_square,
-        'mod_sigmoid': mod_sigmoid,
         'linear': lambda x: x,
-        'quartic': quartic,
-        'square_quartic': square_quartic,
-        'cubic_quadratic': cubic_quadratic,
         'sine_square': sine_square,
         'logarithmic': logarithmic,
-        'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
         'sine_offset': lambda x: sine_offset(x, 1.0),
         'spiral': spiral_activation,
         'circular': circular_activation
@@ -282,4 +210,4 @@ def apply_activation(Input, activation_list):
 
     except Exception as e:
         warnings.warn(f"Error in activation processing: {str(e)}", RuntimeWarning)
-        return Input
+        return Input
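The CUDA module is pruned the same way and additionally drops gelu from its dispatch dict. A minimal GPU-side sketch under the same assumptions (CuPy installed, import path taken from the file name):

import cupy as cp
from pyerualjetwork.activation_functions_cuda import apply_activation

x = cp.linspace(-2.0, 2.0, 5)

# Per the except branch shown above, a failure inside apply_activation only emits
# a RuntimeWarning and returns the input unchanged.
out = apply_activation(x, ['tanh'])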
pyerualjetwork/data_operations_cpu.py
CHANGED
@@ -89,7 +89,7 @@ def decode_one_hot(encoded_data):
     else: return np.argmax(encoded_data, axis=1)
 
 
-def split(X, y, test_size, random_state=42
+def split(X, y, test_size, random_state=42):
     """
     Splits the given X (features) and y (labels) data into training and testing subsets.
 
@@ -101,9 +101,7 @@ def split(X, y, test_size, random_state=42, dtype=np.float32):
         test_size (float or int): Proportion or number of samples for the test subset.
 
         random_state (int or None): Seed for random state. Default: 42.
-
-        dtype (numpy.dtype): Data type for the arrays. np.float32 by default. Example: np.float64 or np.float16. [fp32 for balanced devices, fp64 for strong devices, fp16 for weak devices: not reccomended!]
-
+
     Returns:
         tuple: x_train, x_test, y_train, y_test as ordered training and testing data subsets.
     """
@@ -288,7 +286,7 @@ def synthetic_augmentation(x, y, dtype=np.float32):
     Args:
         x_train: numpy array format
 
-        y_train: numpy array format
+        y_train: numpy array format (one-hot encoded)
 
         dtype (numpy.dtype): Data type for the arrays. cp.float32 by default. Example: cp.float64 or cp.float16.
 
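The dtype parameter has been dropped from split's signature and docstring. A minimal usage sketch against the new 5.1 signature, with hypothetical data and the import path assumed from the file name:

import numpy as np
from pyerualjetwork.data_operations_cpu import split

X = np.random.rand(100, 4)                      # 100 samples, 4 features
y = np.eye(3)[np.random.randint(0, 3, 100)]     # one-hot labels, 3 classes

# New signature: split(X, y, test_size, random_state=42) -- no dtype argument.
x_train, x_test, y_train, y_test = split(X, y, test_size=0.2, random_state=42)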
pyerualjetwork/ene_cuda.py
CHANGED
@@ -902,7 +902,7 @@ def mutation(weight,
 
     if potential_activation_delete_prob > activation_delete_prob and len(activations) > 1:
 
-        random_index = random.randint(0, len(activations))
+        random_index = random.randint(0, len(activations)-1)
         activations.pop(random_index)
 
 
@@ -910,7 +910,7 @@ def mutation(weight,
 
     try:
 
-        random_index_all_act = int(random.uniform(0, len(all_acts)))
+        random_index_all_act = int(random.uniform(0, len(all_acts)-1))
        activations.append(all_acts[random_index_all_act])
 
     except:
@@ -919,12 +919,12 @@ def mutation(weight,
        activations = []
 
        activations.append(activation)
-       activations.append(all_acts[int(random.uniform(0, len(all_acts)))])
+       activations.append(all_acts[int(random.uniform(0, len(all_acts)-1))])
 
     if potential_activation_change_prob > activation_change_prob:
 
-        random_index_all_act = int(random.uniform(0, len(all_acts)))
-        random_index_genom_act = int(random.uniform(0, len(activations)))
+        random_index_all_act = int(random.uniform(0, len(all_acts)-1))
+        random_index_genom_act = int(random.uniform(0, len(activations)-1))
 
        activations[random_index_genom_act] = all_acts[random_index_all_act]
 
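All three hunks above patch the same off-by-one: random.randint(a, b) includes b, so random.randint(0, len(seq)) can return len(seq) and index past the end of the list, and int(random.uniform(0, len(seq))) can do the same at the endpoint. A standalone sketch of the failure mode and the 5.1-style fix, using only the standard library:

import random

acts = ['tanh', 'relu', 'sigmoid']

# Pre-5.1 pattern: randint's upper bound is inclusive, so index 3 is possible here
# and acts[3] would raise IndexError roughly one time in four.
# idx = random.randint(0, len(acts))

# 5.1 pattern: cap the upper bound at len(acts) - 1 ...
idx = random.randint(0, len(acts) - 1)

# ... or, equivalently, use randrange, whose upper bound is exclusive.
idx = random.randrange(len(acts))

acts.pop(idx)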