pyerualjetwork 4.3.0__py3-none-any.whl → 4.3.0.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- {pyerualjetwork-4.3.0.dist-info → pyerualjetwork-4.3.0.2.dist-info}/METADATA +2 -2
- pyerualjetwork-4.3.0.2.dist-info/RECORD +24 -0
- pyerualjetwork-4.3.0.2.dist-info/top_level.txt +1 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/__init__.py +1 -1
- pyerualjetwork-jetstorm/activation_functions.py +291 -0
- pyerualjetwork-jetstorm/activation_functions_cuda.py +290 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/model_operations.py +14 -14
- {pyerualjetwork → pyerualjetwork-jetstorm}/model_operations_cuda.py +16 -17
- {pyerualjetwork → pyerualjetwork-jetstorm}/plan.py +44 -246
- {pyerualjetwork → pyerualjetwork-jetstorm}/plan_cuda.py +37 -256
- {pyerualjetwork → pyerualjetwork-jetstorm}/planeat.py +11 -43
- {pyerualjetwork → pyerualjetwork-jetstorm}/planeat_cuda.py +8 -44
- pyerualjetwork/activation_functions.py +0 -343
- pyerualjetwork/activation_functions_cuda.py +0 -340
- pyerualjetwork-4.3.0.dist-info/RECORD +0 -24
- pyerualjetwork-4.3.0.dist-info/top_level.txt +0 -1
- {pyerualjetwork-4.3.0.dist-info → pyerualjetwork-4.3.0.2.dist-info}/WHEEL +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/data_operations.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/data_operations_cuda.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/help.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/loss_functions.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/loss_functions_cuda.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/memory_operations.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/metrics.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/metrics_cuda.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/ui.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/visualizations.py +0 -0
- {pyerualjetwork → pyerualjetwork-jetstorm}/visualizations_cuda.py +0 -0
{pyerualjetwork-4.3.0.dist-info → pyerualjetwork-4.3.0.2.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pyerualjetwork
-Version: 4.3.0
+Version: 4.3.0.2
 Summary: PyerualJetwork is a machine learning library supported with GPU(CUDA) acceleration written in Python for professionals and researchers including with PLAN algorithm, PLANEAT algorithm (genetic optimization). Also includes data pre-process and memory manegament
 Author: Hasan Can Beydili
 Author-email: tchasancan@gmail.com
@@ -24,7 +24,7 @@ GitHub Page: https://github.com/HCB06/PyerualJetwork
 
 
 pip install pyerualjetwork
-pip install pyerualjetwork==x.x.x-jetstorm
+pip install pyerualjetwork==x.x.x.x-jetstorm (last x shows it is a jetstorm package)
 
 from pyerualjetwork import plan
 from pyerualjetwork import planeat
pyerualjetwork-4.3.0.2.dist-info/RECORD

@@ -0,0 +1,24 @@
+pyerualjetwork-jetstorm/__init__.py,sha256=XYaTEM5PIQBeTV_cF75PSwKCW35tclp7ZywN65JjEm8,650
+pyerualjetwork-jetstorm/activation_functions.py,sha256=2bv7o4EPEFr8cSKq7KI04HhMUyxgBpe8soGvN98Mazg,7740
+pyerualjetwork-jetstorm/activation_functions_cuda.py,sha256=Ua606lsj9LQahfLi6oZMkSyzyPT7ySrvC6qfACNCbL8,7781
+pyerualjetwork-jetstorm/data_operations.py,sha256=Flteouu6rfSo2uHMqBHuzO02dXmbNa-I5qWmUpGTZ5Y,14760
+pyerualjetwork-jetstorm/data_operations_cuda.py,sha256=UpoJoFhIwTU4xg9dVuLAxLAT4CkRaGsxvtJG9j1xrNo,17629
+pyerualjetwork-jetstorm/help.py,sha256=nQ_YbYA2RtuafhuvkreNpX0WWL1I_nzlelwCtvei0_Y,775
+pyerualjetwork-jetstorm/loss_functions.py,sha256=6PyBI232SQRGuFnG3LDGvnv_PUdWzT2_2mUODJiejGI,618
+pyerualjetwork-jetstorm/loss_functions_cuda.py,sha256=C93IZJcrOpT6HMK9x1O4AHJWXYTkN5WZiqdssPbvAPk,617
+pyerualjetwork-jetstorm/memory_operations.py,sha256=I7QiZ--xSyRkFF0wcckPwZV7K9emEvyx5aJ3DiRHZFI,13468
+pyerualjetwork-jetstorm/metrics.py,sha256=q7MkhnZDRbCjFBDDfUgrl8lBYnUT_1ro1LxeBq105pI,6077
+pyerualjetwork-jetstorm/metrics_cuda.py,sha256=73h9GC7XwmnFCVzFEEiPQfF8CwHIz2wsCbxpZrJtYgw,5061
+pyerualjetwork-jetstorm/model_operations.py,sha256=MCSCNYiiICRVZITobtS3ZIWmH5Q9gjyELuH32sAdgg4,12649
+pyerualjetwork-jetstorm/model_operations_cuda.py,sha256=NT01BK5nrDYE7H1x3KnSI8gmx0QTGGB0mP_LqEb1uuU,13157
+pyerualjetwork-jetstorm/plan.py,sha256=Gxv8ii4brTYMzzFZBP-X6kkwc6w6vtTPiMmqVOAqoq8,21972
+pyerualjetwork-jetstorm/plan_cuda.py,sha256=usyL-rWfczko8MQ-tmgMyt7UrKoH7IG3FX3edBiq-vc,22716
+pyerualjetwork-jetstorm/planeat.py,sha256=Lq5R0aMS4UIdZdbUKsKDv5g0WLwYryomR3IQYb8vAa4,37573
+pyerualjetwork-jetstorm/planeat_cuda.py,sha256=dZdKrrhdnoTjcF8Uv23Y4UvlOfizazNyx9v6QsdpIoo,37621
+pyerualjetwork-jetstorm/ui.py,sha256=wu2BhU1k-w3Kcho5Jtq4SEKe68ftaUeRGneUOSCVDjU,575
+pyerualjetwork-jetstorm/visualizations.py,sha256=1SKMZaJ80OD2qHUyMxW1IOv8zwmxzMPxclfbeq1Xr4g,28772
+pyerualjetwork-jetstorm/visualizations_cuda.py,sha256=KbMhfsLlxujy_i3QrwCf734Q-k6d7Zn_7CEbm3gzK9w,29186
+pyerualjetwork-4.3.0.2.dist-info/METADATA,sha256=zB9fu5uTLIt_AkOVe0iBDRL5jfrDhETVj-YpH07W9v0,7546
+pyerualjetwork-4.3.0.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+pyerualjetwork-4.3.0.2.dist-info/top_level.txt,sha256=LZ-gnOoO1Riaytpmz1-hjJJ-jNG8zzq1iwNVTJwa3Ek,24
+pyerualjetwork-4.3.0.2.dist-info/RECORD,,
pyerualjetwork-4.3.0.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+pyerualjetwork-jetstorm
{pyerualjetwork → pyerualjetwork-jetstorm}/__init__.py

@@ -1,4 +1,4 @@
-__version__ = "4.3.0"
+__version__ = "4.3.0.2-jetstorm"
 __update__ = "* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES\n* PyerualJetwork Homepage: https://github.com/HCB06/PyerualJetwork/tree/main\n* PyerualJetwork document: https://github.com/HCB06/PyerualJetwork/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf\n* YouTube tutorials: https://www.youtube.com/@HasanCanBeydili"
 
 def print_version(__version__):
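A quick way to confirm which build is active (a minimal sketch; it assumes the package still imports as pyerualjetwork, as in the usage lines shown in METADATA above):

    # Sketch: check the build string exposed by __init__.py
    from pyerualjetwork import __version__

    print(__version__)  # this wheel sets it to "4.3.0.2-jetstorm"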
pyerualjetwork-jetstorm/activation_functions.py

@@ -0,0 +1,291 @@
+import numpy as np
+from scipy.special import expit, softmax
+import warnings
+
+
+# ACTIVATION FUNCTIONS -----
+
+def all_activations():
+
+    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus', 'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan', 'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian', 'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic', 'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic', 'scaled_cubic', 'sine_offset']
+
+    return activations_list
+
+def spiral_activation(x):
+
+    r = np.sqrt(np.sum(x**2))
+
+    theta = np.arctan2(x[1:], x[:-1])
+
+    spiral_x = r * np.cos(theta + r)
+    spiral_y = r * np.sin(theta + r)
+
+
+    spiral_output = np.concatenate(([spiral_x[0]], spiral_y))
+
+    return spiral_output
+
+
+def Softmax(
+    x  # num: Input data to be transformed using softmax function.
+):
+    """
+    Applies the softmax function to the input data.
+
+    Args:
+        (num): Input data to be transformed using softmax function.
+
+    Returns:
+        (num): Transformed data after applying softmax function.
+    """
+
+    return softmax(x)
+
+
+def Sigmoid(
+    x  # num: Input data to be transformed using sigmoid function.
+):
+    """
+    Applies the sigmoid function to the input data.
+
+    Args:
+        (num): Input data to be transformed using sigmoid function.
+
+    Returns:
+        (num): Transformed data after applying sigmoid function.
+    """
+    return expit(x)
+
+
+def Relu(
+    x  # num: Input data to be transformed using ReLU function.
+):
+    """
+    Applies the Rectified Linear Unit (ReLU) function to the input data.
+
+    Args:
+        (num): Input data to be transformed using ReLU function.
+
+    Returns:
+        (num): Transformed data after applying ReLU function.
+    """
+
+    return np.maximum(0, x)
+
+
+def tanh(x):
+    return np.tanh(x)
+
+def swish(x):
+    return x * (1 / (1 + np.exp(-x)))
+
+def sin_plus(x):
+    return (np.sin(x) + 1) / 2
+
+def modular_circular_activation(x, period=2*np.pi):
+    return np.mod(x, period) / period
+
+def tanh_circular_activation(x):
+    return (np.tanh(x) + 1) / 2
+
+def leaky_relu(x, alpha=0.01):
+    return np.where(x > 0, x, alpha * x)
+
+def softplus(x):
+    return np.log(1 + np.exp(x))
+
+def elu(x, alpha=1.0):
+    return np.where(x > 0, x, alpha * (np.exp(x) - 1))
+
+def gelu(x):
+    return 0.5 * x * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))
+
+def selu(x, lambda_=1.0507, alpha=1.6733):
+    return lambda_ * np.where(x > 0, x, alpha * (np.exp(x) - 1))
+
+def sinakt(x):
+    return np.sin(x) + np.cos(x)
+
+def p_squared(x, alpha=1.0, beta=0.0):
+    return alpha * x**2 + beta * x
+
+def sglu(x, alpha=1.0):
+    return softmax(alpha * x) * x
+
+# 4. Double Leaky ReLU (DLReLU)
+def dlrelu(x):
+    return np.maximum(0.01 * x, x) + np.minimum(0.01 * x, 0.1 * x)
+
+# 5. Exponential Sigmoid (ExSig)
+def exsig(x):
+    return 1 / (1 + np.exp(-x**2))
+
+# 6. Adaptive Cosine Activation (ACos)
+def acos(x, alpha=1.0, beta=0.0):
+    return np.cos(alpha * x + beta)
+
+# 7. Gaussian-like Activation (GLA)
+def gla(x, alpha=1.0, mu=0.0):
+    return np.exp(-alpha * (x - mu)**2)
+
+# 8. Swish ReLU (SReLU)
+def srelu(x):
+    return x * (1 / (1 + np.exp(-x))) + np.maximum(0, x)
+
+# 9. Quadratic Exponential Linear Unit (QELU)
+def qelu(x):
+    return x**2 * np.exp(x) - 1
+
+# 10. Inverse Square Root Activation (ISRA)
+def isra(x):
+    return x / np.sqrt(np.abs(x) + 1)
+
+def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
+    return np.sin(alpha * x) * np.cos(beta * x) * np.sin(gamma * x)
+
+def arctan(x):
+    return np.arctan(x)
+
+def bent_identity(x):
+    return (np.sqrt(x**2 + 1) - 1) / 2 + x
+
+def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
+
+    n_features = x.shape[0]
+
+    circular_output = np.zeros_like(x)
+
+    for i in range(n_features):
+
+        r = np.sqrt(np.sum(x**2))
+        theta = 2 * np.pi * (i / n_features) + shift
+
+        circular_x = r * np.cos(theta + frequency * r) * scale
+        circular_y = r * np.sin(theta + frequency * r) * scale
+
+        if i % 2 == 0:
+            circular_output[i] = circular_x
+        else:
+            circular_output[i] = circular_y
+
+    return circular_output
+
+def sech(x):
+    return 2 / (np.exp(x) + np.exp(-x))
+
+def softsign(x):
+    return x / (1 + np.abs(x))
+
+def pwl(x, alpha=0.5, beta=1.5):
+    return np.where(x <= 0, alpha * x, beta * x)
+
+def cubic(x):
+    return x**3
+
+def gaussian(x, alpha=1.0, mu=0.0):
+    return np.exp(-alpha * (x - mu)**2)
+
+def sine(x, alpha=1.0):
+    return np.sin(alpha * x)
+
+def tanh_square(x):
+    return np.tanh(x)**2
+
+def mod_sigmoid(x, alpha=1.0, beta=0.0):
+    return 1 / (1 + np.exp(-alpha * x + beta))
+
+def quartic(x):
+    return x**4
+
+def square_quartic(x):
+    return (x**2)**2
+
+def cubic_quadratic(x):
+    return x**3 * (x**2)
+
+def exp_cubic(x):
+    return np.exp(x**3)
+
+def sine_square(x):
+    return np.sin(x)**2
+
+def logarithmic(x):
+    return np.log(x**2 + 1)
+
+def scaled_cubic(x, alpha=1.0):
+    return alpha * x**3
+
+def sine_offset(x, beta=0.0):
+    return np.sin(x + beta)
+
+
+def apply_activation(Input, activation_list):
+    """
+    Applies activation functions for inputs
+
+    Args:
+        Input (numpy.ndarray):
+        activation_list (list):
+    """
+    origin_input = np.copy(Input)
+
+    activation_functions = {
+        'sigmoid': Sigmoid,
+        'swish': swish,
+        'mod_circular': modular_circular_activation,
+        'tanh_circular': tanh_circular_activation,
+        'leaky_relu': leaky_relu,
+        'relu': Relu,
+        'softplus': softplus,
+        'elu': elu,
+        'gelu': gelu,
+        'selu': selu,
+        'tanh': tanh,
+        'sinakt': sinakt,
+        'p_squared': p_squared,
+        'sglu': lambda x: sglu(x, alpha=1.0),
+        'dlrelu': dlrelu,
+        'exsig': exsig,
+        'sin_plus': sin_plus,
+        'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
+        'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
+        'srelu': srelu,
+        'qelu': qelu,
+        'isra': isra,
+        'waveakt': waveakt,
+        'arctan': arctan,
+        'bent_identity': bent_identity,
+        'sech': sech,
+        'softsign': softsign,
+        'pwl': pwl,
+        'cubic': cubic,
+        'gaussian': gaussian,
+        'sine': sine,
+        'tanh_square': tanh_square,
+        'mod_sigmoid': mod_sigmoid,
+        'linear': lambda x: x,
+        'quartic': quartic,
+        'square_quartic': square_quartic,
+        'cubic_quadratic': cubic_quadratic,
+        'exp_cubic': exp_cubic,
+        'sine_square': sine_square,
+        'logarithmic': logarithmic,
+        'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
+        'sine_offset': lambda x: sine_offset(x, 1.0),
+        'spiral': spiral_activation,
+        'circular': circular_activation
+    }
+
+    try:
+        valid_activations = [act for act in activation_list if act in activation_functions]
+
+        activation_outputs = np.stack([activation_functions[act](origin_input)
+                                       for act in valid_activations])
+
+        result = Input + np.sum(activation_outputs, axis=0)
+
+        return result
+
+    except Exception as e:
+        warnings.warn(f"Error in activation processing: {str(e)}", RuntimeWarning)
+        return Input
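To make the intent of this new module concrete, here is a minimal usage sketch (the array size and the chosen activation names are illustrative only, and the module is imported as a local file rather than through the installed package):

    import numpy as np
    from activation_functions import all_activations, apply_activation

    x = np.random.randn(16).astype(np.float32)   # arbitrary 1-D input vector

    print(all_activations()[:5])                 # a few of the supported names

    # apply_activation adds the sum of the requested activations to the raw
    # input: result = Input + sum_i f_i(Input)
    out = apply_activation(x, ['relu', 'tanh', 'sine'])
    print(out.shape)                             # same shape as the input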
pyerualjetwork-jetstorm/activation_functions_cuda.py

@@ -0,0 +1,290 @@
+import cupy as cp
+from scipy.special import expit, softmax
+import warnings
+
+# ACTIVATION FUNCTIONS ----
+
+def all_activations():
+
+    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus', 'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan', 'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian', 'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic', 'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic', 'scaled_cubic', 'sine_offset']
+
+    return activations_list
+
+def spiral_activation(x):
+
+    r = cp.sqrt(cp.sum(x**2))
+
+    theta = cp.arctan2(x[1:], x[:-1])
+
+    spiral_x = r * cp.cos(theta + r)
+    spiral_y = r * cp.sin(theta + r)
+
+
+    spiral_output = cp.concatenate([cp.array([spiral_x[0]]), spiral_y])
+
+    return spiral_output
+
+
+def Softmax(
+    x  # num: Input data to be transformed using softmax function.
+):
+    """
+    Applies the softmax function to the input data.
+
+    Args:
+        (num): Input data to be transformed using softmax function.
+
+    Returns:
+        (num): Transformed data after applying softmax function.
+    """
+
+    return cp.array(softmax(x.get()))
+
+
+def Sigmoid(
+    x  # num: Input data to be transformed using sigmoid function.
+):
+    """
+    Applies the sigmoid function to the input data.
+
+    Args:
+        (num): Input data to be transformed using sigmoid function.
+
+    Returns:
+        (num): Transformed data after applying sigmoid function.
+    """
+    return expit(x)
+
+
+def Relu(
+    x  # num: Input data to be transformed using ReLU function.
+):
+    """
+    Applies the Rectified Linear Unit (ReLU) function to the input data.
+
+    Args:
+        (num): Input data to be transformed using ReLU function.
+
+    Returns:
+        (num): Transformed data after applying ReLU function.
+    """
+
+    return cp.maximum(0, x)
+
+
+def tanh(x):
+    return cp.tanh(x)
+
+def swish(x):
+    return x * (1 / (1 + cp.exp(-x)))
+
+def sin_plus(x):
+    return (cp.sin(x) + 1) / 2
+
+def modular_circular_activation(x, period=2*cp.pi):
+    return cp.mod(x, period) / period
+
+def tanh_circular_activation(x):
+    return (cp.tanh(x) + 1) / 2
+
+def leaky_relu(x, alpha=0.01):
+    return cp.where(x > 0, x, alpha * x)
+
+def softplus(x):
+    return cp.log(1 + cp.exp(x))
+
+def elu(x, alpha=1.0):
+    return cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
+
+def gelu(x):
+    return 0.5 * x * (1 + cp.tanh(cp.sqrt(2 / cp.pi) * (x + 0.044715 * cp.power(x, 3))))
+
+def selu(x, lambda_=1.0507, alpha=1.6733):
+    return lambda_ * cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
+
+def sinakt(x):
+    return cp.sin(x) + cp.cos(x)
+
+def p_squared(x, alpha=1.0, beta=0.0):
+    return alpha * x**2 + beta * x
+
+def sglu(x, alpha=1.0):
+    return cp.array(softmax(alpha * x.get())) * x
+
+# 4. Double Leaky ReLU (DLReLU)
+def dlrelu(x):
+    return cp.maximum(0.01 * x, x) + cp.minimum(0.01 * x, 0.1 * x)
+
+# 5. Exponential Sigmoid (ExSig)
+def exsig(x):
+    return 1 / (1 + cp.exp(-x**2))
+
+# 6. Adaptive Cosine Activation (ACos)
+def acos(x, alpha=1.0, beta=0.0):
+    return cp.cos(alpha * x + beta)
+
+# 7. Gaussian-like Activation (GLA)
+def gla(x, alpha=1.0, mu=0.0):
+    return cp.exp(-alpha * (x - mu)**2)
+
+# 8. Swish ReLU (SReLU)
+def srelu(x):
+    return x * (1 / (1 + cp.exp(-x))) + cp.maximum(0, x)
+
+# 9. Quadratic Exponential Linear Unit (QELU)
+def qelu(x):
+    return x**2 * cp.exp(x) - 1
+
+# 10. Inverse Square Root Activation (ISRA)
+def isra(x):
+    return x / cp.sqrt(cp.abs(x) + 1)
+
+def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
+    return cp.sin(alpha * x) * cp.cos(beta * x) * cp.sin(gamma * x)
+
+def arctan(x):
+    return cp.arctan(x)
+
+def bent_identity(x):
+    return (cp.sqrt(x**2 + 1) - 1) / 2 + x
+
+def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
+
+    n_features = x.shape[0]
+
+    circular_output = cp.zeros_like(x)
+
+    for i in range(n_features):
+
+        r = cp.sqrt(cp.sum(x**2))
+        theta = 2 * cp.pi * (i / n_features) + shift
+
+        circular_x = r * cp.cos(theta + frequency * r) * scale
+        circular_y = r * cp.sin(theta + frequency * r) * scale
+
+        if i % 2 == 0:
+            circular_output[i] = circular_x
+        else:
+            circular_output[i] = circular_y
+
+    return circular_output
+
+def sech(x):
+    return 2 / (cp.exp(x) + cp.exp(-x))
+
+def softsign(x):
+    return x / (1 + cp.abs(x))
+
+def pwl(x, alpha=0.5, beta=1.5):
+    return cp.where(x <= 0, alpha * x, beta * x)
+
+def cubic(x):
+    return x**3
+
+def gaussian(x, alpha=1.0, mu=0.0):
+    return cp.exp(-alpha * (x - mu)**2)
+
+def sine(x, alpha=1.0):
+    return cp.sin(alpha * x)
+
+def tanh_square(x):
+    return cp.tanh(x)**2
+
+def mod_sigmoid(x, alpha=1.0, beta=0.0):
+    return 1 / (1 + cp.exp(-alpha * x + beta))
+
+def quartic(x):
+    return x**4
+
+def square_quartic(x):
+    return (x**2)**2
+
+def cubic_quadratic(x):
+    return x**3 * (x**2)
+
+def exp_cubic(x):
+    return cp.exp(x**3)
+
+def sine_square(x):
+    return cp.sin(x)**2
+
+def logarithmic(x):
+    return cp.log(x**2 + 1)
+
+def scaled_cubic(x, alpha=1.0):
+    return alpha * x**3
+
+def sine_offset(x, beta=0.0):
+    return cp.sin(x + beta)
+
+
+def apply_activation(Input, activation_list):
+    """
+    Applies activation functions for inputs
+
+    Args:
+        Input (cupy.ndarray):
+        activation_list (list):
+    """
+    origin_input = cp.copy(Input)
+
+    activation_functions = {
+        'sigmoid': Sigmoid,
+        'swish': swish,
+        'mod_circular': modular_circular_activation,
+        'tanh_circular': tanh_circular_activation,
+        'leaky_relu': leaky_relu,
+        'relu': Relu,
+        'softplus': softplus,
+        'elu': elu,
+        'gelu': gelu,
+        'selu': selu,
+        'tanh': tanh,
+        'sinakt': sinakt,
+        'p_squared': p_squared,
+        'sglu': lambda x: sglu(x, alpha=1.0),
+        'dlrelu': dlrelu,
+        'exsig': exsig,
+        'sin_plus': sin_plus,
+        'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
+        'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
+        'srelu': srelu,
+        'qelu': qelu,
+        'isra': isra,
+        'waveakt': waveakt,
+        'arctan': arctan,
+        'bent_identity': bent_identity,
+        'sech': sech,
+        'softsign': softsign,
+        'pwl': pwl,
+        'cubic': cubic,
+        'gaussian': gaussian,
+        'sine': sine,
+        'tanh_square': tanh_square,
+        'mod_sigmoid': mod_sigmoid,
+        'linear': lambda x: x,
+        'quartic': quartic,
+        'square_quartic': square_quartic,
+        'cubic_quadratic': cubic_quadratic,
+        'exp_cubic': exp_cubic,
+        'sine_square': sine_square,
+        'logarithmic': logarithmic,
+        'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
+        'sine_offset': lambda x: sine_offset(x, 1.0),
+        'spiral': spiral_activation,
+        'circular': circular_activation
+    }
+
+    try:
+        valid_activations = [act for act in activation_list if act in activation_functions]
+
+        activation_outputs = cp.stack([activation_functions[act](origin_input)
+                                       for act in valid_activations])
+
+        result = Input + cp.sum(activation_outputs, axis=0)
+
+        return result
+
+    except Exception as e:
+        warnings.warn(f"Error in activation processing: {str(e)}", RuntimeWarning)
+        return Input
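One implementation detail worth flagging in the CUDA variant: Softmax and sglu copy the array to the host with x.get() so that scipy.special.softmax (which operates on NumPy arrays) can be applied, then wrap the result back into a CuPy array. A device-only softmax is also possible; the snippet below is an illustrative sketch, not code shipped in this wheel:

    import cupy as cp

    def softmax_gpu(x, axis=-1):
        # Numerically stable softmax computed entirely on the GPU,
        # avoiding the CuPy -> NumPy -> CuPy round trip used above.
        z = x - cp.max(x, axis=axis, keepdims=True)
        e = cp.exp(z)
        return e / cp.sum(e, axis=axis, keepdims=True)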
{pyerualjetwork → pyerualjetwork-jetstorm}/model_operations.py

@@ -258,7 +258,7 @@ def predict_model_ssd(Input, model_name, model_path='', dtype=np.float32):
         ndarray: Output from the model.
     """
 
-    from .
+    from .activation_functions import apply_activation
     from .data_operations import standard_scaler
 
     model = load_model(model_name, model_path)
@@ -269,12 +269,12 @@ def predict_model_ssd(Input, model_name, model_path='', dtype=np.float32):
 
     Input = standard_scaler(None, Input, scaler_params, dtype=dtype)
 
-
-
-    neural_layer = neural_layer.ravel()
+    Input = np.array(Input, dtype=dtype, copy=False)
+    Input = Input.ravel()
 
     try:
-
+        Input = apply_activation(Input, activation_potentiation)
+        neural_layer = Input @ W.T
         return neural_layer
     except:
         print(Fore.RED + "ERROR: Unexpected Output or wrong model parameters from: predict_model_ssd." + Style.RESET_ALL)
@@ -304,7 +304,7 @@ def reverse_predict_model_ssd(output, model_name, model_path='', dtype=np.float32):
     W = model[get_weights()]
 
     try:
-        Input =
+        Input = W.T @ output
         return Input
     except:
         print(Fore.RED + "ERROR: Unexpected Output or wrong model parameters from: reverse_predict_model_ssd." + Style.RESET_ALL)
@@ -334,18 +334,18 @@ def predict_model_ram(Input, W, scaler_params=None, activation_potentiation=['li
         ndarray: Output from the model.
     """
 
-    from data_operations import standard_scaler
-    from
+    from .data_operations import standard_scaler
+    from .activation_functions import apply_activation
 
     Input = standard_scaler(None, Input, scaler_params, dtype=dtype)
 
+    Input = np.array(Input, dtype=dtype, copy=False)
+    Input = Input.ravel()
+
     try:
 
-
-        neural_layer =
-        neural_layer = neural_layer.ravel()
-
-        neural_layer = feed_forward(neural_layer, np.copy(W.astype(dtype, copy=False)), is_training=False, Class='?', activation_potentiation=activation_potentiation)
+        Input = apply_activation(Input, activation_potentiation)
+        neural_layer = Input @ W.T
 
         return neural_layer
 
@@ -371,7 +371,7 @@ def reverse_predict_model_ram(output, W, dtype=np.float32):
     """
 
     try:
-        Input =
+        Input = W.T @ output
         return Input
 
     except:
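Taken together, these hunks replace the old feed_forward-based path with a direct matrix product: the input is flattened, passed through apply_activation, and multiplied by W.T, while the reverse helpers project an output back through W.T. Below is a standalone sketch of that shape of computation, with made-up sizes (not the package's own API):

    import numpy as np

    rng = np.random.default_rng(0)
    W = rng.standard_normal((3, 8)).astype(np.float32)   # (classes, features)
    x = rng.standard_normal(8).astype(np.float32)        # one flattened sample

    # forward pass, as in predict_model_ssd / predict_model_ram:
    activated = x + np.maximum(0, x)   # mirrors apply_activation(x, ['relu']): input plus summed activations
    scores = activated @ W.T           # shape (3,)

    # reverse pass, as in reverse_predict_model_ssd / reverse_predict_model_ram:
    reconstructed = W.T @ scores       # shape (8,)
    print(scores.shape, reconstructed.shape)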