pyerualjetwork-4.3.8.dev15-py3-none-any.whl → pyerualjetwork-4.3.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. pyerualjetwork/__init__.py +1 -1
  2. pyerualjetwork/activation_functions.py +2 -2
  3. pyerualjetwork/activation_functions_cuda.py +63 -114
  4. pyerualjetwork/data_operations_cuda.py +1 -1
  5. pyerualjetwork/fitness_functions.py +72 -0
  6. pyerualjetwork/fitness_functions_cuda.py +85 -0
  7. pyerualjetwork/model_operations.py +14 -14
  8. pyerualjetwork/model_operations_cuda.py +16 -17
  9. pyerualjetwork/plan.py +159 -382
  10. pyerualjetwork/plan_cuda.py +149 -387
  11. pyerualjetwork/planeat.py +24 -54
  12. pyerualjetwork/planeat_cuda.py +11 -47
  13. pyerualjetwork/visualizations.py +33 -30
  14. pyerualjetwork/visualizations_cuda.py +22 -24
  15. {pyerualjetwork-4.3.8.dev15.dist-info → pyerualjetwork-4.3.9.dist-info}/METADATA +3 -19
  16. pyerualjetwork-4.3.9.dist-info/RECORD +24 -0
  17. pyerualjetwork-4.3.9.dist-info/top_level.txt +1 -0
  18. pyerualjetwork/loss_functions.py +0 -21
  19. pyerualjetwork/loss_functions_cuda.py +0 -21
  20. pyerualjetwork-4.3.8.dev15.dist-info/RECORD +0 -45
  21. pyerualjetwork-4.3.8.dev15.dist-info/top_level.txt +0 -2
  22. pyerualjetwork_afterburner/__init__.py +0 -11
  23. pyerualjetwork_afterburner/activation_functions.py +0 -290
  24. pyerualjetwork_afterburner/activation_functions_cuda.py +0 -289
  25. pyerualjetwork_afterburner/data_operations.py +0 -406
  26. pyerualjetwork_afterburner/data_operations_cuda.py +0 -461
  27. pyerualjetwork_afterburner/help.py +0 -17
  28. pyerualjetwork_afterburner/loss_functions.py +0 -21
  29. pyerualjetwork_afterburner/loss_functions_cuda.py +0 -21
  30. pyerualjetwork_afterburner/memory_operations.py +0 -298
  31. pyerualjetwork_afterburner/metrics.py +0 -190
  32. pyerualjetwork_afterburner/metrics_cuda.py +0 -163
  33. pyerualjetwork_afterburner/model_operations.py +0 -408
  34. pyerualjetwork_afterburner/model_operations_cuda.py +0 -420
  35. pyerualjetwork_afterburner/parallel.py +0 -118
  36. pyerualjetwork_afterburner/plan.py +0 -432
  37. pyerualjetwork_afterburner/plan_cuda.py +0 -441
  38. pyerualjetwork_afterburner/planeat.py +0 -793
  39. pyerualjetwork_afterburner/planeat_cuda.py +0 -752
  40. pyerualjetwork_afterburner/ui.py +0 -22
  41. pyerualjetwork_afterburner/visualizations.py +0 -823
  42. pyerualjetwork_afterburner/visualizations_cuda.py +0 -825
  43. {pyerualjetwork-4.3.8.dev15.dist-info → pyerualjetwork-4.3.9.dist-info}/WHEEL +0 -0
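
The headline change: 4.3.9 drops the parallel pyerualjetwork_afterburner package entirely (see the top_level.txt hunk below), removes the loss_functions modules, and adds fitness_functions. A minimal post-upgrade sanity check, assuming a standard pip install:

    import importlib.util

    # pyerualjetwork remains importable in both versions
    assert importlib.util.find_spec("pyerualjetwork") is not None
    # the afterburner tree is gone from 4.3.9's top-level packages
    assert importlib.util.find_spec("pyerualjetwork_afterburner") is None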

pyerualjetwork-4.3.8.dev15.dist-info/RECORD
@@ -1,45 +0,0 @@
- pyerualjetwork/__init__.py,sha256=nExIY8tR-NFtscBgb0Qnv8sKakbbE2V5SL2nES0aZTI,644
- pyerualjetwork/activation_functions.py,sha256=AR91fQV2W2rc-Qb4Yp7b8ucYpGjwyQUewO-M-lyEMs8,7729
- pyerualjetwork/activation_functions_cuda.py,sha256=ztIw6rMR4t1289_TPIGYwE6qarl_YbSOGj5Ep3rUMqs,11803
- pyerualjetwork/data_operations.py,sha256=Flteouu6rfSo2uHMqBHuzO02dXmbNa-I5qWmUpGTZ5Y,14760
- pyerualjetwork/data_operations_cuda.py,sha256=UpoJoFhIwTU4xg9dVuLAxLAT4CkRaGsxvtJG9j1xrNo,17629
- pyerualjetwork/help.py,sha256=nQ_YbYA2RtuafhuvkreNpX0WWL1I_nzlelwCtvei0_Y,775
- pyerualjetwork/loss_functions.py,sha256=6PyBI232SQRGuFnG3LDGvnv_PUdWzT2_2mUODJiejGI,618
- pyerualjetwork/loss_functions_cuda.py,sha256=C93IZJcrOpT6HMK9x1O4AHJWXYTkN5WZiqdssPbvAPk,617
- pyerualjetwork/memory_operations.py,sha256=I7QiZ--xSyRkFF0wcckPwZV7K9emEvyx5aJ3DiRHZFI,13468
- pyerualjetwork/metrics.py,sha256=q7MkhnZDRbCjFBDDfUgrl8lBYnUT_1ro1LxeBq105pI,6077
- pyerualjetwork/metrics_cuda.py,sha256=73h9GC7XwmnFCVzFEEiPQfF8CwHIz2wsCbxpZrJtYgw,5061
- pyerualjetwork/model_operations.py,sha256=RKqnh7-MByFosxqme4q4jC1lOndX26O-OVXYV6ZxoEE,12965
- pyerualjetwork/model_operations_cuda.py,sha256=XnKKq54ZLaqCm-NaJ6d8IToACKcKg2Ttq6moowVRRWo,13365
- pyerualjetwork/plan.py,sha256=ApMQC46_I8qtMqO4lLYLme--SGcMRg-GRo1-gSb3A3I,31894
- pyerualjetwork/plan_cuda.py,sha256=ifXiyZs8y3N8b6BbM-T8fMrvzAal-zHqcxFlqwnfwII,33256
- pyerualjetwork/planeat.py,sha256=uRX-hDywGOai6hHhbYrmcRodNZOg4WCQeJWZbdMlZs8,39470
- pyerualjetwork/planeat_cuda.py,sha256=QNHCQLkR0MNFqyN2iHAtC7cbf8qZiD3p_54YH3lnMFA,39529
- pyerualjetwork/ui.py,sha256=wu2BhU1k-w3Kcho5Jtq4SEKe68ftaUeRGneUOSCVDjU,575
- pyerualjetwork/visualizations.py,sha256=VL00sX2DZz83F__PyEJH9s1LizuXpOBzWjnoSjMJIJ0,28770
- pyerualjetwork/visualizations_cuda.py,sha256=KbMhfsLlxujy_i3QrwCf734Q-k6d7Zn_7CEbm3gzK9w,29186
- pyerualjetwork_afterburner/__init__.py,sha256=A5YqLtQ9TUhfBHlkSSpTb6pMtkk0FJQOUwAIJu0LMIc,656
- pyerualjetwork_afterburner/activation_functions.py,sha256=bKf00lsuuLJNO-4vVp4OqBi4zJ-qZ8L3v-vl52notkY,7721
- pyerualjetwork_afterburner/activation_functions_cuda.py,sha256=5y1Ti3GDfDteQDCUmODwe7tAyDAUlDTKmIikChQ8d6g,7772
- pyerualjetwork_afterburner/data_operations.py,sha256=Flteouu6rfSo2uHMqBHuzO02dXmbNa-I5qWmUpGTZ5Y,14760
- pyerualjetwork_afterburner/data_operations_cuda.py,sha256=ZcjmLXE1-HVwedextYdJZ1rgrns1OfSekzFpr1a9m6o,17625
- pyerualjetwork_afterburner/help.py,sha256=nQ_YbYA2RtuafhuvkreNpX0WWL1I_nzlelwCtvei0_Y,775
- pyerualjetwork_afterburner/loss_functions.py,sha256=6PyBI232SQRGuFnG3LDGvnv_PUdWzT2_2mUODJiejGI,618
- pyerualjetwork_afterburner/loss_functions_cuda.py,sha256=C93IZJcrOpT6HMK9x1O4AHJWXYTkN5WZiqdssPbvAPk,617
- pyerualjetwork_afterburner/memory_operations.py,sha256=I7QiZ--xSyRkFF0wcckPwZV7K9emEvyx5aJ3DiRHZFI,13468
- pyerualjetwork_afterburner/metrics.py,sha256=q7MkhnZDRbCjFBDDfUgrl8lBYnUT_1ro1LxeBq105pI,6077
- pyerualjetwork_afterburner/metrics_cuda.py,sha256=73h9GC7XwmnFCVzFEEiPQfF8CwHIz2wsCbxpZrJtYgw,5061
- pyerualjetwork_afterburner/model_operations.py,sha256=MCSCNYiiICRVZITobtS3ZIWmH5Q9gjyELuH32sAdgg4,12649
- pyerualjetwork_afterburner/model_operations_cuda.py,sha256=NT01BK5nrDYE7H1x3KnSI8gmx0QTGGB0mP_LqEb1uuU,13157
- pyerualjetwork_afterburner/parallel.py,sha256=TqTSqyxnq7lA9IYE-lCxqUO_GVdAYL34n4K67CMSNKI,5946
- pyerualjetwork_afterburner/plan.py,sha256=EOXngujG7DQRf3cooFigKB7heQsEoK96JtrcKivT_pE,22449
- pyerualjetwork_afterburner/plan_cuda.py,sha256=fg5YunEuBE7sK6q9paP_yAGONr9x0e19oF0J0DucejM,23380
- pyerualjetwork_afterburner/planeat.py,sha256=Lq5R0aMS4UIdZdbUKsKDv5g0WLwYryomR3IQYb8vAa4,37573
- pyerualjetwork_afterburner/planeat_cuda.py,sha256=icjtJcZnA1DcE93mKpdQOp5nMGSqycTbLOym7yITXwY,35299
- pyerualjetwork_afterburner/ui.py,sha256=wu2BhU1k-w3Kcho5Jtq4SEKe68ftaUeRGneUOSCVDjU,575
- pyerualjetwork_afterburner/visualizations.py,sha256=1SKMZaJ80OD2qHUyMxW1IOv8zwmxzMPxclfbeq1Xr4g,28772
- pyerualjetwork_afterburner/visualizations_cuda.py,sha256=KbMhfsLlxujy_i3QrwCf734Q-k6d7Zn_7CEbm3gzK9w,29186
- pyerualjetwork-4.3.8.dev15.dist-info/METADATA,sha256=6YSlEsLUfwZz6a3hxScGBeRZj5M2WIZbrPPh6BA-dNA,8385
- pyerualjetwork-4.3.8.dev15.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- pyerualjetwork-4.3.8.dev15.dist-info/top_level.txt,sha256=uK64ge08QQoPuXM3aiRVPgiQQtl8Fxm2-HieIut5Lwo,42
- pyerualjetwork-4.3.8.dev15.dist-info/RECORD,,
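
Each RECORD entry has the form path,sha256=<digest>,<size>, where the digest is the urlsafe-base64 SHA-256 of the file with trailing '=' padding stripped (per the wheel spec, PEP 376/427). A sketch of reproducing one field locally, given a file from the unpacked wheel:

    import base64, hashlib

    def record_digest(path):
        # sha256 of the file, urlsafe-base64 encoded, '=' padding removed
        raw = hashlib.sha256(open(path, "rb").read()).digest()
        return "sha256=" + base64.urlsafe_b64encode(raw).rstrip(b"=").decode()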

pyerualjetwork-4.3.8.dev15.dist-info/top_level.txt
@@ -1,2 +0,0 @@
- pyerualjetwork
- pyerualjetwork_afterburner

pyerualjetwork_afterburner/__init__.py
@@ -1,11 +0,0 @@
- __version__ = "4.3.8dev15-afterburner"
- __update__ = "* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES\n* PyerualJetwork Homepage: https://github.com/HCB06/PyerualJetwork/tree/main\n* PyerualJetwork document: https://github.com/HCB06/PyerualJetwork/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf\n* YouTube tutorials: https://www.youtube.com/@HasanCanBeydili"
-
- def print_version(__version__):
-     print(f"PyerualJetwork Version {__version__}" + '\n')
-
- def print_update_notes(__update__):
-     print(f"Notes:\n{__update__}")
-
- print_version(__version__)
- print_update_notes(__update__)
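
The removed module printed a version banner and update notes as an import side effect. A sketch of the equivalent check against the surviving package, assuming the main pyerualjetwork/__init__.py exposes __version__ the same way its afterburner copy did:

    import pyerualjetwork  # the pattern above suggests this also prints a banner on import

    print(pyerualjetwork.__version__)  # expected to report 4.3.9 after the upgrade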

pyerualjetwork_afterburner/activation_functions.py
@@ -1,290 +0,0 @@
- import numpy as np
- from scipy.special import expit, softmax
- import warnings
-
-
- # ACTIVATION FUNCTIONS -----
-
- def all_activations():
-
-     activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus', 'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan', 'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian', 'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic', 'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic', 'scaled_cubic', 'sine_offset']
-
-     return activations_list
-
- def spiral_activation(x):
-
-     r = np.sqrt(np.sum(x**2))
-
-     theta = np.arctan2(x[1:], x[:-1])
-
-     spiral_x = r * np.cos(theta + r)
-     spiral_y = r * np.sin(theta + r)
-
-
-     spiral_output = np.concatenate(([spiral_x[0]], spiral_y))
-
-     return spiral_output
-
-
- def Softmax(
-     x # num: Input data to be transformed using softmax function.
- ):
-     """
-     Applies the softmax function to the input data.
-
-     Args:
-         (num): Input data to be transformed using softmax function.
-
-     Returns:
-         (num): Transformed data after applying softmax function.
-     """
-
-     return softmax(x)
-
-
- def Sigmoid(
-     x # num: Input data to be transformed using sigmoid function.
- ):
-     """
-     Applies the sigmoid function to the input data.
-
-     Args:
-         (num): Input data to be transformed using sigmoid function.
-
-     Returns:
-         (num): Transformed data after applying sigmoid function.
-     """
-     return expit(x)
-
-
- def Relu(
-     x # num: Input data to be transformed using ReLU function.
- ):
-     """
-     Applies the Rectified Linear Unit (ReLU) function to the input data.
-
-     Args:
-         (num): Input data to be transformed using ReLU function.
-
-     Returns:
-         (num): Transformed data after applying ReLU function.
-     """
-
-     return np.maximum(0, x)
-
-
- def tanh(x):
-     return np.tanh(x)
-
- def swish(x):
-     return x * (1 / (1 + np.exp(-x)))
-
- def sin_plus(x):
-     return (np.sin(x) + 1) / 2
-
- def modular_circular_activation(x, period=2*np.pi):
-     return np.mod(x, period) / period
-
- def tanh_circular_activation(x):
-     return (np.tanh(x) + 1) / 2
-
- def leaky_relu(x, alpha=0.01):
-     return np.where(x > 0, x, alpha * x)
-
- def softplus(x):
-     return np.log(1 + np.exp(x))
-
- def elu(x, alpha=1.0):
-     return np.where(x > 0, x, alpha * (np.exp(x) - 1))
-
- def gelu(x):
-     return 0.5 * x * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))
-
- def selu(x, lambda_=1.0507, alpha=1.6733):
-     return lambda_ * np.where(x > 0, x, alpha * (np.exp(x) - 1))
-
- def sinakt(x):
-     return np.sin(x) + np.cos(x)
-
- def p_squared(x, alpha=1.0, beta=0.0):
-     return alpha * x**2 + beta * x
-
- def sglu(x, alpha=1.0):
-     return softmax(alpha * x) * x
-
- # 4. Double Leaky ReLU (DLReLU)
- def dlrelu(x):
-     return np.maximum(0.01 * x, x) + np.minimum(0.01 * x, 0.1 * x)
-
- # 5. Exponential Sigmoid (ExSig)
- def exsig(x):
-     return 1 / (1 + np.exp(-x**2))
-
- # 6. Adaptive Cosine Activation (ACos)
- def acos(x, alpha=1.0, beta=0.0):
-     return np.cos(alpha * x + beta)
-
- # 7. Gaussian-like Activation (GLA)
- def gla(x, alpha=1.0, mu=0.0):
-     return np.exp(-alpha * (x - mu)**2)
-
- # 8. Swish ReLU (SReLU)
- def srelu(x):
-     return x * (1 / (1 + np.exp(-x))) + np.maximum(0, x)
-
- # 9. Quadratic Exponential Linear Unit (QELU)
- def qelu(x):
-     return x**2 * np.exp(x) - 1
-
- # 10. Inverse Square Root Activation (ISRA)
- def isra(x):
-     return x / np.sqrt(np.abs(x) + 1)
-
- def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
-     return np.sin(alpha * x) * np.cos(beta * x) * np.sin(gamma * x)
-
- def arctan(x):
-     return np.arctan(x)
-
- def bent_identity(x):
-     return (np.sqrt(x**2 + 1) - 1) / 2 + x
-
- def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
-
-     n_features = x.shape[0]
-
-     circular_output = np.zeros_like(x)
-
-     for i in range(n_features):
-
-         r = np.sqrt(np.sum(x**2))
-         theta = 2 * np.pi * (i / n_features) + shift
-
-         circular_x = r * np.cos(theta + frequency * r) * scale
-         circular_y = r * np.sin(theta + frequency * r) * scale
-
-         if i % 2 == 0:
-             circular_output[i] = circular_x
-         else:
-             circular_output[i] = circular_y
-
-     return circular_output
-
- def sech(x):
-     return 2 / (np.exp(x) + np.exp(-x))
-
- def softsign(x):
-     return x / (1 + np.abs(x))
-
- def pwl(x, alpha=0.5, beta=1.5):
-     return np.where(x <= 0, alpha * x, beta * x)
-
- def cubic(x):
-     return x**3
-
- def gaussian(x, alpha=1.0, mu=0.0):
-     return np.exp(-alpha * (x - mu)**2)
-
- def sine(x, alpha=1.0):
-     return np.sin(alpha * x)
-
- def tanh_square(x):
-     return np.tanh(x)**2
-
- def mod_sigmoid(x, alpha=1.0, beta=0.0):
-     return 1 / (1 + np.exp(-alpha * x + beta))
-
- def quartic(x):
-     return x**4
-
- def square_quartic(x):
-     return (x**2)**2
-
- def cubic_quadratic(x):
-     return x**3 * (x**2)
-
- def exp_cubic(x):
-     return np.exp(x**3)
-
- def sine_square(x):
-     return np.sin(x)**2
-
- def logarithmic(x):
-     return np.log(x**2 + 1)
-
- def scaled_cubic(x, alpha=1.0):
-     return alpha * x**3
-
- def sine_offset(x, beta=0.0):
-     return np.sin(x + beta)
-
-
- def apply_activation(Input, activation_list):
-     """
-     Applies activation functions for inputs
-
-     Args:
-         Input (numpy.ndarray):
-         activation_list (list):
-     """
-     origin_input = np.copy(Input)
-
-     activation_functions = {
-         'sigmoid': Sigmoid,
-         'swish': swish,
-         'mod_circular': modular_circular_activation,
-         'tanh_circular': tanh_circular_activation,
-         'leaky_relu': leaky_relu,
-         'relu': Relu,
-         'softplus': softplus,
-         'elu': elu,
-         'gelu': gelu,
-         'selu': selu,
-         'tanh': tanh,
-         'sinakt': sinakt,
-         'p_squared': p_squared,
-         'sglu': lambda x: sglu(x, alpha=1.0),
-         'dlrelu': dlrelu,
-         'exsig': exsig,
-         'sin_plus': sin_plus,
-         'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
-         'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
-         'srelu': srelu,
-         'qelu': qelu,
-         'isra': isra,
-         'waveakt': waveakt,
-         'arctan': arctan,
-         'bent_identity': bent_identity,
-         'sech': sech,
-         'softsign': softsign,
-         'pwl': pwl,
-         'cubic': cubic,
-         'gaussian': gaussian,
-         'sine': sine,
-         'tanh_square': tanh_square,
-         'mod_sigmoid': mod_sigmoid,
-         'linear': lambda x: x,
-         'quartic': quartic,
-         'square_quartic': square_quartic,
-         'cubic_quadratic': cubic_quadratic,
-         'exp_cubic': exp_cubic,
-         'sine_square': sine_square,
-         'logarithmic': logarithmic,
-         'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
-         'sine_offset': lambda x: sine_offset(x, 1.0),
-         'spiral': spiral_activation,
-         'circular': circular_activation
-     }
-
-     try:
-
-         valid_mask = np.array([act in activation_functions for act in activation_list])
-         valid_activations = np.array(activation_list)[valid_mask]
-
-         activation_outputs = np.array([activation_functions[act](origin_input) for act in valid_activations])
-
-         return Input + np.sum(activation_outputs, axis=0)
-
-     except Exception as e:
-         warnings.warn(f"Error in activation processing: {str(e)}", RuntimeWarning)
-         return Input
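
Note the composition rule in the removed apply_activation: every listed activation is evaluated on a copy of the raw input, and the outputs are summed onto the original input rather than chained. A minimal sketch of that behavior for two activations:

    import numpy as np

    x = np.array([-1.0, 0.0, 2.0])
    # apply_activation(x, ['tanh', 'relu']) reduces to:
    out = x + np.tanh(x) + np.maximum(0, x)  # Input + sum of activation outputs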

pyerualjetwork_afterburner/activation_functions_cuda.py
@@ -1,289 +0,0 @@
- import cupy as cp
- from scipy.special import expit, softmax
- import warnings
-
- # ACTIVATION FUNCTIONS ----
-
- def all_activations():
-
-     activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus', 'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan', 'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian', 'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic', 'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic', 'scaled_cubic', 'sine_offset']
-
-     return activations_list
-
- def spiral_activation(x):
-
-     r = cp.sqrt(cp.sum(x**2))
-
-     theta = cp.arctan2(x[1:], x[:-1])
-
-     spiral_x = r * cp.cos(theta + r)
-     spiral_y = r * cp.sin(theta + r)
-
-
-     spiral_output = cp.concatenate([cp.array([spiral_x[0]]), spiral_y])
-
-     return spiral_output
-
-
- def Softmax(
-     x # num: Input data to be transformed using softmax function.
- ):
-     """
-     Applies the softmax function to the input data.
-
-     Args:
-         (num): Input data to be transformed using softmax function.
-
-     Returns:
-         (num): Transformed data after applying softmax function.
-     """
-
-     return cp.array(softmax(x.get()))
-
-
- def Sigmoid(
-     x # num: Input data to be transformed using sigmoid function.
- ):
-     """
-     Applies the sigmoid function to the input data.
-
-     Args:
-         (num): Input data to be transformed using sigmoid function.
-
-     Returns:
-         (num): Transformed data after applying sigmoid function.
-     """
-     return expit(x)
-
-
- def Relu(
-     x # num: Input data to be transformed using ReLU function.
- ):
-     """
-     Applies the Rectified Linear Unit (ReLU) function to the input data.
-
-     Args:
-         (num): Input data to be transformed using ReLU function.
-
-     Returns:
-         (num): Transformed data after applying ReLU function.
-     """
-
-     return cp.maximum(0, x)
-
-
- def tanh(x):
-     return cp.tanh(x)
-
- def swish(x):
-     return x * (1 / (1 + cp.exp(-x)))
-
- def sin_plus(x):
-     return (cp.sin(x) + 1) / 2
-
- def modular_circular_activation(x, period=2*cp.pi):
-     return cp.mod(x, period) / period
-
- def tanh_circular_activation(x):
-     return (cp.tanh(x) + 1) / 2
-
- def leaky_relu(x, alpha=0.01):
-     return cp.where(x > 0, x, alpha * x)
-
- def softplus(x):
-     return cp.log(1 + cp.exp(x))
-
- def elu(x, alpha=1.0):
-     return cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
-
- def gelu(x):
-     return 0.5 * x * (1 + cp.tanh(cp.sqrt(2 / cp.pi) * (x + 0.044715 * cp.power(x, 3))))
-
- def selu(x, lambda_=1.0507, alpha=1.6733):
-     return lambda_ * cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
-
- def sinakt(x):
-     return cp.sin(x) + cp.cos(x)
-
- def p_squared(x, alpha=1.0, beta=0.0):
-     return alpha * x**2 + beta * x
-
- def sglu(x, alpha=1.0):
-     return cp.array(softmax(alpha * x.get())) * x
-
- # 4. Double Leaky ReLU (DLReLU)
- def dlrelu(x):
-     return cp.maximum(0.01 * x, x) + cp.minimum(0.01 * x, 0.1 * x)
-
- # 5. Exponential Sigmoid (ExSig)
- def exsig(x):
-     return 1 / (1 + cp.exp(-x**2))
-
- # 6. Adaptive Cosine Activation (ACos)
- def acos(x, alpha=1.0, beta=0.0):
-     return cp.cos(alpha * x + beta)
-
- # 7. Gaussian-like Activation (GLA)
- def gla(x, alpha=1.0, mu=0.0):
-     return cp.exp(-alpha * (x - mu)**2)
-
- # 8. Swish ReLU (SReLU)
- def srelu(x):
-     return x * (1 / (1 + cp.exp(-x))) + cp.maximum(0, x)
-
- # 9. Quadratic Exponential Linear Unit (QELU)
- def qelu(x):
-     return x**2 * cp.exp(x) - 1
-
- # 10. Inverse Square Root Activation (ISRA)
- def isra(x):
-     return x / cp.sqrt(cp.abs(x) + 1)
-
- def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
-     return cp.sin(alpha * x) * cp.cos(beta * x) * cp.sin(gamma * x)
-
- def arctan(x):
-     return cp.arctan(x)
-
- def bent_identity(x):
-     return (cp.sqrt(x**2 + 1) - 1) / 2 + x
-
- def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
-
-     n_features = x.shape[0]
-
-     circular_output = cp.zeros_like(x)
-
-     for i in range(n_features):
-
-         r = cp.sqrt(cp.sum(x**2))
-         theta = 2 * cp.pi * (i / n_features) + shift
-
-         circular_x = r * cp.cos(theta + frequency * r) * scale
-         circular_y = r * cp.sin(theta + frequency * r) * scale
-
-         if i % 2 == 0:
-             circular_output[i] = circular_x
-         else:
-             circular_output[i] = circular_y
-
-     return circular_output
-
- def sech(x):
-     return 2 / (cp.exp(x) + cp.exp(-x))
-
- def softsign(x):
-     return x / (1 + cp.abs(x))
-
- def pwl(x, alpha=0.5, beta=1.5):
-     return cp.where(x <= 0, alpha * x, beta * x)
-
- def cubic(x):
-     return x**3
-
- def gaussian(x, alpha=1.0, mu=0.0):
-     return cp.exp(-alpha * (x - mu)**2)
-
- def sine(x, alpha=1.0):
-     return cp.sin(alpha * x)
-
- def tanh_square(x):
-     return cp.tanh(x)**2
-
- def mod_sigmoid(x, alpha=1.0, beta=0.0):
-     return 1 / (1 + cp.exp(-alpha * x + beta))
-
- def quartic(x):
-     return x**4
-
- def square_quartic(x):
-     return (x**2)**2
-
- def cubic_quadratic(x):
-     return x**3 * (x**2)
-
- def exp_cubic(x):
-     return cp.exp(x**3)
-
- def sine_square(x):
-     return cp.sin(x)**2
-
- def logarithmic(x):
-     return cp.log(x**2 + 1)
-
- def scaled_cubic(x, alpha=1.0):
-     return alpha * x**3
-
- def sine_offset(x, beta=0.0):
-     return cp.sin(x + beta)
-
-
- def apply_activation(Input, activation_list):
-     """
-     Applies activation functions for inputs
-
-     Args:
-         Input (cupy.ndarray):
-         activation_list (list):
-     """
-     origin_input = cp.copy(Input)
-
-     activation_functions = {
-         'sigmoid': Sigmoid,
-         'swish': swish,
-         'mod_circular': modular_circular_activation,
-         'tanh_circular': tanh_circular_activation,
-         'leaky_relu': leaky_relu,
-         'relu': Relu,
-         'softplus': softplus,
-         'elu': elu,
-         'gelu': gelu,
-         'selu': selu,
-         'tanh': tanh,
-         'sinakt': sinakt,
-         'p_squared': p_squared,
-         'sglu': lambda x: sglu(x, alpha=1.0),
-         'dlrelu': dlrelu,
-         'exsig': exsig,
-         'sin_plus': sin_plus,
-         'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
-         'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
-         'srelu': srelu,
-         'qelu': qelu,
-         'isra': isra,
-         'waveakt': waveakt,
-         'arctan': arctan,
-         'bent_identity': bent_identity,
-         'sech': sech,
-         'softsign': softsign,
-         'pwl': pwl,
-         'cubic': cubic,
-         'gaussian': gaussian,
-         'sine': sine,
-         'tanh_square': tanh_square,
-         'mod_sigmoid': mod_sigmoid,
-         'linear': lambda x: x,
-         'quartic': quartic,
-         'square_quartic': square_quartic,
-         'cubic_quadratic': cubic_quadratic,
-         'exp_cubic': exp_cubic,
-         'sine_square': sine_square,
-         'logarithmic': logarithmic,
-         'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
-         'sine_offset': lambda x: sine_offset(x, 1.0),
-         'spiral': spiral_activation,
-         'circular': circular_activation
-     }
-
-     try:
-
-         valid_mask = cp.array([act in activation_functions for act in activation_list])
-         valid_activations = cp.array(activation_list)[valid_mask]
-
-         activation_outputs = cp.array([activation_functions[act](origin_input) for act in valid_activations])
-
-         return Input + cp.sum(activation_outputs, axis=0)
-
-     except Exception as e:
-         warnings.warn(f"Error in activation processing: {str(e)}", RuntimeWarning)
-         return Input
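
One detail worth flagging in the removed CUDA variant: Softmax and sglu round-trip through the host (x.get() copies the array to the CPU for SciPy's softmax, and cp.array copies the result back). A device-side alternative sketch, offered as an illustration rather than anything shipped in the package:

    import cupy as cp

    def softmax_on_device(x):
        # numerically stable softmax computed entirely on the GPU
        e = cp.exp(x - cp.max(x))
        return e / cp.sum(e)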