pyerualjetwork 4.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,363 @@
+ import cupy as cp
+
+ # ACTIVATION FUNCTIONS ----
+
+ def all_activations():
+
+     activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish', 'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus', 'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu', 'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan', 'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian', 'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic', 'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic', 'scaled_cubic', 'sine_offset']
+
+     return activations_list
+
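+ # A minimal usage sketch (illustrative, not part of the released file):
+ # validating a user-supplied name against the supported list.
+ #
+ #     if name not in all_activations():
+ #         raise ValueError(f"unknown activation: {name}")
+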
+ def spiral_activation(x):
+     """Spiral activation: rotates each point by an angle equal to its distance from the origin (the angle is taken from the first two coordinates)."""
+     if x.ndim == 1:
+         r = cp.sqrt(cp.sum(x**2))
+         theta = cp.arctan2(x[1], x[0])
+
+         spiral_x = r * cp.cos(theta + r)
+         spiral_y = r * cp.sin(theta + r)
+
+         spiral_output = cp.array([spiral_x, spiral_y])
+     else:
+         r = cp.sqrt(cp.sum(x**2, axis=-1))
+         theta = cp.arctan2(x[:, 1], x[:, 0])
+
+         spiral_x = r * cp.cos(theta + r)
+         spiral_y = r * cp.sin(theta + r)
+
+         spiral_output = cp.stack((spiral_x, spiral_y), axis=-1)
+
+     return spiral_output
+
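+ # Illustrative example (assumes a CUDA device; values truncated):
+ #
+ #     >>> spiral_activation(cp.array([1.0, 0.0]))   # r = 1, theta = 0
+ #     array([0.5403, 0.8415])
+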
+ def Softmax(x):
+     """Numerically stable softmax along the last axis."""
+     shifted = x - cp.max(x, axis=-1, keepdims=True)
+     exps = cp.exp(shifted)
+     return exps / cp.sum(exps, axis=-1, keepdims=True)
+
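+ # Quick sanity check (illustrative): shifting by the row maximum leaves the
+ # result unchanged while avoiding overflow in cp.exp for large inputs.
+ #
+ #     >>> Softmax(cp.array([1000.0, 1001.0]))   # naive exp(1000.0) overflows
+ #     array([0.2689, 0.7311])
+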
+ def Sigmoid(x):
+     """Optimized Sigmoid function"""
+     return 1 / (1 + cp.exp(-x))
+
+ def Relu(x):
+     """Optimized ReLU function"""
+     return cp.maximum(0, x)
+
+ def tanh(x):
+     """Optimized Tanh function"""
+     return cp.tanh(x)
+
+ def swish(x):
+     """Optimized Swish function"""
+     return x * Sigmoid(x)
+
+ def sin_plus(x):
+     """Optimized SinPlus function"""
+     return (cp.sin(x) + 1) / 2
+
+ def modular_circular_activation(x, period=2*cp.pi):
+     """Optimized Modular Circular Activation function"""
+     return cp.mod(x, period) / period
+
+ def tanh_circular_activation(x):
+     """Optimized Tanh Circular Activation function"""
+     return (cp.tanh(x) + 1) / 2
+
+ def leaky_relu(x, alpha=0.01):
+     """Optimized Leaky ReLU function"""
+     return cp.where(x > 0, x, alpha * x)
+
+ def softplus(x):
+     """Optimized Softplus function"""
+     return cp.log1p(cp.exp(x))
+
+ def elu(x, alpha=1.0):
+     """Optimized ELU function"""
+     return cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
+
+ def gelu(x):
+     """Optimized GELU function (tanh approximation)"""
+     return 0.5 * x * (1 + cp.tanh(cp.sqrt(2 / cp.pi) * (x + 0.044715 * cp.power(x, 3))))
+
+ def selu(x, lambda_=1.0507, alpha=1.6733):
+     """Optimized SELU function"""
+     return lambda_ * cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
+
+ def sinakt(x):
+     """Optimized SinAkt function"""
+     return cp.sin(x) + cp.cos(x)
+
+ def p_squared(x, alpha=1.0, beta=0.0):
+     """Optimized P-squared function"""
+     return alpha * x**2 + beta * x
+
+ def sglu(x, alpha=1.0):
+     """Optimized SGLU function"""
+     return cp.exp(alpha * x) * x
+
+ def dlrelu(x):
+     """Optimized Double Leaky ReLU (DLReLU) function"""
+     return cp.maximum(0.01 * x, x) + cp.minimum(0.01 * x, 0.1 * x)
+
+ def exsig(x):
+     """Optimized Exponential Sigmoid (ExSig) function"""
+     return 1 / (1 + cp.exp(-x**2))
+
+ def acos(x, alpha=1.0, beta=0.0):
+     """Optimized Adaptive Cosine Activation (ACos) function"""
+     return cp.cos(alpha * x + beta)
+
+ def gla(x, alpha=1.0, mu=0.0):
+     """Optimized Gaussian-like Activation (GLA) function"""
+     return cp.exp(-alpha * (x - mu)**2)
+
+ def srelu(x):
+     """Optimized Swish ReLU (SReLU) function"""
+     return x * (1 / (1 + cp.exp(-x))) + cp.maximum(0, x)
+
+ def qelu(x):
+     """Optimized Quadratic Exponential Linear Unit (QELU) function"""
+     return x**2 * cp.exp(x) - 1
+
+ def isra(x):
+     """Optimized Inverse Square Root Activation (ISRA) function"""
+     return x / cp.sqrt(cp.abs(x) + 1)
+
+ def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
+     """Optimized Wave Activation function"""
+     return cp.sin(alpha * x) * cp.cos(beta * x) * cp.sin(gamma * x)
+
+ def arctan(x):
+     """Optimized Arctan function"""
+     return cp.arctan(x)
+
+ def bent_identity(x):
+     """Optimized Bent Identity function"""
+     return (cp.sqrt(x**2 + 1) - 1) / 2 + x
+
+ def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
+     """Optimized Circular Activation function (expects a 1-D feature vector)"""
+     n_features = x.shape[0]
+     circular_output = cp.zeros_like(x)
+
+     r = cp.sqrt(cp.sum(x**2))
+     for i in range(n_features):
+         theta = 2 * cp.pi * (i / n_features) + shift
+         circular_x = r * cp.cos(theta + frequency * r) * scale
+         circular_y = r * cp.sin(theta + frequency * r) * scale
+
+         # Alternate between the x and y projections across features
+         circular_output[i] = circular_x if i % 2 == 0 else circular_y
+
+     return circular_output
+
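+ # Geometric reading (descriptive note, not in the released file): feature i of
+ # a 1-D input with n features is placed on a circle of radius r * scale at
+ # angle 2*pi*i/n + shift + frequency*r; even indices keep the cosine (x)
+ # component, odd indices the sine (y) component.
+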
+ def sech(x):
+     """Optimized Sech function"""
+     return 2 / (cp.exp(x) + cp.exp(-x))
+
+ def softsign(x):
+     """Optimized Softsign function"""
+     return x / (1 + cp.abs(x))
+
+ def pwl(x, alpha=0.5, beta=1.5):
+     """Optimized Piecewise Linear function (PWL)"""
+     return cp.where(x <= 0, alpha * x, beta * x)
+
+ def cubic(x):
+     """Optimized Cubic function"""
+     return x**3
+
+ def gaussian(x, alpha=1.0, mu=0.0):
+     """Optimized Gaussian function"""
+     return cp.exp(-alpha * (x - mu)**2)
+
+ def sine(x, alpha=1.0):
+     """Optimized Sine function"""
+     return cp.sin(alpha * x)
+
+ def tanh_square(x):
+     """Optimized Tanh Square function"""
+     return cp.tanh(x)**2
+
+ def mod_sigmoid(x, alpha=1.0, beta=0.0):
+     """Optimized Modified Sigmoid function"""
+     return 1 / (1 + cp.exp(-alpha * x + beta))
+
+ def quartic(x):
+     """Optimized Quartic function"""
+     return x**4
+
+ def square_quartic(x):
+     """Optimized Square Quartic function"""
+     return (x**2)**2
+
+ def cubic_quadratic(x):
+     """Optimized Cubic Quadratic function"""
+     return x**3 * (x**2)
+
+ def exp_cubic(x):
+     """Optimized Exponential Cubic function"""
+     return cp.exp(x**3)
+
+ def sine_square(x):
+     """Optimized Sine Square function"""
+     return cp.sin(x)**2
+
+ def logarithmic(x):
+     """Optimized Logarithmic function"""
+     return cp.log(x**2 + 1)
+
+ def scaled_cubic(x, alpha=1.0):
+     """Optimized Scaled Cubic function"""
+     return alpha * x**3
+
+ def sine_offset(x, beta=0.0):
+     """Optimized Sine Offset function"""
+     return cp.sin(x + beta)
+
+ def apply_activation(Input, activation_list):
+     """
+     Applies a sequence of activation functions to the input.
+
+     Each activation is evaluated on the original input and its output is
+     added onto Input, so the result is Input plus the sum of all activation
+     outputs. Unrecognized names are silently skipped.
+
+     Args:
+         Input (cupy.ndarray): The input to apply activations to.
+         activation_list (list): A list of activation function names to apply.
+
+     Returns:
+         cupy.ndarray: The input after all activations have been applied.
+     """
+
+     origin_input = cp.copy(Input)
+
+     activation_functions = {
+         'linear': lambda x: x,
+         'sigmoid': Sigmoid,
+         'relu': Relu,
+         'tanh': tanh,
+         'swish': swish,
+         'sin_plus': sin_plus,
+         'mod_circular': modular_circular_activation,
+         'tanh_circular': tanh_circular_activation,
+         'leaky_relu': leaky_relu,
+         'softplus': softplus,
+         'elu': elu,
+         'gelu': gelu,
+         'selu': selu,
+         'sinakt': sinakt,
+         'p_squared': p_squared,
+         'sglu': lambda x: sglu(x, alpha=1.0),
+         'dlrelu': dlrelu,
+         'exsig': exsig,
+         'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
+         'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
+         'srelu': srelu,
+         'qelu': qelu,
+         'isra': isra,
+         'waveakt': waveakt,
+         'arctan': arctan,
+         'bent_identity': bent_identity,
+         'sech': sech,
+         'softsign': softsign,
+         'pwl': pwl,
+         'cubic': cubic,
+         'gaussian': gaussian,
+         'sine': sine,
+         'tanh_square': tanh_square,
+         'mod_sigmoid': mod_sigmoid,
+         'quartic': quartic,
+         'square_quartic': square_quartic,
+         'cubic_quadratic': cubic_quadratic,
+         'exp_cubic': exp_cubic,
+         'sine_square': sine_square,
+         'logarithmic': logarithmic,
+         'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
+         'sine_offset': lambda x: sine_offset(x, 1.0),
+         'spiral': spiral_activation,
+         'circular': circular_activation,
+     }
+
+     for name in activation_list:
+         func = activation_functions.get(name)
+         if func is not None:
+             Input += func(origin_input)
+
+     return Input
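
Usage sketch (illustrative, not part of the package): stacking two activations with apply_activation. It assumes CuPy with a working CUDA device. Note that the in-place += updates inside apply_activation modify the array passed in, so a copy is taken here to keep x intact.

    import cupy as cp

    x = cp.array([[0.5, -1.0], [2.0, 0.0]])
    out = apply_activation(cp.copy(x), ['relu', 'tanh'])
    # out == x + Relu(x) + cp.tanh(x), evaluated element-wise on the GPU
    print(cp.asnumpy(out))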