pyerualjetwork 4.0.5__tar.gz → 4.0.6__tar.gz
Sign up to get free protection for your applications and to get access to all the features.
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/PKG-INFO +2 -2
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/README.md +1 -1
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/__init__.py +1 -1
- pyerualjetwork-4.0.6/pyerualjetwork/activation_functions.py +344 -0
- pyerualjetwork-4.0.6/pyerualjetwork/activation_functions_cuda.py +342 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/data_operations.py +3 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/data_operations_cuda.py +7 -9
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/plan.py +9 -7
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/plan_cuda.py +7 -9
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork.egg-info/PKG-INFO +2 -2
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/setup.py +1 -1
- pyerualjetwork-4.0.5/pyerualjetwork/activation_functions.py +0 -367
- pyerualjetwork-4.0.5/pyerualjetwork/activation_functions_cuda.py +0 -363
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/help.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/loss_functions.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/loss_functions_cuda.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/metrics.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/metrics_cuda.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/model_operations.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/model_operations_cuda.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/planeat.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/planeat_cuda.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/ui.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/visualizations.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork/visualizations_cuda.py +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork.egg-info/SOURCES.txt +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork.egg-info/dependency_links.txt +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/pyerualjetwork.egg-info/top_level.txt +0 -0
- {pyerualjetwork-4.0.5 → pyerualjetwork-4.0.6}/setup.cfg +0 -0
@@ -1,13 +1,13 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: pyerualjetwork
|
3
|
-
Version: 4.0.5
|
3
|
+
Version: 4.0.6
|
4
4
|
Summary: PyerualJetwork is a machine learning library written in Python for professionals, incorporating advanced, unique, new, and modern techniques.
|
5
5
|
Author: Hasan Can Beydili
|
6
6
|
Author-email: tchasancan@gmail.com
|
7
7
|
Keywords: model evaluation,classification,potentiation learning artificial neural networks,NEAT,genetic algorithms,reinforcement learning,neural networks
|
8
8
|
Description-Content-Type: text/markdown
|
9
9
|
|
10
|
-
# PyerualJetwork [![Socket Badge](https://socket.dev/api/badge/pypi/package/pyerualjetwork/4.0.5?artifact_id=tar-gz)](https://socket.dev/pypi/package/pyerualjetwork/overview/4.0.5/tar-gz) [![CodeFactor](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork/badge)](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/anaplan)](https://pepy.tech/projects/anaplan) + [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/month)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/week)](https://pepy.tech/projects/pyerualjetwork) [![PyPI version](https://img.shields.io/pypi/v/pyerualjetwork.svg)](https://pypi.org/project/pyerualjetwork/)
|
10
|
+
# PyerualJetwork [![Socket Badge](https://socket.dev/api/badge/pypi/package/pyerualjetwork/4.0.6?artifact_id=tar-gz)](https://socket.dev/pypi/package/pyerualjetwork/overview/4.0.6/tar-gz) [![CodeFactor](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork/badge)](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/anaplan)](https://pepy.tech/projects/anaplan) + [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/month)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/week)](https://pepy.tech/projects/pyerualjetwork) [![PyPI version](https://img.shields.io/pypi/v/pyerualjetwork.svg)](https://pypi.org/project/pyerualjetwork/)
|
11
11
|
|
12
12
|
Note: anaplan old name of pyerualjetwork
|
13
13
|
|
@@ -1,4 +1,4 @@
|
|
1
|
-
# PyerualJetwork [![Socket Badge](https://socket.dev/api/badge/pypi/package/pyerualjetwork/4.0.5?artifact_id=tar-gz)](https://socket.dev/pypi/package/pyerualjetwork/overview/4.0.5/tar-gz) [![CodeFactor](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork/badge)](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/anaplan)](https://pepy.tech/projects/anaplan) + [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/month)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/week)](https://pepy.tech/projects/pyerualjetwork) [![PyPI version](https://img.shields.io/pypi/v/pyerualjetwork.svg)](https://pypi.org/project/pyerualjetwork/)
|
1
|
+
# PyerualJetwork [![Socket Badge](https://socket.dev/api/badge/pypi/package/pyerualjetwork/4.0.6?artifact_id=tar-gz)](https://socket.dev/pypi/package/pyerualjetwork/overview/4.0.6/tar-gz) [![CodeFactor](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork/badge)](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/anaplan)](https://pepy.tech/projects/anaplan) + [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/month)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/week)](https://pepy.tech/projects/pyerualjetwork) [![PyPI version](https://img.shields.io/pypi/v/pyerualjetwork.svg)](https://pypi.org/project/pyerualjetwork/)
|
2
2
|
|
3
3
|
Note: anaplan old name of pyerualjetwork
|
4
4
|
|
@@ -47,7 +47,7 @@ for package_name in package_names:
|
|
47
47
|
|
48
48
|
print(f"PyerualJetwork is ready to use with {err} errors")
|
49
49
|
|
50
|
-
__version__ = "4.0.5"
|
50
|
+
__version__ = "4.0.6"
|
51
51
|
__update__ = "* Note: CUDA modules need cupy. Enter this command in your terminal: 'pip install cupy-cuda12x' or your cuda version.\n* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES\n* PyerualJetwork document: https://github.com/HCB06/Anaplan/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf\n* YouTube tutorials: https://www.youtube.com/@HasanCanBeydili"
|
52
52
|
|
53
53
|
def print_version(__version__):
|
@@ -0,0 +1,344 @@
|
|
1
|
+
import numpy as np
|
2
|
+
from scipy.special import expit, softmax
|
3
|
+
import warnings
|
4
|
+
|
5
|
+
|
6
|
+
# ACTIVATION FUNCTIONS -----
|
7
|
+
|
8
|
+
def all_activations():
    """Return the names of every activation function apply_activation understands."""
    return [
        'linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish',
        'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus',
        'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu',
        'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan',
        'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian',
        'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic',
        'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic',
        'scaled_cubic', 'sine_offset',
    ]
|
13
|
+
|
14
|
+
def spiral_activation(x):
    """Project *x* onto a spiral; the output has the same length as the input.

    The radius is the Euclidean norm of the whole vector; the angles come
    from arctan2 of consecutive element pairs, so the input must have at
    least two elements.
    """
    radius = np.sqrt(np.sum(x**2))
    angles = np.arctan2(x[1:], x[:-1])

    cos_branch = radius * np.cos(angles + radius)
    sin_branch = radius * np.sin(angles + radius)

    # First cosine component followed by all sine components -> same length as x.
    return np.concatenate(([cos_branch[0]], sin_branch))
|
27
|
+
|
28
|
+
|
29
|
+
def Softmax(
    x  # num: Input data to be transformed using softmax function.
):
    """Softmax-normalise *x* so the result sums to 1.

    Args:
        x (num): Input data to be transformed using softmax function.

    Returns:
        (num): Transformed data after applying softmax function.
    """
    result = softmax(x)
    return result
|
43
|
+
|
44
|
+
|
45
|
+
def Sigmoid(
    x  # num: Input data to be transformed using sigmoid function.
):
    """Logistic sigmoid of *x* (numerically stable via scipy's expit).

    Args:
        x (num): Input data to be transformed using sigmoid function.

    Returns:
        (num): Transformed data after applying sigmoid function.
    """
    result = expit(x)
    return result
|
58
|
+
|
59
|
+
|
60
|
+
def Relu(
    x  # num: Input data to be transformed using ReLU function.
):
    """Rectified linear unit: clamp negative values of *x* to zero.

    Args:
        x (num): Input data to be transformed using ReLU function.

    Returns:
        (num): Transformed data after applying ReLU function.
    """
    # np.maximum is commutative, so this matches max(0, x) elementwise
    # (NaNs propagate, as with the original argument order).
    return np.maximum(x, 0)
|
74
|
+
|
75
|
+
|
76
|
+
def tanh(x):
    """Elementwise hyperbolic tangent."""
    return np.tanh(x)


def swish(x):
    """Swish activation: x * sigmoid(x)."""
    gate = 1 / (1 + np.exp(-x))
    return x * gate


def sin_plus(x):
    """Sine rescaled from [-1, 1] into [0, 1]."""
    return (np.sin(x) + 1) / 2


def modular_circular_activation(x, period=2*np.pi):
    """Position of x within one *period*, normalised to [0, 1)."""
    return np.mod(x, period) / period


def tanh_circular_activation(x):
    """tanh rescaled from [-1, 1] into [0, 1]."""
    return (np.tanh(x) + 1) / 2


def leaky_relu(x, alpha=0.01):
    """ReLU with a small slope *alpha* on the negative side."""
    return np.where(x > 0, x, alpha * x)


def softplus(x):
    """Smooth ReLU approximation: log(1 + e^x)."""
    return np.log(1 + np.exp(x))


def elu(x, alpha=1.0):
    """Exponential linear unit."""
    negative_branch = alpha * (np.exp(x) - 1)
    return np.where(x > 0, x, negative_branch)


def gelu(x):
    """Gaussian error linear unit (tanh approximation)."""
    inner = np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))
    return 0.5 * x * (1 + np.tanh(inner))


def selu(x, lambda_=1.0507, alpha=1.6733):
    """Scaled exponential linear unit."""
    return lambda_ * np.where(x > 0, x, alpha * (np.exp(x) - 1))


def sinakt(x):
    """Sum of sine and cosine of x."""
    return np.sin(x) + np.cos(x)


def p_squared(x, alpha=1.0, beta=0.0):
    """Parametric quadratic: alpha*x^2 + beta*x."""
    return alpha * x**2 + beta * x


def sglu(x, alpha=1.0):
    """Softmax-gated linear unit: softmax(alpha*x) * x."""
    return softmax(alpha * x) * x
|
114
|
+
|
115
|
+
def dlrelu(x):
    """Double leaky ReLU: max(0.01x, x) plus min(0.01x, 0.1x)."""
    return np.maximum(0.01 * x, x) + np.minimum(0.01 * x, 0.1 * x)


def exsig(x):
    """Exponential sigmoid driven by x squared: 1 / (1 + e^(-x^2))."""
    return 1 / (1 + np.exp(-x**2))


def acos(x, alpha=1.0, beta=0.0):
    """Cosine with adjustable frequency (*alpha*) and phase (*beta*)."""
    return np.cos(alpha * x + beta)


def gla(x, alpha=1.0, mu=0.0):
    """Gaussian-like bump centred at *mu*, width controlled by *alpha*."""
    return np.exp(-alpha * (x - mu)**2)


def srelu(x):
    """Swish plus ReLU: x*sigmoid(x) + max(0, x)."""
    return x * (1 / (1 + np.exp(-x))) + np.maximum(0, x)


def qelu(x):
    """Quadratic exponential unit: x^2 * e^x - 1."""
    return x**2 * np.exp(x) - 1


def isra(x):
    """Inverse-square-root-style squashing: x / sqrt(|x| + 1)."""
    return x / np.sqrt(np.abs(x) + 1)


def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
    """Product of three sinusoids with frequencies alpha, beta, gamma."""
    return np.sin(alpha * x) * np.cos(beta * x) * np.sin(gamma * x)


def arctan(x):
    """Elementwise arctangent."""
    return np.arctan(x)


def bent_identity(x):
    """Bent identity: smooth, near-linear for large |x|."""
    return (np.sqrt(x**2 + 1) - 1) / 2 + x
|
151
|
+
|
152
|
+
def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
    """Map each feature of *x* onto a circle of radius ||x||.

    Even-indexed outputs take the cosine component, odd-indexed outputs the
    sine component, each at an angle spread evenly around the circle.

    Args:
        x (numpy.ndarray): 1-D input vector.
        scale (float): Multiplier applied to each output component.
        frequency (float): Scales how fast the angle grows with the radius.
        shift (float): Constant phase offset added to every angle.

    Returns:
        numpy.ndarray: Array of the same shape (and dtype) as *x*.

    NOTE(review): np.zeros_like preserves x's dtype — an integer input would
    truncate the trigonometric results; presumably float input is expected.
    """
    n_features = x.shape[0]
    circular_output = np.zeros_like(x)

    # The radius depends only on x, not on the loop index, so compute it once
    # instead of once per feature (was O(n^2) in the feature count).
    r = np.sqrt(np.sum(x**2))

    for i in range(n_features):
        theta = 2 * np.pi * (i / n_features) + shift

        if i % 2 == 0:
            circular_output[i] = r * np.cos(theta + frequency * r) * scale
        else:
            circular_output[i] = r * np.sin(theta + frequency * r) * scale

    return circular_output
|
172
|
+
|
173
|
+
def sech(x):
    """Hyperbolic secant: 2 / (e^x + e^-x)."""
    return 2 / (np.exp(x) + np.exp(-x))


def softsign(x):
    """Softsign squashing: x / (1 + |x|)."""
    return x / (1 + np.abs(x))


def pwl(x, alpha=0.5, beta=1.5):
    """Piecewise linear: slope *alpha* for x <= 0, *beta* otherwise."""
    return np.where(x <= 0, alpha * x, beta * x)


def cubic(x):
    """Cube of x."""
    return x**3


def gaussian(x, alpha=1.0, mu=0.0):
    """Gaussian bump centred at *mu* with width controlled by *alpha*."""
    return np.exp(-alpha * (x - mu)**2)


def sine(x, alpha=1.0):
    """Sine with frequency *alpha*."""
    return np.sin(alpha * x)


def tanh_square(x):
    """Square of tanh(x)."""
    return np.tanh(x)**2


def mod_sigmoid(x, alpha=1.0, beta=0.0):
    """Sigmoid with gain *alpha* and offset *beta*."""
    return 1 / (1 + np.exp(-alpha * x + beta))


def quartic(x):
    """Fourth power of x."""
    return x**4


def square_quartic(x):
    """Square of x squared (equals x^4)."""
    return (x**2)**2


def cubic_quadratic(x):
    """Cube times square (equals x^5)."""
    return x**3 * (x**2)


def exp_cubic(x):
    """Exponential of x cubed."""
    return np.exp(x**3)


def sine_square(x):
    """Square of sin(x)."""
    return np.sin(x)**2


def logarithmic(x):
    """log(x^2 + 1); defined for all real x."""
    return np.log(x**2 + 1)


def scaled_cubic(x, alpha=1.0):
    """Cube of x scaled by *alpha*."""
    return alpha * x**3


def sine_offset(x, beta=0.0):
    """Sine with phase offset *beta*."""
    return np.sin(x + beta)
|
220
|
+
|
221
|
+
|
222
|
+
|
223
|
+
def safe_aggregate(current_sum, new_value):
    """Return current_sum + new_value, falling back to numpy on OverflowError.

    Python scalar arithmetic can raise OverflowError; numpy arrays upcast
    instead, so on overflow both operands are coerced to arrays and re-added.
    """
    try:
        return current_sum + new_value
    except OverflowError:
        return np.array(current_sum) + np.array(new_value)
|
228
|
+
|
229
|
+
|
230
|
+
def apply_activation(Input, activation_list):
    """
    Applies a sequence of activation functions to the input.

    Each listed activation is evaluated on the ORIGINAL input and its result
    is added into the running total, i.e. Input += f(origin_input) for every
    name in *activation_list* (additive aggregation, as in the original
    if/elif chain).

    Args:
        Input (numpy.ndarray): The input to apply activations to.
        activation_list (list): A list of activation function names to apply.

    Returns:
        numpy.ndarray: The input after all activations have been applied.
    """

    origin_input = np.copy(Input)

    # Dispatch table replacing the former 40+-branch if/elif chain.
    # Lambdas pin the exact extra arguments the old branches passed
    # (note sine_offset/scaled_cubic were called with 1.0, not their defaults).
    dispatch = {
        'sigmoid': Sigmoid,
        'swish': swish,
        'mod_circular': modular_circular_activation,
        'tanh_circular': tanh_circular_activation,
        'leaky_relu': leaky_relu,
        'relu': Relu,
        'softplus': softplus,
        'elu': elu,
        'gelu': gelu,
        'selu': selu,
        'tanh': tanh,
        'sinakt': sinakt,
        'p_squared': p_squared,
        'sglu': lambda z: sglu(z, alpha=1.0),
        'dlrelu': dlrelu,
        'exsig': exsig,
        'sin_plus': sin_plus,
        'acos': lambda z: acos(z, alpha=1.0, beta=0.0),
        'gla': lambda z: gla(z, alpha=1.0, mu=0.0),
        'srelu': srelu,
        'qelu': qelu,
        'isra': isra,
        'waveakt': waveakt,
        'arctan': arctan,
        'bent_identity': bent_identity,
        'sech': sech,
        'softsign': softsign,
        'pwl': pwl,
        'cubic': cubic,
        'gaussian': gaussian,
        'sine': sine,
        'tanh_square': tanh_square,
        'mod_sigmoid': mod_sigmoid,
        'linear': lambda z: z,
        'quartic': quartic,
        'square_quartic': square_quartic,
        'cubic_quadratic': cubic_quadratic,
        'exp_cubic': exp_cubic,
        'sine_square': sine_square,
        'logarithmic': logarithmic,
        'scaled_cubic': lambda z: scaled_cubic(z, 1.0),
        'sine_offset': lambda z: sine_offset(z, 1.0),
        'spiral': spiral_activation,
        'circular': circular_activation,
    }

    for name in activation_list:
        try:
            func = dispatch.get(name)
            if func is None:
                # Previously unknown names were silently skipped; surface them.
                warnings.warn(f"Unknown activation '{name}' skipped", RuntimeWarning)
                continue
            Input = safe_aggregate(Input, func(origin_input))

        except Exception as e:
            warnings.warn(f"Error in activation {name}: {str(e)}", RuntimeWarning)
            # Best-effort recovery: coerce both accumulators to arrays and
            # keep processing the remaining activations.
            if not isinstance(Input, np.ndarray):
                Input = np.array(Input)
            if not isinstance(origin_input, np.ndarray):
                origin_input = np.array(origin_input)

    return Input
|