pyerualjetwork 4.2.9b7__py3-none-any.whl → 4.3.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. {pyerualjetwork-4.2.9b7.dist-info → pyerualjetwork-4.3.0.1.dist-info}/METADATA +2 -1
  2. pyerualjetwork-4.3.0.1.dist-info/RECORD +24 -0
  3. pyerualjetwork-4.3.0.1.dist-info/top_level.txt +1 -0
  4. {pyerualjetwork → pyerualjetwork-jetstorm}/__init__.py +1 -1
  5. pyerualjetwork-jetstorm/activation_functions.py +291 -0
  6. pyerualjetwork-jetstorm/activation_functions_cuda.py +290 -0
  7. {pyerualjetwork → pyerualjetwork-jetstorm}/data_operations.py +2 -3
  8. {pyerualjetwork → pyerualjetwork-jetstorm}/model_operations.py +14 -14
  9. {pyerualjetwork → pyerualjetwork-jetstorm}/model_operations_cuda.py +16 -17
  10. {pyerualjetwork → pyerualjetwork-jetstorm}/plan.py +46 -248
  11. {pyerualjetwork → pyerualjetwork-jetstorm}/plan_cuda.py +44 -263
  12. {pyerualjetwork → pyerualjetwork-jetstorm}/planeat.py +14 -47
  13. {pyerualjetwork → pyerualjetwork-jetstorm}/planeat_cuda.py +11 -48
  14. pyerualjetwork/activation_functions.py +0 -343
  15. pyerualjetwork/activation_functions_cuda.py +0 -341
  16. pyerualjetwork-4.2.9b7.dist-info/RECORD +0 -24
  17. pyerualjetwork-4.2.9b7.dist-info/top_level.txt +0 -1
  18. {pyerualjetwork-4.2.9b7.dist-info → pyerualjetwork-4.3.0.1.dist-info}/WHEEL +0 -0
  19. {pyerualjetwork → pyerualjetwork-jetstorm}/data_operations_cuda.py +0 -0
  20. {pyerualjetwork → pyerualjetwork-jetstorm}/help.py +0 -0
  21. {pyerualjetwork → pyerualjetwork-jetstorm}/loss_functions.py +0 -0
  22. {pyerualjetwork → pyerualjetwork-jetstorm}/loss_functions_cuda.py +0 -0
  23. {pyerualjetwork → pyerualjetwork-jetstorm}/memory_operations.py +0 -0
  24. {pyerualjetwork → pyerualjetwork-jetstorm}/metrics.py +0 -0
  25. {pyerualjetwork → pyerualjetwork-jetstorm}/metrics_cuda.py +0 -0
  26. {pyerualjetwork → pyerualjetwork-jetstorm}/ui.py +0 -0
  27. {pyerualjetwork → pyerualjetwork-jetstorm}/visualizations.py +0 -0
  28. {pyerualjetwork → pyerualjetwork-jetstorm}/visualizations_cuda.py +0 -0
@@ -1,341 +0,0 @@
1
- import cupy as cp
2
- from scipy.special import expit, softmax
3
- import warnings
4
-
5
- # ACTIVATION FUNCTIONS ----
6
-
7
def all_activations():
    """Return the names of every activation function supported by apply_activation.

    Returns:
        list[str]: supported activation-function identifiers.
    """
    activations_list = [
        'linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish',
        'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus',
        'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu',
        'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan',
        'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian',
        'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic',
        'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic',
        'scaled_cubic', 'sine_offset',
    ]
    return activations_list
12
-
13
def spiral_activation(x):
    """Map a 1-D vector onto a spiral in polar coordinates.

    The radius is the Euclidean norm of the whole vector; angles come from
    arctan2 of consecutive component pairs. The output has the same length
    as the input: its first element is the cosine coordinate, the rest are
    the sine coordinates.
    """
    radius = cp.sqrt(cp.sum(x**2))

    angles = cp.arctan2(x[1:], x[:-1])
    rotated = angles + radius

    along_x = radius * cp.cos(rotated)
    along_y = radius * cp.sin(rotated)

    # First output element comes from the cosine branch, the rest from sine.
    return cp.concatenate([cp.array([along_x[0]]), along_y])
26
-
27
-
28
def Softmax(
    x  # num: Input data to be transformed using softmax function.
):
    """
    Applies the softmax function to the input data.

    The computation runs on the CPU via scipy (the cupy array is copied to
    host with .get()), and the result is moved back to the device.

    Args:
        (num): Input data to be transformed using softmax function.

    Returns:
        (num): Transformed data after applying softmax function.
    """
    host_result = softmax(x.get())
    return cp.array(host_result)
42
-
43
-
44
def Sigmoid(
    x  # num: Input data to be transformed using sigmoid function.
):
    """
    Applies the sigmoid function to the input data.

    Delegates to scipy's numerically stable `expit`.

    Args:
        (num): Input data to be transformed using sigmoid function.

    Returns:
        (num): Transformed data after applying sigmoid function.
    """
    result = expit(x)
    return result
57
-
58
-
59
def Relu(
    x  # num: Input data to be transformed using ReLU function.
):
    """
    Applies the Rectified Linear Unit (ReLU) function to the input data.

    Args:
        (num): Input data to be transformed using ReLU function.

    Returns:
        (num): Transformed data after applying ReLU function.
    """
    # cp.maximum (not cp.where) is kept so NaN inputs propagate unchanged.
    clipped = cp.maximum(0, x)
    return clipped
73
-
74
-
75
def tanh(x):
    """Hyperbolic tangent."""
    return cp.tanh(x)

def swish(x):
    """Swish: x * sigmoid(x)."""
    sigmoid_of_x = 1 / (1 + cp.exp(-x))
    return x * sigmoid_of_x

def sin_plus(x):
    """Sine rescaled into [0, 1]."""
    return (cp.sin(x) + 1) / 2

def modular_circular_activation(x, period=2*cp.pi):
    """Wrap x into [0, 1) by reducing it modulo `period`."""
    return cp.mod(x, period) / period

def tanh_circular_activation(x):
    """tanh rescaled into (0, 1)."""
    return (cp.tanh(x) + 1) / 2

def leaky_relu(x, alpha=0.01):
    """ReLU with slope `alpha` on the negative side."""
    return cp.where(x > 0, x, alpha * x)

def softplus(x):
    """Softplus: log(1 + exp(x)).

    FIX: the previous form cp.log(1 + cp.exp(x)) overflows to inf for
    moderately large x (exp(x) overflows before the log is taken).
    logaddexp(0, x) computes the same quantity stably.
    """
    return cp.logaddexp(0, x)

def elu(x, alpha=1.0):
    """Exponential linear unit."""
    return cp.where(x > 0, x, alpha * (cp.exp(x) - 1))
98
-
99
def gelu(x):
    """Gaussian error linear unit (tanh approximation)."""
    inner = cp.sqrt(2 / cp.pi) * (x + 0.044715 * cp.power(x, 3))
    return 0.5 * x * (1 + cp.tanh(inner))

def selu(x, lambda_=1.0507, alpha=1.6733):
    """Scaled exponential linear unit."""
    negative_branch = alpha * (cp.exp(x) - 1)
    return lambda_ * cp.where(x > 0, x, negative_branch)

def sinakt(x):
    """Sum of sine and cosine of the input."""
    return cp.sin(x) + cp.cos(x)

def p_squared(x, alpha=1.0, beta=0.0):
    """Parametric quadratic: alpha * x^2 + beta * x."""
    return alpha * x**2 + beta * x

def sglu(x, alpha=1.0):
    """Softmax-gated linear unit (softmax runs on CPU via scipy)."""
    gate = cp.array(softmax(alpha * x.get()))
    return gate * x

# 4. Double Leaky ReLU (DLReLU)
def dlrelu(x):
    """Sum of two leaky branches: max(0.01x, x) + min(0.01x, 0.1x)."""
    positive_part = cp.maximum(0.01 * x, x)
    negative_part = cp.minimum(0.01 * x, 0.1 * x)
    return positive_part + negative_part
117
-
118
# 5. Exponential Sigmoid (ExSig)
def exsig(x):
    """Sigmoid of the squared input: 1 / (1 + exp(-x^2))."""
    return 1 / (1 + cp.exp(-x**2))

# 6. Adaptive Cosine Activation (ACos)
def acos(x, alpha=1.0, beta=0.0):
    """Cosine with learnable-style scale and shift."""
    return cp.cos(alpha * x + beta)

# 7. Gaussian-like Activation (GLA)
def gla(x, alpha=1.0, mu=0.0):
    """Gaussian bump centered at mu with width controlled by alpha."""
    deviation = x - mu
    return cp.exp(-alpha * deviation**2)

# 8. Swish ReLU (SReLU)
def srelu(x):
    """Sum of a swish term and a plain ReLU term."""
    swish_part = x * (1 / (1 + cp.exp(-x)))
    return swish_part + cp.maximum(0, x)

# 9. Quadratic Exponential Linear Unit (QELU)
def qelu(x):
    # NOTE(review): the -1 is applied outside the product (x^2 * e^x - 1),
    # not inside as the name might suggest; kept as-is to preserve behavior.
    return x**2 * cp.exp(x) - 1

# 10. Inverse Square Root Activation (ISRA)
def isra(x):
    """x / sqrt(|x| + 1): bounded-slope, sign-preserving."""
    return x / cp.sqrt(cp.abs(x) + 1)

def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
    """Product of three sinusoids at different frequencies."""
    return cp.sin(alpha * x) * cp.cos(beta * x) * cp.sin(gamma * x)

def arctan(x):
    """Inverse tangent."""
    return cp.arctan(x)

def bent_identity(x):
    """Bent identity: (sqrt(x^2 + 1) - 1) / 2 + x."""
    return (cp.sqrt(x**2 + 1) - 1) / 2 + x
150
-
151
def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
    """Project each feature onto a circle whose radius is the vector norm.

    Each feature i gets an angle proportional to its index; even-indexed
    outputs take the cosine coordinate, odd-indexed the sine coordinate.

    Args:
        x (cupy.ndarray): 1-D input vector.
        scale (float): multiplier applied to both coordinates.
        frequency (float): scales the radius contribution to the angle.
        shift (float): constant angular offset.

    Returns:
        cupy.ndarray: transformed vector, same shape as `x`.
    """
    n_features = x.shape[0]
    circular_output = cp.zeros_like(x)

    # FIX: the radius depends only on the whole vector, so it is hoisted out
    # of the loop (it was previously recomputed once per feature).
    r = cp.sqrt(cp.sum(x**2))

    for i in range(n_features):
        theta = 2 * cp.pi * (i / n_features) + shift

        if i % 2 == 0:
            circular_output[i] = r * cp.cos(theta + frequency * r) * scale
        else:
            circular_output[i] = r * cp.sin(theta + frequency * r) * scale

    return circular_output
171
-
172
def sech(x):
    """Hyperbolic secant: 2 / (e^x + e^-x)."""
    return 2 / (cp.exp(x) + cp.exp(-x))

def softsign(x):
    """Softsign: x / (1 + |x|)."""
    return x / (1 + cp.abs(x))

def pwl(x, alpha=0.5, beta=1.5):
    """Piecewise linear: slope alpha for x <= 0, slope beta otherwise."""
    return cp.where(x <= 0, alpha * x, beta * x)

def cubic(x):
    """Cube of the input."""
    return x**3

def gaussian(x, alpha=1.0, mu=0.0):
    """Gaussian bump centered at mu."""
    return cp.exp(-alpha * (x - mu)**2)

def sine(x, alpha=1.0):
    """Sine with frequency alpha."""
    return cp.sin(alpha * x)

def tanh_square(x):
    """Square of tanh."""
    return cp.tanh(x)**2

def mod_sigmoid(x, alpha=1.0, beta=0.0):
    """Sigmoid with scale alpha and offset beta."""
    return 1 / (1 + cp.exp(-alpha * x + beta))

def quartic(x):
    """Fourth power of the input."""
    return x**4

def square_quartic(x):
    """(x^2)^2 — algebraically x^4, kept separate as in the original API."""
    return (x**2)**2

def cubic_quadratic(x):
    # x^3 * x^2 == x^5; the name is misleading but the formula is preserved.
    return x**3 * (x**2)

def exp_cubic(x):
    """Exponential of the cube."""
    return cp.exp(x**3)

def sine_square(x):
    """Square of sine."""
    return cp.sin(x)**2

def logarithmic(x):
    """log(x^2 + 1): even, unbounded, defined everywhere."""
    return cp.log(x**2 + 1)

def scaled_cubic(x, alpha=1.0):
    """Cube scaled by alpha."""
    return alpha * x**3

def sine_offset(x, beta=0.0):
    """Sine with phase offset beta."""
    return cp.sin(x + beta)
219
-
220
-
221
def safe_add(current_sum, new_value):
    """Add two values, retrying as cupy arrays if plain addition overflows.

    Plain `+` is tried first; on OverflowError both operands are promoted
    to cupy arrays (whose float arithmetic does not raise on overflow).
    """
    try:
        return current_sum + new_value
    except OverflowError:
        promoted_sum = cp.array(current_sum)
        promoted_value = cp.array(new_value)
        return promoted_sum + promoted_value
226
-
227
-
228
def apply_activation(Input, activation_list):
    """
    Applies a sequence of activation functions to the input.

    Each named activation is evaluated on a copy of the ORIGINAL input and
    accumulated into the running result with safe_add. Unknown names are
    silently skipped; a failing activation emits a RuntimeWarning and the
    loop continues.

    Args:
        Input (cupy.ndarray): The input to apply activations to.
        activation_list (list): A list of activation function names to apply.

    Returns:
        cupy.ndarray: The input after all activations have been applied.
    """

    origin_input = cp.copy(Input)

    # Dispatch table replaces the original 44-branch if/elif chain.
    # Lambdas pin the extra arguments the original call sites passed.
    activation_map = {
        'sigmoid': Sigmoid,
        'swish': swish,
        'mod_circular': modular_circular_activation,
        'tanh_circular': tanh_circular_activation,
        'leaky_relu': leaky_relu,
        'relu': Relu,
        'softplus': softplus,
        'elu': elu,
        'gelu': gelu,
        'selu': selu,
        'tanh': tanh,
        'sinakt': sinakt,
        'p_squared': p_squared,
        'sglu': lambda x: sglu(x, alpha=1.0),
        'dlrelu': dlrelu,
        'exsig': exsig,
        'sin_plus': sin_plus,
        'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
        'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
        'srelu': srelu,
        'qelu': qelu,
        'isra': isra,
        'waveakt': waveakt,
        'arctan': arctan,
        'bent_identity': bent_identity,
        'sech': sech,
        'softsign': softsign,
        'pwl': pwl,
        'cubic': cubic,
        'gaussian': gaussian,
        'sine': sine,
        'tanh_square': tanh_square,
        'mod_sigmoid': mod_sigmoid,
        'linear': lambda x: x,
        'quartic': quartic,
        'square_quartic': square_quartic,
        'cubic_quadratic': cubic_quadratic,
        'exp_cubic': exp_cubic,
        'sine_square': sine_square,
        'logarithmic': logarithmic,
        'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
        'sine_offset': lambda x: sine_offset(x, 1.0),
        'spiral': spiral_activation,
        'circular': circular_activation,
    }

    for name in activation_list:
        try:
            func = activation_map.get(name)
            if func is None:
                continue  # unknown names are ignored, as in the original
            # FIX: the original 'linear' branch used `Input += origin_input`,
            # which mutated the caller's array in place and bypassed safe_add;
            # every branch now accumulates uniformly through safe_add.
            Input = safe_add(Input, func(origin_input))

        except Exception as e:
            warnings.warn(f"Error in activation {name}: {str(e)}", RuntimeWarning)
            # Re-coerce both accumulators to cupy arrays so a partially
            # failed activation cannot poison later iterations.
            if not isinstance(Input, cp.ndarray):
                Input = cp.array(Input)
            if not isinstance(origin_input, cp.ndarray):
                origin_input = cp.array(origin_input)
            continue

    return Input
@@ -1,24 +0,0 @@
1
- pyerualjetwork/__init__.py,sha256=-UIV7CqxZoriiu5trhAYI1q_ztU6L6nky-nOFm1p9l0,641
2
- pyerualjetwork/activation_functions.py,sha256=eLEesmMgDvkI1TqaLTpqtOgTaLbHEAyw-D57KIKd9G4,11775
3
- pyerualjetwork/activation_functions_cuda.py,sha256=ahUOF47g073epWrIrv4kGBqQjif1xcw3qfEhvLJEDp4,11789
4
- pyerualjetwork/data_operations.py,sha256=pb5CqJ0Th6fCjTNMCtqQMiwH3KezTxAijacglsKUxmY,14730
5
- pyerualjetwork/data_operations_cuda.py,sha256=UpoJoFhIwTU4xg9dVuLAxLAT4CkRaGsxvtJG9j1xrNo,17629
6
- pyerualjetwork/help.py,sha256=nQ_YbYA2RtuafhuvkreNpX0WWL1I_nzlelwCtvei0_Y,775
7
- pyerualjetwork/loss_functions.py,sha256=6PyBI232SQRGuFnG3LDGvnv_PUdWzT2_2mUODJiejGI,618
8
- pyerualjetwork/loss_functions_cuda.py,sha256=C93IZJcrOpT6HMK9x1O4AHJWXYTkN5WZiqdssPbvAPk,617
9
- pyerualjetwork/memory_operations.py,sha256=I7QiZ--xSyRkFF0wcckPwZV7K9emEvyx5aJ3DiRHZFI,13468
10
- pyerualjetwork/metrics.py,sha256=q7MkhnZDRbCjFBDDfUgrl8lBYnUT_1ro1LxeBq105pI,6077
11
- pyerualjetwork/metrics_cuda.py,sha256=73h9GC7XwmnFCVzFEEiPQfF8CwHIz2wsCbxpZrJtYgw,5061
12
- pyerualjetwork/model_operations.py,sha256=RKqnh7-MByFosxqme4q4jC1lOndX26O-OVXYV6ZxoEE,12965
13
- pyerualjetwork/model_operations_cuda.py,sha256=XnKKq54ZLaqCm-NaJ6d8IToACKcKg2Ttq6moowVRRWo,13365
14
- pyerualjetwork/plan.py,sha256=UzCTFCA9cTv9ITCtsqfJ1g02rCMyescoIV6j1amvYGw,32134
15
- pyerualjetwork/plan_cuda.py,sha256=hpXZl3h7B1qAVYW-gZebwKMZd4-ftAZ-u05teOJjsno,33525
16
- pyerualjetwork/planeat.py,sha256=t6qyuMB2c5n8lsAJooEpShzEnw2GvepBI0bpLMx0DUI,39440
17
- pyerualjetwork/planeat_cuda.py,sha256=UBdbAk87M5zEZzZlRBeOzW-q0Sy8c_XWl4zdrtDnyIs,39499
18
- pyerualjetwork/ui.py,sha256=wu2BhU1k-w3Kcho5Jtq4SEKe68ftaUeRGneUOSCVDjU,575
19
- pyerualjetwork/visualizations.py,sha256=1SKMZaJ80OD2qHUyMxW1IOv8zwmxzMPxclfbeq1Xr4g,28772
20
- pyerualjetwork/visualizations_cuda.py,sha256=KbMhfsLlxujy_i3QrwCf734Q-k6d7Zn_7CEbm3gzK9w,29186
21
- pyerualjetwork-4.2.9b7.dist-info/METADATA,sha256=hqgQTjNLjoljou-abs6HJwbCoeHlnrkZTcmU91-rB7c,7454
22
- pyerualjetwork-4.2.9b7.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
23
- pyerualjetwork-4.2.9b7.dist-info/top_level.txt,sha256=BRyt62U_r3ZmJpj-wXNOoA345Bzamrj6RbaWsyW4tRg,15
24
- pyerualjetwork-4.2.9b7.dist-info/RECORD,,
@@ -1 +0,0 @@
1
- pyerualjetwork
File without changes
File without changes
File without changes