pyerualjetwork 4.0.5 → 4.0.7 (py3-none-any.whl)

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
pyerualjetwork/__init__.py CHANGED
@@ -47,7 +47,7 @@ for package_name in package_names:

print(f"PyerualJetwork is ready to use with {err} errors")

- __version__ = "4.0.5"
+ __version__ = "4.0.7"

__update__ = "* Note: CUDA modules need cupy. Enter this command in your terminal: 'pip install cupy-cuda12x' or your cuda version.\n* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES\n* PyerualJetwork document: https://github.com/HCB06/Anaplan/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf\n* YouTube tutorials: https://www.youtube.com/@HasanCanBeydili"

def print_version(__version__):
pyerualjetwork/activation_functions.py CHANGED
@@ -1,5 +1,7 @@
import numpy as np
from scipy.special import expit, softmax
+ import warnings
+ 

# ACTIVATION FUNCTIONS -----

@@ -216,152 +218,127 @@ def scaled_cubic(x, alpha=1.0):
def sine_offset(x, beta=0.0):
    return np.sin(x + beta)

- def apply_activation(Input, activation_list):
-     """
-     Applies a sequence of activation functions to the input.
- 
-     Args:
-         Input (numpy.ndarray): The input to apply activations to.
-         activation_list (list): A list of activation function names to apply.
- 
-     Returns:
-         numpy.ndarray: The input after all activations have been applied.
-     """
- 
-     origin_input = np.copy(Input)
- 
-     for i in range(len(activation_list)):
- 
-         if activation_list[i] == 'sigmoid':
-             Input += Sigmoid(origin_input)
- 
-         elif activation_list[i] == 'swish':
-             Input += swish(origin_input)
- 
-         elif activation_list[i] == 'mod_circular':
-             Input += modular_circular_activation(origin_input)
- 
-         elif activation_list[i] == 'tanh_circular':
-             Input += tanh_circular_activation(origin_input)
- 
-         elif activation_list[i] == 'leaky_relu':
-             Input += leaky_relu(origin_input)
- 
-         elif activation_list[i] == 'relu':
-             Input += Relu(origin_input)
- 
-         elif activation_list[i] == 'softplus':
-             Input += softplus(origin_input)
- 
-         elif activation_list[i] == 'elu':
-             Input += elu(origin_input)
- 
-         elif activation_list[i] == 'gelu':
-             Input += gelu(origin_input)
- 
-         elif activation_list[i] == 'selu':
-             Input += selu(origin_input)
- 
-         elif activation_list[i] == 'tanh':
-             Input += tanh(origin_input)
- 
-         elif activation_list[i] == 'sinakt':
-             Input += sinakt(origin_input)
- 
-         elif activation_list[i] == 'p_squared':
-             Input += p_squared(origin_input)
- 
-         elif activation_list[i] == 'sglu':
-             Input += sglu(origin_input, alpha=1.0)
- 
-         elif activation_list[i] == 'dlrelu':
-             Input += dlrelu(origin_input)
- 
-         elif activation_list[i] == 'exsig':
-             Input += exsig(origin_input)
- 
-         elif activation_list[i] == 'sin_plus':
-             Input += sin_plus(origin_input)
- 
-         elif activation_list[i] == 'acos':
-             Input += acos(origin_input, alpha=1.0, beta=0.0)
- 
-         elif activation_list[i] == 'gla':
-             Input += gla(origin_input, alpha=1.0, mu=0.0)
- 
-         elif activation_list[i] == 'srelu':
-             Input += srelu(origin_input)
- 
-         elif activation_list[i] == 'qelu':
-             Input += qelu(origin_input)

-         elif activation_list[i] == 'isra':
-             Input += isra(origin_input)

-         elif activation_list[i] == 'waveakt':
-             Input += waveakt(origin_input)
- 
-         elif activation_list[i] == 'arctan':
-             Input += arctan(origin_input)
- 
-         elif activation_list[i] == 'bent_identity':
-             Input += bent_identity(origin_input)
- 
-         elif activation_list[i] == 'sech':
-             Input += sech(origin_input)
- 
-         elif activation_list[i] == 'softsign':
-             Input += softsign(origin_input)
- 
-         elif activation_list[i] == 'pwl':
-             Input += pwl(origin_input)
- 
-         elif activation_list[i] == 'cubic':
-             Input += cubic(origin_input)
- 
-         elif activation_list[i] == 'gaussian':
-             Input += gaussian(origin_input)
- 
-         elif activation_list[i] == 'sine':
-             Input += sine(origin_input)
- 
-         elif activation_list[i] == 'tanh_square':
-             Input += tanh_square(origin_input)
- 
-         elif activation_list[i] == 'mod_sigmoid':
-             Input += mod_sigmoid(origin_input)
- 
-         elif activation_list[i] == 'linear':
-             Input += origin_input
- 
-         elif activation_list[i] == 'quartic':
-             Input += quartic(origin_input)
- 
-         elif activation_list[i] == 'square_quartic':
-             Input += square_quartic(origin_input)
- 
-         elif activation_list[i] == 'cubic_quadratic':
-             Input += cubic_quadratic(origin_input)
- 
-         elif activation_list[i] == 'exp_cubic':
-             Input += exp_cubic(origin_input)
- 
-         elif activation_list[i] == 'sine_square':
-             Input += sine_square(origin_input)
- 
-         elif activation_list[i] == 'logarithmic':
-             Input += logarithmic(origin_input)
- 
-         elif activation_list[i] == 'scaled_cubic':
-             Input += scaled_cubic(origin_input, 1.0)
- 
-         elif activation_list[i] == 'sine_offset':
-             Input += sine_offset(origin_input, 1.0)
- 
-         elif activation_list[i] == 'spiral':
-             Input += spiral_activation(origin_input)
+ def safe_aggregate(current_sum, new_value):
+     try:
+         return current_sum + new_value
+     except OverflowError:
+         return np.array(current_sum) + np.array(new_value)
+ 

-         elif activation_list[i] == 'circular':
-             Input += circular_activation(origin_input)
+ def apply_activation(Input, activation_list):
+     """
+     Applies a sequence of activation functions to the input.
+ 
+     Args:
+         Input (numpy.ndarray): The input to apply activations to.
+         activation_list (list): A list of activation function names to apply.
+ 
+     Returns:
+         numpy.ndarray: The input after all activations have been applied.
+     """
+ 
+     origin_input = np.copy(Input)
+ 
+     for i in range(len(activation_list)):
+         try:
+             if activation_list[i] == 'sigmoid':
+                 Input = safe_aggregate(Input, Sigmoid(origin_input))
+             elif activation_list[i] == 'swish':
+                 Input = safe_aggregate(Input, swish(origin_input))
+             elif activation_list[i] == 'mod_circular':
+                 Input = safe_aggregate(Input, modular_circular_activation(origin_input))
+             elif activation_list[i] == 'tanh_circular':
+                 Input = safe_aggregate(Input, tanh_circular_activation(origin_input))
+             elif activation_list[i] == 'leaky_relu':
+                 Input = safe_aggregate(Input, leaky_relu(origin_input))
+             elif activation_list[i] == 'relu':
+                 Input = safe_aggregate(Input, Relu(origin_input))
+             elif activation_list[i] == 'softplus':
+                 Input = safe_aggregate(Input, softplus(origin_input))
+             elif activation_list[i] == 'elu':
+                 Input = safe_aggregate(Input, elu(origin_input))
+             elif activation_list[i] == 'gelu':
+                 Input = safe_aggregate(Input, gelu(origin_input))
+             elif activation_list[i] == 'selu':
+                 Input = safe_aggregate(Input, selu(origin_input))
+             elif activation_list[i] == 'tanh':
+                 Input = safe_aggregate(Input, tanh(origin_input))
+             elif activation_list[i] == 'sinakt':
+                 Input = safe_aggregate(Input, sinakt(origin_input))
+             elif activation_list[i] == 'p_squared':
+                 Input = safe_aggregate(Input, p_squared(origin_input))
+             elif activation_list[i] == 'sglu':
+                 Input = safe_aggregate(Input, sglu(origin_input, alpha=1.0))
+             elif activation_list[i] == 'dlrelu':
+                 Input = safe_aggregate(Input, dlrelu(origin_input))
+             elif activation_list[i] == 'exsig':
+                 Input = safe_aggregate(Input, exsig(origin_input))
+             elif activation_list[i] == 'sin_plus':
+                 Input = safe_aggregate(Input, sin_plus(origin_input))
+             elif activation_list[i] == 'acos':
+                 Input = safe_aggregate(Input, acos(origin_input, alpha=1.0, beta=0.0))
+             elif activation_list[i] == 'gla':
+                 Input = safe_aggregate(Input, gla(origin_input, alpha=1.0, mu=0.0))
+             elif activation_list[i] == 'srelu':
+                 Input = safe_aggregate(Input, srelu(origin_input))
+             elif activation_list[i] == 'qelu':
+                 Input = safe_aggregate(Input, qelu(origin_input))
+             elif activation_list[i] == 'isra':
+                 Input = safe_aggregate(Input, isra(origin_input))
+             elif activation_list[i] == 'waveakt':
+                 Input = safe_aggregate(Input, waveakt(origin_input))
+             elif activation_list[i] == 'arctan':
+                 Input = safe_aggregate(Input, arctan(origin_input))
+             elif activation_list[i] == 'bent_identity':
+                 Input = safe_aggregate(Input, bent_identity(origin_input))
+             elif activation_list[i] == 'sech':
+                 Input = safe_aggregate(Input, sech(origin_input))
+             elif activation_list[i] == 'softsign':
+                 Input = safe_aggregate(Input, softsign(origin_input))
+             elif activation_list[i] == 'pwl':
+                 Input = safe_aggregate(Input, pwl(origin_input))
+             elif activation_list[i] == 'cubic':
+                 Input = safe_aggregate(Input, cubic(origin_input))
+             elif activation_list[i] == 'gaussian':
+                 Input = safe_aggregate(Input, gaussian(origin_input))
+             elif activation_list[i] == 'sine':
+                 Input = safe_aggregate(Input, sine(origin_input))
+             elif activation_list[i] == 'tanh_square':
+                 Input = safe_aggregate(Input, tanh_square(origin_input))
+             elif activation_list[i] == 'mod_sigmoid':
+                 Input = safe_aggregate(Input, mod_sigmoid(origin_input))
+             elif activation_list[i] == 'linear':
+                 Input = safe_aggregate(Input, origin_input)
+             elif activation_list[i] == 'quartic':
+                 Input = safe_aggregate(Input, quartic(origin_input))
+             elif activation_list[i] == 'square_quartic':
+                 Input = safe_aggregate(Input, square_quartic(origin_input))
+             elif activation_list[i] == 'cubic_quadratic':
+                 Input = safe_aggregate(Input, cubic_quadratic(origin_input))
+             elif activation_list[i] == 'exp_cubic':
+                 Input = safe_aggregate(Input, exp_cubic(origin_input))
+             elif activation_list[i] == 'sine_square':
+                 Input = safe_aggregate(Input, sine_square(origin_input))
+             elif activation_list[i] == 'logarithmic':
+                 Input = safe_aggregate(Input, logarithmic(origin_input))
+             elif activation_list[i] == 'scaled_cubic':
+                 Input = safe_aggregate(Input, scaled_cubic(origin_input, 1.0))
+             elif activation_list[i] == 'sine_offset':
+                 Input = safe_aggregate(Input, sine_offset(origin_input, 1.0))
+             elif activation_list[i] == 'spiral':
+                 Input = safe_aggregate(Input, spiral_activation(origin_input))
+             elif activation_list[i] == 'circular':
+                 Input = safe_aggregate(Input, circular_activation(origin_input))

-     return Input
+ 
+         except Exception as e:
+             warnings.warn(f"Error in activation {activation_list[i]}: {str(e)}", RuntimeWarning)
+             if not isinstance(Input, np.ndarray):
+                 Input = np.array(Input)
+             if not isinstance(origin_input, np.ndarray):
+                 origin_input = np.array(origin_input)
+             continue
+ 
+     return Input
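Note: the rewritten apply_activation above no longer fails the whole pass when one activation errors; each named activation of the original input is accumulated onto the running result via safe_aggregate, and failures only emit a RuntimeWarning. A minimal sketch of that accumulation pattern (illustrative only; the ACTIVATIONS table and the function name below are not part of the package):

    import warnings
    import numpy as np

    # Hypothetical stand-ins for a few of the package's activation functions.
    ACTIVATIONS = {
        'sigmoid': lambda x: 1.0 / (1.0 + np.exp(-x)),
        'relu': lambda x: np.maximum(0.0, x),
        'linear': lambda x: x,
    }

    def apply_activation_sketch(inputs, activation_list):
        origin = np.copy(inputs)   # every activation sees the raw input
        out = np.copy(inputs)
        for name in activation_list:
            try:
                out = out + ACTIVATIONS[name](origin)  # safe_aggregate's happy path
            except Exception as e:                     # skip and warn, as the new code does
                warnings.warn(f"Error in activation {name}: {e}", RuntimeWarning)
                continue
        return out

    print(apply_activation_sketch(np.array([-1.0, 0.0, 2.0]), ['sigmoid', 'relu']))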
pyerualjetwork/activation_functions_cuda.py CHANGED
@@ -1,4 +1,6 @@
import cupy as cp
+ from scipy.special import expit, softmax
+ import warnings

# ACTIVATION FUNCTIONS ----

@@ -9,355 +11,332 @@ def all_activations():
    return activations_list

def spiral_activation(x):
-     if x.ndim == 1:
-         r = cp.sqrt(cp.sum(x**2))
-         theta = cp.arctan2(x[1], x[0])

-         spiral_x = r * cp.cos(theta + r)
-         spiral_y = r * cp.sin(theta + r)
+     r = cp.sqrt(cp.sum(x**2))
+ 
+     theta = cp.arctan2(x[1:], x[:-1])

-         spiral_output = cp.array([spiral_x, spiral_y])
-     else:
-         r = cp.sqrt(cp.sum(x**2, axis=-1))
-         theta = cp.arctan2(x[:, 1], x[:, 0])
+     spiral_x = r * cp.cos(theta + r)
+     spiral_y = r * cp.sin(theta + r)
+ 

-         spiral_x = r * cp.cos(theta + r)
-         spiral_y = r * cp.sin(theta + r)
+     spiral_output = cp.concatenate(([spiral_x[0]], spiral_y))
+ 
+     return spiral_output

-         spiral_output = cp.stack((spiral_x, spiral_y), axis=-1)

-     return spiral_output
+ def Softmax(
+     x  # num: Input data to be transformed using softmax function.
+ ):
+     """
+     Applies the softmax function to the input data.

+     Args:
+         (num): Input data to be transformed using softmax function.
+ 
+     Returns:
+         (num): Transformed data after applying softmax function.
+     """

- def Softmax(x):
-     """Optimized Softmax function"""
-     return cp.array(cp.exp(x - cp.max(x, axis=-1, keepdims=True)) / cp.sum(cp.exp(x - cp.max(x, axis=-1, keepdims=True)), axis=-1, keepdims=True))
+     return cp.array(softmax(x.get()))

- def Sigmoid(x):
-     """Optimized Sigmoid function"""
-     return 1 / (1 + cp.exp(-x))

- def Relu(x):
-     """Optimized ReLU function"""
+ def Sigmoid(
+     x  # num: Input data to be transformed using sigmoid function.
+ ):
+     """
+     Applies the sigmoid function to the input data.
+ 
+     Args:
+         (num): Input data to be transformed using sigmoid function.
+ 
+     Returns:
+         (num): Transformed data after applying sigmoid function.
+     """
+     return expit(x)
+ 
+ 
+ def Relu(
+     x  # num: Input data to be transformed using ReLU function.
+ ):
+     """
+     Applies the Rectified Linear Unit (ReLU) function to the input data.
+ 
+     Args:
+         (num): Input data to be transformed using ReLU function.
+ 
+     Returns:
+         (num): Transformed data after applying ReLU function.
+     """
+ 
    return cp.maximum(0, x)

+ 
def tanh(x):
-     """Optimized Tanh function"""
    return cp.tanh(x)

def swish(x):
-     """Optimized Swish function"""
-     return x * Sigmoid(x)
+     return x * (1 / (1 + cp.exp(-x)))

def sin_plus(x):
-     """Optimized SinPlus function"""
    return (cp.sin(x) + 1) / 2

def modular_circular_activation(x, period=2*cp.pi):
-     """Optimized Modular Circular Activation function"""
    return cp.mod(x, period) / period

def tanh_circular_activation(x):
-     """Optimized Tanh Circular Activation function"""
    return (cp.tanh(x) + 1) / 2

def leaky_relu(x, alpha=0.01):
-     """Optimized Leaky ReLU function"""
    return cp.where(x > 0, x, alpha * x)

def softplus(x):
-     """Optimized Softplus function"""
-     return cp.log1p(cp.exp(x))
+     return cp.log(1 + cp.exp(x))

def elu(x, alpha=1.0):
-     """Optimized ELU function"""
    return cp.where(x > 0, x, alpha * (cp.exp(x) - 1))

def gelu(x):
-     """Optimized GELU function"""
    return 0.5 * x * (1 + cp.tanh(cp.sqrt(2 / cp.pi) * (x + 0.044715 * cp.power(x, 3))))

def selu(x, lambda_=1.0507, alpha=1.6733):
-     """Optimized SELU function"""
    return lambda_ * cp.where(x > 0, x, alpha * (cp.exp(x) - 1))

def sinakt(x):
-     """Optimized SinAkt function"""
    return cp.sin(x) + cp.cos(x)

def p_squared(x, alpha=1.0, beta=0.0):
-     """Optimized P-squared function"""
    return alpha * x**2 + beta * x

def sglu(x, alpha=1.0):
-     """Optimized SGU function"""
-     return cp.array(cp.exp(alpha * x)) * x
+     return cp.array(softmax(alpha * x.get())) * x

+ # 4. Double Leaky ReLU (DLReLU)
def dlrelu(x):
-     """Optimized Double Leaky ReLU (DLReLU) function"""
    return cp.maximum(0.01 * x, x) + cp.minimum(0.01 * x, 0.1 * x)

+ # 5. Exponential Sigmoid (ExSig)
def exsig(x):
-     """Optimized Exponential Sigmoid (ExSig) function"""
    return 1 / (1 + cp.exp(-x**2))

+ # 6. Adaptive Cosine Activation (ACos)
def acos(x, alpha=1.0, beta=0.0):
-     """Optimized Adaptive Cosine Activation (ACos) function"""
    return cp.cos(alpha * x + beta)

+ # 7. Gaussian-like Activation (GLA)
def gla(x, alpha=1.0, mu=0.0):
-     """Optimized Gaussian-like Activation (GLA) function"""
    return cp.exp(-alpha * (x - mu)**2)

+ # 8. Swish ReLU (SReLU)
def srelu(x):
-     """Optimized Swish ReLU (SReLU) function"""
    return x * (1 / (1 + cp.exp(-x))) + cp.maximum(0, x)

+ # 9. Quadratic Exponential Linear Unit (QELU)
def qelu(x):
-     """Optimized Quadratic Exponential Linear Unit (QELU) function"""
    return x**2 * cp.exp(x) - 1

+ # 10. Inverse Square Root Activation (ISRA)
def isra(x):
-     """Optimized Inverse Square Root Activation (ISRA) function"""
    return x / cp.sqrt(cp.abs(x) + 1)

def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
-     """Optimized Wave Activation function"""
    return cp.sin(alpha * x) * cp.cos(beta * x) * cp.sin(gamma * x)

def arctan(x):
-     """Optimized Arctan function"""
    return cp.arctan(x)

def bent_identity(x):
-     """Optimized Bent Identity function"""
    return (cp.sqrt(x**2 + 1) - 1) / 2 + x

- def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
-     """Optimized Circular Activation function"""
+ def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):
+ 
    n_features = x.shape[0]
+ 
    circular_output = cp.zeros_like(x)

-     r = cp.sqrt(cp.sum(x**2))
    for i in range(n_features):
+ 
+         r = cp.sqrt(cp.sum(x**2))
        theta = 2 * cp.pi * (i / n_features) + shift
+ 
        circular_x = r * cp.cos(theta + frequency * r) * scale
        circular_y = r * cp.sin(theta + frequency * r) * scale

-         circular_output[i] = circular_x if i % 2 == 0 else circular_y
+         if i % 2 == 0:
+             circular_output[i] = circular_x
+         else:
+             circular_output[i] = circular_y

    return circular_output

def sech(x):
-     """Optimized Sech function"""
    return 2 / (cp.exp(x) + cp.exp(-x))

def softsign(x):
-     """Optimized Softsign function"""
    return x / (1 + cp.abs(x))

def pwl(x, alpha=0.5, beta=1.5):
-     """Optimized Piecewise Linear function (PWL)"""
    return cp.where(x <= 0, alpha * x, beta * x)

def cubic(x):
-     """Optimized Cubic function"""
    return x**3

def gaussian(x, alpha=1.0, mu=0.0):
-     """Optimized Gaussian function"""
    return cp.exp(-alpha * (x - mu)**2)
- 
+ 
def sine(x, alpha=1.0):
-     """Optimized Sine function"""
    return cp.sin(alpha * x)

def tanh_square(x):
-     """Optimized Tanh Square function"""
    return cp.tanh(x)**2

def mod_sigmoid(x, alpha=1.0, beta=0.0):
-     """Optimized Modified Sigmoid function"""
    return 1 / (1 + cp.exp(-alpha * x + beta))

def quartic(x):
-     """Optimized Quartic function"""
    return x**4

def square_quartic(x):
-     """Optimized Square Quartic function"""
    return (x**2)**2

def cubic_quadratic(x):
-     """Optimized Cubic Quadratic function"""
    return x**3 * (x**2)

def exp_cubic(x):
-     """Optimized Exponential Cubic function"""
    return cp.exp(x**3)

def sine_square(x):
-     """Optimized Sine Square function"""
    return cp.sin(x)**2

def logarithmic(x):
-     """Optimized Logarithmic function"""
    return cp.log(x**2 + 1)

def scaled_cubic(x, alpha=1.0):
-     """Optimized Scaled Cubic function"""
    return alpha * x**3

def sine_offset(x, beta=0.0):
-     """Optimized Sine Offset function"""
    return cp.sin(x + beta)

- def apply_activation(Input, activation_list):
-     """
-     Applies a sequence of activation functions to the input.
- 
-     Args:
-         Input (numpy.ndarray): The input to apply activations to.
-         activation_list (list): A list of activation function names to apply.
- 
-     Returns:
-         numpy.ndarray: The input after all activations have been applied.
-     """
- 
-     origin_input = cp.copy(Input)
- 
-     for i in range(len(activation_list)):
- 
-         if activation_list[i] == 'sigmoid':
-             Input += Sigmoid(origin_input)
- 
-         elif activation_list[i] == 'swish':
-             Input += swish(origin_input)
- 
-         elif activation_list[i] == 'mod_circular':
-             Input += modular_circular_activation(origin_input)
- 
-         elif activation_list[i] == 'tanh_circular':
-             Input += tanh_circular_activation(origin_input)
- 
-         elif activation_list[i] == 'leaky_relu':
-             Input += leaky_relu(origin_input)
- 
-         elif activation_list[i] == 'relu':
-             Input += Relu(origin_input)
- 
-         elif activation_list[i] == 'softplus':
-             Input += softplus(origin_input)

-         elif activation_list[i] == 'elu':
-             Input += elu(origin_input)

-         elif activation_list[i] == 'gelu':
-             Input += gelu(origin_input)
- 
-         elif activation_list[i] == 'selu':
-             Input += selu(origin_input)
- 
-         elif activation_list[i] == 'tanh':
-             Input += tanh(origin_input)
- 
-         elif activation_list[i] == 'sinakt':
-             Input += sinakt(origin_input)
- 
-         elif activation_list[i] == 'p_squared':
-             Input += p_squared(origin_input)
- 
-         elif activation_list[i] == 'sglu':
-             Input += sglu(origin_input, alpha=1.0)
- 
-         elif activation_list[i] == 'dlrelu':
-             Input += dlrelu(origin_input)
- 
-         elif activation_list[i] == 'exsig':
-             Input += exsig(origin_input)
- 
-         elif activation_list[i] == 'sin_plus':
-             Input += sin_plus(origin_input)
- 
-         elif activation_list[i] == 'acos':
-             Input += acos(origin_input, alpha=1.0, beta=0.0)
- 
-         elif activation_list[i] == 'gla':
-             Input += gla(origin_input, alpha=1.0, mu=0.0)
- 
-         elif activation_list[i] == 'srelu':
-             Input += srelu(origin_input)
- 
-         elif activation_list[i] == 'qelu':
-             Input += qelu(origin_input)
- 
-         elif activation_list[i] == 'isra':
-             Input += isra(origin_input)
- 
-         elif activation_list[i] == 'waveakt':
-             Input += waveakt(origin_input)
- 
-         elif activation_list[i] == 'arctan':
-             Input += arctan(origin_input)
- 
-         elif activation_list[i] == 'bent_identity':
-             Input += bent_identity(origin_input)
- 
-         elif activation_list[i] == 'sech':
-             Input += sech(origin_input)
- 
-         elif activation_list[i] == 'softsign':
-             Input += softsign(origin_input)
- 
-         elif activation_list[i] == 'pwl':
-             Input += pwl(origin_input)
- 
-         elif activation_list[i] == 'cubic':
-             Input += cubic(origin_input)
- 
-         elif activation_list[i] == 'gaussian':
-             Input += gaussian(origin_input)
- 
-         elif activation_list[i] == 'sine':
-             Input += sine(origin_input)
- 
-         elif activation_list[i] == 'tanh_square':
-             Input += tanh_square(origin_input)
- 
-         elif activation_list[i] == 'mod_sigmoid':
-             Input += mod_sigmoid(origin_input)
- 
-         elif activation_list[i] == 'linear':
-             Input += origin_input
- 
-         elif activation_list[i] == 'quartic':
-             Input += quartic(origin_input)
- 
-         elif activation_list[i] == 'square_quartic':
-             Input += square_quartic(origin_input)
- 
-         elif activation_list[i] == 'cubic_quadratic':
-             Input += cubic_quadratic(origin_input)
- 
-         elif activation_list[i] == 'exp_cubic':
-             Input += exp_cubic(origin_input)
- 
-         elif activation_list[i] == 'sine_square':
-             Input += sine_square(origin_input)
- 
-         elif activation_list[i] == 'logarithmic':
-             Input += logarithmic(origin_input)
- 
-         elif activation_list[i] == 'scaled_cubic':
-             Input += scaled_cubic(origin_input, 1.0)
- 
-         elif activation_list[i] == 'sine_offset':
-             Input += sine_offset(origin_input, 1.0)
- 
-         elif activation_list[i] == 'spiral':
-             Input += spiral_activation(origin_input)
- 
-         elif activation_list[i] == 'circular':
-             Input += circular_activation(origin_input)
+ def safe_aggregate(current_sum, new_value):
+     try:
+         return current_sum + new_value
+     except OverflowError:
+         return cp.array(current_sum) + cp.array(new_value)
+ 

-     return Input
+ def apply_activation(Input, activation_list):
+     """
+     Applies a sequence of activation functions to the input.
+ 
+     Args:
+         Input (numpy.ndarray): The input to apply activations to.
+         activation_list (list): A list of activation function names to apply.
+ 
+     Returns:
+         numpy.ndarray: The input after all activations have been applied.
+     """
+ 
+     origin_input = cp.copy(Input)
+ 
+     for i in range(len(activation_list)):
+         try:
+             if activation_list[i] == 'sigmoid':
+                 Input = safe_aggregate(Input, Sigmoid(origin_input))
+             elif activation_list[i] == 'swish':
+                 Input = safe_aggregate(Input, swish(origin_input))
+             elif activation_list[i] == 'mod_circular':
+                 Input = safe_aggregate(Input, modular_circular_activation(origin_input))
+             elif activation_list[i] == 'tanh_circular':
+                 Input = safe_aggregate(Input, tanh_circular_activation(origin_input))
+             elif activation_list[i] == 'leaky_relu':
+                 Input = safe_aggregate(Input, leaky_relu(origin_input))
+             elif activation_list[i] == 'relu':
+                 Input = safe_aggregate(Input, Relu(origin_input))
+             elif activation_list[i] == 'softplus':
+                 Input = safe_aggregate(Input, softplus(origin_input))
+             elif activation_list[i] == 'elu':
+                 Input = safe_aggregate(Input, elu(origin_input))
+             elif activation_list[i] == 'gelu':
+                 Input = safe_aggregate(Input, gelu(origin_input))
+             elif activation_list[i] == 'selu':
+                 Input = safe_aggregate(Input, selu(origin_input))
+             elif activation_list[i] == 'tanh':
+                 Input = safe_aggregate(Input, tanh(origin_input))
+             elif activation_list[i] == 'sinakt':
+                 Input = safe_aggregate(Input, sinakt(origin_input))
+             elif activation_list[i] == 'p_squared':
+                 Input = safe_aggregate(Input, p_squared(origin_input))
+             elif activation_list[i] == 'sglu':
+                 Input = safe_aggregate(Input, sglu(origin_input, alpha=1.0))
+             elif activation_list[i] == 'dlrelu':
+                 Input = safe_aggregate(Input, dlrelu(origin_input))
+             elif activation_list[i] == 'exsig':
+                 Input = safe_aggregate(Input, exsig(origin_input))
+             elif activation_list[i] == 'sin_plus':
+                 Input = safe_aggregate(Input, sin_plus(origin_input))
+             elif activation_list[i] == 'acos':
+                 Input = safe_aggregate(Input, acos(origin_input, alpha=1.0, beta=0.0))
+             elif activation_list[i] == 'gla':
+                 Input = safe_aggregate(Input, gla(origin_input, alpha=1.0, mu=0.0))
+             elif activation_list[i] == 'srelu':
+                 Input = safe_aggregate(Input, srelu(origin_input))
+             elif activation_list[i] == 'qelu':
+                 Input = safe_aggregate(Input, qelu(origin_input))
+             elif activation_list[i] == 'isra':
+                 Input = safe_aggregate(Input, isra(origin_input))
+             elif activation_list[i] == 'waveakt':
+                 Input = safe_aggregate(Input, waveakt(origin_input))
+             elif activation_list[i] == 'arctan':
+                 Input = safe_aggregate(Input, arctan(origin_input))
+             elif activation_list[i] == 'bent_identity':
+                 Input = safe_aggregate(Input, bent_identity(origin_input))
+             elif activation_list[i] == 'sech':
+                 Input = safe_aggregate(Input, sech(origin_input))
+             elif activation_list[i] == 'softsign':
+                 Input = safe_aggregate(Input, softsign(origin_input))
+             elif activation_list[i] == 'pwl':
+                 Input = safe_aggregate(Input, pwl(origin_input))
+             elif activation_list[i] == 'cubic':
+                 Input = safe_aggregate(Input, cubic(origin_input))
+             elif activation_list[i] == 'gaussian':
+                 Input = safe_aggregate(Input, gaussian(origin_input))
+             elif activation_list[i] == 'sine':
+                 Input = safe_aggregate(Input, sine(origin_input))
+             elif activation_list[i] == 'tanh_square':
+                 Input = safe_aggregate(Input, tanh_square(origin_input))
+             elif activation_list[i] == 'mod_sigmoid':
+                 Input = safe_aggregate(Input, mod_sigmoid(origin_input))
+             elif activation_list[i] == 'linear':
+                 Input = safe_aggregate(Input, origin_input)
+             elif activation_list[i] == 'quartic':
+                 Input = safe_aggregate(Input, quartic(origin_input))
+             elif activation_list[i] == 'square_quartic':
+                 Input = safe_aggregate(Input, square_quartic(origin_input))
+             elif activation_list[i] == 'cubic_quadratic':
+                 Input = safe_aggregate(Input, cubic_quadratic(origin_input))
+             elif activation_list[i] == 'exp_cubic':
+                 Input = safe_aggregate(Input, exp_cubic(origin_input))
+             elif activation_list[i] == 'sine_square':
+                 Input = safe_aggregate(Input, sine_square(origin_input))
+             elif activation_list[i] == 'logarithmic':
+                 Input = safe_aggregate(Input, logarithmic(origin_input))
+             elif activation_list[i] == 'scaled_cubic':
+                 Input = safe_aggregate(Input, scaled_cubic(origin_input, 1.0))
+             elif activation_list[i] == 'sine_offset':
+                 Input = safe_aggregate(Input, sine_offset(origin_input, 1.0))
+             elif activation_list[i] == 'spiral':
+                 Input = safe_aggregate(Input, spiral_activation(origin_input))
+             elif activation_list[i] == 'circular':
+                 Input = safe_aggregate(Input, circular_activation(origin_input))
+ 
+         except Exception as e:
+             warnings.warn(f"Error in activation {activation_list[i]}: {str(e)}", RuntimeWarning)
+             if not isinstance(Input, cp.ndarray):
+                 Input = cp.array(Input)
+             if not isinstance(origin_input, cp.ndarray):
+                 origin_input = cp.array(origin_input)
+             continue
+ 
+     return Input
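Note: the new CUDA Softmax and sglu now route through scipy.special.softmax, and x.get() copies the array from GPU memory to the host before SciPy runs, so those two functions execute on the CPU and pay a transfer in each direction. For comparison, a numerically stabilised softmax that stays on the device (a sketch, not package code):

    import cupy as cp                    # requires a CUDA GPU and the cupy package
    from scipy.special import softmax

    x = cp.array([1.0, 2.0, 3.0])

    # The pattern used above: device -> host -> SciPy -> back to device.
    via_host = cp.array(softmax(x.get()))

    # A device-resident equivalent, stabilised by subtracting the max
    # before exponentiating.
    def softmax_on_device(z):
        e = cp.exp(z - cp.max(z, axis=-1, keepdims=True))
        return e / cp.sum(e, axis=-1, keepdims=True)

    assert cp.allclose(via_host, softmax_on_device(x))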
pyerualjetwork/data_operations.py CHANGED
@@ -421,6 +421,9 @@ def find_closest_factors(a):

def batcher(x_test, y_test, batch_size=1):

+     if batch_size == 1:
+         return x_test, y_test
+ 
    y_labels = np.argmax(y_test, axis=1)

    sampled_x, sampled_y = [], []
pyerualjetwork/data_operations_cuda.py CHANGED
@@ -430,11 +430,14 @@ def find_closest_factors(a):
    j = a // i
    return i, j

- 
def batcher(x_test, y_test, batch_size=1):
-     y_labels = cp.argmax(y_test, axis=1)  # Determine the class labels
+ 
+     if batch_size == 1:
+         return x_test, y_test
+ 
+     y_labels = cp.argmax(y_test, axis=1)

-     unique_labels = cp.unique(y_labels)  # Find all the classes
+     unique_labels = cp.unique(y_labels)
    total_samples = sum(
        int(cp.sum(y_labels == class_label) * batch_size) for class_label in unique_labels
    )
@@ -444,20 +447,15 @@ def batcher(x_test, y_test, batch_size=1):

    offset = 0
    for class_label in unique_labels:
-         # Find the indices belonging to this class
        class_indices = cp.where(y_labels == class_label)[0]

-         # Determine the number of samples
        num_samples = int(len(class_indices) * batch_size)

-         # Randomly select samples
        sampled_indices = cp.random.choice(class_indices, num_samples, replace=False)

-         # Write the data into the sampled arrays
        sampled_x[offset:offset + num_samples] = x_test[sampled_indices]
        sampled_y[offset:offset + num_samples] = y_test[sampled_indices]
- 
-         # Update the offset
+ 
        offset += num_samples

    return sampled_x, sampled_y
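Note: both batcher implementations now return the data untouched when batch_size == 1 and otherwise draw a stratified subsample, where batch_size is a per-class fraction rather than a sample count. A self-contained NumPy sketch of that behaviour (batcher_sketch is an illustrative name, not the package API):

    import numpy as np

    def batcher_sketch(x, y, batch_size=1):
        if batch_size == 1:                        # the new early return
            return x, y
        labels = np.argmax(y, axis=1)
        keep = []
        for c in np.unique(labels):
            idx = np.where(labels == c)[0]
            n = int(len(idx) * batch_size)         # fraction of each class kept
            keep.extend(np.random.choice(idx, n, replace=False))
        keep = np.asarray(keep)
        return x[keep], y[keep]

    x = np.arange(12, dtype=np.float32).reshape(6, 2)
    y = np.eye(2)[[0, 0, 0, 1, 1, 1]]              # one-hot labels, 2 classes
    xb, yb = batcher_sketch(x, y, batch_size=0.5)  # roughly half of each class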
pyerualjetwork/plan.py CHANGED
@@ -125,7 +125,15 @@ def fit(

    elif val and (x_val is not None and y_val is not None):
        x_val = x_val.astype(dtype, copy=False)
-         y_val = y_val.astype(dtype, copy=False)
+         if len(y_val[0]) < 256:
+             if y_val.dtype != np.uint8:
+                 y_val = np.array(y_val, copy=False).astype(np.uint8, copy=False)
+         elif len(y_val[0]) <= 32767:
+             if y_val.dtype != np.uint16:
+                 y_val = np.array(y_val, copy=False).astype(np.uint16, copy=False)
+         else:
+             if y_val.dtype != np.uint32:
+                 y_val = np.array(y_val, copy=False).astype(np.uint32, copy=False)

    val_list = [] if val else None
    val_count = val_count or 10
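Note: fit now narrows one-hot label arrays to the smallest unsigned integer type that can represent them, keyed off the one-hot width len(y_val[0]), instead of casting labels to the floating-point dtype. The same thresholds as a stand-alone sketch (smallest_label_dtype is an illustrative helper, not part of the package):

    import numpy as np

    def smallest_label_dtype(y):
        width = len(y[0])         # number of classes in the one-hot encoding
        if width < 256:
            return np.uint8       # thresholds mirror the hunk above
        elif width <= 32767:
            return np.uint16
        return np.uint32

    y_val = np.eye(10)[np.arange(10)]                      # 10-class one-hot
    y_val = y_val.astype(smallest_label_dtype(y_val), copy=False)
    assert y_val.dtype == np.uint8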
@@ -137,7 +145,7 @@

    # Training process
    for index, inp in enumerate(x_train):
-         inp = np.array(inp, copy=False).ravel()
+         inp = np.array(inp, copy=False, dtype=dtype).ravel()
        y_decoded = decode_one_hot(y_train)
        # Weight updates
        STPW = feed_forward(inp, STPW, is_training=True, Class=y_decoded[index], activation_potentiation=activation_potentiation, LTD=LTD)
@@ -244,15 +252,15 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
    if x_test is not None:
        x_test = x_test.astype(dtype, copy=False)

-     if len(y_test[0]) < 256:
-         if y_test.dtype != np.uint8:
-             y_test = np.array(y_test, copy=False).astype(np.uint8, copy=False)
-     elif len(y_test[0]) <= 32767:
-         if y_test.dtype != np.uint16:
-             y_test = np.array(y_test, copy=False).astype(np.uint16, copy=False)
-     else:
-         if y_test.dtype != np.uint32:
-             y_test = np.array(y_test, copy=False).astype(np.uint32, copy=False)
+         if len(y_test[0]) < 256:
+             if y_test.dtype != np.uint8:
+                 y_test = np.array(y_test, copy=False).astype(np.uint8, copy=False)
+         elif len(y_test[0]) <= 32767:
+             if y_test.dtype != np.uint16:
+                 y_test = np.array(y_test, copy=False).astype(np.uint16, copy=False)
+         else:
+             if y_test.dtype != np.uint32:
+                 y_test = np.array(y_test, copy=False).astype(np.uint32, copy=False)

    if x_test is None and y_test is None:
        x_test = x_train
@@ -304,8 +312,9 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
    else:
        best_activations = start_this
        x_test_batch, y_test_batch = batcher(x_test, y_test, batch_size=batch_size)
-         W = fit(x_train, y_train, activation_potentiation=best_activations, train_bar=False, auto_normalization=auto_normalization)
-         model = evaluate(x_test_batch, y_test_batch, W=W, loading_bar_status=False, activation_potentiation=activations)
+ 
+         W = fit(x_train, y_train, activation_potentiation=best_activations, train_bar=False, auto_normalization=auto_normalization, dtype=dtype)
+         model = evaluate(x_test_batch, y_test_batch, W=W, loading_bar_status=False, activation_potentiation=activations, dtype=dtype)

        if loss == 'categorical_crossentropy':
            test_loss = categorical_crossentropy(y_true_batch=y_test_batch, y_pred_batch=model[get_preds_softmax()])
@@ -334,6 +343,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
            activations.append(activation_potentiation[j])

            x_test_batch, y_test_batch = batcher(x_test, y_test, batch_size=batch_size)
+ 
            W = fit(x_train, y_train, activation_potentiation=activations, train_bar=False, auto_normalization=auto_normalization, dtype=dtype)
            model = evaluate(x_test_batch, y_test_batch, W=W, loading_bar_status=False, activation_potentiation=activations, dtype=dtype)

@@ -435,7 +445,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
        if target_acc is not None and best_acc >= target_acc:
            progress.close()
            train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-                 activation_potentiation=final_activations)
+                 activation_potentiation=final_activations, dtype=dtype)

            if loss == 'categorical_crossentropy':
                train_loss = categorical_crossentropy(y_true_batch=y_train,
@@ -460,7 +470,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
        if target_loss is not None and best_loss <= target_loss:
            progress.close()
            train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-                 activation_potentiation=final_activations)
+                 activation_potentiation=final_activations, dtype=dtype)

            if loss == 'categorical_crossentropy':
                train_loss = categorical_crossentropy(y_true_batch=y_train,
@@ -503,7 +513,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
        if best_acc_per_depth_list[i] == best_acc_per_depth_list[i-1]:
            progress.close()
            train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-                 activation_potentiation=final_activations)
+                 activation_potentiation=final_activations, dtype=dtype)

            if loss == 'categorical_crossentropy':
                train_loss = categorical_crossentropy(y_true_batch=y_train,
@@ -527,7 +537,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
    # Final evaluation
    progress.close()
    train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-         activation_potentiation=final_activations)
+         activation_potentiation=final_activations, dtype=dtype)

    if loss == 'categorical_crossentropy':
        train_loss = categorical_crossentropy(y_true_batch=y_train, y_pred_batch=train_model[get_preds_softmax()])
pyerualjetwork/plan_cuda.py CHANGED
@@ -428,7 +428,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
        if target_acc is not None and best_acc >= target_acc:
            progress.close()
            train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-                 activation_potentiation=final_activations)
+                 activation_potentiation=final_activations, dtype=dtype)

            if loss == 'categorical_crossentropy':
                train_loss = categorical_crossentropy(y_true_batch=y_train,
@@ -453,7 +453,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
        if target_loss is not None and best_loss <= target_loss:
            progress.close()
            train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-                 activation_potentiation=final_activations)
+                 activation_potentiation=final_activations, dtype=dtype)

            if loss == 'categorical_crossentropy':
                train_loss = categorical_crossentropy(y_true_batch=y_train,
@@ -496,7 +496,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
        if best_acc_per_depth_list[i] == best_acc_per_depth_list[i-1]:
            progress.close()
            train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-                 activation_potentiation=final_activations)
+                 activation_potentiation=final_activations, dtype=dtype)

            if loss == 'categorical_crossentropy':
                train_loss = categorical_crossentropy(y_true_batch=y_train,
@@ -520,7 +520,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
    # Final evaluation
    progress.close()
    train_model = evaluate(x_train, y_train, W=best_weights, loading_bar_status=False,
-         activation_potentiation=final_activations)
+         activation_potentiation=final_activations, dtype=dtype)

    if loss == 'categorical_crossentropy':
        train_loss = categorical_crossentropy(y_true_batch=y_train, y_pred_batch=train_model[get_preds_softmax()])
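Note: the recurring change across these learner hunks is that the caller's dtype is now threaded into every internal fit() and evaluate() call, so intermediate training and evaluation arrays keep the precision requested at the top level. Illustrative call (argument values are assumptions; only the parameters visible in the hunks above are guaranteed by this diff):

    import numpy as np
    # from pyerualjetwork import plan

    # model = plan.learner(x_train, y_train, x_test, y_test,
    #                      strategy='accuracy', batch_size=0.5,
    #                      dtype=np.float32)   # forwarded to fit()/evaluate()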
@@ -609,9 +609,9 @@ def evaluate(

    loading_bar_status (bool): Loading bar (optional). Default = True.

-     show_metrics (bool): Option to show metrics (optional). Default = False.
+     show_metrics (bool): Visualize metrics ? (optional). Default = False.

-     dtype (cupy.dtype): Data type for the arrays. np.float32 by default. Example: cp.float64 or cp.float16. [fp32 for balanced devices, fp64 for strong devices, fp16 for weak devices: not reccomended!] (optional)
+     dtype (cupy.dtype): Data type for the arrays. cp.float32 by default. Example: cp.float64 or cp.float16. [fp32 for balanced devices, fp64 for strong devices, fp16 for weak devices: not reccomended!] (optional)

    Returns:
        tuple: Model (list).
@@ -647,10 +647,8 @@
        Input = x_test[inpIndex].ravel()
        neural_layer = Input

-         # Feedforward pass
        neural_layer = feed_forward(neural_layer, cp.copy(W), is_training=False, Class='?', activation_potentiation=activation_potentiation)

-         # Compute the probabilities and predictions
        predict_probabilitys[inpIndex] = Softmax(neural_layer)

        RealOutput = cp.argmax(y_test[inpIndex])
@@ -674,4 +672,4 @@
    if show_metrics:
        plot_evaluate(x_test, y_test, predict_classes, acc_list, W=cp.copy(W), activation_potentiation=activation_potentiation)

-     return W, predict_classes, acc_list[-1], None, None, predict_probabilitys
+     return W, predict_classes, acc_list[-1], None, None, predict_probabilitys
pyerualjetwork-4.0.7.dist-info/METADATA CHANGED
@@ -1,13 +1,13 @@
Metadata-Version: 2.1
Name: pyerualjetwork
- Version: 4.0.5
+ Version: 4.0.7
Summary: PyerualJetwork is a machine learning library written in Python for professionals, incorporating advanced, unique, new, and modern techniques.
Author: Hasan Can Beydili
Author-email: tchasancan@gmail.com
Keywords: model evaluation,classification,potentiation learning artificial neural networks,NEAT,genetic algorithms,reinforcement learning,neural networks
Description-Content-Type: text/markdown

- # PyerualJetwork [![Socket Badge](https://socket.dev/api/badge/pypi/package/pyerualjetwork/4.0.5?artifact_id=tar-gz)](https://socket.dev/pypi/package/pyerualjetwork/overview/4.0.5/tar-gz) [![CodeFactor](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork/badge)](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/anaplan)](https://pepy.tech/projects/anaplan) + [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/month)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/week)](https://pepy.tech/projects/pyerualjetwork) [![PyPI version](https://img.shields.io/pypi/v/pyerualjetwork.svg)](https://pypi.org/project/pyerualjetwork/)
+ # PyerualJetwork [![Socket Badge](https://socket.dev/api/badge/pypi/package/pyerualjetwork/4.0.6?artifact_id=tar-gz)](https://socket.dev/pypi/package/pyerualjetwork/overview/4.0.6/tar-gz) [![CodeFactor](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork/badge)](https://www.codefactor.io/repository/github/hcb06/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/anaplan)](https://pepy.tech/projects/anaplan) + [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/month)](https://pepy.tech/projects/pyerualjetwork) [![PyPI Downloads](https://static.pepy.tech/badge/pyerualjetwork/week)](https://pepy.tech/projects/pyerualjetwork) [![PyPI version](https://img.shields.io/pypi/v/pyerualjetwork.svg)](https://pypi.org/project/pyerualjetwork/)

Note: anaplan old name of pyerualjetwork

pyerualjetwork-4.0.7.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
- pyerualjetwork/__init__.py,sha256=mBpIQOG8jn_kVBG6FrXWWqZ39nSivP_ibUikmVmT2Ik,2542
- pyerualjetwork/activation_functions.py,sha256=iJpdsX8FqZ3lB3x-YG7d9-em8xHD0y1ciJLNWmI7Y6A,9941
- pyerualjetwork/activation_functions_cuda.py,sha256=8wV31USrS-9BGI388Ntya10HCucNkV-zm5EH0YL2iRw,10896
- pyerualjetwork/data_operations.py,sha256=ncg9AXy3_NdDcJ6W4U-VC_Ku0mvyLl11RUKffkQ8sxY,16474
- pyerualjetwork/data_operations_cuda.py,sha256=Ekme6MnVNCqYMjKaO3nGhFYMnvWlELW9__QQWr2LXk4,17368
+ pyerualjetwork/__init__.py,sha256=i_pt-vpEDdNMFEVgM_LjkJ-_bmhTswIYaZTUC-LsRps,2542
+ pyerualjetwork/activation_functions.py,sha256=UeuuagJWcSoFfmwikDU7O8ph--oySnWDJNqKbEh4SlE,12043
+ pyerualjetwork/activation_functions_cuda.py,sha256=5F49gKkiRngo0hAaS1KfarxQ7wEyub13WAX_apxf8j8,12069
+ pyerualjetwork/data_operations.py,sha256=rnOYLLK3YnRdWpEsEQABU0RE950lQQI7971eBLBpqOQ,16536
+ pyerualjetwork/data_operations_cuda.py,sha256=hh51PQORfQicWruu9Bo-HAJW56lQkoG2QZOBoTjFGkw,17175
pyerualjetwork/help.py,sha256=pZs7hIhgFkovGLle97d9Qu9m5zKhMh7-OAIphIoSxBg,830
pyerualjetwork/loss_functions.py,sha256=6PyBI232SQRGuFnG3LDGvnv_PUdWzT2_2mUODJiejGI,618
pyerualjetwork/loss_functions_cuda.py,sha256=C93IZJcrOpT6HMK9x1O4AHJWXYTkN5WZiqdssPbvAPk,617
@@ -10,14 +10,14 @@ pyerualjetwork/metrics.py,sha256=q7MkhnZDRbCjFBDDfUgrl8lBYnUT_1ro1LxeBq105pI,607
pyerualjetwork/metrics_cuda.py,sha256=TCwn5Z_4jQjqPCURX_xtcz9cjsYVzlahgKDA-qCgpU4,5072
pyerualjetwork/model_operations.py,sha256=k_53BJladPm9fBWdlVpS6Uf5IQzpNlJWLH746DXGq_M,13036
pyerualjetwork/model_operations_cuda.py,sha256=Guo0lFaaLiAXwKmnOi8Fz_bL_p38qR46CIhGOg_V1Sw,13138
- pyerualjetwork/plan.py,sha256=kGZucs_GOoSNH25vsvfPgWKttPdFHvjCek8EICj-W1s,34205
- pyerualjetwork/plan_cuda.py,sha256=4APaOl0_HGmp2i9EubFptajpfRQSoNU0LQHF1LDiQrg,33969
+ pyerualjetwork/plan.py,sha256=iF0zIaO2KrPYF8G__-Q2wMYbgQEIdRWap3BBMRZ1Fpo,34746
+ pyerualjetwork/plan_cuda.py,sha256=JF2LK5BgDZAQb3LuPDCyz8G7ICEvEg8BWezeZdw8X-4,33920
pyerualjetwork/planeat.py,sha256=8cwWboJtXgFTKq6nFl1T9McbLDmBquKUr12y168PmcM,39513
pyerualjetwork/planeat_cuda.py,sha256=boN-HFwm_D9cT1z0eAR8zgkiD_XOg-J2T2jNFvZweG4,39570
pyerualjetwork/ui.py,sha256=wu2BhU1k-w3Kcho5Jtq4SEKe68ftaUeRGneUOSCVDjU,575
pyerualjetwork/visualizations.py,sha256=DvbiQGlvlKNAgBJ3O3ukAi6uxSheha9SRFh5YX7ZxIA,26678
pyerualjetwork/visualizations_cuda.py,sha256=dA0u85ZIyKqjtoSJ6p3EbEpJs4V4vS5W5ftR6eif8yg,26713
- pyerualjetwork-4.0.5.dist-info/METADATA,sha256=EXQuW_wIKq9Y7tVYikjrKyJAp-nSc9iEkb-Xxx19954,6357
- pyerualjetwork-4.0.5.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- pyerualjetwork-4.0.5.dist-info/top_level.txt,sha256=BRyt62U_r3ZmJpj-wXNOoA345Bzamrj6RbaWsyW4tRg,15
- pyerualjetwork-4.0.5.dist-info/RECORD,,
+ pyerualjetwork-4.0.7.dist-info/METADATA,sha256=BWCx6dmY3FC-ud-gC_8ukAcE6-4o-wrU1xIItVeadDw,6357
+ pyerualjetwork-4.0.7.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ pyerualjetwork-4.0.7.dist-info/top_level.txt,sha256=BRyt62U_r3ZmJpj-wXNOoA345Bzamrj6RbaWsyW4tRg,15
+ pyerualjetwork-4.0.7.dist-info/RECORD,,