pyerualjetwork 4.1.2b0__tar.gz → 4.1.3__tar.gz
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/PKG-INFO +1 -1
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/__init__.py +1 -1
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/activation_functions.py +45 -48
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/activation_functions_cuda.py +45 -46
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/data_operations.py +2 -2
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/data_operations_cuda.py +0 -7
- pyerualjetwork-4.1.3/pyerualjetwork/help.py +16 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/metrics_cuda.py +1 -2
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/model_operations.py +1 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/model_operations_cuda.py +1 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/plan.py +2 -10
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/plan_cuda.py +1 -1
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/visualizations_cuda.py +5 -5
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork.egg-info/PKG-INFO +1 -1
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/setup.py +1 -1
- pyerualjetwork-4.1.2b0/pyerualjetwork/help.py +0 -16
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/README.md +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/loss_functions.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/loss_functions_cuda.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/metrics.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/planeat.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/planeat_cuda.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/ui.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/visualizations.py +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork.egg-info/SOURCES.txt +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork.egg-info/dependency_links.txt +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork.egg-info/top_level.txt +0 -0
- {pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/setup.cfg +0 -0
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pyerualjetwork
-Version: 4.1.2b0
+Version: 4.1.3
 Summary: PyerualJetwork is a machine learning library written in Python for professionals, incorporating advanced, unique, new, and modern techniques.
 Author: Hasan Can Beydili
 Author-email: tchasancan@gmail.com
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/__init__.py
@@ -47,7 +47,7 @@ for package_name in package_names:
 
 print(f"PyerualJetwork is ready to use with {err} errors")
 
-__version__ = "4.1.2b0"
+__version__ = "4.1.3"
 __update__ = "* Note: CUDA modules need cupy. Enter this command in your terminal: 'pip install cupy-cuda12x' or your cuda version.\n* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES\n* PyerualJetwork document: https://github.com/HCB06/Anaplan/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf\n* YouTube tutorials: https://www.youtube.com/@HasanCanBeydili"
 
 def print_version(__version__):
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/activation_functions.py
@@ -218,15 +218,12 @@ def scaled_cubic(x, alpha=1.0):
 def sine_offset(x, beta=0.0):
     return np.sin(x + beta)
 
-
-
-def safe_aggregate(current_sum, new_value):
+def safe_add(current_sum, new_value):
     try:
         return current_sum + new_value
     except OverflowError:
         return np.array(current_sum) + np.array(new_value)
 
-
 def apply_activation(Input, activation_list):
     """
     Applies a sequence of activation functions to the input.
@@ -244,93 +241,93 @@ def apply_activation(Input, activation_list):
     for i in range(len(activation_list)):
         try:
             if activation_list[i] == 'sigmoid':
-                Input = safe_aggregate(Input, Sigmoid(origin_input))
+                Input = safe_add(Input, Sigmoid(origin_input))
             elif activation_list[i] == 'swish':
-                Input = safe_aggregate(Input, swish(origin_input))
+                Input = safe_add(Input, swish(origin_input))
             elif activation_list[i] == 'mod_circular':
-                Input = safe_aggregate(Input, modular_circular_activation(origin_input))
+                Input = safe_add(Input, modular_circular_activation(origin_input))
             elif activation_list[i] == 'tanh_circular':
-                Input = safe_aggregate(Input, tanh_circular_activation(origin_input))
+                Input = safe_add(Input, tanh_circular_activation(origin_input))
             elif activation_list[i] == 'leaky_relu':
-                Input = safe_aggregate(Input, leaky_relu(origin_input))
+                Input = safe_add(Input, leaky_relu(origin_input))
             elif activation_list[i] == 'relu':
-                Input = safe_aggregate(Input, Relu(origin_input))
+                Input = safe_add(Input, Relu(origin_input))
             elif activation_list[i] == 'softplus':
-                Input = safe_aggregate(Input, softplus(origin_input))
+                Input = safe_add(Input, softplus(origin_input))
             elif activation_list[i] == 'elu':
-                Input = safe_aggregate(Input, elu(origin_input))
+                Input = safe_add(Input, elu(origin_input))
             elif activation_list[i] == 'gelu':
-                Input = safe_aggregate(Input, gelu(origin_input))
+                Input = safe_add(Input, gelu(origin_input))
             elif activation_list[i] == 'selu':
-                Input = safe_aggregate(Input, selu(origin_input))
+                Input = safe_add(Input, selu(origin_input))
             elif activation_list[i] == 'tanh':
-                Input = safe_aggregate(Input, tanh(origin_input))
+                Input = safe_add(Input, tanh(origin_input))
             elif activation_list[i] == 'sinakt':
-                Input = safe_aggregate(Input, sinakt(origin_input))
+                Input = safe_add(Input, sinakt(origin_input))
             elif activation_list[i] == 'p_squared':
-                Input = safe_aggregate(Input, p_squared(origin_input))
+                Input = safe_add(Input, p_squared(origin_input))
             elif activation_list[i] == 'sglu':
-                Input = safe_aggregate(Input, sglu(origin_input, alpha=1.0))
+                Input = safe_add(Input, sglu(origin_input, alpha=1.0))
             elif activation_list[i] == 'dlrelu':
-                Input = safe_aggregate(Input, dlrelu(origin_input))
+                Input = safe_add(Input, dlrelu(origin_input))
             elif activation_list[i] == 'exsig':
-                Input = safe_aggregate(Input, exsig(origin_input))
+                Input = safe_add(Input, exsig(origin_input))
             elif activation_list[i] == 'sin_plus':
-                Input = safe_aggregate(Input, sin_plus(origin_input))
+                Input = safe_add(Input, sin_plus(origin_input))
             elif activation_list[i] == 'acos':
-                Input = safe_aggregate(Input, acos(origin_input, alpha=1.0, beta=0.0))
+                Input = safe_add(Input, acos(origin_input, alpha=1.0, beta=0.0))
             elif activation_list[i] == 'gla':
-                Input = safe_aggregate(Input, gla(origin_input, alpha=1.0, mu=0.0))
+                Input = safe_add(Input, gla(origin_input, alpha=1.0, mu=0.0))
             elif activation_list[i] == 'srelu':
-                Input = safe_aggregate(Input, srelu(origin_input))
+                Input = safe_add(Input, srelu(origin_input))
             elif activation_list[i] == 'qelu':
-                Input = safe_aggregate(Input, qelu(origin_input))
+                Input = safe_add(Input, qelu(origin_input))
             elif activation_list[i] == 'isra':
-                Input = safe_aggregate(Input, isra(origin_input))
+                Input = safe_add(Input, isra(origin_input))
             elif activation_list[i] == 'waveakt':
-                Input = safe_aggregate(Input, waveakt(origin_input))
+                Input = safe_add(Input, waveakt(origin_input))
             elif activation_list[i] == 'arctan':
-                Input = safe_aggregate(Input, arctan(origin_input))
+                Input = safe_add(Input, arctan(origin_input))
             elif activation_list[i] == 'bent_identity':
-                Input = safe_aggregate(Input, bent_identity(origin_input))
+                Input = safe_add(Input, bent_identity(origin_input))
             elif activation_list[i] == 'sech':
-                Input = safe_aggregate(Input, sech(origin_input))
+                Input = safe_add(Input, sech(origin_input))
             elif activation_list[i] == 'softsign':
-                Input = safe_aggregate(Input, softsign(origin_input))
+                Input = safe_add(Input, softsign(origin_input))
             elif activation_list[i] == 'pwl':
-                Input = safe_aggregate(Input, pwl(origin_input))
+                Input = safe_add(Input, pwl(origin_input))
             elif activation_list[i] == 'cubic':
-                Input = safe_aggregate(Input, cubic(origin_input))
+                Input = safe_add(Input, cubic(origin_input))
             elif activation_list[i] == 'gaussian':
-                Input = safe_aggregate(Input, gaussian(origin_input))
+                Input = safe_add(Input, gaussian(origin_input))
             elif activation_list[i] == 'sine':
-                Input = safe_aggregate(Input, sine(origin_input))
+                Input = safe_add(Input, sine(origin_input))
             elif activation_list[i] == 'tanh_square':
-                Input = safe_aggregate(Input, tanh_square(origin_input))
+                Input = safe_add(Input, tanh_square(origin_input))
             elif activation_list[i] == 'mod_sigmoid':
-                Input = safe_aggregate(Input, mod_sigmoid(origin_input))
+                Input = safe_add(Input, mod_sigmoid(origin_input))
             elif activation_list[i] == 'linear':
-                Input = safe_aggregate(Input, origin_input)
+                Input = safe_add(Input, origin_input)
             elif activation_list[i] == 'quartic':
-                Input = safe_aggregate(Input, quartic(origin_input))
+                Input = safe_add(Input, quartic(origin_input))
             elif activation_list[i] == 'square_quartic':
-                Input = safe_aggregate(Input, square_quartic(origin_input))
+                Input = safe_add(Input, square_quartic(origin_input))
             elif activation_list[i] == 'cubic_quadratic':
-                Input = safe_aggregate(Input, cubic_quadratic(origin_input))
+                Input = safe_add(Input, cubic_quadratic(origin_input))
             elif activation_list[i] == 'exp_cubic':
-                Input = safe_aggregate(Input, exp_cubic(origin_input))
+                Input = safe_add(Input, exp_cubic(origin_input))
             elif activation_list[i] == 'sine_square':
-                Input = safe_aggregate(Input, sine_square(origin_input))
+                Input = safe_add(Input, sine_square(origin_input))
             elif activation_list[i] == 'logarithmic':
-                Input = safe_aggregate(Input, logarithmic(origin_input))
+                Input = safe_add(Input, logarithmic(origin_input))
             elif activation_list[i] == 'scaled_cubic':
-                Input = safe_aggregate(Input, scaled_cubic(origin_input, 1.0))
+                Input = safe_add(Input, scaled_cubic(origin_input, 1.0))
             elif activation_list[i] == 'sine_offset':
-                Input = safe_aggregate(Input, sine_offset(origin_input, 1.0))
+                Input = safe_add(Input, sine_offset(origin_input, 1.0))
             elif activation_list[i] == 'spiral':
-                Input = safe_aggregate(Input, spiral_activation(origin_input))
+                Input = safe_add(Input, spiral_activation(origin_input))
             elif activation_list[i] == 'circular':
-                Input = safe_aggregate(Input, circular_activation(origin_input))
+                Input = safe_add(Input, circular_activation(origin_input))
 
 
         except Exception as e:
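For context, the two hunks above only rename the accumulator: each listed activation is applied to the original input and added onto the running result, so a list of activations is a sum of responses, not a chained composition. A minimal standalone sketch of that behavior (the initialization of Input and the definition of origin_input are not shown in the diff, so the starting values here are assumptions):

import numpy as np

def safe_add(current_sum, new_value):
    # mirrors the renamed helper: fall back to array addition on overflow
    try:
        return current_sum + new_value
    except OverflowError:
        return np.array(current_sum) + np.array(new_value)

origin_input = np.array([0.5, -1.0, 2.0])   # toy input (hypothetical)
Input = origin_input.copy()                  # assumed starting value

for name in ['linear', 'tanh']:              # responses are summed, not chained
    if name == 'linear':
        Input = safe_add(Input, origin_input)
    elif name == 'tanh':
        Input = safe_add(Input, np.tanh(origin_input))

print(Input)   # [ 1.46211716 -2.76159416  4.96402758]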
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/activation_functions_cuda.py
@@ -219,13 +219,12 @@ def sine_offset(x, beta=0.0):
 
 
 
-def safe_aggregate(current_sum, new_value):
+def safe_add(current_sum, new_value):
     try:
         return current_sum + new_value
     except OverflowError:
         return cp.array(current_sum) + cp.array(new_value)
 
-
 def apply_activation(Input, activation_list):
     """
     Applies a sequence of activation functions to the input.
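The CUDA twin mirrors the NumPy change, with CuPy arrays in the overflow fallback. A small smoke test, assuming only that cupy is installed (cp.asnumpy is CuPy's standard device-to-host copy):

import cupy as cp

def safe_add(current_sum, new_value):
    try:
        return current_sum + new_value
    except OverflowError:
        return cp.array(current_sum) + cp.array(new_value)

a = cp.arange(3, dtype=cp.float32)     # device-resident [0., 1., 2.]
print(cp.asnumpy(safe_add(a, a)))      # [0. 2. 4.] back on the host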
@@ -243,93 +242,93 @@ def apply_activation(Input, activation_list):
     for i in range(len(activation_list)):
         try:
             if activation_list[i] == 'sigmoid':
-                Input = safe_aggregate(Input, Sigmoid(origin_input))
+                Input = safe_add(Input, Sigmoid(origin_input))
             elif activation_list[i] == 'swish':
-                Input = safe_aggregate(Input, swish(origin_input))
+                Input = safe_add(Input, swish(origin_input))
             elif activation_list[i] == 'mod_circular':
-                Input = safe_aggregate(Input, modular_circular_activation(origin_input))
+                Input = safe_add(Input, modular_circular_activation(origin_input))
             elif activation_list[i] == 'tanh_circular':
-                Input = safe_aggregate(Input, tanh_circular_activation(origin_input))
+                Input = safe_add(Input, tanh_circular_activation(origin_input))
             elif activation_list[i] == 'leaky_relu':
-                Input = safe_aggregate(Input, leaky_relu(origin_input))
+                Input = safe_add(Input, leaky_relu(origin_input))
             elif activation_list[i] == 'relu':
-                Input = safe_aggregate(Input, Relu(origin_input))
+                Input = safe_add(Input, Relu(origin_input))
             elif activation_list[i] == 'softplus':
-                Input = safe_aggregate(Input, softplus(origin_input))
+                Input = safe_add(Input, softplus(origin_input))
             elif activation_list[i] == 'elu':
-                Input = safe_aggregate(Input, elu(origin_input))
+                Input = safe_add(Input, elu(origin_input))
             elif activation_list[i] == 'gelu':
-                Input = safe_aggregate(Input, gelu(origin_input))
+                Input = safe_add(Input, gelu(origin_input))
             elif activation_list[i] == 'selu':
-                Input = safe_aggregate(Input, selu(origin_input))
+                Input = safe_add(Input, selu(origin_input))
             elif activation_list[i] == 'tanh':
-                Input = safe_aggregate(Input, tanh(origin_input))
+                Input = safe_add(Input, tanh(origin_input))
             elif activation_list[i] == 'sinakt':
-                Input = safe_aggregate(Input, sinakt(origin_input))
+                Input = safe_add(Input, sinakt(origin_input))
             elif activation_list[i] == 'p_squared':
-                Input = safe_aggregate(Input, p_squared(origin_input))
+                Input = safe_add(Input, p_squared(origin_input))
             elif activation_list[i] == 'sglu':
-                Input = safe_aggregate(Input, sglu(origin_input, alpha=1.0))
+                Input = safe_add(Input, sglu(origin_input, alpha=1.0))
             elif activation_list[i] == 'dlrelu':
-                Input = safe_aggregate(Input, dlrelu(origin_input))
+                Input = safe_add(Input, dlrelu(origin_input))
             elif activation_list[i] == 'exsig':
-                Input = safe_aggregate(Input, exsig(origin_input))
+                Input = safe_add(Input, exsig(origin_input))
             elif activation_list[i] == 'sin_plus':
-                Input = safe_aggregate(Input, sin_plus(origin_input))
+                Input = safe_add(Input, sin_plus(origin_input))
             elif activation_list[i] == 'acos':
-                Input = safe_aggregate(Input, acos(origin_input, alpha=1.0, beta=0.0))
+                Input = safe_add(Input, acos(origin_input, alpha=1.0, beta=0.0))
             elif activation_list[i] == 'gla':
-                Input = safe_aggregate(Input, gla(origin_input, alpha=1.0, mu=0.0))
+                Input = safe_add(Input, gla(origin_input, alpha=1.0, mu=0.0))
             elif activation_list[i] == 'srelu':
-                Input = safe_aggregate(Input, srelu(origin_input))
+                Input = safe_add(Input, srelu(origin_input))
             elif activation_list[i] == 'qelu':
-                Input = safe_aggregate(Input, qelu(origin_input))
+                Input = safe_add(Input, qelu(origin_input))
             elif activation_list[i] == 'isra':
-                Input = safe_aggregate(Input, isra(origin_input))
+                Input = safe_add(Input, isra(origin_input))
             elif activation_list[i] == 'waveakt':
-                Input = safe_aggregate(Input, waveakt(origin_input))
+                Input = safe_add(Input, waveakt(origin_input))
             elif activation_list[i] == 'arctan':
-                Input = safe_aggregate(Input, arctan(origin_input))
+                Input = safe_add(Input, arctan(origin_input))
             elif activation_list[i] == 'bent_identity':
-                Input = safe_aggregate(Input, bent_identity(origin_input))
+                Input = safe_add(Input, bent_identity(origin_input))
             elif activation_list[i] == 'sech':
-                Input = safe_aggregate(Input, sech(origin_input))
+                Input = safe_add(Input, sech(origin_input))
             elif activation_list[i] == 'softsign':
-                Input = safe_aggregate(Input, softsign(origin_input))
+                Input = safe_add(Input, softsign(origin_input))
             elif activation_list[i] == 'pwl':
-                Input = safe_aggregate(Input, pwl(origin_input))
+                Input = safe_add(Input, pwl(origin_input))
             elif activation_list[i] == 'cubic':
-                Input = safe_aggregate(Input, cubic(origin_input))
+                Input = safe_add(Input, cubic(origin_input))
             elif activation_list[i] == 'gaussian':
-                Input = safe_aggregate(Input, gaussian(origin_input))
+                Input = safe_add(Input, gaussian(origin_input))
             elif activation_list[i] == 'sine':
-                Input = safe_aggregate(Input, sine(origin_input))
+                Input = safe_add(Input, sine(origin_input))
             elif activation_list[i] == 'tanh_square':
-                Input = safe_aggregate(Input, tanh_square(origin_input))
+                Input = safe_add(Input, tanh_square(origin_input))
             elif activation_list[i] == 'mod_sigmoid':
-                Input = safe_aggregate(Input, mod_sigmoid(origin_input))
+                Input = safe_add(Input, mod_sigmoid(origin_input))
             elif activation_list[i] == 'linear':
-                Input = safe_aggregate(Input, origin_input)
+                Input = safe_add(Input, origin_input)
             elif activation_list[i] == 'quartic':
-                Input = safe_aggregate(Input, quartic(origin_input))
+                Input = safe_add(Input, quartic(origin_input))
             elif activation_list[i] == 'square_quartic':
-                Input = safe_aggregate(Input, square_quartic(origin_input))
+                Input = safe_add(Input, square_quartic(origin_input))
             elif activation_list[i] == 'cubic_quadratic':
-                Input = safe_aggregate(Input, cubic_quadratic(origin_input))
+                Input = safe_add(Input, cubic_quadratic(origin_input))
             elif activation_list[i] == 'exp_cubic':
-                Input = safe_aggregate(Input, exp_cubic(origin_input))
+                Input = safe_add(Input, exp_cubic(origin_input))
             elif activation_list[i] == 'sine_square':
-                Input = safe_aggregate(Input, sine_square(origin_input))
+                Input = safe_add(Input, sine_square(origin_input))
             elif activation_list[i] == 'logarithmic':
-                Input = safe_aggregate(Input, logarithmic(origin_input))
+                Input = safe_add(Input, logarithmic(origin_input))
             elif activation_list[i] == 'scaled_cubic':
-                Input = safe_aggregate(Input, scaled_cubic(origin_input, 1.0))
+                Input = safe_add(Input, scaled_cubic(origin_input, 1.0))
             elif activation_list[i] == 'sine_offset':
-                Input = safe_aggregate(Input, sine_offset(origin_input, 1.0))
+                Input = safe_add(Input, sine_offset(origin_input, 1.0))
             elif activation_list[i] == 'spiral':
-                Input = safe_aggregate(Input, spiral_activation(origin_input))
+                Input = safe_add(Input, spiral_activation(origin_input))
             elif activation_list[i] == 'circular':
-                Input = safe_aggregate(Input, circular_activation(origin_input))
+                Input = safe_add(Input, circular_activation(origin_input))
 
         except Exception as e:
             warnings.warn(f"Error in activation {activation_list[i]}: {str(e)}", RuntimeWarning)
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/data_operations.py
@@ -76,7 +76,7 @@ def decode_one_hot(encoded_data):
     return decoded_labels
 
 
-def split(X, y, test_size, random_state, dtype=np.float32):
+def split(X, y, test_size, random_state=42, dtype=np.float32):
     """
     Splits the given X (features) and y (labels) data into training and testing subsets.
 
@@ -84,7 +84,7 @@ def split(X, y, test_size, random_state, dtype=np.float32):
         X (numpy.ndarray): Features data.
         y (numpy.ndarray): Labels data.
         test_size (float or int): Proportion or number of samples for the test subset.
-        random_state (int or None): Seed for random state.
+        random_state (int or None): Seed for random state. Default: 42.
         dtype (numpy.dtype): Data type for the arrays. np.float32 by default. Example: np.float64 or np.float16. [fp32 for balanced devices, fp64 for strong devices, fp16 for weak devices: not reccomended!]
 
     Returns:
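With the new default, callers can omit random_state and still get a reproducible split. A hedged usage sketch (the toy data and one-hot encoding are illustrative, not from the library; the return order follows the docstring below):

import numpy as np
from pyerualjetwork.data_operations import split

X = np.random.rand(100, 4)
y = np.eye(2)[np.random.randint(0, 2, size=100)]   # toy one-hot labels

# random_state now defaults to 42, so this split is reproducible across runs
x_train, x_test, y_train, y_test = split(X, y, test_size=0.2)
print(x_train.shape, x_test.shape)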
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/data_operations_cuda.py
@@ -87,15 +87,10 @@ def split(X, y, test_size, random_state=42, dtype=cp.float32, use_cpu=False):
 
     Args:
         X (cupy.ndarray): Features data.
-
         y (cupy.ndarray): Labels data.
-
         test_size (float or int): Proportion or number of samples for the test subset.
-
         random_state (int or None): Seed for random state. Default: 42.
-
         dtype (cupy.dtype): Data type for the arrays. np.float32 by default. Example: cp.float64 or cp.float16. [fp32 for balanced devices, fp64 for strong devices, fp16 for weak devices: not reccomended!] (optional)
-
         use_cpu (bool): If True, output will be same cpu's split function. Default: False.
     Returns:
         tuple: x_train, x_test, y_train, y_test as ordered training and testing data subsets.
@@ -255,7 +250,6 @@ def auto_balancer(x_train, y_train, dtype=cp.float32, use_cpu=False):
         dtype (cupy.dtype): Data type for the arrays. np.float32 by default. Example: cp.float64 or cp.float16. [fp32 for balanced devices, fp64 for strong devices, fp16 for weak devices: not reccomended!] (optional)
 
         use_cpu (bool): If True, output will be same cpu's auto_balancer function. Default: False.
-
     Returns:
         tuple: A tuple containing balanced input data and labels.
     """
@@ -399,7 +393,6 @@ def synthetic_augmentation(x_train, y_train, dtype=cp.float32, use_cpu=False):
 
     return x_balanced, y_balanced
 
-
 def standard_scaler(x_train=None, x_test=None, scaler_params=None, dtype=cp.float32):
     """
     Standardizes training and test datasets. x_test may be None.
pyerualjetwork-4.1.3/pyerualjetwork/help.py (new file)
@@ -0,0 +1,16 @@
+from .activation_functions import all_activations
+
+def activation_potentiation():
+
+    activations_list = all_activations()
+
+    print('All available activations: ', activations_list, "\n\nYOU CAN COMBINE EVERY ACTIVATION. EXAMPLE: ['linear', 'tanh'] or ['waveakt', 'linear', 'sine'].")
+
+    return activations_list
+
+def docs_and_examples():
+
+    print('PLAN document: https://github.com/HCB06/Anaplan/tree/main/Welcome_to_PLAN\n')
+    print('PLAN examples: https://github.com/HCB06/Anaplan/tree/main/Welcome_to_Anaplan/ExampleCodes\n')
+    print('PLANEAT examples: https://github.com/HCB06/Anaplan/tree/main/Welcome_to_Anaplan/ExampleCodes/PLANEAT\n')
+    print('Anaplan document and examples: https://github.com/HCB06/Anaplan/tree/main/Welcome_to_Anaplan')
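The re-added module is importable straight from the package; a short usage sketch (note that this import shadows Python's built-in help in the caller's namespace):

from pyerualjetwork import help

activations = help.activation_potentiation()   # prints and returns all activation names
help.docs_and_examples()                       # prints the documentation links above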
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/metrics_cuda.py
@@ -1,7 +1,7 @@
 import cupy as cp
-from .data_operations_cuda import decode_one_hot
 
 def metrics(y_ts, test_preds, average='weighted'):
+    from .data_operations import decode_one_hot
     y_test_d = cp.array(decode_one_hot(y_ts))
     y_pred = cp.array(test_preds)
 
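Moving the decode_one_hot import from module level into metrics defers it to call time, presumably to break an import cycle or to reuse the CPU decoder; the diff itself doesn't say. A generic sketch of the deferred-import pattern (module names hypothetical):

# module_a.py
def do_work():
    # resolved only when do_work() runs, so importing module_a no longer
    # forces module_b (which may itself import module_a) to load first
    from module_b import helper
    return helper()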
@@ -50,7 +50,6 @@ def metrics(y_ts, test_preds, average='weighted'):
     return precision_val.item(), recall_val.item(), f1_val.item()
 
 
-
 def roc_curve(y_true, y_score):
     """
     Compute Receiver Operating Characteristic (ROC) curve.
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/plan.py
@@ -125,15 +125,7 @@ def fit(
 
     elif val and (x_val is not None and y_val is not None):
         x_val = x_val.astype(dtype, copy=False)
-
-        if y_val.dtype != np.uint8:
-            y_val = np.array(y_val, copy=False).astype(np.uint8, copy=False)
-        elif len(y_val[0]) <= 32767:
-            if y_val.dtype != np.uint16:
-                y_val = np.array(y_val, copy=False).astype(np.uint16, copy=False)
-        else:
-            if y_val.dtype != np.uint32:
-                y_val = np.array(y_val, copy=False).astype(np.uint32, copy=False)
+        y_val = y_val.astype(dtype, copy=False)
 
     val_list = [] if val else None
     val_count = val_count or 10
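The replacement drops the uint8/uint16/uint32 selection entirely: validation labels now simply follow the caller's dtype, matching how x_val is handled two lines up. A behavior sketch with toy one-hot labels:

import numpy as np

dtype = np.float32
y_val = np.eye(3)[[0, 2, 1]]             # toy one-hot labels, float64 by default

y_val = y_val.astype(dtype, copy=False)  # new 4.1.3 behavior
print(y_val.dtype)                       # float32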
@@ -145,7 +137,7 @@ def fit(
 
     # Training process
     for index, inp in enumerate(x_train):
-        inp = np.array(inp, copy=False).ravel()
+        inp = np.array(inp, copy=False, dtype=dtype).ravel()
         y_decoded = decode_one_hot(y_train)
         # Weight updates
         STPW = feed_forward(inp, STPW, is_training=True, Class=y_decoded[index], activation_potentiation=activation_potentiation, LTD=LTD)
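Passing dtype into np.array keeps each flattened training sample in the configured precision instead of letting NumPy default to float64. A quick check of the difference (toy sample, copy flag omitted for portability):

import numpy as np

dtype = np.float32
inp = [[0.1, 0.2], [0.3, 0.4]]                    # toy nested sample

print(np.array(inp).ravel().dtype)                # float64: NumPy's default
print(np.array(inp, dtype=dtype).ravel().dtype)   # float32: what the fix enforces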
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/plan_cuda.py
@@ -219,7 +219,7 @@ def learner(x_train, y_train, x_test=None, y_test=None, strategy='accuracy', bat
         tuple: A list for model parameters: [Weight matrix, Preds, Accuracy, [Activations functions]]. You can acces this parameters in model_operations module. For example: model_operations.get_weights() for Weight matrix.
 
     """
-    print(Fore.WHITE + "\nRemember, optimization on large datasets can be very time-consuming and computationally expensive. Therefore, if you are working with such a dataset, our recommendation is to include activation function: ['circular'] in the 'except_this' parameter unless absolutely necessary, as they can significantly prolong the process. from: learner\n" + Fore.RESET)
+    print(Fore.WHITE + "\nRemember, optimization on large datasets can be very time-consuming and computationally expensive. Therefore, if you are working with such a dataset, our recommendation is to include activation function: ['circular', 'spiral'] in the 'except_this' parameter unless absolutely necessary, as they can significantly prolong the process. from: learner\n" + Fore.RESET)
 
     activation_potentiation = all_activations()
 
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork/visualizations_cuda.py
@@ -525,8 +525,8 @@ def plot_decision_boundary(x, y, activation_potentiation, W, artist=None, ax=Non
 
 def plot_decision_space(x, y, y_preds=None, s=100, color='tab20'):
 
-    from metrics_cuda import pca
-    from data_operations_cuda import decode_one_hot
+    from .metrics_cuda import pca
+    from .data_operations_cuda import decode_one_hot
 
     if x.shape[1] > 2:
 
@@ -587,7 +587,7 @@ def neuron_history(LTPW, ax1, row, col, class_count, artist5, data, fig1, acc=Fa
 
         title_info = f'{j+1}. Neuron'
 
-        art5 = ax1[j].imshow(mat, interpolation='sinc', cmap='viridis')
+        art5 = ax1[j].imshow(mat.get(), interpolation='sinc', cmap='viridis')
 
         ax1[j].set_aspect('equal')
         ax1[j].set_xticks([])
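Here mat is a CuPy array, and Matplotlib can only render host memory; ndarray.get() is CuPy's documented device-to-host copy, which is what makes imshow work. A minimal sketch (toy matrix, same imshow arguments as above):

import cupy as cp
import matplotlib.pyplot as plt

mat = cp.random.rand(8, 8)    # toy device-resident matrix
plt.imshow(mat.get(), interpolation='sinc', cmap='viridis')   # .get() -> NumPy
plt.show()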
@@ -604,7 +604,7 @@ def neuron_history(LTPW, ax1, row, col, class_count, artist5, data, fig1, acc=Fa
 
 def initialize_visualization_for_fit(val, show_training, neurons_history, x_train, y_train):
     """Initializes the visualization setup based on the parameters."""
-    from data_operations import find_closest_factors
+    from .data_operations import find_closest_factors
     visualization_objects = {}
 
     if show_training:
@@ -680,7 +680,7 @@ def display_visualization_for_fit(fig, artist_list, interval):
 
 def initialize_visualization_for_learner(show_history, neurons_history, neural_web_history, x_train, y_train):
     """Initialize all visualization components"""
-    from data_operations import find_closest_factors
+    from .data_operations import find_closest_factors
     viz_objects = {}
 
     if show_history:
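These two fixes (and the plot_decision_space one above) add the leading dot that makes the import relative to the pyerualjetwork package; the bare form is an absolute import that only resolves when the module's own directory happens to be on sys.path. Side by side:

# inside pyerualjetwork/visualizations_cuda.py

# old: absolute import; fails for an installed package because there is
# no top-level module named data_operations on sys.path
# from data_operations import find_closest_factors

# new: relative import, resolved against the pyerualjetwork package itself
from .data_operations import find_closest_factors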
{pyerualjetwork-4.1.2b0 → pyerualjetwork-4.1.3}/pyerualjetwork.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pyerualjetwork
-Version: 4.1.2b0
+Version: 4.1.3
 Summary: PyerualJetwork is a machine learning library written in Python for professionals, incorporating advanced, unique, new, and modern techniques.
 Author: Hasan Can Beydili
 Author-email: tchasancan@gmail.com
pyerualjetwork-4.1.2b0/pyerualjetwork/help.py (deleted)
@@ -1,16 +0,0 @@
-from .activation_functions import all_activations
-
-def activation_potentiation():
-
-    activations_list = all_activations()
-
-    print('All available activations: ', activations_list, "\n\nYOU CAN COMBINE EVERY ACTIVATION. EXAMPLE: ['linear', 'tanh'] or ['waveakt', 'linear', 'sine'].")
-
-    return activations_list
-
-def docs_and_examples():
-
-    print('PLAN document: https://github.com/HCB06/PyerualJetwork/tree/main/Welcome_to_PLAN\n')
-    print('PLAN examples: https://github.com/HCB06/PyerualJetwork/tree/main/Welcome_to_PyerualJetwork/ExampleCodes\n')
-    print('PLANEAT examples: https://github.com/HCB06/PyerualJetwork/tree/main/Welcome_to_PyerualJetwork/ExampleCodes/PLANEAT\n')
-    print('PyerualJetwork document and examples: https://github.com/HCB06/PyerualJetwork/tree/main/Welcome_to_PyerualJetwork')