pyerualjetwork 4.0.5__py3-none-any.whl

pyerualjetwork/__init__.py (new file):

import subprocess
subprocess.check_call(["pip", "install", "setuptools==75.6.0"])
import pkg_resources

print("Automatically checking and installing dependencies for PyerualJetwork")

package_names = [
    'scipy==1.13.1',
    'tqdm==4.66.4',
    'seaborn==0.13.2',
    'pandas==2.2.2',
    'networkx==3.3',
    'numpy==1.26.4',
    'matplotlib==3.9.0',
    'colorama==0.4.6'
]

installed_packages = pkg_resources.working_set
installed = {pkg.key: pkg.version for pkg in installed_packages}
err = 0

for package_name in package_names:
    package_name_only, required_version = package_name.split('==')

    if package_name_only not in installed:
        try:
            print(f"Installing {package_name}...")
            subprocess.check_call(["pip", "install", package_name])
        except Exception as e:
            err += 1
            print(f"Error installing {package_name}; continuing with the remaining packages: {e}")
    else:
        installed_version = installed[package_name_only]
        if installed_version != required_version:
            print(f"Updating {package_name_only} from version {installed_version} to {required_version}...")
            try:
                subprocess.check_call(["pip", "install", package_name])
            except Exception as e:
                err += 1
                print(f"Error updating {package_name}; continuing with the remaining packages: {e}")
        else:
            print(f"{package_name} ready.")

print(f"PyerualJetwork is ready to use with {err} errors")

__version__ = "4.0.5"
__update__ = "* Note: CUDA modules need cupy. Enter this command in your terminal: 'pip install cupy-cuda12x' or your cuda version.\n* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES\n* PyerualJetwork document: https://github.com/HCB06/Anaplan/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf\n* YouTube tutorials: https://www.youtube.com/@HasanCanBeydili"

def print_version(version):
    print(f"PyerualJetwork Version {version}" + '\n')

def print_update_notes(update):
    print(f"Update Notes:\n{update}")

print_version(__version__)
print_update_notes(__update__)

from .plan import *
from .planeat import *
from .activation_functions import *
from .data_operations import *
from .loss_functions import *
from .metrics import *
from .model_operations import *
from .ui import *
from .visualizations import *
from .help import *
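
Everything above runs at import time: a bare import performs the pip checks, then prints the version banner and update notes. A minimal sketch, assuming the wheel is installed under the name pyerualjetwork:

import pyerualjetwork  # runs the dependency check and prints the banner

print(pyerualjetwork.__version__)  # -> "4.0.5"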
pyerualjetwork/activation_functions.py (new file):
import numpy as np
from scipy.special import expit, softmax

# ACTIVATION FUNCTIONS -----

def all_activations():

    activations_list = ['linear', 'sigmoid', 'relu', 'tanh', 'circular', 'spiral', 'swish',
                        'sin_plus', 'mod_circular', 'tanh_circular', 'leaky_relu', 'softplus',
                        'elu', 'gelu', 'selu', 'sinakt', 'p_squared', 'sglu', 'dlrelu',
                        'exsig', 'acos', 'gla', 'srelu', 'qelu', 'isra', 'waveakt', 'arctan',
                        'bent_identity', 'sech', 'softsign', 'pwl', 'cubic', 'gaussian',
                        'sine', 'tanh_square', 'mod_sigmoid', 'quartic', 'square_quartic',
                        'cubic_quadratic', 'exp_cubic', 'sine_square', 'logarithmic',
                        'scaled_cubic', 'sine_offset']

    return activations_list
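
# Sketch: all_activations() enumerates every name accepted by
# apply_activation (defined below); len(all_activations()) == 44.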

def spiral_activation(x):

    # Radius of the full input vector and pairwise angles between
    # consecutive components (length n-1 for an input of length n).
    r = np.sqrt(np.sum(x**2))
    theta = np.arctan2(x[1:], x[:-1])

    spiral_x = r * np.cos(theta + r)
    spiral_y = r * np.sin(theta + r)

    # Re-assemble to the input length: first spiral_x component, then spiral_y.
    spiral_output = np.concatenate(([spiral_x[0]], spiral_y))

    return spiral_output
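
# Sketch: spiral_activation(np.array([1.0, 0.0])) -> array([cos(1), sin(1)]),
# preserving the input length.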


def Softmax(
    x  # num: Input data to be transformed using softmax function.
):
    """
    Applies the softmax function to the input data.

    Args:
        x (num): Input data to be transformed using the softmax function.

    Returns:
        (num): Transformed data after applying the softmax function.
    """

    return softmax(x)


def Sigmoid(
    x  # num: Input data to be transformed using sigmoid function.
):
    """
    Applies the sigmoid function to the input data.

    Args:
        x (num): Input data to be transformed using the sigmoid function.

    Returns:
        (num): Transformed data after applying the sigmoid function.
    """

    return expit(x)


def Relu(
    x  # num: Input data to be transformed using ReLU function.
):
    """
    Applies the Rectified Linear Unit (ReLU) function to the input data.

    Args:
        x (num): Input data to be transformed using the ReLU function.

    Returns:
        (num): Transformed data after applying the ReLU function.
    """

    return np.maximum(0, x)
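
# Sketch: these wrappers operate elementwise on NumPy arrays, e.g.
#   Sigmoid(np.array([0.0]))       -> array([0.5])
#   Relu(np.array([-1.0, 3.0]))    -> array([0., 3.])
#   Softmax(np.array([1.0, 2.0]))  -> probabilities summing to 1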


def tanh(x):
    return np.tanh(x)

def swish(x):
    return x * expit(x)  # x * sigmoid(x), using the numerically stable expit

def sin_plus(x):
    return (np.sin(x) + 1) / 2

def modular_circular_activation(x, period=2*np.pi):
    return np.mod(x, period) / period

def tanh_circular_activation(x):
    return (np.tanh(x) + 1) / 2

def leaky_relu(x, alpha=0.01):
    return np.where(x > 0, x, alpha * x)

def softplus(x):
    return np.logaddexp(0, x)  # log(1 + exp(x)) without overflow for large x

def elu(x, alpha=1.0):
    return np.where(x > 0, x, alpha * (np.exp(x) - 1))

def gelu(x):
    # tanh approximation of GELU (Hendrycks & Gimpel, 2016)
    return 0.5 * x * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))

def selu(x, lambda_=1.0507, alpha=1.6733):
    return lambda_ * np.where(x > 0, x, alpha * (np.exp(x) - 1))

def sinakt(x):
    return np.sin(x) + np.cos(x)

def p_squared(x, alpha=1.0, beta=0.0):
    return alpha * x**2 + beta * x

def sglu(x, alpha=1.0):
    return softmax(alpha * x) * x

# Double Leaky ReLU (DLReLU)
def dlrelu(x):
    return np.maximum(0.01 * x, x) + np.minimum(0.01 * x, 0.1 * x)

# Exponential Sigmoid (ExSig)
def exsig(x):
    return expit(x**2)  # 1 / (1 + exp(-x**2))

# Adaptive Cosine Activation (ACos)
def acos(x, alpha=1.0, beta=0.0):
    return np.cos(alpha * x + beta)

# Gaussian-like Activation (GLA)
def gla(x, alpha=1.0, mu=0.0):
    return np.exp(-alpha * (x - mu)**2)

# Swish ReLU (SReLU)
def srelu(x):
    return x * expit(x) + np.maximum(0, x)

# Quadratic Exponential Linear Unit (QELU)
def qelu(x):
    return x**2 * np.exp(x) - 1  # note: evaluates as (x**2 * exp(x)) - 1

# Inverse Square Root Activation (ISRA)
def isra(x):
    return x / np.sqrt(np.abs(x) + 1)

def waveakt(x, alpha=1.0, beta=2.0, gamma=3.0):
    return np.sin(alpha * x) * np.cos(beta * x) * np.sin(gamma * x)

def arctan(x):
    return np.arctan(x)

def bent_identity(x):
    return (np.sqrt(x**2 + 1) - 1) / 2 + x

def circular_activation(x, scale=2.0, frequency=1.0, shift=0.0):

    n_features = x.shape[0]

    circular_output = np.zeros_like(x)

    # The radius is constant across the loop, so compute it once.
    r = np.sqrt(np.sum(x**2))

    for i in range(n_features):

        theta = 2 * np.pi * (i / n_features) + shift

        circular_x = r * np.cos(theta + frequency * r) * scale
        circular_y = r * np.sin(theta + frequency * r) * scale

        # Alternate between the x- and y-projection per feature.
        if i % 2 == 0:
            circular_output[i] = circular_x
        else:
            circular_output[i] = circular_y

    return circular_output

def sech(x):
    return 2 / (np.exp(x) + np.exp(-x))

def softsign(x):
    return x / (1 + np.abs(x))

def pwl(x, alpha=0.5, beta=1.5):
    return np.where(x <= 0, alpha * x, beta * x)

def cubic(x):
    return x**3

def gaussian(x, alpha=1.0, mu=0.0):
    return np.exp(-alpha * (x - mu)**2)  # identical to gla above

def sine(x, alpha=1.0):
    return np.sin(alpha * x)

def tanh_square(x):
    return np.tanh(x)**2

def mod_sigmoid(x, alpha=1.0, beta=0.0):
    return expit(alpha * x - beta)  # 1 / (1 + exp(-alpha*x + beta))

def quartic(x):
    return x**4

def square_quartic(x):
    return (x**2)**2

def cubic_quadratic(x):
    return x**3 * (x**2)  # equivalent to x**5

def exp_cubic(x):
    return np.exp(x**3)

def sine_square(x):
    return np.sin(x)**2

def logarithmic(x):
    return np.log(x**2 + 1)

def scaled_cubic(x, alpha=1.0):
    return alpha * x**3

def sine_offset(x, beta=0.0):
    return np.sin(x + beta)


def apply_activation(Input, activation_list):
    """
    Applies a sequence of activation functions to the input.

    Each listed activation is evaluated on the original input and added
    onto `Input`; unknown activation names are silently skipped.

    Args:
        Input (numpy.ndarray): The input to apply activations to.
        activation_list (list): A list of activation function names to apply.

    Returns:
        numpy.ndarray: The input after all activations have been applied.
    """

    origin_input = np.copy(Input)

    # Name -> callable dispatch table. Entries whose original call sites
    # passed explicit arguments are wrapped so the same values are used.
    activation_map = {
        'linear': lambda x: x,
        'sigmoid': Sigmoid,
        'relu': Relu,
        'tanh': tanh,
        'swish': swish,
        'sin_plus': sin_plus,
        'mod_circular': modular_circular_activation,
        'tanh_circular': tanh_circular_activation,
        'leaky_relu': leaky_relu,
        'softplus': softplus,
        'elu': elu,
        'gelu': gelu,
        'selu': selu,
        'sinakt': sinakt,
        'p_squared': p_squared,
        'sglu': lambda x: sglu(x, alpha=1.0),
        'dlrelu': dlrelu,
        'exsig': exsig,
        'acos': lambda x: acos(x, alpha=1.0, beta=0.0),
        'gla': lambda x: gla(x, alpha=1.0, mu=0.0),
        'srelu': srelu,
        'qelu': qelu,
        'isra': isra,
        'waveakt': waveakt,
        'arctan': arctan,
        'bent_identity': bent_identity,
        'sech': sech,
        'softsign': softsign,
        'pwl': pwl,
        'cubic': cubic,
        'gaussian': gaussian,
        'sine': sine,
        'tanh_square': tanh_square,
        'mod_sigmoid': mod_sigmoid,
        'quartic': quartic,
        'square_quartic': square_quartic,
        'cubic_quadratic': cubic_quadratic,
        'exp_cubic': exp_cubic,
        'sine_square': sine_square,
        'logarithmic': logarithmic,
        'scaled_cubic': lambda x: scaled_cubic(x, 1.0),
        'sine_offset': lambda x: sine_offset(x, 1.0),
        'spiral': spiral_activation,
        'circular': circular_activation,
    }

    for name in activation_list:
        func = activation_map.get(name)
        if func is not None:  # unknown names are silently skipped
            Input += func(origin_input)

    return Input
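
A short usage sketch (the input values are illustrative only): apply_activation returns the input plus each requested activation evaluated on the original input.

x = np.array([0.5, -1.0, 2.0])
out = apply_activation(np.copy(x), ['tanh', 'relu'])
# out == x + np.tanh(x) + np.maximum(0, x), elementwise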