pyerualjetwork 5.30__py3-none-any.whl → 5.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyerualjetwork/__init__.py CHANGED
@@ -42,7 +42,7 @@ PyerualJetwork document: https://github.com/HCB06/PyerualJetwork/blob/main/Welco
  - Contact: tchasancan@gmail.com
  """
 
- __version__ = "5.30"
+ __version__ = "5.31"
  __update__ = """* Changes: https://github.com/HCB06/PyerualJetwork/blob/main/CHANGES
  * PyerualJetwork Homepage: https://github.com/HCB06/PyerualJetwork/tree/main
  * PyerualJetwork document: https://github.com/HCB06/PyerualJetwork/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf
pyerualjetwork/cpu/activation_functions.py CHANGED
@@ -34,6 +34,7 @@ Module functions:
  - 'sine_offset': lambda x: sine_offset(x, 1.0),
  - 'spiral': spiral_activation,
  - 'circular': circular_activation
+ - Softmax()
  """
 
  import numpy as np
pyerualjetwork/cpu/model_ops.py CHANGED
@@ -62,7 +62,7 @@ def save_model(model_name,
  test_acc=None,
  model_path='',
  activations=['linear'],
- activation_potentiation=[],
+ activation_potentiation=None,
  weights_type='npy',
  weights_format='raw',
  show_architecture=False,
pyerualjetwork/cpu/nn.py CHANGED
@@ -132,18 +132,29 @@ def learn(x_train, y_train, optimizer, gen, pop_size, fit_start=True, batch_size
  * This function also able to train classic MLP model architectures.
  * And my newest innovative architecture: PTNN (Potentiation Transfer Neural Network).
 
+ Examples:
+
+ This creates a PLAN model:
+ - ```learn(x_train, y_train, optimizer, pop_size=100, gen=100, fit_start=True) ```
+
+ This creates a MLP model(with 2 hidden layer):
+ - ```learn(x_train, y_train, optimizer, pop_size=100, gen=100, fit_start=False, neurons=[64, 64], activation_functions=['tanh', 'tanh']) ```
+
+ This creates a PTNN model(with 2 hidden layer & 1 aggregation layer(comes with PLAN)):
+ - ```learn(x_train, y_train, optimizer, pop_size=100, gen=[10, 100], fit_start=True, neurons=[64, 64], activation_functions=['tanh', 'tanh']) ```
+
  :Args:
  :param x_train: (array-like): Training input data.
  :param y_train: (array-like): Labels for training data. one-hot encoded.
- :param optimizer: (function): Optimization technique with hyperparameters. (PLAN, MLP & PTNN (all) using ENE for optimization. Gradient based technique's will added in the future.) Please use this: from pyerualjetwork.ene_cpu import evolver (and) optimizer = lambda *args, **kwargs: evolver(*args, 'here give your hyperparameters for example: activation_add_prob=0.85', **kwargs) Example:
+ :param optimizer: (function): Optimization technique with hyperparameters. (PLAN, MLP & PTNN (all) using ENE for optimization. Gradient based technique's will added in the future.) Please use this: from pyerualjetwork.cpu.ene import evolver (and) optimizer = lambda *args, **kwargs: evolver(*args, 'here give your hyperparameters for example: activation_add_prob=0.85', **kwargs) Example:
  ```python
- optimizer = lambda *args, **kwargs: ene_cpu.evolver(*args,
+ optimizer = lambda *args, **kwargs: ene.evolver(*args,
  activation_add_prob=0.05,
  strategy='aggressive',
  policy='more_selective',
  **kwargs)
 
- model = neu_cpu.learn(x_train,
+ model = nn.learn(x_train,
  y_train,
  optimizer,
  fit_start=True,
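
Putting the renamed CPU modules together, a minimal sketch (assuming pyerualjetwork 5.31 is installed; the toy data and hyperparameter values are illustrative, only `nn.learn` and `ene.evolver` with the arguments shown above come from the docstring):

```python
# Minimal PLAN training sketch with the renamed `pyerualjetwork.cpu` modules.
# Toy data and hyperparameters are illustrative only.
import numpy as np
from pyerualjetwork.cpu import nn, ene

# 100 samples, 4 features, 2 classes; labels one-hot encoded as the docstring requires.
x_train = np.random.rand(100, 4).astype(np.float32)
y_train = np.eye(2)[np.random.randint(0, 2, size=100)]

# ENE optimizer wrapper, as in the docstring example above.
optimizer = lambda *args, **kwargs: ene.evolver(*args,
                                                activation_add_prob=0.05,
                                                strategy='aggressive',
                                                policy='more_selective',
                                                **kwargs)

# PLAN model, mirroring the "This creates a PLAN model" example.
model = nn.learn(x_train, y_train, optimizer,
                 fit_start=True, gen=100, pop_size=100)
```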
pyerualjetwork/cuda/activation_functions.py CHANGED
@@ -34,6 +34,7 @@ Module functions:
  - 'sine_offset': lambda x: sine_offset(x, 1.0),
  - 'spiral': spiral_activation,
  - 'circular': circular_activation
+ - Softmax()
  """
 
  import cupy as cp
pyerualjetwork/cuda/model_ops.py CHANGED
@@ -64,7 +64,7 @@ def save_model(model_name,
  test_acc=None,
  model_path='',
  activations=['linear'],
- activation_potentiation=[],
+ activation_potentiation=None,
  weights_type='npy',
  weights_format='raw',
  show_architecture=False,
pyerualjetwork/cuda/nn.py CHANGED
@@ -127,19 +127,30 @@ def learn(x_train, y_train, optimizer, gen, pop_size, fit_start=True, batch_size
  * This function also able to train classic MLP model architectures.
  * And my newest innovative architecture: PTNN (Potentiation Transfer Neural Network).
 
+ Examples:
+
+ This creates a PLAN model:
+ - ```learn(x_train, y_train, optimizer, pop_size=100, gen=100, fit_start=True) ```
+
+ This creates a MLP model(with 2 hidden layer):
+ - ```learn(x_train, y_train, optimizer, pop_size=100, gen=100, fit_start=False, neurons=[64, 64], activation_functions=['tanh', 'tanh']) ```
+
+ This creates a PTNN model(with 2 hidden layer & 1 aggregation layer(comes with PLAN)):
+ - ```learn(x_train, y_train, optimizer, pop_size=100, gen=[10, 100], fit_start=True, neurons=[64, 64], activation_functions=['tanh', 'tanh']) ```
+
  :Args:
  :param x_train: (array-like): Training input data.
  :param y_train: (array-like): Labels for training data.
- :param optimizer: (function): Optimization technique with hyperparameters. (PLAN, MLP & PTNN (all) using ENE for optimization. Gradient based technique's will added in the future.) Please use this: from pyerualjetwork.ene_cuda import evolver (and) optimizer = lambda *args, **kwargs: evolver(*args, 'here give your hyperparameters for example: activation_add_prob=0.85', **kwargs) Example:
+ :param optimizer: (function): Optimization technique with hyperparameters. (PLAN, MLP & PTNN (all) using ENE for optimization. Gradient based technique's will added in the future.) Please use this: from pyerualjetwork.cuda.ene import evolver (and) optimizer = lambda *args, **kwargs: evolver(*args, 'here give your hyperparameters for example: activation_add_prob=0.85', **kwargs) Example:
  ```python
 
- optimizer = lambda *args, **kwargs: ene_cuda.evolver(*args,
+ optimizer = lambda *args, **kwargs: ene.evolver(*args,
  activation_add_prob=0.05,
  strategy='aggressive',
  policy='more_selective',
  **kwargs)
 
- model = neu_cuda.learn(x_train,
+ model = nn.learn(x_train,
  y_train,
  optimizer,
  fit_start=True,
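
The same sketch against the renamed CUDA modules, under the assumption that the `pyerualjetwork.cuda` variants take CuPy arrays (the module imports `cupy as cp` above); a CUDA-capable GPU is required, and data and hyperparameters are again illustrative:

```python
# GPU counterpart of the CPU sketch; assumes CuPy arrays are accepted,
# as suggested by `import cupy as cp` in pyerualjetwork/cuda/nn.py.
import cupy as cp
from pyerualjetwork.cuda import nn, ene

# Toy dataset on the GPU: 100 samples, 4 features, 2 one-hot classes.
x_train = cp.random.rand(100, 4).astype(cp.float32)
y_train = cp.eye(2)[cp.random.randint(0, 2, size=100)]

optimizer = lambda *args, **kwargs: ene.evolver(*args,
                                                activation_add_prob=0.05,
                                                strategy='aggressive',
                                                policy='more_selective',
                                                **kwargs)

model = nn.learn(x_train, y_train, optimizer,
                 fit_start=True, gen=100, pop_size=100)
```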
pyerualjetwork-5.31.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pyerualjetwork
- Version: 5.30
+ Version: 5.31
  Summary: PyereualJetwork is a GPU-accelerated machine learning library in Python for professionals and researchers. It features PLAN, MLP, Deep Learning training, and ENE (Eugenic NeuroEvolution) for genetic optimization, applicable to genetic algorithms or Reinforcement Learning (RL). The library includes data pre-processing, visualizations, model saving/loading, prediction, evaluation, training, and detailed or simplified memory management.
  Author: Hasan Can Beydili
  Author-email: tchasancan@gmail.com
@@ -24,17 +24,14 @@ GitHub Page: https://github.com/HCB06/PyerualJetwork
 
  YouTube Tutorials: https://www.youtube.com/watch?v=6wMQstZ00is&list=PLNgNWpM7HbsBpCx2VTJ4SK9wcPyse-EHw
 
- pip install pyerualjetwork
+ installation:
+ 'pip install pyerualjetwork'
 
- from pyerualjetwork import neu_cpu
- from pyerualjetwork import ene_cpu
- from pyerualjetwork import data_operations_cpu
- from pyerualjetwork import model_operations_cpu
-
- from pyerualjetwork import neu_cuda
- from pyerualjetwork import ene_cuda
- from pyerualjetwork import data_operations_cuda
- from pyerualjetwork import model_operations_cuda
+ package modules:
+ 'from pyerualjetwork.cpu import nn, ene, data_ops, model_ops, memory_ops'
+ 'from pyerualjetwork.cuda import nn, ene, data_ops, model_ops, memory_ops'
+
+ please read docstrings.
 
  PyerualJetwork has Issue Solver. This operation provides users ready-to-use functions to identify potential issues
  caused by version incompatibilities in major updates, ensuring users are not affected by such problems.
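
A quick sanity check of the 5.31 layout described above (only the cpu submodules actually listed in this wheel's RECORD are imported here):

```python
# Verify the upgrade and the new module layout after `pip install pyerualjetwork --upgrade`.
import pyerualjetwork
from pyerualjetwork.cpu import nn, ene, data_ops, model_ops

print(pyerualjetwork.__version__)  # expected: "5.31"
```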
@@ -74,10 +71,10 @@ PyerualJetwork is free to use for commercial business and individual users.
  PyerualJetwork ready for both eager execution(like PyTorch) and static graph(like Tensorflow) concepts because PyerualJetwork using only functions.
  For example:
 
- fit function only fits given training data(suitable for dynamic graph) but learner function learns and optimize entire architecture(suitable for static graph). Or more deeper eager executions PyerualJetwork have: feed_forward function, list of activation functions, loss functions. You can create your unique model architecture. Move your data to GPU or CPU or manage how much should in GPU, Its all up to you.
+ plan_fit function only fits given training data(suitable for dynamic graph) but learn function learns and optimize entire architecture(suitable for static graph). Or more deeper eager executions PyerualJetwork have: cross_over function, mutation function, list of activation functions, loss functions. You can create your unique model architecture. Move your data to GPU or CPU or manage how much should in GPU, Its all up to you.
  <br><br>
 
- PyerualJetworket includes PLAN, MLP & ENE.<br>
+ PyerualJetworket includes PLAN, MLP, PTNN & ENE.<br>
 
  PLAN VISION:<br>
 
@@ -122,6 +119,6 @@ HOW DO I IMPORT IT TO MY PROJECT?
 
  Anaconda users can access the 'Anaconda Prompt' terminal from the Start menu and add the necessary library modules to the Python module search queue by typing "pip install pyerualjetwork" and pressing enter. If you are not using Anaconda, you can simply open the 'cmd' Windows command terminal from the Start menu and type "pip install PyerualJetwork". (Visual Studio Code reccomended) After installation, it's important to periodically open the terminal of the environment you are using and stay up to date by using the command "pip install PyerualJetwork --upgrade".
 
- After installing the module using "pip" you can now call the library module in your project environment. Use: “from pyerualjetwork import neu_cpu”. Now, you can call the necessary functions from the neu module.
+ After installing the module using "pip" you can now call the library module in your project environment. Use: “from pyerualjetwork.cpu import nn. Now, you can call the necessary functions from the nn module.
 
  The PLAN algorithm & ENE algorithm will not be explained in this document. This document focuses on how professionals can integrate and use PyerualJetwork in their systems. However, briefly, the PLAN algorithm can be described as a classification algorithm. PLAN algorithm achieves this task with an incredibly energy-efficient, fast, and hyperparameter-free user-friendly approach. For more detailed information, you can check out ![PYERUALJETWORK USER MANUEL](https://github.com/HCB06/PyerualJetwork/blob/main/Welcome_to_PyerualJetwork/PYERUALJETWORK_USER_MANUEL_AND_LEGAL_INFORMATION(EN).pdf) file.
pyerualjetwork-5.31.dist-info/RECORD CHANGED
@@ -1,28 +1,28 @@
- pyerualjetwork/__init__.py,sha256=QGem2FufN_W5bOFfUM2NtYGX_TDV0iUrGuO1YsXu1oU,2704
+ pyerualjetwork/__init__.py,sha256=virzpB1MRqS8c5NYr5Gy9N2IMzwq7PU2QgBPMxm0oLQ,2704
  pyerualjetwork/fitness_functions.py,sha256=D9JVCr9DFid_xXgBD4uCKxdW2k10MVDE5HZRSOK4Igg,1237
  pyerualjetwork/help.py,sha256=Nyi0gHAN9ZnO4wgQLeENt0n7tSCZ3hJmjaJ853eGjCE,831
  pyerualjetwork/issue_solver.py,sha256=3pZTGotS29sy3pIuGQoJFUePibtSzS-tNoU80T_Usgk,3131
  pyerualjetwork/memory_ops.py,sha256=TUFh9SYWCKL6N-vNdWId_EwU313TuZomQCHOrltrD-4,14280
  pyerualjetwork/ui.py,sha256=JBTFYz5R24XwNKhA3GSW-oYAoiIBxAE3kFGXkvm5gqw,656
  pyerualjetwork/cpu/__init__.py,sha256=0yAYner_-v7SmT3P7JV2itU8xJUQdQpb40dhAMQiZkc,829
- pyerualjetwork/cpu/activation_functions.py,sha256=BVr-iP7lOl_fm91Z3Aqr22Z9gigMZSPVwtAPWlfAcQs,6676
+ pyerualjetwork/cpu/activation_functions.py,sha256=zZSoOQ452Ykp_RsHVxklxesJmmFgufyIB4F3WQjudEQ,6689
  pyerualjetwork/cpu/data_ops.py,sha256=-XeMLRTQ5g7GMJdKYVMKJA7bSj6PbKEEpbQDRRhAIT4,16166
  pyerualjetwork/cpu/ene.py,sha256=ZLCaCxkpAmFLdxDS2OH-S8fT4jKq4HNVCHgpIufb8lg,44322
  pyerualjetwork/cpu/loss_functions.py,sha256=6PyBI232SQRGuFnG3LDGvnv_PUdWzT2_2mUODJiejGI,618
  pyerualjetwork/cpu/metrics.py,sha256=WhZ8iEqWehaygPRADUlhA5j_Qv3UwqV_eMxpyRVkeVs,6070
- pyerualjetwork/cpu/model_ops.py,sha256=ZoX2YV1KXySWI_IK-w-xH078OfGpA6ANLbqxtWDl3eI,20476
- pyerualjetwork/cpu/nn.py,sha256=Xx7BlUMetRVgr9V59JVu6mnmASWejHMO71X5RAP4mVQ,31390
+ pyerualjetwork/cpu/model_ops.py,sha256=9iZgl2yPYH6m7d9C-QdBYnkDEZiXgutxofck2papRxU,20478
+ pyerualjetwork/cpu/nn.py,sha256=J_Y5us-vOIhcD_h4CgaY4aOza4xi9ISu2WAfii1AfFw,32020
  pyerualjetwork/cpu/visualizations.py,sha256=rOQsc-W8b71z7ovXSoF49lx4fmpvlaHLsyj9ejWnhnI,28164
  pyerualjetwork/cuda/__init__.py,sha256=NbqvAS4jlMdoFdXa5_hi5ukXQ5zAZR_5BQ4QAqtiKug,879
- pyerualjetwork/cuda/activation_functions.py,sha256=M-B8Vwo1IIDEWBkrL7RRzRuORhS7hcz9YgJ_uFM-UKs,6753
+ pyerualjetwork/cuda/activation_functions.py,sha256=FmoSAxDr9SGO4nkE6ZflXK4pmvZ0sL3Epe1Lz-3GOVI,6766
  pyerualjetwork/cuda/data_ops.py,sha256=SiNodFNmWyTPY_KnKuAi9biPRdpTAYY3XM01bRSUPCs,18510
  pyerualjetwork/cuda/ene.py,sha256=aSCPr9VFdgK2cxxfwuP7z0jbJL9gkKNM0rgu8ihLarQ,44830
  pyerualjetwork/cuda/loss_functions.py,sha256=C93IZJcrOpT6HMK9x1O4AHJWXYTkN5WZiqdssPbvAPk,617
  pyerualjetwork/cuda/metrics.py,sha256=PjDBoRvr6va8vRvDIJJGBO4-I4uumrk3NCM1Vz4NJTo,5054
- pyerualjetwork/cuda/model_ops.py,sha256=R_jJAzhlWrlBnRRYjTiPa2_zYYCG23zMFIZfrZDEqNw,21202
- pyerualjetwork/cuda/nn.py,sha256=xkTdnuKnKg2nieV_m0rH8XwJrxSD_39xFIcpaYtx9k0,32516
+ pyerualjetwork/cuda/model_ops.py,sha256=lM6yT4ZMHs-0_M3Op8m8mQV_HRADm7ROHESgyTc7bCw,21204
+ pyerualjetwork/cuda/nn.py,sha256=7rbaIEcmssaFgcionWVRmKijlgFyftVjf-MMNaLO_28,33140
  pyerualjetwork/cuda/visualizations.py,sha256=9l5BhXqXoeopdhLvVGvjH1TKYZb9JdKOsSE2IYD02zs,28569
- pyerualjetwork-5.30.dist-info/METADATA,sha256=Q3E3pQhsRrDKTJU35peIjK7gia91s4uN-K0LPwHdKoM,8133
- pyerualjetwork-5.30.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- pyerualjetwork-5.30.dist-info/top_level.txt,sha256=BRyt62U_r3ZmJpj-wXNOoA345Bzamrj6RbaWsyW4tRg,15
- pyerualjetwork-5.30.dist-info/RECORD,,
+ pyerualjetwork-5.31.dist-info/METADATA,sha256=8xJBTVON9V34hEKYXvZQdPS1HtEDvLNRiif0A1pqRos,8020
+ pyerualjetwork-5.31.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ pyerualjetwork-5.31.dist-info/top_level.txt,sha256=BRyt62U_r3ZmJpj-wXNOoA345Bzamrj6RbaWsyW4tRg,15
+ pyerualjetwork-5.31.dist-info/RECORD,,