rb-deeplearning-lib 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
rb_deeplearning_lib/neural_net.py
@@ -1,3 +1,6 @@
+import optimizer
+import sequence
+
 class Layer:
     def __init__(self, input,out,activ="_",rangeW=(-1,1),rangeB=(-1,1)):
         self.weights = Values((rangeW[0]-rangeW[1])*np.random.rand(input,out)+rangeW[1])
@@ -17,13 +20,7 @@ class Layer:
 
     def params(self):
         return self.weights, self.bias
-
-    def updateParams(self, l_rate):
-        self.weights.vals = self.weights.vals - l_rate * self.weights.grad
-        self.bias.vals = self.bias.vals - l_rate * self.bias.grad
-        self.weights.grad = self.weights.grad * 0
-        self.bias.grad = self.bias.grad * 0
-
+
 class Dense:
     def __init__(self, layNum, inL, midL, outL, activ="_",f_activ="_",rangeW=(-0.1,0.1),rangeB=(-0.1,0.1)):
         if layNum < 1:
@@ -46,8 +43,6 @@ class Dense:
 
     def params(self):
         return self.seq.params()
-    def updateParams(self, l_rate):
-        self.seq.updateParams(l_rate)
 
 class Dropout:
     def __init__(self, size, chance):
@@ -81,8 +76,15 @@ def mse_loss(y_true, y_pred):
 
 
 class Model:
-    def __init__(self, blocks, regu = "", train = True, loss_fn=None, pen_fn = None):
+    def __init__(self, blocks, regu = "", train = True, loss_fn=None, pen_fn = None, optimizer=None):
         self.blocks = Sequence(blocks)
+
+        # Handle optimizer instantiation
+        if optimizer is None:
+            self.optimizer = Optimizer()
+        else:
+            self.optimizer = optimizer
+
         self.regu = regu
         self.inTrain = train
         self.train_loss = []
@@ -103,7 +105,7 @@ class Model:
         x_ = x if isinstance(x, Values) else Values(x)
         return self.blocks(x_)
 
-    def train(self, epochs, x_t, y_t, x_v, y_v, val_run=1, l_rate=0.01, _lambda=0.1, batch_size = None):
+    def train(self, epochs, x_t, y_t, x_v, y_v, l_rate=0.01, val_run=1, _lambda=0.1, batch_size = None):
         x_trn = x_t if isinstance(x_t, Values) else Values(x_t)
         y_trn = y_t if isinstance(y_t, Values) else Values(y_t)
         x_vl = x_v if isinstance(x_v, Values) else Values(x_v)
@@ -156,7 +158,10 @@ class Model:
             penalized_loss = self.pen_fn(current_loss,self,_lambda)
             penalized_loss.grad = np.ones_like(penalized_loss.vals)
             penalized_loss.backward()
-            self.blocks.updateParams(l_rate)
+            # Use the optimizer to step and clear gradients
+            # Retrieve parameters within the training loop before each optimization step
+            all_params = self.blocks.params()
+            self.optimizer.step(all_params, l_rate)
             print("\r", end="")
 
         for l in self.blocks.arr:
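Taken together, the neural_net.py changes replace the per-layer updateParams methods with a pluggable optimizer: Model.__init__ now accepts an optimizer argument (defaulting to the base Optimizer, whose step performs the same vanilla gradient-descent update the removed methods did), and the training loop delegates to optimizer.step. Note that l_rate and val_run also swapped positions in train(), which changes behavior for callers that passed them positionally. Below is a minimal usage sketch of the new API; the data arrays and import paths are assumptions for illustration, and whether pen_fn may be left as None is not visible in this diff:

import numpy as np
from rb_deeplearning_lib.neural_net import Dense, Model, mse_loss
from rb_deeplearning_lib.optimizer import Optim_SGD_Momentum

# Toy regression data (hypothetical shapes)
x_train, y_train = np.random.rand(100, 4), np.random.rand(100, 1)
x_val, y_val = np.random.rand(20, 4), np.random.rand(20, 1)

# Dense block: layer count, input, hidden, and output widths, per the signature above
net = Dense(2, inL=4, midL=8, outL=1)

# Omitting optimizer= falls back to the plain gradient-descent Optimizer
model = Model([net], loss_fn=mse_loss, optimizer=Optim_SGD_Momentum(mom_beta=0.9))

# l_rate now precedes val_run in the signature
model.train(50, x_train, y_train, x_val, y_val, l_rate=0.01)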
rb_deeplearning_lib/optimizer.py (new file)
@@ -0,0 +1,100 @@
+import numpy as np
+import autogradient
+
+class Optimizer:
+    def __init__(self):
+        pass
+
+    def step(self, params, learning_rate):
+        for p in params:
+            p.vals = p.vals - learning_rate * p.grad
+            p.grad = np.zeros_like(p.grad)
+
+class Optim_SGD(Optimizer):
+    def __init__(self, finitters, fin_l_rate):
+        self.t = 0
+        self.finitters = finitters
+        self.fin_l_rate = fin_l_rate
+
+    def step(self, params, learning_rate):
+        self.t += 1
+        t = self.t
+        alpha = t/self.finitters
+        if(alpha < 1):
+            l_rate = learning_rate*(1-alpha) + alpha*self.fin_l_rate
+        else:
+            l_rate = self.fin_l_rate
+        for p in params:
+            p.vals = p.vals - l_rate * p.grad
+            p.grad = np.zeros_like(p.grad)
+
+class Optim_SGD_Momentum(Optimizer):
+    def __init__(self, mom_beta=0.9):
+        self.v = {}
+        self.beta = mom_beta
+
+    def step(self, params, learning_rate):
+        v = self.v
+        for p in params:
+            if p not in self.v:
+                v[p] = np.zeros_like(p.vals)
+            v[p] = self.beta*v[p] - learning_rate*p.grad
+            p.vals = p.vals + self.v[p]
+            p.grad = np.zeros_like(p.grad)
+        self.v = v
+
+class Optim_AdaGrad(Optimizer):
+    def __init__(self, gamma=0.0000001):
+        self.gamma = gamma
+        self.r = {}
+
+    def step(self, params, l_rate):
+        for p in params:
+            if p not in self.r:
+                self.r[p] = np.zeros_like(p.vals)
+            self.r[p] = self.r[p] + p.grad**2
+            p.vals = p.vals - l_rate * p.grad / (self.gamma + self.r[p]**0.5)
+            p.grad = np.zeros_like(p.grad)
+
+class Optim_RMSPropclass(Optimizer):
+    def __init__(self, decay_rate, gamma=0.000001):
+        self.decay_rate = decay_rate
+        self.gamma = gamma
+        self.r = {}
+    def step(self, params, l_rate):
+        dr = self.decay_rate
+        for p in params:
+            if p not in self.r:
+                self.r[p] = np.zeros_like(p.vals)
+            self.r[p] = dr*self.r[p] + (1-dr)*p.grad**2
+            p.vals = p.vals - l_rate*p.grad/(self.gamma + self.r[p]**0.5)
+            p.grad = np.zeros_like(p.grad)
+
+class Optim_Adam(Optimizer):
+    def __init__(self, beta1, beta2, gamma = 0.000001):
+        self.b1 = beta1
+        self.b2 = beta2
+        self.gamma = gamma
+        self.r = {}
+        self.s = {}
+        self.t = 0
+
+    def step(self, params, l_rate):
+        self.t += 1
+        t = self.t
+        beta1 = self.b1
+        beta2 = self.b2
+        for p in params:
+            if p not in self.r:
+                self.r[p] = np.zeros_like(p.vals)
+            if p not in self.s:
+                self.s[p] = np.zeros_like(p.vals)
+
+            self.s[p] = beta1*self.s[p] + (1-beta1)*p.grad
+            self.r[p] = beta2*self.r[p] + (1-beta2)*p.grad**2
+
+            s_hat = self.s[p]/(1-beta1**t)
+            r_hat = self.r[p]/(1-beta2**t)
+
+            p.vals = p.vals - l_rate*s_hat/(self.gamma + r_hat**0.5)
+            p.grad = np.zeros_like(p.grad)
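Each optimizer in the new optimizer.py keeps its per-parameter state (momentum velocity, accumulated squared gradients, or both Adam moments) in dictionaries keyed by the parameter objects themselves, so parameters must be hashable and must be the identical objects on every call to step; step also zeroes p.grad after applying the update. A self-contained sanity check of the Adam update, using a hypothetical Param stand-in for the library's Values class so it runs without the autogradient module:

import numpy as np
from rb_deeplearning_lib.optimizer import Optim_Adam

class Param:  # hypothetical stand-in for autogradient.Values
    def __init__(self, vals):
        self.vals = np.asarray(vals, dtype=float)
        self.grad = np.zeros_like(self.vals)

p = Param([1.0, -2.0])
opt = Optim_Adam(beta1=0.9, beta2=0.999)
for _ in range(200):
    p.grad = 2.0 * p.vals      # gradient of f(x) = sum(x**2)
    opt.step([p], l_rate=0.1)  # updates p.vals in place, then zeroes p.grad
print(p.vals)                  # approaches [0. 0.]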
rb_deeplearning_lib-0.1.0.dist-info/METADATA
@@ -1,9 +1,9 @@
 Metadata-Version: 2.4
 Name: rb-deeplearning-lib
-Version: 0.0.1
+Version: 0.1.0
 Summary: This is a machine learning--more specifically deep learning--library from my independent study on deep learning. This library is both a result of my learning and a tool for AI development.
 License-Expression: MIT
-Project-URL: Homepage, https://github.com/rylan-berry/DeepLearningIndependentStudy/deeplearning_package
+Project-URL: Homepage, https://github.com/rylan-berry/DeepLearningIndependentStudy/tree/main/deeplearning_package
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
rb_deeplearning_lib-0.1.0.dist-info/RECORD (new file)
@@ -0,0 +1,10 @@
+rb_deeplearning_lib/__init__.py,sha256=3pOhAKKTkdfXcPT1mxouH4ZOHfk419-Wnqv24oss6P4,54
+rb_deeplearning_lib/autogradient.py,sha256=woXGKi1EZ1QobvRbPMtj4WQqdkOCr7Xk_KeG8WE1d6k,9388
+rb_deeplearning_lib/neural_net.py,sha256=nwC4bdEYSYRtylqe7p68SFlRJKc1htBB-sMJGiFl8fk,6478
+rb_deeplearning_lib/optimizer.py,sha256=OI2x7xrna4ksbaprNgIx25jogVM10OGfNYKpwetRc7o,2690
+rb_deeplearning_lib/sequence.py,sha256=As8FiHVrcN2w0xXV8Vt_eHhNh1vDWDM3oIfrZMUkZ5g,417
+rb_deeplearning_lib-0.1.0.dist-info/licenses/LICENSE,sha256=RGN7cN89q7JPnZj-z1KSZT9M3zcJPwjzb-gEOt6SCCA,1069
+rb_deeplearning_lib-0.1.0.dist-info/METADATA,sha256=rFIR8_pNwAgckfor06KZlnkJdwAPPMohhpB4dmCp4S4,2274
+rb_deeplearning_lib-0.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+rb_deeplearning_lib-0.1.0.dist-info/top_level.txt,sha256=oK1ClPmIXDzAbwPHl69BE88PFjpsYta_a5vNEa5WJoA,20
+rb_deeplearning_lib-0.1.0.dist-info/RECORD,,
rb_deeplearning_lib-0.1.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.9.0)
+Generator: setuptools (80.10.2)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
rb_deeplearning_lib-0.0.1.dist-info/RECORD (deleted)
@@ -1,9 +0,0 @@
-rb_deeplearning_lib/__init__.py,sha256=3pOhAKKTkdfXcPT1mxouH4ZOHfk419-Wnqv24oss6P4,54
-rb_deeplearning_lib/autogradient.py,sha256=woXGKi1EZ1QobvRbPMtj4WQqdkOCr7Xk_KeG8WE1d6k,9388
-rb_deeplearning_lib/neural_net.py,sha256=tMGHeL1VVVTe3D4MzFMxQaz9pPdMA3-gbQbMqkPQzKs,6411
-rb_deeplearning_lib/sequence.py,sha256=As8FiHVrcN2w0xXV8Vt_eHhNh1vDWDM3oIfrZMUkZ5g,417
-rb_deeplearning_lib-0.0.1.dist-info/licenses/LICENSE,sha256=RGN7cN89q7JPnZj-z1KSZT9M3zcJPwjzb-gEOt6SCCA,1069
-rb_deeplearning_lib-0.0.1.dist-info/METADATA,sha256=LDVDxShZ5P5C_rHucP-lsx196KwVLZa9tX2E7k_7nLo,2264
-rb_deeplearning_lib-0.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-rb_deeplearning_lib-0.0.1.dist-info/top_level.txt,sha256=oK1ClPmIXDzAbwPHl69BE88PFjpsYta_a5vNEa5WJoA,20
-rb_deeplearning_lib-0.0.1.dist-info/RECORD,,