foscat 3.1.5-py3-none-any.whl → 3.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
foscat/GCNN.py CHANGED
@@ -1,172 +1,239 @@
-import numpy as np
 import pickle
+
+import numpy as np
+
 import foscat.scat_cov as sc
-
+
 
 class GCNN:
-
-    def __init__(self,
-                 scat_operator=None,
-                 nparam=1,
-                 nscale=1,
-                 chanlist=[],
-                 in_nside=1,
-                 n_chan_out=1,
-                 nbatch=1,
-                 SEED=1234,
-                 hidden=None,
-                 filename=None):
+
+    def __init__(
+        self,
+        scat_operator=None,
+        nparam=1,
+        nscale=1,
+        chanlist=[],
+        in_nside=1,
+        n_chan_out=1,
+        nbatch=1,
+        SEED=1234,
+        hidden=None,
+        filename=None,
+    ):
 
         if filename is not None:
 
-            outlist=pickle.load(open("%s.pkl"%(filename),"rb"))
-
-            self.scat_operator=sc.funct(KERNELSZ=outlist[3],all_type=outlist[7])
-            self.KERNELSZ= self.scat_operator.KERNELSZ
-            self.all_type= self.scat_operator.all_type
-            self.npar=outlist[2]
-            self.nscale=outlist[5]
-            self.chanlist=outlist[0]
-            self.in_nside=outlist[4]
-            self.nbatch=outlist[1]
-            self.n_chan_out=outlist[8]
-            if len(outlist[9])>0:
-                self.hidden=outlist[9]
+            outlist = pickle.load(open("%s.pkl" % (filename), "rb"))
+
+            self.scat_operator = sc.funct(KERNELSZ=outlist[3], all_type=outlist[7])
+            self.KERNELSZ = self.scat_operator.KERNELSZ
+            self.all_type = self.scat_operator.all_type
+            self.npar = outlist[2]
+            self.nscale = outlist[5]
+            self.chanlist = outlist[0]
+            self.in_nside = outlist[4]
+            self.nbatch = outlist[1]
+            self.n_chan_out = outlist[8]
+            if len(outlist[9]) > 0:
+                self.hidden = outlist[9]
             else:
-                self.hidden=None
-
-            self.x=self.scat_operator.backend.bk_cast(outlist[6])
+                self.hidden = None
+
+            self.x = self.scat_operator.backend.bk_cast(outlist[6])
         else:
-            self.nscale=nscale
-            self.nbatch=nbatch
-            self.npar=nparam
-            self.n_chan_out=n_chan_out
-            self.scat_operator=scat_operator
-
-            if len(chanlist)!=nscale+1:
-                print('len of chanlist (here %d) should of nscale+1 (here %d)'%(len(chanlist),nscale+1))
+            self.nscale = nscale
+            self.nbatch = nbatch
+            self.npar = nparam
+            self.n_chan_out = n_chan_out
+            self.scat_operator = scat_operator
+
+            if len(chanlist) != nscale + 1:
+                print(
+                    "len of chanlist (here %d) should of nscale+1 (here %d)"
+                    % (len(chanlist), nscale + 1)
+                )
                 return None
-
-            self.chanlist=chanlist
-            self.KERNELSZ= scat_operator.KERNELSZ
-            self.all_type= scat_operator.all_type
-            self.in_nside=in_nside
-            self.hidden=hidden
+
+            self.chanlist = chanlist
+            self.KERNELSZ = scat_operator.KERNELSZ
+            self.all_type = scat_operator.all_type
+            self.in_nside = in_nside
+            self.hidden = hidden
 
             np.random.seed(SEED)
-            self.x=scat_operator.backend.bk_cast(np.random.randn(self.get_number_of_weights())/(self.KERNELSZ*self.KERNELSZ))
+            self.x = scat_operator.backend.bk_cast(
+                np.random.randn(self.get_number_of_weights())
+                / (self.KERNELSZ * self.KERNELSZ)
+            )
 
-    def save(self,filename):
+    def save(self, filename):
 
         if self.hidden is None:
-            tabh=[]
+            tabh = []
         else:
-            tabh=self.hidden
-
-        www= self.get_weights()
-
-        if not isinstance(www,np.ndarray):
-            www=www.numpy()
-
-        outlist=[self.chanlist, \
-                 self.nbatch, \
-                 self.npar, \
-                 self.KERNELSZ, \
-                 self.in_nside, \
-                 self.nscale, \
-                 www, \
-                 self.all_type, \
-                 self.n_chan_out, \
-                 tabh]
-
-        myout=open("%s.pkl"%(filename),"wb")
-        pickle.dump(outlist,myout)
+            tabh = self.hidden
+
+        www = self.get_weights()
+
+        if not isinstance(www, np.ndarray):
+            www = www.numpy()
+
+        outlist = [
+            self.chanlist,
+            self.nbatch,
+            self.npar,
+            self.KERNELSZ,
+            self.in_nside,
+            self.nscale,
+            www,
+            self.all_type,
+            self.n_chan_out,
+            tabh,
+        ]
+
+        myout = open("%s.pkl" % (filename), "wb")
+        pickle.dump(outlist, myout)
         myout.close()
-
+
     def get_number_of_weights(self):
-        totnchan=0
-        szk=self.KERNELSZ*self.KERNELSZ
+        totnchan = 0
+        szk = self.KERNELSZ * self.KERNELSZ
         if self.hidden is not None:
-            totnchan=totnchan+self.hidden[0]*self.npar
-            for i in range(1,len(self.hidden)):
-                totnchan=totnchan+self.hidden[i]*self.hidden[i-1]
-            totnchan=totnchan+self.hidden[len(self.hidden)-1]*12*self.in_nside**2*self.chanlist[0]
+            totnchan = totnchan + self.hidden[0] * self.npar
+            for i in range(1, len(self.hidden)):
+                totnchan = totnchan + self.hidden[i] * self.hidden[i - 1]
+            totnchan = (
+                totnchan
+                + self.hidden[len(self.hidden) - 1]
+                * 12
+                * self.in_nside**2
+                * self.chanlist[0]
+            )
         else:
-            totnchan=self.npar*12*self.in_nside**2*self.chanlist[0]
-
+            totnchan = self.npar * 12 * self.in_nside**2 * self.chanlist[0]
+
         for i in range(self.nscale):
-            totnchan=totnchan+self.chanlist[i]*self.chanlist[i+1]*szk
-
-        return totnchan+self.chanlist[i+1]*self.n_chan_out*szk
+            totnchan = totnchan + self.chanlist[i] * self.chanlist[i + 1] * szk
+
+        return totnchan + self.chanlist[i + 1] * self.n_chan_out * szk
+
+    def set_weights(self, x):
+        self.x = x
 
-    def set_weights(self,x):
-        self.x=x
-
     def get_weights(self):
         return self.x
-
-    def eval(self,param,indices=None,weights=None,axis=0):
 
-        x=self.x
-
+    def eval(self, param, indices=None, weights=None, axis=0):
+
+        x = self.x
 
-        if axis==0:
-            nval=1
+        if axis == 0:
+            nval = 1
         else:
-            nval=param.shape[0]
-
-        nn=0
-        im=self.scat_operator.backend.bk_reshape(param,[nval,self.npar])
+            nval = param.shape[0]
+
+        nn = 0
+        im = self.scat_operator.backend.bk_reshape(param, [nval, self.npar])
         if self.hidden is not None:
-            ww=self.scat_operator.backend.bk_reshape(x[nn:nn+self.npar*self.hidden[0]], \
-                                                     [self.npar,self.hidden[0]])
-            im=self.scat_operator.backend.bk_matmul(im,ww)
-            im=self.scat_operator.backend.bk_relu(im)
-            nn+=self.npar*self.hidden[0]
-
-            for i in range(1,len(self.hidden)):
-                ww=self.scat_operator.backend.bk_reshape(x[nn:nn+self.hidden[i]*self.hidden[i-1]], \
-                                                         [self.hidden[i-1],self.hidden[i]])
-                im=self.scat_operator.backend.bk_matmul(im,ww)
-                im=self.scat_operator.backend.bk_relu(im)
-                nn+=self.hidden[i]*self.hidden[i-1]
-
-            ww=self.scat_operator.backend.bk_reshape(x[nn:nn+self.hidden[len(self.hidden)-1]*12*self.in_nside**2*self.chanlist[0]], \
-                                                     [self.hidden[len(self.hidden)-1],
-                                                      12*self.in_nside**2*self.chanlist[0]])
-            im=self.scat_operator.backend.bk_matmul(im,ww)
-            im=self.scat_operator.backend.bk_reshape(im,[nval,12*self.in_nside**2,self.chanlist[0]])
-            im=self.scat_operator.backend.bk_relu(im)
-            nn+=self.hidden[len(self.hidden)-1]*12*self.in_nside**2*self.chanlist[0]
-
+            ww = self.scat_operator.backend.bk_reshape(
+                x[nn : nn + self.npar * self.hidden[0]], [self.npar, self.hidden[0]]
+            )
+            im = self.scat_operator.backend.bk_matmul(im, ww)
+            im = self.scat_operator.backend.bk_relu(im)
+            nn += self.npar * self.hidden[0]
+
+            for i in range(1, len(self.hidden)):
+                ww = self.scat_operator.backend.bk_reshape(
+                    x[nn : nn + self.hidden[i] * self.hidden[i - 1]],
+                    [self.hidden[i - 1], self.hidden[i]],
+                )
+                im = self.scat_operator.backend.bk_matmul(im, ww)
+                im = self.scat_operator.backend.bk_relu(im)
+                nn += self.hidden[i] * self.hidden[i - 1]
+
+            ww = self.scat_operator.backend.bk_reshape(
+                x[
+                    nn : nn
+                    + self.hidden[len(self.hidden) - 1]
+                    * 12
+                    * self.in_nside**2
+                    * self.chanlist[0]
+                ],
+                [
+                    self.hidden[len(self.hidden) - 1],
+                    12 * self.in_nside**2 * self.chanlist[0],
+                ],
+            )
+            im = self.scat_operator.backend.bk_matmul(im, ww)
+            im = self.scat_operator.backend.bk_reshape(
+                im, [nval, 12 * self.in_nside**2, self.chanlist[0]]
+            )
+            im = self.scat_operator.backend.bk_relu(im)
+            nn += (
+                self.hidden[len(self.hidden) - 1]
+                * 12
+                * self.in_nside**2
+                * self.chanlist[0]
+            )
+
         else:
-            ww=self.scat_operator.backend.bk_reshape(x[0:self.npar*12*self.in_nside**2*self.chanlist[0]], \
-                                                     [self.npar,12*self.in_nside**2*self.chanlist[0]])
-            im=self.scat_operator.backend.bk_matmul(im,ww)
-            im=self.scat_operator.backend.bk_reshape(im,[nval,12*self.in_nside**2,self.chanlist[0]])
-            im=self.scat_operator.backend.bk_relu(im)
+            ww = self.scat_operator.backend.bk_reshape(
+                x[0 : self.npar * 12 * self.in_nside**2 * self.chanlist[0]],
+                [self.npar, 12 * self.in_nside**2 * self.chanlist[0]],
+            )
+            im = self.scat_operator.backend.bk_matmul(im, ww)
+            im = self.scat_operator.backend.bk_reshape(
+                im, [nval, 12 * self.in_nside**2, self.chanlist[0]]
+            )
+            im = self.scat_operator.backend.bk_relu(im)
 
-        nn=self.npar*12*self.chanlist[0]*self.in_nside**2
+        nn = self.npar * 12 * self.chanlist[0] * self.in_nside**2
 
-
         for k in range(self.nscale):
-            ww=self.scat_operator.backend.bk_reshape(x[nn:nn+self.KERNELSZ*self.KERNELSZ*self.chanlist[k]*self.chanlist[k+1]],
-                                                     [self.KERNELSZ*self.KERNELSZ,self.chanlist[k],self.chanlist[k+1]])
-            nn=nn+self.KERNELSZ*self.KERNELSZ*self.chanlist[k]*self.chanlist[k+1]
+            ww = self.scat_operator.backend.bk_reshape(
+                x[
+                    nn : nn
+                    + self.KERNELSZ
+                    * self.KERNELSZ
+                    * self.chanlist[k]
+                    * self.chanlist[k + 1]
+                ],
+                [self.KERNELSZ * self.KERNELSZ, self.chanlist[k], self.chanlist[k + 1]],
+            )
+            nn = (
+                nn
+                + self.KERNELSZ
+                * self.KERNELSZ
+                * self.chanlist[k]
+                * self.chanlist[k + 1]
+            )
             if indices is None:
-                im=self.scat_operator.healpix_layer_transpose(im,ww,axis=1)
+                im = self.scat_operator.healpix_layer_transpose(im, ww, axis=1)
             else:
-                im=self.scat_operator.healpix_layer_transpose(im,ww,indices=indices[k],weights=weights[k],axis=1)
-            im=self.scat_operator.backend.bk_relu(im)
+                im = self.scat_operator.healpix_layer_transpose(
+                    im, ww, indices=indices[k], weights=weights[k], axis=1
+                )
+            im = self.scat_operator.backend.bk_relu(im)
 
-        ww=self.scat_operator.backend.bk_reshape(x[nn:],[self.KERNELSZ*self.KERNELSZ,self.chanlist[self.nscale],self.n_chan_out])
+        ww = self.scat_operator.backend.bk_reshape(
+            x[nn:],
+            [
+                self.KERNELSZ * self.KERNELSZ,
+                self.chanlist[self.nscale],
+                self.n_chan_out,
+            ],
+        )
         if indices is None:
-            im=self.scat_operator.healpix_layer(im,ww,axis=1)
+            im = self.scat_operator.healpix_layer(im, ww, axis=1)
         else:
-            im=self.scat_operator.healpix_layer(im,ww,indices=indices[self.nscale],weights=weights[self.nscale],axis=1)
-
-        if axis==0:
-            im=self.scat_operator.backend.bk_reshape(im,[im.shape[1],im.shape[2]])
-        return im
+            im = self.scat_operator.healpix_layer(
+                im,
+                ww,
+                indices=indices[self.nscale],
+                weights=weights[self.nscale],
+                axis=1,
+            )
 
-
+        if axis == 0:
+            im = self.scat_operator.backend.bk_reshape(im, [im.shape[1], im.shape[2]])
+        return im
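Note: the GCNN.py changes above appear to be mechanical reformatting only (import re-ordering plus Black-style line wrapping); the constructor arguments (scat_operator, nparam, nscale, chanlist, in_nside, n_chan_out, nbatch, SEED, hidden, filename) and the save/get_weights/set_weights/eval methods keep the same behaviour. For orientation, a minimal usage sketch follows; the sc.funct() arguments and the hyper-parameter values are illustrative assumptions, not values taken from this diff.

    import numpy as np

    import foscat.scat_cov as sc
    from foscat.GCNN import GCNN

    # Illustrative values only: KERNELSZ=3 and the channel/scale choices below
    # are assumptions for this sketch, not defaults documented in the diff.
    scat_op = sc.funct(KERNELSZ=3)

    net = GCNN(
        scat_operator=scat_op,
        nparam=8,             # length of the input parameter vector
        nscale=2,             # number of healpix_layer_transpose stages
        chanlist=[4, 8, 16],  # must have length nscale + 1
        in_nside=1,
        n_chan_out=1,
    )

    params = scat_op.backend.bk_cast(np.random.randn(8))
    out = net.eval(params)  # dense (optional hidden) layers, then the HEALPix layers
    net.save("my_gcnn")     # writes my_gcnn.pkl; GCNN(filename="my_gcnn") reloads it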
foscat/Softmax.py CHANGED
@@ -1,54 +1,59 @@
-import tensorflow as tf
-from tensorflow.keras.models import Sequential
+#import tensorflow as tf
 from tensorflow.keras.layers import Dense, Softmax
-import numpy as np
+from tensorflow.keras.models import Sequential
+
 
 class SoftmaxClassifier:
     """
     A classifier based on the softmax function for multi-class classification.
-
+
     Attributes:
         model (Sequential): A TensorFlow/Keras model comprising a hidden layer and a softmax output layer.
-
+
     Parameters:
         Nval (int): Number of features in the input dataset.
         Nclass (int): Number of classes to classify the input data into.
         Nhidden (int, optional): Number of neurons in the hidden layer. Defaults to 10.
     """
 
-    def __init__(self, Nval, Nclass, Nhidden=10,Nlevel=1):
+    def __init__(self, Nval, Nclass, Nhidden=10, Nlevel=1):
         """
         Initializes the SoftmaxClassifier with a specified number of input features, classes, and hidden neurons.
-
+
         The model consists of a dense hidden layer with ReLU activation and a dense output layer with softmax activation.
-
+
         Args:
            Nval (int): Number of features in the input dataset.
            Nclass (int): Number of classes for the output classification.
            Nhidden (int): Number of neurons in the hidden layer.
        """
        # Create the model
-        TheModel=[Dense(units=Nhidden, activation='relu', input_shape=(Nval,))]
-
-        for k in range(1,Nlevel):
-            TheModel=TheModel+[Dense(units=Nhidden, activation='relu', input_shape=(Nhidden,))]
-
-        TheModel=TheModel+[Dense(units=Nclass), # The output layer with Nclass neurons (for Nclass classes)
-                           Softmax() # Softmax activation for classification
+        TheModel = [Dense(units=Nhidden, activation="relu", input_shape=(Nval,))]
+
+        for k in range(1, Nlevel):
+            TheModel = TheModel + [
+                Dense(units=Nhidden, activation="relu", input_shape=(Nhidden,))
+            ]
+
+        TheModel = TheModel + [
+            Dense(
+                units=Nclass
+            ),  # The output layer with Nclass neurons (for Nclass classes)
+            Softmax(),  # Softmax activation for classification
         ]
         self.model = Sequential(TheModel)
 
         # Model compilation
         self.model.compile(
-            optimizer='adam', # Adam optimizer
-            loss='sparse_categorical_crossentropy', # Loss function for Nclass-class classification
-            metrics=['accuracy'] # Evaluation metric: accuracy
+            optimizer="adam",  # Adam optimizer
+            loss="sparse_categorical_crossentropy",  # Loss function for Nclass-class classification
+            metrics=["accuracy"],  # Evaluation metric: accuracy
         )
-
+
     def fit(self, x_train, y_train, epochs=10):
         """
         Trains the model on the provided dataset.
-
+
         Args:
             x_train (np.ndarray): Training data features, shape (num_samples, Nval).
             y_train (np.ndarray): Training data labels, shape (num_samples, ).
@@ -60,10 +65,10 @@ class SoftmaxClassifier:
     def predict(self, x_train):
         """
         Predicts the class labels for the given input data.
-
+
         Args:
             x_train (np.ndarray): Input data for which to predict class labels, shape (num_samples, Nval).
-
+
         Returns:
             np.ndarray: Predicted class labels for the input data.
         """
foscat/Spline1D.py CHANGED
@@ -1,11 +1,14 @@
-
 class Spline1D:
     def __init__(self, nodes, degree=3):
         self.degree = degree
         self.nodes = nodes
         self.norm = [0] * (self.degree + 1)
         for i in range(self.degree + 1):
-            self.norm[i] = pow(-1, i) * (self.degree + 1) / (self._fact_spline(self.degree + 1 - i) * self._fact_spline(i))
+            self.norm[i] = (
+                pow(-1, i)
+                * (self.degree + 1)
+                / (self._fact_spline(self.degree + 1 - i) * self._fact_spline(i))
+            )
 
     def _fact_spline(self, x):
         if x <= 1:
@@ -32,11 +35,12 @@ class Spline1D:
             if x > 1.0:
                 tx = (self.nodes - 1) - i
                 for j in range(self.degree + 1):
-                    tmp += self.norm[j] * self.yplus_spline1d(tx - j + (self.degree + 1) / 2)
+                    tmp += self.norm[j] * self.yplus_spline1d(
+                        tx - j + (self.degree + 1) / 2
+                    )
             if tmp < 0:
                 tmp = 0.0
             y[i] += tmp
         total = sum(y)
         y = [yi / total for yi in y]
         return y
-
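Note: in Spline1D.py only the normalisation assignment and one accumulation line are re-wrapped; the coefficients are still norm[i] = (-1)^i * (degree + 1) / ((degree + 1 - i)! * i!). The standalone sketch below re-derives them with math.factorial (assuming the class's _fact_spline helper implements the ordinary factorial, which the diff only hints at) so the re-wrapped expression can be checked by eye:

    from math import factorial

    degree = 3
    # Same expression as the reformatted Spline1D.__init__ line:
    norm = [
        (-1) ** i * (degree + 1) / (factorial(degree + 1 - i) * factorial(i))
        for i in range(degree + 1)
    ]
    print(norm)  # approx. [0.1667, -0.6667, 1.0, -0.6667]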