foscat 3.0.9__py3-none-any.whl → 3.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
foscat/GCNN.py ADDED
@@ -0,0 +1,239 @@
+import pickle
+
+import numpy as np
+
+import foscat.scat_cov as sc
+
+
+class GCNN:
+
+    def __init__(
+        self,
+        scat_operator=None,
+        nparam=1,
+        nscale=1,
+        chanlist=[],
+        in_nside=1,
+        n_chan_out=1,
+        nbatch=1,
+        SEED=1234,
+        hidden=None,
+        filename=None,
+    ):
+
+        if filename is not None:
+
+            outlist = pickle.load(open("%s.pkl" % (filename), "rb"))
+
+            self.scat_operator = sc.funct(KERNELSZ=outlist[3], all_type=outlist[7])
+            self.KERNELSZ = self.scat_operator.KERNELSZ
+            self.all_type = self.scat_operator.all_type
+            self.npar = outlist[2]
+            self.nscale = outlist[5]
+            self.chanlist = outlist[0]
+            self.in_nside = outlist[4]
+            self.nbatch = outlist[1]
+            self.n_chan_out = outlist[8]
+            if len(outlist[9]) > 0:
+                self.hidden = outlist[9]
+            else:
+                self.hidden = None
+
+            self.x = self.scat_operator.backend.bk_cast(outlist[6])
+        else:
+            self.nscale = nscale
+            self.nbatch = nbatch
+            self.npar = nparam
+            self.n_chan_out = n_chan_out
+            self.scat_operator = scat_operator
+
+            if len(chanlist) != nscale + 1:
+                print(
+                    "len of chanlist (here %d) should of nscale+1 (here %d)"
+                    % (len(chanlist), nscale + 1)
+                )
+                return None
+
+            self.chanlist = chanlist
+            self.KERNELSZ = scat_operator.KERNELSZ
+            self.all_type = scat_operator.all_type
+            self.in_nside = in_nside
+            self.hidden = hidden
+
+            np.random.seed(SEED)
+            self.x = scat_operator.backend.bk_cast(
+                np.random.randn(self.get_number_of_weights())
+                / (self.KERNELSZ * self.KERNELSZ)
+            )
+
+    def save(self, filename):
+
+        if self.hidden is None:
+            tabh = []
+        else:
+            tabh = self.hidden
+
+        www = self.get_weights()
+
+        if not isinstance(www, np.ndarray):
+            www = www.numpy()
+
+        outlist = [
+            self.chanlist,
+            self.nbatch,
+            self.npar,
+            self.KERNELSZ,
+            self.in_nside,
+            self.nscale,
+            www,
+            self.all_type,
+            self.n_chan_out,
+            tabh,
+        ]
+
+        myout = open("%s.pkl" % (filename), "wb")
+        pickle.dump(outlist, myout)
+        myout.close()
+
+    def get_number_of_weights(self):
+        totnchan = 0
+        szk = self.KERNELSZ * self.KERNELSZ
+        if self.hidden is not None:
+            totnchan = totnchan + self.hidden[0] * self.npar
+            for i in range(1, len(self.hidden)):
+                totnchan = totnchan + self.hidden[i] * self.hidden[i - 1]
+            totnchan = (
+                totnchan
+                + self.hidden[len(self.hidden) - 1]
+                * 12
+                * self.in_nside**2
+                * self.chanlist[0]
+            )
+        else:
+            totnchan = self.npar * 12 * self.in_nside**2 * self.chanlist[0]
+
+        for i in range(self.nscale):
+            totnchan = totnchan + self.chanlist[i] * self.chanlist[i + 1] * szk
+
+        return totnchan + self.chanlist[i + 1] * self.n_chan_out * szk
+
+    def set_weights(self, x):
+        self.x = x
+
+    def get_weights(self):
+        return self.x
+
+    def eval(self, param, indices=None, weights=None, axis=0):
+
+        x = self.x
+
+        if axis == 0:
+            nval = 1
+        else:
+            nval = param.shape[0]
+
+        nn = 0
+        im = self.scat_operator.backend.bk_reshape(param, [nval, self.npar])
+        if self.hidden is not None:
+            ww = self.scat_operator.backend.bk_reshape(
+                x[nn : nn + self.npar * self.hidden[0]], [self.npar, self.hidden[0]]
+            )
+            im = self.scat_operator.backend.bk_matmul(im, ww)
+            im = self.scat_operator.backend.bk_relu(im)
+            nn += self.npar * self.hidden[0]
+
+            for i in range(1, len(self.hidden)):
+                ww = self.scat_operator.backend.bk_reshape(
+                    x[nn : nn + self.hidden[i] * self.hidden[i - 1]],
+                    [self.hidden[i - 1], self.hidden[i]],
+                )
+                im = self.scat_operator.backend.bk_matmul(im, ww)
+                im = self.scat_operator.backend.bk_relu(im)
+                nn += self.hidden[i] * self.hidden[i - 1]
+
+            ww = self.scat_operator.backend.bk_reshape(
+                x[
+                    nn : nn
+                    + self.hidden[len(self.hidden) - 1]
+                    * 12
+                    * self.in_nside**2
+                    * self.chanlist[0]
+                ],
+                [
+                    self.hidden[len(self.hidden) - 1],
+                    12 * self.in_nside**2 * self.chanlist[0],
+                ],
+            )
+            im = self.scat_operator.backend.bk_matmul(im, ww)
+            im = self.scat_operator.backend.bk_reshape(
+                im, [nval, 12 * self.in_nside**2, self.chanlist[0]]
+            )
+            im = self.scat_operator.backend.bk_relu(im)
+            nn += (
+                self.hidden[len(self.hidden) - 1]
+                * 12
+                * self.in_nside**2
+                * self.chanlist[0]
+            )
+
+        else:
+            ww = self.scat_operator.backend.bk_reshape(
+                x[0 : self.npar * 12 * self.in_nside**2 * self.chanlist[0]],
+                [self.npar, 12 * self.in_nside**2 * self.chanlist[0]],
+            )
+            im = self.scat_operator.backend.bk_matmul(im, ww)
+            im = self.scat_operator.backend.bk_reshape(
+                im, [nval, 12 * self.in_nside**2, self.chanlist[0]]
+            )
+            im = self.scat_operator.backend.bk_relu(im)
+
+            nn = self.npar * 12 * self.chanlist[0] * self.in_nside**2
+
+        for k in range(self.nscale):
+            ww = self.scat_operator.backend.bk_reshape(
+                x[
+                    nn : nn
+                    + self.KERNELSZ
+                    * self.KERNELSZ
+                    * self.chanlist[k]
+                    * self.chanlist[k + 1]
+                ],
+                [self.KERNELSZ * self.KERNELSZ, self.chanlist[k], self.chanlist[k + 1]],
+            )
+            nn = (
+                nn
+                + self.KERNELSZ
+                * self.KERNELSZ
+                * self.chanlist[k]
+                * self.chanlist[k + 1]
+            )
+            if indices is None:
+                im = self.scat_operator.healpix_layer_transpose(im, ww, axis=1)
+            else:
+                im = self.scat_operator.healpix_layer_transpose(
+                    im, ww, indices=indices[k], weights=weights[k], axis=1
+                )
+            im = self.scat_operator.backend.bk_relu(im)
+
+        ww = self.scat_operator.backend.bk_reshape(
+            x[nn:],
+            [
+                self.KERNELSZ * self.KERNELSZ,
+                self.chanlist[self.nscale],
+                self.n_chan_out,
+            ],
+        )
+        if indices is None:
+            im = self.scat_operator.healpix_layer(im, ww, axis=1)
+        else:
+            im = self.scat_operator.healpix_layer(
+                im,
+                ww,
+                indices=indices[self.nscale],
+                weights=weights[self.nscale],
+                axis=1,
+            )
+
+        if axis == 0:
+            im = self.scat_operator.backend.bk_reshape(im, [im.shape[1], im.shape[2]])
+        return im
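The new `foscat/GCNN.py` module decodes a small parameter vector into a HEALPix map: optional dense `hidden` layers expand the `nparam` inputs to `12 * in_nside**2 * chanlist[0]` values, then `nscale` transposed HEALPix convolutions (`healpix_layer_transpose`) upsample the map before a final `healpix_layer` produces `n_chan_out` channels. The sketch below only illustrates how the class above might be driven; the kernel size, channel list and nside values are assumptions, not taken from the package documentation.

```python
# Hypothetical usage sketch of the new GCNN class (all values are assumptions).
import numpy as np

import foscat.scat_cov as sc
from foscat.GCNN import GCNN

scat_op = sc.funct(KERNELSZ=3)      # scattering operator providing the backend

net = GCNN(
    scat_operator=scat_op,
    nparam=8,                       # length of the input parameter vector
    nscale=2,                       # number of transposed HEALPix layers
    chanlist=[16, 8, 4],            # must hold nscale + 1 channel counts
    in_nside=4,                     # nside of the first generated map
    n_chan_out=1,
)

# Decode a parameter vector into a HEALPix map (axis=0: single sample).
param = scat_op.backend.bk_cast(np.random.randn(8).astype("float32"))
hmap = net.eval(param)

# Weights and configuration round-trip through a pickle file.
net.save("gcnn_model")              # writes gcnn_model.pkl
restored = GCNN(filename="gcnn_model")
```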
foscat/Softmax.py CHANGED
@@ -1,50 +1,59 @@
-import tensorflow as tf
-from tensorflow.keras.models import Sequential
+# import tensorflow as tf
 from tensorflow.keras.layers import Dense, Softmax
-import numpy as np
+from tensorflow.keras.models import Sequential
+
 
 class SoftmaxClassifier:
     """
     A classifier based on the softmax function for multi-class classification.
-
+
     Attributes:
         model (Sequential): A TensorFlow/Keras model comprising a hidden layer and a softmax output layer.
-
+
     Parameters:
         Nval (int): Number of features in the input dataset.
         Nclass (int): Number of classes to classify the input data into.
         Nhidden (int, optional): Number of neurons in the hidden layer. Defaults to 10.
     """
 
-    def __init__(self, Nval, Nclass, Nhidden=10):
+    def __init__(self, Nval, Nclass, Nhidden=10, Nlevel=1):
        """
        Initializes the SoftmaxClassifier with a specified number of input features, classes, and hidden neurons.
-
+
        The model consists of a dense hidden layer with ReLU activation and a dense output layer with softmax activation.
-
+
        Args:
            Nval (int): Number of features in the input dataset.
            Nclass (int): Number of classes for the output classification.
            Nhidden (int): Number of neurons in the hidden layer.
        """
        # Create the model
-        self.model = Sequential([
-            Dense(units=Nhidden, activation='relu', input_shape=(Nval,)), # A hidden layer with Nhidden neurons
-            Dense(units=Nclass), # The output layer with Nclass neurons (for Nclass classes)
-            Softmax() # Softmax activation for classification
-        ])
+        TheModel = [Dense(units=Nhidden, activation="relu", input_shape=(Nval,))]
+
+        for k in range(1, Nlevel):
+            TheModel = TheModel + [
+                Dense(units=Nhidden, activation="relu", input_shape=(Nhidden,))
+            ]
+
+        TheModel = TheModel + [
+            Dense(
+                units=Nclass
+            ), # The output layer with Nclass neurons (for Nclass classes)
+            Softmax(), # Softmax activation for classification
+        ]
+        self.model = Sequential(TheModel)
 
         # Model compilation
         self.model.compile(
-            optimizer='adam', # Adam optimizer
-            loss='sparse_categorical_crossentropy', # Loss function for Nclass-class classification
-            metrics=['accuracy'] # Evaluation metric: accuracy
+            optimizer="adam", # Adam optimizer
+            loss="sparse_categorical_crossentropy", # Loss function for Nclass-class classification
+            metrics=["accuracy"], # Evaluation metric: accuracy
         )
-
+
     def fit(self, x_train, y_train, epochs=10):
        """
        Trains the model on the provided dataset.
-
+
        Args:
            x_train (np.ndarray): Training data features, shape (num_samples, Nval).
            y_train (np.ndarray): Training data labels, shape (num_samples, ).
@@ -56,10 +65,10 @@ class SoftmaxClassifier:
     def predict(self, x_train):
        """
        Predicts the class labels for the given input data.
-
+
        Args:
            x_train (np.ndarray): Input data for which to predict class labels, shape (num_samples, Nval).
-
+
        Returns:
            np.ndarray: Predicted class labels for the input data.
        """
foscat/Spline1D.py CHANGED
@@ -1,42 +1,92 @@
+import numpy as np
 
 class Spline1D:
     def __init__(self, nodes, degree=3):
+        """
+        Initializes the Spline1D instance.
+
+        Parameters:
+        - nodes (int): The number of nodes in the spline.
+        - degree (int): The degree of the spline. Default is 3.
+        """
         self.degree = degree
         self.nodes = nodes
-        self.norm = [0] * (self.degree + 1)
-        for i in range(self.degree + 1):
-            self.norm[i] = pow(-1, i) * (self.degree + 1) / (self._fact_spline(self.degree + 1 - i) * self._fact_spline(i))
-
-    def _fact_spline(self, x):
-        if x <= 1:
-            return 1
-        return x * self._fact_spline(x - 1)
-
-    def yplus_spline1d(self, x):
-        if x < 0.0:
-            return 0.0
-        if self.degree == 0:
-            if x == 0.0:
-                return 0.5
-            else:
-                return 1.0
-        return pow(x, self.degree)
-
-    def calculate(self, x):
-        y = [0] * self.nodes
-        for i in range(self.nodes):
-            tmp = 0
-            tx = (self.nodes - 1) * x - i
-            if x < 0:
-                tx = -i
-            if x > 1.0:
-                tx = (self.nodes - 1) - i
-            for j in range(self.degree + 1):
-                tmp += self.norm[j] * self.yplus_spline1d(tx - j + (self.degree + 1) / 2)
-            if tmp < 0:
-                tmp = 0.0
-            y[i] += tmp
-        total = sum(y)
-        y = [yi / total for yi in y]
-        return y
+
+
+    def cubic_spline_function(self,x):
+        """
+        Evaluate the cubic spline basis function.
+
+        Args:
+            x (float or array): Input value(s) to evaluate the spline basis function.
+
+        Returns:
+            float or array: Result of the cubic spline basis function.
+        """
+        return -2 * x**3 + 3 * x**2
+
+
+    def eval(self,x):
+        """
+        Compute a 3rd-degree cubic spline with 4-point support.
+
+        Args:
+            x (float or array): Input value(s) to compute the spline.
+
+        Returns:
+            indices (array): Indices of the spline support points.
+            coefficients (array): Normalized spline coefficients.
+        """
+        N=self.nodes
+
+        if isinstance(x, float):
+            # Single scalar input
+            base_idx = int(x * (N-1))
+            indices = np.zeros([4], dtype="int")
+            coefficients = np.zeros([4])
+        else:
+            # Array input
+            base_idx = (x * (N-1)).astype("int")
+            indices = np.zeros([4, x.shape[0]], dtype="int")
+            coefficients = np.zeros([4, x.shape[0]])
+
+        # Compute the fractional part of the input
+        fractional_part = x * (N-1) - base_idx
+
+        # Compute spline coefficients for 4 support points
+        coefficients[3] = self.cubic_spline_function(fractional_part / 2) / 2
+        coefficients[2] = self.cubic_spline_function(0.5 + fractional_part / 2) / 2
+        coefficients[1] = self.cubic_spline_function(1 - fractional_part / 2) / 2
+        coefficients[0] = self.cubic_spline_function(0.5 - fractional_part / 2) / 2
+
+        # Assign indices for the support points
+        indices[3] = base_idx + 3
+        indices[2] = base_idx + 2
+        indices[1] = base_idx + 1
+        indices[0] = base_idx
+
+        # Handle boundary conditions
+        if isinstance(x, float):
+            if indices[0] == 0:
+                indices[0] = 1
+            if indices[1] == 0:
+                indices[1] = 1
+            if indices[2] == N + 1:
+                indices[2] = N
+            if indices[3] == N + 1:
+                indices[3] = N
+            if indices[3] == N + 2:
+                indices[3] = N
+        else:
+            indices[0, indices[0] == 0] = 1
+            indices[1, indices[1] == 0] = 1
+            indices[2, indices[2] >= N + 1] = N
+            indices[3, indices[3] >= N + 1] = N
+
+        # Adjust indices to start from 0
+        indices = indices - 1
+        # Square coefficients and normalize
+        coefficients = coefficients * coefficients
+        coefficients /= np.sum(coefficients, axis=0)
 
+        return indices, coefficients
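The rewritten `Spline1D` replaces the old `calculate` method, which returned one weight per node, with `eval`, which returns for each input position the indices of four support nodes together with squared-and-normalized cubic-spline coefficients. A minimal sketch of how the new interface might be used (the node count and sample positions below are arbitrary choices):

```python
# Hypothetical usage of the rewritten Spline1D interface.
import numpy as np

from foscat.Spline1D import Spline1D

spline = Spline1D(nodes=8)              # 8 nodes spanning the [0, 1] interval
x = np.linspace(0.0, 1.0, 5)            # positions to evaluate

indices, coefficients = spline.eval(x)  # both arrays have shape (4, 5)
print(coefficients.sum(axis=0))         # each column sums to 1 after normalization

# Interpolate arbitrary node values using the returned sparse weights.
node_values = np.random.randn(8)
y = np.sum(node_values[indices] * coefficients, axis=0)
```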