foscat 3.0.9__tar.gz → 3.0.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. {foscat-3.0.9 → foscat-3.0.11}/PKG-INFO +7 -1
  2. {foscat-3.0.9 → foscat-3.0.11}/setup.py +1 -1
  3. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/FoCUS.py +90 -0
  4. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat_cov.py +120 -16
  5. {foscat-3.0.9 → foscat-3.0.11}/src/foscat.egg-info/PKG-INFO +7 -1
  6. {foscat-3.0.9 → foscat-3.0.11}/README.md +0 -0
  7. {foscat-3.0.9 → foscat-3.0.11}/setup.cfg +0 -0
  8. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/CircSpline.py +0 -0
  9. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/GetGPUinfo.py +0 -0
  10. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/Softmax.py +0 -0
  11. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/Spline1D.py +0 -0
  12. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/Synthesis.py +0 -0
  13. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/__init__.py +0 -0
  14. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/backend.py +0 -0
  15. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/loss_backend_tens.py +0 -0
  16. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/loss_backend_torch.py +0 -0
  17. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat.py +0 -0
  18. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat1D.py +0 -0
  19. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat2D.py +0 -0
  20. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat_cov1D.py +0 -0
  21. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat_cov2D.py +0 -0
  22. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat_cov_map.py +0 -0
  23. {foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat_cov_map2D.py +0 -0
  24. {foscat-3.0.9 → foscat-3.0.11}/src/foscat.egg-info/SOURCES.txt +0 -0
  25. {foscat-3.0.9 → foscat-3.0.11}/src/foscat.egg-info/dependency_links.txt +0 -0
  26. {foscat-3.0.9 → foscat-3.0.11}/src/foscat.egg-info/requires.txt +0 -0
  27. {foscat-3.0.9 → foscat-3.0.11}/src/foscat.egg-info/top_level.txt +0 -0
{foscat-3.0.9 → foscat-3.0.11}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: foscat
-Version: 3.0.9
+Version: 3.0.11
 Summary: Generate synthetic Healpix or 2D data using Cross Scattering Transform
 Home-page: https://github.com/jmdelouis/FOSCAT
 Author: Jean-Marc DELOUIS
@@ -9,6 +9,12 @@ Maintainer: Theo Foulquier
 Maintainer-email: theo.foulquier@ifremer.fr
 License: MIT
 Keywords: Scattering transform,Component separation,denoising
+Requires-Dist: imageio
+Requires-Dist: imagecodecs
+Requires-Dist: matplotlib
+Requires-Dist: numpy
+Requires-Dist: tensorflow
+Requires-Dist: healpy
 
 Utilize the Cross Scattering Transform (described in https://arxiv.org/abs/2207.12527) to synthesize Healpix or 2D data that is suitable for component separation purposes, such as denoising.
 A demo package for this process can be found at https://github.com/jmdelouis/FOSCAT_DEMO.

{foscat-3.0.9 → foscat-3.0.11}/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='foscat',
-    version='3.0.9',
+    version='3.0.11',
     description='Generate synthetic Healpix or 2D data using Cross Scattering Transform' ,
     long_description='Utilize the Cross Scattering Transform (described in https://arxiv.org/abs/2207.12527) to synthesize Healpix or 2D data that is suitable for component separation purposes, such as denoising. \n A demo package for this process can be found at https://github.com/jmdelouis/FOSCAT_DEMO. \n Complete doc can be found at https://foscat-documentation.readthedocs.io/en/latest/index.html. \n\n List of developers : J.-M. Delouis, T. Foulquier, L. Mousset, T. Odaka, F. Paul, E. Allys ' ,
     license='MIT',
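
The six new Requires-Dist entries that appear in both PKG-INFO files are the metadata setuptools emits from a runtime-dependency declaration. The corresponding install_requires lines of setup.py fall outside the hunk shown here, so the following is only a sketch of the kind of declaration that would produce them, not the package's actual setup.py:

    from setuptools import setup

    setup(
        name='foscat',
        version='3.0.11',
        # Runtime dependencies matching the new Requires-Dist metadata above.
        install_requires=[
            'imageio',
            'imagecodecs',
            'matplotlib',
            'numpy',
            'tensorflow',
            'healpy',
        ],
    )

With dependencies declared this way, installing foscat 3.0.11 from a registry pulls the six libraries in automatically, whereas the 3.0.9 metadata left them to be installed by hand.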

{foscat-3.0.9 → foscat-3.0.11}/src/foscat/FoCUS.py
@@ -126,6 +126,8 @@ class FoCUS:
 
         self.ww_Real = {}
         self.ww_Imag = {}
+        self.ww_CNN_Transpose = {}
+        self.ww_CNN = {}
 
         wwc=np.zeros([KERNELSZ**2,l_NORIENT]).astype(all_type)
         wws=np.zeros([KERNELSZ**2,l_NORIENT]).astype(all_type)
@@ -271,6 +273,8 @@ class FoCUS:
            self.nest2R4[lout]=None
            self.inv_nest2R[lout]=None
            self.remove_border[lout]=None
+           self.ww_CNN_Transpose[lout]=None
+           self.ww_CNN[lout]=None
 
        self.loss={}
 
@@ -296,7 +300,92 @@ class FoCUS:
         res=x
         res[idx]=y[idx]
         return(res)
+
+    # ---------------------------------------------−---------
+    # make the CNN work: index reprojection of the kernel on HEALPix
+
+    def init_CNN_index(self,nside,transpose=False):
+        l_kernel=int(self.KERNELSZ*self.KERNELSZ)
+        weights=self.backend.bk_cast(np.ones([12*nside*nside*l_kernel],dtype='float'))
+        try:
+            if transpose:
+                indices=np.load('%s/FOSCAT_%s_W%d_%d_%d_CNN_Transpose.npy'%(self.TEMPLATE_PATH,TMPFILE_VERSION,l_kernel,self.NORIENT,nside))
+            else:
+                indices=np.load('%s/FOSCAT_%s_W%d_%d_%d_CNN.npy'%(self.TEMPLATE_PATH,TMPFILE_VERSION,l_kernel,self.NORIENT,nside))
+        except:
+            to,po=hp.pix2ang(nside,np.arange(12*nside*nside),nest=True)
+            x,y,z=hp.pix2vec(nside,np.arange(12*nside*nside),nest=True)
+
+            idx=np.argsort((x-1.0)**2+y**2+z**2)[0:l_kernel]
+            tc,pc=hp.pix2ang(nside,idx,nest=True)
+
+            indices=np.zeros([12*nside*nside,l_kernel,2],dtype='int')
+            for k in range(12*nside*nside):
+                if k%(nside*nside)==0:
+                    print('Pre-compute nside=%6d %.2f%%'%(nside,100*k/(12*nside*nside)))
+
+                rot=[po[k]/np.pi*180.0,90+(-to[k])/np.pi*180.0]
+                r=hp.Rotator(rot=rot).get_inverse()
+                # get the coordinates of the kernel footprint rotated to pixel k
+                ty,tx=r(tc,pc)
+
+                indices[k,:,0]=k*l_kernel+np.arange(l_kernel).astype('int')
+                indices[k,:,1]=hp.ang2pix(nside,ty,tx,nest=True)
+            if transpose:
+                indices[:,:,1]=indices[:,:,1]//4
+                np.save('%s/FOSCAT_%s_W%d_%d_%d_CNN_Transpose.npy'%(self.TEMPLATE_PATH,TMPFILE_VERSION,l_kernel,self.NORIENT,nside),indices)
+                print('Write %s/FOSCAT_%s_W%d_%d_%d_CNN_Transpose.npy'%(self.TEMPLATE_PATH,TMPFILE_VERSION,l_kernel,self.NORIENT,nside))
+            else:
+                np.save('%s/FOSCAT_%s_W%d_%d_%d_CNN.npy'%(self.TEMPLATE_PATH,TMPFILE_VERSION,l_kernel,self.NORIENT,nside),indices)
+                print('Write %s/FOSCAT_%s_W%d_%d_%d_CNN.npy'%(self.TEMPLATE_PATH,TMPFILE_VERSION,l_kernel,self.NORIENT,nside))
+
+
+        if transpose:
+            self.ww_CNN_Transpose[nside]=self.backend.bk_SparseTensor(indices.reshape(12*nside*nside*l_kernel,2),
+                                                                      weights,[12*nside*nside*l_kernel,
+                                                                               3*nside*nside])
+        else:
+            self.ww_CNN[nside]=self.backend.bk_SparseTensor(indices.reshape(12*nside*nside*l_kernel,2),
+                                                            weights,[12*nside*nside*l_kernel,
+                                                                     12*nside*nside])
+
+    # ---------------------------------------------−---------
+    def healpix_layer_transpose(self,im,ww):
+        nside=2*int(np.sqrt(im.shape[0]//12))
+        l_kernel=self.KERNELSZ*self.KERNELSZ
+
+        if im.shape[1]!=ww.shape[1]:
+            print('Weights channels should be equal to the input image channels')
+            return -1
+
+        if self.ww_CNN_Transpose[nside] is None:
+            self.init_CNN_index(nside,transpose=True)
+
+        tmp=self.backend.bk_sparse_dense_matmul(self.ww_CNN_Transpose[nside],im)
+
+        density=self.backend.bk_reshape(tmp,[12*nside*nside,l_kernel*im.shape[1]])
+
+        return self.backend.bk_matmul(density,self.backend.bk_reshape(ww,[l_kernel*im.shape[1],ww.shape[2]]))
+
+    # ---------------------------------------------−---------
+    # ---------------------------------------------−---------
+    def healpix_layer(self,im,ww):
+        nside=int(np.sqrt(im.shape[0]//12))
+        l_kernel=self.KERNELSZ*self.KERNELSZ
+
+        if im.shape[1]!=ww.shape[1]:
+            print('Weights channels should be equal to the input image channels')
+            return -1
+
+        if self.ww_CNN[nside] is None:
+            self.init_CNN_index(nside,transpose=False)
+
+        tmp=self.backend.bk_sparse_dense_matmul(self.ww_CNN[nside],im)
+        density=self.backend.bk_reshape(tmp,[12*nside*nside,l_kernel*im.shape[1]])
 
+        return self.backend.bk_matmul(density,self.backend.bk_reshape(ww,[l_kernel*im.shape[1],ww.shape[2]]))
+    # ---------------------------------------------−---------
+
 
     # ---------------------------------------------−---------
     def get_rank(self):
         return(self.rank)
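
The two new layers implement a convolution directly on the HEALPix sphere: init_CNN_index precomputes, for every pixel, the KERNELSZ x KERNELSZ footprint of a reference kernel rotated to that pixel's position and stores it as a sparse 0/1 matrix, and healpix_layer applies it as a sparse gather followed by a dense matmul with the learnable weights. A minimal NumPy sketch of that forward step, assuming an indices table equivalent to column 1 of the array built by init_CNN_index (the function name and shapes below are illustrative, not part of foscat's API):

    import numpy as np

    def healpix_conv_sketch(im, ww, indices):
        # im:      [12*nside**2, n_in]       input map(s), NESTED ordering
        # ww:      [l_kernel, n_in, n_out]   learnable kernel weights
        # indices: [12*nside**2, l_kernel]   neighbour pixel ids per output pixel
        npix, n_in = im.shape
        l_kernel = indices.shape[1]
        patches = im[indices]                               # gather: [npix, l_kernel, n_in]
        patches = patches.reshape(npix, l_kernel * n_in)    # same layout as the sparse matmul output
        return patches @ ww.reshape(l_kernel * n_in, -1)    # [npix, n_out]

healpix_layer_transpose runs the same gather on an index table whose neighbour ids have been divided by 4, so every coarse parent pixel is read by the footprints of its four children: the sparse operator maps a 12*(nside/2)**2 input onto a 12*nside**2 grid, and the layer acts as an upsampling (transposed) convolution.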
@@ -882,6 +971,7 @@ class FoCUS:
             return tmp
 
         return wr,wi,ws,tmp
+
 
     # ---------------------------------------------−---------
     # Compute x [....,a,....] to [....,a*a,....]

{foscat-3.0.9 → foscat-3.0.11}/src/foscat/scat_cov.py
@@ -1491,8 +1491,100 @@ class funct(FOC.FoCUS):
 
         return scat_cov(mS0, mP00, mC01, mC11, s1=mS1,c10=mC10,backend=self.backend), \
                scat_cov(sS0, sP00, sC01, sC11, s1=sS1,c10=sC10,backend=self.backend)
+
+    # compute local direction to make the statistical analysis more efficient
+    def stat_cfft(self,im,upscale=False,smooth_scale=0):
+        tmp=im
+        if upscale:
+            l_nside=int(np.sqrt(tmp.shape[1]//12))
+            tmp=self.up_grade(tmp,l_nside*2,axis=1)
+
+        l_nside=int(np.sqrt(tmp.shape[1]//12))
+        nscale=int(np.log(l_nside)/np.log(2))
+        cmat={}
+        cmat2={}
+        for k in range(nscale):
+            sim=self.backend.bk_abs(self.convol(tmp,axis=1))
+            cc=self.backend.bk_reduce_mean(sim[:,:,0]-sim[:,:,2],0)
+            ss=self.backend.bk_reduce_mean(sim[:,:,1]-sim[:,:,3],0)
+            for m in range(smooth_scale):
+                if cc.shape[0]>12:
+                    cc=self.ud_grade_2(self.smooth(cc))
+                    ss=self.ud_grade_2(self.smooth(ss))
+            if cc.shape[0]!=tmp.shape[0]:
+                ll_nside=int(np.sqrt(tmp.shape[1]//12))
+                cc=self.up_grade(cc,ll_nside)
+                ss=self.up_grade(ss,ll_nside)
+            phase=np.fmod(np.arctan2(ss.numpy(),cc.numpy())+2*np.pi,2*np.pi)
+            iph=(4*phase/(2*np.pi)).astype('int')
+            alpha=(4*phase/(2*np.pi)-iph)
+            mat=np.zeros([sim.shape[1],4*4])
+            lidx=np.arange(sim.shape[1])
+            for l in range(4):
+                mat[lidx,4*((l+iph)%4)+l]=1.0-alpha
+                mat[lidx,4*((l+iph+1)%4)+l]=alpha
+
+            cmat[k]=self.backend.bk_cast(mat.astype('complex64'))
+
+            mat2=np.zeros([k+1,sim.shape[1],4,4*4])
+
+            for k2 in range(k+1):
+                tmp2=self.backend.bk_repeat(sim,4,axis=-1)
+                sim2=self.backend.bk_reduce_sum(self.backend.bk_reshape(mat.reshape(1,mat.shape[0],16)*tmp2,
+                                                                        [sim.shape[0],cmat[k].shape[0],4,4]),2)
+                sim2=self.backend.bk_abs(self.convol(sim2,axis=1))
+
+                cc=self.smooth(self.backend.bk_reduce_mean(sim2[:,:,0]-sim2[:,:,2],0))
+                ss=self.smooth(self.backend.bk_reduce_mean(sim2[:,:,1]-sim2[:,:,3],0))
+                for m in range(smooth_scale):
+                    if cc.shape[0]>12:
+                        cc=self.ud_grade_2(self.smooth(cc))
+                        ss=self.ud_grade_2(self.smooth(ss))
+                if cc.shape[0]!=sim.shape[1]:
+                    ll_nside=int(np.sqrt(sim.shape[1]//12))
+                    cc=self.up_grade(cc,ll_nside)
+                    ss=self.up_grade(ss,ll_nside)
+
+                phase=np.fmod(np.arctan2(ss.numpy(),cc.numpy())+2*np.pi,2*np.pi)
+                """
+                for k in range(4):
+                    hp.mollview(np.fmod(phase+np.pi,2*np.pi),cmap='jet',nest=True,hold=False,sub=(2,2,1+k))
+                plt.show()
+                exit(0)
+                """
+                iph=(4*phase/(2*np.pi)).astype('int')
+                alpha=(4*phase/(2*np.pi)-iph)
+                lidx=np.arange(sim.shape[1])
+                for m in range(4):
+                    for l in range(4):
+                        mat2[k2,lidx,m,4*((l+iph[:,m])%4)+l]=1.0-alpha[:,m]
+                        mat2[k2,lidx,m,4*((l+iph[:,m]+1)%4)+l]=alpha[:,m]
+
+            cmat2[k]=self.backend.bk_cast(mat2.astype('complex64'))
+            """
+            tmp=self.backend.bk_repeat(sim[0],4,axis=1)
+            sim2=self.backend.bk_reduce_sum(self.backend.bk_reshape(mat*tmp,[12*nside**2,4,4]),1)
+
+            cc2=(sim2[:,0]-sim2[:,2])
+            ss2=(sim2[:,1]-sim2[:,3])
+            phase2=np.fmod(np.arctan2(ss2.numpy(),cc2.numpy())+2*np.pi,2*np.pi)
+
+            plt.figure()
+            hp.mollview(phase,cmap='jet',nest=True,hold=False,sub=(2,2,1))
+            hp.mollview(np.fmod(phase2+np.pi,2*np.pi),cmap='jet',nest=True,hold=False,sub=(2,2,2))
+            plt.figure()
+            for k in range(4):
+                hp.mollview((sim[0,:,k]).numpy().real,cmap='jet',nest=True,hold=False,sub=(2,4,1+k),min=-10,max=10)
+                hp.mollview((sim2[:,k]).numpy().real,cmap='jet',nest=True,hold=False,sub=(2,4,5+k),min=-10,max=10)
+
+            plt.show()
+            """
+
+            if k<l_nside-1:
+                tmp=self.ud_grade_2(tmp,axis=1)
+        return cmat,cmat2
 
-    def eval(self, image1, image2=None, mask=None, norm=None, Auto=True, calc_var=False):
+    def eval(self, image1, image2=None, mask=None, norm=None, Auto=True, calc_var=False,cmat=None,cmat2=None):
         """
         Calculates the scattering correlations for a batch of images. Mean are done over pixels.
         mean of modulus:
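
stat_cfft estimates a local orientation at every pixel from the four oriented wavelet channels (the cos-like difference of channels 0 and 2 and the sin-like difference of channels 1 and 3), turns the resulting phase into a per-pixel interpolation matrix, and returns one matrix per scale (cmat) plus a second, per-(scale, sub-scale) set (cmat2) used later for the C01 terms. The heart of the construction is the 4x4 interpolation matrix; a standalone sketch, assuming NORIENT = 4 and a phase map already reduced to [0, 2*pi) (the function name is illustrative):

    import numpy as np

    def orientation_matrix_sketch(phase, norient=4):
        # phase: [npix] local direction per pixel, in [0, 2*pi)
        npix = phase.shape[0]
        frac = norient * phase / (2 * np.pi)
        iph = frac.astype(int)        # lower orientation bin
        alpha = frac - iph            # linear weight toward the next bin
        mat = np.zeros([npix, norient * norient])
        lidx = np.arange(npix)
        for l in range(norient):
            # output orientation l blends input bins (l+iph) and (l+iph+1)
            mat[lidx, norient * ((l + iph) % norient) + l] = 1.0 - alpha
            mat[lidx, norient * ((l + iph + 1) % norient) + l] = alpha
        return mat                    # [npix, 16]; cast to complex64 before use

cmat2 repeats this construction after re-orienting the first-stage convolution, giving, for each scale k, an array of shape [k+1, npix, 4, 16] so that every (j3, j2) pair and every first orientation carries its own matrix.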
@@ -1677,6 +1769,11 @@ class funct(FOC.FoCUS):
                 ####### S1 and P00
                 ### Make the convolution I1 * Psi_j3
                 conv1 = self.convol(I1, axis=1) # [Nbatch, Npix_j3, Norient3]
+
+                if cmat is not None:
+                    tmp2=self.backend.bk_repeat(conv1,4,axis=-1)
+                    conv1=self.backend.bk_reduce_sum(self.backend.bk_reshape(cmat[j3]*tmp2,[1,cmat[j3].shape[0],4,4]),2)
+
                 ### Take the module M1 = |I1 * Psi_j3|
                 M1_square = conv1*self.backend.bk_conjugate(conv1) # [Nbatch, Npix_j3, Norient3]
                 M1 = self.backend.bk_L1(M1_square) # [Nbatch, Npix_j3, Norient3]
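
Inside eval, the cmat matrices are applied to the oriented convolution before the modulus is taken: the four orientation channels are repeated, weighted by the per-pixel 4x4 matrix and summed, which rotates the orientation frame so that it follows the local direction estimated by stat_cfft. A NumPy sketch of this step, assuming Nbatch = 1 (as the reshape above does) and that bk_repeat behaves like np.repeat (names are illustrative):

    import numpy as np

    def apply_cmat_sketch(conv, cmat_j3, norient=4):
        # conv:    [1, npix, norient]        oriented convolution at scale j3
        # cmat_j3: [npix, norient*norient]   per-pixel interpolation matrix
        nbatch, npix, _ = conv.shape
        tmp = np.repeat(conv, norient, axis=-1)                  # [1, npix, 16]
        tmp = (cmat_j3[None, :, :] * tmp).reshape(nbatch, npix, norient, norient)
        return tmp.sum(axis=2)                                   # [1, npix, norient]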
@@ -1749,6 +1846,9 @@ class funct(FOC.FoCUS):
             else: # Cross
                 ### Make the convolution I2 * Psi_j3
                 conv2 = self.convol(I2, axis=1) # [Nbatch, Npix_j3, Norient3]
+                if cmat is not None:
+                    tmp2=self.backend.bk_repeat(conv2,4,axis=-1)
+                    conv2=self.backend.bk_reduce_sum(self.backend.bk_reshape(cmat[j3]*tmp2,[1,cmat[j3].shape[0],4,4]),2)
                 ### Take the module M2 = |I2 * Psi_j3|
                 M2_square = conv2*self.backend.bk_conjugate(conv2) # [Nbatch, Npix_j3, Norient3]
                 M2 = self.backend.bk_L1(M2_square) # [Nbatch, Npix_j3, Norient3]
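
Taken together, the new arguments keep the re-orientation optional: stat_cfft is run once to build the matrices, which are then handed to eval for both the auto and the cross paths shown above. A hypothetical end-to-end sketch (the constructor parameters of scat_cov.funct are not part of this diff and are only indicative):

    import numpy as np
    import foscat.scat_cov as sc

    f = sc.funct(NORIENT=4, KERNELSZ=3)                         # indicative parameters only
    im1 = np.random.randn(1, 12 * 32 * 32).astype('float32')    # one HEALPix map, nside=32, NESTED
    im2 = np.random.randn(1, 12 * 32 * 32).astype('float32')

    cmat, cmat2 = f.stat_cfft(im1, upscale=False, smooth_scale=0)
    auto  = f.eval(im1, cmat=cmat, cmat2=cmat2)
    cross = f.eval(im1, image2=im2, cmat=cmat, cmat2=cmat2)

When cmat and cmat2 are left at their default None, the code path is unchanged with respect to 3.0.9.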
@@ -1852,19 +1952,19 @@ class funct(FOC.FoCUS):
                     ### C01_auto = < (I1 * Psi)_j3 x (|I1 * Psi_j2| * Psi_j3)^* >_pix
                     if not cross:
                         if calc_var:
-                            c01,vc01 = self._compute_C01(j2,
+                            c01,vc01 = self._compute_C01(j2,j3,
                                                          conv1,
                                                          vmask,
                                                          M1_dic,
                                                          M1convPsi_dic,
-                                                         calc_var=True) # [Nbatch, Nmask, Norient3, Norient2]
+                                                         calc_var=True,cmat2=cmat2) # [Nbatch, Nmask, Norient3, Norient2]
                         else:
-                            c01 = self._compute_C01(j2,
+                            c01 = self._compute_C01(j2,j3,
                                                     conv1,
                                                     vmask,
                                                     M1_dic,
                                                     M1convPsi_dic,
-                                                    return_data=return_data) # [Nbatch, Nmask, Norient3, Norient2]
+                                                    return_data=return_data,cmat2=cmat2) # [Nbatch, Nmask, Norient3, Norient2]
 
                         if return_data:
                             if C01[j3] is None:
@@ -1892,31 +1992,31 @@ class funct(FOC.FoCUS):
                     ### C10_cross = < (I2 * Psi)_j3 x (|I1 * Psi_j2| * Psi_j3)^* >_pix
                     else:
                         if calc_var:
-                            c01,vc01 = self._compute_C01(j2,
+                            c01,vc01 = self._compute_C01(j2,j3,
                                                          conv1,
                                                          vmask,
                                                          M2_dic,
                                                          M2convPsi_dic,
-                                                         calc_var=True)
-                            c10,vc10 = self._compute_C01(j2,
+                                                         calc_var=True,cmat2=cmat2)
+                            c10,vc10 = self._compute_C01(j2,j3,
                                                          conv2,
                                                          vmask,
                                                          M1_dic,
                                                          M1convPsi_dic,
-                                                         calc_var=True)
+                                                         calc_var=True,cmat2=cmat2)
                         else:
-                            c01 = self._compute_C01(j2,
+                            c01 = self._compute_C01(j2,j3,
                                                     conv1,
                                                     vmask,
                                                     M2_dic,
                                                     M2convPsi_dic,
-                                                    return_data=return_data)
-                            c10 = self._compute_C01(j2,
+                                                    return_data=return_data,cmat2=cmat2)
+                            c10 = self._compute_C01(j2,j3,
                                                     conv2,
                                                     vmask,
                                                     M1_dic,
                                                     M1convPsi_dic,
-                                                    return_data=return_data)
+                                                    return_data=return_data,cmat2=cmat2)
 
                         if return_data:
                             if C01[j3] is None:
@@ -2077,11 +2177,12 @@ class funct(FOC.FoCUS):
         self.P2_dic = None
         return
 
-    def _compute_C01(self, j2, conv,
+    def _compute_C01(self, j2, j3,conv,
                      vmask, M_dic,
                      MconvPsi_dic,
                      calc_var=False,
-                     return_data=False):
+                     return_data=False,
+                     cmat2=None):
         """
         Compute the C01 coefficients (auto or cross)
         C01 = < (Ia * Psi)_j3 x (|Ib * Psi_j2| * Psi_j3)^* >_pix
@@ -2094,7 +2195,10 @@ class funct(FOC.FoCUS):
         ### Compute |I1 * Psi_j2| * Psi_j3 = M1_j2 * Psi_j3
         # Warning: M1_dic[j2] is already at j3 resolution [Nbatch, Npix_j3, Norient3]
         MconvPsi = self.convol(M_dic[j2], axis=1) # [Nbatch, Npix_j3, Norient3, Norient2]
-
+        if cmat2 is not None:
+            tmp2=self.backend.bk_repeat(MconvPsi,4,axis=-1)
+            MconvPsi=self.backend.bk_reduce_sum(self.backend.bk_reshape(cmat2[j3][j2]*tmp2,[1,cmat2[j3].shape[1],4,4,4]),3)
+
         # Store it so we can use it in C11 computation
         MconvPsi_dic[j2] = MconvPsi # [Nbatch, Npix_j3, Norient3, Norient2]
 
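
In the C01 path the second set of matrices comes into play: MconvPsi carries both the j3 orientation of the outer convolution and the j2 orientation of the inner modulus, so cmat2[j3][j2] has one extra orientation axis and the contraction runs over a [..., 4, 4, 4] reshape instead of [..., 4, 4]. A shape-level NumPy sketch, again assuming Nbatch = 1 and bk_repeat behaving like np.repeat (names are illustrative):

    import numpy as np

    def apply_cmat2_sketch(mconv, cmat2_j3_j2, norient=4):
        # mconv:       [1, npix, norient3, norient2]     |Ib * Psi_j2| * Psi_j3
        # cmat2_j3_j2: [npix, norient3, norient*norient]
        nbatch, npix, n3, _ = mconv.shape
        tmp = np.repeat(mconv, norient, axis=-1)         # [1, npix, 4, 16]
        tmp = (cmat2_j3_j2[None] * tmp).reshape(nbatch, npix, n3, norient, norient)
        return tmp.sum(axis=3)                           # [1, npix, 4, 4]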

{foscat-3.0.9 → foscat-3.0.11}/src/foscat.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: foscat
-Version: 3.0.9
+Version: 3.0.11
 Summary: Generate synthetic Healpix or 2D data using Cross Scattering Transform
 Home-page: https://github.com/jmdelouis/FOSCAT
 Author: Jean-Marc DELOUIS
@@ -9,6 +9,12 @@ Maintainer: Theo Foulquier
 Maintainer-email: theo.foulquier@ifremer.fr
 License: MIT
 Keywords: Scattering transform,Component separation,denoising
+Requires-Dist: imageio
+Requires-Dist: imagecodecs
+Requires-Dist: matplotlib
+Requires-Dist: numpy
+Requires-Dist: tensorflow
+Requires-Dist: healpy
 
 Utilize the Cross Scattering Transform (described in https://arxiv.org/abs/2207.12527) to synthesize Healpix or 2D data that is suitable for component separation purposes, such as denoising.
 A demo package for this process can be found at https://github.com/jmdelouis/FOSCAT_DEMO.