orpheus-npcf 0.1.11__cp39-cp39-macosx_14_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of orpheus-npcf might be problematic. Click here for more details.

orpheus/patchutils.py ADDED
@@ -0,0 +1,356 @@
1
+ # Here we collect some utils for mapping the a full-sky survey to a set of overlapping patches
2
+ # In the middle term much of this functionality should be included in the orpheus code
3
+
4
+ from astropy.coordinates import SkyCoord
5
+ from healpy import ang2pix, pix2vec, nside2pixarea, nside2resol, query_disc, Rotator, nside2npix
6
+ import numpy as np
7
+ from pathlib import Path
8
+ import pickle
9
+ import os
10
+ import sys
11
+ from time import time
12
+
13
+ from sklearn.cluster import KMeans
14
+
15
def pickle_save(data, filename):
    """Serialize *data* to *filename* with pickle, creating parent directories as needed.

    Errors are reported to stdout rather than raised (best-effort saving).
    """

    target = Path(filename)
    target.parent.mkdir(parents=True, exist_ok=True)

    try:
        with open(filename, 'wb') as handle:
            pickle.dump(data, handle)
    except Exception as e:
        print(f"An error occurred while saving the dictionary: {e}")
25
+
26
def pickle_load(filename):
    """Deserialize and return the pickled object stored in *filename*.

    Returns ``None`` (after printing a message) if the file cannot be read or
    unpickled. The original implementation swallowed every exception with a
    bare ``pass``, which silently returned ``None`` and hid the failure; we
    keep the best-effort contract but report the error, mirroring the style
    of ``pickle_save``.
    """

    try:
        with open(filename, 'rb') as file:
            return pickle.load(file)
    except Exception as e:
        print(f"An error occurred while loading the dictionary: {e}")
        return None
34
+
35
def frompatchindices_preparerot(index, patchindices, ra, dec, rotsignflip):
    """Collect the galaxies of patch *index* and rotate them towards the origin.

    Returns the indices of the extended patch (inner + buffer), a boolean mask
    flagging the inner region, the rotation angle, the rotated (ra, dec), and
    the polarization rotation factors as produced by ``toorigin``.
    """

    patch = patchindices["patches"][index]
    inds_inner = patch["inner"]
    inds_outer = patch["outer"]
    inds_extpatch = np.append(inds_inner, inds_outer)
    ninner = len(inds_inner)
    # Inner galaxies come first in the concatenation
    patch_isinner = np.arange(len(inds_extpatch)) < ninner

    # The rotation angle is fixed here from the stored patch center (rather
    # than recomputed from the data) because this is required when computing
    # patches across multiple catalogs: the patch centers are by definition
    # the c.o.m. of the joint catalog. For a single catalog this does not
    # matter. The signs follow the healpy (theta, phi) conventions -- see
    # the toorigin function for details.
    center = patchindices['info']['patchcenters'][index]
    rotangle = [+center[0]*np.pi/180.,
                -center[1]*np.pi/180.]
    rotangle, ra_rot, dec_rot, rotangle_polars = toorigin(
        ra[inds_extpatch],
        dec[inds_extpatch],
        isinner=patch_isinner,
        rotangle=rotangle,
        inv=False,
        rotsignflip=rotsignflip,
        radec_units="deg")

    return inds_extpatch, patch_isinner, rotangle, ra_rot, dec_rot, rotangle_polars
60
+
61
def gen_cat_patchindices(ra_deg, dec_deg, npatches, patchextend_arcmin, nside_hash=128, verbose=False, method='kmeans_healpix',
                         kmeanshp_maxiter=1000, kmeanshp_tol=1e-10, kmeanshp_randomstate=42, healpix_nside=8):
    """ Decomposes a spherical catalog in ~equal-area patches with a buffer region

    Parameters
    ----------
    ra_deg: numpy.ndarray
        The ra of the catalog, given in units of degree.
    dec_deg: numpy.ndarray
        The dec of the catalog, given in units of degree.
    npatches: int
        The number of patches in which the catalog shall be decomposed.
    patchextend_arcmin: float
        The buffer region that extends around each patch, given in units of arcmin.
    nside_hash: int
        The healpix resolution used for hashing subareas of the patches.
    verbose: bool
        Flag setting on whether output is printed to the console.
    method: str
        One of 'kmeans_treecorr', 'kmeans_healpix' or 'healpix'.
    kmeanshp_maxiter, kmeanshp_tol, kmeanshp_randomstate:
        Parameters forwarded to sklearn's KMeans for method 'kmeans_healpix'.
    healpix_nside: int
        The healpix resolution used for method 'healpix'.

    Returns
    -------
    cat_patchindices: dict
        A dictionary containing information about the individual patches,
        as well as the galaxy indices that are assigned to the inner region
        and to the buffer region of each individual patch

    Notes
    -----
    Choosing a small value of nside_hash will result in a larger extension of
    the patches than necessary, while choosing a large value increases the
    runtime. A good compromise is to choose nside_hash such that its resolution
    is a few times smaller than the buffer region of the patches
    """

    def build_indexhash(arr):
        """Returns a hash for indices of repeated values in a 1D array"""
        sort_indices = np.argsort(arr)
        arr = np.asarray(arr)[sort_indices]
        vals, first_indices = np.unique(arr, return_index=True)
        indices = np.split(sort_indices, first_indices[1:])
        return {val: indices[elval] for elval, val in enumerate(vals)}

    def radec_to_thetaphi(ra, dec):
        """Map (ra, dec) in degree to healpix (theta, phi) in radian."""
        eq = SkyCoord(ra, dec, frame='galactic', unit='deg')
        l, b = eq.galactic.l.value, eq.galactic.b.value
        return np.radians(90. - b), np.radians(l)

    if verbose:
        print("Computing inner region of patches")
        t1 = time()

    # Resolve the treecorr dependency up front. BUGFIX: previously the
    # ImportError fallback switched `method` inside the already-entered
    # 'kmeans_treecorr' branch of an if/elif chain, so the kmeans_healpix
    # path was never executed and `patchinds` remained undefined.
    if method == 'kmeans_treecorr':
        try:
            import treecorr
        except ImportError:
            print('Treecorr not availbale...switching to patch creation via KMeans')
            method = 'kmeans_healpix'

    # Run treecorrs k-means implementation
    if method == 'kmeans_treecorr':
        cat = treecorr.Catalog(ra=ra_deg, dec=dec_deg,
                               ra_units="deg", dec_units="deg",
                               npatch=npatches)
        patchinds = cat.patch
    # Run standard k-means on catalog reduced to healpix pixels
    elif method == 'kmeans_healpix':
        # Step 1: Reduce discrete ra/dec to unique healpix pixels and transform those to 3D positions
        nside_kmeans = 2048  # Fixed for now; expected to work well for all reasonable cases.
        theta, phi = radec_to_thetaphi(ra_deg, dec_deg)
        hpx_inds = ang2pix(nside_kmeans, theta, phi)
        hpx_uinds = np.unique(hpx_inds)
        # Step 2: Run standard kmeans algorithm on the healpix pixels.
        # Each pixel carries the same (unity) weight, i.e. the patches have
        # approximately equal area while depth variations on patch-sized
        # scales are neglected -- sensible as the flat-sky approximation only
        # cares about the extent of the patches. If the patches are used as
        # Jackknife samples for an internal covariance estimate this choice
        # might need to be revisited.
        clust = KMeans(n_clusters=npatches,
                       init='k-means++',
                       n_init='auto',
                       max_iter=kmeanshp_maxiter,
                       tol=kmeanshp_tol,
                       verbose=0,
                       random_state=kmeanshp_randomstate,
                       copy_x=True,
                       algorithm='lloyd')
        X = np.array(pix2vec(nside=nside_kmeans, ipix=hpx_uinds, nest=False)).T
        clustinds = clust.fit_predict(X, y=None, sample_weight=None)
        # Step 3: Map the pixel centers back to the galaxy indices
        hashmap = np.vectorize({upix: center for upix, center in zip(hpx_uinds, clustinds)}.get)
        patchinds = hashmap(hpx_inds)
    # Simply assign to healpix pixel. Fast and stable, but patch areas might strongly vary in size.
    elif method == "healpix":
        # NOTE(review): here the patch labels are healpix pixel indices;
        # confirm that callers pass npatches >= nside2npix(healpix_nside)
        # so the per-patch loop below covers all labels.
        theta, phi = radec_to_thetaphi(ra_deg, dec_deg)
        patchinds = ang2pix(healpix_nside, theta, phi).astype(int)
    else:
        raise NotImplementedError

    if verbose:
        t2 = time()
        print("Took %.3f seconds"%(t2-t1))

    # Assign galaxy positions to healpix pixels
    if verbose:
        print("Mapping catalog to healpix grid")
        t1 = time()
    theta, phi = radec_to_thetaphi(ra_deg, dec_deg)
    cat_indices = ang2pix(nside_hash, theta, phi)
    if verbose:
        t2 = time()
        print("Took %.3f seconds"%(t2-t1))

    # Build a hash connecting the galaxies residing in each healpix pixel
    if verbose:
        t1 = time()
        print("Building index hash")
    cat_indhash = build_indexhash(cat_indices)
    if verbose:
        t2 = time()
        print("Took %.3f seconds"%(t2-t1))

    # Construct buffer region around patches
    if verbose:
        print("Building buffer around patches")
        t1 = time()
    _pixarea = nside2pixarea(nside_hash, degrees=True)
    _pixreso = nside2resol(nside_hash, arcmin=True)
    if method == 'kmeans_treecorr':
        _patchcenters = cat.patch_centers
    elif method == 'kmeans_healpix' or method == 'healpix':
        _patchcenters = np.array([[np.mean(ra_deg[patchinds==patchind]), np.mean(dec_deg[patchinds==patchind])]
                                  for patchind in range(npatches)])
    else:
        raise NotImplementedError

    cat_patchindices = {}
    cat_patchindices["info"] = {}
    cat_patchindices["info"]["patchcenters"] = _patchcenters
    cat_patchindices["info"]["patchareas"] = np.zeros(npatches, dtype=float)
    cat_patchindices["info"]["patch_ngalsinner"] = np.zeros(npatches, dtype=int)
    cat_patchindices["info"]["patch_ngalsouter"] = np.zeros(npatches, dtype=int)
    cat_patchindices["patches"] = {}
    # Extend the buffer by one pixel resolution so query_disc cannot miss
    # galaxies sitting close to a pixel border.
    ext_buffer = (patchextend_arcmin + _pixreso) * np.pi/180./60.
    for elpatch in range(npatches):
        if verbose:
            sys.stdout.write("\r%i/%i"%(elpatch+1, npatches))
        patchsel = patchinds == elpatch
        cat_patchindices["patches"][elpatch] = {}

        # Get indices of gals within inner patch
        galinds_inner = np.argwhere(patchsel).flatten().astype(int)

        # Find healpix pixels in extended patch
        patch_indices = np.unique(ang2pix(nside_hash, theta[patchsel], phi[patchsel]))
        extpatch_indices = set()
        for pix in patch_indices:
            extpatch_indices.update(query_disc(nside=nside_hash,
                                               vec=pix2vec(nside_hash, pix),
                                               radius=ext_buffer))

        # Assign galaxies to extended patch. Pixels of the extended patch
        # that contain no galaxy do not appear in the hash, hence the
        # targeted KeyError handling (previously a bare except).
        galinds_ext = set()
        for pix in extpatch_indices:
            try:
                galinds_ext.update(cat_indhash[pix])
            except KeyError:
                pass
        galinds_outer = np.array(list(galinds_ext - set(galinds_inner)), dtype=int)
        cat_patchindices["info"]["patchareas"][elpatch] = _pixarea*len(patch_indices)
        cat_patchindices["info"]["patch_ngalsinner"][elpatch] = len(galinds_inner)
        cat_patchindices["info"]["patch_ngalsouter"][elpatch] = len(galinds_outer)
        cat_patchindices["patches"][elpatch]["inner"] = galinds_inner
        cat_patchindices["patches"][elpatch]["outer"] = galinds_outer
    if verbose:
        t2 = time()
        print("Took %.3f seconds"%(t2-t1))

    return cat_patchindices
251
+
252
def toorigin(ras, decs, isinner=None, rotangle=None, inv=False, rotsignflip=False, radec_units="deg"):
    """ Rotates survey patch s.t. its center of mass lies in the origin.

    Parameters
    ----------
    ras, decs : numpy.ndarray
        Coordinates of the patch, in units of `radec_units`.
    isinner : numpy.ndarray of bool, optional
        Mask selecting the galaxies used to compute the rotation angle when
        `rotangle` is not given. Defaults to all galaxies.
    rotangle : sequence of two floats, optional
        Healpy rotation angles in radian; computed from the mean position of
        the `isinner` galaxies if not provided.
    inv : bool
        Whether to apply the inverse rotation.
    rotsignflip : bool
        Flips the sign of the polarization rotation phase.
    radec_units : str
        Either "deg" or "rad".

    Returns
    -------
    rotangle, ra_rot, dec_rot, rotangle_polars
        The rotation angles (radian), the rotated coordinates (in
        `radec_units`), and the complex phase factors rotating polarizations
        accordingly.
    """
    assert(radec_units in ["rad", "deg"])

    if isinner is None:
        isinner = np.ones(len(ras), dtype=bool)

    # Map (ra, dec) --> (theta, phi). BUGFIX: the original only defined the
    # radian coordinates for radec_units=="deg", so "rad" input crashed with
    # a NameError.
    if radec_units == "deg":
        decs_rad = decs*np.pi/180.
        ras_rad = ras*np.pi/180.
    else:
        decs_rad = decs
        ras_rad = ras
    # NOTE: theta = pi/2 + dec (not the standard colatitude pi/2 - dec);
    # the rotation angles prepared in frompatchindices_preparerot match
    # this sign convention.
    thetas = np.pi/2. + decs_rad
    phis = ras_rad

    # Compute rotation angle from the c.o.m. of the inner galaxies
    if rotangle is None:
        rotangle = [np.mean(phis[isinner]), np.pi/2.-np.mean(thetas[isinner])]
    thisrot = Rotator(rot=rotangle, deg=False, inv=inv)
    rotatedthetas, rotatedphis = thisrot(thetas, phis, inv=False)
    rotangle_polars = np.exp((-1)**rotsignflip*1J * 2 * thisrot.angle_ref(rotatedthetas, rotatedphis, inv=True))

    # Transform back to (ra, dec)
    ra_rot = rotatedphis
    dec_rot = rotatedthetas - np.pi/2.
    if radec_units == "deg":
        dec_rot *= 180./np.pi
        ra_rot *= 180./np.pi

    return rotangle, ra_rot, dec_rot, rotangle_polars
282
+
283
def cat2hpx(lon, lat, nside, radec=True, do_counts=False, return_idx=False, return_indices=False, weights=None):
    """
    Convert a catalogue to a HEALPix map of number counts per resolution
    element.

    Parameters
    ----------
    lon, lat : (ndarray, ndarray)
        Coordinates of the sources in degree. If radec=True, assume input is in the icrs
        coordinate system. Otherwise assume input is glon, glat
    nside : int
        HEALPix nside of the target map
    radec : bool
        Switch between R.A./Dec and glon/glat as input coordinate system.
    do_counts : bool
        Return the number of counts per HEALPix pixel
    return_idx : bool
        Return the set of non-empty HEALPix pixel indices
    return_indices : bool
        Returns the per-object HEALPix pixel indices
    weights: None or ndarray
        Needs to be given if each point carries an individual weight

    Return
    ------
    res : tuple
        Always contains the HEALPix map (number counts if do_counts, else a
        binary occupancy map) in Galactic coordinates. Depending on the flags
        it is preceded by the non-empty pixel indices (return_idx) and
        followed by the per-pixel summed weights (weights is not None) and
        the per-object pixel indices (return_indices).

    Notes
    -----
    This function is a generalised version of https://stackoverflow.com/a/50495134
    """

    npix = nside2npix(nside)

    if radec:
        eq = SkyCoord(lon, lat, frame='galactic', unit='deg')
        l, b = eq.galactic.l.value, eq.galactic.b.value
    else:
        l, b = lon, lat

    # convert to theta, phi
    theta = np.radians(90. - b)
    phi = np.radians(l)

    # convert to HEALPix indices
    indices = ang2pix(nside, theta, phi)

    # Non-empty pixels (and their counts if requested). np.unique makes the
    # pixel ordering deterministic, unlike the set-based variant it replaces.
    if do_counts:
        idx, counts = np.unique(indices, return_counts=True)
    else:
        idx = np.unique(indices).astype(int)
    # Summed weights per non-empty pixel. BUGFIX: previously this was only
    # computed inside the do_counts branch, so passing weights without
    # do_counts raised a NameError when assembling the result below.
    if weights is not None:
        idx, inv = np.unique(indices, return_inverse=True)
        weights_pix = np.bincount(inv, weights.reshape(-1))

    # fill the fullsky map
    hpx_map = np.zeros(npix, dtype=int)
    if do_counts:
        hpx_map[idx] = counts
    else:
        hpx_map[idx] = np.ones(len(idx), dtype=int)

    res = ()
    if return_idx:
        res += (idx,)
    res += (hpx_map.astype(int),)
    if weights is not None:
        res += (weights_pix,)
    if return_indices:
        res += (indices,)

    return res
orpheus/utils.py ADDED
@@ -0,0 +1,152 @@
1
+ import numpy as np
2
+ from itertools import combinations_with_replacement
3
+ import os
4
+ import site
5
+
6
def convertunits(unit_in, unit_target):
    """Return the multiplicative factor converting angles from *unit_in* to *unit_target*.

    Supported units are 'rad', 'deg', 'arcmin' and 'arcsec'.

    Raises
    ------
    ValueError
        If either unit is not supported. (The original used a bare assert,
        which is silently stripped when python runs with -O.)
    """
    vals = {'rad': 180./np.pi,
            'deg': 1.,
            'arcmin': 1./60.,
            'arcsec': 1./60./60.}
    if unit_in not in vals or unit_target not in vals:
        raise ValueError(f"Unsupported unit conversion: {unit_in!r} -> {unit_target!r}")
    return vals[unit_in]/vals[unit_target]
14
+
15
def flatlist(A):
    """Recursively flatten the (possibly nested) list *A* into a flat list."""
    result = []
    for element in A:
        result.extend(flatlist(element) if isinstance(element, list) else [element])
    return result
21
+
22
def get_site_packages_dir():
    """Return the first site-packages/dist-packages directory known to the site module."""
    candidates = [p for p in site.getsitepackages()
                  if p.endswith(("site-packages", "dist-packages"))]
    return candidates[0]
25
+
26
def search_file_in_site_package(directory, package):
    """Walk *directory* and return the path of the first file whose name
    starts with *package*, or None if no such file exists."""
    for root, _dirs, files in os.walk(directory):
        match = next((f for f in files if f.startswith(package)), None)
        if match is not None:
            return os.path.join(root, match)
    return None
32
+
33
def gen_thetacombis_fourthorder(nbinsr, nthreads, batchsize, batchsize_max, ordered=True, custom=None, verbose=False):
    """Enumerate the (theta1, theta2, theta3) radial-bin combinations for the
    fourth-order statistics and split them into batches.

    Parameters
    ----------
    nbinsr : int
        Number of radial bins per leg.
    nthreads : int
        Number of threads the batches will be distributed over.
    batchsize : int or None
        Target number of combinations per batch; chosen automatically if None.
    batchsize_max : int
        Upper limit used when choosing the batchsize automatically.
    ordered : bool
        If True, only keep combinations with elb1 <= elb2 <= elb3.
    custom : numpy.ndarray or None
        Optional explicit selection of flattened bin indices to keep.
    verbose : bool
        Print the chosen batchsize.

    Returns
    -------
    tuple
        (nbinsr3, allelbs, thetacombis_batches, cumnthetacombis_batches,
         nthetacombis_batches, nbatches)
    """

    ncombis_full = nbinsr*nbinsr*nbinsr

    # Selection mask over all flattened (elb1, elb2, elb3) indices
    if custom is None:
        keepmask = np.ones(ncombis_full, dtype=bool)
    else:
        custom = custom.astype(int)
        assert(np.max(custom) < ncombis_full)
        assert(np.min(custom) >= 0)
        keepmask = np.zeros(ncombis_full, dtype=bool)
        keepmask[custom] = True

    # Collect the accepted combinations together with their flattened index
    kept_inds = []
    kept_elbs = []
    flatind = 0
    for elb1 in range(nbinsr):
        for elb2 in range(nbinsr):
            for elb3 in range(nbinsr):
                accept = not (ordered and (elb1 > elb2 or elb1 > elb3 or elb2 > elb3))
                if accept and keepmask[flatind]:
                    kept_inds.append([flatind])
                    kept_elbs.append([elb1, elb2, elb3])
                flatind += 1
    nbinsr3 = len(kept_inds)
    thetacombis_batches = np.asarray(kept_inds, dtype=np.int32)
    allelbs = np.asarray(kept_elbs, dtype=np.int32)

    # Choose a batchsize and derive the batch boundaries
    if batchsize is None:
        batchsize = min(nbinsr3, min(batchsize_max, nbinsr3/nthreads))
    if verbose:
        print("Using batchsize of %i for radial bins"%batchsize)
    if batchsize == batchsize_max:
        nbatches = np.int32(np.ceil(nbinsr3/batchsize))
    else:
        nbatches = np.int32(nbinsr3/batchsize)
    cumnthetacombis_batches = (np.arange(nbatches+1)*nbinsr3/(nbatches)).astype(np.int32)
    nthetacombis_batches = (cumnthetacombis_batches[1:]-cumnthetacombis_batches[:-1]).astype(np.int32)
    # Make sure integer rounding does not drop the tail combinations
    cumnthetacombis_batches[-1] = nbinsr3
    nthetacombis_batches[-1] = nbinsr3-cumnthetacombis_batches[-2]
    thetacombis_batches = thetacombis_batches.flatten().astype(np.int32)
    nbatches = len(nthetacombis_batches)

    return nbinsr3, allelbs, thetacombis_batches, cumnthetacombis_batches, nthetacombis_batches, nbatches
83
+
84
def gen_n2n3indices_Upsfourth(nmax):
    """ List of flattened indices corresponding to selection """
    # Mark the admissible (n2, n3) region -- a central square plus four
    # boundary strips -- on a grid sized generously enough for all strips.
    offset = 2*nmax + 1
    gridlen = 2*offset + 1
    mask = np.zeros((gridlen, gridlen), dtype=int)
    # Central square |n2|, |n3| <= nmax+1
    for n2 in range(-nmax-1, nmax+2):
        for n3 in range(-nmax-1, nmax+2):
            mask[offset+n2, offset+n3] += 1
    # The four strips extending the selection beyond the central square
    for n3 in range(-2*nmax, -nmax-1):
        for n2 in range(-nmax-1-n3, nmax+2):
            mask[offset+n2, offset+n3] += 1
    for n2 in range(-2*nmax-1, -nmax-1):
        for n3 in range(-nmax-1-n2, nmax+2):
            mask[offset+n2, offset+n3] += 1
    for n3 in range(nmax+2, 2*nmax+1):
        for n2 in range(-nmax-1, nmax+2-n3):
            mask[offset+n2, offset+n3] += 1
    for n2 in range(nmax+2, 2*nmax+2):
        for n3 in range(-nmax-1, nmax+2-n2):
            mask[offset+n2, offset+n3] += 1
    _shape = mask.shape
    hits = np.argwhere(mask > 0)
    _inds = np.argwhere((mask > 0).flatten())[:, 0].astype(np.int32)
    _n2s = hits[:, 0].astype(np.int32) - offset
    _n3s = hits[:, 1].astype(np.int32) - offset
    return _shape, _inds, _n2s, _n3s
108
+
109
def symmetrize_map3_multiscale(map3, return_list=False):
    """
    Symmetrizes third-order aperture mass over tomographic bin combinations
    and radial bin combinations.

    Assumes map3 to be of shape (8, nbinsz**3, nbinsr**3); the symmetrized
    output has shape (8, n_unique_zcombis, n_unique_rcombis).
    """

    nbinsz = int(round((map3.shape[1])**(1/3)))
    nbinsr = int(round((map3.shape[2])**(1/3)))

    # Unique ordered combinations of indices (e.g., r1 <= r2 <= r3)
    r_combs = np.array(list(combinations_with_replacement(range(nbinsr), 3)))
    z_combs = np.array(list(combinations_with_replacement(range(nbinsz), 3)))
    indr3 = r_combs.shape[0]
    indz3 = z_combs.shape[0]

    # The six permutations of a bin triplet
    perm_map = np.array([[0, 1, 2], [1, 2, 0], [2, 0, 1],
                         [1, 0, 2], [2, 1, 0], [0, 2, 1]])

    # Permuted 3D indices, shape (6, n_combinations, 3)
    r_perms_3d = r_combs[:, perm_map].transpose(1, 0, 2)
    z_perms_3d = z_combs[:, perm_map].transpose(1, 0, 2)

    # Flatten the permuted triplets into indices of map3's trailing axes
    sel_foots = r_perms_3d @ np.array([nbinsr**2, nbinsr, 1])   # (6, indr3)
    zcombis = z_perms_3d @ np.array([nbinsz**2, nbinsz, 1])     # (6, indz3)

    # Average over both permutation axes;
    # all_perms_data has shape (8, 6_z_perms, indz3, 6_r_perms, indr3)
    all_perms_data = map3[:, zcombis][:, :, :, sel_foots]
    map3_symm = np.mean(all_perms_data, axis=(1, 3))

    res = (map3_symm,)
    if return_list:
        # Rearrange and split the data to match the original list format
        list_data = all_perms_data.transpose(3, 0, 1, 4, 2)
        map3_list = [arr.squeeze(axis=-1) for arr in np.split(list_data, indz3, axis=-1)]
        res += (map3_list,)

    return res
@@ -0,0 +1,67 @@
1
+ Metadata-Version: 2.4
2
+ Name: orpheus-npcf
3
+ Version: 0.1.11
4
+ Summary: Compute N-point correlation functions of spin-s fields.
5
+ Home-page: https://github.com/lporth93/orpheus
6
+ Author: Lucas Porth
7
+ License: MIT
8
+ Classifier: Development Status :: 4 - Beta
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Requires-Python: >=3.9
12
+ Description-Content-Type: text/markdown
13
+ License-File: LICENSE
14
+ Requires-Dist: astropy>=6
15
+ Requires-Dist: healpy>=1.17
16
+ Requires-Dist: coverage>=7.6.1
17
+ Requires-Dist: numba<=0.62.1,>=0.58
18
+ Requires-Dist: numpy<1.27,>=1.22
19
+ Requires-Dist: scipy>=1.15
20
+ Requires-Dist: scikit-learn
21
+ Dynamic: author
22
+ Dynamic: classifier
23
+ Dynamic: description
24
+ Dynamic: description-content-type
25
+ Dynamic: home-page
26
+ Dynamic: license
27
+ Dynamic: license-file
28
+ Dynamic: requires-dist
29
+ Dynamic: requires-python
30
+ Dynamic: summary
31
+
32
+ <p align="center">
33
+ <img src="docs/orpheus_logov1.png" alt="Orpheus logo" width="500"/>
34
+ </p>
35
+
36
+ Orpheus is a Python package for the calculation of second-, third- and fourth-order correlation functions of scalar and polar fields such as weak lensing shear. To efficiently perform the calculations, orpheus makes use of a multipole decomposition of the N>2 correlation functions and uses parallelized C code for the heavy lifting.
37
+
38
+ ## Installation, Documentation and Examples
39
+ Installation steps, documentation and examples are provided at [orpheus.readthedocs.io](https://orpheus.readthedocs.io/).
40
+
41
+ ### Installation
42
+ First clone the directory via:
43
+ ```shell
44
+ git clone git@github.com:lporth93/orpheus.git
45
+ ```
46
+ or
47
+ ```shell
48
+ git clone https://github.com/lporth93/orpheus.git
49
+ ```
50
+ Then navigate to the cloned directory
51
+ ```shell
52
+ cd orpheus
53
+ conda env create -f orpheus_env.yaml
54
+ conda activate orpheus_env
55
+ pip install .
56
+ ```
57
+
58
+ ### Documentation
59
+ In the [documentation](https://orpheus.readthedocs.io/) you find more information about the algorithms and approximation schemes employed in orpheus, as well as a series of jupyter notebooks that give examples of how to use the different estimators implemented in orpheus.
60
+
61
+ ## Using the code
62
+ As at this moment there is no dedicated orpheus paper, please cite the paper that introduced the functionality implemented in orpheus:
63
+ * If you use the three-point functionality, please cite [Porth+2024](https://doi.org/10.1051/0004-6361/202347987)
64
+ * If you use the four-point functionality, please cite [Porth+2025](https://arxiv.org/abs/2509.07974)
65
+ * If you use the direct estimator functionality, please cite [Porth & Smith 2022](https://doi.org/10.1093/mnras/stab2819)
66
+
67
+ In each of the papers, you can find the main equations implemented in orpheus.
@@ -0,0 +1,15 @@
1
+ orpheus_npcf-0.1.11.dist-info/RECORD,,
2
+ orpheus_npcf-0.1.11.dist-info/WHEEL,sha256=DhkyuvaF_ECzC6tOpvYO14C_ylOEhcIxq8q81VPjNdw,134
3
+ orpheus_npcf-0.1.11.dist-info/top_level.txt,sha256=EGQN7bZMbZZGcbqsLDoUVjVl6glgjt9qmT6FZt5N-Yo,8
4
+ orpheus_npcf-0.1.11.dist-info/METADATA,sha256=f7TnjM8cvAd42wBJ5SGCft23IOJ8czgFv2C63nhCIfw,2626
5
+ orpheus_npcf-0.1.11.dist-info/licenses/LICENSE,sha256=ixuiBLtpoK3iv89l7ylKkg9rs2GzF9ukPH7ynZYzK5s,35148
6
+ orpheus/catalog.py,sha256=AFPcQpK6QVf9Jf3ubyeNFDMz8XTdG2YxbBp5l3u73Wg,55815
7
+ orpheus/flat2dgrid.py,sha256=1YSmUN1pt3036oKKqscmNmDp2UAUj4WL82IDULVmolU,1905
8
+ orpheus/orpheus_clib.cpython-39-darwin.so,sha256=4Co8O--n-YLY9Kgbli1265w6xmnv1kAwtYvP7HE_exI,340704
9
+ orpheus/npcf.py,sha256=P9O-mo-NsEjxLXPreQ1OfNfuY5ZCUS90hlLTB3G_bxI,204974
10
+ orpheus/patchutils.py,sha256=5bjj3SIX48op88s_RlODIyvQ3oIowmZJkdcXjSNNF48,14259
11
+ orpheus/covariance.py,sha256=iyjQ7fgPSNLbEMn400T02sGG0lML1yzTUkqOL6PoqeM,5660
12
+ orpheus/__init__.py,sha256=yeLNi-zSAhmBvF9ptv4VK8obQNMDtwMPP6QLspoHMjI,137
13
+ orpheus/utils.py,sha256=Aa_l20oGmWKgjZ1rYnp14Ctvao3o6hb85Xpg6PkyP-M,6362
14
+ orpheus/direct.py,sha256=qYWXGXgmiHSFmYWCTrtC12NXfAPZ46xENi1lYleXbec,47734
15
+ orpheus/.dylibs/libgomp.1.dylib,sha256=8Bw0CGsTlKIeJIEDIPtLaYGkEzK31ke8ZcLhCoOh0iA,441360
@@ -0,0 +1,6 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: false
4
+ Tag: cp39-cp39-macosx_14_0_arm64
5
+ Generator: delocate 0.13.0
6
+