multipers 2.3.1-cp313-cp313-win_amd64.whl → 2.3.2b1-cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of multipers might be problematic.
- multipers/_signed_measure_meta.py +71 -65
- multipers/array_api/__init__.py +39 -0
- multipers/array_api/numpy.py +34 -0
- multipers/array_api/torch.py +35 -0
- multipers/distances.py +6 -2
- multipers/filtrations/density.py +23 -12
- multipers/filtrations/filtrations.py +74 -15
- multipers/function_rips.cp313-win_amd64.pyd +0 -0
- multipers/grids.cp313-win_amd64.pyd +0 -0
- multipers/grids.pyx +144 -61
- multipers/gudhi/Simplex_tree_multi_interface.h +35 -0
- multipers/gudhi/gudhi/Multi_persistence/Box.h +3 -0
- multipers/gudhi/gudhi/One_critical_filtration.h +17 -9
- multipers/gudhi/mma_interface_matrix.h +5 -3
- multipers/gudhi/truc.h +488 -42
- multipers/io.cp313-win_amd64.pyd +0 -0
- multipers/io.pyx +16 -86
- multipers/ml/mma.py +3 -3
- multipers/ml/signed_measures.py +60 -62
- multipers/mma_structures.cp313-win_amd64.pyd +0 -0
- multipers/mma_structures.pxd +2 -1
- multipers/mma_structures.pyx +56 -12
- multipers/mma_structures.pyx.tp +14 -3
- multipers/multiparameter_module_approximation/approximation.h +45 -13
- multipers/multiparameter_module_approximation.cp313-win_amd64.pyd +0 -0
- multipers/multiparameter_module_approximation.pyx +22 -6
- multipers/plots.py +1 -0
- multipers/point_measure.cp313-win_amd64.pyd +0 -0
- multipers/point_measure.pyx +6 -2
- multipers/simplex_tree_multi.cp313-win_amd64.pyd +0 -0
- multipers/simplex_tree_multi.pxd +1 -0
- multipers/simplex_tree_multi.pyx +487 -109
- multipers/simplex_tree_multi.pyx.tp +67 -18
- multipers/slicer.cp313-win_amd64.pyd +0 -0
- multipers/slicer.pxd +719 -237
- multipers/slicer.pxd.tp +22 -6
- multipers/slicer.pyx +5311 -1364
- multipers/slicer.pyx.tp +199 -46
- multipers/tbb12.dll +0 -0
- multipers/tbbbind_2_5.dll +0 -0
- multipers/tbbmalloc.dll +0 -0
- multipers/tbbmalloc_proxy.dll +0 -0
- multipers/tests/__init__.py +9 -4
- multipers/torch/diff_grids.py +30 -7
- {multipers-2.3.1.dist-info → multipers-2.3.2b1.dist-info}/METADATA +4 -25
- {multipers-2.3.1.dist-info → multipers-2.3.2b1.dist-info}/RECORD +49 -46
- {multipers-2.3.1.dist-info → multipers-2.3.2b1.dist-info}/WHEEL +1 -1
- {multipers-2.3.1.dist-info → multipers-2.3.2b1.dist-info/licenses}/LICENSE +0 -0
- {multipers-2.3.1.dist-info → multipers-2.3.2b1.dist-info}/top_level.txt +0 -0
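The new `multipers/array_api` subpackage listed above backs the `api_from_tensor` import that appears in the `multipers/slicer.pyx.tp` diff below, where the returned object is used through `astensor`, `asnumpy`, `stack`, and `empty`. Purely as a hedged illustration of that pattern (the class names and dispatch logic here are assumptions, not the package's actual implementation), a minimal numpy/torch dispatch layer could look like:

```python
import numpy as np

class _NumpyAPI:
    # numpy backend exposing the four operations the slicer diff below relies on
    astensor = staticmethod(np.asarray)
    asnumpy = staticmethod(np.asarray)
    stack = staticmethod(np.stack)
    empty = staticmethod(np.empty)

class _TorchAPI:
    # torch backend with the same interface, keeping data on the torch side
    @staticmethod
    def astensor(x):
        import torch
        return torch.as_tensor(x)

    @staticmethod
    def asnumpy(x):
        return x.detach().cpu().numpy()

    @staticmethod
    def stack(tensors, axis=0):
        import torch
        return torch.stack(list(tensors), dim=axis)

    @staticmethod
    def empty(shape, dtype=None):
        import torch
        return torch.empty(shape, dtype=dtype)

def api_from_tensor(x):
    """Pick a backend namespace from a sample tensor (sketch only)."""
    if type(x).__module__.split(".")[0] == "torch":
        return _TorchAPI()
    return _NumpyAPI()
```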
multipers/slicer.pyx.tp
CHANGED
@@ -11,6 +11,7 @@ import pickle
 with open("build/tmp/_slicer_names.pkl", "rb") as f:
     slicers=pickle.load(f)

+dtypes = set([(D['PY_VALUE_TYPE'], D['C_VALUE_TYPE'], D['SHORT_VALUE_TYPE']) for D in slicers])

 }}

@@ -20,13 +21,15 @@ from typing import Optional,Literal
 import threading
 import os
 from joblib import Parallel, delayed
+from warnings import warn

 from multipers.slicer cimport *
 from multipers.filtrations cimport *
 from multipers.filtration_conversions cimport *
 ## TODO: these two are not needed, remove that by updating rank code.
 from multipers.point_measure import sparsify, rank_decomposition_by_rectangles
-from multipers.grids import compute_grid
+from multipers.grids import compute_grid, sanitize_grid, evaluate_in_grid, _push_pts_to_line
+from multipers.array_api import api_from_tensor

 import numpy as np
 cimport cython
@@ -104,7 +107,7 @@ _valid_pers_backend = Literal[
 #------------------------------------------------------------------------------
 cdef class {{D['PYTHON_TYPE']}}:
     cdef {{D['C_TEMPLATE_TYPE']}} truc
-    cdef public
+    cdef public object filtration_grid
     cdef public int minpres_degree ## TODO : maybe change directly the degree in the minpres ?

     def __repr__(self):
@@ -112,7 +115,7 @@ cdef class {{D['PYTHON_TYPE']}}:

     @property
     def is_squeezed(self)->bool:
-        return self.filtration_grid.
+        return self.filtration_grid is not None and len(self.filtration_grid) > 0 and len(self.filtration_grid[0]) > 0
     @property
     def is_minpres(self)->bool:
         return self.minpres_degree>=0
@@ -215,10 +218,22 @@ cdef class {{D['PYTHON_TYPE']}}:
         self.minpres_degree = -1
         {{endif}}

-    def to_colexical(self)->{{D['PYTHON_TYPE']}}:
-        assert not self.is_squeezed, "Unsqueeze first"
+    def to_colexical(self, bool return_permutation = False)->{{D['PYTHON_TYPE']}}|tuple[{{D['PYTHON_TYPE']}},np.ndarray]:
+        # assert not self.is_squeezed, "Unsqueeze first, this is not implented yet for squeezed slicers"
         new_slicer = {{D['PYTHON_TYPE']}}()
-
+        cdef pair[{{D['C_TEMPLATE_TYPE']}}, vector[unsigned int]] stuff = self.truc.colexical_rearange()
+
+        new_slicer.truc = stuff.first
+        new_slicer.minpres_degree = self.minpres_degree
+        new_slicer.filtration_grid = self.filtration_grid
+
+        if return_permutation:
+            return new_slicer, np.array(stuff.second, dtype=np.int32)
+        return new_slicer
+    def permute_generators(self, permutation)->{{D['PYTHON_TYPE']}}:
+        cdef vector[unsigned int] c_perm = permutation
+        new_slicer = {{D['PYTHON_TYPE']}}()
+        new_slicer.truc = self.truc.permute(c_perm)
         new_slicer.minpres_degree = self.minpres_degree
         return new_slicer

@@ -231,6 +246,21 @@ cdef class {{D['PYTHON_TYPE']}}:
         copy_.minpres_degree = self.minpres_degree
         copy_.filtration_grid = self.filtration_grid
         return copy_
+    def compute_kernel_projective_cover(self, dim:Optional[int]=None)->{{D['PYTHON_TYPE']}}:
+        if len(self) == 0:
+            return {{D['PYTHON_TYPE']}}()
+        if dim is None:
+            dim = self.truc.get_dimension(0)
+        out = {{D['PYTHON_TYPE']}}()
+        out.truc = self.truc.projective_cover_kernel(dim)
+        out.filtration_grid = self.filtration_grid
+        return out
+
+    def get_barcode_idx(self, bool keep_inf = False):
+        """
+        Returns the current barcode.
+        """
+        return tuple(np.asarray(x) if len(x) else np.empty((0,2), dtype=int)for x in self.truc.get_barcode_idx())
     def get_barcode(self, bool keep_inf = False):
         """
         Returns the current barcode.
@@ -239,12 +269,13 @@ cdef class {{D['PYTHON_TYPE']}}:
             bcs = tuple(np.asarray(stuff, dtype = {{D['PY_VALUE_TYPE']}}) for stuff in self.truc.get_barcode())
         else:
             bcs = {{D['PYTHON_TYPE']}}._threshold_bcs(self.truc.get_barcode())
-        return
+        return bcs
     def push_to_line(self, basepoint, direction=None)->{{D['PYTHON_TYPE']}}:
         """
         Pushes the current slicer to the line defined by a basepoint and an optional direction.
         If the direction is not provided, it is assumed to be diagonal.
         """
+        {{if D['IS_FLOAT']}}
         basepoint = np.asarray(basepoint, dtype = {{D['PY_VALUE_TYPE']}})
         cdef Line[{{D['C_VALUE_TYPE']}}] line
         if direction is None:
@@ -254,6 +285,9 @@ cdef class {{D['PYTHON_TYPE']}}:
             line = Line[{{D['C_VALUE_TYPE']}}](_py21c_{{D['SHORT_VALUE_TYPE']}}(basepoint),_py21c_{{D['SHORT_VALUE_TYPE']}}(direction))
         self.truc.push_to(line)
         return self
+        {{else}}
+        raise NotImplementedError("There is no `int` slicing.")
+        {{endif}}

     @staticmethod
     cdef _threshold_bcs(vector[vector[pair[{{D['C_VALUE_TYPE']}}, {{D['C_VALUE_TYPE']}}]]] bcs):
@@ -261,8 +295,8 @@ cdef class {{D['PYTHON_TYPE']}}:
     @staticmethod
     def _bc_to_full(bcs, basepoint, direction=None):
         # i, (b sv d), coords
-        basepoint =
-        direction = 1 if direction is None else
+        basepoint = basepoint[None,None,:]
+        direction = 1 if direction is None else direction[None,None,:]
         return tuple(bc[:,:,None]*direction + basepoint for bc in bcs)

     def persistence_on_line(self,basepoint,direction=None, bool keep_inf=True, bool full=False, bool ignore_infinite_filtration_values = True):
@@ -273,6 +307,7 @@ cdef class {{D['PYTHON_TYPE']}}:

         Warning: This is not parallelizable. Use `persitence_on_lines`.
         """
+        {{if D['IS_FLOAT']}}
         self.push_to_line(basepoint,direction)
         self.truc.compute_persistence(ignore_infinite_filtration_values)
         if keep_inf:
@@ -283,6 +318,34 @@ cdef class {{D['PYTHON_TYPE']}}:
         if full:
             bcs = {{D['PYTHON_TYPE']}}._bc_to_full(bcs, basepoint, direction)
         return bcs
+        {{else}}
+        if not self.is_squeezed:
+            raise ValueError("Unsqueeze tensor, or provide a filtration grid. Cannot slice lines with integers...")
+        api = api_from_tensor(self.filtration_grid[0])
+        s = self.unsqueeze()
+        fil = evaluate_in_grid(np.asarray(self.get_filtrations()), self.filtration_grid)
+        projected_fil =_push_pts_to_line(fil, basepoint, direction)
+        s.compute_persistence(projected_fil)
+        bcs_idx = s.get_barcode_idx()
+        bcs = tuple(
+            api.stack([projected_fil[bc[:,0]], projected_fil[bc[:,1]]], axis=1)
+            if bc.size>0
+            else api.empty((0,2), dtype = self.filtration_grid[0].dtype)
+            for bc in bcs_idx
+        )
+        if full:
+            bcs = self._bc_to_full(bcs, basepoint, direction)
+        return bcs
+        {{endif}}
+
+    def _custom_persistences_idx(self, filtration_array, bool ignore_inf=True):
+        filtration_array = np.asarray(filtration_array, dtype= {{D['PY_VALUE_TYPE']}})
+        cdef {{D['C_VALUE_TYPE']}}[:,:] arr_view = filtration_array
+        cdef int size = arr_view.shape[0]
+        if arr_view.shape[1] != self.truc.num_generators():
+            raise ValueError(f"Got filtration array of shape {filtration_array.shape=} / {arr_view.shape=}. Was expecting (-1, {len(self)=})")
+
+        return tuple(tuple(np.array(bc_idx_degree, dtype=int) for bc_idx_degree in bc_idx) for bc_idx in self.truc.custom_persistences(&arr_view[0,0], size, ignore_inf))

     def persistence_on_lines(self, basepoints=None, directions=None, bool keep_inf=True, bool full=False, bool ignore_infinite_filtration_values = True):
         """
@@ -303,7 +366,9 @@ cdef class {{D['PYTHON_TYPE']}}:
         cdef int num_bc = c_basepoints.size()

         if keep_inf:
-            out = tuple(tuple(
+            out = tuple(tuple(
+                np.asarray(y, dtype = {{D['PY_VALUE_TYPE']}}) if len(y)>0 else np.empty((0,2), dtype = {{D['PY_VALUE_TYPE']}})
+                for y in x) for x in c_out)
         else:
             out = tuple({{D['PYTHON_TYPE']}}._threshold_bcs(x) for x in c_out)

@@ -318,7 +383,7 @@ cdef class {{D['PYTHON_TYPE']}}:
             self.get_boundaries(),
             self.get_dimensions(),
             self.get_filtrations(),
-
+            self.filtration_grid,
             self.minpres_degree,
         )
     def __setstate__(self, tuple dump):
@@ -347,26 +412,50 @@ cdef class {{D['PYTHON_TYPE']}}:



-    def compute_persistence(self,one_filtration=None, bool ignore_infinite_filtration_values = True)->
+    def compute_persistence(self,one_filtration=None, bool ignore_infinite_filtration_values = True)->tuple:
         """
         Computes the current persistence, or the persistence
         given by the filtration one_filtration (num_generators,).
         """
         if one_filtration is not None:
-
+            api = api_from_tensor(one_filtration)
+            one_filtration=api.astensor(one_filtration)
+            # self.truc.set_one_filtration(one_filtration)
+            # s = self.unsqueeze()
+            # fil = evaluate_in_grid(np.asarray(self.get_filtrations()), self.filtration_grid)
+            # projected_fil =_push_pts_to_line(fil, basepoint, direction)
+            if one_filtration.ndim > 2 or one_filtration.ndim == 0:
+                raise ValueError(f"Expected a filtration shape of the form ((num_1_param), num_generators). Got {one_filtration.shape=}")
+            squeeze = False
+            if one_filtration.ndim == 1:
+                one_filtration = one_filtration[None]
+                squeeze = True
+
+            bcs = self._custom_persistences_idx(api.asnumpy(one_filtration),ignore_infinite_filtration_values)
+
+            bcs = tuple(tuple(
+                api.stack([F[bc[:,0]], F[bc[:,1]]], axis=1)
+                if bc.size>0
+                else api.empty((0,2), dtype = F.dtype)
+                for bc in bcs_idx
+                )
+                for bcs_idx,F in zip(bcs,one_filtration)
+            )
+            return bcs[0] if squeeze else bcs
+
         # TODO: Later
         # if len(degrees)>0:
         #     self.truc.compute_persistence(degrees)
         # else:
         #     self.truc.compute_persistence()
         self.truc.compute_persistence(ignore_infinite_filtration_values)
-        return self
-
+        # return self
+        return self.get_barcode()
     def get_barcode(self):
         """
         Returns the barcode of the current 1d-persistence.
         """
-        return self.truc.get_barcode()
+        return tuple(np.asarray(bc) for bc in self.truc.get_barcode())
     def sliced_filtration(self,basepoint, direction=None):
         """
         Computes the filtration on a line L defined by
@@ -439,7 +528,15 @@ cdef class {{D['PYTHON_TYPE']}}:
         Returns the boundaries of the generators.
         """
         return tuple(tuple(b) for b in self.truc.get_boundaries())
-    def grid_squeeze(
+    def grid_squeeze(
+        self,
+        filtration_grid=None,
+        strategy="exact",
+        resolution:Optional[int]=None,
+        bool coordinates=True,
+        bool inplace = False,
+        grid_strategy=None
+    )->{{D['PYTHON_TYPE'][:-3]+"i32"}}|{{D['PYTHON_TYPE']}}:
         """
         Coarsen the filtration values on a grid. This is necessary to compute some invariants.

@@ -450,28 +547,53 @@ cdef class {{D['PYTHON_TYPE']}}:

         - inplace:bool if true, does the operation inplace, i.e., doesn't return a copy.
         """
-        if
-
+        if grid_strategy is not None:
+            warn("`grid_strategy` is deprecated, use `strategy` instead.",DeprecationWarning)
+            strategy=grid_strategy
+
+        if self.is_squeezed:
+            warn("(copy warning) Squeezing an already squeezed slicer.")
+            temp = self.unsqueeze()
+            subgrid = compute_grid(self.filtration_grid, strategy=strategy, resolution=resolution)
+            return temp.grid_squeeze(subgrid, coordinates=coordinates, inplace=inplace)
+
         if filtration_grid is None:
             filtration_grid = compute_grid(
                 self.get_filtrations_values().T,
-                strategy=
+                strategy=strategy,
                 resolution=resolution)
         cdef vector[vector[{{D['C_VALUE_TYPE']}}]] grid = filtration_grid
         if inplace or not coordinates:
             self.truc.coarsen_on_grid_inplace(grid, coordinates)
-
+            if coordinates:
+                self.filtration_grid = filtration_grid
         else:
             {{if D['COLUMN_TYPE'] is None}}
             raise ValueError("WIP")
             {{else}}
             out = {{D['PYTHON_TYPE'][:-3]+"i32"}}()
             out.truc = self.truc.coarsen_on_grid(grid)
-
+            if coordinates:
+                out.filtration_grid = sanitize_grid(filtration_grid)
             out.minpres_degree = self.minpres_degree
             return out
             {{endif}}
         return self
+    def _clean_filtration_grid(self):
+        """
+        Removes the values in filtration_grid that are not linked to any splx.
+        """
+        if not self.is_squeezed:
+            raise ValueError("No grid to clean.")
+        F = self.filtration_grid
+        self.filtration_grid=None
+        cleaned_coordinates = compute_grid(self)
+        new_slicer = self.grid_squeeze(cleaned_coordinates)
+
+        self._from_ptr(new_slicer.get_ptr())
+        self.filtration_grid = tuple(f[g] for f,g in zip(F,cleaned_coordinates))
+        return self
+
     def minpres(self,
         int degree=-1,
         list[int] degrees=[],
@@ -486,7 +608,7 @@ cdef class {{D['PYTHON_TYPE']}}:
         Computes the minimal presentation of the slicer, and returns it as a new slicer.
         See :func:`multipers.slicer.minimal_presentation`.
         """
-        new_slicer = minimal_presentation(self, degree=degree, degrees=degrees, backend=backend, slicer_backend=slicer_backend, vineyard=vineyard, id=id, **minpres_kwargs)
+        new_slicer = minimal_presentation(self, degree=degree, degrees=degrees, backend=backend, slicer_backend=slicer_backend, dtype=dtype, vineyard=vineyard, id=id, **minpres_kwargs)
         return new_slicer

     @property
@@ -563,10 +685,11 @@ cdef class {{D['PYTHON_TYPE']}}:
         self.truc.build_from_scc_file(c_path, rivet_compatible, reverse, shift_dimension)
         return self

-    def unsqueeze(self, grid=None
-        from multipers.grids import evaluate_in_grid
+    def unsqueeze(self, grid=None)->{{D['PYTHON_TYPE'][:-3]+"f64"}}:
+        from multipers.grids import evaluate_in_grid, sanitize_grid
         from multipers import Slicer
-        grid =
+        grid = self.filtration_grid if grid is None else grid
+        grid = sanitize_grid(grid, numpyfy=True)
         new_filtrations = evaluate_in_grid(np.asarray(self.get_filtrations(), dtype=np.int32), grid)
         new_slicer = {{D['PYTHON_TYPE'][:-3]+"f64"}}(
             self.get_boundaries(),
@@ -587,10 +710,13 @@ cdef extern from "gudhi/cubical_to_boundary.h" namespace "":
     void get_vertices(unsigned int, cset[unsigned int]&, const vector[vector[unsigned int]]&) nogil


-
+{{for pytype,ctype,fshort in dtypes}}
+def _from_bitmap{{fshort}}(image, **slicer_kwargs):
     from multipers import Slicer
-
-    slicer_kwargs["dtype"] =
+    dtype = slicer_kwargs.get("dtype", image.dtype)
+    slicer_kwargs["dtype"] = dtype
+    if image.dtype != dtype:
+        raise ValueError(f"Invalid type matching. Got {dtype=} and {image.dtype=}")
     _Slicer = Slicer(return_type_only=True, **slicer_kwargs)
     cdef vector[unsigned int] img_shape = image.shape[:-1]
     cdef unsigned int num_parameters = image.shape[-1]
@@ -600,11 +726,12 @@ def from_bitmap(image, **slicer_kwargs):
     with nogil:
         _to_boundary(img_shape,gen_maps, gen_dims)

-    cdef unsigned int num_gens = gen_dims.size()
-    filtration_values = np.zeros(shape=(num_gens, num_parameters), dtype = np.double) - np.inf
-    cdef double[:,:] F = filtration_values
-    cdef double[:,:] c_img = image.reshape(-1,num_parameters)
     cdef cset[unsigned int] vertices
+
+    cdef unsigned int num_gens = gen_dims.size()
+    filtration_values = np.zeros(shape=(num_gens, num_parameters), dtype = {{pytype}}) - _Slicer._inf_value()
+    cdef {{ctype}}[:,:] F = filtration_values
+    cdef {{ctype}}[:,:] c_img = image.reshape(-1,num_parameters)
     with nogil:
         for i in range(num_gens):
             # with gil:
@@ -622,13 +749,22 @@ def from_bitmap(image, **slicer_kwargs):
             # print(f"F = {np.asarray(F[i])}")
     slicer = _Slicer(gen_maps, gen_dims, filtration_values)
     return slicer
+{{endfor}}
+
+def from_bitmap(img, **kwargs):
+    img = np.asarray(img)
+    {{for pytype,ctype,stype in dtypes}}
+    if img.dtype == {{pytype}}:
+        return _from_bitmap{{stype}}(img, **kwargs)
+    {{endfor}}
+    raise ValueError(f"Invalid dtype. Got {img.dtype=}, was expecting {available_dtype=}.")

 def from_function_delaunay(
     points,
     grades,
     int degree=-1,
-    backend:
-    vineyard:Optional[bool]
+    backend:Optional[_valid_pers_backend]=None,
+    vineyard=None, # TODO : Optional[bool] when cython fixes it
     dtype=np.float64,
     bool verbose = False,
     bool clear = True,
@@ -689,12 +825,12 @@ def minimal_presentation(
     and returns it as a slicer.
     Backends differents than `mpfree` are unstable.
     """
-    from multipers.io import
+    from multipers.io import _init_external_softwares, input_path, scc_reduce_from_str_to_slicer
     if is_slicer(slicer) and slicer.is_minpres and not force:
         from warnings import warn
-        warn(f"The slicer seems to be already reduced, from homology of degree {slicer.minpres_degree}.")
+        warn(f"(unnecessary computation) The slicer seems to be already reduced, from homology of degree {slicer.minpres_degree}.")
         return slicer
-
+    _init_external_softwares(requires=[backend])
     if len(degrees)>0:
         def todo(int degree):
             return minimal_presentation(slicer, degree=degree, backend=backend, slicer_backend=slicer_backend, vineyard=vineyard, id=id, **minpres_kwargs)
@@ -703,22 +839,23 @@
         )
     # return tuple(minimal_presentation(slicer, degree=d, backend=backend, slicer_backend=slicer_backend, vineyard=vineyard, id=id, **minpres_kwargs) for d in degrees)
     assert degree>=0, f"Degree not provided."
-
+    if not np.any(slicer.get_dimensions() == degree):
+        return type(slicer)()
     if id is None:
         id = str(threading.get_native_id())
     if dtype is None:
         dtype = slicer.dtype
     dimension = slicer.dimension - degree # latest = L-1, which is empty, -1 for degree 0, -2 for degree 1 etc.
-    slicer.to_scc(path=input_path+id, strip_comments=True, degree=degree-1)
+    slicer.to_scc(path=input_path+id, strip_comments=True, degree=degree-1, unsqueeze = False)
     new_slicer = multipers.Slicer(None,backend=slicer_backend, vineyard=vineyard, dtype=dtype)
     if backend=="mpfree":
         shift_dimension=degree-1
     else:
         shift_dimension=degree
     scc_reduce_from_str_to_slicer(path=input_path+id, slicer=new_slicer, dimension=dimension, backend=backend, shift_dimension=shift_dimension, **minpres_kwargs)
+
     new_slicer.minpres_degree = degree
-
-    new_slicer.filtration_grid = filtration_grid
+    new_slicer.filtration_grid = slicer.filtration_grid if slicer.is_squeezed else None
     return new_slicer


@@ -897,24 +1034,38 @@ def _rank_from_slicer(

     cdef int num_parameters = len(grid_shape)

-    if zero_pad:
-
-
+    # if zero_pad:
+    #     grid_shape += 1
+    #     for i, _ in enumerate(grid_shape):
+    #         grid_shape[i] += 1 # adds a 0
     # for i,f in enumerate(grid_conversion):
     #     grid_conversion[i] = np.concatenate([f, [mass_default[i]]])

+
     grid_shape_with_degree = np.asarray(np.concatenate([[len(degrees)], grid_shape, grid_shape]), dtype=python_indices_type)
+    if verbose:
+        print("Container shape: ", grid_shape_with_degree)
     container_array = np.ascontiguousarray(np.zeros(grid_shape_with_degree, dtype=python_tensor_dtype).ravel())
     assert len(container_array) < np.iinfo(python_indices_type).max, "Too large container. Raise an issue on github if you encounter this issue. (Due to tensor's operator[])"
+    # if zero_pad:
+    #     grid_shape_with_degree[1:] -= 1
     cdef vector[indices_type] c_grid_shape = grid_shape_with_degree
     cdef tensor_dtype[::1] container = container_array
     cdef tensor_dtype* container_ptr = &container[0]

     ## SLICERS
+    if verbose:
+        print("Computing rank invariant...", end="")
     _compute_rank_invariant(slicer, container_ptr, c_grid_shape, degrees, n_jobs, ignore_inf)
+    if verbose:
+        print("Done.")

+    if verbose:
+        print("Computing Möbius inversion...", end="")
+    # if zero_pad:
+    #     grid_shape_with_degree[1:] += 1
     rank = container_array.reshape(grid_shape_with_degree)
-    rank = tuple(rank_decomposition_by_rectangles(rank_of_degree, threshold
+    rank = tuple(rank_decomposition_by_rectangles(rank_of_degree, threshold=zero_pad) for rank_of_degree in rank)
     if return_raw:
         return rank
     out = []
@@ -928,4 +1079,6 @@ def _rank_from_slicer(
         return coords, weights

     out = tuple(clean_rank(rank_decomposition) for rank_decomposition in rank)
+    if verbose:
+        print("Done.")
     return out
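Taken together, the user-facing changes visible in this file are: `grid_squeeze` now takes `strategy=` (keeping `grid_strategy=` as a deprecated alias), `compute_persistence` returns the barcode instead of the slicer itself, and `to_colexical` can optionally return the applied permutation. A hedged usage sketch of those signatures follows; the simplex tree construction is assumed from the library's public API and is not part of this diff:

```python
import multipers as mp

# Assumed setup (not shown in this diff): a tiny 2-parameter simplex tree.
st = mp.SimplexTreeMulti(num_parameters=2)
st.insert([0], [0.0, 0.0])
st.insert([1], [1.0, 0.5])
st.insert([0, 1], [1.0, 1.0])
s = mp.Slicer(st)

# `strategy=` replaces `grid_strategy=`, which now triggers a DeprecationWarning.
squeezed = s.grid_squeeze(strategy="exact")

# compute_persistence() now returns the barcode (a tuple of arrays) rather than `self`.
bcs = s.push_to_line([0.0, 0.0]).compute_persistence()

# to_colexical(return_permutation=True) also returns the generator permutation.
colex, perm = s.to_colexical(return_permutation=True)
```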
multipers/tbb12.dll
CHANGED
Binary file
multipers/tbbbind_2_5.dll
CHANGED
Binary file
multipers/tbbmalloc.dll
CHANGED
Binary file
multipers/tbbmalloc_proxy.dll
CHANGED
Binary file
multipers/tests/__init__.py
CHANGED
@@ -21,11 +21,15 @@ def sort_sm(sms):
     return tuple((sm[0][idx], sm[1][idx]) for sm in sms)


-def assert_sm_pair(sm1, sm2, exact=True, max_error=1e-3, reg=0.1):
+def assert_sm_pair(sm1, sm2, exact=True, max_error=1e-3, reg=0.1, threshold=None):
     if not exact:
         from multipers.distances import sm_distance
+        if threshold is not None:
+            _inf_value_fix = threshold
+            sm1[0][sm1[0] >threshold] = _inf_value_fix
+            sm2[0][sm2[0] >threshold] = _inf_value_fix

-        d = sm_distance(sm1, sm2, reg=
+        d = sm_distance(sm1, sm2, reg=reg)
         assert d < max_error, f"Failed comparison:\n{sm1}\n{sm2},\n with distance {d}."
         return
     assert np.all(
@@ -37,10 +41,11 @@ def assert_sm_pair(sm1, sm2, exact=True, max_error=1e-3, reg=0.1):
     ), f"Failed comparison:\n-----------------\n{sm1}\n-----------------\n{sm2}"


-def assert_sm(*args, exact=True, max_error=1e-5, reg=0.1):
+def assert_sm(*args, exact=True, max_error=1e-5, reg=0.1, threshold=None):
     sms = tuple(args)
     for i in range(len(sms) - 1):
-
+        print(i)
+        assert_sm_pair(sms[i], sms[i + 1], exact=exact, max_error=max_error, reg=reg, threshold=threshold)


 def random_st(npts=100, num_parameters=2, max_dim=2):
multipers/torch/diff_grids.py
CHANGED
@@ -15,6 +15,8 @@ def get_grid(strategy: Literal["exact", "regular_closest", "regular_left", "quan
     match strategy:
         case "exact":
             return _exact_grid
+        case "regular":
+            return _regular_grid
         case "regular_closest":
             return _regular_closest_grid
         case "regular_left":
@@ -35,27 +37,44 @@ def todense(grid: list[torch.Tensor]):


 def _exact_grid(filtration_values, r=None):
+    assert r is None
     grid = tuple(_unique_any(f) for f in filtration_values)
     return grid


-def _regular_closest_grid(filtration_values,
-    grid = tuple(_regular_closest(f, r) for f in filtration_values)
+def _regular_closest_grid(filtration_values, res):
+    grid = tuple(_regular_closest(f, r) for f,r in zip(filtration_values, res))
     return grid

+def _regular_grid(filtration_values, res):
+    grid = tuple(_regular(g,r) for g,r in zip(filtration_values, res))
+    return grid
+
+def _regular(x, r:int):
+    if x.ndim != 1:
+        raise ValueError(f"Got ndim!=1. {x=}")
+    return torch.linspace(start=torch.min(x), end=torch.max(x), steps=r, dtype=x.dtype)

-def _regular_left_grid(filtration_values,
-    grid = tuple(_regular_left(f, r) for f in filtration_values)
+def _regular_left_grid(filtration_values, res):
+    grid = tuple(_regular_left(f, r) for f,r in zip(filtration_values,res))
     return grid


-def _quantile_grid(filtration_values,
-
-    grid = tuple(_unique_any(torch.quantile(f, q=qs)) for f in filtration_values)
+def _quantile_grid(filtration_values, res):
+    grid = tuple(_quantile(f, r) for f,r in zip(filtration_values,res))
     return grid
+def _quantile(x, r):
+    if x.ndim != 1:
+        raise ValueError(f"Got ndim!=1. {x=}")
+    qs = torch.linspace(0, 1, r, dtype=x.dtype)
+    return _unique_any(torch.quantile(x, q=qs))
+
+


 def _unique_any(x, assume_sorted=False, remove_inf: bool = True):
+    if x.ndim != 1:
+        raise ValueError(f"Got ndim!=1. {x=}")
     if not assume_sorted:
         x, _ = x.sort()
     if remove_inf and x[-1] == torch.inf:
@@ -68,6 +87,8 @@ def _unique_any(x, assume_sorted=False, remove_inf: bool = True):


 def _regular_left(f, r: int, unique: bool = True):
+    if f.ndim != 1:
+        raise ValueError(f"Got ndim!=1. {f=}")
     f = _unique_any(f)
     with torch.no_grad():
         f_regular = torch.linspace(f[0].item(), f[-1].item(), r, device=f.device)
@@ -79,6 +100,8 @@ def _regular_left(f, r: int, unique: bool = True):


 def _regular_closest(f, r: int, unique: bool = True):
+    if f.ndim != 1:
+        raise ValueError(f"Got ndim!=1. {f=}")
     f = _unique_any(f)
     with torch.no_grad():
         f_reg = torch.linspace(
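For context, the hunks above add a `"regular"` grid strategy built with `torch.linspace` and change the resolution handling so that one resolution is zipped with each filtration axis. A hedged usage sketch (the tensors below are made-up illustration data, and it assumes the `Literal` annotation of `get_grid` was extended with `"regular"`):

```python
import torch
from multipers.torch.diff_grids import get_grid

# Two filtration axes with different ranges (illustrative values only).
filtration_values = (
    torch.tensor([0.0, 0.3, 0.7, 1.0]),
    torch.tensor([0.0, 2.0, 5.0]),
)

# get_grid returns the grid-building function; "regular" maps to _regular_grid,
# which applies torch.linspace(min, max, steps=r) independently on each axis.
regular = get_grid("regular")
grid = regular(filtration_values, (4, 3))  # one resolution per axis
```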
{multipers-2.3.1.dist-info → multipers-2.3.2b1.dist-info}/METADATA
CHANGED
@@ -1,31 +1,10 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: multipers
-Version: 2.3.
+Version: 2.3.2b1
 Summary: Multiparameter Topological Persistence for Machine Learning
 Author-email: David Loiseaux <david.lapous@proton.me>, Hannah Schreiber <hannah.schreiber@inria.fr>
 Maintainer-email: David Loiseaux <david.lapous@proton.me>
-License: MIT
-
-Copyright (c) 2023 David Loiseaux
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
+License-Expression: MIT
 Project-URL: source, https://github.com/DavidLapous/multipers
 Project-URL: download, https://pypi.org/project/multipers/#files
 Project-URL: tracker, https://github.com/DavidLapous/multipers/issues
@@ -40,7 +19,6 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Topic :: Scientific/Engineering :: Mathematics
 Classifier: Topic :: Scientific/Engineering :: Visualization
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
@@ -54,6 +32,7 @@ Requires-Dist: scikit-learn
 Requires-Dist: filtration-domination
 Requires-Dist: pykeops
 Requires-Dist: pot
+Dynamic: license-file

 # multipers : Multiparameter Persistence for Machine Learning
 [](https://doi.org/10.21105/joss.06773) [](https://davidlapous.github.io/multipers) [](https://github.com/DavidLapous/multipers/actions/workflows/python_PR.yml)