multipers-2.3.1-cp313-cp313-win_amd64.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of multipers has been flagged as potentially problematic; additional details are available on the registry page.

Files changed (182)
  1. multipers/__init__.py +33 -0
  2. multipers/_signed_measure_meta.py +430 -0
  3. multipers/_slicer_meta.py +211 -0
  4. multipers/data/MOL2.py +458 -0
  5. multipers/data/UCR.py +18 -0
  6. multipers/data/__init__.py +1 -0
  7. multipers/data/graphs.py +466 -0
  8. multipers/data/immuno_regions.py +27 -0
  9. multipers/data/minimal_presentation_to_st_bf.py +0 -0
  10. multipers/data/pytorch2simplextree.py +91 -0
  11. multipers/data/shape3d.py +101 -0
  12. multipers/data/synthetic.py +113 -0
  13. multipers/distances.py +198 -0
  14. multipers/filtration_conversions.pxd +229 -0
  15. multipers/filtration_conversions.pxd.tp +84 -0
  16. multipers/filtrations/__init__.py +18 -0
  17. multipers/filtrations/density.py +563 -0
  18. multipers/filtrations/filtrations.py +289 -0
  19. multipers/filtrations.pxd +224 -0
  20. multipers/function_rips.cp313-win_amd64.pyd +0 -0
  21. multipers/function_rips.pyx +105 -0
  22. multipers/grids.cp313-win_amd64.pyd +0 -0
  23. multipers/grids.pyx +350 -0
  24. multipers/gudhi/Persistence_slices_interface.h +132 -0
  25. multipers/gudhi/Simplex_tree_interface.h +239 -0
  26. multipers/gudhi/Simplex_tree_multi_interface.h +516 -0
  27. multipers/gudhi/cubical_to_boundary.h +59 -0
  28. multipers/gudhi/gudhi/Bitmap_cubical_complex.h +450 -0
  29. multipers/gudhi/gudhi/Bitmap_cubical_complex_base.h +1070 -0
  30. multipers/gudhi/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h +579 -0
  31. multipers/gudhi/gudhi/Debug_utils.h +45 -0
  32. multipers/gudhi/gudhi/Fields/Multi_field.h +484 -0
  33. multipers/gudhi/gudhi/Fields/Multi_field_operators.h +455 -0
  34. multipers/gudhi/gudhi/Fields/Multi_field_shared.h +450 -0
  35. multipers/gudhi/gudhi/Fields/Multi_field_small.h +531 -0
  36. multipers/gudhi/gudhi/Fields/Multi_field_small_operators.h +507 -0
  37. multipers/gudhi/gudhi/Fields/Multi_field_small_shared.h +531 -0
  38. multipers/gudhi/gudhi/Fields/Z2_field.h +355 -0
  39. multipers/gudhi/gudhi/Fields/Z2_field_operators.h +376 -0
  40. multipers/gudhi/gudhi/Fields/Zp_field.h +420 -0
  41. multipers/gudhi/gudhi/Fields/Zp_field_operators.h +400 -0
  42. multipers/gudhi/gudhi/Fields/Zp_field_shared.h +418 -0
  43. multipers/gudhi/gudhi/Flag_complex_edge_collapser.h +337 -0
  44. multipers/gudhi/gudhi/Matrix.h +2107 -0
  45. multipers/gudhi/gudhi/Multi_critical_filtration.h +1038 -0
  46. multipers/gudhi/gudhi/Multi_persistence/Box.h +171 -0
  47. multipers/gudhi/gudhi/Multi_persistence/Line.h +282 -0
  48. multipers/gudhi/gudhi/Off_reader.h +173 -0
  49. multipers/gudhi/gudhi/One_critical_filtration.h +1433 -0
  50. multipers/gudhi/gudhi/Persistence_matrix/Base_matrix.h +769 -0
  51. multipers/gudhi/gudhi/Persistence_matrix/Base_matrix_with_column_compression.h +686 -0
  52. multipers/gudhi/gudhi/Persistence_matrix/Boundary_matrix.h +842 -0
  53. multipers/gudhi/gudhi/Persistence_matrix/Chain_matrix.h +1350 -0
  54. multipers/gudhi/gudhi/Persistence_matrix/Id_to_index_overlay.h +1105 -0
  55. multipers/gudhi/gudhi/Persistence_matrix/Position_to_index_overlay.h +859 -0
  56. multipers/gudhi/gudhi/Persistence_matrix/RU_matrix.h +910 -0
  57. multipers/gudhi/gudhi/Persistence_matrix/allocators/entry_constructors.h +139 -0
  58. multipers/gudhi/gudhi/Persistence_matrix/base_pairing.h +230 -0
  59. multipers/gudhi/gudhi/Persistence_matrix/base_swap.h +211 -0
  60. multipers/gudhi/gudhi/Persistence_matrix/boundary_cell_position_to_id_mapper.h +60 -0
  61. multipers/gudhi/gudhi/Persistence_matrix/boundary_face_position_to_id_mapper.h +60 -0
  62. multipers/gudhi/gudhi/Persistence_matrix/chain_pairing.h +136 -0
  63. multipers/gudhi/gudhi/Persistence_matrix/chain_rep_cycles.h +190 -0
  64. multipers/gudhi/gudhi/Persistence_matrix/chain_vine_swap.h +616 -0
  65. multipers/gudhi/gudhi/Persistence_matrix/columns/chain_column_extra_properties.h +150 -0
  66. multipers/gudhi/gudhi/Persistence_matrix/columns/column_dimension_holder.h +106 -0
  67. multipers/gudhi/gudhi/Persistence_matrix/columns/column_utilities.h +219 -0
  68. multipers/gudhi/gudhi/Persistence_matrix/columns/entry_types.h +327 -0
  69. multipers/gudhi/gudhi/Persistence_matrix/columns/heap_column.h +1140 -0
  70. multipers/gudhi/gudhi/Persistence_matrix/columns/intrusive_list_column.h +934 -0
  71. multipers/gudhi/gudhi/Persistence_matrix/columns/intrusive_set_column.h +934 -0
  72. multipers/gudhi/gudhi/Persistence_matrix/columns/list_column.h +980 -0
  73. multipers/gudhi/gudhi/Persistence_matrix/columns/naive_vector_column.h +1092 -0
  74. multipers/gudhi/gudhi/Persistence_matrix/columns/row_access.h +192 -0
  75. multipers/gudhi/gudhi/Persistence_matrix/columns/set_column.h +921 -0
  76. multipers/gudhi/gudhi/Persistence_matrix/columns/small_vector_column.h +1093 -0
  77. multipers/gudhi/gudhi/Persistence_matrix/columns/unordered_set_column.h +1012 -0
  78. multipers/gudhi/gudhi/Persistence_matrix/columns/vector_column.h +1244 -0
  79. multipers/gudhi/gudhi/Persistence_matrix/matrix_dimension_holders.h +186 -0
  80. multipers/gudhi/gudhi/Persistence_matrix/matrix_row_access.h +164 -0
  81. multipers/gudhi/gudhi/Persistence_matrix/ru_pairing.h +156 -0
  82. multipers/gudhi/gudhi/Persistence_matrix/ru_rep_cycles.h +376 -0
  83. multipers/gudhi/gudhi/Persistence_matrix/ru_vine_swap.h +540 -0
  84. multipers/gudhi/gudhi/Persistent_cohomology/Field_Zp.h +118 -0
  85. multipers/gudhi/gudhi/Persistent_cohomology/Multi_field.h +173 -0
  86. multipers/gudhi/gudhi/Persistent_cohomology/Persistent_cohomology_column.h +128 -0
  87. multipers/gudhi/gudhi/Persistent_cohomology.h +745 -0
  88. multipers/gudhi/gudhi/Points_off_io.h +171 -0
  89. multipers/gudhi/gudhi/Simple_object_pool.h +69 -0
  90. multipers/gudhi/gudhi/Simplex_tree/Simplex_tree_iterators.h +463 -0
  91. multipers/gudhi/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h +83 -0
  92. multipers/gudhi/gudhi/Simplex_tree/Simplex_tree_siblings.h +106 -0
  93. multipers/gudhi/gudhi/Simplex_tree/Simplex_tree_star_simplex_iterators.h +277 -0
  94. multipers/gudhi/gudhi/Simplex_tree/hooks_simplex_base.h +62 -0
  95. multipers/gudhi/gudhi/Simplex_tree/indexing_tag.h +27 -0
  96. multipers/gudhi/gudhi/Simplex_tree/serialization_utils.h +62 -0
  97. multipers/gudhi/gudhi/Simplex_tree/simplex_tree_options.h +157 -0
  98. multipers/gudhi/gudhi/Simplex_tree.h +2794 -0
  99. multipers/gudhi/gudhi/Simplex_tree_multi.h +152 -0
  100. multipers/gudhi/gudhi/distance_functions.h +62 -0
  101. multipers/gudhi/gudhi/graph_simplicial_complex.h +104 -0
  102. multipers/gudhi/gudhi/persistence_interval.h +253 -0
  103. multipers/gudhi/gudhi/persistence_matrix_options.h +170 -0
  104. multipers/gudhi/gudhi/reader_utils.h +367 -0
  105. multipers/gudhi/mma_interface_coh.h +256 -0
  106. multipers/gudhi/mma_interface_h0.h +223 -0
  107. multipers/gudhi/mma_interface_matrix.h +291 -0
  108. multipers/gudhi/naive_merge_tree.h +536 -0
  109. multipers/gudhi/scc_io.h +310 -0
  110. multipers/gudhi/truc.h +957 -0
  111. multipers/io.cp313-win_amd64.pyd +0 -0
  112. multipers/io.pyx +714 -0
  113. multipers/ml/__init__.py +0 -0
  114. multipers/ml/accuracies.py +90 -0
  115. multipers/ml/invariants_with_persistable.py +79 -0
  116. multipers/ml/kernels.py +176 -0
  117. multipers/ml/mma.py +713 -0
  118. multipers/ml/one.py +472 -0
  119. multipers/ml/point_clouds.py +352 -0
  120. multipers/ml/signed_measures.py +1589 -0
  121. multipers/ml/sliced_wasserstein.py +461 -0
  122. multipers/ml/tools.py +113 -0
  123. multipers/mma_structures.cp313-win_amd64.pyd +0 -0
  124. multipers/mma_structures.pxd +127 -0
  125. multipers/mma_structures.pyx +2742 -0
  126. multipers/mma_structures.pyx.tp +1083 -0
  127. multipers/multi_parameter_rank_invariant/diff_helpers.h +84 -0
  128. multipers/multi_parameter_rank_invariant/euler_characteristic.h +97 -0
  129. multipers/multi_parameter_rank_invariant/function_rips.h +322 -0
  130. multipers/multi_parameter_rank_invariant/hilbert_function.h +769 -0
  131. multipers/multi_parameter_rank_invariant/persistence_slices.h +148 -0
  132. multipers/multi_parameter_rank_invariant/rank_invariant.h +369 -0
  133. multipers/multiparameter_edge_collapse.py +41 -0
  134. multipers/multiparameter_module_approximation/approximation.h +2298 -0
  135. multipers/multiparameter_module_approximation/combinatory.h +129 -0
  136. multipers/multiparameter_module_approximation/debug.h +107 -0
  137. multipers/multiparameter_module_approximation/euler_curves.h +0 -0
  138. multipers/multiparameter_module_approximation/format_python-cpp.h +286 -0
  139. multipers/multiparameter_module_approximation/heap_column.h +238 -0
  140. multipers/multiparameter_module_approximation/images.h +79 -0
  141. multipers/multiparameter_module_approximation/list_column.h +174 -0
  142. multipers/multiparameter_module_approximation/list_column_2.h +232 -0
  143. multipers/multiparameter_module_approximation/ru_matrix.h +347 -0
  144. multipers/multiparameter_module_approximation/set_column.h +135 -0
  145. multipers/multiparameter_module_approximation/structure_higher_dim_barcode.h +36 -0
  146. multipers/multiparameter_module_approximation/unordered_set_column.h +166 -0
  147. multipers/multiparameter_module_approximation/utilities.h +403 -0
  148. multipers/multiparameter_module_approximation/vector_column.h +223 -0
  149. multipers/multiparameter_module_approximation/vector_matrix.h +331 -0
  150. multipers/multiparameter_module_approximation/vineyards.h +464 -0
  151. multipers/multiparameter_module_approximation/vineyards_trajectories.h +649 -0
  152. multipers/multiparameter_module_approximation.cp313-win_amd64.pyd +0 -0
  153. multipers/multiparameter_module_approximation.pyx +218 -0
  154. multipers/pickle.py +90 -0
  155. multipers/plots.py +342 -0
  156. multipers/point_measure.cp313-win_amd64.pyd +0 -0
  157. multipers/point_measure.pyx +322 -0
  158. multipers/simplex_tree_multi.cp313-win_amd64.pyd +0 -0
  159. multipers/simplex_tree_multi.pxd +133 -0
  160. multipers/simplex_tree_multi.pyx +10402 -0
  161. multipers/simplex_tree_multi.pyx.tp +1947 -0
  162. multipers/slicer.cp313-win_amd64.pyd +0 -0
  163. multipers/slicer.pxd +2552 -0
  164. multipers/slicer.pxd.tp +218 -0
  165. multipers/slicer.pyx +16530 -0
  166. multipers/slicer.pyx.tp +931 -0
  167. multipers/tbb12.dll +0 -0
  168. multipers/tbbbind_2_5.dll +0 -0
  169. multipers/tbbmalloc.dll +0 -0
  170. multipers/tbbmalloc_proxy.dll +0 -0
  171. multipers/tensor/tensor.h +672 -0
  172. multipers/tensor.pxd +13 -0
  173. multipers/test.pyx +44 -0
  174. multipers/tests/__init__.py +57 -0
  175. multipers/torch/__init__.py +1 -0
  176. multipers/torch/diff_grids.py +217 -0
  177. multipers/torch/rips_density.py +310 -0
  178. multipers-2.3.1.dist-info/LICENSE +21 -0
  179. multipers-2.3.1.dist-info/METADATA +144 -0
  180. multipers-2.3.1.dist-info/RECORD +182 -0
  181. multipers-2.3.1.dist-info/WHEEL +5 -0
  182. multipers-2.3.1.dist-info/top_level.txt +1 -0
multipers/io.pyx ADDED
@@ -0,0 +1,714 @@
1
+ import re
2
+ from gudhi import SimplexTree
3
+ import gudhi as gd
4
+ import numpy as np
5
+ import os
6
+ from shutil import which
7
+ from libcpp cimport bool
8
+ from typing import Optional, Literal
9
+ from collections import defaultdict
10
+ import itertools
11
+ import threading
12
+ import cython
13
+ cimport cython
14
+
15
+ # from multipers.filtration_conversions cimport *
16
+ # from multipers.mma_structures cimport boundary_matrix,float,pair,vector,intptr_t
17
+ # cimport numpy as cnp
18
+ current_doc_url = "https://davidlapous.github.io/multipers/"
19
+ doc_soft_urls = {
20
+ "mpfree":"https://bitbucket.org/mkerber/mpfree/",
21
+ "multi_chunk":"",
22
+ "function_delaunay":"https://bitbucket.org/mkerber/function_delaunay/",
23
+ "2pac":"https://gitlab.com/flenzen/2pac",
24
+ }
25
+ doc_soft_easy_install = {
26
+ "mpfree":f"""
27
+ ```sh
28
+ git clone {doc_soft_urls["mpfree"]}
29
+ cd mpfree
30
+ cmake . --fresh
31
+ make
32
+ sudo cp mpfree /usr/bin/
33
+ cd ..
34
+ rm -rf mpfree
35
+ ```
36
+ """,
37
+ "multi_chunk":f"""
38
+ ```sh
39
+ git clone {doc_soft_urls["multi_chunk"]}
40
+ cd multi_chunk
41
+ cmake . --fresh
42
+ make
43
+ sudo cp multi_chunk /usr/bin/
44
+ cd ..
45
+ rm -rf multi_chunk
46
+ ```
47
+ """,
48
+ "function_delaunay":f"""
49
+ ```sh
50
+ git clone {doc_soft_urls["function_delaunay"]}
51
+ cd function_delaunay
52
+ cmake . --fresh
53
+ make
54
+ sudo cp main /usr/bin/function_delaunay
55
+ cd ..
56
+ rm -rf function_delaunay
57
+ ```
58
+ """,
59
+ "2pac":f"""
60
+ ```sh
61
+ git clone {doc_soft_urls["2pac"]} 2pac
62
+ cd 2pac && mkdir build && cd build
63
+ cmake ..
64
+ make
65
+ sudo cp 2pac /usr/bin
66
+ ```
67
+ """,
68
+ }
69
+ doc_soft_urls = defaultdict(lambda:"<Unknown url>", doc_soft_urls)
70
+ doc_soft_easy_install = defaultdict(lambda:"<Unknown>", doc_soft_easy_install)
71
+
72
+ available_reduce_softs = Literal["mpfree","multi_chunk","2pac"]
73
+
74
+ def _path_init(soft:str|os.PathLike):
75
+ a = which(f"./{soft}")
76
+ b = which(f"{soft}")
77
+ if a:
78
+ pathes[soft] = a
79
+ elif b:
80
+ pathes[soft] = b
81
+
82
+ if pathes[soft] is not None:
83
+ verbose_arg = "> /dev/null 2>&1"
84
+ test = os.system(pathes[soft] + " --help " + verbose_arg)
85
+ if test:
86
+ from warnings import warn
87
+ warn(f"""
88
+ Found external software {soft} at {pathes[soft]}
89
+ but it may not behave correctly.
90
+ """)
91
+
92
+
93
+
94
+ cdef dict[str,str|None] pathes = {
95
+ "mpfree":None,
96
+ "2pac":None,
97
+ "function_delaunay":None,
98
+ "multi_chunk":None,
99
+ }
100
+
101
+ # mpfree_in_path:str|os.PathLike = "multipers_mpfree_input.scc"
102
+ # mpfree_out_path:str|os.PathLike = "multipers_mpfree_output.scc"
103
+ # twopac_in_path:str|os.PathLike = "multipers_twopac_input.scc"
104
+ # twopac_out_path:str|os.PathLike = "multipers_twopac_output.scc"
105
+ # multi_chunk_in_path:str|os.PathLike = "multipers_multi_chunk_input.scc"
106
+ # multi_chunk_out_path:str|os.PathLike = "multipers_multi_chunk_output.scc"
107
+ # function_delaunay_out_path:str|os.PathLike = "function_delaunay_output.scc"
108
+ # function_delaunay_in_path:str|os.PathLike = "function_delaunay_input.txt" # point cloud
109
+ input_path:str|os.PathLike = "multipers_input.scc"
110
+ output_path:str|os.PathLike = "multipers_output.scc"
111
+
112
+
113
+
114
+ ## TODO : optimize with Python.h ?
115
+ def scc_parser(path: str| os.PathLike):
116
+ """
117
+ Parse an scc file into the scc python format, aka blocks.
118
+ """
119
+ pass_line_regex = re.compile(r"^\s*$|^#|^scc2020$")
120
+ def valid_line(line):
121
+ return pass_line_regex.match(line) is None
122
+ parse_line_regex = re.compile(r"^(?P<filtration>[^;]+);(?P<boundary>[^;]*)$")
123
+ cdef tuple[tuple[str,str]] clines
124
+ with open(path, "r") as f:
125
+ lines =(x.strip() for x in f if valid_line(x))
126
+ num_parameters = int(next(lines))
127
+ sizes = np.cumsum(np.asarray([0] + next(lines).split(), dtype=np.int32))
128
+ lines = (parse_line_regex.match(a) for a in lines)
129
+ clines = tuple((a.group("filtration"),a.group("boundary")) for a in lines)
130
+ # F = np.fromiter((a[0].split() for a in clines), dtype=np.dtype((np.float64,2)), count = sizes[-1])
131
+ F = np.fromiter((np.fromstring(a[0], sep=r' ', dtype=np.float64) for a in clines), dtype=np.dtype((np.float64,num_parameters)), count = sizes[-1])
132
+
133
+ # B = tuple(np.asarray(a[1].split(), dtype=np.int32) if len(a[1])>0 else np.empty(0, dtype=np.int32) for a in clines) ## TODO : this is very slow : optimize
134
+ B = tuple(np.fromstring(a[1], sep=' ', dtype=np.int32) for a in clines)
135
+ # block_lines = (tuple(get_bf(x, num_parameters) for x in lines[sizes[i]:sizes[i+1]]) for i in range(len(sizes)-1))
136
+
137
+ # blocks = [(np.asarray([x[0] for x in b if len(x)>0], dtype=float),tuple(x[1] for x in b)) for b in block_lines]
138
+ blocks = [(F[sizes[i]:sizes[i+1]], B[sizes[i]:sizes[i+1]]) for i in range(len(sizes)-1)]
139
+
140
+ return blocks
141
+
142
+
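For readers skimming the diff, here is a brief, hedged usage sketch of the parser above. `example.scc` is a placeholder filename (no such file ships with the wheel); the shapes in the comments follow the block format built by `scc_parser`.

```python
# Minimal sketch, assuming an existing scc2020 file at a placeholder path.
import multipers.io as mio

blocks = mio.scc_parser("example.scc")  # list of (filtrations, boundaries) pairs
for filtrations, boundaries in blocks:
    # filtrations: (n_generators, num_parameters) float64 array
    # boundaries:  tuple of int32 arrays, one per generator
    print(filtrations.shape, len(boundaries))
```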
143
+ def scc_parser__old(path: str):
144
+ """
145
+ Parse an scc file into the scc python format, aka blocks.
146
+ """
147
+ with open(path, "r") as f:
148
+ lines = f.readlines()
149
+ # Find scc2020
150
+ while lines[0].strip() != "scc2020":
151
+ lines = lines[1:]
152
+ lines = lines[1:]
153
+ # stripped scc2020 we can start
154
+
155
+ def pass_line(line):
156
+ return re.match(r"^\s*$|^#", line) is not None
157
+
158
+ for i, line in enumerate(lines):
159
+ line = line.strip()
160
+ if pass_line(line):
161
+ continue
162
+ num_parameters = int(line)
163
+ lines = lines[i + 1 :]
164
+ break
165
+
166
+ block_sizes = []
167
+
168
+ for i, line in enumerate(lines):
169
+ line = line.strip()
170
+ if pass_line(line):
171
+ continue
172
+ block_sizes = tuple(int(i) for i in line.split(" "))
173
+ lines = lines[i + 1 :]
174
+ break
175
+ blocks = []
176
+ cdef int counter
177
+ for block_size in block_sizes:
178
+ counter = block_size
179
+ block_filtrations = []
180
+ block_boundaries = []
181
+ for i, line in enumerate(lines):
182
+ if counter == 0:
183
+ lines = lines[i:]
184
+ break
185
+ line = line.strip()
186
+ if pass_line(line):
187
+ continue
188
+ splitted_line = re.match(r"^(?P<floats>[^;]+);(?P<ints>[^;]*)$", line)
189
+ filtrations = np.asarray(splitted_line.group("floats").split(), dtype=float)
190
+ boundary = np.asarray(splitted_line.group("ints").split(), dtype=int)
191
+ block_filtrations.append(filtrations)
192
+ block_boundaries.append(boundary)
193
+ # filtration_boundary = line.split(";")
194
+ # if len(filtration_boundary) == 1:
195
+ # # happens when last generators do not have a ";" in the end
196
+ # filtration_boundary.append(" ")
197
+ # filtration, boundary = filtration_boundary
198
+ # block_filtrations.append(
199
+ # tuple(float(x) for x in filtration.split(" ") if len(x) > 0)
200
+ # )
201
+ # block_boundaries.append(tuple(int(x) for x in boundary.split(" ") if len(x) > 0))
202
+ counter -= 1
203
+ blocks.append((np.asarray(block_filtrations, dtype=float), tuple(block_boundaries)))
204
+
205
+ return blocks
206
+
207
+
208
+
209
+ def _put_temp_files_to_ram():
210
+ global input_path,output_path
211
+ shm_memory = "/tmp/" # on unix, we can write in RAM instead of disk.
212
+ if os.access(shm_memory, os.W_OK) and not input_path.startswith(shm_memory):
213
+ input_path = shm_memory + input_path
214
+ output_path = shm_memory + output_path
215
+
216
+ def _init_external_softwares(requires=[]):
217
+ global pathes
218
+ cdef bool any = False
219
+ for soft,soft_path in pathes.items():
220
+ if soft_path is None:
221
+ _path_init(soft)
222
+ any = any or (soft in requires)
223
+
224
+ if any:
225
+ _put_temp_files_to_ram()
226
+ for soft in requires:
227
+ if pathes[soft] is None:
228
+ global doc_soft_urls
229
+ raise ValueError(f"""
230
+ Did not find {soft}.
231
+ Install it from {doc_soft_urls[soft]}, and put it in your current directory,
232
+ or in your $PATH.
233
+ Documentation is available here: {current_doc_url}compilation.html#external-libraries
234
+ For instance:
235
+ {doc_soft_easy_install[soft]}
236
+ """)
237
+ _init_external_softwares()
238
+ def _check_available(soft:str):
239
+ _init_external_softwares()
240
+ return pathes.get(soft,None) is not None
241
+
242
+
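The helpers above only locate external binaries in the current directory or `$PATH`; nothing is downloaded. A hedged sketch of checking which optional backends are reachable (these are private helpers, so the call is purely illustrative):

```python
# Minimal sketch: report which external backends were found.
import multipers.io as mio

for soft in ("mpfree", "multi_chunk", "function_delaunay", "2pac"):
    print(f"{soft}: {'found' if mio._check_available(soft) else 'not found'}")
```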
243
+ def scc_reduce_from_str(
244
+ path:str|os.PathLike,
245
+ bool full_resolution=True,
246
+ int dimension: int | np.int64 = 1,
247
+ bool clear: bool = True,
248
+ id: Optional[str] = None, # For parallel stuff
249
+ bool verbose:bool=False,
250
+ backend:Literal["mpfree","multi_chunk","twopac"]="mpfree"
251
+ ):
252
+ """
253
+ Computes a minimal presentation of the file in path,
254
+ using an external backend (mpfree by default).
255
+
256
+ path:PathLike
257
+ full_resolution: bool
258
+ dimension: int, presentation dimension to consider
259
+ clear: bool, removes temporary files if True
260
+ id: str, temporary files are of this id, allowing for multiprocessing
261
+ verbose: bool
262
+ backend: "mpfree", "multi_chunk" or "2pac"
263
+ """
264
+ global pathes, input_path, output_path
265
+ assert _check_available(backend), f"Backend {backend} is not available."
266
+
267
+
268
+ resolution_str = "--resolution" if full_resolution else ""
269
+ # print(mpfree_in_path + id, mpfree_out_path + id)
270
+ if id is None:
271
+ id = str(threading.get_native_id())
272
+ if not os.path.exists(path):
273
+ raise ValueError(f"No file found at {path}.")
274
+ if os.path.exists(output_path + id):
275
+ os.remove(output_path + id)
276
+ verbose_arg = "> /dev/null 2>&1" if not verbose else ""
277
+ if backend == "mpfree":
278
+ more_verbose = "-v" if verbose else ""
279
+ command = (
280
+ f"{pathes[backend]} {more_verbose} {resolution_str} --dim={dimension} {path} {output_path+id} {verbose_arg}"
281
+ )
282
+ elif backend == "multi_chunk":
283
+ command = (
284
+ f"{pathes[backend]} {path} {output_path+id} {verbose_arg}"
285
+ )
286
+ elif backend in ["twopac", "2pac"]:
287
+ command = (
288
+ f"{pathes[backend]} -f {path} --scc-input -n{dimension} --save-resolution-scc {output_path+id} {verbose_arg}"
289
+ )
290
+ else:
291
+ raise ValueError(f"Unsupported backend {backend}.")
292
+ if verbose:
293
+ print(f"Calling :\n\n {command}")
294
+ os.system(command)
295
+
296
+ blocks = scc_parser(output_path + id)
297
+ if clear:
298
+ clear_io(input_path+id, output_path + id)
299
+
300
+
301
+ ## mpfree workaround: last size is 0 but shouldn't...
302
+ if len(blocks) and not len(blocks[-1][1]):
303
+ blocks=blocks[:-1]
304
+
305
+ return blocks
306
+
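A hedged usage sketch of the reduction above; `input.scc` is a placeholder path and the mpfree binary must be installed separately (see the install snippets earlier in this file).

```python
# Minimal sketch: minimal presentation of an scc file via mpfree, if available.
import multipers.io as mio

if mio._check_available("mpfree"):
    blocks = mio.scc_reduce_from_str("input.scc", dimension=1, backend="mpfree")
    print([len(f) for f, _ in blocks])  # generators per block of the presentation
```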
307
+ def scc_reduce_from_str_to_slicer(
308
+ path:str|os.PathLike,
309
+ slicer,
310
+ bool full_resolution=True,
311
+ int dimension: int | np.int64 = 1,
312
+ bool clear: bool = True,
313
+ id: Optional[str] = None, # For parallel stuff
314
+ bool verbose:bool=False,
315
+ backend:Literal["mpfree","multi_chunk","twopac"]="mpfree",
316
+ shift_dimension=0
317
+ ):
318
+ """
319
+ Computes a minimal presentation of the file in path,
320
+ using an external backend (mpfree by default), and fills the given slicer with it.
321
+
322
+ path:PathLike
323
+ slicer: empty slicer to fill
324
+ full_resolution: bool
325
+ dimension: int, presentation dimension to consider
326
+ clear: bool, removes temporary files if True
327
+ id: str, temporary files are of this id, allowing for multiprocessing
328
+ verbose: bool
329
+ backend: "mpfree", "multi_chunk" or "2pac"
330
+ """
331
+ global pathes, input_path, output_path
332
+ assert _check_available(backend), f"Backend {backend} is not available."
333
+
334
+
335
+ resolution_str = "--resolution" if full_resolution else ""
336
+ # print(mpfree_in_path + id, mpfree_out_path + id)
337
+ if id is None:
338
+ id = str(threading.get_native_id())
339
+ if not os.path.exists(path):
340
+ raise ValueError(f"No file found at {path}.")
341
+ if os.path.exists(output_path + id):
342
+ os.remove(output_path + id)
343
+ verbose_arg = "> /dev/null 2>&1" if not verbose else ""
344
+ if backend == "mpfree":
345
+ more_verbose = "-v" if verbose else ""
346
+ command = (
347
+ f"{pathes[backend]} {more_verbose} {resolution_str} --dim={dimension} {path} {output_path+id} {verbose_arg}"
348
+ )
349
+ elif backend == "multi_chunk":
350
+ command = (
351
+ f"{pathes[backend]} {path} {output_path+id} {verbose_arg}"
352
+ )
353
+ elif backend in ["twopac", "2pac"]:
354
+ command = (
355
+ f"{pathes[backend]} -f {path} --scc-input -n{dimension} --save-resolution-scc {output_path+id} {verbose_arg}"
356
+ )
357
+ else:
358
+ raise ValueError(f"Unsupported backend {backend}.")
359
+ if verbose:
360
+ print(f"Calling :\n\n {command}")
361
+ os.system(command)
362
+
363
+ slicer._build_from_scc_file(path=output_path+id, shift_dimension=shift_dimension)
364
+
365
+ if clear:
366
+ clear_io(input_path+id, output_path + id)
367
+
368
+
369
+ def reduce_complex(
370
+ complex, # Simplextree, Slicer, or str
371
+ bool full_resolution: bool = True,
372
+ int dimension: int | np.int64 = 1,
373
+ bool clear: bool = True,
374
+ id: Optional[str]=None, # For parallel stuff
375
+ bool verbose:bool=False,
376
+ backend:available_reduce_softs="mpfree"
377
+ ):
378
+ """
379
+ Computes a minimal presentation of the given complex (simplextree, slicer, scc blocks, or scc file path),
380
+ using `backend`.
381
+
382
+ complex: SimplexTreeMulti, Slicer, scc blocks (list/tuple), or path to an scc file
383
+ full_resolution: bool
384
+ dimension: int, presentation dimension to consider
385
+ clear: bool, removes temporary files if True
386
+ id: str, temporary files are of this id, allowing for multiprocessing
387
+ verbose: bool
388
+ """
389
+
390
+ from multipers.simplex_tree_multi import is_simplextree_multi
391
+ from multipers.slicer import slicer2blocks
392
+ if id is None:
393
+ id = str(threading.get_native_id())
394
+ path = input_path+id
395
+ if is_simplextree_multi(complex):
396
+ complex.to_scc(
397
+ path=path,
398
+ rivet_compatible=False,
399
+ strip_comments=False,
400
+ ignore_last_generators=False,
401
+ overwrite=True,
402
+ reverse_block=False,
403
+ )
404
+ dimension = complex.dimension - dimension
405
+ elif isinstance(complex,str):
406
+ path = complex
407
+ elif isinstance(complex, list) or isinstance(complex, tuple):
408
+ scc2disk(complex,path=path)
409
+ else:
410
+ # Assumes its a slicer
411
+ blocks = slicer2blocks(complex)
412
+ scc2disk(blocks,path=path)
413
+ dimension = len(blocks) -2 -dimension
414
+
415
+ return scc_reduce_from_str(
416
+ path=path,
417
+ full_resolution=full_resolution,
418
+ dimension=dimension,
419
+ clear=clear,
420
+ id=id,
421
+ verbose=verbose,
422
+ backend=backend
423
+ )
424
+
425
+
426
+
427
+
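`reduce_complex` accepts several input types (SimplexTreeMulti, slicer, scc blocks, or a path). A hedged sketch using already-parsed blocks, with a placeholder input file:

```python
# Minimal sketch: reduce parsed scc blocks with the default backend.
import multipers.io as mio

blocks = mio.scc_parser("input.scc")                   # placeholder path
if mio._check_available("mpfree"):
    reduced = mio.reduce_complex(blocks, dimension=1)  # backend="mpfree" by default
```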
428
+ def function_delaunay_presentation(
429
+ point_cloud:np.ndarray,
430
+ function_values:np.ndarray,
431
+ id:Optional[str] = None,
432
+ bool clear:bool = True,
433
+ bool verbose:bool=False,
434
+ int degree = -1,
435
+ bool multi_chunk = False,
436
+ ):
437
+ """
438
+ Computes a function delaunay presentation, and returns it as blocks.
439
+
440
+ point_cloud : (num_pts, n) float array
441
+ function_values : (num_pts,) float array
442
+ degree (opt) : if given, computes a minimal presentation of this homological degree first
443
+ clear:bool, removes temporary files if true
444
+ multi_chunk: bool, if True, the --multi-chunk flag is passed to function_delaunay
445
+ verbose : bool
446
+ """
447
+ if id is None:
448
+ id = str(threading.get_native_id())
449
+ global input_path, output_path, pathes
450
+ backend = "function_delaunay"
451
+ assert _check_available(backend), f"Backend {backend} is not available."
452
+
453
+ to_write = np.concatenate([point_cloud, function_values.reshape(-1,1)], axis=1)
454
+ np.savetxt(input_path+id,to_write,delimiter=' ')
455
+ verbose_arg = "> /dev/null 2>&1" if not verbose else ""
456
+ degree_arg = f"--minpres {degree}" if degree >= 0 else ""
457
+ multi_chunk_arg = "--multi-chunk" if multi_chunk else ""
458
+ if os.path.exists(output_path + id):
459
+ os.remove(output_path+ id)
460
+ command = f"{pathes[backend]} {degree_arg} {multi_chunk_arg} {input_path+id} {output_path+id} {verbose_arg} --no-delaunay-compare"
461
+ if verbose:
462
+ print(command)
463
+ os.system(command)
464
+
465
+ blocks = scc_parser(output_path + id)
466
+ if clear:
467
+ clear_io(output_path + id, input_path + id)
468
+ ## Function Delaunay workaround: last size is 0 but shouldn't...
469
+ if degree<0 and len(blocks) and not len(blocks[-1][1]):
470
+ blocks=blocks[:-1]
471
+
472
+ return blocks
473
+
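A hedged sketch of the function-Delaunay pipeline on synthetic data; the `function_delaunay` binary must be installed, and the codensity values below are only an example of per-point function values.

```python
# Minimal sketch: function-Delaunay bifiltration of a random 2D point cloud.
import numpy as np
import multipers.io as mio

rng = np.random.default_rng(0)
points = rng.uniform(size=(100, 2))
codensity = -np.linalg.norm(points - points.mean(axis=0), axis=1)  # toy function values

if mio._check_available("function_delaunay"):
    blocks = mio.function_delaunay_presentation(points, codensity, degree=1)
```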
474
+ def function_delaunay_presentation_to_slicer(
475
+ slicer,
476
+ point_cloud:np.ndarray,
477
+ function_values:np.ndarray,
478
+ id:Optional[str] = None,
479
+ bool clear:bool = True,
480
+ bool verbose:bool=False,
481
+ int degree = -1,
482
+ bool multi_chunk = False,
483
+ ):
484
+ """
485
+ Computes a function delaunay presentation and fills the given slicer with it.
486
+
487
+ slicer: empty slicer to fill
488
+ point_cloud : (num_pts, n) float array
489
+ function_values : (num_pts,) float array
490
+ degree (opt) : if given, computes a minimal presentation of this homological degree first
491
+ clear:bool, removes temporary files if true
492
+ multi_chunk: bool, if True, the --multi-chunk flag is passed to function_delaunay
493
+ verbose : bool
494
+ """
495
+ if id is None:
496
+ id = str(threading.get_native_id())
497
+ global input_path, output_path, pathes
498
+ backend = "function_delaunay"
499
+ assert _check_available(backend), f"Backend {backend} is not available."
500
+
501
+ to_write = np.concatenate([point_cloud, function_values.reshape(-1,1)], axis=1)
502
+ np.savetxt(input_path+id,to_write,delimiter=' ')
503
+ verbose_arg = "> /dev/null 2>&1" if not verbose else ""
504
+ degree_arg = f"--minpres {degree}" if degree >= 0 else ""
505
+ multi_chunk_arg = "--multi-chunk" if multi_chunk else ""
506
+ if os.path.exists(output_path + id):
507
+ os.remove(output_path+ id)
508
+ command = f"{pathes[backend]} {degree_arg} {multi_chunk_arg} {input_path+id} {output_path+id} {verbose_arg} --no-delaunay-compare"
509
+ if verbose:
510
+ print(command)
511
+ os.system(command)
512
+
513
+ slicer._build_from_scc_file(path=output_path+id, shift_dimension=-1 if degree <= 0 else degree-1 )
514
+
515
+ if clear:
516
+ clear_io(output_path + id, input_path + id)
517
+
518
+
519
+
520
+ def clear_io(*args):
521
+ """Removes temporary files"""
522
+ global input_path,output_path
523
+ for x in [input_path,output_path] + list(args):
524
+ if os.path.exists(x):
525
+ os.remove(x)
526
+
527
+
528
+
529
+
530
+
531
+
532
+ # cdef extern from "multiparameter_module_approximation/format_python-cpp.h" namespace "Gudhi::multiparameter::mma":
533
+ # pair[boundary_matrix, vector[One_critical_filtration[double]]] simplextree_to_boundary_filtration(intptr_t)
534
+ # vector[pair[ vector[vector[float]],boundary_matrix]] simplextree_to_scc(intptr_t)
535
+ # vector[pair[ vector[vector[vector[float]]],boundary_matrix]] function_simplextree_to_scc(intptr_t)
536
+ # pair[vector[vector[float]],boundary_matrix ] simplextree_to_ordered_bf(intptr_t)
537
+
538
+ # def simplex_tree2boundary_filtrations(simplextree:SimplexTreeMulti | SimplexTree):
539
+ # """Computes a (sparse) boundary matrix, with associated filtration. Can be used as an input of approx afterwards.
540
+ #
541
+ # Parameters
542
+ # ----------
543
+ # simplextree: Gudhi or mma simplextree
544
+ # The simplextree defining the filtration to convert to boundary-filtration.
545
+ #
546
+ # Returns
547
+ # -------
548
+ # B:List of lists of ints
549
+ # The boundary matrix.
550
+ # F: List of 1D filtration
551
+ # The filtrations aligned with B; the i-th simplex of this simplextree has boundary B[i] and filtration(s) F[i].
552
+ #
553
+ # """
554
+ # cdef intptr_t cptr
555
+ # if isinstance(simplextree, SimplexTreeMulti):
556
+ # cptr = simplextree.thisptr
557
+ # elif isinstance(simplextree, SimplexTree):
558
+ # temp_st = gd.SimplexTreeMulti(simplextree, parameters=1)
559
+ # cptr = temp_st.thisptr
560
+ # else:
561
+ # raise TypeError("Has to be a simplextree")
562
+ # cdef pair[boundary_matrix, vector[One_critical_filtration[double]]] cboundary_filtration = simplextree_to_boundary_filtration(cptr)
563
+ # boundary = cboundary_filtration.first
564
+ # # multi_filtrations = np.array(<vector[vector[float]]>One_critical_filtration.to_python(cboundary_filtration.second))
565
+ # cdef cnp.ndarray[double, ndim=2] multi_filtrations = _fmf2numpy_f64(cboundary_filtration.second)
566
+ # return boundary, multi_filtrations
567
+
568
+ # def simplextree2scc(simplextree:SimplexTreeMulti | SimplexTree, filtration_dtype=np.float32, bool flattened=False):
569
+ # """
570
+ # Turns a simplextree into a (simplicial) module presentation.
571
+ # """
572
+ # cdef intptr_t cptr
573
+ # cdef bool is_function_st = False
574
+ # if isinstance(simplextree, SimplexTreeMulti):
575
+ # cptr = simplextree.thisptr
576
+ # is_function_st = simplextree._is_function_simplextree
577
+ # elif isinstance(simplextree, SimplexTree):
578
+ # temp_st = gd.SimplexTreeMulti(simplextree, parameters=1)
579
+ # cptr = temp_st.thisptr
580
+ # else:
581
+ # raise TypeError("Has to be a simplextree")
582
+ #
583
+ # cdef pair[vector[vector[float]], boundary_matrix] out
584
+ # if flattened:
585
+ # out = simplextree_to_ordered_bf(cptr)
586
+ # return np.asarray(out.first,dtype=filtration_dtype), tuple(out.second)
587
+ #
588
+ # if is_function_st:
589
+ # blocks = function_simplextree_to_scc(cptr)
590
+ # else:
591
+ # blocks = simplextree_to_scc(cptr)
592
+ # # reduces the space in memory
593
+ # if is_function_st:
594
+ # blocks = [(tuple(f), tuple(b)) for f,b in blocks[::-1]]
595
+ # else:
596
+ # blocks = [(np.asarray(f,dtype=filtration_dtype), tuple(b)) for f,b in blocks[::-1]] ## presentation is on the other order
597
+ # return blocks+[(np.empty(0,dtype=filtration_dtype),[])]
598
+
599
+ @cython.boundscheck(False)
600
+ @cython.wraparound(False)
601
+ def scc2disk(
602
+ stuff,
603
+ path:str|os.PathLike,
604
+ int num_parameters = -1,
605
+ bool reverse_block = False,
606
+ bool rivet_compatible = False,
607
+ bool ignore_last_generators = False,
608
+ bool strip_comments = False,
609
+ ):
610
+ """
611
+ Writes scc blocks (the Python scc format) to a file.
612
+ """
613
+ if num_parameters == -1:
614
+ for block in stuff:
615
+ if len(block[0]) == 0:
616
+ continue
617
+ num_gens, num_parameters_= np.asarray(block[0]).shape
618
+ num_parameters = num_parameters_
619
+ break
620
+ assert num_parameters > 0, f"Invalid number of parameters {num_parameters}"
621
+
622
+ if reverse_block: stuff.reverse()
623
+ with open(path, "w") as f:
624
+ f.write("scc2020\n") if not rivet_compatible else f.write("firep\n")
625
+ if not strip_comments and not rivet_compatible: f.write("# Number of parameters\n")
626
+ if rivet_compatible:
627
+ assert num_parameters == 2
628
+ f.write("Filtration 1\n")
629
+ f.write("Filtration 2\n")
630
+ else:
631
+ f.write(f"{num_parameters}\n")
632
+
633
+ if not strip_comments: f.write("# Sizes of generating sets\n")
634
+ for block in stuff: f.write(f"{len(block[0])} ")
635
+ f.write("\n")
636
+ for i,block in enumerate(stuff):
637
+ if (rivet_compatible or ignore_last_generators) and i == len(stuff)-1: continue
638
+ if not strip_comments: f.write(f"# Block of dimension {len(stuff)-1-i}\n")
639
+ filtration, boundary = block
640
+ filtration = np.asarray(filtration).astype(str)
641
+ # boundary = tuple(x.astype(str) for x in boundary)
642
+ f.write(" ".join(itertools.chain.from_iterable(
643
+ ((*(f.tolist()), ";", *(np.asarray(b).astype(str).tolist()), "\n")
644
+ for f,b in zip(filtration, boundary))
645
+ )
646
+ ))
647
+ # for j in range(<int>len(filtration)):
648
+ # line = " ".join((
649
+ # *filtration[j],
650
+ # ";",
651
+ # *boundary[j],
652
+ # "\n",
653
+ # ))
654
+ # f.write(line)
655
+
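A hedged round-trip sketch for the writer above: parse a placeholder scc file, write it back with `scc2disk`, and re-parse the result.

```python
# Minimal sketch: blocks -> scc2020 file -> blocks again.
import multipers.io as mio

blocks = mio.scc_parser("input.scc")        # placeholder input
mio.scc2disk(blocks, path="roundtrip.scc")  # write in scc2020 format
again = mio.scc_parser("roundtrip.scc")
print(len(blocks), len(again))              # same number of blocks expected
```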
656
+ def scc2disk_old(
657
+ stuff,
658
+ path:str|os.PathLike,
659
+ num_parameters = -1,
660
+ reverse_block = False,
661
+ rivet_compatible = False,
662
+ ignore_last_generators = False,
663
+ strip_comments = False,
664
+ ):
665
+ """
666
+ Writes scc blocks (the Python scc format) to a file.
667
+ """
668
+ if num_parameters == -1:
669
+ for block in stuff:
670
+ if len(block[0]) == 0:
671
+ continue
672
+ num_gens, num_parameters_= np.asarray(block[0]).shape
673
+ num_parameters = num_parameters_
674
+ break
675
+ assert num_parameters > 0, f"Invalid number of parameters {num_parameters}"
676
+
677
+ if reverse_block: stuff.reverse()
678
+ out = []
679
+ if rivet_compatible:
680
+ out.append(r"firep")
681
+ else:
682
+ out.append(r"scc2020")
683
+ if not strip_comments and not rivet_compatible:
684
+ out.append(r"# Number of parameters")
685
+ if rivet_compatible:
686
+ out.append("Filtration 1")
687
+ out.append("Filtration 2\n")
688
+ else:
689
+ out.append(f"{num_parameters}")
690
+
691
+ if not strip_comments:
692
+ out.append("# Sizes of generating sets")
693
+
694
+ # for block in stuff:
695
+ # f.write(f"{len(block[0])} ")
696
+ out.append(" ".join(str(len(block[0])) for block in stuff))
697
+ str_blocks = [out]
698
+ for i,block in enumerate(stuff):
699
+ if (rivet_compatible or ignore_last_generators) and i == len(stuff)-1: continue
700
+ if not strip_comments:
701
+ str_blocks.append([f"# Block of dimension {len(stuff)-1-i}"])
702
+ filtration, boundary = block
703
+ if len(filtration) == 0:
704
+ continue
705
+ filtration = filtration.astype(str)
706
+ C = filtration[:,0]
707
+ for i in range(1,filtration.shape[1]):
708
+ C = np.char.add(C," ")
709
+ C = np.char.add(C,filtration[:,i])
710
+ C = np.char.add(C, ";")
711
+ D = np.asarray([" ".join(np.asarray(b).astype(str).tolist()) for b in boundary]) # let numpy infer a string dtype wide enough for the joined boundary entries
712
+ str_blocks.append(np.char.add(C,D))
713
+
714
+ np.savetxt("test.scc", np.concatenate(str_blocks), delimiter="", fmt="%s")