fastremap 1.14.2__tar.gz → 1.15.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fastremap-1.14.2 → fastremap-1.15.0}/.github/workflows/build_wheel.yml +1 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/AUTHORS +1 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/ChangeLog +10 -0
- fastremap-1.15.0/MANIFEST.in +3 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/PKG-INFO +2 -2
- {fastremap-1.14.2 → fastremap-1.15.0}/README.md +1 -1
- {fastremap-1.14.2 → fastremap-1.15.0}/automated_test.py +27 -18
- {fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/PKG-INFO +2 -2
- {fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/SOURCES.txt +7 -5
- fastremap-1.15.0/fastremap.egg-info/pbr.json +1 -0
- fastremap-1.15.0/pyproject.toml +7 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/setup.py +2 -2
- {fastremap-1.14.2 → fastremap-1.15.0/src}/fastremap.pyx +124 -34
- {fastremap-1.14.2 → fastremap-1.15.0}/test.py +7 -4
- fastremap-1.14.2/fastremap.egg-info/pbr.json +0 -1
- {fastremap-1.14.2 → fastremap-1.15.0}/.dockerignore +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/LICENSE +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/build_linux.sh +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/dependency_links.txt +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/not-zip-safe +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/requires.txt +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/top_level.txt +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/manylinux1.Dockerfile +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/manylinux2010.Dockerfile +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/manylinux2014.Dockerfile +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/requirements.txt +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/requirements_dev.txt +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/setup.cfg +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0/src}/fastremap.pxd +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0/src}/ipt.hpp +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0/src}/ska_flat_hash_map.hpp +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/test.cpp +0 -0
- {fastremap-1.14.2 → fastremap-1.15.0}/tox.ini +0 -0

{fastremap-1.14.2 → fastremap-1.15.0}/ChangeLog

@@ -1,6 +1,16 @@
 CHANGES
 =======
 
+1.15.0
+------
+
+* docs: remove travis badge
+* refactor: clean up repo and put source files in "src" dir
+* ci: add arm64 and x86_64 to macos build
+* perf: add return_inverse to fr.unique
+* perf(unique/array): avoid two operations for return_index
+* feat: point_cloud supports 2d images
+
 1.14.2
 ------
 
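
The headline change is the `return_inverse` flag on `fastremap.unique`, which is now computed natively instead of falling back to `np.unique`. A minimal usage sketch, assuming the inverse comes back shaped like the input (which is what the `reshape` call in the implementation further down indicates); the labels array here is made up:

```python
import numpy as np
import fastremap

# hypothetical labeled image
labels = np.random.randint(0, 500, size=(128, 128, 128), dtype=np.uint32)

# new in 1.15.0: return_inverse handled natively
uniq, inv = fastremap.unique(labels, return_inverse=True)

# the inverse index reconstructs the original labels from the unique values
assert np.all(uniq[inv] == labels)
```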

{fastremap-1.14.2 → fastremap-1.15.0}/PKG-INFO

@@ -1,12 +1,12 @@
 Metadata-Version: 2.1
 Name: fastremap
-Version: 1.14.2
+Version: 1.15.0
 Summary: Remap, mask, renumber, unique, and in-place transposition of 3D labeled images. Point cloud too.
 Home-page: https://github.com/seung-lab/fastremap/
 Author: William Silversmith
 Author-email: ws9@princeton.edu
 License: UNKNOWN
-Description: [](https://badge.fury.io/py/fastremap)
 
 # fastremap
 

{fastremap-1.14.2 → fastremap-1.15.0}/README.md

@@ -1,4 +1,4 @@
-[](https://badge.fury.io/py/fastremap)
 
 # fastremap
 

{fastremap-1.14.2 → fastremap-1.15.0}/automated_test.py

@@ -408,72 +408,81 @@ def test_unique(order):
 
 # array_unique
 labels = reorder(np.random.randint(0, 500, size=(128,128,128)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, inv_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 labels = reorder(np.random.randint(0, 500, size=(128,128,128)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True,return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique_via_array(labels.flatten(), np.max(labels), return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, cts_fr, inv_fr = fastremap.unique_via_array(labels.flatten(), np.max(labels), return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np.flatten() == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 # array_unique + shift
 labels = reorder(np.random.randint(-500, 500, size=(128,128,128)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, inv_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 labels = reorder(np.random.randint(-500, 500, size=(128,128,128)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique_via_shifted_array(labels.flatten(), return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, cts_fr, inv_fr = fastremap.unique_via_shifted_array(labels.flatten(), return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np.flatten() == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 # array_unique + shift
 labels = reorder(np.random.randint(128**3 - 500, 128**3 + 500, size=(128,128,128)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, inv_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 # array_unique + shift
 labels = reorder(np.random.randint(128**3 - 500, 128**3 + 500, size=(128,128,128)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique_via_shifted_array(labels.flatten(), return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, cts_fr, inv_fr = fastremap.unique_via_shifted_array(labels.flatten(), return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np.flatten() == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 # renumber + array_unique
 labels = reorder(np.random.randint(0, 1, size=(128,128,128)))
 labels[0,0,0] = 128**3 + 10
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, inv_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 labels = reorder(np.random.randint(0, 1, size=(128,128,128)))
 labels[0,0,0] = 128**3 + 10
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique_via_renumber(labels.flatten(), return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, cts_fr, inv_fr = fastremap.unique_via_renumber(labels.flatten(), return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np.flatten() == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
 # sort
 labels = reorder(np.random.randint(-1000, 128**3, size=(100,100,100)))
-uniq_np, idx_np, cts_np = np.unique(labels, return_counts=True, return_index=True)
-uniq_fr, idx_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True)
+uniq_np, idx_np, inv_np, cts_np = np.unique(labels, return_counts=True, return_index=True, return_inverse=True)
+uniq_fr, idx_fr, inv_fr, cts_fr = fastremap.unique(labels, return_counts=True, return_index=True, return_inverse=True)
 assert np.all(uniq_np == uniq_fr)
+assert np.all(inv_np == inv_fr)
 assert np.all(cts_np == cts_fr)
 assert np.all(labels.flatten()[idx_np] == labels.flatten()[idx_fr])
 
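
Note that the low-level helpers exercised here (`unique_via_array`, `unique_via_shifted_array`, `unique_via_renumber`) now return a four-tuple ordered `(unique, index, counts, inverse)`, whereas `np.unique` orders its optional outputs `(unique, index, inverse, counts)`. A small sketch of the comparison the updated test performs, with a made-up input:

```python
import numpy as np
import fastremap

labels = np.random.randint(0, 500, size=(128, 128, 128))

# numpy's output order: unique, index, inverse, counts
uniq_np, idx_np, inv_np, cts_np = np.unique(
  labels, return_index=True, return_inverse=True, return_counts=True
)

# helper's output order: unique, index, counts, inverse
uniq_fr, idx_fr, cts_fr, inv_fr = fastremap.unique_via_array(
  labels.flatten(), np.max(labels), return_index=True, return_inverse=True
)

assert np.all(uniq_np == uniq_fr)
assert np.all(cts_np == cts_fr)
assert np.all(inv_np.flatten() == inv_fr)
```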

{fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/PKG-INFO

@@ -1,12 +1,12 @@
 Metadata-Version: 2.1
 Name: fastremap
-Version: 1.14.2
+Version: 1.15.0
 Summary: Remap, mask, renumber, unique, and in-place transposition of 3D labeled images. Point cloud too.
 Home-page: https://github.com/seung-lab/fastremap/
 Author: William Silversmith
 Author-email: ws9@princeton.edu
 License: UNKNOWN
-Description: [](https://badge.fury.io/py/fastremap)
 
 # fastremap
 

{fastremap-1.14.2 → fastremap-1.15.0}/fastremap.egg-info/SOURCES.txt

@@ -2,20 +2,18 @@
 AUTHORS
 ChangeLog
 LICENSE
+MANIFEST.in
 README.md
 automated_test.py
 build_linux.sh
-fastremap.pxd
-fastremap.pyx
-ipt.hpp
 manylinux1.Dockerfile
 manylinux2010.Dockerfile
 manylinux2014.Dockerfile
+pyproject.toml
 requirements.txt
 requirements_dev.txt
 setup.cfg
 setup.py
-ska_flat_hash_map.hpp
 test.cpp
 test.py
 tox.ini
@@ -26,4 +24,8 @@ fastremap.egg-info/dependency_links.txt
 fastremap.egg-info/not-zip-safe
 fastremap.egg-info/pbr.json
 fastremap.egg-info/requires.txt
-fastremap.egg-info/top_level.txt
+fastremap.egg-info/top_level.txt
+src/fastremap.pxd
+src/fastremap.pyx
+src/ipt.hpp
+src/ska_flat_hash_map.hpp

fastremap-1.15.0/fastremap.egg-info/pbr.json (new file)

@@ -0,0 +1 @@
+{"git_version": "80ce9fc", "is_release": true}

{fastremap-1.14.2 → fastremap-1.15.0}/setup.py

@@ -28,11 +28,11 @@ setuptools.setup(
 ext_modules=[
 setuptools.Extension(
 'fastremap',
-sources=['fastremap.pyx'],
+sources=['src/fastremap.pyx'],
 depends=[],
 language='c++',
 language_level=3,
-include_dirs=[str(NumpyImport())],
+include_dirs=["src", str(NumpyImport())],
 extra_compile_args=extra_compile_args,
 )
 ],

{fastremap-1.14.2 → fastremap-1.15.0/src}/fastremap.pyx

@@ -753,13 +753,26 @@ def unique(labels, return_index=False, return_inverse=False, return_counts=False
 
 return_index: also return the index of the first detected occurance
 of each label.
+return_inverse: If True, also return the indices of the unique array
+(for the specified axis, if provided) that can be used to reconstruct
+the input array.
 return_counts: also return the unique label frequency as an array.
 
 Returns:
-
-
-
-
+unique ndarray
+The sorted unique values.
+
+unique_indices ndarray, optional
+The indices of the first occurrences of the unique values in the original array.
+Only provided if return_index is True.
+
+unique_inverse ndarray, optional
+The indices to reconstruct the original array from the unique array.
+Only provided if return_inverse is True.
+
+unique_counts ndarray, optional
+The number of times each of the unique values comes up in the original array.
+Only provided if return_counts is True.
 """
 if not isinstance(labels, np.ndarray):
 labels = np.array(labels)
@@ -769,7 +782,7 @@ def unique(labels, return_index=False, return_inverse=False, return_counts=False
 
 # These flags are currently unsupported so call uncle and
 # use the standard implementation instead.
-if
+if axis is not None:
 return np.unique(
 labels,
 return_index=return_index,
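
Per the fallback above, an explicit `axis` argument is still delegated to `np.unique`. A brief sketch of the resulting behavior, assuming `axis` is forwarded along with the other flags (the input array is hypothetical):

```python
import numpy as np
import fastremap

labels = np.array([[1, 2], [1, 2], [3, 4]], dtype=np.uint32)

# axis handling is not implemented natively, so this call should behave
# exactly like numpy's row-wise unique
rows = fastremap.unique(labels, axis=0)
assert np.array_equal(rows, np.unique(labels, axis=0))
```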
@@ -781,7 +794,8 @@ def unique(labels, return_index=False, return_inverse=False, return_counts=False
 cdef size_t voxels = labels.size
 
 shape = labels.shape
-fortran_order = labels.flags
+fortran_order = labels.flags.f_contiguous
+order = "F" if fortran_order else "C"
 labels_orig = labels
 labels = reshape(labels, (voxels,))
 
@@ -792,33 +806,38 @@ def unique(labels, return_index=False, return_inverse=False, return_counts=False
 else:
 min_label, max_label = (0, 0)
 
-def c_order_index(
+def c_order_index(arr):
 if len(shape) > 1 and fortran_order:
 return np.ravel_multi_index(
-np.unravel_index(
+np.unravel_index(arr, shape, order='F'),
 shape, order='C'
 )
-return
+return arr
 
 if voxels == 0:
 uniq = np.array([], dtype=labels.dtype)
 counts = np.array([], dtype=np.uint32)
 index = np.array([], dtype=np.uint64)
+inverse = np.array([], dtype=np.uintp)
 elif min_label >= 0 and max_label < <int64_t>voxels:
-uniq, index, counts = unique_via_array(labels, max_label, return_index=return_index)
+uniq, index, counts, inverse = unique_via_array(labels, max_label, return_index=return_index, return_inverse=return_inverse)
 elif (max_label - min_label) <= <int64_t>voxels:
-uniq, index, counts = unique_via_shifted_array(labels, min_label, max_label, return_index=return_index)
+uniq, index, counts, inverse = unique_via_shifted_array(labels, min_label, max_label, return_index=return_index, return_inverse=return_inverse)
 elif float(pixel_pairs(labels)) / float(voxels) > 0.66:
-uniq, index, counts = unique_via_renumber(labels, return_index=return_index)
-elif return_index:
-return np.unique(labels_orig, return_index=return_index, return_counts=return_counts)
+uniq, index, counts, inverse = unique_via_renumber(labels, return_index=return_index, return_inverse=return_inverse)
+elif return_index or return_inverse:
+return np.unique(labels_orig, return_index=return_index, return_counts=return_counts, return_inverse=return_inverse)
 else:
 uniq, counts = unique_via_sort(labels)
+index = None
+inverse = None
 
 results = [ uniq ]
 if return_index:
 # This is required to match numpy's behavior
 results.append(c_order_index(index))
+if return_inverse:
+results.append(reshape(inverse, shape, order=order))
 if return_counts:
 results.append(counts)
 
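
The dispatch above picks a counting strategy from the label range relative to the voxel count, falling back to sorting (or to `np.unique` when an index or inverse is requested). A rough pure-Python sketch of that decision with illustrative names; `dense_fraction` stands in for `pixel_pairs(labels) / voxels`:

```python
def choose_unique_strategy(min_label, max_label, voxels, dense_fraction):
  # labels already fit a counting array indexed 0..max_label
  if min_label >= 0 and max_label < voxels:
    return "unique_via_array"
  # labels fit after shifting so the minimum becomes zero
  elif (max_label - min_label) <= voxels:
    return "unique_via_shifted_array"
  # mostly contiguous runs of identical labels: renumber, then count
  elif dense_fraction > 0.66:
    return "unique_via_renumber"
  # sparse, wide-ranging labels: sort-based counting
  else:
    return "unique_via_sort"
```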
@@ -826,23 +845,23 @@ def unique(labels, return_index=False, return_inverse=False, return_counts=False
 return tuple(results)
 return uniq
 
-def unique_via_shifted_array(labels, min_label=None, max_label=None, return_index=False):
+def unique_via_shifted_array(labels, min_label=None, max_label=None, return_index=False, return_inverse=False):
 if min_label is None or max_label is None:
 min_label, max_label = minmax(labels)
 
 labels -= min_label
-uniq, idx, counts = unique_via_array(labels, max_label - min_label + 1, return_index)
+uniq, idx, counts, inverse = unique_via_array(labels, max_label - min_label + 1, return_index, return_inverse)
 labels += min_label
 uniq += min_label
-return uniq, idx, counts
+return uniq, idx, counts, inverse
 
-def unique_via_renumber(labels, return_index=False):
+def unique_via_renumber(labels, return_index=False, return_inverse=False):
 dtype = labels.dtype
 labels, remap = renumber(labels)
 remap = { v:k for k,v in remap.items() }
-uniq, idx, counts = unique_via_array(labels, max(remap.keys()), return_index)
+uniq, idx, counts, inverse = unique_via_array(labels, max(remap.keys()), return_index, return_inverse)
 uniq = np.array([ remap[segid] for segid in uniq ], dtype=dtype)
-return uniq, idx, counts
+return uniq, idx, counts, inverse
 
 @cython.boundscheck(False)
 @cython.wraparound(False) # turn off negative index wrapping for entire function
@@ -886,7 +905,7 @@ def unique_via_sort(cnp.ndarray[ALLINT, ndim=1] labels):
 def unique_via_array(
 cnp.ndarray[ALLINT, ndim=1] labels,
 size_t max_label,
-return_index
+return_index, return_inverse,
 ):
 cdef cnp.ndarray[uint64_t, ndim=1] counts = np.zeros(
 (max_label+1,), dtype=np.uint64
@@ -895,9 +914,9 @@ def unique_via_array(
 
 cdef uintptr_t sentinel = np.iinfo(np.uintp).max
 if return_index:
-index = np.
-(max_label+1,), dtype=np.uintp
-)
+index = np.full(
+(max_label+1,), sentinel, dtype=np.uintp
+)
 
 cdef size_t voxels = labels.shape[0]
 cdef size_t i = 0
@@ -935,12 +954,26 @@
 for i in range(max_label + 1):
 if counts[i] > 0:
 idx[j] = index[i]
-j += 1
+j += 1
+
+cdef cnp.ndarray[uintptr_t, ndim=1] mapping
+
+if return_inverse:
+if segids.size:
+mapping = np.zeros([segids[segids.size - 1] + 1], dtype=np.uintp)
+for i in range(real_size):
+mapping[segids[i]] = i
+inverse_idx = mapping[labels]
+else:
+inverse_idx = np.zeros([0], dtype=np.uintp)
 
+ret = [ segids, None, cts, None ]
 if return_index:
-
-
-
+ret[1] = idx
+if return_inverse:
+ret[3] = inverse_idx
+
+return ret
 
 def transpose(arr):
 """
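
The inverse index above is built with a dense lookup table: each unique label value maps to its position in the sorted unique array, and that table is then applied to the flattened labels. A NumPy-only sketch of the same idea (an illustrative helper, not the Cython code itself):

```python
import numpy as np

def inverse_via_mapping(labels, segids):
  """labels: 1-D array of non-negative ints; segids: its sorted unique values."""
  if segids.size == 0:
    return np.zeros([0], dtype=np.uintp)
  # dense table indexed by label value, mirroring the Cython loop
  mapping = np.zeros(segids[-1] + 1, dtype=np.uintp)
  mapping[segids] = np.arange(segids.size, dtype=np.uintp)
  return mapping[labels]
```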
@@ -1225,20 +1258,77 @@ def _foreground(cnp.ndarray[ALLINT, ndim=1] arr):
 n_foreground += <size_t>(arr[i] != 0)
 return n_foreground
 
-
-@cython.wraparound(False) # turn off negative index wrapping for entire function
-@cython.nonecheck(False)
-def point_cloud(cnp.ndarray[ALLINT, ndim=3] arr):
+def point_cloud(arr):
 """
 point_cloud(arr)
 
-Given a 3D integer image, return a mapping from
+Given a 2D or 3D integer image, return a mapping from
 labels to their (x,y,z) position in the image.
 
 Zero is considered a background label.
 
-Returns: ndarray(N, 3, dtype=uint16)
+Returns: ndarray(N, 2 or 3, dtype=uint16)
 """
+if arr.dtype == bool:
+arr = arr.view(np.uint8)
+
+if arr.ndim == 2:
+return _point_cloud_2d(arr)
+else:
+return _point_cloud_3d(arr)
+
+@cython.boundscheck(False)
+@cython.wraparound(False) # turn off negative index wrapping for entire function
+@cython.nonecheck(False)
+def _point_cloud_2d(cnp.ndarray[ALLINT, ndim=2] arr):
+cdef size_t n_foreground = foreground(arr)
+
+cdef size_t sx = arr.shape[0]
+cdef size_t sy = arr.shape[1]
+
+if n_foreground == 0:
+return {}
+
+cdef cnp.ndarray[ALLINT, ndim=1] ptlabel = np.zeros((n_foreground,), dtype=arr.dtype)
+cdef cnp.ndarray[uint16_t, ndim=2] ptcloud = np.zeros((n_foreground, 2), dtype=np.uint16)
+
+cdef size_t i = 0
+cdef size_t j = 0
+
+cdef size_t idx = 0
+for i in range(sx):
+for j in range(sy):
+if arr[i,j] != 0:
+ptlabel[idx] = arr[i,j]
+ptcloud[idx,0] = i
+ptcloud[idx,1] = j
+idx += 1
+
+sortidx = ptlabel.argsort()
+ptlabel = ptlabel[sortidx]
+ptcloud = ptcloud[sortidx]
+del sortidx
+
+ptcloud_by_label = {}
+if n_foreground == 1:
+ptcloud_by_label[ptlabel[0]] = ptcloud
+return ptcloud_by_label
+
+cdef size_t start = 0
+cdef size_t end = 0
+for end in range(1, n_foreground):
+if ptlabel[end] != ptlabel[end - 1]:
+ptcloud_by_label[ptlabel[end - 1]] = ptcloud[start:end,:]
+start = end
+
+ptcloud_by_label[ptlabel[end]] = ptcloud[start:,:]
+
+return ptcloud_by_label
+
+@cython.boundscheck(False)
+@cython.wraparound(False) # turn off negative index wrapping for entire function
+@cython.nonecheck(False)
+def _point_cloud_3d(cnp.ndarray[ALLINT, ndim=3] arr):
 cdef size_t n_foreground = foreground(arr)
 
 cdef size_t sx = arr.shape[0]
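
With this change, `point_cloud` accepts 2D as well as 3D images and returns, per nonzero label, an N×2 (or N×3) uint16 coordinate array. A small usage sketch with a made-up image:

```python
import numpy as np
import fastremap

# hypothetical 2D labeled image; zero is background
img = np.zeros((64, 64), dtype=np.uint32)
img[5, 7] = 3
img[10:12, 20] = 8

ptc = fastremap.point_cloud(img)
# ptc[3] is a (1, 2) uint16 array containing the coordinate (5, 7)
# ptc[8] is a (2, 2) uint16 array containing (10, 20) and (11, 20)
```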

{fastremap-1.14.2 → fastremap-1.15.0}/test.py

@@ -1,11 +1,14 @@
 import fastremap
 import numpy as np
 
-x = np.ones((
-
-print(
+x = np.ones((512,512,512), dtype=np.float32)
+x = fastremap.asfortranarray(x)
+print(x)
+print(x.flags)
+print(x.strides)
+
+print(x.dtype)
 
-print(cts[0] / 2**32)
 
 
 # @profile

fastremap-1.14.2/fastremap.egg-info/pbr.json (deleted)

@@ -1 +0,0 @@
-{"git_version": "9b0e91f", "is_release": true}