siibra 0.4a35__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra might be problematic.
Files changed (35)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +1 -0
  3. siibra/commons.py +38 -17
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +38 -12
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +22 -2
  8. siibra/core/parcellation.py +42 -22
  9. siibra/core/region.py +56 -95
  10. siibra/features/anchor.py +7 -4
  11. siibra/features/connectivity/functional_connectivity.py +8 -1
  12. siibra/features/connectivity/regional_connectivity.py +14 -19
  13. siibra/features/dataset/ebrains.py +1 -1
  14. siibra/features/feature.py +193 -29
  15. siibra/features/image/__init__.py +1 -1
  16. siibra/features/image/image.py +1 -0
  17. siibra/features/image/volume_of_interest.py +14 -5
  18. siibra/features/tabular/__init__.py +2 -0
  19. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  20. siibra/livequeries/ebrains.py +2 -3
  21. siibra/locations/location.py +4 -3
  22. siibra/locations/pointset.py +2 -2
  23. siibra/retrieval/datasets.py +73 -3
  24. siibra/retrieval/repositories.py +17 -6
  25. siibra/retrieval/requests.py +68 -61
  26. siibra/volumes/neuroglancer.py +9 -9
  27. siibra/volumes/nifti.py +4 -5
  28. siibra/volumes/parcellationmap.py +157 -97
  29. siibra/volumes/sparsemap.py +27 -31
  30. siibra/volumes/volume.py +1 -1
  31. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/METADATA +2 -1
  32. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/RECORD +35 -34
  33. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  34. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  35. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
siibra/volumes/parcellationmap.py CHANGED
@@ -24,40 +24,49 @@ from ..commons import (
     clear_name,
     create_key,
     create_gaussian_kernel,
-    Species
+    siibra_tqdm,
+    Species,
+    CompareMapsResult
 )
 from ..core import concept, space, parcellation, region as _region
 from ..locations import point, pointset
 from ..retrieval import requests
 
 import numpy as np
-from tqdm import tqdm
-from typing import Union, Dict, List, TYPE_CHECKING, Iterable
+from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
 from scipy.ndimage import distance_transform_edt
 from collections import defaultdict
 from nibabel import Nifti1Image
 from nilearn import image
 import pandas as pd
+from dataclasses import dataclass, asdict
 
 if TYPE_CHECKING:
     from ..core.region import Region
 
 
-class ExcessiveArgumentException(ValueError):
-    pass
+class ExcessiveArgumentException(ValueError): pass
 
 
-class InsufficientArgumentException(ValueError):
-    pass
+class InsufficientArgumentException(ValueError): pass
 
 
-class ConflictingArgumentException(ValueError):
-    pass
+class ConflictingArgumentException(ValueError): pass
 
 
-class NonUniqueIndexError(RuntimeError):
-    pass
+class NonUniqueIndexError(RuntimeError): pass
 
+@dataclass
+class Assignment:
+    input_structure: int
+    centroid: Union[Tuple[np.ndarray], point.Point]
+    volume: int
+    fragment: str
+    map_value: np.ndarray
+
+
+@dataclass
+class AssignImageResult(CompareMapsResult, Assignment): pass
 
 class Map(concept.AtlasConcept, configuration_folder="maps"):
 
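Point and image assignments are now carried by these small dataclasses instead of ad-hoc dictionaries, which allows attribute access and static type checking. A minimal illustrative sketch of how such a record is built and flattened back into a plain dict (the classes are the ones defined in the hunk above; the values are made up):

    from dataclasses import asdict

    record = Assignment(
        input_structure=0,            # index of the input point or image component
        centroid=(10.0, -8.5, 12.2),  # rounded coordinate of the assigned structure
        volume=3,                     # index of the map volume it was matched against
        fragment=None,                # fragment name, if the volume is fragmented
        map_value=0.87,               # map value observed at that location
    )
    row = asdict(record)              # plain dict, e.g. for building a pandas DataFrame row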
@@ -157,6 +166,9 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         self._parcellation_spec = parcellation_spec
         self._affine_cached = None
         for v in self.volumes:
+            # allow the providers to query their parcellation map if needed
+            for p in v._providers.values():
+                p.parcellation_map = self
             v._space_spec = space_spec
 
     @property
@@ -224,7 +236,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         matched_region_names = set(_.name for _ in (self.parcellation.find(regionname)))
         matches = matched_region_names & self._indices.keys()
         if len(matches) == 0:
-            logger.warn(f"Region {regionname} not defined in {self}")
+            logger.warning(f"Region {regionname} not defined in {self}")
         return {
             idx: regionname
             for regionname in matches
@@ -250,6 +262,8 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         Region
             A region object defined in the parcellation map.
         """
+        if isinstance(label, MapIndex) and index is None:
+            raise TypeError(f"Specify MapIndex with index keyword.")
         if index is None:
             index = MapIndex(volume, label)
         matches = [
@@ -258,13 +272,13 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             if index in indexlist
         ]
         if len(matches) == 0:
-            logger.warn(f"Index {index} not defined in {self}")
+            logger.warning(f"Index {index} not defined in {self}")
             return None
         elif len(matches) == 1:
             return self.parcellation.get_region(matches[0])
         else:
             # this should not happen, already tested in constructor
-            raise RuntimeError(f"Index {index} is not unique in {self}")
+            raise RuntimeError(f"Index {index} is not unique in {self}")
 
     @property
     def space(self):
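get_region() now rejects a MapIndex passed positionally in place of a label and requires the index keyword. A hedged usage sketch, assuming the public siibra.get_map accessor and an arbitrary label value:

    import siibra
    from siibra.commons import MapIndex

    julich_map = siibra.get_map(parcellation="julich 2.9", space="mni152", maptype="labelled")
    region = julich_map.get_region(index=MapIndex(volume=0, label=112))  # MapIndex goes to the index keyword
    # julich_map.get_region(MapIndex(volume=0, label=112))               # would now raise TypeError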
@@ -278,7 +292,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         for key in ["@id", "name"]:
             if key in self._parcellation_spec:
                 return parcellation.Parcellation.get_instance(self._parcellation_spec[key])
-        logger.warn(
+        logger.warning(
             f"Cannot determine parcellation of {self.__class__.__name__} "
             f"{self.name} from {self._parcellation_spec}"
         )
@@ -410,7 +424,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                 print(str(e))
 
         if result is None:
-            raise RuntimeError(f"Error fetching {mapindex} from {self} as {kwargs['format']}.")
+            raise RuntimeError(f"Error fetching {mapindex} from {self} as {kwargs.get('format', f'{self.formats}')}.")
         return result
 
     def fetch_iter(self, **kwargs):
@@ -498,12 +512,12 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
         next_labelindex = 1
         region_indices = defaultdict(list)
 
-        for volidx in tqdm(
+        for volidx in siibra_tqdm(
            range(len(self.volumes)), total=len(self.volumes), unit='maps',
            desc=f"Compressing {len(self.volumes)} {self.maptype.name.lower()} volumes into single-volume parcellation",
            disable=(len(self.volumes) == 1)
        ):
-            for frag in tqdm(
+            for frag in siibra_tqdm(
                self.fragments, total=len(self.fragments), unit='maps',
                desc=f"Compressing {len(self.fragments)} {self.maptype.name.lower()} fragments into single-fragment parcellation",
                disable=(len(self.fragments) == 1 or self.fragments is None)
@@ -525,7 +539,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                mapindex.__setattr__("label", int(label))
                region = self.get_region(index=mapindex)
                if region is None:
-                    logger.warn(f"Label index {label} is observed in map volume {self}, but no region is defined for it.")
+                    logger.warning(f"Label index {label} is observed in map volume {self}, but no region is defined for it.")
                    continue
                region_indices[region.name].append({"volume": 0, "label": next_labelindex})
                if label is None:
@@ -564,7 +578,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
        regions = sorted(self._indices.items(), key=lambda v: min(_.volume for _ in v[1]))
        current_vol_index = MapIndex(volume=0)
        maparr = None
-        for regionname, indexlist in tqdm(regions, unit="regions", desc="Computing centroids"):
+        for regionname, indexlist in siibra_tqdm(regions, unit="regions", desc="Computing centroids"):
            assert len(indexlist) == 1
            index = indexlist[0]
            if index.label == 0:
@@ -582,7 +596,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
            )
        return centroids
 
-    def colorize(self, values: dict):
+    def colorize(self, values: dict, **kwargs):
        """Colorize the map with the provided regional values.
 
        Parameters
@@ -596,7 +610,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
        """
 
        result = None
-        for volidx, vol in enumerate(self.fetch_iter()):
+        for volidx, vol in enumerate(self.fetch_iter(**kwargs)):
            if isinstance(vol, dict):
                raise NotImplementedError("Map colorization not yet implemented for meshes.")
            img = np.asanyarray(vol.dataobj)
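Since colorize() now forwards its keyword arguments to fetch_iter(), callers can steer how the underlying volumes are fetched while colorizing. A rough sketch, reusing the julich_map handle from the earlier example and assuming that values maps regions (or region names) of this map to scalar values, as the docstring describes:

    values = {region: i for i, region in enumerate(julich_map.regions[:10])}  # arbitrary scalars
    colorized = julich_map.colorize(values, format="nii")  # extra kwargs now reach fetch_iter()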
@@ -747,6 +761,27 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
             for pointindex, value
             in enumerate(np.asanyarray(volimg.dataobj)[x, y, z])
         ]
+
+    def _assign(
+        self,
+        item: Union[point.Point, pointset.PointSet, Nifti1Image],
+        minsize_voxel=1,
+        lower_threshold=0.0
+    ) -> List[Union[Assignment,AssignImageResult]]:
+        """
+        For internal use only. Returns a dataclass, which provides better static type checking.
+        """
+
+        if isinstance(item, point.Point):
+            return self._assign_points(pointset.PointSet([item], item.space, sigma_mm=item.sigma), lower_threshold)
+        if isinstance(item, pointset.PointSet):
+            return self._assign_points(item, lower_threshold)
+        if isinstance(item, Nifti1Image):
+            return self._assign_image(item, minsize_voxel, lower_threshold)
+
+        raise RuntimeError(
+            f"Items of type {item.__class__.__name__} cannot be used for region assignment."
+        )
 
     def assign(
         self,
@@ -800,16 +835,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
            a Point or PointSet, returns None.
        """
 
-        if isinstance(item, point.Point):
-            assignments = self._assign_points(pointset.PointSet([item], item.space, sigma_mm=item.sigma), lower_threshold)
-        elif isinstance(item, pointset.PointSet):
-            assignments = self._assign_points(item, lower_threshold)
-        elif isinstance(item, Nifti1Image):
-            assignments = self._assign_image(item, minsize_voxel, lower_threshold)
-        else:
-            raise RuntimeError(
-                f"Items of type {item.__class__.__name__} cannot be used for region assignment."
-            )
+        assignments = self._assign(item, minsize_voxel, lower_threshold)
 
        # format assignments as pandas dataframe
        columns = [
@@ -827,50 +853,82 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
            "input containedness"
        ]
        if len(assignments) == 0:
-            df = pd.DataFrame(columns=columns)
-        else:
-
-            # determine the unique set of observed indices in order to do region lookups
-            # only once for each map index occuring in the point list
-            labelled = self.is_labelled # avoid calling this in a loop
-            observed_indices = { # unique set of observed map indices. NOTE: len(observed_indices) << len(assignments)
-                (
-                    a.get('volume'),
-                    a.get('fragment'),
-                    a.get('map value') if labelled else None
-                )
-                for a in assignments
-            }
-            region_lut = { # lookup table of observed region objects
-                (v, f, l): self.get_region(
-                    index=MapIndex(
-                        volume=int(v),
-                        label=l if l is None else int(l),
-                        fragment=f
-                    )
+            return pd.DataFrame(columns=columns)
+        # determine the unique set of observed indices in order to do region lookups
+        # only once for each map index occuring in the point list
+        labelled = self.is_labelled # avoid calling this in a loop
+        observed_indices = { # unique set of observed map indices. NOTE: len(observed_indices) << len(assignments)
+            (
+                a.volume,
+                a.fragment,
+                a.map_value if labelled else None
+            )
+            for a in assignments
+        }
+        region_lut = { # lookup table of observed region objects
+            (v, f, l): self.get_region(
+                index=MapIndex(
+                    volume=int(v),
+                    label=l if l is None else int(l),
+                    fragment=f
                )
-                for v, f, l in observed_indices
-            }
-
-            for a in assignments:
-                a["region"] = region_lut[
-                    a.get('volume'),
-                    a.get('fragment'),
-                    a.get('map value') if labelled else None
-                ]
-                a['map containedness'] = a.pop('intersection over first', None)
-                a['input containedness'] = a.pop('intersection over second', None)
-                a['map weighted mean'] = a.pop('weighted mean of first', None)
-                a['input weighted mean'] = a.pop('weighted mean of second', None)
-            df = pd.DataFrame(assignments)
+            )
+            for v, f, l in observed_indices
+        }
 
+        dataframe_list = []
+        for a in assignments:
+            item_to_append = {
+                "input structure": a.input_structure,
+                "centroid": a.centroid,
+                "volume": a.volume,
+                "fragment": a.fragment,
+                "region": region_lut[
+                    a.volume,
+                    a.fragment,
+                    a.map_value if labelled else None
+                ],
+            }
+            # because AssignImageResult is a subclass of Assignment
+            # need to check for isinstance AssignImageResult first
+            if isinstance(a, AssignImageResult):
+                item_to_append = {
+                    **item_to_append,
+                    **{
+                        "correlation": a.correlation,
+                        "intersection over union": a.intersection_over_union,
+                        "map value": a.map_value,
+                        "map weighted mean": a.weighted_mean_of_first,
+                        "map containedness": a.intersection_over_first,
+                        "input weighted mean": a.weighted_mean_of_second,
+                        "input containedness": a.intersection_over_second,
+                    }
+                }
+            elif isinstance(a, Assignment):
+                item_to_append = {
+                    **item_to_append,
+                    **{
+                        "correlation": None,
+                        "intersection over union": None,
+                        "map value": None,
+                        "map weighted mean": None,
+                        "map containedness": None,
+                        "input weighted mean": None,
+                        "input containedness": None,
+                    }
+                }
+            else:
+                raise RuntimeError(f"assignments must be of type Assignment or AssignImageResult!")
+
+            dataframe_list.append(item_to_append)
+        df = pd.DataFrame(dataframe_list)
        return (
            df
            .convert_dtypes() # convert will guess numeric column types
            .reindex(columns=columns)
        )
 
-    def _assign_points(self, points, lower_threshold: float):
+    def _assign_points(self, points:pointset.PointSet, lower_threshold: float) -> List[Assignment]:
        """
        assign a PointSet to this parcellation map.
 
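The public assign() method still returns a pandas DataFrame; only the internal bookkeeping switched from dictionaries to the dataclasses above. A hedged usage sketch, reusing the julich_map handle from the earlier example and assuming Point takes a coordinate tuple, a space spec, and an uncertainty via sigma_mm:

    from siibra.locations import point

    pt = point.Point((27.75, -32.0, 63.725), space="mni152", sigma_mm=3.0)  # arbitrary MNI coordinate
    df = julich_map.assign(pt)
    # columns as listed above: input structure, centroid, volume, fragment, region,
    # correlation, intersection over union, map value, map weighted mean,
    # map containedness, input weighted mean, input containedness
    print(df[["region", "map value", "input containedness"]].head())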
@@ -904,20 +962,20 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                if value > lower_threshold:
                    position = pts_warped[pointindex].coordinate
                    assignments.append(
-                        {
-                            "input structure": pointindex,
-                            "centroid": tuple(np.array(position).round(2)),
-                            "volume": vol,
-                            "fragment": frag,
-                            "map value": value
-                        }
+                        Assignment(
+                            input_structure=pointindex,
+                            centroid=tuple(np.array(position).round(2)),
+                            volume=vol,
+                            fragment=frag,
+                            map_value=value
+                        )
                    )
            return assignments
 
        # if we get here, we need to handle each point independently.
        # This is much slower but more precise in dealing with the uncertainties
        # of the coordinates.
-        for pointindex, pt in tqdm(
+        for pointindex, pt in siibra_tqdm(
            enumerate(points.warp(self.space.id)),
            total=len(points), desc="Warping points",
        ):
@@ -931,13 +989,13 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                for _, vol, frag, value in values:
                    if value > lower_threshold:
                        assignments.append(
-                            {
-                                "input structure": pointindex,
-                                "centroid": tuple(pt),
-                                "volume": vol,
-                                "fragment": frag,
-                                "map value": value
-                            }
+                            Assignment(
+                                input_structure=pointindex,
+                                centroid=tuple(pt),
+                                volume=vol,
+                                fragment=frag,
+                                map_value=value
+                            )
                        )
            else:
                logger.info(
@@ -951,13 +1009,13 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                # build niftiimage with the Gaussian blob,
                # then recurse into this method with the image input
                W = Nifti1Image(dataobj=kernel, affine=np.dot(self.affine, shift))
-                for entry in self.assign(W, lower_threshold=lower_threshold).T.to_dict().values():
-                    entry["input structure"] = pointindex
-                    entry["centroid"] = tuple(pt)
+                for entry in self._assign(W, lower_threshold=lower_threshold):
+                    entry.input_structure=pointindex
+                    entry.centroid=tuple(pt)
                    assignments.append(entry)
        return assignments
 
-    def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float):
+    def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float) -> List[AssignImageResult]:
        """
        Assign an image volume to this parcellation map.
 
@@ -991,7 +1049,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
            # but only if the sequence is long.
            seqlen = N or len(it)
            return iter(it) if seqlen < min_elements \
-                else tqdm(it, desc=desc, total=N)
+                else siibra_tqdm(it, desc=desc, total=N)
 
        with QUIET and _volume.SubvolumeProvider.UseCaching():
            for frag in self.fragments or {None}:
@@ -1012,14 +1070,16 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
                    targetimg = vol_img if label is None \
                        else Nifti1Image((vol_data == label).astype('uint8'), vol_img.affine)
                    scores = compare_maps(maskimg, targetimg)
-                    if scores["intersection over union"] > 0:
-                        info = {
-                            "input structure": mode,
-                            "centroid": tuple(position.round(2)),
-                            "volume": vol,
-                            "fragment": frag,
-                            "map value": label
-                        }
-                        assignments.append(dict(**info, **scores))
+                    if scores.intersection_over_union > 0:
+                        assignments.append(
+                            AssignImageResult(
+                                input_structure=mode,
+                                centroid=tuple(position.round(2)),
+                                volume=vol,
+                                fragment=frag,
+                                map_value=label,
+                                **asdict(scores)
+                            )
+                        )
 
        return assignments
siibra/volumes/sparsemap.py CHANGED
@@ -15,16 +15,15 @@
 """Represents lists of probabilistic brain region maps."""
 from . import parcellationmap, volume as _volume
 
-from ..commons import MapIndex, logger, iterate_connected_components
+from ..commons import MapIndex, logger, iterate_connected_components, siibra_tqdm
 from ..locations import boundingbox
 from ..retrieval import cache
 
 from os import path
 import gzip
-from typing import Dict, Union, TYPE_CHECKING
+from typing import Dict, Union, TYPE_CHECKING, List
 from nilearn import image
 from nibabel import Nifti1Image, load
-from tqdm import tqdm
 import numpy as np
 
 if TYPE_CHECKING:
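tqdm is no longer imported here directly; progress bars now go through siibra_tqdm from siibra.commons, and the per-call disable=logger.level > 20 arguments disappear in the hunks below. The wrapper itself is not part of this diff; a plausible sketch, assuming it merely centralizes that disable logic:

    from tqdm import tqdm

    # hypothetical sketch of the commons helper, not the actual implementation
    def siibra_tqdm(iterable=None, *args, **kwargs):
        # hide the progress bar when siibra's logger is above INFO, unless the caller overrides
        kwargs.setdefault("disable", logger.level > 20)
        return tqdm(iterable, *args, **kwargs)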
@@ -163,12 +162,11 @@ class SparseIndex:
 
        with gzip.open(probsfile, "rt") as f:
            lines = f.readlines()
-            for line in tqdm(
+            for line in siibra_tqdm(
                lines,
                total=len(lines),
                desc="Loading sparse index",
-                unit="voxels",
-                disable=logger.level > 20,
+                unit="voxels"
            ):
                fields = line.strip().split(" ")
                mapindices = list(map(int, fields[0::2]))
@@ -247,15 +245,14 @@ class SparseMap(parcellationmap.Map):
    @property
    def sparse_index(self):
        if self._sparse_index_cached is None:
-            prefix = f"{self.parcellation.id}_{self.space.id}_{self.maptype}_index"
+            prefix = f"{self.parcellation.id}_{self.space.id}_{self.maptype}_{self.name}_index"
            spind = SparseIndex.from_cache(prefix)
-            with _volume.SubvolumeProvider.UseCaching():
-                if spind is None:
+            if spind is None:
+                with _volume.SubvolumeProvider.UseCaching():
                    spind = SparseIndex()
-                    for vol in tqdm(
+                    for vol in siibra_tqdm(
                        range(len(self)), total=len(self), unit="maps",
-                        desc=f"Fetching {len(self)} volumetric maps",
-                        disable=logger.level > 20,
+                        desc=f"Fetching {len(self)} volumetric maps"
                    ):
                        img = super().fetch(
                            index=MapIndex(volume=vol, label=None)
@@ -265,8 +262,8 @@ class SparseMap(parcellationmap.Map):
                        logger.error(f"Cannot retrieve volume #{vol} for {region.name}, it will not be included in the sparse map.")
                        continue
                    spind.add_img(img)
-            spind.to_cache(prefix)
-            self._sparse_index_cached = spind
+                spind.to_cache(prefix)
+                self._sparse_index_cached = spind
        assert self._sparse_index_cached.max() == len(self._sparse_index_cached.probs) - 1
        return self._sparse_index_cached
 
@@ -365,7 +362,7 @@ class SparseMap(parcellationmap.Map):
            for volume, value in spind.probs[voxel].items()
        )
 
-    def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float):
+    def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float) -> List[parcellationmap.AssignImageResult]:
        """
        Assign an image volume to this parcellation map.
 
@@ -416,12 +413,11 @@ class SparseMap(parcellationmap.Map):
 
        spind = self.sparse_index
 
-        for volume in tqdm(
+        for volume in siibra_tqdm(
            range(len(self)),
            desc=f"Assigning structure #{mode} to {len(self)} sparse maps",
            total=len(self),
-            unit=" map",
-            disable=logger.level > 20,
+            unit=" map"
        ):
            bbox1 = boundingbox.BoundingBox(
                self.sparse_index.bboxes[volume]["minpoint"],
@@ -475,19 +471,19 @@ class SparseMap(parcellationmap.Map):
                maxval = v1.max()
 
                assignments.append(
-                    {
-                        "input structure": mode,
-                        "centroid": tuple(position.round(2)),
-                        "volume": volume,
-                        "fragment": None,
-                        "map value": maxval,
-                        "intersection over union": iou,
-                        "intersection over first": intersection / (v1 > 0).sum(),
-                        "intersection over second": intersection / (v2 > 0).sum(),
-                        "correlation": rho,
-                        "weighted mean of first": np.sum(v1 * v2) / np.sum(v2),
-                        "weighted mean of second": np.sum(v1 * v2) / np.sum(v1)
-                    }
+                    parcellationmap.AssignImageResult(
+                        input_structure=mode,
+                        centroid=tuple(position.round(2)),
+                        volume=volume,
+                        fragment=None,
+                        map_value=maxval,
+                        intersection_over_union=iou,
+                        intersection_over_first=intersection / (v1 > 0).sum(),
+                        intersection_over_second=intersection / (v2 > 0).sum(),
+                        correlation=rho,
+                        weighted_mean_of_first=np.sum(v1 * v2) / np.sum(v2),
+                        weighted_mean_of_second=np.sum(v1 * v2) / np.sum(v1)
+                    )
                )
 
        return assignments
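For reference, the CompareMapsResult/AssignImageResult field names used here line up with the DataFrame columns that Map.assign() produces (see the column renaming in the parcellationmap.py hunk above):

    # dataclass field            ->  assign() DataFrame column
    # map_value                  ->  "map value"
    # correlation                ->  "correlation"
    # intersection_over_union    ->  "intersection over union"
    # intersection_over_first    ->  "map containedness"
    # intersection_over_second   ->  "input containedness"
    # weighted_mean_of_first     ->  "map weighted mean"
    # weighted_mean_of_second    ->  "input weighted mean"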
siibra/volumes/volume.py CHANGED
@@ -84,7 +84,7 @@ class Volume:
        def concat(url: Union[str, Dict[str, str]], concat: str):
            if isinstance(url, str):
                return url + concat
-            return { key: url[key] + concat for key in url }
+            return {key: url[key] + concat for key in url}
        return {
            srctype: concat(prov._url, f" {prov.label}" if hasattr(prov, "label") else "")
            for srctype, prov in self._providers.items()
{siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: siibra
-Version: 0.4a35
+Version: 0.4a46
 Summary: siibra - Software interfaces for interacting with brain atlases
 Home-page: https://github.com/FZJ-INM1-BDA/siibra-python
 Author: Big Data Analytics Group, Forschungszentrum Juelich, Institute of Neuroscience and Medicine (INM-1)
@@ -22,6 +22,7 @@ Requires-Dist: scikit-image
 Requires-Dist: requests
 Requires-Dist: neuroglancer-scripts
 Requires-Dist: nilearn
+Requires-Dist: filelock
 Requires-Dist: typing-extensions ; python_version < "3.8"
 
 |License| |PyPI version| |Python versions| |Documentation Status|