siibra 1.0.1a0__py3-none-any.whl → 1.0.1a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic; see the package registry's advisory page for more details.

Files changed (82)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +11 -20
  3. siibra/commons.py +17 -14
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +6 -6
  6. siibra/configuration/factory.py +10 -9
  7. siibra/core/__init__.py +2 -2
  8. siibra/core/assignment.py +2 -1
  9. siibra/core/atlas.py +4 -4
  10. siibra/core/concept.py +7 -5
  11. siibra/core/parcellation.py +10 -10
  12. siibra/core/region.py +82 -73
  13. siibra/core/space.py +5 -7
  14. siibra/core/structure.py +4 -4
  15. siibra/exceptions.py +6 -2
  16. siibra/explorer/__init__.py +1 -1
  17. siibra/explorer/url.py +2 -2
  18. siibra/explorer/util.py +1 -1
  19. siibra/features/__init__.py +1 -1
  20. siibra/features/anchor.py +4 -6
  21. siibra/features/connectivity/__init__.py +1 -1
  22. siibra/features/connectivity/functional_connectivity.py +1 -1
  23. siibra/features/connectivity/regional_connectivity.py +12 -15
  24. siibra/features/connectivity/streamline_counts.py +1 -1
  25. siibra/features/connectivity/streamline_lengths.py +1 -1
  26. siibra/features/connectivity/tracing_connectivity.py +1 -1
  27. siibra/features/dataset/__init__.py +1 -1
  28. siibra/features/dataset/ebrains.py +2 -2
  29. siibra/features/feature.py +31 -28
  30. siibra/features/image/__init__.py +5 -3
  31. siibra/features/image/image.py +4 -6
  32. siibra/features/image/sections.py +82 -3
  33. siibra/features/image/volume_of_interest.py +1 -9
  34. siibra/features/tabular/__init__.py +2 -2
  35. siibra/features/tabular/bigbrain_intensity_profile.py +3 -2
  36. siibra/features/tabular/cell_density_profile.py +10 -11
  37. siibra/features/tabular/cortical_profile.py +9 -9
  38. siibra/features/tabular/gene_expression.py +7 -6
  39. siibra/features/tabular/layerwise_bigbrain_intensities.py +5 -4
  40. siibra/features/tabular/layerwise_cell_density.py +5 -7
  41. siibra/features/tabular/receptor_density_fingerprint.py +47 -19
  42. siibra/features/tabular/receptor_density_profile.py +2 -3
  43. siibra/features/tabular/regional_timeseries_activity.py +9 -9
  44. siibra/features/tabular/tabular.py +10 -9
  45. siibra/livequeries/__init__.py +1 -1
  46. siibra/livequeries/allen.py +23 -25
  47. siibra/livequeries/bigbrain.py +252 -55
  48. siibra/livequeries/ebrains.py +14 -11
  49. siibra/livequeries/query.py +5 -5
  50. siibra/locations/__init__.py +19 -10
  51. siibra/locations/boundingbox.py +10 -13
  52. siibra/{experimental/plane3d.py → locations/experimental.py} +117 -17
  53. siibra/locations/location.py +11 -13
  54. siibra/locations/point.py +10 -19
  55. siibra/locations/pointcloud.py +59 -23
  56. siibra/retrieval/__init__.py +1 -1
  57. siibra/retrieval/cache.py +2 -1
  58. siibra/retrieval/datasets.py +23 -17
  59. siibra/retrieval/exceptions/__init__.py +1 -1
  60. siibra/retrieval/repositories.py +14 -15
  61. siibra/retrieval/requests.py +32 -30
  62. siibra/vocabularies/__init__.py +2 -3
  63. siibra/volumes/__init__.py +5 -4
  64. siibra/volumes/parcellationmap.py +55 -20
  65. siibra/volumes/providers/__init__.py +1 -1
  66. siibra/volumes/providers/freesurfer.py +7 -7
  67. siibra/volumes/providers/gifti.py +5 -5
  68. siibra/volumes/providers/neuroglancer.py +25 -28
  69. siibra/volumes/providers/nifti.py +7 -7
  70. siibra/volumes/providers/provider.py +4 -3
  71. siibra/volumes/sparsemap.py +8 -7
  72. siibra/volumes/volume.py +33 -40
  73. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/METADATA +21 -8
  74. siibra-1.0.1a2.dist-info/RECORD +80 -0
  75. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/WHEEL +1 -1
  76. siibra/experimental/__init__.py +0 -19
  77. siibra/experimental/contour.py +0 -61
  78. siibra/experimental/cortical_profile_sampler.py +0 -57
  79. siibra/experimental/patch.py +0 -98
  80. siibra-1.0.1a0.dist-info/RECORD +0 -84
  81. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/LICENSE +0 -0
  82. {siibra-1.0.1a0.dist-info → siibra-1.0.1a2.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,19 +14,18 @@
14
14
  # limitations under the License.
15
15
  """Metadata connection to EBRAINS datasets."""
16
16
 
17
- from .requests import MultiSourcedRequest, GitlabProxy, GitlabProxyEnum
18
-
19
17
  import re
20
- from typing import Union, List
21
- from abc import ABC, abstractproperty
18
+ from abc import ABC, abstractmethod
22
19
  from hashlib import md5
23
-
20
+ from typing import Union, List
24
21
  try:
25
22
  from typing import TypedDict
26
23
  except ImportError:
27
24
  # support python 3.7
28
25
  from typing_extensions import TypedDict
29
26
 
27
+ from .requests import MultiSourcedRequest, GitlabProxy, GitlabProxyEnum
28
+
30
29
 
31
30
  class EbrainsDatasetUrl(TypedDict):
32
31
  url: str
@@ -48,31 +47,38 @@ EbrainsDatasetEmbargoStatus = TypedDict("EbrainsDatasetEmbargoStatus", {
48
47
 
49
48
 
50
49
  class EbrainsBaseDataset(ABC):
51
- @abstractproperty
50
+ @property
51
+ @abstractmethod
52
52
  def id(self) -> str:
53
53
  raise NotImplementedError
54
54
 
55
- @abstractproperty
55
+ @property
56
+ @abstractmethod
56
57
  def name(self) -> str:
57
58
  raise NotImplementedError
58
59
 
59
- @abstractproperty
60
+ @property
61
+ @abstractmethod
60
62
  def urls(self) -> List[EbrainsDatasetUrl]:
61
63
  raise NotImplementedError
62
64
 
63
- @abstractproperty
65
+ @property
66
+ @abstractmethod
64
67
  def description(self) -> str:
65
68
  raise NotImplementedError
66
69
 
67
- @abstractproperty
70
+ @property
71
+ @abstractmethod
68
72
  def contributors(self) -> List[EbrainsDatasetPerson]:
69
73
  raise NotImplementedError
70
74
 
71
- @abstractproperty
75
+ @property
76
+ @abstractmethod
72
77
  def ebrains_page(self) -> str:
73
78
  raise NotImplementedError
74
79
 
75
- @abstractproperty
80
+ @property
81
+ @abstractmethod
76
82
  def custodians(self) -> List[EbrainsDatasetPerson]:
77
83
  raise NotImplementedError
78
84
 
@@ -319,12 +325,12 @@ class EbrainsV3Dataset(EbrainsBaseDataset):
319
325
  @property
320
326
  def contributors(self):
321
327
  if self._contributers is None:
322
- contributers = {}
328
+ contributors = {}
323
329
  for version_id in self.version_ids:
324
- contributers.update(
330
+ contributors.update(
325
331
  {c['@id']: c for c in EbrainsV3DatasetVersion(version_id).contributors}
326
332
  )
327
- self._contributers = list(contributers.values())
333
+ self._contributers = list(contributors.values())
328
334
  return self._contributers
329
335
 
330
336
  @property
@@ -342,7 +348,7 @@ class EbrainsV3Dataset(EbrainsBaseDataset):
342
348
  return [version.get("id") for version in self._detail.get("versions", [])]
343
349
 
344
350
 
345
- class GenericDataset():
351
+ class GenericDataset:
346
352
 
347
353
  def __init__(
348
354
  self,
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,25 +14,24 @@
14
14
  # limitations under the License.
15
15
  """Connect to repositories to browse and pull files within."""
16
16
 
17
+ from abc import ABC, abstractmethod
18
+ from urllib.parse import quote
19
+ import pathlib
20
+ import os
21
+ from zipfile import ZipFile
22
+ from typing import List
23
+
24
+ from .cache import CACHE
17
25
  from .requests import (
18
26
  HttpRequest,
19
27
  EbrainsRequest,
20
28
  SiibraHttpRequestError,
21
- find_suitiable_decoder,
29
+ find_suitable_decoder,
22
30
  DECODERS,
23
31
  FileLoader
24
32
  )
25
- from .cache import CACHE
26
-
27
33
  from ..commons import logger, siibra_tqdm
28
34
 
29
- from abc import ABC, abstractmethod
30
- from urllib.parse import quote
31
- import pathlib
32
- import os
33
- from zipfile import ZipFile
34
- from typing import List
35
-
36
35
 
37
36
  class RepositoryConnector(ABC):
38
37
  """
@@ -67,7 +66,7 @@ class RepositoryConnector(ABC):
67
66
  pass
68
67
 
69
68
  def _decode_response(self, response, filename: str):
70
- decoder = find_suitiable_decoder(filename)
69
+ decoder = find_suitable_decoder(filename)
71
70
  return decoder(response) if decoder else response
72
71
 
73
72
  def get(self, filename, folder="", decode_func=None):
@@ -196,7 +195,7 @@ class GithubConnector(RepositoryConnector):
196
195
  if len(matched_reftags) == 1:
197
196
  self._want_commit_cached = matched_reftags[0]["commit"]
198
197
  else:
199
- raise RuntimeError(f"Found {len(matched_reftags)} mathces to {reftag}")
198
+ raise RuntimeError(f"Found {len(matched_reftags)} matches to {reftag}")
200
199
  self._tag_checked = True
201
200
  except Exception:
202
201
  logger.warning("Could not connect to GitHub repository.", exc_info=1)
@@ -258,7 +257,7 @@ class GitlabConnector(RepositoryConnector):
258
257
  n.b. only archive_mode should only be set for trusted domains. Extraction of archive can result in files created outside the path
259
258
  see https://docs.python.org/3/library/tarfile.html#tarfile.TarFile.extractall
260
259
  """
261
- # TODO: the query builder needs to check wether the reftag is a branch, and then not cache.
260
+ # TODO: the query builder needs to check whether the reftag is a branch, and then not cache.
262
261
  assert server.startswith("http")
263
262
  RepositoryConnector.__init__(
264
263
  self, base_url=f"{server}/api/v4/projects/{project}/repository"
@@ -584,7 +583,7 @@ class EbrainsPublicDatasetConnector(RepositoryConnector):
584
583
  Part of dataset title as an alternative dataset specification (will ignore dataset_id then)
585
584
  in_progress: bool (default:False)
586
585
  If true, will request datasets that are still under curation.
587
- Will only work when autenticated with an appropriately privileged
586
+ Will only work when authenticated with an appropriately privileged
588
587
  user account.
589
588
  """
590
589
  self.dataset_id = dataset_id
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,35 +14,37 @@
14
14
  # limitations under the License.
15
15
  """Request files with decoders, lazy loading, and caching."""
16
16
 
17
- from .cache import CACHE, cache_user_fn
18
- from .exceptions import EbrainsAuthenticationError
19
- from ..commons import (
20
- logger,
21
- HBP_AUTH_TOKEN,
22
- KEYCLOAK_CLIENT_ID,
23
- KEYCLOAK_CLIENT_SECRET,
24
- siibra_tqdm,
25
- SIIBRA_USE_LOCAL_SNAPSPOT,
26
- )
27
- from .. import __version__
28
-
29
17
  import json
30
18
  from zipfile import ZipFile
31
19
  import requests
32
20
  import os
33
- from nibabel import Nifti1Image, GiftiImage, streamlines, freesurfer
34
- from skimage import io as skimage_io
35
21
  import gzip
36
22
  from io import BytesIO
37
23
  import urllib.parse
38
- import pandas as pd
39
- import numpy as np
40
24
  from typing import List, Callable, TYPE_CHECKING
41
25
  from enum import Enum
42
26
  from functools import wraps
43
27
  from time import sleep
44
28
  import sys
29
+
45
30
  from filelock import FileLock as Lock
31
+ import numpy as np
32
+ import pandas as pd
33
+ from skimage import io as skimage_io
34
+ from nibabel import Nifti1Image, GiftiImage, streamlines, freesurfer
35
+
36
+ from . import exceptions as _exceptions
37
+ from .cache import CACHE, cache_user_fn
38
+ from .. import __version__
39
+ from ..commons import (
40
+ logger,
41
+ HBP_AUTH_TOKEN,
42
+ KEYCLOAK_CLIENT_ID,
43
+ KEYCLOAK_CLIENT_SECRET,
44
+ siibra_tqdm,
45
+ SIIBRA_USE_LOCAL_SNAPSPOT,
46
+ )
47
+
46
48
  if TYPE_CHECKING:
47
49
  from .repositories import GitlabConnector
48
50
 
@@ -91,11 +93,11 @@ DECODERS = {
91
93
  }
92
94
 
93
95
 
94
- def find_suitiable_decoder(url: str) -> Callable:
96
+ def find_suitable_decoder(url: str) -> Callable:
95
97
  """
96
98
  By supplying a url or a filename, obtain a suitable decoder function
97
- for siibra to digest based on predifined DECODERS. An extra layer of
98
- gzip decompresser automatically added for gzipped files.
99
+ for siibra to digest based on predefined DECODERS. An extra layer of
100
+ gzip decompressor is automatically added for gzipped files.
99
101
 
100
102
  Parameters
101
103
  ----------
@@ -108,7 +110,7 @@ def find_suitiable_decoder(url: str) -> Callable:
108
110
  """
109
111
  urlpath = urllib.parse.urlsplit(url).path
110
112
  if urlpath.endswith(".gz"):
111
- dec = find_suitiable_decoder(urlpath[:-3])
113
+ dec = find_suitable_decoder(urlpath[:-3])
112
114
  if dec is None:
113
115
  return lambda b: gzip.decompress(b)
114
116
  else:
@@ -183,7 +185,7 @@ class HttpRequest:
183
185
  ----------
184
186
  func : Callable, default: None
185
187
  """
186
- self.func = func or find_suitiable_decoder(self.url)
188
+ self.func = func or find_suitable_decoder(self.url)
187
189
 
188
190
  @property
189
191
  def cached(self):
@@ -193,7 +195,7 @@ class HttpRequest:
193
195
  """
194
196
  Populates the file cache with the data from http if required.
195
197
  noop if 1/ data is already cached and 2/ refresh flag not set
196
- The caller should load the cachefile after _retrieve successfuly executes
198
+ The caller should load the cachefile after _retrieve successfully executes
197
199
  """
198
200
  if self.cached and not self.refresh:
199
201
  return
@@ -277,7 +279,7 @@ class FileLoader(HttpRequest):
277
279
  def __init__(self, filepath, func=None):
278
280
  HttpRequest.__init__(
279
281
  self, filepath, refresh=False,
280
- func=func or find_suitiable_decoder(filepath)
282
+ func=func or find_suitable_decoder(filepath)
281
283
  )
282
284
  self.cachefile = filepath
283
285
 
@@ -291,7 +293,7 @@ class ZipfileRequest(HttpRequest):
291
293
  def __init__(self, url, filename, func=None, refresh=False):
292
294
  HttpRequest.__init__(
293
295
  self, url, refresh=refresh,
294
- func=func or find_suitiable_decoder(filename)
296
+ func=func or find_suitable_decoder(filename)
295
297
  )
296
298
  self.filename = filename
297
299
 
@@ -388,7 +390,7 @@ class EbrainsRequest(HttpRequest):
388
390
  ), # if explicitly enabled by env var, do not raise
389
391
  ]
390
392
  ):
391
- raise EbrainsAuthenticationError(
393
+ raise _exceptions.EbrainsAuthenticationError(
392
394
  "sys.stdout is not tty, SIIBRA_ENABLE_DEVICE_FLOW is not set,"
393
395
  "and not running in a notebook. Are you running in batch mode?"
394
396
  )
@@ -444,7 +446,7 @@ class EbrainsRequest(HttpRequest):
444
446
  f"exceeded max attempts: {cls._IAM_DEVICE_MAXTRIES}, aborting..."
445
447
  )
446
448
  logger.error(message)
447
- raise EbrainsAuthenticationError(message)
449
+ raise _exceptions.EbrainsAuthenticationError(message)
448
450
  attempt_number += 1
449
451
  resp = requests.post(
450
452
  url=cls._IAM_TOKEN_ENDPOINT,
@@ -457,9 +459,9 @@ class EbrainsRequest(HttpRequest):
457
459
 
458
460
  if resp.status_code == 200:
459
461
  json_resp = resp.json()
460
- logger.debug("Device flow sucessful:", json_resp)
462
+ logger.debug("Device flow successful:", json_resp)
461
463
  cls._KG_API_TOKEN = json_resp.get("access_token")
462
- print("ebrains token successfuly set.")
464
+ print("ebrains token successfully set.")
463
465
  break
464
466
 
465
467
  if resp.status_code == 400:
@@ -470,7 +472,7 @@ class EbrainsRequest(HttpRequest):
470
472
  logger.debug(f"400 error: {resp.content}")
471
473
  continue
472
474
 
473
- raise EbrainsAuthenticationError(resp.content)
475
+ raise _exceptions.EbrainsAuthenticationError(resp.content)
474
476
 
475
477
  @classmethod
476
478
  def set_token(cls, token):
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,11 +14,10 @@
14
14
  # limitations under the License.
15
15
  """Abbreviations and aliases."""
16
16
 
17
- from ..commons import InstanceTable
18
-
19
17
  import json
20
18
  from os import path
21
19
 
20
+ from ..commons import InstanceTable
22
21
 
23
22
  RT_DIR = path.dirname(__file__)
24
23
 
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,10 +14,11 @@
14
14
  # limitations under the License.
15
15
  """Package handling variety of volumes and volume operations"""
16
16
 
17
+ from typing import List, Union
18
+
19
+ import numpy as np
20
+
17
21
  from .parcellationmap import Map
18
22
  from .providers import provider
19
23
  from .volume import from_array, from_file, from_pointcloud, from_nifti, Volume
20
-
21
24
  from ..commons import logger
22
- from typing import List, Union
23
- import numpy as np
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,9 +14,18 @@
14
14
  # limitations under the License.
15
15
  """Provides spatial representations for parcellations and regions."""
16
16
 
17
+ from collections import defaultdict
18
+ from dataclasses import dataclass, asdict
19
+ from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
20
+
21
+ import numpy as np
22
+ import pandas as pd
23
+ from scipy.ndimage import distance_transform_edt
24
+ from nilearn import image
25
+
17
26
  from . import volume as _volume
18
27
  from .providers import provider
19
- from .. import logger, QUIET, exceptions
28
+ from .. import exceptions
20
29
  from ..commons import (
21
30
  MapIndex,
22
31
  MapType,
@@ -29,19 +38,13 @@ from ..commons import (
29
38
  siibra_tqdm,
30
39
  Species,
31
40
  CompareMapsResult,
32
- generate_uuid
41
+ generate_uuid,
42
+ logger,
43
+ QUIET,
33
44
  )
34
45
  from ..core import concept, space, parcellation, region as _region
35
46
  from ..locations import location, point, pointcloud
36
47
 
37
- import numpy as np
38
- from typing import Union, Dict, List, TYPE_CHECKING, Iterable, Tuple
39
- from scipy.ndimage import distance_transform_edt
40
- from collections import defaultdict
41
- from nilearn import image
42
- import pandas as pd
43
- from dataclasses import dataclass, asdict
44
-
45
48
  if TYPE_CHECKING:
46
49
  from ..core.region import Region
47
50
 
@@ -503,7 +506,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
503
506
  taking the voxelwise maximum across the mapped volumes and fragments,
504
507
  and re-labelling regions sequentially.
505
508
 
506
- Paramaters
509
+ Parameters
507
510
  ----------
508
511
  **kwargs: Takes the fetch arguments of its space's template.
509
512
 
@@ -690,7 +693,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
690
693
  name=f"Custom colorization of {self}"
691
694
  )
692
695
 
693
- def get_colormap(self, region_specs: Iterable = None):
696
+ def get_colormap(self, region_specs: Iterable = None, *, fill_uncolored: bool = False):
694
697
  """
695
698
  Generate a matplotlib colormap from known rgb values of label indices.
696
699
 
@@ -698,13 +701,24 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
698
701
  ----------
699
702
  region_specs: iterable(regions), optional
700
703
  Optional parameter to only color the desired regions.
704
+ fill_uncolored: bool , optional
705
+ If a region has no preconfigured color, a color will be randomly (reproducible) created.
701
706
 
702
707
  Returns
703
708
  -------
704
709
  ListedColormap
705
710
  """
706
- from matplotlib.colors import ListedColormap
707
- import numpy as np
711
+ try:
712
+ from matplotlib.colors import ListedColormap
713
+ except ImportError as e:
714
+ logger.error(
715
+ "matplotlib not available. Please install matplotlib to create a matplotlib colormap."
716
+ )
717
+ raise e
718
+ if fill_uncolored:
719
+ seed = len(self.regions)
720
+ np.random.seed(seed)
721
+ logger.info(f"Random colors are allowed for regions without preconfigured colors. Random seed: {seed}.")
708
722
 
709
723
  colors = {}
710
724
  if region_specs is not None:
@@ -714,6 +728,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
714
728
  else:
715
729
  include_region_names = None
716
730
 
731
+ no_predefined_color = []
717
732
  for regionname, indices in self._indices.items():
718
733
  for index in indices:
719
734
  if index.label is None:
@@ -725,14 +740,34 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
725
740
  region = self.get_region(index=index)
726
741
  if region.rgb is not None:
727
742
  colors[index.label] = region.rgb
743
+ elif fill_uncolored:
744
+ random_clr = [np.random.randint(0, 255) for r in range(3)]
745
+ while random_clr in list(colors.values()):
746
+ random_clr = [np.random.randint(0, 255) for r in range(3)]
747
+ colors[index.label] = random_clr
748
+ else:
749
+ no_predefined_color.append(region.name)
750
+
751
+ if len(colors) == 0:
752
+ raise exceptions.NoPredifinedColormapException(
753
+ f"There is no predefined/preconfigured colormap for '{self}'."
754
+ "Set `fill_uncolored=True` to get a reproducible colormap."
755
+ )
756
+
757
+ if no_predefined_color:
758
+ logger.info(
759
+ f"No preconfigured color found for the following regions."
760
+ "Use `fill_uncolored=True` to display with a non-background color.\n"
761
+ f"{no_predefined_color}"
762
+ )
728
763
 
729
- pallette = np.array(
764
+ palette = np.array(
730
765
  [
731
766
  list(colors[i]) + [1] if i in colors else [0, 0, 0, 0]
732
767
  for i in range(max(colors.keys()) + 1)
733
768
  ]
734
769
  ) / [255, 255, 255, 1]
735
- return ListedColormap(pallette)
770
+ return ListedColormap(palette)
736
771
 
737
772
  def sample_locations(self, regionspec, numpoints: int):
738
773
  """ Sample 3D locations inside a given region.
@@ -750,7 +785,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
750
785
  Returns
751
786
  -------
752
787
  PointCloud
753
- Sample points in physcial coordinates corresponding to this
788
+ Sample points in physical coordinates corresponding to this
754
789
  parcellationmap
755
790
  """
756
791
  index = self.get_index(regionspec)
@@ -923,7 +958,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
923
958
  if len(assignments) == 0:
924
959
  return pd.DataFrame(columns=columns)
925
960
  # determine the unique set of observed indices in order to do region lookups
926
- # only once for each map index occuring in the point list
961
+ # only once for each map index occurring in the point list
927
962
  labelled = self.is_labelled # avoid calling this in a loop
928
963
  observed_indices = { # unique set of observed map indices. NOTE: len(observed_indices) << len(assignments)
929
964
  (
@@ -1032,7 +1067,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
1032
1067
  assignments.append(
1033
1068
  MapAssignment(
1034
1069
  input_structure=pointindex,
1035
- centroid=tuple(np.array(position).round(2)),
1070
+ centroid=tuple(position),
1036
1071
  volume=vol,
1037
1072
  fragment=frag,
1038
1073
  map_value=value
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,12 +14,12 @@
14
14
  # limitations under the License.
15
15
  """Handles reading and preparing gii files."""
16
16
 
17
- from . import provider as _provider
18
-
19
- from ...retrieval.requests import HttpRequest, ZipfileRequest
17
+ from typing import Union, Dict, TYPE_CHECKING
20
18
 
21
19
  import numpy as np
22
- from typing import Union, Dict, TYPE_CHECKING
20
+
21
+ from . import provider as _provider
22
+ from ...retrieval.requests import HttpRequest, ZipfileRequest
23
23
 
24
24
  if TYPE_CHECKING:
25
25
  from ...locations import boundingbox as _boundingbox
@@ -54,7 +54,7 @@ class FreesurferAnnot(_provider.VolumeProvider, srctype="freesurfer-annot"):
54
54
  frag_labels[selected_label] = 1
55
55
  frag_labels[~selected_label] = 0
56
56
  else:
57
- frag_labels[frag_labels == -1] = 0 # annot files store backgorund as -1 while siibra uses 0
57
+ frag_labels[frag_labels == -1] = 0 # annot files store background as -1 while siibra uses 0
58
58
  vertex_labels.append(frag_labels)
59
59
 
60
60
  return {"labels": np.hstack(vertex_labels)}
@@ -98,7 +98,7 @@ class ZippedFreesurferAnnot(_provider.VolumeProvider, srctype="zip/freesurfer-an
98
98
  frag_labels[selected_label] = 1
99
99
  frag_labels[~selected_label] = 0
100
100
  else:
101
- frag_labels[frag_labels == -1] = 0 # annot files store backgorund as -1 while siibra uses 0
101
+ frag_labels[frag_labels == -1] = 0 # annot files store background as -1 while siibra uses 0
102
102
  vertex_labels.append(frag_labels)
103
103
 
104
104
  return {"labels": np.hstack(vertex_labels)}
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2024
1
+ # Copyright 2018-2025
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,15 +14,15 @@
14
14
  # limitations under the License.
15
15
  """Handles reading and preparing gii files."""
16
16
 
17
- from . import provider as _provider
17
+ from typing import Union, Dict
18
+
19
+ import numpy as np
18
20
 
21
+ from . import provider as _provider
19
22
  from ...retrieval import requests
20
23
  from ...commons import logger, merge_meshes
21
24
  from ...locations import boundingbox as _boundingbox
22
25
 
23
- import numpy as np
24
- from typing import Union, Dict
25
-
26
26
 
27
27
  class GiftiMesh(_provider.VolumeProvider, srctype="gii-mesh"):
28
28
  """