siibra 0.4a35__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (35)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +1 -0
  3. siibra/commons.py +38 -17
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +38 -12
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +22 -2
  8. siibra/core/parcellation.py +42 -22
  9. siibra/core/region.py +56 -95
  10. siibra/features/anchor.py +7 -4
  11. siibra/features/connectivity/functional_connectivity.py +8 -1
  12. siibra/features/connectivity/regional_connectivity.py +14 -19
  13. siibra/features/dataset/ebrains.py +1 -1
  14. siibra/features/feature.py +193 -29
  15. siibra/features/image/__init__.py +1 -1
  16. siibra/features/image/image.py +1 -0
  17. siibra/features/image/volume_of_interest.py +14 -5
  18. siibra/features/tabular/__init__.py +2 -0
  19. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  20. siibra/livequeries/ebrains.py +2 -3
  21. siibra/locations/location.py +4 -3
  22. siibra/locations/pointset.py +2 -2
  23. siibra/retrieval/datasets.py +73 -3
  24. siibra/retrieval/repositories.py +17 -6
  25. siibra/retrieval/requests.py +68 -61
  26. siibra/volumes/neuroglancer.py +9 -9
  27. siibra/volumes/nifti.py +4 -5
  28. siibra/volumes/parcellationmap.py +157 -97
  29. siibra/volumes/sparsemap.py +27 -31
  30. siibra/volumes/volume.py +1 -1
  31. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/METADATA +2 -1
  32. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/RECORD +35 -34
  33. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  34. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  35. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
siibra/VERSION CHANGED
@@ -1 +1 @@
-0.4a35
+0.4a46
siibra/__init__.py CHANGED
@@ -119,6 +119,7 @@ def __dir__():
         "use_configuration",
         "extend_configuration",
         "get_region",
+        "find_regions",
         "get_map",
         "get_template",
         "MapType",
siibra/commons.py CHANGED
@@ -18,9 +18,11 @@ import re
 from enum import Enum
 from nibabel import Nifti1Image
 import logging
+from tqdm import tqdm
 import numpy as np
 from typing import Generic, Iterable, Iterator, List, TypeVar, Union, Dict
 from skimage.filters import gaussian
+from dataclasses import dataclass
 
 logger = logging.getLogger(__name__.split(os.path.extsep)[0])
 ch = logging.StreamHandler()
@@ -36,9 +38,20 @@ KEYCLOAK_CLIENT_SECRET = os.getenv("KEYCLOAK_CLIENT_SECRET")
 SIIBRA_CACHEDIR = os.getenv("SIIBRA_CACHEDIR")
 SIIBRA_LOG_LEVEL = os.getenv("SIIBRA_LOG_LEVEL", "INFO")
 SIIBRA_USE_CONFIGURATION = os.getenv("SIIBRA_USE_CONFIGURATION")
+
+SIIBRA_USE_LOCAL_SNAPSPOT = os.getenv("SIIBRA_USE_LOCAL_SNAPSPOT")
+
 with open(os.path.join(ROOT_DIR, "VERSION"), "r") as fp:
     __version__ = fp.read().strip()
 
+@dataclass
+class CompareMapsResult:
+    intersection_over_union: float
+    intersection_over_first: float
+    intersection_over_second: float
+    correlation: float
+    weighted_mean_of_first: float
+    weighted_mean_of_second: float
 
 T = TypeVar("T")
 
@@ -233,6 +246,15 @@ QUIET = LoggingContext("ERROR")
 VERBOSE = LoggingContext("DEBUG")
 
 
+def siibra_tqdm(iterable: Iterable[T]=None, *args, **kwargs):
+    return tqdm(
+        iterable,
+        *args,
+        disable=kwargs.pop("disable", False) or (logger.level > 20),
+        **kwargs
+    )
+
+
 def create_key(name: str):
     """
     Creates an uppercase identifier string that includes only alphanumeric
@@ -411,14 +433,14 @@ def compare_maps(map1: Nifti1Image, map2: Nifti1Image):
     m1, m2 = ((_ > 0).astype("uint8") for _ in [v1, v2])
     intersection = np.minimum(m1, m2).sum()
     if intersection == 0:
-        return {
-            "intersection over union": 0,
-            "intersection over first": 0,
-            "intersection over second": 0,
-            "correlation": 0,
-            "weighted mean of first": 0,
-            "weighted mean of second": 0,
-        }
+        return CompareMapsResult(
+            intersection_over_union=0,
+            intersection_over_first=0,
+            intersection_over_second=0,
+            correlation=0,
+            weighted_mean_of_first=0,
+            weighted_mean_of_second=0,
+        )
 
     # Compute the nonzero voxels in map1 with their correspondences in map2
     XYZnz1 = nonzero_coordinates(a1)
@@ -452,15 +474,14 @@ def compare_maps(map1: Nifti1Image, map2: Nifti1Image):
 
     bx = (x > 0).astype("uint8")
     by = (y > 0).astype("uint8")
-
-    return {
-        "intersection over union": intersection / np.maximum(bx, by).sum(),
-        "intersection over first": intersection / N1,
-        "intersection over second": intersection / N2,
-        "correlation": r,
-        "weighted mean of first": np.sum(x * y) / np.sum(y),
-        "weighted mean of second": np.sum(x * y) / np.sum(x),
-    }
+    return CompareMapsResult(
+        intersection_over_union=intersection / np.maximum(bx, by).sum(),
+        intersection_over_first=intersection / N1,
+        intersection_over_second=intersection / N2,
+        correlation=r,
+        weighted_mean_of_first=np.sum(x * y) / np.sum(y),
+        weighted_mean_of_second=np.sum(x * y) / np.sum(x),
+    )
 
 
 class PolyLine:
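
In effect, compare_maps now returns the CompareMapsResult dataclass instead of a dict with spaced string keys, and siibra_tqdm wraps tqdm so progress bars are suppressed once the logger is set above INFO (numeric level 20). A small sketch with toy volumes, assuming compare_maps keeps the two-image signature shown above:

    import numpy as np
    from nibabel import Nifti1Image
    from siibra.commons import compare_maps, siibra_tqdm

    # two toy volumes with identical affines; real callers pass statistical or labelled maps
    a = Nifti1Image(np.random.rand(8, 8, 8).astype("float32"), np.eye(4))
    b = Nifti1Image(np.random.rand(8, 8, 8).astype("float32"), np.eye(4))

    result = compare_maps(a, b)
    print(result.intersection_over_union)   # attribute access replaces result["intersection over union"]

    for _ in siibra_tqdm(range(3), desc="demo"):   # bar is hidden when logger.level > 20 (WARNING or quieter)
        pass
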
siibra/configuration/configuration.py CHANGED
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ..commons import logger, __version__, SIIBRA_USE_CONFIGURATION
+from ..commons import logger, __version__, SIIBRA_USE_CONFIGURATION, siibra_tqdm
 from ..retrieval.repositories import GitlabConnector, RepositoryConnector
 from ..retrieval.exceptions import NoSiibraConfigMirrorsAvailableException
 from ..retrieval.requests import SiibraHttpRequestError
@@ -21,7 +21,6 @@ from ..retrieval.requests import SiibraHttpRequestError
 from typing import Union
 from collections import defaultdict
 from requests.exceptions import ConnectionError
-from tqdm import tqdm
 from os import path
 
 
@@ -47,12 +46,7 @@ class Configuration:
 
     CONFIGURATION_EXTENSIONS = []
 
-    # lists of loaders for json specification files
-    # found in the siibra configuration, stored per
-    # preconfigured class name. These files can
-    # loaded and fed to the Factory.from_json
-    # to produce the corresponding object.
-    spec_loaders = defaultdict(list)
+
 
     _cleanup_funcs = []
 
@@ -65,6 +59,13 @@ class Configuration:
 
     def __init__(self):
 
+        # lists of loaders for json specification files
+        # found in the siibra configuration, stored per
+        # preconfigured class name. These files can
+        # loaded and fed to the Factory.from_json
+        # to produce the corresponding object.
+        self.spec_loaders = defaultdict(list)
+
         # retrieve json spec loaders from the default configuration
         for connector in self.CONFIGURATIONS:
             try:
@@ -122,11 +123,14 @@ class Configuration:
             conn = RepositoryConnector._from_url(conn)
         if not isinstance(conn, RepositoryConnector):
             raise RuntimeError("conn needs to be an instance of RepositoryConnector or a valid str")
-        logger.info(f"Extending configuration with {str(conn)}")
-        cls.CONFIGURATION_EXTENSIONS.append(conn)
-        # call registered cleanup functions
-        for func in cls._cleanup_funcs:
-            func()
+        if conn in cls.CONFIGURATION_EXTENSIONS:
+            logger.warn(f"The configuration {str(conn)} is already registered.")
+        else:
+            logger.info(f"Extending configuration with {str(conn)}")
+            cls.CONFIGURATION_EXTENSIONS.append(conn)
+            # call registered cleanup functions
+            for func in cls._cleanup_funcs:
+                func()
 
     @classmethod
     def register_cleanup(cls, func):
@@ -143,7 +147,7 @@ class Configuration:
         result = []
 
         if folder not in self.folders:
-            logger.warn(f"No configuration found for building from configuration folder {folder}.")
+            logger.warning(f"No configuration found for building from configuration folder {folder}.")
             return result
 
         from .factory import Factory
@@ -158,12 +162,12 @@ class Configuration:
             )
         )
 
-        for fname, loader in tqdm(
+        for fname, loader in siibra_tqdm(
             specloaders,
             total=len(specloaders),
             desc=f"Loading preconfigured {obj0.__class__.__name__} instances"
         ):
-            # filename is added to allow Factory creating reasonable default object identifiers
+            # filename is added to allow Factory creating reasonable default object identifiers\
             obj = Factory.from_json(dict(loader.data, **{'filename': fname}))
             result.append(obj)
 
@@ -174,5 +178,5 @@ class Configuration:
 
 
 if SIIBRA_USE_CONFIGURATION:
-    logger.warn(f"config.SIIBRA_USE_CONFIGURATION defined, use configuration at {SIIBRA_USE_CONFIGURATION}")
+    logger.warning(f"config.SIIBRA_USE_CONFIGURATION defined, use configuration at {SIIBRA_USE_CONFIGURATION}")
     Configuration.use_configuration(SIIBRA_USE_CONFIGURATION)
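
The guard added to extend_configuration changes what a repeated registration does: a connector that is already listed only triggers a warning instead of being appended again and re-running the cleanup hooks. An illustrative sketch with a placeholder URL; whether two calls with the same string are recognized as duplicates depends on the connector's equality comparison:

    import siibra

    repo = "https://example.org/my-siibra-configuration"   # placeholder configuration source
    siibra.extend_configuration(repo)   # registers the extension and runs the cleanup hooks
    siibra.extend_configuration(repo)   # with this change, only warns that it is already registered
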
siibra/configuration/factory.py CHANGED
@@ -19,7 +19,8 @@ from ..features.tabular import (
     receptor_density_profile,
     receptor_density_fingerprint,
     cell_density_profile,
-    layerwise_cell_density
+    layerwise_cell_density,
+    regional_timeseries_activity
 )
 from ..features.image import sections, volume_of_interest
 from ..core import atlas, parcellation, space, region
@@ -54,6 +55,7 @@ BUILDFUNCS = {
     "siibra/feature/connectivitymatrix/v0.2": "build_connectivity_matrix",
     "siibra/feature/section/v0.1": "build_section",
     "siibra/feature/voi/v0.1": "build_volume_of_interest",
+    "siibra/feature/timeseries/activity/v0.1": "build_activity_timeseries"
 }
 
 
@@ -68,9 +70,10 @@ class Factory:
             result.append(
                 datasets.EbrainsDataset(id=spec["ebrains"]["minds/core/dataset/v1.0.0"])
             )
-        if "openminds/Dataset" in spec.get("ebrains", {}):
-            # TODO add parser for ebrains kg v3 dataset. see EbrainsV3Dataset
-            pass
+        if "openminds/DatasetVersion" in spec.get("ebrains", {}):
+            result.append(
+                datasets.EbrainsV3DatasetVersion(id=spec["ebrains"]["openminds/DatasetVersion"])
+            )
         return result
 
     @classmethod
@@ -79,7 +82,7 @@ class Factory:
         for vspec in volume_specs:
             if space_id:
                 if 'space' in vspec:
-                    logger.warn(f"Replacing space spec {vspec['space']} in volume spec with {space_id}")
+                    logger.warning(f"Replacing space spec {vspec['space']} in volume spec with {space_id}")
                 vspec['space'] = {"@id": space_id}
             if name and vspec.get('name') is None: # only use provided name if the volume has no specific name
                 vspec['name'] = name
@@ -142,7 +145,7 @@ class Factory:
                 reftag=repospec['branch']
             )
         else:
-            logger.warn(
+            logger.warning(
                 "Do not know how to create a repository "
                 f"connector from specification type {spectype}."
             )
@@ -243,7 +246,7 @@ class Factory:
                 break
         else:
             if srctype not in cls._warnings_issued:
-                logger.warn(f"No provider defined for volume Source type {srctype}")
+                logger.warning(f"No provider defined for volume Source type {srctype}")
                 cls._warnings_issued.append(srctype)
 
         assert all([isinstance(provider, volume.VolumeProvider) for provider in providers])
@@ -275,7 +278,7 @@ class Factory:
         else:
             max_z = max(
                 d.get('z', 0)
-                for _, l in spec.get("indices", {}).items()
+                for l in spec.get("indices", {}).values()
                 for d in l
             ) + 1
             if max_z > MIN_VOLUMES_FOR_SPARSE_MAP:
@@ -411,8 +414,12 @@ class Factory:
             return volume_of_interest.PLIVolumeOfInterest(
                 modality="transmittance", **kwargs
            )
-        elif modality == "segmentation":
-            return volume_of_interest.SegmentedVolumeOfInterest(**kwargs)
+        elif modality == "XPCT":
+            return volume_of_interest.XPCTVolumeOfInterest(
+                modality="XPCT", **kwargs
+            )
+        # elif modality == "segmentation":
+        #     return volume_of_interest.SegmentedVolumeOfInterest(**kwargs)
         elif modality == "T2 weighted MRI":
             return volume_of_interest.MRIVolumeOfInterest(
                 modality="T2", **kwargs
@@ -421,8 +428,6 @@
             return volume_of_interest.MRIVolumeOfInterest(
                 modality="T1", **kwargs
             )
-        elif modality == "segmentation":
-            return volume_of_interest.SegmentedVolumeOfInterest(**kwargs)
         else:
             raise ValueError(f"No method for building image section feature type {modality}.")
 
@@ -453,6 +458,27 @@ class Factory:
         else:
             raise ValueError(f"No method for building connectivity matrix of type {modality}.")
 
+    @classmethod
+    def build_activity_timeseries(cls, spec):
+        modality = spec["modality"]
+        kwargs = {
+            "cohort": spec["cohort"],
+            "modality": modality,
+            "regions": spec["regions"],
+            "connector": cls.extract_connector(spec),
+            "decode_func": cls.extract_decoder(spec),
+            "files": spec.get("files", {}),
+            "anchor": cls.extract_anchor(spec),
+            "description": spec.get("description", ""),
+            "datasets": cls.extract_datasets(spec),
+            "timestep": spec.get("timestep", ("1 no_unit"))
+        }
+        if modality == "Regional BOLD signal":
+            kwargs["paradigm"] = spec.get("paradigm", "")
+            return regional_timeseries_activity.RegionalBOLD(**kwargs)
+        else:
+            raise ValueError(f"No method for building signal table of type {modality}.")
+
     @classmethod
     def from_json(cls, spec: dict):
 
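
For orientation, the new build_activity_timeseries builder is selected by the '@type' value 'siibra/feature/timeseries/activity/v0.1' and reads the fields visible above. A hypothetical, trimmed-down spec of that shape; every value is an invented placeholder, and real specs from the siibra configuration also carry the repository, decoder, anchor and dataset fields consumed by the generic extract_* helpers:

    # every value below is a placeholder; only the key names are taken from the builder above
    spec = {
        "@type": "siibra/feature/timeseries/activity/v0.1",
        "modality": "Regional BOLD signal",
        "cohort": "EXAMPLE-COHORT",
        "paradigm": "rest",
        "timestep": "2.2 second",
        "regions": ["Region A", "Region B"],
        "files": {"subject-01": "bold_subject-01.csv"},
        "description": "placeholder description",
    }
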
siibra/core/atlas.py CHANGED
@@ -184,17 +184,20 @@ class Atlas(concept.AtlasConcept, configuration_folder="atlases"):
 
         Parameters
         ----------
-        regionspec : any of
-            - a string with a possibly inexact name, which is matched both
-              against the name and the identifier key,
-            - an integer, which is interpreted as a labelindex
-            - a region object
+        regionspec: str, regex, int, Region, MapIndex
+            - a string with a possibly inexact name (matched both against the name and the identifier key)
+            - a string in '/pattern/flags' format to use regex search (acceptable flags: aiLmsux)
+            - a regex applied to region names
+            - a Region object
         all_versions : Bool, default: False
             If True, matched regions for all versions of a parcellation are returned.
+        filter_children : bool, default: True
+            If False, children of matched parents will be returned.
 
-        Yield
-        -----
-        list of matching regions
+        Returns
+        -------
+        list[Region]
+            list of regions matching to the regionspec
         """
         result = []
         for p in self._parcellation_ids:
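
The reworked docstring documents the '/pattern/flags' shorthand for regular-expression searches. A short sketch of the intended use; the atlas key is illustrative and the behaviour is assumed from the docstring rather than the implementation:

    import re
    import siibra

    atlas = siibra.atlases["human"]   # illustrative registry key
    # '/pattern/flags' strings are treated as regular expressions:
    occipital = atlas.find_regions("/hOc[1-6]/i")
    # a pre-compiled regex should be accepted as well, per the docstring:
    amygdala = atlas.find_regions(re.compile("amygdala", re.IGNORECASE))
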
siibra/core/concept.py CHANGED
@@ -85,9 +85,29 @@ class AtlasConcept:
             else Species.decode(species) # overwritable property implementation below
         self.shortname = shortname
         self.modality = modality
-        self.description = description
-        self.publications = publications
+        self._description = description
+        self._publications = publications
         self.datasets = datasets
+
+    @property
+    def description(self):
+        if self._description:
+            return self._description
+        for ds in self.datasets:
+            if ds.description:
+                return ds.description
+        return ''
+
+    @property
+    def publications(self) -> List[TypePublication]:
+        return [
+            *self._publications,
+            *[{
+                'citation': 'DOI',
+                'url': url.get("url")
+            } for ds in self.datasets
+                for url in ds.urls]
+        ]
 
     @property
     def species(self) -> Species:
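
In effect, a concept without an explicit description now falls back to the first dataset description, and publications are merged with DOI entries derived from dataset URLs. A small sketch; the parcellation key is illustrative and the dict keys follow the hunk above:

    import siibra

    parc = siibra.parcellations["julich 2.9"]   # illustrative registry key
    print(parc.description)        # explicit description, else the first dataset description found
    for pub in parc.publications:  # configured publications plus {'citation': 'DOI', 'url': ...} entries
        print(pub["citation"], pub["url"])
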
siibra/core/parcellation.py CHANGED
@@ -137,7 +137,7 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
             return True
         return super().matches(spec)
 
-    def get_map(self, space=None, maptype: Union[str, MapType] = MapType.LABELLED):
+    def get_map(self, space=None, maptype: Union[str, MapType] = MapType.LABELLED, spec: str = ""):
         """
         Get the maps for the parcellation in the requested template space.
 
@@ -155,6 +155,11 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
             Type of map requested (e.g., statistical or labelled).
             Use MapType.STATISTICAL to request probability maps.
             Defaults to MapType.LABELLED.
+        spec: str, optional
+            In case of multiple matching maps for the given parcellation, space
+            and type, use this field to specify keywords matching the desired
+            parcellation map name. Otherwise, siibra will default to the first
+            in the list of matches (and inform with a log message)
         Returns
         -------
         parcellationmap.Map or SparseMap
@@ -175,14 +180,29 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
             logger.error(f"No {maptype} map in {space} available for {str(self)}")
             return None
         if len(candidates) > 1:
-            logger.warning(f"Multiple {maptype} maps in {space} available for {str(self)}, choosing the first.")
+            spec_candidates = [
+                c for c in candidates if all(w.lower() in c.name.lower() for w in spec.split())
+            ]
+            if len(spec_candidates) == 0:
+                logger.warning(f"'{spec}' does not match any options from {[c.name for c in candidates]}.")
+                return None
+            if len(spec_candidates) > 1:
+                logger.warning(
+                    f"Multiple maps are available in this specification of space, parcellation, and map type.\n"
+                    f"Choosing the first map from {[c.name for c in spec_candidates]}."
+                )
+            return spec_candidates[0]
         return candidates[0]
 
-    @classmethod
-    def find_regions(cls, region_spec: str, parents_only=True):
+    @staticmethod
+    def find_regions(region_spec: str, parents_only=True):
         """
         Find regions that match the given region specification in the subtree
-        headed by this region.
+        headed by each parcellation in the registry.
+        Note
+        ----
+        Use Region.find() to search for a region in an instance of a
+        parcellation.
 
         Parameters
         ----------
@@ -196,11 +216,11 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
         List[Region]
             list of matching regions
         """
-        MEM = cls._CACHED_REGION_SEARCHES
+        MEM = Parcellation._CACHED_REGION_SEARCHES
         if region_spec not in MEM:
             MEM[region_spec] = [
                 r
-                for p in cls.registry()
+                for p in Parcellation.registry()
                 for r in p.find(regionspec=region_spec)
             ]
         if parents_only:
@@ -239,30 +259,25 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
 
     def get_region(
         self,
-        regionspec: Union[str, int, MapIndex, region.Region],
+        regionspec: Union[str, region.Region],
         find_topmost: bool = True,
         allow_tuple: bool = False
     ):
         """
         Given a unique specification, return the corresponding region.
 
-        The spec could be a label index, a (possibly incomplete) name, or a
-        region object. This method is meant to definitely determine a valid
-        region. Therefore, if no match is found, it raises a ValueError.
-        If multiple matches are found, the method tries to return only the
-        common parent node. If there is no common parent, an exception is
-        raised, except when allow_tuple=True - then a tuple of matched regions
-        is returned.
+        The spec could be a (possibly incomplete) name, or a region object.
+        This method is meant to definitely determine a valid region. Therefore,
+        if no match is found, it raises a ValueError. If multiple matches are
+        found, the method tries to return only the common parent node. If there
+        is no common parent, an exception is raised, except when
+        allow_tuple=True - then a tuple of matched regions is returned.
 
         Parameters
         ----------
-        regionspec: str, regex, int, Region, MapIndex
+        regionspec: str, Region
             - a string with a possibly inexact name (matched both against the name and the identifier key)
-            - a string in '/pattern/flags' format to use regex search (acceptable flags: aiLmsux)
-            - a regex applied to region names
-            - an integer (interpreted as a labelindex)
             - a Region object
-            - a full MapIndex object
         find_topmost: bool, default: True
             If True, will automatically return the parent of a decoded region
             the decoded region is its only child.
@@ -282,6 +297,7 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
         ValueError
             If the spec cannot be matched against any region
         """
+        assert isinstance(regionspec, (str, region.Region)), f"get_region takes str or Region but you provided {type(regionspec)}"
         if isinstance(regionspec, region.Region) and (regionspec.parcellation == self):
             return regionspec
 
@@ -294,6 +310,10 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
         else:
             candidates = self.find(regionspec, filter_children=True, find_topmost=find_topmost)
 
+        exact_matches = [r for r in candidates if regionspec == r]
+        if len(exact_matches) == 1:
+            return exact_matches[0]
+
         if len(candidates) > 1 and isinstance(regionspec, str):
             # if we have an exact match of words in one region, discard other candidates.
             querywords = {w.replace(',', '').lower() for w in regionspec.split()}
@@ -313,7 +333,7 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
             if allow_tuple:
                 return tuple(candidates)
             raise RuntimeError(
-                f"Spec {regionspec} resulted in multiple matches: {', '.join(r.name for r in candidates)}."
+                f"Spec {regionspec!r} resulted in multiple matches: {', '.join(r.name for r in candidates)}."
             )
 
     def __str__(self):
@@ -333,7 +353,7 @@ class Parcellation(region.Region, configuration_folder="parcellations"):
         We sort parcellations by their version
         """
         if (self.version is None) or (other.version is None):
-            logger.warn(
+            logger.warning(
                 f"Sorting non-versioned instances of {self.__class__.__name__} "
                 f"by name: {self.name}, {other.name}"
             )
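
To round off, the new spec keyword on get_map narrows multiple candidate maps by keywords occurring in the map name, while an empty spec keeps the previous take-the-first behaviour. A hedged usage sketch; the registry keys and the keyword are placeholders:

    import siibra

    parc = siibra.parcellations["julich 2.9"]   # illustrative registry key
    # default behaviour: the first matching map, with a log message if several are available
    labelled = parc.get_map(space="mni152", maptype="labelled")
    # keywords in spec are matched against the candidate map names to disambiguate
    chosen = parc.get_map(space="mni152", maptype="statistical", spec="julich")   # placeholder keyword
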