siibra 0.5a2__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (83)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +20 -12
  3. siibra/commons.py +145 -90
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +22 -17
  6. siibra/configuration/factory.py +177 -128
  7. siibra/core/__init__.py +1 -8
  8. siibra/core/{relation_qualification.py → assignment.py} +17 -14
  9. siibra/core/atlas.py +66 -35
  10. siibra/core/concept.py +81 -39
  11. siibra/core/parcellation.py +83 -67
  12. siibra/core/region.py +569 -263
  13. siibra/core/space.py +7 -39
  14. siibra/core/structure.py +111 -0
  15. siibra/exceptions.py +63 -0
  16. siibra/experimental/__init__.py +19 -0
  17. siibra/experimental/contour.py +61 -0
  18. siibra/experimental/cortical_profile_sampler.py +57 -0
  19. siibra/experimental/patch.py +98 -0
  20. siibra/experimental/plane3d.py +256 -0
  21. siibra/explorer/__init__.py +16 -0
  22. siibra/explorer/url.py +112 -52
  23. siibra/explorer/util.py +31 -9
  24. siibra/features/__init__.py +73 -8
  25. siibra/features/anchor.py +75 -196
  26. siibra/features/connectivity/__init__.py +1 -1
  27. siibra/features/connectivity/functional_connectivity.py +2 -2
  28. siibra/features/connectivity/regional_connectivity.py +99 -10
  29. siibra/features/connectivity/streamline_counts.py +1 -1
  30. siibra/features/connectivity/streamline_lengths.py +1 -1
  31. siibra/features/connectivity/tracing_connectivity.py +1 -1
  32. siibra/features/dataset/__init__.py +1 -1
  33. siibra/features/dataset/ebrains.py +3 -3
  34. siibra/features/feature.py +219 -110
  35. siibra/features/image/__init__.py +1 -1
  36. siibra/features/image/image.py +21 -13
  37. siibra/features/image/sections.py +1 -1
  38. siibra/features/image/volume_of_interest.py +1 -1
  39. siibra/features/tabular/__init__.py +1 -1
  40. siibra/features/tabular/bigbrain_intensity_profile.py +24 -13
  41. siibra/features/tabular/cell_density_profile.py +111 -69
  42. siibra/features/tabular/cortical_profile.py +82 -16
  43. siibra/features/tabular/gene_expression.py +117 -6
  44. siibra/features/tabular/layerwise_bigbrain_intensities.py +7 -9
  45. siibra/features/tabular/layerwise_cell_density.py +9 -24
  46. siibra/features/tabular/receptor_density_fingerprint.py +11 -6
  47. siibra/features/tabular/receptor_density_profile.py +12 -15
  48. siibra/features/tabular/regional_timeseries_activity.py +74 -18
  49. siibra/features/tabular/tabular.py +17 -8
  50. siibra/livequeries/__init__.py +1 -7
  51. siibra/livequeries/allen.py +139 -77
  52. siibra/livequeries/bigbrain.py +104 -128
  53. siibra/livequeries/ebrains.py +7 -4
  54. siibra/livequeries/query.py +1 -2
  55. siibra/locations/__init__.py +32 -25
  56. siibra/locations/boundingbox.py +153 -127
  57. siibra/locations/location.py +45 -80
  58. siibra/locations/point.py +97 -83
  59. siibra/locations/pointcloud.py +349 -0
  60. siibra/retrieval/__init__.py +1 -1
  61. siibra/retrieval/cache.py +107 -13
  62. siibra/retrieval/datasets.py +9 -14
  63. siibra/retrieval/exceptions/__init__.py +2 -1
  64. siibra/retrieval/repositories.py +147 -53
  65. siibra/retrieval/requests.py +64 -29
  66. siibra/vocabularies/__init__.py +2 -2
  67. siibra/volumes/__init__.py +7 -9
  68. siibra/volumes/parcellationmap.py +396 -253
  69. siibra/volumes/providers/__init__.py +20 -0
  70. siibra/volumes/providers/freesurfer.py +113 -0
  71. siibra/volumes/{gifti.py → providers/gifti.py} +29 -18
  72. siibra/volumes/{neuroglancer.py → providers/neuroglancer.py} +204 -92
  73. siibra/volumes/{nifti.py → providers/nifti.py} +64 -44
  74. siibra/volumes/providers/provider.py +107 -0
  75. siibra/volumes/sparsemap.py +159 -260
  76. siibra/volumes/volume.py +720 -152
  77. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/METADATA +25 -28
  78. siibra-1.0.0a1.dist-info/RECORD +84 -0
  79. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/WHEEL +1 -1
  80. siibra/locations/pointset.py +0 -198
  81. siibra-0.5a2.dist-info/RECORD +0 -74
  82. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/LICENSE +0 -0
  83. {siibra-0.5a2.dist-info → siibra-1.0.0a1.dist-info}/top_level.txt +0 -0
siibra/VERSION CHANGED
@@ -1 +1 @@
- 0.5a02
+ 1.0.0-alpha.1
siibra/__init__.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2021
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -33,10 +33,13 @@ from .retrieval.requests import (
      EbrainsRequest as _EbrainsRequest,
      CACHE as cache
  )
+ from .retrieval.cache import Warmup, WarmupLevel
+
  from . import configuration
+ from . import experimental
  from .configuration import factory
  from . import features, livequeries
- from siibra.locations import Point, PointSet
+ from siibra.locations import Point, PointCloud

  import os as _os
  logger.info(f"Version: {__version__}")
@@ -48,7 +51,7 @@ logger.info(
  # forward access to some functions
  set_ebrains_token = _EbrainsRequest.set_token
  fetch_ebrains_token = _EbrainsRequest.fetch_token
- find_regions = _parcellation.Parcellation.find_regions
+ find_regions = _parcellation.find_regions
  from_json = factory.Factory.from_json


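`find_regions` is now forwarded from a module-level function of the `parcellation` module instead of a `Parcellation` classmethod. A hedged sketch of calling it through the top-level alias, assuming a plain string specification is accepted:

```python
import siibra

# "V1" is a hypothetical region specification
for region in siibra.find_regions("V1"):
    print(region.name)
```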
@@ -104,6 +107,14 @@ def set_feasible_download_size(maxsize_gbyte):


  def set_cache_size(maxsize_gbyte: int):
+     """
+     siibra runs maintainance on its local cache to keep it under a predetermined
+     size of 2 gigabytes. This method changes the cache size.
+
+     Parameters
+     ----------
+     maxsize_gbyte : int
+     """
      assert maxsize_gbyte >= 0
      cache.SIZE_GIB = maxsize_gbyte
      logger.info(f"Set cache size to {maxsize_gbyte} GiB.")
@@ -113,7 +124,7 @@ if "SIIBRA_CACHE_SIZE_GIB" in _os.environ:
      set_cache_size(float(_os.environ.get("SIIBRA_CACHE_SIZE_GIB")))


- def warm_cache():
+ def warm_cache(level=WarmupLevel.INSTANCE):
      """
      Preload preconfigured siibra concepts.

@@ -122,12 +133,7 @@ def warm_cache():
      features. By preloading the instances, siibra commits all preconfigurations
      to the memory at once instead of commiting them when required.
      """
-     _ = _atlas.Atlas.registry()
-     _ = _space.Space.registry()
-     _ = _parcellation.Parcellation.registry()
-     _ = _parcellationmap.Map.registry()
-     features.warm_cache()
-     livequeries.warm_cache()
+     Warmup.warmup(level)


  def __dir__():
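The per-registry preloading collapses into a single `Warmup.warmup(level)` call, making the warmup depth selectable. A sketch; `WarmupLevel.DATA` as a deeper level is an assumption not shown in this diff:

```python
import siibra
from siibra.retrieval.cache import WarmupLevel

# default level: preload preconfigured instances only
siibra.warm_cache()

# assumed deeper level that would also fetch the underlying data
siibra.warm_cache(WarmupLevel.DATA)
```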
@@ -144,7 +150,7 @@ def __dir__():
      "get_template",
      "MapType",
      "Point",
-     "PointSet",
+     "PointCloud",
      "QUIET",
      "VERBOSE",
      "fetch_ebrains_token",
@@ -152,5 +158,7 @@ def __dir__():
      "vocabularies",
      "__version__",
      "cache",
-     "warm_cache"
+     "warm_cache",
+     "set_cache_size",
+     "from_json",
  ]
siibra/commons.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2021
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,19 +18,24 @@ import os
  import re
  from enum import Enum
  from nibabel import Nifti1Image
+ from nilearn.image import resample_to_img
  import logging
  from tqdm import tqdm
  import numpy as np
  import pandas as pd
- from typing import Generic, Iterable, Iterator, List, TypeVar, Union, Dict
+ from typing import Generic, Iterable, Iterator, List, TypeVar, Union, Dict, Generator, Tuple
  from skimage.filters import gaussian
  from dataclasses import dataclass
+ from hashlib import md5
+ from uuid import UUID
+ import math
  try:
      from typing import TypedDict
  except ImportError:
      # support python 3.7
      from typing_extensions import TypedDict

+ logging.addLevelName(21, "INFO_WO_PROGRESS_BARS")
  logger = logging.getLogger(__name__.split(os.path.extsep)[0])
  ch = logging.StreamHandler()
  formatter = logging.Formatter("[{name}:{levelname}] {message}", style="{")
@@ -47,7 +52,7 @@ SIIBRA_LOG_LEVEL = os.getenv("SIIBRA_LOG_LEVEL", "INFO")
  SIIBRA_USE_CONFIGURATION = os.getenv("SIIBRA_USE_CONFIGURATION")
  SIIBRA_USE_LOCAL_SNAPSPOT = os.getenv("SIIBRA_USE_LOCAL_SNAPSPOT")
  SKIP_CACHEINIT_MAINTENANCE = os.getenv("SKIP_CACHEINIT_MAINTENANCE")
- NEUROGLANCER_MAX_GIB = os.getenv("NEUROGLANCER_MAX_GIB", 0.2)
+ SIIBRA_MAX_FETCH_SIZE_GIB = float(os.getenv("SIIBRA_MAX_FETCH_SIZE_GIB", 0.2))

  with open(os.path.join(ROOT_DIR, "VERSION"), "r") as fp:
      __version__ = fp.read().strip()
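`NEUROGLANCER_MAX_GIB` becomes `SIIBRA_MAX_FETCH_SIZE_GIB` and is now parsed to float. Since the value is read at import time, it must be set beforehand:

```python
import os

# must be set before the first `import siibra`
os.environ["SIIBRA_MAX_FETCH_SIZE_GIB"] = "0.5"  # allow 0.5 GiB fetches

import siibra  # noqa: E402
```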
@@ -99,9 +104,11 @@ class InstanceTable(Generic[T], Iterable):
          self._dataframe_cached = None

      def add(self, key: str, value: T) -> None:
-         """Add a key/value pair to the registry.
+         """
+         Add a key/value pair to the registry.

-         Args:
+         Parameters
+         ----------
          key (string): Unique name or key of the object
          value (object): The registered object
          """
@@ -124,6 +131,9 @@
          else:
              return f"Empty {self.__class__.__name__}"

+     def __repr__(self):
+         return f"<{self.__class__.__name__} of {self[0].__class__}>"
+
      def __iter__(self) -> Iterator[T]:
          """Iterate over all objects in the registry"""
          return (w for w in self._elements.values())
@@ -147,10 +157,13 @@
          the first in sorted order is returned. If the specification does not match,
          a RuntimeError is raised.

-         Args:
-             spec [int or str]: Index or string specification of an object
+         Parameters
+         ----------
+         spec: int, str
+             Index or string specification of an object

-         Returns:
+         Returns
+         -------
              Matched object
          """
          if spec is None:
@@ -159,10 +172,9 @@
              raise IndexError(f"{__class__.__name__} indexed with empty string")
          matches = self.find(spec)
          if len(matches) == 0:
-             print(str(self))
              raise IndexError(
-                 f"{__class__.__name__} has no entry matching the specification '{spec}'.\n"
-                 f"Possible values are: " + ", ".join(self._elements.keys())
+                 f"{__class__.__name__} has no entry matching the specification '{spec}'."
+                 f"Possible values are:\n" + str(self)
              )
          elif len(matches) == 1:
              return matches[0]
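For context, a sketch of the lookup behaviour this error message serves, assuming the preconfigured registries such as `siibra.atlases` are `InstanceTable` instances:

```python
import siibra

atlas = siibra.atlases["human"]  # fuzzy string spec, unique match expected
first = siibra.atlases[0]        # integer index into sorted order

# a non-matching spec raises IndexError, now listing the table contents:
# siibra.atlases["no such atlas"]
```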
@@ -286,7 +298,7 @@ def siibra_tqdm(iterable: Iterable[T] = None, *args, **kwargs):
      return tqdm(
          iterable,
          *args,
-         disable=kwargs.pop("disable", False) or (logger.level > 20),
+         disable=kwargs.pop("disable", False) or (logger.level > logging.INFO),
          **kwargs
      )

@@ -334,10 +346,11 @@ class MapIndex:
          return f"(volume:{self.volume}, label:{self.label}, fragment:{self.fragment})"

      def __repr__(self):
-         return f"{self.__class__.__name__}{str(self)}"
+         frag = f"'{self.fragment}'" if self.fragment else self.fragment
+         return f"<{self.__class__.__name__}(volume={self.volume}, label={self.label}, fragment={frag})>"

      def __eq__(self, other):
-         assert isinstance(other, self.__class__)
+         assert isinstance(other, self.__class__), f'Cannot compare {self.__class__} and {other.__class__}'
          return all([
              self.volume == other.volume,
              self.label == other.label,
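A small sketch of the new `__repr__` and stricter `__eq__`, assuming `MapIndex` is constructed from the `volume`, `label`, and optional `fragment` fields shown above:

```python
from siibra.commons import MapIndex

idx = MapIndex(volume=0, label=112)
print(repr(idx))  # <MapIndex(volume=0, label=112, fragment=None)>

# comparison against a foreign type now fails with a descriptive message:
# idx == "not an index"  ->  AssertionError: Cannot compare ...
```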
@@ -353,9 +366,6 @@ class MapType(Enum):
      STATISTICAL = 2


- SIIBRA_DEFAULT_MAPTYPE = MapType.LABELLED
- SIIBRA_DEFAULT_MAP_THRESHOLD = None
-
  REMOVE_FROM_NAME = [
      "hemisphere",
      " -",
@@ -392,10 +402,8 @@ NZCACHE = {}


  def nonzero_coordinates(arr):
-     obj_id = id(arr)
-     if obj_id not in NZCACHE:
-         NZCACHE[obj_id] = np.c_[np.nonzero(arr > 0)]
-     return NZCACHE[obj_id]
+     # TODO: fix caching
+     return np.c_[np.nonzero(arr > 0)]


  def affine_scaling(affine):
@@ -408,34 +416,18 @@ def affine_scaling(affine):
      return np.prod(unit_lengths)


- def iterate_connected_components(img: Nifti1Image):
-     """
-     Provide an iterator over masks of connected components in the given image.
-     """
-     from skimage import measure
-     imgdata = np.asanyarray(img.dataobj).squeeze()
-     components = measure.label(imgdata > 0)
-     component_labels = np.unique(components)
-     assert component_labels[0] == 0
-     return (
-         (label, Nifti1Image((components == label).astype('uint8'), img.affine))
-         for label in component_labels[1:]
-     )
-
-
- def compare_maps(map1: Nifti1Image, map2: Nifti1Image):
+ def compare_arrays(arr1: np.ndarray, affine1: np.ndarray, arr2: np.ndarray, affine2: np.ndarray):
      """
-     Compare two maps, given as Nifti1Image objects.
-     This function exploits that nibabel's get_fdata() caches the numerical arrays,
-     so we can use the object id to cache extraction of the nonzero coordinates.
+     Compare two arrays in physical space as defined by the given affine matrices.
+     Matrices map voxel coordinates to physical coordinates.
+     This function uses the object id to cache extraction of the nonzero coordinates.
      Repeated calls involving the same map will therefore be much faster as they
      will only access the image array if overlapping pixels are detected.

      It is recommended to install the indexed-gzip package,
      which will further speed this up.
      """
-
-     a1, a2 = [m.get_fdata().squeeze() for m in (map1, map2)]
+     a1, a2 = arr1.squeeze(), arr2.squeeze()

      def homog(XYZ):
          return np.c_[XYZ, np.ones(XYZ.shape[0])]
@@ -446,7 +438,7 @@ def compare_maps(map1: Nifti1Image, map2: Nifti1Image):
      # Compute the nonzero voxels in map2 and their correspondences in map1
      XYZnz2 = nonzero_coordinates(a2)
      N2 = XYZnz2.shape[0]
-     warp2on1 = np.dot(np.linalg.inv(map1.affine), map2.affine)
+     warp2on1 = np.dot(np.linalg.inv(affine1), affine2)
      XYZnz2on1 = (np.dot(warp2on1, homog(XYZnz2).T).T[:, :3] + 0.5).astype("int")

      # valid voxel pairs
@@ -454,9 +446,9 @@
          np.logical_and.reduce(
              [
                  XYZnz2on1 >= 0,
-                 XYZnz2on1 < map1.shape[:3],
+                 XYZnz2on1 < arr1.shape[:3],
                  XYZnz2 >= 0,
-                 XYZnz2 < map2.shape[:3],
+                 XYZnz2 < arr2.shape[:3],
              ]
          ),
          1,
@@ -481,14 +473,14 @@
      # Compute the nonzero voxels in map1 with their correspondences in map2
      XYZnz1 = nonzero_coordinates(a1)
      N1 = XYZnz1.shape[0]
-     warp1on2 = np.dot(np.linalg.inv(map2.affine), map1.affine)
+     warp1on2 = np.dot(np.linalg.inv(affine2), affine1)

      # Voxels referring to the union of the nonzero pixels in both maps
      XYZa1 = np.unique(np.concatenate((XYZnz1, XYZnz2on1)), axis=0)
      XYZa2 = (np.dot(warp1on2, homog(XYZa1).T).T[:, :3] + 0.5).astype("int")
      valid = np.all(
          np.logical_and.reduce(
-             [XYZa1 >= 0, XYZa1 < map1.shape[:3], XYZa2 >= 0, XYZa2 < map2.shape[:3]]
+             [XYZa1 >= 0, XYZa1 < arr1.shape[:3], XYZa2 >= 0, XYZa2 < arr2.shape[:3]]
          ),
          1,
      )
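`compare_maps` becomes `compare_arrays`, taking arrays and affines separately instead of `Nifti1Image` objects. A hedged sketch of adapting a caller, with hypothetical file names:

```python
import nibabel as nib
from siibra.commons import compare_arrays

img1 = nib.load("map1.nii.gz")  # hypothetical inputs
img2 = nib.load("map2.nii.gz")

# the former compare_maps(img1, img2) call becomes:
scores = compare_arrays(
    img1.get_fdata(), img1.affine,
    img2.get_fdata(), img2.affine,
)
```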
@@ -520,47 +512,75 @@
      )


- class PolyLine:
-     """Simple polyline representation which allows equidistant sampling.."""
-
-     def __init__(self, pts):
-         self.pts = pts
-         self.lengths = [
-             np.sqrt(np.sum((pts[i, :] - pts[i - 1, :]) ** 2))
-             for i in range(1, pts.shape[0])
-         ]
+ def resample_img_to_img(
+     source_img: Nifti1Image,
+     target_img: Nifti1Image,
+     interpolation: str = ""
+ ) -> Nifti1Image:
+     """
+     Resamples to source image to match the target image according to target's
+     affine. (A wrapper of `nilearn.image.resample_to_img`.)
+
+     Parameters
+     ----------
+     source_img : Nifti1Image
+     target_img : Nifti1Image
+     interpolation : str, Default: "nearest" if the source image is a mask otherwise "linear".
+         Can be 'continuous', 'linear', or 'nearest'. Indicates the resample method.
+
+     Returns
+     -------
+     Nifti1Image
+     """
+     interpolation = "nearest" if np.array_equal(np.unique(source_img.dataobj), [0, 1]) else "linear"
+     resampled_img = resample_to_img(
+         source_img=source_img,
+         target_img=target_img,
+         interpolation=interpolation
+     )
+     return resampled_img

-     def length(self):
-         return sum(self.lengths)

-     def sample(self, d):
+ def connected_components(
+     imgdata: np.ndarray,
+     background: int = 0,
+     connectivity: int = 2,
+     threshold: float = 0.0,
+ ) -> Generator[Tuple[int, np.ndarray], None, None]:
+     """
+     Provide an iterator over connected components in the array. If the image
+     data is float (such as probability maps), it will convert to a mask and
+     then find the connected components.
+
+     Note
+     ----
+     `Uses skimage.measure.label()` to determine foreground compenents.
+
+     Parameters
+     ----------
+     imgdata : np.ndarray
+     background_value : int, Default: 0
+     connectivity : int, Default: 2
+     threshold: float, Default: 0.0
+         The threshold used to create mask from probability maps, i.e, anything
+         below set to 0 and rest to 1.
+
+     Yields
+     ------
+     Generator[Tuple[int, np.ndarray], None, None]
+         tuple of integer label of the component and component as an nd.array in
+         the shape of the original image.
+     """
+     from skimage import measure

-         # if d is interable, we assume a list of sample positions
-         try:
-             iter(d)
-         except TypeError:
-             positions = [d]
-         else:
-             positions = d
-
-         samples = []
-         for s_ in positions:
-             s = min(max(s_, 0), 1)
-             target_distance = s * self.length()
-             current_distance = 0
-             for i, length in enumerate(self.lengths):
-                 current_distance += length
-                 if current_distance >= target_distance:
-                     p1 = self.pts[i, :]
-                     p2 = self.pts[i + 1, :]
-                     r = (target_distance - current_distance + length) / length
-                     samples.append(p1 + (p2 - p1) * r)
-                     break
-
-         if len(samples) == 1:
-             return samples[0]
-         else:
-             return np.array(samples)
+     mask = (imgdata > threshold).astype('uint8')
+     components = measure.label(mask, connectivity=connectivity, background=background)
+     component_labels = np.unique(components)
+     return (
+         (label, (components == label).astype('uint8'))
+         for label in component_labels
+         if label > 0
+     )


  def unify_stringlist(L: list):
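The new `connected_components` works on plain arrays and yields `(label, mask)` pairs, thresholding float data first. A self-contained sketch with a toy probability map:

```python
import numpy as np
from siibra.commons import connected_components

# two disjoint 2x2x2 blobs in a toy 3D probability map
prob = np.zeros((10, 10, 10))
prob[1:3, 1:3, 1:3] = 0.9
prob[6:8, 6:8, 6:8] = 0.4

# threshold=0.3 keeps both blobs; labels are 1 and 2
for label, mask in connected_components(prob, threshold=0.3):
    print(label, int(mask.sum()))  # -> 1 8, then 2 8
```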
@@ -606,11 +626,15 @@ def MI(arr1, arr2, nbins=100, normalized=True):
      """
      Compute the mutual information between two 3D arrays, which need to have the same shape.

-     Parameters:
-         arr1 : First 3D array
-         arr2 : Second 3D array
-         nbins : number of bins to use for computing the joint histogram (applies to intensity range)
-         normalized : Boolean, default:True
+     Parameters
+     ----------
+     arr1: np.ndarray
+         First 3D array
+     arr2: np.ndarray
+         Second 3D array
+     nbins: int
+         number of bins to use for computing the joint histogram (applies to intensity range)
+     normalized: Boolean. Default: True
          if True, the normalized MI of arrays X and Y will be returned,
          leading to a range of values between 0 and 1. Normalization is
          achieved by NMI = 2*MI(X,Y) / (H(X) + H(Y)), where H(x) is the entropy of X
@@ -766,3 +790,34 @@ class Species(Enum):

      def __repr__(self):
          return f"{self.__class__.__name__}: {str(self)}"
+
+
+ def generate_uuid(string: str):
+     if isinstance(string, str):
+         b = string.encode("UTF-8")
+     elif isinstance(string, Nifti1Image):
+         b = string.to_bytes()
+     else:
+         raise ValueError(f"Cannot build uuid for parameter type {type(string)}")
+     hex_string = md5(b).hexdigest()
+     return str(UUID(hex=hex_string))
+
+
+ def translation_matrix(tx: float, ty: float, tz: float):
+     """Construct a 3D homoegneous translation matrix."""
+     return np.array([
+         [1, 0, 0, tx],
+         [0, 1, 0, ty],
+         [0, 0, 1, tz],
+         [0, 0, 0, 1]
+     ])
+
+
+ def y_rotation_matrix(alpha: float):
+     """Construct a 3D y axis rotation matrix."""
+     return np.array([
+         [math.cos(alpha), 0, math.sin(alpha), 0],
+         [0, 1, 0, 0],
+         [-math.sin(alpha), 0, math.cos(alpha), 0],
+         [0, 0, 0, 1]
+     ])
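The two new affine helpers compose by plain matrix multiplication. A quick worked check, rotating a point 90° about the y axis and then translating it:

```python
import math
import numpy as np
from siibra.commons import translation_matrix, y_rotation_matrix

# M applies the rotation first, then the translation
M = translation_matrix(10.0, 0.0, 0.0) @ y_rotation_matrix(math.pi / 2)
point = np.array([1.0, 0.0, 0.0, 1.0])  # homogeneous coordinates
print(M @ point)  # ~ [10., 0., -1., 1.]
```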
siibra/configuration/__init__.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2021
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
siibra/configuration/configuration.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2018-2021
+ # Copyright 2018-2024
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH

  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,11 +14,11 @@
  # limitations under the License.

  from ..commons import logger, __version__, SIIBRA_USE_CONFIGURATION, siibra_tqdm
- from ..retrieval.repositories import GitlabConnector, RepositoryConnector
+ from ..retrieval.repositories import GitlabConnector, RepositoryConnector, GithubConnector
  from ..retrieval.exceptions import NoSiibraConfigMirrorsAvailableException
  from ..retrieval.requests import SiibraHttpRequestError

- from typing import Union
+ from typing import Union, List
  from collections import defaultdict
  from requests.exceptions import ConnectionError
  from os import path
@@ -35,16 +35,20 @@ class Configuration:
      """

      CONFIG_REPOS = [
-         ("https://jugit.fz-juelich.de", 3484),
-         ("https://gitlab.ebrains.eu", 93),
+         (GithubConnector, "FZJ-INM1-BDA", "siibra-configurations"),
+         (GitlabConnector, "https://gitlab.ebrains.eu", 892)
      ]
-
-     CONFIGURATIONS = [
-         GitlabConnector(server, project, "siibra-{}".format(__version__), skip_branchtest=True)
-         for server, project in CONFIG_REPOS
+     CONFIG_CONNECTORS: List[RepositoryConnector] = [
+         conn(
+             server_or_owner,
+             project_or_repo,
+             reftag="siibra-{}".format(__version__),
+             skip_branchtest=True
+         )
+         for conn, server_or_owner, project_or_repo in CONFIG_REPOS
      ]

-     CONFIGURATION_EXTENSIONS = []
+     CONFIG_EXTENSIONS = []

      _cleanup_funcs = []

@@ -65,7 +69,7 @@
          self.spec_loaders = defaultdict(list)

          # retrieve json spec loaders from the default configuration
-         for connector in self.CONFIGURATIONS:
+         for connector in self.CONFIG_CONNECTORS:
              try:
                  for folder in self.get_folders(connector):
                      loaders = connector.get_loaders(folder, suffix='.json')
@@ -74,7 +78,7 @@
                      break
              except (ConnectionError, SiibraHttpRequestError):
                  logger.error(f"Cannot load configuration from {str(connector)}")
-                 *_, last = self.CONFIGURATIONS
+                 *_, last = self.CONFIG_CONNECTORS
                  if connector is last:
                      raise NoSiibraConfigMirrorsAvailableException(
                          "Tried all mirrors, none available."
@@ -83,7 +87,7 @@
              raise RuntimeError("Cannot pull any default siibra configuration.")

          # add additional spec loaders from extension configurations
-         for connector in self.CONFIGURATION_EXTENSIONS:
+         for connector in self.CONFIG_EXTENSIONS:
              try:
                  for folder in self.get_folders(connector):
                      self.spec_loaders[folder].extend(
@@ -110,7 +114,7 @@
          if not isinstance(conn, RepositoryConnector):
              raise RuntimeError("Configuration needs to be an instance of RepositoryConnector or a valid str")
          logger.info(f"Using custom configuration from {str(conn)}")
-         cls.CONFIGURATIONS = [conn]
+         cls.CONFIG_CONNECTORS = [conn]
          # call registered cleanup functions
          for func in cls._cleanup_funcs:
              func()
@@ -121,11 +125,11 @@
              conn = RepositoryConnector._from_url(conn)
          if not isinstance(conn, RepositoryConnector):
              raise RuntimeError("conn needs to be an instance of RepositoryConnector or a valid str")
-         if conn in cls.CONFIGURATION_EXTENSIONS:
+         if conn in cls.CONFIG_EXTENSIONS:
              logger.warning(f"The configuration {str(conn)} is already registered.")
          else:
              logger.info(f"Extending configuration with {str(conn)}")
-             cls.CONFIGURATION_EXTENSIONS.append(conn)
+             cls.CONFIG_EXTENSIONS.append(conn)
          # call registered cleanup functions
          for func in cls._cleanup_funcs:
              func()
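A hedged sketch of how these renamed attributes are typically driven from the top level; the helper names `use_configuration` and `extend_configuration` are assumptions not shown in this diff:

```python
import siibra

# replace the default mirrors (sets Configuration.CONFIG_CONNECTORS)
siibra.use_configuration("https://github.com/FZJ-INM1-BDA/siibra-configurations")

# register an extra source (appends to Configuration.CONFIG_EXTENSIONS)
siibra.extend_configuration("/path/to/local/configuration")
```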
@@ -164,7 +168,8 @@
          for fname, loader in siibra_tqdm(
              specloaders,
              total=len(specloaders),
-             desc=f"Loading preconfigured {obj_class} instances"
+             desc=f"Loading preconfigured {obj_class} instances",
+             unit=obj_class
          ):
              # filename is added to allow Factory creating reasonable default object identifiers\
              obj = Factory.from_json(dict(loader.data, **{'filename': fname}))