siibra-0.4a56-py3-none-any.whl → siibra-0.4a58-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of siibra has been flagged as possibly problematic.

siibra/VERSION CHANGED
@@ -1 +1 @@
- 0.4a56
+ 0.4a58
siibra/configuration/factory.py CHANGED
@@ -35,8 +35,6 @@ from typing import List, Type
  import pandas as pd
  from io import BytesIO
  
- MIN_VOLUMES_FOR_SPARSE_MAP = 125
- 
  BUILDFUNCS = {
      "juelich/iav/atlas/v1.0.0": "build_atlas",
      "siibra/space/v0.0.1": "build_space",
@@ -74,6 +72,10 @@ class Factory:
              result.append(
                  datasets.EbrainsV3DatasetVersion(id=spec["ebrains"]["openminds/DatasetVersion"])
              )
+         if "openminds/Dataset" in spec.get("ebrains", {}):
+             result.append(
+                 datasets.EbrainsV3Dataset(id=spec["ebrains"]["openminds/Dataset"])
+             )
          return result
  
      @classmethod
@@ -269,44 +271,10 @@ class Factory:
          identifier = f"{spec['@type'].replace('/','-')}_{basename}"
          volumes = cls.extract_volumes(spec)
  
-         if ("sparsemap" in spec) and spec.get("sparsemap").get("is_sparsemap"):
-             Maptype = sparsemap.SparseMap
-             return Maptype(
-                 identifier=spec.get("@id", identifier),
-                 name=spec.get("name", name),
-                 space_spec=spec.get("space", {}),
-                 parcellation_spec=spec.get("parcellation", {}),
-                 indices=spec.get("indices", {}),
-                 volumes=volumes,
-                 shortname=spec.get("shortName", ""),
-                 description=spec.get("description"),
-                 modality=spec.get("modality"),
-                 publications=spec.get("publications", []),
-                 datasets=cls.extract_datasets(spec),
-                 is_cached=spec.get("sparsemap").get("cached", False),
-                 cache_url=spec.get("sparsemap").get("url", "")
-             )
- 
-         Maptype = parcellationmap.Map
-         if len(volumes) > MIN_VOLUMES_FOR_SPARSE_MAP:
-             logger.debug(
-                 f"Using sparse map for {spec.get('filename')} to code its "
-                 f"{len(volumes)} volumes efficiently."
-             )
+         if spec.get("sparsemap", {}).get("is_sparsemap"):
              Maptype = sparsemap.SparseMap
          else:
-             max_z = max(
-                 d.get('z', 0)
-                 for l in spec.get("indices", {}).values()
-                 for d in l
-             ) + 1
-             if max_z > MIN_VOLUMES_FOR_SPARSE_MAP:
-                 logger.debug(
-                     f"Using sparse map for {spec.get('filename')} to code its "
-                     f"{max_z} z levels efficiently."
-                 )
-                 Maptype = sparsemap.SparseMap
- 
+             Maptype = parcellationmap.Map
          return Maptype(
              identifier=spec.get("@id", identifier),
              name=spec.get("name", name),
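
Note on the map-type change above: the MIN_VOLUMES_FOR_SPARSE_MAP heuristic (switch to a SparseMap once a map exceeds 125 volumes or z levels) is gone, and the map type is now decided solely by an explicit flag in the configuration spec. A minimal sketch of the new rule, using a hypothetical spec dict:

    # Hypothetical map spec; only the "sparsemap" entry matters here.
    spec = {"name": "some statistical map", "sparsemap": {"is_sparsemap": True}}

    # Same selection logic as the new factory code; the empty-dict default
    # keeps the lookup safe when the "sparsemap" key is absent.
    is_sparse = spec.get("sparsemap", {}).get("is_sparsemap")
    print("SparseMap" if is_sparse else "Map")  # -> SparseMap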

siibra/features/feature.py CHANGED
@@ -180,9 +180,10 @@ class Feature:
      @property
      def id(self):
          prefix = ''
-         id_set = {ds.id for ds in self.datasets if hasattr(ds, 'id')}
-         if len(id_set) == 1:
-             prefix = list(id_set)[0] + '--'
+         for ds in self.datasets:
+             if hasattr(ds, "id"):
+                 prefix = ds.id + '--'
+                 break
          return prefix + md5(self.name.encode("utf-8")).hexdigest()
  
      @staticmethod

siibra/retrieval/datasets.py CHANGED
@@ -102,7 +102,7 @@ class EbrainsBaseDataset(ABC):
  
  
  class EbrainsDataset(EbrainsBaseDataset):
- 
+     """Ebrains dataset v1 connection"""
      def __init__(self, id, name=None, embargo_status: List[EbrainsDatasetEmbargoStatus] = None, *, cached_data=None):
          super().__init__()
  
@@ -214,9 +214,20 @@ class EbrainsV3DatasetVersion(EbrainsBaseDataset):
      @property
      def name(self) -> str:
          fullname = self.detail.get("fullName")
+         for dataset in self.is_version_of:
+             if fullname is not None:
+                 break
+             fullname = dataset.name
+ 
          version_id = self.detail.get("versionIdentifier")
          return f"{fullname} ({version_id})"
  
+     @property
+     def is_version_of(self):
+         if not hasattr(self, "_is_version_of"):
+             self._is_version_of = [EbrainsV3Dataset(id=id.get("id")) for id in self.detail.get("isVersionOf", [])]
+         return self._is_version_of
+ 
      @property
      def urls(self) -> List[EbrainsDatasetUrl]:
          return [{
@@ -225,7 +236,12 @@ class EbrainsV3DatasetVersion(EbrainsBaseDataset):
  
      @property
      def description(self) -> str:
-         return self.detail.get("description", "")
+         description = self.detail.get("description")
+         for ds in self.is_version_of:
+             if description:
+                 break
+             description = ds.description
+         return description or ""
  
      @property
      def contributors(self) -> List[EbrainsDatasetPerson]:
@@ -241,15 +257,36 @@ class EbrainsV3DatasetVersion(EbrainsBaseDataset):
      def custodians(self) -> EbrainsDatasetPerson:
          return [EbrainsV3DatasetVersion.parse_person(d) for d in self.detail.get("custodian", [])]
  
+     @property
+     def version_changes(self):
+         return self.detail.get("versionInnovation", "")
+ 
+     @property
+     def version_identifier(self):
+         return self.detail.get("versionIdentifier", "")
+ 
  
  class EbrainsV3Dataset(EbrainsBaseDataset):
-     # TODO finish implementing me
-     # some fields are currently missing, e.g. desc, contributors etc.
      def __init__(self, id, *, cached_data=None) -> None:
          super().__init__()
  
          self._id = id
          self._cached_data = cached_data
+         self._contributers = None
+ 
+     @property
+     def id(self) -> str:
+         return self._id
+ 
+     @property
+     def name(self) -> str:
+         return self.detail.get("fullName")
+ 
+     @property
+     def urls(self) -> List[EbrainsDatasetUrl]:
+         return [{
+             "url": doi.get("identifier", None)
+         } for doi in self.detail.get("doi", [])]
  
      @property
      def detail(self):
@@ -265,3 +302,32 @@ class EbrainsV3Dataset(EbrainsBaseDataset):
              ]
          ).data
          return self._cached_data
+ 
+     @property
+     def description(self) -> str:
+         return self.detail.get("description", "")
+ 
+     @property
+     def contributors(self):
+         if self._contributers is None:
+             contributers = {}
+             for version_id in self.version_ids:
+                 contributers.update(
+                     {c['@id']: c for c in EbrainsV3DatasetVersion(version_id).contributors}
+                 )
+             self._contributers = list(contributers.values())
+         return self._contributers
+ 
+     @property
+     def ebrains_page(self) -> str:
+         if len(self.urls) > 0:
+             return self.urls[0].get("url")
+         return None
+ 
+     @property
+     def custodians(self) -> EbrainsDatasetPerson:
+         return [EbrainsV3DatasetVersion.parse_person(d) for d in self.detail.get("custodian", [])]
+ 
+     @property
+     def version_ids(self) -> List['str']:
+         return [version.get("id") for version in self.detail.get("versions", [])]
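
The datasets.py changes above wire EbrainsV3DatasetVersion to its parent dataset(s): name and description now fall back through is_version_of when the version record itself lacks them. A standalone sketch of that fallback loop; the class and values are illustrative stand-ins, not real EBRAINS records:

    class ParentDataset:                      # stand-in for EbrainsV3Dataset
        name = "Full dataset name"

    def resolve_fullname(own_fullname, parents):
        # Same loop shape as the new EbrainsV3DatasetVersion.name property:
        # keep the version's own value if present, else take the parent's.
        fullname = own_fullname
        for ds in parents:
            if fullname is not None:
                break
            fullname = ds.name
        return fullname

    print(resolve_fullname(None, [ParentDataset()]))  # -> Full dataset name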

siibra/retrieval/requests.py CHANGED
@@ -23,7 +23,7 @@ from zipfile import ZipFile
  import requests
  import os
  from nibabel import Nifti1Image, GiftiImage, streamlines
- from skimage import io
+ from skimage import io as skimage_io
  import gzip
  from io import BytesIO
  import urllib.parse
@@ -47,7 +47,6 @@ if TYPE_CHECKING:
  USER_AGENT_HEADER = {"User-Agent": f"siibra-python/{__version__}"}
  
  DECODERS = {
-     ".nii.gz": lambda b: Nifti1Image.from_bytes(gzip.decompress(b)),
      ".nii": lambda b: Nifti1Image.from_bytes(b),
      ".gii": lambda b: GiftiImage.from_bytes(b),
      ".json": lambda b: json.loads(b.decode()),
@@ -56,7 +55,7 @@ DECODERS = {
      ".tsv": lambda b: pd.read_csv(BytesIO(b), delimiter="\t").dropna(axis=0, how="all"),
      ".txt": lambda b: pd.read_csv(BytesIO(b), delimiter=" ", header=None),
      ".zip": lambda b: ZipFile(BytesIO(b)),
-     ".png": lambda b: io.imread(BytesIO(b)),
+     ".png": lambda b: skimage_io.imread(BytesIO(b)),
      ".npy": lambda b: np.load(BytesIO(b))
  }
  
@@ -108,40 +107,38 @@ class HttpRequest:
          """
          assert url is not None
          self.url = url
-         suitable_decoders = [dec for sfx, dec in DECODERS.items() if url.endswith(sfx)]
-         if (func is None) and (len(suitable_decoders) > 0):
-             assert len(suitable_decoders) == 1
-             self.func = suitable_decoders[0]
-         else:
-             self.func = func
+         self._set_decoder_func(func)
          self.kwargs = kwargs
          self.cachefile = CACHE.build_filename(self.url + json.dumps(kwargs))
          self.msg_if_not_cached = msg_if_not_cached
          self.refresh = refresh
          self.post = post
-         self._set_decoder_func(func, url)
- 
-     def _set_decoder_func(self, func, fileurl: str):
-         urlpath = urllib.parse.urlsplit(fileurl).path
-         if func is None:
-             suitable_decoders = [
-                 dec for sfx, dec in DECODERS.items() if urlpath.endswith(sfx)
-             ]
-             if len(suitable_decoders) > 0:
-                 assert len(suitable_decoders) == 1
-                 self.func = suitable_decoders[0]
-                 return
-         self.func = func
+ 
+     @staticmethod
+     def find_suitiable_decoder(url: str):
+         urlpath = urllib.parse.urlsplit(url).path
+         if urlpath.endswith('.gz'):
+             dec = HttpRequest.find_suitiable_decoder(urlpath[:-3])
+             return lambda b: dec(gzip.decompress(b))
+ 
+         suitable_decoders = [dec for sfx, dec in DECODERS.items() if urlpath.endswith(sfx)]
+         if len(suitable_decoders) > 0:
+             assert len(suitable_decoders) == 1
+             return suitable_decoders[0]
+ 
+     def _set_decoder_func(self, func):
+         self.func = func or self.find_suitiable_decoder(self.url)
  
      @property
      def cached(self):
          return os.path.isfile(self.cachefile)
  
      def _retrieve(self, block_size=1024, min_bytesize_with_no_progress_info=2e8):
-         # Populates the file cache with the data from http if required.
-         # noop if 1/ data is already cached and 2/ refresh flag not set
-         # The caller should load the cachefile after _retrieve successfuly executes
- 
+         """
+         Populates the file cache with the data from http if required.
+         noop if 1/ data is already cached and 2/ refresh flag not set
+         The caller should load the cachefile after _retrieve successfuly executes
+         """
          if self.cached and not self.refresh:
              return
  
@@ -196,7 +193,7 @@ class HttpRequest:
              # if that happens, remove cachefile and
              try:
                  os.unlink(self.cachefile)
-             except Exception:  # TODO: do not use bare except
+             except Exception:
                  pass
              raise e
  
@@ -210,7 +207,7 @@ class ZipfileRequest(HttpRequest):
      def __init__(self, url, filename, func=None):
          HttpRequest.__init__(self, url, func=func)
          self.filename = filename
-         self._set_decoder_func(func, filename)
+         self._set_decoder_func(self.find_suitiable_decoder(self.filename))
  
      def get(self):
          self._retrieve()
@@ -279,6 +276,7 @@ class EbrainsRequest(HttpRequest):
          Fetch an EBRAINS token using commandline-supplied username/password
          using the data proxy endpoint.
  
+ 
          :ref:`Details on how to access EBRAINS are here.<accessEBRAINS>`
          """
          cls.device_flow(**kwargs)
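
The decoder changes above explain why the dedicated ".nii.gz" entry disappeared from DECODERS: the new find_suitiable_decoder strips a trailing ".gz", resolves a decoder for the remaining suffix, and wraps it in gzip.decompress, so any gzipped suffix is handled generically. A self-contained sketch of that recursion, with a toy decoder table in place of the real DECODERS:

    import gzip
    import urllib.parse

    DECODERS = {".nii": lambda b: ("nifti", len(b))}  # toy stand-in decoder

    def find_decoder(url: str):
        # Mirrors the new recursive lookup: peel ".gz", then match a suffix.
        path = urllib.parse.urlsplit(url).path
        if path.endswith(".gz"):
            inner = find_decoder(path[:-3])
            return lambda b: inner(gzip.decompress(b))
        matches = [dec for sfx, dec in DECODERS.items() if path.endswith(sfx)]
        return matches[0] if matches else None

    dec = find_decoder("https://example.org/T1.nii.gz")
    print(dec(gzip.compress(b"data")))  # -> ('nifti', 4)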

siibra/volumes/neuroglancer.py CHANGED
@@ -200,7 +200,7 @@ class NeuroglancerVolume:
              self._transform_nm = np.array(res)
              return self._transform_nm
  
-         self._transform_nm = np.identity(1)
+         self._transform_nm = np.identity(4)
          logger.warning(f"No transform.json found at {self.url}, using identity.")
          return self._transform_nm
  
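The identity fallback above fixes a shape bug: voxel-to-physical transforms act on homogeneous 3D coordinates, so a no-op transform must be a 4x4 matrix, and np.identity(1) cannot multiply such coordinates at all. A two-line illustration:

    import numpy as np

    voxel = np.array([10, 20, 30, 1])  # homogeneous voxel coordinate
    print(np.identity(4) @ voxel)      # no-op affine -> [10. 20. 30.  1.]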
@@ -771,8 +771,9 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
771
771
  self,
772
772
  item: Union[point.Point, pointset.PointSet, Nifti1Image],
773
773
  minsize_voxel=1,
774
- lower_threshold=0.0
775
- ) -> List[Union[Assignment, AssignImageResult]]:
774
+ lower_threshold=0.0,
775
+ **kwargs
776
+ ) -> List[Union[Assignment,AssignImageResult]]:
776
777
  """
777
778
  For internal use only. Returns a dataclass, which provides better static type checking.
778
779
  """
@@ -782,8 +783,8 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
782
783
  if isinstance(item, pointset.PointSet):
783
784
  return self._assign_points(item, lower_threshold)
784
785
  if isinstance(item, Nifti1Image):
785
- return self._assign_image(item, minsize_voxel, lower_threshold)
786
-
786
+ return self._assign_image(item, minsize_voxel, lower_threshold, **kwargs)
787
+
787
788
  raise RuntimeError(
788
789
  f"Items of type {item.__class__.__name__} cannot be used for region assignment."
789
790
  )
@@ -792,7 +793,8 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
792
793
  self,
793
794
  item: Union[point.Point, pointset.PointSet, Nifti1Image],
794
795
  minsize_voxel=1,
795
- lower_threshold=0.0
796
+ lower_threshold=0.0,
797
+ **kwargs
796
798
  ):
797
799
  """Assign an input image to brain regions.
798
800
 
@@ -840,7 +842,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
840
842
  a Point or PointSet, returns None.
841
843
  """
842
844
 
843
- assignments = self._assign(item, minsize_voxel, lower_threshold)
845
+ assignments = self._assign(item, minsize_voxel, lower_threshold, **kwargs)
844
846
 
845
847
  # format assignments as pandas dataframe
846
848
  columns = [
@@ -1020,7 +1022,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
1020
1022
  assignments.append(entry)
1021
1023
  return assignments
1022
1024
 
1023
- def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float) -> List[AssignImageResult]:
1025
+ def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float, split_components: bool = True) -> List[AssignImageResult]:
1024
1026
  """
1025
1027
  Assign an image volume to this parcellation map.
1026
1028
 
@@ -1055,6 +1057,9 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
1055
1057
  seqlen = N or len(it)
1056
1058
  return iter(it) if seqlen < min_elements \
1057
1059
  else siibra_tqdm(it, desc=desc, total=N)
1060
+
1061
+ iter_func = iterate_connected_components if split_components \
1062
+ else lambda img: [(1, img)]
1058
1063
 
1059
1064
  with QUIET and _volume.SubvolumeProvider.UseCaching():
1060
1065
  for frag in self.fragments or {None}:
@@ -1064,7 +1069,7 @@ class Map(concept.AtlasConcept, configuration_folder="maps"):
1064
1069
  desc=f"Assigning to {len(self)} volumes"
1065
1070
  ):
1066
1071
  queryimg_res = resample(queryimg, vol_img.affine, vol_img.shape)
1067
- for mode, maskimg in iterate_connected_components(queryimg_res):
1072
+ for mode, maskimg in iter_func(queryimg_res):
1068
1073
  vol_data = np.asanyarray(vol_img.dataobj)
1069
1074
  position = np.array(np.where(maskimg.get_fdata())).T.mean(0)
1070
1075
  labels = {v.label for L in self._indices.values() for v in L if v.volume == vol}
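
The parcellationmap.py changes above thread **kwargs from Map.assign down to _assign_image, whose new split_components flag (default True) controls whether a query image is first split into connected components or assigned as one single mode. A hedged usage sketch; the map selection and the toy image are assumptions, not a tested call:

    import numpy as np
    import siibra
    from nibabel import Nifti1Image

    # Assumed to resolve to a statistical (probability) map in siibra 0.4.
    mp = siibra.get_map("julich 2.9", "mni152", "statistical")

    # Toy query image; real use would pass data aligned to the map's space.
    img = Nifti1Image(np.random.rand(10, 10, 10).astype(np.float32), np.identity(4))

    # New in this release: keyword arguments pass through to _assign_image.
    assignments = mp.assign(img, split_components=False)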

siibra/volumes/sparsemap.py CHANGED
@@ -225,9 +225,7 @@ class SparseMap(parcellationmap.Map):
          description: str = "",
          modality: str = None,
          publications: list = [],
-         datasets: list = [],
-         is_cached: bool = False,
-         cache_url: str = "",
+         datasets: list = []
      ):
          parcellationmap.Map.__init__(
              self,
@@ -244,7 +242,6 @@ class SparseMap(parcellationmap.Map):
              volumes=volumes,
          )
          self._sparse_index_cached = None
-         self._sparseindex_zip_url = cache_url if is_cached else ""
  
      @property
      def _cache_prefix(self):
@@ -254,14 +251,8 @@ class SparseMap(parcellationmap.Map):
      def sparse_index(self):
          if self._sparse_index_cached is None:
              spind = SparseIndex._from_local_cache(self._cache_prefix)
-             if spind is None and len(self._sparseindex_zip_url) > 0:
-                 logger.debug("Loading SparseIndex from precomputed source.")
-                 try:
-                     spind = self.load_zipped_sparseindex(self._sparseindex_zip_url)
-                 except Exception:
-                     logger.debug("Could not load SparseIndex from precomputed source.", exc_info=1)
              if spind is None:
-                 logger.debug("Loading SparseIndex from Gitlab.")
+                 logger.info("Downloading precomputed SparseIndex...")
                  gconn = GitlabConnector(self._GITLAB_SERVER, self._GITLAB_PROJECT, "main")
                  zip_fname = f"{self.name.replace(' ', '_')}_index.zip"
                  try:
@@ -269,6 +260,7 @@ class SparseMap(parcellationmap.Map):
                      zipfile = gconn.get_loader(zip_fname).url
                      spind = self.load_zipped_sparseindex(zipfile)
                  except Exception:
+                     logger.info("Failed to load precomputed SparseIndex from Gitlab.")
                      logger.debug(f"Could not load SparseIndex from Gitlab at {gconn}", exc_info=1)
              if spind is None:
                  with _volume.SubvolumeProvider.UseCaching():
@@ -449,7 +441,7 @@ class SparseMap(parcellationmap.Map):
                  for volume, value in spind.probs[voxel].items()
              )
  
-     def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float) -> List[parcellationmap.AssignImageResult]:
+     def _assign_image(self, queryimg: Nifti1Image, minsize_voxel: int, lower_threshold: float, split_components: bool = True) -> List[parcellationmap.AssignImageResult]:
          """
          Assign an image volume to this parcellation map.
  
@@ -482,7 +474,10 @@ class SparseMap(parcellationmap.Map):
  
          querydata = np.asanyarray(queryimg.dataobj).squeeze()
  
-         for mode, modeimg in iterate_connected_components(queryimg):
+         iter_func = iterate_connected_components if split_components \
+             else lambda img: [(1, img)]
+ 
+         for mode, modeimg in iter_func(queryimg):
  
              # determine bounding box of the mode
              modemask = np.asanyarray(modeimg.dataobj)

siibra-0.4a56.dist-info/METADATA → siibra-0.4a58.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: siibra
- Version: 0.4a56
+ Version: 0.4a58
  Summary: siibra - Software interfaces for interacting with brain atlases
  Home-page: https://github.com/FZJ-INM1-BDA/siibra-python
  Author: Big Data Analytics Group, Forschungszentrum Juelich, Institute of Neuroscience and Medicine (INM-1)

siibra-0.4a56.dist-info/RECORD → siibra-0.4a58.dist-info/RECORD CHANGED
@@ -1,9 +1,9 @@
- siibra/VERSION,sha256=SctsI0RGiwaZftD942I95MQq6MY4Ycs3d4K9Hrt_q-E,7
+ siibra/VERSION,sha256=1tSFjxSjifTj51RNiReB-NndlknUfzl44UNcXWVReBU,7
  siibra/__init__.py,sha256=PljcqmLhrj7FSasNAQ_DJAWmymYSkW-QiqnvHp_tYek,3941
  siibra/commons.py,sha256=Trop7axUV3562Q1apT_9LLZ_lY7rGdn7ZDhwxFxMr7g,25127
  siibra/configuration/__init__.py,sha256=gXtcw2wbHCJ1o6jnd_HBudpaptHWBke71gcBd3ze7zE,688
  siibra/configuration/configuration.py,sha256=ilXR7Leo9afkEjwwewLL_CkzGaTcOeGW4_qs_Wd8W3Y,6884
- siibra/configuration/factory.py,sha256=9Xq5DHBBz2I5bE4RDpU1q1am__tM-tNPtgt4Bb1FRME,20421
+ siibra/configuration/factory.py,sha256=8SwH7UrlIUZMLa7SNB81565Tzd3cWQXzvo0zdtj4fnY,19110
  siibra/core/__init__.py,sha256=YQkeqGqpOT3aH-PPHeGb1kxSyMbgR3ZHqDCy2I0Zlsk,735
  siibra/core/atlas.py,sha256=FDtzpW1d492hFekttMz9aX-ggDwQrSwXlw1fWWF2tfE,7947
  siibra/core/concept.py,sha256=klwYfuyNR2X4yZY7etKiZIWRZefV_sWLIeP7Nv1dB90,8083
@@ -12,7 +12,7 @@ siibra/core/region.py,sha256=jy9AMDRQAaMx6OFpnbd_bQ9IyqxTsLaxG7bC147Im2Y,24623
  siibra/core/space.py,sha256=dC225jhRFt0KlPtjKEEVre6X8HoDupsymEOrc7AdmmI,5774
  siibra/features/__init__.py,sha256=da1Eyti0i6gMeZlwCQkf7SgZJpePc9CoUP7I53-DC6c,1315
  siibra/features/anchor.py,sha256=KGMRWmppxLzU9uK6c43A91o9DZQNPizkrDSITF9tjug,16109
- siibra/features/feature.py,sha256=VZo0Aqmx7VU1LDIDSxgb4QZYkf0tCdfl_XHwJ7RGQxU,16011
+ siibra/features/feature.py,sha256=cxuD6oVJzFcRd-Z2xlQ1IqeUuUnd2p6_ZAsCNcxP6Ik,15995
  siibra/features/connectivity/__init__.py,sha256=qx1O4CxVnsPSyiVwYlpWzGHNwVW5gCPhfC3JxBL8Iew,1013
  siibra/features/connectivity/functional_connectivity.py,sha256=AkRYufi4r3CeV50FLaI6kjKraQ4t9G58iPn521HRRGk,1407
  siibra/features/connectivity/regional_connectivity.py,sha256=HIMrJro76UpBCQi9EizJRcZZ8c9_EaYoVILO-L_JIS0,12253
@@ -48,9 +48,9 @@ siibra/locations/point.py,sha256=1ea3X9cGKltNPZnnQyBoKoVhCE0U0hGtzWljf-xZw6I,125
  siibra/locations/pointset.py,sha256=GaWnHDN7NRuKvl4hSU6YREcydFX1yWygbEOJBKTW2Ws,7110
  siibra/retrieval/__init__.py,sha256=VaR81n9ldwwbpPgIHmoUPmn8QHlZ0_R633yDqQrx6JA,1028
  siibra/retrieval/cache.py,sha256=NfieYe-3stgjpWQXVGTJSVSLJQOoat-eswj5eWEEaB8,4889
- siibra/retrieval/datasets.py,sha256=hBeBWp0QZXblDkOU3gSnU6kfOf2a2Eqm7l1qZtReS6A,7604
+ siibra/retrieval/datasets.py,sha256=-SYBuwZsUEUtLoDuBhLVl4kIeJtAfr1s8m0AyoG41u0,9611
  siibra/retrieval/repositories.py,sha256=cwA4xfmTfm50dk-WkgMBcIc9M3FfgZAWDRnGUvX0Lds,26557
- siibra/retrieval/requests.py,sha256=hO1LgnDhim0yavz68AFn8CnC6ucpE5BMoGjQAb-DQxg,20959
+ siibra/retrieval/requests.py,sha256=2NQSUCiQ-lKDYEZsTwrZ-GKhWid-4CeqQfRXNqJyueU,20780
  siibra/retrieval/exceptions/__init__.py,sha256=STIPOyunBN2f7d26NTElLTL3xM1KoB3U7jLhNBivguA,173
  siibra/vocabularies/__init__.py,sha256=I_tTB-3xYAdVzRGP0CCVGgd3dP6QiltoJqEZFTr-lkI,1261
  siibra/vocabularies/gene_names.json,sha256=i-gnh753GyZtQfX_dWibNYr_d5ccDPHooOwsdeKUYqE,1647972
@@ -58,13 +58,13 @@ siibra/vocabularies/receptor_symbols.json,sha256=F6DZIArPCBmJV_lWGV-zDpBBH_GOJOZ
  siibra/vocabularies/region_aliases.json,sha256=T2w1wRlxPNTsPppXn0bzC70tNsb8mOjLsoHuxDSYm2w,8563
  siibra/volumes/__init__.py,sha256=b6O5aWKfFU0JGT8PMTUF6OewCti7aRyOno_kjJ4es_U,830
  siibra/volumes/gifti.py,sha256=idULD08pB_wxqzSFHlC_JkhFr-KuHP7FZnqp62QLXGo,5504
- siibra/volumes/neuroglancer.py,sha256=MMtx5dme70er2CZlio6sJYMk8UfP1jJJ4XIvUc81wvU,24048
+ siibra/volumes/neuroglancer.py,sha256=McBRUX0ZVrZ_fJd27hsRqpwH3GoDsYPBNtArZHI2KaY,24048
  siibra/volumes/nifti.py,sha256=3psZ8cdOVRe9nsf82sUivb7VwfhD5MtYqAOs9IzW8Is,8948
- siibra/volumes/parcellationmap.py,sha256=fa1BXPXKOHunqotDoYe0k6iB-2jqmm1sM2rXcvqaTg4,42302
- siibra/volumes/sparsemap.py,sha256=sSLNvjfbvDzfNb9lQrdeggSt2OR32kNt0G5mH04h3m4,21980
+ siibra/volumes/parcellationmap.py,sha256=s4GzecVRMVJC_SBnuBf0L63UOlkvXRMICyyksBpsy-o,42497
+ siibra/volumes/sparsemap.py,sha256=rb4Xj0TDck62tDB-6GjOGRolPwJADzCkAyt007-w3DQ,21681
  siibra/volumes/volume.py,sha256=Er8w5c76ZEKmoWyvEU-HHmZ6bbh8YM51j3jT-XRYDtU,10889
- siibra-0.4a56.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- siibra-0.4a56.dist-info/METADATA,sha256=5_-Jfi4HgVyOezyH8bMThoM8RZ2z7oUgvXiwM54WAPY,9378
- siibra-0.4a56.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
- siibra-0.4a56.dist-info/top_level.txt,sha256=NF0OSGLL0li2qyC7MaU0iBB5Y9S09_euPpvisD0-8Hg,7
- siibra-0.4a56.dist-info/RECORD,,
+ siibra-0.4a58.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ siibra-0.4a58.dist-info/METADATA,sha256=YKauP9zjtMYivRkWBqRBt7abhQklSVHwnKfMv4HPMMk,9378
+ siibra-0.4a58.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+ siibra-0.4a58.dist-info/top_level.txt,sha256=NF0OSGLL0li2qyC7MaU0iBB5Y9S09_euPpvisD0-8Hg,7
+ siibra-0.4a58.dist-info/RECORD,,