siibra 0.4a35__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (35)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +1 -0
  3. siibra/commons.py +38 -17
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +38 -12
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +22 -2
  8. siibra/core/parcellation.py +42 -22
  9. siibra/core/region.py +56 -95
  10. siibra/features/anchor.py +7 -4
  11. siibra/features/connectivity/functional_connectivity.py +8 -1
  12. siibra/features/connectivity/regional_connectivity.py +14 -19
  13. siibra/features/dataset/ebrains.py +1 -1
  14. siibra/features/feature.py +193 -29
  15. siibra/features/image/__init__.py +1 -1
  16. siibra/features/image/image.py +1 -0
  17. siibra/features/image/volume_of_interest.py +14 -5
  18. siibra/features/tabular/__init__.py +2 -0
  19. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  20. siibra/livequeries/ebrains.py +2 -3
  21. siibra/locations/location.py +4 -3
  22. siibra/locations/pointset.py +2 -2
  23. siibra/retrieval/datasets.py +73 -3
  24. siibra/retrieval/repositories.py +17 -6
  25. siibra/retrieval/requests.py +68 -61
  26. siibra/volumes/neuroglancer.py +9 -9
  27. siibra/volumes/nifti.py +4 -5
  28. siibra/volumes/parcellationmap.py +157 -97
  29. siibra/volumes/sparsemap.py +27 -31
  30. siibra/volumes/volume.py +1 -1
  31. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/METADATA +2 -1
  32. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/RECORD +35 -34
  33. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  34. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  35. {siibra-0.4a35.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
siibra/retrieval/datasets.py CHANGED
@@ -72,7 +72,7 @@ class EbrainsBaseDataset(ABC):
         raise NotImplementedError

     @abstractproperty
-    def custodians(self) -> EbrainsDatasetPerson:
+    def custodians(self) -> List[EbrainsDatasetPerson]:
         raise NotImplementedError

     def __hash__(self):
@@ -168,10 +168,80 @@ class EbrainsDataset(EbrainsBaseDataset):
     def custodians(self) -> EbrainsDatasetPerson:
         return self.detail.get("custodians")

+class EbrainsV3DatasetVersion(EbrainsBaseDataset):
+
+    @staticmethod
+    def parse_person(d: dict) -> EbrainsDatasetPerson:
+        assert "https://openminds.ebrains.eu/core/Person" in d.get("type"), f"Cannot convert a non person to a person dict!"
+        _id = d.get('id')
+        name = f"{d.get('givenName')} {d.get('familyName')}"
+        return {
+            '@id': _id,
+            'schema.org/shortName': name,
+            'identifier': _id,
+            "shortName": name,
+            "name": name
+        }
+
+    def __init__(self, id, *, cached_data=None) -> None:
+        super().__init__()
+
+        self._id = id
+        self._cached_data = cached_data
+
+    @property
+    def detail(self):
+        if not self._cached_data:
+            match = re.search(r"([a-f0-9-]+)$", self._id)
+            instance_id = match.group(1)
+            self._cached_data = MultiSourcedRequest(
+                requests=[
+                    GitlabProxy(
+                        GitlabProxyEnum.DATASETVERSION_V3,
+                        instance_id=instance_id,
+                    ),
+                ]
+            ).data
+        return self._cached_data
+
+    @property
+    def id(self) -> str :
+        return self._id
+
+    @property
+    def name(self) -> str :
+        fullname = self.detail.get("fullName")
+        version_id = self.detail.get("versionIdentifier")
+        return f"{fullname} ({version_id})"
+
+    @property
+    def urls(self) -> List[EbrainsDatasetUrl] :
+        return [{
+            "url": doi.get("identifier", None)
+        } for doi in self.detail.get("doi", []) ]
+
+    @property
+    def description(self) -> str :
+        return self.detail.get("description", "")
+
+    @property
+    def contributors(self) -> List[EbrainsDatasetPerson] :
+        return [EbrainsV3DatasetVersion.parse_person(d) for d in self.detail.get("author", [])]
+
+    @property
+    def ebrains_page(self) -> str :
+        if len(self.urls) > 0:
+            return self.urls[0].get("url")
+        return None
+
+    @property
+    def custodians(self) -> EbrainsDatasetPerson:
+        return [EbrainsV3DatasetVersion.parse_person(d) for d in self.detail.get("custodian", [])]
+
 class EbrainsV3Dataset(EbrainsBaseDataset):
     # TODO finish implementing me
     # some fields are currently missing, e.g. desc, contributors etc.
-    def __init__(self, id, *, cached_data) -> None:
+    def __init__(self, id, *, cached_data=None) -> None:
         super().__init__()

         self._id = id
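The new EbrainsV3DatasetVersion resolves its metadata lazily: the detail property extracts the trailing UUID from the id, fetches the openMINDS DatasetVersion record through the Gitlab proxy on first access and caches it, so name, urls, description, contributors and custodians are simple lookups on that record. A minimal usage sketch (the identifier below is a placeholder, and importing the class from siibra.retrieval.datasets is assumed):

    from siibra.retrieval.datasets import EbrainsV3DatasetVersion

    # placeholder id; any identifier ending in a DatasetVersion UUID would do
    dsv = EbrainsV3DatasetVersion("00000000-0000-0000-0000-000000000000")
    print(dsv.name)          # "<fullName> (<versionIdentifier>)"
    print(dsv.ebrains_page)  # first DOI url, or None if the record lists none
    for person in dsv.contributors:
        print(person["name"])  # dicts built by parse_person() from openMINDS Person records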
@@ -180,7 +250,7 @@ class EbrainsV3Dataset(EbrainsBaseDataset):
     @property
     def detail(self):
         if not self._cached_data:
-            match = re.search(r"([a-f0-9-]+)$", self.id)
+            match = re.search(r"([a-f0-9-]+)$", self._id)
            instance_id = match.group(1)
            self._cached_data = MultiSourcedRequest(
                requests=[
siibra/retrieval/repositories.py CHANGED
@@ -16,11 +16,10 @@
 from .requests import DECODERS, HttpRequest, EbrainsRequest, SiibraHttpRequestError
 from .cache import CACHE

-from .. import logger
+from ..commons import logger, siibra_tqdm

 from abc import ABC, abstractmethod
 from urllib.parse import quote
-from tqdm import tqdm
 import os
 from zipfile import ZipFile
 from typing import List
@@ -84,9 +83,7 @@ class RepositoryConnector(ABC):
         if progress is None or all_cached:
             return result
         else:
-            return list(tqdm(
-                result, total=len(fnames), desc=progress, disable=logger.level > 20
-            ))
+            return list(siibra_tqdm(result, total=len(fnames), desc=progress))

     @classmethod
     def _from_url(cls, url: str):
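This replaces ad-hoc tqdm calls (each carrying its own disable=logger.level > 20 flag) with a shared siibra_tqdm helper from commons. commons.py is not part of this excerpt, so the following is only a hedged sketch of what such a wrapper plausibly does, inferred from the call it replaces:

    # hypothetical sketch of a siibra_tqdm-style wrapper; the real helper lives
    # in siibra/commons.py and may differ in detail
    import logging
    from tqdm import tqdm

    logger = logging.getLogger("siibra")

    def siibra_tqdm(iterable=None, **kwargs):
        # hide the progress bar whenever the logger is above INFO (20),
        # mirroring the removed per-call `disable=logger.level > 20`
        kwargs.setdefault("disable", logger.level > logging.INFO)
        return tqdm(iterable, **kwargs)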
@@ -101,6 +98,9 @@ class RepositoryConnector(ABC):
                 f"connector from url '{url}'."
             )

+    def __eq__(self, other):
+        return self.base_url == other.base_url
+

 class LocalFileRepository(RepositoryConnector):

@@ -159,6 +159,9 @@ class LocalFileRepository(RepositoryConnector):
     def __str__(self):
         return f"{self.__class__.__name__} at {self._folder}"

+    def __eq__(self, other):
+        return self._folder == other._folder
+

 class GitlabConnector(RepositoryConnector):

@@ -270,6 +273,12 @@ class GitlabConnector(RepositoryConnector):
         with open(f"{archive_directory}/{folder}/{filename}", "rb") as fp:
             return decoder(fp.read())

+    def __eq__(self, other):
+        return all([
+            self.base_url == other.base_url,
+            self.reftag == other.reftag
+        ])
+

 class ZipfileConnector(RepositoryConnector):

@@ -307,6 +316,9 @@ class ZipfileConnector(RepositoryConnector):
             result.append(fname)
         return result

+    def __eq__(self, other):
+        return self.url == other.url
+
     class FileLoader:
         """
         Loads a file from the zip archive, but mimics the behaviour
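The __eq__ methods added to RepositoryConnector, LocalFileRepository, GitlabConnector and ZipfileConnector make two connectors that point at the same resource compare equal by value (base URL, folder, zip URL, or base URL plus reftag) rather than by object identity. A small illustration, assuming LocalFileRepository takes the folder path as suggested by _folder and __str__ above:

    import tempfile
    from siibra.retrieval.repositories import LocalFileRepository

    folder = tempfile.mkdtemp()   # throwaway folder, purely illustrative
    a = LocalFileRepository(folder)
    b = LocalFileRepository(folder)
    assert a == b                 # same folder -> equal after this change
    assert a is not b             # but still two distinct objects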
@@ -625,7 +637,6 @@ class EbrainsPublicDatasetConnectorMinds(RepositoryConnector):
             logger.info(f"Using title '{title}' for EBRAINS dataset search, ignoring id '{dataset_id}'")
             url = f"{self.base_url}/{self.QUERY_ID}/instances?databaseScope={stage}&title={title}"
         req = EbrainsRequest(url, DECODERS[".json"])
-        print(req.cachefile)
         response = req.get()
         self._files = {}
         results = response.get('results', [])
siibra/retrieval/requests.py CHANGED
@@ -15,7 +15,7 @@

 from .cache import CACHE
 from .exceptions import EbrainsAuthenticationError
-from ..commons import logger, HBP_AUTH_TOKEN, KEYCLOAK_CLIENT_ID, KEYCLOAK_CLIENT_SECRET
+from ..commons import logger, HBP_AUTH_TOKEN, KEYCLOAK_CLIENT_ID, KEYCLOAK_CLIENT_SECRET, siibra_tqdm, SIIBRA_USE_LOCAL_SNAPSPOT
 from .. import __version__

 import json
@@ -26,15 +26,20 @@ from nibabel import Nifti1Image, GiftiImage, streamlines
 from skimage import io
 import gzip
 from io import BytesIO
-import urllib
+import urllib.parse
 import pandas as pd
 import numpy as np
-from tqdm import tqdm
 from typing import List, Callable, Any, TYPE_CHECKING
 from enum import Enum
 from functools import wraps
 from time import sleep
 import sys
+import platform
+
+if platform.system() == "Linux":
+    from filelock import FileLock as Lock
+else:
+    from filelock import SoftFileLock as Lock

 if TYPE_CHECKING:
     from .repositories import GitlabConnector
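Downloads are now serialized through the filelock package: FileLock (OS-level advisory locking) on Linux, SoftFileLock (a plain lock file) elsewhere. Both expose the same context-manager API, so the rest of the module can use the Lock alias uniformly; a minimal sketch with an illustrative path:

    from filelock import SoftFileLock as Lock   # or FileLock on Linux, as above

    with Lock("/tmp/some-download.lock"):       # path is illustrative
        ...  # only one process at a time runs the guarded download-and-rename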
@@ -48,7 +53,7 @@ DECODERS = {
     ".json": lambda b: json.loads(b.decode()),
     ".tck": lambda b: streamlines.load(BytesIO(b)),
     ".csv": lambda b: pd.read_csv(BytesIO(b), delimiter=";"),
-    ".tsv": lambda b: pd.read_csv(BytesIO(b), delimiter="\t"),
+    ".tsv": lambda b: pd.read_csv(BytesIO(b), delimiter="\t").dropna(axis=0, how="all"),
     ".txt": lambda b: pd.read_csv(BytesIO(b), delimiter=" ", header=None),
     ".zip": lambda b: ZipFile(BytesIO(b)),
     ".png": lambda b: io.imread(BytesIO(b)),
@@ -66,16 +71,16 @@ class SiibraHttpRequestError(Exception):

     def __str__(self):
         return (
-            f"{self.msg}\n\tStatus code:{self.status_code:68.68}\n\tUrl:{self.response.url:76.76}"
+            f"{self.msg}\n\tStatus code: {self.status_code}\n\tUrl: {self.url:76.76}"
         )


 class HttpRequest:
     def __init__(
         self,
-        url,
-        func=None,
-        msg_if_not_cached=None,
+        url: str,
+        func: Callable=None,
+        msg_if_not_cached: str=None,
         refresh=False,
         post=False,
         **kwargs,
@@ -133,58 +138,55 @@ class HttpRequest:
         return os.path.isfile(self.cachefile)

     def _retrieve(self, block_size=1024, min_bytesize_with_no_progress_info=2e8):
-        # Loads the data from http if required.
-        # If the data is already cached, None is returned,
-        # otherwise data (as it is already in memory anyway).
-        # The caller should load the cachefile only
-        # if None is returned.
+        # Populates the file cache with the data from http if required.
+        # noop if 1/ data is already cached and 2/ refresh flag not set
+        # The caller should load the cachefile after _retrieve successfuly executes

         if self.cached and not self.refresh:
             return
-        else:
-            # not yet in cache, perform http request.
-            if self.msg_if_not_cached is not None:
-                logger.debug(self.msg_if_not_cached)
-            headers = self.kwargs.get('headers', {})
-            other_kwargs = {key: self.kwargs[key] for key in self.kwargs if key != "headers"}
-            if self.post:
-                r = requests.post(self.url, headers={
-                    **USER_AGENT_HEADER,
-                    **headers,
-                }, **other_kwargs, stream=True)
-            else:
-                r = requests.get(self.url, headers={
-                    **USER_AGENT_HEADER,
-                    **headers,
-                }, **other_kwargs, stream=True)
-            if r.ok:
-                size_bytes = int(r.headers.get('content-length', 0))
-                if size_bytes > min_bytesize_with_no_progress_info:
-                    progress_bar = tqdm(
-                        total=size_bytes, unit='iB', unit_scale=True,
-                        position=0, leave=True,
-                        desc=f"Downloading {os.path.split(self.url)[-1]} ({size_bytes / 1024**2:.1f} MiB)"
-                    )
-                temp_cachefile = self.cachefile + "_temp"
-                with open(temp_cachefile, "wb") as f:
-                    for data in r.iter_content(block_size):
-                        if size_bytes > min_bytesize_with_no_progress_info:
-                            progress_bar.update(len(data))
-                        f.write(data)
-                if size_bytes > min_bytesize_with_no_progress_info:
-                    progress_bar.close()
-                self.refresh = False
-                os.rename(temp_cachefile, self.cachefile)
-                with open(self.cachefile, 'rb') as f:
-                    return f.read()
-            else:
-                raise SiibraHttpRequestError(status_code=r.status_code, url=self.url)
+
+        # not yet in cache, perform http request.
+        if self.msg_if_not_cached is not None:
+            logger.debug(self.msg_if_not_cached)
+
+        headers = self.kwargs.get('headers', {})
+        other_kwargs = {key: self.kwargs[key] for key in self.kwargs if key != "headers"}
+
+        http_method = requests.post if self.post else requests.get
+        r = http_method(self.url, headers={
+            **USER_AGENT_HEADER,
+            **headers,
+        }, **other_kwargs, stream=True)
+
+        if not r.ok:
+            raise SiibraHttpRequestError(status_code=r.status_code, url=self.url)
+
+        size_bytes = int(r.headers.get('content-length', 0))
+        if size_bytes > min_bytesize_with_no_progress_info:
+            progress_bar = siibra_tqdm(
+                total=size_bytes, unit='iB', unit_scale=True,
+                position=0, leave=True,
+                desc=f"Downloading {os.path.split(self.url)[-1]} ({size_bytes / 1024**2:.1f} MiB)"
+            )
+        temp_cachefile = f"{self.cachefile}_temp"
+        lock = Lock(f"{temp_cachefile}.lock")
+
+        with lock:
+            with open(temp_cachefile, "wb") as f:
+                for data in r.iter_content(block_size):
+                    if size_bytes > min_bytesize_with_no_progress_info:
+                        progress_bar.update(len(data))
+                    f.write(data)
+            if size_bytes > min_bytesize_with_no_progress_info:
+                progress_bar.close()
+            self.refresh = False
+            os.rename(temp_cachefile, self.cachefile)
+

     def get(self):
-        data = self._retrieve()
-        if data is None:
-            with open(self.cachefile, "rb") as f:
-                data = f.read()
+        self._retrieve()
+        with open(self.cachefile, "rb") as f:
+            data = f.read()
         try:
             return data if self.func is None else self.func(data)
         except Exception as e:
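_retrieve now has a linear control flow: return early when the cache is valid, raise on a bad response, otherwise stream into <cachefile>_temp while holding a file lock and rename into place; get() then always reads the cache file. A condensed, self-contained sketch of that flow (names and the error type are simplified; the real code uses SiibraHttpRequestError, siibra_tqdm and the platform-dependent Lock alias shown earlier):

    import os
    import requests
    from filelock import SoftFileLock as Lock   # stand-in for the platform-dependent alias

    def fetch_to_cache(url: str, cachefile: str, block_size: int = 1024) -> bytes:
        if not os.path.isfile(cachefile):
            r = requests.get(url, stream=True)
            if not r.ok:
                raise RuntimeError(f"HTTP {r.status_code} for {url}")  # simplified error
            temp = f"{cachefile}_temp"
            with Lock(f"{temp}.lock"):
                with open(temp, "wb") as f:
                    for chunk in r.iter_content(block_size):
                        f.write(chunk)
                os.rename(temp, cachefile)       # publish into the cache in one step
        with open(cachefile, "rb") as f:         # the caller always reads the cache file
            return f.read()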
@@ -262,13 +264,13 @@ class EbrainsRequest(HttpRequest):
             logger.debug(f"token_endpoint exists in .well-known/openid-configuration. Setting _IAM_TOKEN_ENDPOINT to {json_resp.get('token_endpoint')}")
             cls._IAM_TOKEN_ENDPOINT = json_resp.get("token_endpoint")
         else:
-            logger.warn("expect token endpoint in .well-known/openid-configuration, but was not present")
+            logger.warning("expect token endpoint in .well-known/openid-configuration, but was not present")

         if "device_authorization_endpoint" in json_resp:
             logger.debug(f"device_authorization_endpoint exists in .well-known/openid-configuration. setting _IAM_DEVICE_ENDPOINT to {json_resp.get('device_authorization_endpoint')}")
             cls._IAM_DEVICE_ENDPOINT = json_resp.get("device_authorization_endpoint")
         else:
-            logger.warn("expected device_authorization_endpoint in .well-known/openid-configuration, but was not present")
+            logger.warning("expected device_authorization_endpoint in .well-known/openid-configuration, but was not present")

     @classmethod
     def fetch_token(cls, **kwargs):
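The warn → warning renames here and throughout the volume modules below follow the standard library: logging.Logger.warn is a deprecated alias for Logger.warning. For example:

    import logging
    logging.getLogger("siibra").warning("message")  # preferred; .warn() is the deprecated spelling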
@@ -299,13 +301,13 @@ class EbrainsRequest(HttpRequest):
         if not scopes:
             return None
         if not isinstance(scopes, list):
-            logger.warn(f"scopes needs to be a list, is but is not... skipping")
+            logger.warning(f"scopes needs to be a list, is but is not... skipping")
             return None
         if not all(isinstance(scope, str) for scope in scopes):
-            logger.warn(f"scopes needs to be all str, but is not")
+            logger.warning(f"scopes needs to be all str, but is not")
             return None
         if len(scopes) == 0:
-            logger.warn(f'provided empty list as scopes... skipping')
+            logger.warning(f'provided empty list as scopes... skipping')
             return None
         return "+".join(scopes)

@@ -466,6 +468,7 @@ class GitlabProxyEnum(Enum):
     DATASET_V1 = "DATASET_V1"
     PARCELLATIONREGION_V1 = "PARCELLATIONREGION_V1"
     DATASET_V3 = "DATASET_V3"
+    DATASETVERSION_V3 = "DATASETVERSION_V3"

     @property
     def connectors(self) -> List['GitlabConnector']:
@@ -473,7 +476,10 @@ class GitlabProxyEnum(Enum):
             ("https://jugit.fz-juelich.de", 7846),
             ("https://gitlab.ebrains.eu", 421),
         ]
-        from .repositories import GitlabConnector
+        from .repositories import GitlabConnector, LocalFileRepository
+        if SIIBRA_USE_LOCAL_SNAPSPOT:
+            logger.info(f"Using localsnapshot at {SIIBRA_USE_LOCAL_SNAPSPOT}")
+            return [LocalFileRepository(SIIBRA_USE_LOCAL_SNAPSPOT)]
         return [GitlabConnector(server[0], server[1], "master", archive_mode=True) for server in servers]

     @try_all_connectors()
@@ -492,6 +498,7 @@ class GitlabProxy(HttpRequest):
     folder_dict = {
         GitlabProxyEnum.DATASET_V1: "ebrainsquery/v1/dataset",
         GitlabProxyEnum.DATASET_V3: "ebrainsquery/v3/Dataset",
+        GitlabProxyEnum.DATASETVERSION_V3: "ebrainsquery/v3/DatasetVersion",
         GitlabProxyEnum.PARCELLATIONREGION_V1: "ebrainsquery/v1/parcellationregions",
     }

siibra/volumes/neuroglancer.py CHANGED
@@ -144,8 +144,8 @@ class NeuroglancerProvider(volume.VolumeProvider, srctype="neuroglancer/precompu
         s0 = np.identity(4)
         s0[:3, -1] = list(bbox.minpoint.transform(np.linalg.inv(img.affine)))
         result_affine = np.dot(img.affine, s0)  # adjust global bounding box offset to get global affine
-        voxdims = np.dot(np.linalg.inv(result_affine), np.r_[bbox.shape, 1])[:3]
-        result_arr = np.zeros((voxdims + .5).astype('int'))
+        voxdims = np.asanyarray(bbox.transform(result_affine).shape, dtype="int")
+        result_arr = np.zeros(voxdims, dtype=img.dataobj.dtype)
         result = nib.Nifti1Image(dataobj=result_arr, affine=result_affine)

         arr = np.asanyarray(img.dataobj)
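Both providers (the same change appears in siibra/volumes/nifti.py below) now take the output array shape from the bounding box transformed into voxel space, and allocate the array with the source image's dtype instead of the float default of np.zeros. A small worked example of the underlying arithmetic, with illustrative numbers:

    import numpy as np

    affine = np.diag([2.0, 2.0, 2.0, 1.0])       # 2 mm isotropic voxels, no rotation
    bbox_shape_mm = np.array([10.0, 20.0, 30.0])
    voxdims = np.dot(np.linalg.inv(affine), np.r_[bbox_shape_mm, 1])[:3]
    print(voxdims.astype(int))                   # [ 5 10 15] -> 5 x 10 x 15 voxels
    result_arr = np.zeros(voxdims.astype(int), dtype=np.uint8)   # dtype illustrative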
@@ -162,7 +162,7 @@ class NeuroglancerProvider(volume.VolumeProvider, srctype="neuroglancer/precompu

         if num_conflicts > 0:
             num_voxels = np.count_nonzero(result_arr)
-            logger.warn(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")
+            logger.warning(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")

         return result

@@ -201,7 +201,7 @@ class NeuroglancerVolume:
             return self._transform_nm

         self._transform_nm = np.identity(1)
-        logger.warn(f"No transform.json found at {self.url}, using identity.")
+        logger.warning(f"No transform.json found at {self.url}, using identity.")
         return self._transform_nm

     @transform_nm.setter
@@ -278,7 +278,7 @@ class NeuroglancerVolume:
             scale = suitable[-1]
         else:
             scale = self.scales[0]
-            logger.warn(
+            logger.warning(
                 f"Requested resolution {resolution_mm} is not available. "
                 f"Falling back to the highest possible resolution of "
                 f"{', '.join(map('{:.2f}'.format, scale.res_mm))} mm."
@@ -294,7 +294,7 @@ class NeuroglancerVolume:
                 f"relative to the limit of {self.MAX_BYTES/1024**3}GiB."
             )
         if scale_changed:
-            logger.warn(f"Resolution was reduced to {scale.res_mm} to provide a feasible volume size")
+            logger.warning(f"Resolution was reduced to {scale.res_mm} to provide a feasible volume size")
         return scale


@@ -399,7 +399,7 @@ class NeuroglancerScale:
         x1, y1, z1 = np.minimum(self.chunk_sizes + [x0, y0, z0], self.size)
         chunk_czyx = self.volume._io.read_chunk(self.key, (x0, x1, y0, y1, z0, z1))
         if not chunk_czyx.shape[0] == 1 and not self.color_warning_issued:
-            logger.warn(
+            logger.warning(
                 "Color channel data is not yet supported. Returning first channel only."
             )
             self.color_warning_issued = True
@@ -419,7 +419,7 @@ class NeuroglancerScale:

         for dim in range(3):
             if bbox_.shape[dim] < 1:
-                logger.warn(
+                logger.warning(
                     f"Bounding box in voxel space will be enlarged to voxel size 1 along axis {dim}."
                 )
                 bbox_.maxpoint[dim] = bbox_.maxpoint[dim] + 1
@@ -507,7 +507,7 @@ class NeuroglancerMesh(volume.VolumeProvider, srctype="neuroglancer/precompmesh"
         fragment_names = meshinfo.get('fragments')

         if len(fragment_names) == 0:
-            raise RuntimeError("No fragments found at {meshurl}")
+            raise RuntimeError(f"No fragments found at {meshurl}")
         elif len(self._meshes) > 1:
             # multiple meshes were configured, so we expect only one fragment under each mesh url
             if len(fragment_names) > 1:
siibra/volumes/nifti.py CHANGED
@@ -50,7 +50,6 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
             self._img_loaders = {lbl: loader(url) for lbl, url in src.items()}
         else:
             raise ValueError(f"Invalid source specification for {self.__class__}: {src}")
-
         if not isinstance(src, nib.Nifti1Image):
             self._init_url = src

@@ -72,7 +71,7 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
         for loader in self._img_loaders.values():
             img = loader()
             if len(img.shape) > 3:
-                logger.warn(
+                logger.warning(
                     f"N-D NIfTI volume has shape {img.shape}, but "
                     f"bounding box considers only {img.shape[:3]}"
                 )
@@ -95,8 +94,8 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
         s0 = np.identity(4)
         s0[:3, -1] = list(bbox.minpoint.transform(np.linalg.inv(img.affine)))
         result_affine = np.dot(img.affine, s0)  # adjust global bounding box offset to get global affine
-        voxdims = np.dot(np.linalg.inv(result_affine), np.r_[bbox.shape, 1])[:3]
-        result_arr = np.zeros((voxdims + .5).astype('int'))
+        voxdims = np.asanyarray(bbox.transform(result_affine).shape, dtype="int")
+        result_arr = np.zeros(voxdims, dtype=img.dataobj.dtype)
         result = nib.Nifti1Image(dataobj=result_arr, affine=result_affine)

         arr = np.asanyarray(img.dataobj)
@@ -113,7 +112,7 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):

         if num_conflicts > 0:
             num_voxels = np.count_nonzero(result_arr)
-            logger.warn(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")
+            logger.warning(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")

         return result