siibra 0.4a33__py3-none-any.whl → 0.4a46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (64)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +2 -0
  3. siibra/commons.py +53 -8
  4. siibra/configuration/configuration.py +21 -17
  5. siibra/configuration/factory.py +95 -19
  6. siibra/core/atlas.py +11 -8
  7. siibra/core/concept.py +41 -8
  8. siibra/core/parcellation.py +94 -43
  9. siibra/core/region.py +160 -187
  10. siibra/core/space.py +44 -39
  11. siibra/features/__init__.py +19 -19
  12. siibra/features/anchor.py +9 -6
  13. siibra/features/connectivity/__init__.py +0 -8
  14. siibra/features/connectivity/functional_connectivity.py +11 -3
  15. siibra/features/{basetypes → connectivity}/regional_connectivity.py +46 -33
  16. siibra/features/connectivity/streamline_counts.py +3 -2
  17. siibra/features/connectivity/streamline_lengths.py +3 -2
  18. siibra/features/{basetypes → dataset}/__init__.py +2 -0
  19. siibra/features/{external → dataset}/ebrains.py +3 -3
  20. siibra/features/feature.py +420 -0
  21. siibra/{samplers → features/image}/__init__.py +7 -1
  22. siibra/features/{basetypes/volume_of_interest.py → image/image.py} +12 -7
  23. siibra/features/{external/__init__.py → image/sections.py} +8 -5
  24. siibra/features/image/volume_of_interest.py +70 -0
  25. siibra/features/{cellular → tabular}/__init__.py +7 -11
  26. siibra/features/{cellular → tabular}/bigbrain_intensity_profile.py +5 -2
  27. siibra/features/{cellular → tabular}/cell_density_profile.py +6 -2
  28. siibra/features/{basetypes → tabular}/cortical_profile.py +48 -41
  29. siibra/features/{molecular → tabular}/gene_expression.py +5 -2
  30. siibra/features/{cellular → tabular}/layerwise_bigbrain_intensities.py +6 -2
  31. siibra/features/{cellular → tabular}/layerwise_cell_density.py +9 -3
  32. siibra/features/{molecular → tabular}/receptor_density_fingerprint.py +3 -2
  33. siibra/features/{molecular → tabular}/receptor_density_profile.py +6 -2
  34. siibra/features/tabular/regional_timeseries_activity.py +213 -0
  35. siibra/features/{basetypes → tabular}/tabular.py +14 -9
  36. siibra/livequeries/allen.py +1 -1
  37. siibra/livequeries/bigbrain.py +2 -3
  38. siibra/livequeries/ebrains.py +3 -9
  39. siibra/livequeries/query.py +1 -1
  40. siibra/locations/location.py +4 -3
  41. siibra/locations/point.py +21 -17
  42. siibra/locations/pointset.py +2 -2
  43. siibra/retrieval/__init__.py +1 -1
  44. siibra/retrieval/cache.py +8 -2
  45. siibra/retrieval/datasets.py +149 -29
  46. siibra/retrieval/repositories.py +19 -8
  47. siibra/retrieval/requests.py +98 -116
  48. siibra/volumes/gifti.py +26 -11
  49. siibra/volumes/neuroglancer.py +35 -19
  50. siibra/volumes/nifti.py +8 -9
  51. siibra/volumes/parcellationmap.py +341 -184
  52. siibra/volumes/sparsemap.py +67 -53
  53. siibra/volumes/volume.py +25 -13
  54. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/METADATA +4 -3
  55. siibra-0.4a46.dist-info/RECORD +69 -0
  56. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/WHEEL +1 -1
  57. siibra/features/basetypes/feature.py +0 -248
  58. siibra/features/fibres/__init__.py +0 -14
  59. siibra/features/functional/__init__.py +0 -14
  60. siibra/features/molecular/__init__.py +0 -26
  61. siibra/samplers/bigbrain.py +0 -181
  62. siibra-0.4a33.dist-info/RECORD +0 -71
  63. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/LICENSE +0 -0
  64. {siibra-0.4a33.dist-info → siibra-0.4a46.dist-info}/top_level.txt +0 -0
siibra/retrieval/requests.py CHANGED
@@ -15,7 +15,7 @@
 
 from .cache import CACHE
 from .exceptions import EbrainsAuthenticationError
-from ..commons import logger, HBP_AUTH_TOKEN, KEYCLOAK_CLIENT_ID, KEYCLOAK_CLIENT_SECRET
+from ..commons import logger, HBP_AUTH_TOKEN, KEYCLOAK_CLIENT_ID, KEYCLOAK_CLIENT_SECRET, siibra_tqdm, SIIBRA_USE_LOCAL_SNAPSPOT
 from .. import __version__
 
 import json
@@ -26,15 +26,20 @@ from nibabel import Nifti1Image, GiftiImage, streamlines
 from skimage import io
 import gzip
 from io import BytesIO
-import urllib
+import urllib.parse
 import pandas as pd
 import numpy as np
-from tqdm import tqdm
 from typing import List, Callable, Any, TYPE_CHECKING
 from enum import Enum
 from functools import wraps
 from time import sleep
 import sys
+import platform
+
+if platform.system() == "Linux":
+    from filelock import FileLock as Lock
+else:
+    from filelock import SoftFileLock as Lock
 
 if TYPE_CHECKING:
     from .repositories import GitlabConnector
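The new conditional import picks a locking strategy per platform: FileLock uses the operating system's advisory locks, while SoftFileLock only creates a marker file, the more portable (if less strict) behaviour on non-Linux systems. A minimal sketch of the same pattern, with a hypothetical lock path:

    import platform

    if platform.system() == "Linux":
        from filelock import FileLock as Lock      # OS-level advisory lock
    else:
        from filelock import SoftFileLock as Lock  # plain lock-marker file

    with Lock("/tmp/siibra_demo.lock"):  # hypothetical path; blocks until acquired
        pass  # critical section, e.g. writing a shared cache file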
@@ -48,7 +53,7 @@ DECODERS = {
     ".json": lambda b: json.loads(b.decode()),
     ".tck": lambda b: streamlines.load(BytesIO(b)),
     ".csv": lambda b: pd.read_csv(BytesIO(b), delimiter=";"),
-    ".tsv": lambda b: pd.read_csv(BytesIO(b), delimiter="\t"),
+    ".tsv": lambda b: pd.read_csv(BytesIO(b), delimiter="\t").dropna(axis=0, how="all"),
     ".txt": lambda b: pd.read_csv(BytesIO(b), delimiter=" ", header=None),
     ".zip": lambda b: ZipFile(BytesIO(b)),
     ".png": lambda b: io.imread(BytesIO(b)),
@@ -66,16 +71,16 @@ class SiibraHttpRequestError(Exception):
 
     def __str__(self):
         return (
-            f"{self.msg}\n\tStatus code:{self.status_code:68.68}\n\tUrl:{self.response.url:76.76}"
+            f"{self.msg}\n\tStatus code: {self.status_code}\n\tUrl: {self.url:76.76}"
         )
 
 
 class HttpRequest:
     def __init__(
         self,
-        url,
-        func=None,
-        msg_if_not_cached=None,
+        url: str,
+        func: Callable=None,
+        msg_if_not_cached: str=None,
         refresh=False,
         post=False,
         **kwargs,
@@ -133,58 +138,55 @@ class HttpRequest:
         return os.path.isfile(self.cachefile)
 
     def _retrieve(self, block_size=1024, min_bytesize_with_no_progress_info=2e8):
-        # Loads the data from http if required.
-        # If the data is already cached, None is returned,
-        # otherwise data (as it is already in memory anyway).
-        # The caller should load the cachefile only
-        # if None is returned.
+        # Populates the file cache with the data from http if required.
+        # noop if 1/ data is already cached and 2/ refresh flag not set
+        # The caller should load the cachefile after _retrieve successfuly executes
 
         if self.cached and not self.refresh:
             return
-        else:
-            # not yet in cache, perform http request.
-            if self.msg_if_not_cached is not None:
-                logger.debug(self.msg_if_not_cached)
-            headers = self.kwargs.get('headers', {})
-            other_kwargs = {key: self.kwargs[key] for key in self.kwargs if key != "headers"}
-            if self.post:
-                r = requests.post(self.url, headers={
-                    **USER_AGENT_HEADER,
-                    **headers,
-                }, **other_kwargs, stream=True)
-            else:
-                r = requests.get(self.url, headers={
-                    **USER_AGENT_HEADER,
-                    **headers,
-                }, **other_kwargs, stream=True)
-            if r.ok:
-                size_bytes = int(r.headers.get('content-length', 0))
-                if size_bytes > min_bytesize_with_no_progress_info:
-                    progress_bar = tqdm(
-                        total=size_bytes, unit='iB', unit_scale=True,
-                        position=0, leave=True,
-                        desc=f"Downloading {os.path.split(self.url)[-1]} ({size_bytes / 1024**2:.1f} MiB)"
-                    )
-                temp_cachefile = self.cachefile + "_temp"
-                with open(temp_cachefile, "wb") as f:
-                    for data in r.iter_content(block_size):
-                        if size_bytes > min_bytesize_with_no_progress_info:
-                            progress_bar.update(len(data))
-                        f.write(data)
-                if size_bytes > min_bytesize_with_no_progress_info:
-                    progress_bar.close()
-                self.refresh = False
-                os.rename(temp_cachefile, self.cachefile)
-                with open(self.cachefile, 'rb') as f:
-                    return f.read()
-            else:
-                raise SiibraHttpRequestError(status_code=r.status_code, url=self.url)
+
+        # not yet in cache, perform http request.
+        if self.msg_if_not_cached is not None:
+            logger.debug(self.msg_if_not_cached)
+
+        headers = self.kwargs.get('headers', {})
+        other_kwargs = {key: self.kwargs[key] for key in self.kwargs if key != "headers"}
+
+        http_method = requests.post if self.post else requests.get
+        r = http_method(self.url, headers={
+            **USER_AGENT_HEADER,
+            **headers,
+        }, **other_kwargs, stream=True)
+
+        if not r.ok:
+            raise SiibraHttpRequestError(status_code=r.status_code, url=self.url)
+
+        size_bytes = int(r.headers.get('content-length', 0))
+        if size_bytes > min_bytesize_with_no_progress_info:
+            progress_bar = siibra_tqdm(
+                total=size_bytes, unit='iB', unit_scale=True,
+                position=0, leave=True,
+                desc=f"Downloading {os.path.split(self.url)[-1]} ({size_bytes / 1024**2:.1f} MiB)"
+            )
+        temp_cachefile = f"{self.cachefile}_temp"
+        lock = Lock(f"{temp_cachefile}.lock")
+
+        with lock:
+            with open(temp_cachefile, "wb") as f:
+                for data in r.iter_content(block_size):
+                    if size_bytes > min_bytesize_with_no_progress_info:
+                        progress_bar.update(len(data))
+                    f.write(data)
+            if size_bytes > min_bytesize_with_no_progress_info:
+                progress_bar.close()
+            self.refresh = False
+            os.rename(temp_cachefile, self.cachefile)
+
 
     def get(self):
-        data = self._retrieve()
-        if data is None:
-            with open(self.cachefile, "rb") as f:
-                data = f.read()
+        self._retrieve()
+        with open(self.cachefile, "rb") as f:
+            data = f.read()
         try:
            return data if self.func is None else self.func(data)
        except Exception as e:
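The refactor flattens the nested branches with early returns and changes the contract: _retrieve no longer returns bytes, and get() always reads back from the cache file. The download now uses the write-to-temp-then-rename idiom under a file lock, so a concurrent reader never observes a half-written cache entry. A standalone sketch of that idiom, with hypothetical names:

    import os
    from filelock import SoftFileLock

    def publish_atomically(cachefile: str, payload: bytes) -> None:
        temp = f"{cachefile}_temp"
        with SoftFileLock(f"{temp}.lock"):  # serialize concurrent writers
            with open(temp, "wb") as f:
                f.write(payload)
            # rename is atomic on POSIX within one filesystem: readers see
            # either the old file or the complete new one, never a partial write
            os.rename(temp, cachefile)

    publish_atomically("/tmp/entry.bin", b"payload")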
@@ -262,24 +264,26 @@ class EbrainsRequest(HttpRequest):
             logger.debug(f"token_endpoint exists in .well-known/openid-configuration. Setting _IAM_TOKEN_ENDPOINT to {json_resp.get('token_endpoint')}")
             cls._IAM_TOKEN_ENDPOINT = json_resp.get("token_endpoint")
         else:
-            logger.warn("expect token endpoint in .well-known/openid-configuration, but was not present")
+            logger.warning("expect token endpoint in .well-known/openid-configuration, but was not present")
 
         if "device_authorization_endpoint" in json_resp:
             logger.debug(f"device_authorization_endpoint exists in .well-known/openid-configuration. setting _IAM_DEVICE_ENDPOINT to {json_resp.get('device_authorization_endpoint')}")
             cls._IAM_DEVICE_ENDPOINT = json_resp.get("device_authorization_endpoint")
         else:
-            logger.warn("expected device_authorization_endpoint in .well-known/openid-configuration, but was not present")
+            logger.warning("expected device_authorization_endpoint in .well-known/openid-configuration, but was not present")
 
     @classmethod
-    def fetch_token(cls):
-        """Fetch an EBRAINS token using commandline-supplied username/password
+    def fetch_token(cls, **kwargs):
+        """
+        Fetch an EBRAINS token using commandline-supplied username/password
         using the data proxy endpoint.
+
         :ref:`Details on how to access EBRAINS are here.<accessEBRAINS>`
         """
-        cls.device_flow()
+        cls.device_flow(**kwargs)
 
     @classmethod
-    def device_flow(cls):
+    def device_flow(cls, **kwargs):
         if all([
             not sys.__stdout__.isatty(), # if is tty, do not raise
             not any(k in ['JPY_INTERRUPT_EVENT', "JPY_PARENT_PID"] for k in os.environ), # if is notebook environment, do not raise
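One pattern recurs in this hunk and throughout the release: logger.warn is replaced by logger.warning. Logger.warn is a long-deprecated alias in the standard library's logging module, so only the supported spelling remains:

    import logging

    logger = logging.getLogger("siibra")
    logger.warning("preferred spelling")  # logger.warn(...) is the deprecated alias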
@@ -291,11 +295,33 @@
             )
 
         cls.init_oidc()
+
+        def get_scopes() -> str:
+            scopes = kwargs.get("scopes")
+            if not scopes:
+                return None
+            if not isinstance(scopes, list):
+                logger.warning(f"scopes needs to be a list, is but is not... skipping")
+                return None
+            if not all(isinstance(scope, str) for scope in scopes):
+                logger.warning(f"scopes needs to be all str, but is not")
+                return None
+            if len(scopes) == 0:
+                logger.warning(f'provided empty list as scopes... skipping')
+                return None
+            return "+".join(scopes)
+
+        scopes = get_scopes()
+
+        data = {
+            'client_id': cls._IAM_DEVICE_FLOW_CLIENTID
+        }
+
+        if scopes:
+            data['scopes'] = scopes
         resp = requests.post(
             url=cls._IAM_DEVICE_ENDPOINT,
-            data={
-                'client_id': cls._IAM_DEVICE_FLOW_CLIENTID
-            }
+            data=data
         )
         resp.raise_for_status()
         resp_json = resp.json()
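Since fetch_token now forwards its keyword arguments into device_flow, callers can request additional OAuth scopes. A hedged usage sketch; scopes is the only keyword consumed above, and anything that is not a non-empty list of strings is ignored with a warning:

    from siibra.retrieval.requests import EbrainsRequest  # module path per this diff

    # joined with "+" and sent to the device authorization endpoint
    EbrainsRequest.fetch_token(scopes=["openid", "profile"])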
@@ -420,55 +446,6 @@
         return super().get()
 
 
-class EbrainsKgQuery(EbrainsRequest):
-    """Request outputs from a knowledge graph query."""
-
-    server = "https://kg.humanbrainproject.eu"
-    org = "minds"
-    domain = "core"
-    version = "v1.0.0"
-
-    SC_MESSAGES = {
-        401: "The provided EBRAINS authentication token is not valid",
-        403: "No permission to access the given query",
-        404: "Query with this id not found",
-    }
-
-    def __init__(self, query_id, instance_id=None, schema="dataset", params={}):
-        inst_tail = "/" + instance_id if instance_id is not None else ""
-        self.schema = schema
-        url = "{}/query/{}/{}/{}/{}/{}/instances{}?databaseScope=RELEASED".format(
-            self.server,
-            self.org,
-            self.domain,
-            self.schema,
-            self.version,
-            query_id,
-            inst_tail,
-        )
-        EbrainsRequest.__init__(
-            self,
-            url,
-            decoder=DECODERS[".json"],
-            params=params,
-            msg_if_not_cached=f"Executing EBRAINS KG query {query_id}{inst_tail}",
-        )
-
-    def get(self):
-        try:
-            result = EbrainsRequest.get(self)
-        except SiibraHttpRequestError as e:
-            if e.status_code in self.SC_MESSAGES:
-                raise RuntimeError(self.SC_MESSAGES[e.status_code])
-            else:
-                raise RuntimeError(
-                    f"Could not process HTTP request (status code: "
-                    f"{e.status_code}). Message was: {e.msg}"
-                    f"URL was: {e.url}"
-                )
-        return result
-
-
 def try_all_connectors():
     def outer(fn):
         @wraps(fn)
@@ -491,6 +468,7 @@ class GitlabProxyEnum(Enum):
     DATASET_V1 = "DATASET_V1"
     PARCELLATIONREGION_V1 = "PARCELLATIONREGION_V1"
     DATASET_V3 = "DATASET_V3"
+    DATASETVERSION_V3 = "DATASETVERSION_V3"
 
     @property
     def connectors(self) -> List['GitlabConnector']:
@@ -498,7 +476,10 @@
             ("https://jugit.fz-juelich.de", 7846),
             ("https://gitlab.ebrains.eu", 421),
         ]
-        from .repositories import GitlabConnector
+        from .repositories import GitlabConnector, LocalFileRepository
+        if SIIBRA_USE_LOCAL_SNAPSPOT:
+            logger.info(f"Using localsnapshot at {SIIBRA_USE_LOCAL_SNAPSPOT}")
+            return [LocalFileRepository(SIIBRA_USE_LOCAL_SNAPSPOT)]
         return [GitlabConnector(server[0], server[1], "master", archive_mode=True) for server in servers]
 
     @try_all_connectors()
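SIIBRA_USE_LOCAL_SNAPSPOT (spelling verbatim from the source) comes from ..commons; when set, connectors short-circuits to a LocalFileRepository instead of the two Gitlab mirrors. Assuming, as the name suggests, that commons reads it from an identically named environment variable, offline use would look roughly like this:

    import os

    # Hypothetical: set before siibra is imported, so that commons picks it
    # up and connectors() returns a LocalFileRepository instead of Gitlab.
    os.environ["SIIBRA_USE_LOCAL_SNAPSPOT"] = "/path/to/configuration-snapshot"

    import siibra  # noqa: E402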
@@ -515,8 +496,9 @@
 class GitlabProxy(HttpRequest):
 
     folder_dict = {
-        GitlabProxyEnum.DATASET_V1: "ebrainsquery/v1/datasets",
-        GitlabProxyEnum.DATASET_V3: "ebrainsquery/v3/datasets",
+        GitlabProxyEnum.DATASET_V1: "ebrainsquery/v1/dataset",
+        GitlabProxyEnum.DATASET_V3: "ebrainsquery/v3/Dataset",
+        GitlabProxyEnum.DATASETVERSION_V3: "ebrainsquery/v3/DatasetVersion",
         GitlabProxyEnum.PARCELLATIONREGION_V1: "ebrainsquery/v1/parcellationregions",
     }
 
siibra/volumes/gifti.py CHANGED
@@ -55,12 +55,24 @@ class GiftiMesh(volume.VolumeProvider, srctype="gii-mesh"):
 
     def fetch(self, fragment: str = None, **kwargs):
         """
-        Returns the mesh as a dictionary with two numpy arrays: An Nx3 array of vertex coordinates,
-        and an Mx3 array of face definitions using row indices of the vertex array.
-
-        A fragment name can be specified to choose from multiple fragments.
-        If not specified, multiple fragments will be merged into one mesh. In such a case,
-        the verts and faces arrays of different fragments are appended to one another.
+        Returns the mesh as a dictionary with two numpy arrays.
+
+        Parameters
+        ----------
+        fragment: str, default: None
+            A fragment name can be specified to choose from multiple fragments.
+
+        Note
+        ----
+        If not specified, multiple fragments will be merged into one mesh.
+        In such a case, the verts and faces arrays of different fragments
+        are appended to one another.
+
+        Returns
+        -------
+        dict
+            - 'verts': An Nx3 array of vertex coordinates,
+            - 'faces': an Mx3 array of face definitions using row indices of the vertex array
         """
         for arg in ["resolution_mm", "voi"]:
             if kwargs.get(arg):
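The rewritten docstring (now numpydoc-style) documents the merge behaviour for unspecified fragments. A small sketch of what "appended to one another" implies, assuming face indices of later fragments are offset by the vertices already collected:

    import numpy as np

    # two hypothetical fragments in the documented {'verts', 'faces'} layout
    frag_a = {"verts": np.zeros((4, 3)), "faces": np.array([[0, 1, 2], [1, 2, 3]])}
    frag_b = {"verts": np.ones((3, 3)), "faces": np.array([[0, 1, 2]])}

    merged = {
        "verts": np.vstack([frag_a["verts"], frag_b["verts"]]),
        # re-index frag_b's faces so they still point at frag_b's vertices
        "faces": np.vstack([frag_a["faces"], frag_b["faces"] + len(frag_a["verts"])]),
    }
    print(merged["verts"].shape, merged["faces"].shape)  # (7, 3) (3, 3)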
@@ -93,11 +105,14 @@ class GiftiMesh(volume.VolumeProvider, srctype="gii-mesh"):
 
     def fetch_iter(self):
         """
-        Iterator returning all submeshes, each represented as a dictionary
-        with elements
-        - 'verts': An Nx3 array of vertex coordinates,
-        - 'faces': an Mx3 array of face definitions using row indices of the vertex array
-        - 'name': Name of the of the mesh variant
+        Iterator returning all submeshes
+
+        Returns
+        -------
+        dict
+            - 'verts': An Nx3 array of vertex coordinates,
+            - 'faces': an Mx3 array of face definitions using row indices of the vertex array
+            - 'name': Name of the of the mesh variant
         """
         return (self.fetch(v) for v in self.variants)
 
siibra/volumes/neuroglancer.py CHANGED
@@ -58,14 +58,14 @@ class NeuroglancerProvider(volume.VolumeProvider, srctype="neuroglancer/precompu
 
         Parameters
         ----------
-        fragment: str
-            Optional name of a fragment volume to fetch, if any.
-            For example, some volumes are split into left and right hemisphere fragments.
-            see :func:`~siibra.volumes.Volume.fragments`
+        fragment: str, optional
+            The name of a fragment volume to fetch, if any. For example,
+            some volumes are split into left and right hemisphere fragments.
+            See :func:`~siibra.volumes.Volume.fragments`
         resolution_mm: float
-            Specify the resolugion
-        voi : BoundingBox
-            optional specification of a volume of interst to fetch.
+            Specify the resolution
+        voi: BoundingBox
+            optional specification of a volume of interest to fetch.
         """
 
         result = None
@@ -144,8 +144,8 @@ class NeuroglancerProvider(volume.VolumeProvider, srctype="neuroglancer/precompu
             s0 = np.identity(4)
             s0[:3, -1] = list(bbox.minpoint.transform(np.linalg.inv(img.affine)))
             result_affine = np.dot(img.affine, s0) # adjust global bounding box offset to get global affine
-            voxdims = np.dot(np.linalg.inv(result_affine), np.r_[bbox.shape, 1])[:3]
-            result_arr = np.zeros((voxdims + .5).astype('int'))
+            voxdims = np.asanyarray(bbox.transform(result_affine).shape, dtype="int")
+            result_arr = np.zeros(voxdims, dtype=img.dataobj.dtype)
             result = nib.Nifti1Image(dataobj=result_arr, affine=result_affine)
 
             arr = np.asanyarray(img.dataobj)
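The same two-line change lands in nifti.py below. It replaces hand-rolled inverse-affine arithmetic with the BoundingBox.transform helper and, importantly, allocates the target array with the source image's dtype rather than NumPy's float64 default, which sharply cuts the memory footprint for integer label volumes. A standalone sketch of the dtype effect, with made-up dimensions:

    import numpy as np

    voxdims = np.array([512, 512, 256])

    arr_old = np.zeros((voxdims + .5).astype('int'))           # float64: ~512 MiB
    arr_new = np.zeros(voxdims.astype('int'), dtype=np.uint8)  # uint8:   ~64 MiB
    print(arr_old.nbytes // 2**20, arr_new.nbytes // 2**20)    # 512 64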
@@ -162,7 +162,7 @@ class NeuroglancerProvider(volume.VolumeProvider, srctype="neuroglancer/precompu
 
         if num_conflicts > 0:
             num_voxels = np.count_nonzero(result_arr)
-            logger.warn(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")
+            logger.warning(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")
 
         return result
 
@@ -201,7 +201,7 @@ class NeuroglancerVolume:
             return self._transform_nm
 
         self._transform_nm = np.identity(1)
-        logger.warn(f"No transform.json found at {self.url}, using identity.")
+        logger.warning(f"No transform.json found at {self.url}, using identity.")
         return self._transform_nm
 
     @transform_nm.setter
@@ -278,7 +278,7 @@
             scale = suitable[-1]
         else:
             scale = self.scales[0]
-            logger.warn(
+            logger.warning(
                 f"Requested resolution {resolution_mm} is not available. "
                 f"Falling back to the highest possible resolution of "
                 f"{', '.join(map('{:.2f}'.format, scale.res_mm))} mm."
@@ -294,7 +294,7 @@
                 f"relative to the limit of {self.MAX_BYTES/1024**3}GiB."
             )
         if scale_changed:
-            logger.warn(f"Resolution was reduced to {scale.res_mm} to provide a feasible volume size")
+            logger.warning(f"Resolution was reduced to {scale.res_mm} to provide a feasible volume size")
         return scale
 
 
@@ -399,7 +399,7 @@ class NeuroglancerScale:
             x1, y1, z1 = np.minimum(self.chunk_sizes + [x0, y0, z0], self.size)
             chunk_czyx = self.volume._io.read_chunk(self.key, (x0, x1, y0, y1, z0, z1))
             if not chunk_czyx.shape[0] == 1 and not self.color_warning_issued:
-                logger.warn(
+                logger.warning(
                     "Color channel data is not yet supported. Returning first channel only."
                 )
                 self.color_warning_issued = True
@@ -419,7 +419,7 @@
 
         for dim in range(3):
             if bbox_.shape[dim] < 1:
-                logger.warn(
+                logger.warning(
                     f"Bounding box in voxel space will be enlarged to voxel size 1 along axis {dim}."
                 )
                 bbox_.maxpoint[dim] = bbox_.maxpoint[dim] + 1
@@ -507,7 +507,7 @@ class NeuroglancerMesh(volume.VolumeProvider, srctype="neuroglancer/precompmesh"
         fragment_names = meshinfo.get('fragments')
 
         if len(fragment_names) == 0:
-            raise RuntimeError("No fragments found at {meshurl}")
+            raise RuntimeError(f"No fragments found at {meshurl}")
         elif len(self._meshes) > 1:
             # multiple meshes were configured, so we expect only one fragment under each mesh url
             if len(fragment_names) > 1:
@@ -535,9 +535,25 @@
     def fetch(self, label: int, fragment: str):
         """
         Fetches a particular mesh. Each mesh is a dictionary with keys:
-
-        - verts: an Nx3 array of coordinates (in nanometer)
-        - faces: an MX3 array containing connection data of vertices
+
+        Parameters
+        ----------
+        label: int
+            Label of the volume
+        fragment: str, default: None
+            A fragment name can be specified to choose from multiple fragments.
+
+        Note
+        ----
+        If not specified, multiple fragments will be merged into one mesh.
+        In such a case, the verts and faces arrays of different fragments
+        are appended to one another.
+        Returns
+        -------
+        dict
+            - 'verts': An Nx3 array of vertex coordinates (in nanometer)
+            - 'faces': an MX3 array containing connection data of vertices
+            - 'name': Name of the of the mesh variant
         """
 
         # extract fragment information for the requested mesh
siibra/volumes/nifti.py CHANGED
@@ -50,7 +50,6 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
             self._img_loaders = {lbl: loader(url) for lbl, url in src.items()}
         else:
             raise ValueError(f"Invalid source specification for {self.__class__}: {src}")
-
         if not isinstance(src, nib.Nifti1Image):
             self._init_url = src
 
@@ -72,7 +71,7 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
         for loader in self._img_loaders.values():
             img = loader()
             if len(img.shape) > 3:
-                logger.warn(
+                logger.warning(
                     f"N-D NIfTI volume has shape {img.shape}, but "
                     f"bounding box considers only {img.shape[:3]}"
                 )
@@ -95,8 +94,8 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
             s0 = np.identity(4)
             s0[:3, -1] = list(bbox.minpoint.transform(np.linalg.inv(img.affine)))
             result_affine = np.dot(img.affine, s0) # adjust global bounding box offset to get global affine
-            voxdims = np.dot(np.linalg.inv(result_affine), np.r_[bbox.shape, 1])[:3]
-            result_arr = np.zeros((voxdims + .5).astype('int'))
+            voxdims = np.asanyarray(bbox.transform(result_affine).shape, dtype="int")
+            result_arr = np.zeros(voxdims, dtype=img.dataobj.dtype)
             result = nib.Nifti1Image(dataobj=result_arr, affine=result_affine)
 
             arr = np.asanyarray(img.dataobj)
@@ -113,7 +112,7 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
 
         if num_conflicts > 0:
             num_voxels = np.count_nonzero(result_arr)
-            logger.warn(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")
+            logger.warning(f"Merging fragments required to overwrite {num_conflicts} conflicting voxels ({num_conflicts/num_voxels*100.:2.1f}%).")
 
         return result
 
@@ -133,8 +132,8 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
             For example, some volumes are split into left and right hemisphere fragments.
             see :func:`~siibra.volumes.Volume.fragments`
         voi : BoundingBox
-            optional specification of a volume of interst to fetch.
-        label: int or None
+            optional specification of a volume of interest to fetch.
+        label: int, default: None
             Optional: a label index can be provided. Then the mask of the
             3D volume will be returned, where voxels matching this label
             are marked as "1".
@@ -202,14 +201,14 @@ class NiftiProvider(volume.VolumeProvider, srctype="nii"):
         """
         Find peaks in the image data.
 
-        Arguments:
+        Parameters
         ----------
         min_distance_mm : float
             Minimum distance between peaks in mm
 
         Returns:
         --------
-        peaks: PointSet
+        PointSet
         """
 
         from skimage.feature.peak import peak_local_max
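find_peaks defers to scikit-image's peak_local_max, imported locally at the end of the hunk. A toy example of that primitive on a synthetic volume; the provider additionally converts min_distance_mm to voxels via the affine and wraps the result in a PointSet:

    import numpy as np
    from skimage.feature.peak import peak_local_max

    vol = np.zeros((20, 20, 20))
    vol[5, 5, 5] = 1.0      # two synthetic peaks
    vol[15, 15, 15] = 0.8

    # voxel coordinates of local maxima at least 3 voxels apart
    print(peak_local_max(vol, min_distance=3))  # e.g. [[ 5  5  5], [15 15 15]]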