siibra 1.0a14__py3-none-any.whl → 1.0.1a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic; see the registry page for more details.

Files changed (80) hide show
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +15 -5
  3. siibra/commons.py +3 -48
  4. siibra/configuration/__init__.py +1 -1
  5. siibra/configuration/configuration.py +1 -1
  6. siibra/configuration/factory.py +164 -127
  7. siibra/core/__init__.py +1 -1
  8. siibra/core/assignment.py +1 -1
  9. siibra/core/atlas.py +24 -17
  10. siibra/core/concept.py +18 -9
  11. siibra/core/parcellation.py +76 -55
  12. siibra/core/region.py +163 -183
  13. siibra/core/space.py +3 -1
  14. siibra/core/structure.py +1 -2
  15. siibra/exceptions.py +17 -1
  16. siibra/experimental/contour.py +6 -6
  17. siibra/experimental/patch.py +2 -2
  18. siibra/experimental/plane3d.py +8 -8
  19. siibra/explorer/__init__.py +1 -1
  20. siibra/explorer/url.py +15 -0
  21. siibra/explorer/util.py +1 -1
  22. siibra/features/__init__.py +1 -1
  23. siibra/features/anchor.py +13 -14
  24. siibra/features/connectivity/__init__.py +1 -1
  25. siibra/features/connectivity/functional_connectivity.py +1 -1
  26. siibra/features/connectivity/regional_connectivity.py +7 -5
  27. siibra/features/connectivity/streamline_counts.py +1 -1
  28. siibra/features/connectivity/streamline_lengths.py +1 -1
  29. siibra/features/connectivity/tracing_connectivity.py +1 -1
  30. siibra/features/dataset/__init__.py +1 -1
  31. siibra/features/dataset/ebrains.py +1 -1
  32. siibra/features/feature.py +50 -28
  33. siibra/features/image/__init__.py +1 -1
  34. siibra/features/image/image.py +18 -13
  35. siibra/features/image/sections.py +1 -1
  36. siibra/features/image/volume_of_interest.py +1 -1
  37. siibra/features/tabular/__init__.py +1 -1
  38. siibra/features/tabular/bigbrain_intensity_profile.py +2 -2
  39. siibra/features/tabular/cell_density_profile.py +102 -66
  40. siibra/features/tabular/cortical_profile.py +5 -3
  41. siibra/features/tabular/gene_expression.py +1 -1
  42. siibra/features/tabular/layerwise_bigbrain_intensities.py +1 -1
  43. siibra/features/tabular/layerwise_cell_density.py +8 -25
  44. siibra/features/tabular/receptor_density_fingerprint.py +5 -3
  45. siibra/features/tabular/receptor_density_profile.py +5 -3
  46. siibra/features/tabular/regional_timeseries_activity.py +7 -5
  47. siibra/features/tabular/tabular.py +5 -3
  48. siibra/livequeries/__init__.py +1 -1
  49. siibra/livequeries/allen.py +46 -20
  50. siibra/livequeries/bigbrain.py +9 -9
  51. siibra/livequeries/ebrains.py +1 -1
  52. siibra/livequeries/query.py +1 -2
  53. siibra/locations/__init__.py +10 -10
  54. siibra/locations/boundingbox.py +77 -38
  55. siibra/locations/location.py +12 -4
  56. siibra/locations/point.py +14 -9
  57. siibra/locations/{pointset.py → pointcloud.py} +69 -27
  58. siibra/retrieval/__init__.py +1 -1
  59. siibra/retrieval/cache.py +1 -1
  60. siibra/retrieval/datasets.py +1 -1
  61. siibra/retrieval/exceptions/__init__.py +1 -1
  62. siibra/retrieval/repositories.py +10 -27
  63. siibra/retrieval/requests.py +20 -3
  64. siibra/vocabularies/__init__.py +1 -1
  65. siibra/volumes/__init__.py +2 -2
  66. siibra/volumes/parcellationmap.py +121 -94
  67. siibra/volumes/providers/__init__.py +1 -1
  68. siibra/volumes/providers/freesurfer.py +1 -1
  69. siibra/volumes/providers/gifti.py +1 -1
  70. siibra/volumes/providers/neuroglancer.py +68 -42
  71. siibra/volumes/providers/nifti.py +18 -28
  72. siibra/volumes/providers/provider.py +2 -2
  73. siibra/volumes/sparsemap.py +128 -247
  74. siibra/volumes/volume.py +252 -65
  75. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/METADATA +17 -4
  76. siibra-1.0.1a0.dist-info/RECORD +84 -0
  77. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/WHEEL +1 -1
  78. siibra-1.0a14.dist-info/RECORD +0 -84
  79. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/LICENSE +0 -0
  80. {siibra-1.0a14.dist-info → siibra-1.0.1a0.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,15 +16,77 @@
16
16
  from . import cortical_profile
17
17
 
18
18
  from .. import anchor as _anchor
19
- from ...commons import PolyLine, logger, create_key
19
+ from ...commons import logger
20
20
  from ...retrieval import requests
21
21
 
22
22
  from skimage.draw import polygon
23
23
  from skimage.transform import resize
24
- from io import BytesIO
25
24
  import numpy as np
26
25
  import pandas as pd
27
26
 
27
+ from io import BytesIO
28
+ from typing import Union, Tuple, Iterable
29
+
30
+
31
+ def cell_reader(bytes_buffer: bytes):
32
+ return pd.read_csv(BytesIO(bytes_buffer[2:]), delimiter=" ", header=0).astype(
33
+ {"layer": int, "label": int}
34
+ )
35
+
36
+
37
+ def layer_reader(bytes_buffer: bytes):
38
+ return pd.read_csv(BytesIO(bytes_buffer[2:]), delimiter=" ", header=0, index_col=0)
39
+
40
+
41
+ def poly_srt(poly):
42
+ return poly[poly[:, 0].argsort(), :]
43
+
44
+
45
+ def poly_rev(poly):
46
+ return poly[poly[:, 0].argsort()[::-1], :]
47
+
48
+
49
+ class PolyLine:
50
+ """Simple polyline representation which allows equidistant sampling."""
51
+
52
+ def __init__(self, pts):
53
+ self.pts = pts
54
+ self.lengths = [
55
+ np.sqrt(np.sum((pts[i, :] - pts[i - 1, :]) ** 2))
56
+ for i in range(1, pts.shape[0])
57
+ ]
58
+
59
+ def length(self):
60
+ return sum(self.lengths)
61
+
62
+ def sample(self, d: Union[Iterable[float], np.ndarray, float]):
63
+ # if d is iterable, we assume a list of sample positions
64
+ try:
65
+ iter(d)
66
+ except TypeError:
67
+ positions = [d]
68
+ else:
69
+ positions = d
70
+
71
+ samples = []
72
+ for s_ in positions:
73
+ s = min(max(s_, 0), 1)
74
+ target_distance = s * self.length()
75
+ current_distance = 0
76
+ for i, length in enumerate(self.lengths):
77
+ current_distance += length
78
+ if current_distance >= target_distance:
79
+ p1 = self.pts[i, :]
80
+ p2 = self.pts[i + 1, :]
81
+ r = (target_distance - current_distance + length) / length
82
+ samples.append(p1 + (p2 - p1) * r)
83
+ break
84
+
85
+ if len(samples) == 1:
86
+ return samples[0]
87
+ else:
88
+ return np.array(samples)
89
+
28
90
 
29
91
  class CellDensityProfile(
30
92
  cortical_profile.CorticalProfile,
@@ -45,24 +107,6 @@ class CellDensityProfile(
45
107
 
46
108
  _filter_attrs = cortical_profile.CorticalProfile._filter_attrs + ["location"]
47
109
 
48
- @classmethod
49
- def CELL_READER(cls, b):
50
- return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0).astype(
51
- {"layer": int, "label": int}
52
- )
53
-
54
- @classmethod
55
- def LAYER_READER(cls, b):
56
- return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0, index_col=0)
57
-
58
- @staticmethod
59
- def poly_srt(poly):
60
- return poly[poly[:, 0].argsort(), :]
61
-
62
- @staticmethod
63
- def poly_rev(poly):
64
- return poly[poly[:, 0].argsort()[::-1], :]
65
-
66
110
  def __init__(
67
111
  self,
68
112
  section: int,
@@ -70,7 +114,8 @@ class CellDensityProfile(
70
114
  url: str,
71
115
  anchor: _anchor.AnatomicalAnchor,
72
116
  datasets: list = [],
73
- id: str = None
117
+ id: str = None,
118
+ prerelease: bool = False,
74
119
  ):
75
120
  """
76
121
  Generate a cell density profile from a URL to a cloud folder
@@ -83,13 +128,14 @@ class CellDensityProfile(
83
128
  unit="cells / 0.1mm3",
84
129
  anchor=anchor,
85
130
  datasets=datasets,
86
- id=id
131
+ id=id,
132
+ prerelease=prerelease,
87
133
  )
88
134
  self._step = 0.01
89
135
  self._url = url
90
- self._cell_loader = requests.HttpRequest(url, self.CELL_READER)
136
+ self._cell_loader = requests.HttpRequest(url, cell_reader)
91
137
  self._layer_loader = requests.HttpRequest(
92
- url.replace("segments", "layerinfo"), self.LAYER_READER
138
+ url.replace("segments", "layerinfo"), layer_reader
93
139
  )
94
140
  self._density_image = None
95
141
  self._layer_mask = None
@@ -103,47 +149,49 @@ class CellDensityProfile(
103
149
 
104
150
  @property
105
151
  def shape(self):
106
- return tuple(self.cells[["y", "x"]].max().astype("int") + 1)
152
+ """(y,x)"""
153
+ return tuple(np.ceil(self.cells[["y", "x"]].max()).astype("int"))
107
154
 
108
- def boundary_annotation(self, boundary):
155
+ def boundary_annotation(self, boundary: Tuple[int, int]) -> np.ndarray:
109
156
  """Returns the annotation of a specific layer boundary."""
110
- y1, x1 = self.shape
157
+ shape_y, shape_x = self.shape
111
158
 
112
159
  # start of image patch
113
160
  if boundary == (-1, 0):
114
- return np.array([[0, 0], [x1, 0]])
161
+ return np.array([[0, 0], [shape_x, 0]])
115
162
 
116
163
  # end of image patch
117
164
  if boundary == (7, 8):
118
- return np.array([[0, y1], [x1, y1]])
165
+ return np.array([[0, shape_y], [shape_x, shape_y]])
119
166
 
120
167
  # retrieve polygon
121
168
  basename = "{}_{}.json".format(
122
169
  *(self.LAYERS[layer] for layer in boundary)
123
170
  ).replace("0_I", "0")
124
- url = self._url.replace("segments.txt", basename)
125
- poly = self.poly_srt(np.array(requests.HttpRequest(url).get()["segments"]))
171
+ poly_url = self._url.replace("segments.txt", basename)
172
+ poly = poly_srt(np.array(requests.HttpRequest(poly_url).get()["segments"]))
126
173
 
127
- # ensure full width
174
+ # ensure full width and trim to the image shape
128
175
  poly[0, 0] = 0
129
- poly[-1, 0] = x1
176
+ poly[poly[:, 0] > shape_x, 0] = shape_x
177
+ poly[poly[:, 1] > shape_y, 1] = shape_y
130
178
 
131
179
  return poly
132
180
 
133
- def layer_annotation(self, layer):
181
+ def layer_annotation(self, layer: int) -> np.ndarray:
134
182
  return np.vstack(
135
183
  (
136
184
  self.boundary_annotation((layer - 1, layer)),
137
- self.poly_rev(self.boundary_annotation((layer, layer + 1))),
185
+ poly_rev(self.boundary_annotation((layer, layer + 1))),
138
186
  self.boundary_annotation((layer - 1, layer))[0, :],
139
187
  )
140
188
  )
141
189
 
142
190
  @property
143
- def layer_mask(self):
191
+ def layer_mask(self) -> np.ndarray:
144
192
  """Generates a layer mask from boundary annotations."""
145
193
  if self._layer_mask is None:
146
- self._layer_mask = np.zeros(np.array(self.shape).astype("int") + 1)
194
+ self._layer_mask = np.zeros(np.array(self.shape, dtype=int) + 1, dtype="int")
147
195
  for layer in range(1, 8):
148
196
  pl = self.layer_annotation(layer)
149
197
  X, Y = polygon(pl[:, 0], pl[:, 1])
@@ -151,20 +199,20 @@ class CellDensityProfile(
151
199
  return self._layer_mask
152
200
 
153
201
  @property
154
- def depth_image(self):
202
+ def depth_image(self) -> np.ndarray:
155
203
  """Cortical depth image from layer boundary polygons by equidistant sampling."""
156
204
 
157
205
  if self._depth_image is None:
158
-
206
+ logger.info("Calculating cell densities from cell and layer data...")
159
207
  # compute equidistant cortical depth image from inner and outer contour
160
208
  scale = 0.1
161
- D = np.zeros((np.array(self.density_image.shape) * scale).astype("int") + 1)
209
+ depth_arr = np.zeros(np.ceil(np.array(self.shape) * scale).astype("int") + 1)
162
210
 
163
211
  # determine sufficient stepwidth for profile sampling
164
212
  # to match downscaled image resolution
165
- vstep, hstep = 1.0 / np.array(D.shape) / 2.0
213
+ vstep, hstep = 1.0 / np.array(depth_arr.shape) / 2.0
166
214
  vsteps = np.arange(0, 1 + vstep, vstep)
167
- hsteps = np.arange(0, 1 + vstep, hstep)
215
+ hsteps = np.arange(0, 1 + hstep, hstep)
168
216
 
169
217
  # build straight profiles between outer and inner cortical boundary
170
218
  s0 = PolyLine(self.boundary_annotation((0, 1)) * scale).sample(hsteps)
@@ -173,16 +221,16 @@ class CellDensityProfile(
173
221
 
174
222
  # write sample depths to their location in the depth image
175
223
  for prof in profiles:
176
- XY = prof.sample(vsteps).astype("int")
177
- D[XY[:, 1], XY[:, 0]] = vsteps
224
+ prof_samples_as_index = prof.sample(vsteps).astype("int")
225
+ depth_arr[prof_samples_as_index[:, 1], prof_samples_as_index[:, 0]] = vsteps
178
226
 
179
227
  # fix wm region, account for rounding error
180
228
  XY = self.layer_annotation(7) * scale
181
- D[polygon(XY[:, 1] - 1, XY[:, 0])] = 1
182
- D[-1, :] = 1
229
+ depth_arr[polygon(XY[:, 1] - 1, XY[:, 0])] = 1
230
+ depth_arr[-1, :] = 1
183
231
 
184
232
  # rescale depth image to original patch size
185
- self._depth_image = resize(D, self.density_image.shape)
233
+ self._depth_image = resize(depth_arr, self.density_image.shape)
186
234
 
187
235
  return self._depth_image
188
236
 
@@ -198,7 +246,7 @@ class CellDensityProfile(
198
246
  return self._boundary_positions
199
247
 
200
248
  @property
201
- def density_image(self):
249
+ def density_image(self) -> np.ndarray:
202
250
  if self._density_image is None:
203
251
  logger.debug("Computing density image for", self._url)
204
252
  # we integrate cell counts into 2D bins
@@ -207,9 +255,7 @@ class CellDensityProfile(
207
255
  counts, xedges, yedges = np.histogram2d(
208
256
  self.cells.y,
209
257
  self.cells.x,
210
- bins=(np.array(self.layer_mask.shape) / pixel_size_micron + 0.5).astype(
211
- "int"
212
- ),
258
+ bins=np.round(np.array(self.shape) / pixel_size_micron).astype("int"),
213
259
  )
214
260
 
215
261
  # rescale the counts from count / pixel_size**2 to count / 0.1mm^3,
@@ -227,11 +273,11 @@ class CellDensityProfile(
227
273
  return self._density_image
228
274
 
229
275
  @property
230
- def cells(self):
276
+ def cells(self) -> pd.DataFrame:
231
277
  return self._cell_loader.get()
232
278
 
233
279
  @property
234
- def layers(self):
280
+ def layers(self) -> pd.DataFrame:
235
281
  return self._layer_loader.get()
236
282
 
237
283
  @property
@@ -240,6 +286,7 @@ class CellDensityProfile(
240
286
 
241
287
  @property
242
288
  def _values(self):
289
+ # TODO: release a dataset update instead of on the fly computation
243
290
  densities = []
244
291
  delta = self._step / 2.0
245
292
  for d in self._depths:
@@ -247,16 +294,5 @@ class CellDensityProfile(
247
294
  if np.sum(mask) > 0:
248
295
  densities.append(self.density_image[mask].mean())
249
296
  else:
250
- densities.append(np.NaN)
297
+ densities.append(np.nan)
251
298
  return np.asanyarray(densities)
252
-
253
- @property
254
- def key(self):
255
- assert len(self.species) == 1
256
- return create_key("{}_{}_{}_{}_{}".format(
257
- self.id,
258
- self.species[0]['name'],
259
- self.regionspec,
260
- self.section,
261
- self.patch
262
- ))
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -57,7 +57,8 @@ class CorticalProfile(tabular.Tabular, Compoundable):
57
57
  unit: str = None,
58
58
  boundary_positions: Dict[Tuple[int, int], float] = None,
59
59
  datasets: list = [],
60
- id: str = None
60
+ id: str = None,
61
+ prerelease: bool = False,
61
62
  ):
62
63
  """Initialize profile.
63
64
 
@@ -98,7 +99,8 @@ class CorticalProfile(tabular.Tabular, Compoundable):
98
99
  anchor=anchor,
99
100
  data=None, # lazy loader below
100
101
  datasets=datasets,
101
- id=id
102
+ id=id,
103
+ prerelease=prerelease,
102
104
  )
103
105
 
104
106
  def _check_sanity(self):
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,13 +16,13 @@
16
16
  from . import cortical_profile
17
17
  from .. import anchor as _anchor
18
18
  from . import tabular
19
+ from ..tabular.cell_density_profile import cell_reader, layer_reader
19
20
 
20
21
  from ... import commons
21
22
  from ...retrieval import requests
22
23
 
23
24
  import pandas as pd
24
25
  import numpy as np
25
- from io import BytesIO
26
26
 
27
27
 
28
28
  class LayerwiseCellDensity(
@@ -40,23 +40,14 @@ class LayerwiseCellDensity(
40
40
  "The cortical depth is estimated from the measured layer thicknesses."
41
41
  )
42
42
 
43
- @classmethod
44
- def CELL_READER(cls, b):
45
- return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0).astype(
46
- {"layer": int, "label": int}
47
- )
48
-
49
- @classmethod
50
- def LAYER_READER(cls, b):
51
- return pd.read_csv(BytesIO(b[2:]), delimiter=" ", header=0, index_col=0)
52
-
53
43
  def __init__(
54
44
  self,
55
45
  segmentfiles: list,
56
46
  layerfiles: list,
57
47
  anchor: _anchor.AnatomicalAnchor,
58
48
  datasets: list = [],
59
- id: str = None
49
+ id: str = None,
50
+ prerelease: bool = False,
60
51
  ):
61
52
  tabular.Tabular.__init__(
62
53
  self,
@@ -65,7 +56,8 @@ class LayerwiseCellDensity(
65
56
  anchor=anchor,
66
57
  datasets=datasets,
67
58
  data=None, # lazy loading below
68
- id=id
59
+ id=id,
60
+ prerelease=prerelease,
69
61
  )
70
62
  self.unit = "# detected cells/0.1mm3"
71
63
  self._filepairs = list(zip(segmentfiles, layerfiles))
@@ -75,8 +67,8 @@ class LayerwiseCellDensity(
75
67
  density_dict = {}
76
68
  for i, (cellfile, layerfile) in enumerate(self._filepairs):
77
69
  try:
78
- cells = requests.HttpRequest(cellfile, func=self.CELL_READER).data
79
- layers = requests.HttpRequest(layerfile, func=self.LAYER_READER).data
70
+ cells = requests.HttpRequest(cellfile, func=cell_reader).data
71
+ layers = requests.HttpRequest(layerfile, func=layer_reader).data
80
72
  except requests.SiibraHttpRequestError as e:
81
73
  print(str(e))
82
74
  commons.logger.error(f"Skipping to bootstrap a {self.__class__.__name__} feature, cannot access file resource.")
@@ -101,12 +93,3 @@ class LayerwiseCellDensity(
101
93
  )
102
94
  self._data_cached.index.name = 'layer'
103
95
  return self._data_cached
104
-
105
- @property
106
- def key(self):
107
- assert len(self.species) == 1
108
- return commons.create_key("{}_{}_{}".format(
109
- self.dataset_id,
110
- self.species[0]['name'],
111
- self.regionspec
112
- ))
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -43,7 +43,8 @@ class ReceptorDensityFingerprint(
43
43
  tsvfile: str,
44
44
  anchor: _anchor.AnatomicalAnchor,
45
45
  datasets: list = [],
46
- id: str = None
46
+ id: str = None,
47
+ prerelease: bool = False,
47
48
  ):
48
49
  """ Generate a receptor fingerprint from a URL to a .tsv file
49
50
  formatted according to the structure used by Palomero-Gallagher et al.
@@ -55,7 +56,8 @@ class ReceptorDensityFingerprint(
55
56
  anchor=anchor,
56
57
  data=None, # lazy loading below
57
58
  datasets=datasets,
58
- id=id
59
+ id=id,
60
+ prerelease=prerelease,
59
61
  )
60
62
  self._loader = requests.HttpRequest(tsvfile)
61
63
 
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -42,7 +42,8 @@ class ReceptorDensityProfile(
42
42
  tsvfile: str,
43
43
  anchor: _anchor.AnatomicalAnchor,
44
44
  datasets: list = [],
45
- id: str = None
45
+ id: str = None,
46
+ prerelease: bool = False,
46
47
  ):
47
48
  """Generate a receptor density profile from a URL to a .tsv file
48
49
  formatted according to the structure used by Palomero-Gallagher et al.
@@ -53,7 +54,8 @@ class ReceptorDensityProfile(
53
54
  modality="Receptor density",
54
55
  anchor=anchor,
55
56
  datasets=datasets,
56
- id=id
57
+ id=id,
58
+ prerelease=prerelease
57
59
  )
58
60
  self.receptor = receptor
59
61
  self._data_cached = None
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,7 +19,7 @@ from ..feature import Compoundable
19
19
  from ...core import region as _region
20
20
  from .. import anchor as _anchor
21
21
  from ...commons import QUIET, siibra_tqdm
22
- from ...locations import pointset
22
+ from ...locations import pointcloud
23
23
  from ...retrieval.repositories import RepositoryConnector
24
24
  from ...retrieval.requests import HttpRequest
25
25
 
@@ -49,7 +49,8 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
49
49
  description: str = "",
50
50
  datasets: list = [],
51
51
  subject: str = "average",
52
- id: str = None
52
+ id: str = None,
53
+ prerelease: bool = False,
53
54
  ):
54
55
  """
55
56
  """
@@ -60,7 +61,8 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
60
61
  anchor=anchor,
61
62
  datasets=datasets,
62
63
  data=None, # lazy loading below
63
- id=id
64
+ id=id,
65
+ prerelease=prerelease,
64
66
  )
65
67
  self.cohort = cohort.upper()
66
68
  if isinstance(connector, str) and connector:
@@ -201,7 +203,7 @@ class RegionalTimeseriesActivity(tabular.Tabular, Compoundable):
201
203
  found = [r for r in region if r.name in all_centroids]
202
204
  assert len(found) > 0
203
205
  result.append(
204
- tuple(pointset.PointSet(
206
+ tuple(pointcloud.PointCloud(
205
207
  [all_centroids[r.name] for r in found], space=space
206
208
  ).centroid)
207
209
  )
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -45,7 +45,8 @@ class Tabular(feature.Feature):
45
45
  anchor: _anchor.AnatomicalAnchor,
46
46
  data: pd.DataFrame, # sample x feature dimension
47
47
  datasets: list = [],
48
- id: str = None
48
+ id: str = None,
49
+ prerelease: bool = False,
49
50
  ):
50
51
  feature.Feature.__init__(
51
52
  self,
@@ -53,7 +54,8 @@ class Tabular(feature.Feature):
53
54
  description=description,
54
55
  anchor=anchor,
55
56
  datasets=datasets,
56
- id=id
57
+ id=id,
58
+ prerelease=prerelease
57
59
  )
58
60
  self._data_cached = data
59
61
 
@@ -1,4 +1,4 @@
1
- # Copyright 2018-2021
1
+ # Copyright 2018-2024
2
2
  # Institute of Neuroscience and Medicine (INM-1), Forschungszentrum Jülich GmbH
3
3
 
4
4
  # Licensed under the Apache License, Version 2.0 (the "License");