siibra 1.0a9__py3-none-any.whl → 1.0a11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siibra might be problematic. Click here for more details.

Files changed (35) hide show
  1. siibra/VERSION +1 -1
  2. siibra/commons.py +43 -26
  3. siibra/configuration/factory.py +15 -16
  4. siibra/core/atlas.py +40 -16
  5. siibra/core/region.py +241 -38
  6. siibra/features/__init__.py +19 -8
  7. siibra/features/connectivity/functional_connectivity.py +1 -1
  8. siibra/features/connectivity/regional_connectivity.py +45 -3
  9. siibra/features/feature.py +62 -12
  10. siibra/features/image/image.py +3 -1
  11. siibra/features/tabular/bigbrain_intensity_profile.py +1 -1
  12. siibra/features/tabular/cell_density_profile.py +5 -3
  13. siibra/features/tabular/cortical_profile.py +79 -15
  14. siibra/features/tabular/gene_expression.py +110 -1
  15. siibra/features/tabular/layerwise_bigbrain_intensities.py +1 -1
  16. siibra/features/tabular/layerwise_cell_density.py +3 -1
  17. siibra/features/tabular/receptor_density_fingerprint.py +3 -1
  18. siibra/features/tabular/receptor_density_profile.py +3 -5
  19. siibra/features/tabular/regional_timeseries_activity.py +59 -10
  20. siibra/features/tabular/tabular.py +4 -2
  21. siibra/livequeries/bigbrain.py +34 -0
  22. siibra/retrieval/cache.py +14 -9
  23. siibra/retrieval/requests.py +30 -1
  24. siibra/volumes/parcellationmap.py +17 -21
  25. siibra/volumes/providers/__init__.py +1 -0
  26. siibra/volumes/providers/freesurfer.py +113 -0
  27. siibra/volumes/providers/neuroglancer.py +55 -25
  28. siibra/volumes/providers/nifti.py +14 -16
  29. siibra/volumes/sparsemap.py +1 -1
  30. siibra/volumes/volume.py +13 -15
  31. {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/METADATA +1 -1
  32. {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/RECORD +35 -34
  33. {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/LICENSE +0 -0
  34. {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/WHEEL +0 -0
  35. {siibra-1.0a9.dist-info → siibra-1.0a11.dist-info}/top_level.txt +0 -0
@@ -24,7 +24,9 @@ from typing import Union, TYPE_CHECKING, List, Dict, Type, Tuple, BinaryIO, Any,
24
24
  from hashlib import md5
25
25
  from collections import defaultdict
26
26
  from zipfile import ZipFile
27
- from abc import ABC
27
+ from abc import ABC, abstractmethod
28
+ from re import sub
29
+ from textwrap import wrap
28
30
 
29
31
  if TYPE_CHECKING:
30
32
  from ..retrieval.datasets import EbrainsDataset
@@ -96,7 +98,8 @@ class Feature:
96
98
  modality: str,
97
99
  description: str,
98
100
  anchor: _anchor.AnatomicalAnchor,
99
- datasets: List['TypeDataset'] = []
101
+ datasets: List['TypeDataset'] = [],
102
+ id: str = None
100
103
  ):
101
104
  """
102
105
  Parameters
@@ -113,6 +116,7 @@ class Feature:
113
116
  self._description = description
114
117
  self._anchor_cached = anchor
115
118
  self.datasets = datasets
119
+ self._id = id
116
120
 
117
121
  @property
118
122
  def modality(self):
@@ -194,7 +198,8 @@ class Feature:
194
198
  @property
195
199
  def name(self):
196
200
  """Returns a short human-readable name of this feature."""
197
- return f"{self.__class__.__name__} ({self.modality}) anchored at {self.anchor}"
201
+ readable_class_name = sub("([a-z])([A-Z])", r"\g<1> \g<2>", self.__class__.__name__)
202
+ return sub("([b,B]ig [b,B]rain)", "BigBrain", readable_class_name)
198
203
 
199
204
  @classmethod
200
205
  def _get_instances(cls, **kwargs) -> List['Feature']:
@@ -260,12 +265,17 @@ class Feature:
260
265
 
261
266
  @property
262
267
  def id(self):
268
+ if self._id:
269
+ return self._id
270
+
263
271
  prefix = ''
264
272
  for ds in self.datasets:
265
273
  if hasattr(ds, "id"):
266
274
  prefix = ds.id + '--'
267
275
  break
268
- return prefix + md5(self.name.encode("utf-8")).hexdigest()
276
+ return prefix + md5(
277
+ f"{self.name} - {self.anchor}".encode("utf-8")
278
+ ).hexdigest()
269
279
 
270
280
  def _to_zip(self, fh: ZipFile):
271
281
  """
@@ -607,6 +617,7 @@ class Feature:
607
617
  def __init__(self, inst: Feature, fid: str):
608
618
  self.inst = inst
609
619
  self.fid = fid
620
+ self.category = inst.category
610
621
 
611
622
  def __str__(self) -> str:
612
623
  return self.inst.__str__()
@@ -677,6 +688,24 @@ class Compoundable(ABC):
677
688
  def _merge_anchors(cls, anchors: List[_anchor.AnatomicalAnchor]):
678
689
  return sum(anchors)
679
690
 
691
+ @classmethod
692
+ @abstractmethod
693
+ def _merge_elements(
694
+ cls,
695
+ elements,
696
+ description: str,
697
+ modality: str,
698
+ anchor: _anchor.AnatomicalAnchor
699
+ ) -> Feature:
700
+ """
701
+ Compute the merge data and create a merged instance from a set of
702
+ elements of this class. This will be used by CompoundFeature to
703
+ create the aggegated data and plot it. For example, to compute an
704
+ average connectivity matrix from a set of subfeatures, we create a
705
+ RegionalConnectivity feature.
706
+ """
707
+ raise NotImplementedError
708
+
680
709
 
681
710
  class CompoundFeature(Feature):
682
711
  """
@@ -725,6 +754,7 @@ class CompoundFeature(Feature):
725
754
  datasets=list(dict.fromkeys([ds for f in elements for ds in f.datasets]))
726
755
  )
727
756
  self._queryconcept = queryconcept
757
+ self._merged_feature_cached = None
728
758
 
729
759
  def __getattr__(self, attr: str) -> Any:
730
760
  """Expose compounding attributes explicitly."""
@@ -743,9 +773,27 @@ class CompoundFeature(Feature):
743
773
  return super().__dir__() + list(self._compounding_attributes.keys())
744
774
 
745
775
  def plot(self, *args, **kwargs):
746
- raise NotImplementedError(
747
- "CompoundFeatures does not have a standardized plot. Try plotting the elements instead."
776
+ kwargs["title"] = "(Derived data: averaged)\n" + kwargs.get(
777
+ "title",
778
+ "\n".join(wrap(self.name, kwargs.pop("textwrap", 40)))
748
779
  )
780
+ return self._get_merged_feature().plot(*args, **kwargs)
781
+
782
+ def _get_merged_feature(self) -> Feature:
783
+ if self._merged_feature_cached is None:
784
+ logger.info(f"{self.__class__.__name__}.data averages the data of each element.")
785
+ assert issubclass(self.feature_type, Compoundable)
786
+ self._merged_feature_cached = self.feature_type._merge_elements(
787
+ elements=self.elements,
788
+ modality=self.modality,
789
+ description=self.description,
790
+ anchor=self.anchor
791
+ )
792
+ return self._merged_feature_cached
793
+
794
+ @property
795
+ def data(self):
796
+ return self._get_merged_feature().data
749
797
 
750
798
  @property
751
799
  def indexing_attributes(self) -> Tuple[str]:
@@ -770,14 +818,16 @@ class CompoundFeature(Feature):
770
818
  @property
771
819
  def name(self) -> str:
772
820
  """Returns a short human-readable name of this feature."""
821
+ readable_feature_type = sub(
822
+ "([b,B]ig [b,B]rain)", "BigBrain",
823
+ sub("([a-z])([A-Z])", r"\g<1> \g<2>", self.feature_type.__name__)
824
+ )
773
825
  groupby = ', '.join([
774
- f"{v} {k}" for k, v in self._compounding_attributes.items()
826
+ f"{k}: {v}"
827
+ for k, v in self._compounding_attributes.items()
828
+ if k != 'modality'
775
829
  ])
776
- return (
777
- f"{self.__class__.__name__} of {len(self)} "
778
- f"{self.feature_type.__name__} features grouped by ({groupby})"
779
- f" anchored at {self.anchor}"
780
- )
830
+ return f"{len(self)} {readable_feature_type} features{f' {groupby}' if groupby else ''}"
781
831
 
782
832
  @property
783
833
  def id(self) -> str:
@@ -63,13 +63,15 @@ class Image(feature.Feature, _volume.Volume):
63
63
  providers: List[provider.VolumeProvider],
64
64
  region: str = None,
65
65
  datasets: List = [],
66
+ id: str = None
66
67
  ):
67
68
  feature.Feature.__init__(
68
69
  self,
69
70
  modality=modality,
70
71
  description=None, # lazy implementation below!
71
72
  anchor=None, # lazy implementation below!
72
- datasets=datasets
73
+ datasets=datasets,
74
+ id=id
73
75
  )
74
76
 
75
77
  _volume.Volume.__init__(
@@ -30,7 +30,7 @@ class BigBrainIntensityProfile(
30
30
  "as described in the publication 'Wagstyl, K., et al (2020). BigBrain 3D atlas of "
31
31
  "cortical layers: Cortical and laminar thickness gradients diverge in sensory and "
32
32
  "motor cortices. PLoS Biology, 18(4), e3000678. "
33
- "http://dx.doi.org/10.1371/journal.pbio.3000678'."
33
+ "http://dx.doi.org/10.1371/journal.pbio.3000678."
34
34
  "The data is taken from the tutorial at "
35
35
  "https://github.com/kwagstyl/cortical_layers_tutorial. Each vertex is "
36
36
  "assigned to the regional map when queried."
@@ -69,7 +69,8 @@ class CellDensityProfile(
69
69
  patch: int,
70
70
  url: str,
71
71
  anchor: _anchor.AnatomicalAnchor,
72
- datasets: list = []
72
+ datasets: list = [],
73
+ id: str = None
73
74
  ):
74
75
  """
75
76
  Generate a cell density profile from a URL to a cloud folder
@@ -82,6 +83,7 @@ class CellDensityProfile(
82
83
  unit="cells / 0.1mm3",
83
84
  anchor=anchor,
84
85
  datasets=datasets,
86
+ id=id
85
87
  )
86
88
  self._step = 0.01
87
89
  self._url = url
@@ -234,7 +236,7 @@ class CellDensityProfile(
234
236
 
235
237
  @property
236
238
  def _depths(self):
237
- return [d + self._step / 2 for d in np.arange(0, 1, self._step)]
239
+ return np.arange(0, 1, self._step) + self._step / 2
238
240
 
239
241
  @property
240
242
  def _values(self):
@@ -246,7 +248,7 @@ class CellDensityProfile(
246
248
  densities.append(self.density_image[mask].mean())
247
249
  else:
248
250
  densities.append(np.NaN)
249
- return densities
251
+ return np.asanyarray(densities)
250
252
 
251
253
  @property
252
254
  def key(self):
@@ -19,7 +19,7 @@ from ..feature import Compoundable
19
19
  from .. import anchor as _anchor
20
20
 
21
21
  import pandas as pd
22
- from typing import Union, Dict, Tuple
22
+ from typing import Union, Dict, Tuple, List
23
23
  from textwrap import wrap
24
24
  import numpy as np
25
25
 
@@ -56,7 +56,8 @@ class CorticalProfile(tabular.Tabular, Compoundable):
56
56
  values: Union[list, np.ndarray] = None,
57
57
  unit: str = None,
58
58
  boundary_positions: Dict[Tuple[int, int], float] = None,
59
- datasets: list = []
59
+ datasets: list = [],
60
+ id: str = None
60
61
  ):
61
62
  """Initialize profile.
62
63
 
@@ -96,21 +97,22 @@ class CorticalProfile(tabular.Tabular, Compoundable):
96
97
  description=description,
97
98
  anchor=anchor,
98
99
  data=None, # lazy loader below
99
- datasets=datasets
100
+ datasets=datasets,
101
+ id=id
100
102
  )
101
103
 
102
104
  def _check_sanity(self):
103
105
  # check plausibility of the profile
104
- assert isinstance(self._depths, (list, np.ndarray))
105
- assert isinstance(self._values, (list, np.ndarray))
106
- assert len(self._values) == len(self._depths)
107
- assert all(0 <= d <= 1 for d in self._depths)
106
+ assert isinstance(self._depths, (list, np.ndarray)), "Some depths are not valid"
107
+ assert isinstance(self._values, (list, np.ndarray)), "Some values are not valid"
108
+ assert len(self._values) == len(self._depths), "There exist uneven number of depths and values"
109
+ assert all(0 <= d <= 1 for d in self._depths), "Some depth is not between 0 and 1"
108
110
  if self.boundaries_mapped:
109
- assert all(0 <= d <= 1 for d in self.boundary_positions.values())
111
+ assert all(0 <= d <= 1 for d in self.boundary_positions.values()), "Some boundary positions are not between 0 and 1"
110
112
  assert all(
111
113
  layerpair in self.BOUNDARIES
112
114
  for layerpair in self.boundary_positions.keys()
113
- )
115
+ ), "Some layer pairs in boundary_positions are not listed in BOUNDARIES"
114
116
 
115
117
  @property
116
118
  def unit(self) -> str:
@@ -159,8 +161,32 @@ class CorticalProfile(tabular.Tabular, Compoundable):
159
161
  def data(self):
160
162
  """Return a pandas Series representing the profile."""
161
163
  self._check_sanity()
162
- return pd.DataFrame(
163
- self._values, index=self._depths, columns=[f"{self.modality} ({self.unit})"]
164
+ iscompound = len(self._values.shape) > 1 and self._values.shape[1] == 2
165
+ if iscompound:
166
+ columns = [f"{self.modality} mean ({self.unit})", "std"]
167
+ else:
168
+ columns = [f"{self.modality} ({self.unit})"]
169
+ return pd.DataFrame(self._values, index=self._depths, columns=columns)
170
+
171
+ @classmethod
172
+ def _merge_elements(
173
+ cls,
174
+ elements: List["CorticalProfile"],
175
+ description: str,
176
+ modality: str,
177
+ anchor: _anchor.AnatomicalAnchor,
178
+ ):
179
+ assert all(np.array_equal(elements[0]._depths, f._depths) for f in elements)
180
+ assert len({f.unit for f in elements}) == 1
181
+ values_stacked = np.stack([f._values for f in elements])
182
+ return CorticalProfile(
183
+ description=description,
184
+ modality=modality,
185
+ anchor=anchor,
186
+ depths=np.stack([f._depths for f in elements]).mean(0),
187
+ values=np.stack([values_stacked.mean(0), values_stacked.std(0)]).T,
188
+ unit=elements[0].unit,
189
+ boundary_positions=None,
164
190
  )
165
191
 
166
192
  def plot(self, *args, backend="matplotlib", **kwargs):
@@ -180,12 +206,17 @@ class CorticalProfile(tabular.Tabular, Compoundable):
180
206
  kwargs["title"] = kwargs.get("title", "\n".join(wrap(self.name, wrapwidth)))
181
207
  layercolor = kwargs.pop("layercolor", "gray")
182
208
 
209
+ iscompound = len(self._values.shape) > 1 and self._values.shape[1] == 2
210
+ ymax = max(
211
+ 0,
212
+ sum(self._values.max(axis=0)) if iscompound else self._values.max()
213
+ )
183
214
  if backend == "matplotlib":
184
215
  kwargs["xlabel"] = kwargs.get("xlabel", "Cortical depth")
185
216
  kwargs["ylabel"] = kwargs.get("ylabel", self.unit)
186
217
  kwargs["grid"] = kwargs.get("grid", True)
187
- kwargs["ylim"] = kwargs.get("ylim", (0, max(self._values)))
188
- axs = self.data.plot(*args, **kwargs, backend=backend)
218
+ axs = self.data.iloc[:, 0].plot(*args, **kwargs, backend=backend)
219
+ axs.set_ylim(kwargs.get("ylim", (0, ymax)))
189
220
 
190
221
  if self.boundaries_mapped:
191
222
  bvals = list(self.boundary_positions.values())
@@ -201,14 +232,22 @@ class CorticalProfile(tabular.Tabular, Compoundable):
201
232
  axs.axvspan(d1, d2, color=layercolor, alpha=0.3)
202
233
 
203
234
  axs.set_title(axs.get_title(), fontsize="medium")
235
+
236
+ if iscompound:
237
+ axs.set_ylabel(f"average {kwargs['ylabel']} \u00b1 std")
238
+ av = self.data.values[:, 0]
239
+ std = self.data.values[:, 1]
240
+ axs.fill_between(self.data.index.values, av - std, av + std, alpha=0.5)
241
+
204
242
  return axs
243
+
205
244
  elif backend == "plotly":
206
245
  kwargs["title"] = kwargs["title"].replace("\n", "<br>")
207
246
  kwargs["labels"] = {
208
247
  "index": kwargs.pop("xlabel", None) or kwargs.pop("index", "Cortical depth"),
209
248
  "value": kwargs.pop("ylabel", None) or kwargs.pop("value", self.unit)
210
249
  }
211
- fig = self.data.plot(*args, **kwargs, backend=backend)
250
+ fig = self.data.iloc[:, 0].plot(*args, **kwargs, backend=backend)
212
251
  if self.boundaries_mapped:
213
252
  bvals = list(self.boundary_positions.values())
214
253
  for i, (d1, d2) in enumerate(list(zip(bvals[:-1], bvals[1:]))):
@@ -219,12 +258,29 @@ class CorticalProfile(tabular.Tabular, Compoundable):
219
258
  )
220
259
  fig.update_layout(
221
260
  showlegend=False,
222
- yaxis_range=(0, max(self._values)),
261
+ yaxis_range=(0, ymax),
223
262
  title=dict(
224
263
  automargin=True, yref="container", xref="container",
225
264
  pad=dict(t=40), xanchor="left", yanchor="top"
226
265
  )
227
266
  )
267
+ if iscompound:
268
+ from plotly.graph_objects import Scatter
269
+ x = self.data.index.values
270
+ av = self.data.values[:, 0]
271
+ std = self.data.values[:, 1]
272
+ fig.update_layout(yaxis_title=f"average {kwargs['labels']['value']} &plusmn; std")
273
+ fig.add_traces(
274
+ Scatter(
275
+ x=np.concatenate((x, x[::-1])), # x, then x reversed
276
+ y=np.concatenate((av + std, (av - std)[::-1])), # upper, then lower reversed
277
+ fill='toself',
278
+ fillcolor='rgba(0,100,80,0.5)',
279
+ line=dict(color='rgba(255,255,255,0)'),
280
+ hoverinfo="skip",
281
+ showlegend=False
282
+ )
283
+ )
228
284
  return fig
229
285
  else:
230
286
  return self.data.plot(*args, **kwargs, backend=backend)
@@ -254,3 +310,11 @@ class CorticalProfile(tabular.Tabular, Compoundable):
254
310
  f"'_values' not available for {self.__class__.__name__}."
255
311
  )
256
312
  return self._values_cached
313
+
314
+ @property
315
+ def name(self):
316
+ if hasattr(self, "receptor"):
317
+ return super().name + f": {self.receptor}"
318
+ if hasattr(self, "location"):
319
+ return super().name + f": {self.location.coordinate}"
320
+ return super().name
@@ -15,6 +15,7 @@
15
15
 
16
16
  from .. import anchor as _anchor
17
17
  from . import tabular
18
+ from ...retrieval.datasets import GenericDataset
18
19
 
19
20
  import pandas as pd
20
21
  from textwrap import wrap
@@ -46,6 +47,114 @@ class GeneExpressions(
46
47
  as specified at https://alleninstitute.org/legal/terms-use/.
47
48
  """
48
49
 
50
+ DATASET = GenericDataset(
51
+ name="An anatomically comprehensive atlas of the adult human brain transcriptome",
52
+ contributors=[
53
+ 'Michael J. Hawrylycz',
54
+ 'Ed S. Lein',
55
+ 'Angela L. Guillozet-Bongaarts',
56
+ 'Elaine H. Shen',
57
+ 'Lydia Ng',
58
+ 'Jeremy A. Miller',
59
+ 'Louie N. van de Lagemaat',
60
+ 'Kimberly A. Smith',
61
+ 'Amanda Ebbert',
62
+ 'Zackery L. Riley',
63
+ 'Chris Abajian',
64
+ 'Christian F. Beckmann',
65
+ 'Amy Bernard',
66
+ 'Darren Bertagnolli',
67
+ 'Andrew F. Boe',
68
+ 'Preston M. Cartagena',
69
+ 'M. Mallar Chakravarty',
70
+ 'Mike Chapin',
71
+ 'Jimmy Chong',
72
+ 'Rachel A. Dalley',
73
+ 'Barry David Daly',
74
+ 'Chinh Dang',
75
+ 'Suvro Datta',
76
+ 'Nick Dee',
77
+ 'Tim A. Dolbeare',
78
+ 'Vance Faber',
79
+ 'David Feng',
80
+ 'David R. Fowler',
81
+ 'Jeff Goldy',
82
+ 'Benjamin W. Gregor',
83
+ 'Zeb Haradon',
84
+ 'David R. Haynor',
85
+ 'John G. Hohmann',
86
+ 'Steve Horvath',
87
+ 'Robert E. Howard',
88
+ 'Andreas Jeromin',
89
+ 'Jayson M. Jochim',
90
+ 'Marty Kinnunen',
91
+ 'Christopher Lau',
92
+ 'Evan T. Lazarz',
93
+ 'Changkyu Lee',
94
+ 'Tracy A. Lemon',
95
+ 'Ling Li',
96
+ 'Yang Li',
97
+ 'John A. Morris',
98
+ 'Caroline C. Overly',
99
+ 'Patrick D. Parker',
100
+ 'Sheana E. Parry',
101
+ 'Melissa Reding',
102
+ 'Joshua J. Royall',
103
+ 'Jay Schulkin',
104
+ 'Pedro Adolfo Sequeira',
105
+ 'Clifford R. Slaughterbeck',
106
+ 'Simon C. Smith',
107
+ 'Andy J. Sodt',
108
+ 'Susan M. Sunkin',
109
+ 'Beryl E. Swanson',
110
+ 'Marquis P. Vawter',
111
+ 'Derric Williams',
112
+ 'Paul Wohnoutka',
113
+ 'H. Ronald Zielke',
114
+ 'Daniel H. Geschwind',
115
+ 'Patrick R. Hof',
116
+ 'Stephen M. Smith',
117
+ 'Christof Koch',
118
+ 'Seth G. N. Grant',
119
+ 'Allan R. Jones'
120
+ ],
121
+ url="https://doi.org/10.1038%2Fnature11405",
122
+ description='Neuroanatomically precise, genome-wide maps of transcript '
123
+ 'distributions are critical resources to complement genomic '
124
+ 'sequence data and to correlate functional and genetic brain '
125
+ 'architecture. Here we describe the generation and analysis '
126
+ 'of a transcriptional atlas of the adult human brain, '
127
+ 'comprising extensive histological analysis and comprehensive '
128
+ 'microarray profiling of ~900 neuroanatomically precise '
129
+ 'subdivisions in two individuals. Transcriptional regulation '
130
+ 'varies enormously by anatomical location, with different '
131
+ 'regions and their constituent cell types displaying robust '
132
+ 'molecular signatures that are highly conserved between '
133
+ 'individuals. Analysis of differential gene expression and '
134
+ 'gene co-expression relationships demonstrates that brain-'
135
+ 'wide variation strongly reflects the distributions of major '
136
+ 'cell classes such as neurons, oligodendrocytes, astrocytes '
137
+ 'and microglia. Local neighbourhood relationships between '
138
+ 'fine anatomical subdivisions are associated with discrete '
139
+ 'neuronal subtypes and genes involved with synaptic '
140
+ 'transmission. The neocortex displays a relatively '
141
+ 'homogeneous transcriptional pattern, but with distinct '
142
+ 'features associated selectively with primary sensorimotor '
143
+ 'cortices and with enriched frontal lobe expression. Notably, '
144
+ 'the spatial topography of the neocortex is strongly '
145
+ 'reflected in its molecular topography— the closer two '
146
+ 'cortical regions, the more similar their transcriptomes. '
147
+ 'This freely accessible online data resource forms a high-'
148
+ 'resolution transcriptional baseline for neurogenetic studies '
149
+ 'of normal and abnormal human brain function.'
150
+ ""
151
+ "For retrieving microarray data, siibra connects to the web API of "
152
+ "the Allen Brain Atlas (© 2015 Allen Institute for Brain Science), "
153
+ "available from https://brain-map.org/api/index.html. Any use of the "
154
+ "microarray data needs to be in accordance with their terms of use, "
155
+ "as specified at https://alleninstitute.org/legal/terms-use/."
156
+ )
157
+
49
158
  class _DonorDict(TypedDict):
50
159
  id: int
51
160
  name: str
@@ -66,7 +175,7 @@ class GeneExpressions(
66
175
  genes: List[str],
67
176
  additional_columns: dict,
68
177
  anchor: _anchor.AnatomicalAnchor,
69
- datasets: List = []
178
+ datasets: List = [DATASET]
70
179
  ):
71
180
  """
72
181
  Construct gene expression table.
@@ -35,7 +35,7 @@ class LayerwiseBigBrainIntensities(
35
35
  "'Wagstyl, K., et al (2020). BigBrain 3D atlas of "
36
36
  "cortical layers: Cortical and laminar thickness gradients diverge in sensory and "
37
37
  "motor cortices. PLoS Biology, 18(4), e3000678. "
38
- "http://dx.doi.org/10.1371/journal.pbio.3000678'."
38
+ "http://dx.doi.org/10.1371/journal.pbio.3000678."
39
39
  "The data is taken from the tutorial at "
40
40
  "https://github.com/kwagstyl/cortical_layers_tutorial. Each vertex is "
41
41
  "assigned to the regional map when queried."
@@ -56,6 +56,7 @@ class LayerwiseCellDensity(
56
56
  layerfiles: list,
57
57
  anchor: _anchor.AnatomicalAnchor,
58
58
  datasets: list = [],
59
+ id: str = None
59
60
  ):
60
61
  tabular.Tabular.__init__(
61
62
  self,
@@ -63,7 +64,8 @@ class LayerwiseCellDensity(
63
64
  modality="Cell body density",
64
65
  anchor=anchor,
65
66
  datasets=datasets,
66
- data=None # lazy loading below
67
+ data=None, # lazy loading below
68
+ id=id
67
69
  )
68
70
  self.unit = "# detected cells/0.1mm3"
69
71
  self._filepairs = list(zip(segmentfiles, layerfiles))
@@ -42,7 +42,8 @@ class ReceptorDensityFingerprint(
42
42
  self,
43
43
  tsvfile: str,
44
44
  anchor: _anchor.AnatomicalAnchor,
45
- datasets: list = []
45
+ datasets: list = [],
46
+ id: str = None
46
47
  ):
47
48
  """ Generate a receptor fingerprint from a URL to a .tsv file
48
49
  formatted according to the structure used by Palomero-Gallagher et al.
@@ -54,6 +55,7 @@ class ReceptorDensityFingerprint(
54
55
  anchor=anchor,
55
56
  data=None, # lazy loading below
56
57
  datasets=datasets,
58
+ id=id
57
59
  )
58
60
  self._loader = requests.HttpRequest(tsvfile)
59
61
 
@@ -41,7 +41,8 @@ class ReceptorDensityProfile(
41
41
  receptor: str,
42
42
  tsvfile: str,
43
43
  anchor: _anchor.AnatomicalAnchor,
44
- datasets: list = []
44
+ datasets: list = [],
45
+ id: str = None
45
46
  ):
46
47
  """Generate a receptor density profile from a URL to a .tsv file
47
48
  formatted according to the structure used by Palomero-Gallagher et al.
@@ -52,6 +53,7 @@ class ReceptorDensityProfile(
52
53
  modality="Receptor density",
53
54
  anchor=anchor,
54
55
  datasets=datasets,
56
+ id=id
55
57
  )
56
58
  self.receptor = receptor
57
59
  self._data_cached = None
@@ -72,10 +74,6 @@ class ReceptorDensityProfile(
72
74
  def receptor_fullname(self):
73
75
  return vocabularies.RECEPTOR_SYMBOLS[self.receptor]['receptor']['name']
74
76
 
75
- @property
76
- def name(self):
77
- return super().name + f" for {self.receptor}"
78
-
79
77
  @property
80
78
  def neurotransmitter(self):
81
79
  return "{} ({})".format(