siibra 1.0.1a1-py3-none-any.whl → 1.0.1a5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (67)
  1. siibra/VERSION +1 -1
  2. siibra/__init__.py +7 -16
  3. siibra/commons.py +19 -8
  4. siibra/configuration/configuration.py +5 -6
  5. siibra/configuration/factory.py +13 -8
  6. siibra/core/__init__.py +1 -1
  7. siibra/core/assignment.py +19 -7
  8. siibra/core/atlas.py +3 -3
  9. siibra/core/concept.py +4 -2
  10. siibra/core/parcellation.py +5 -5
  11. siibra/core/region.py +24 -25
  12. siibra/core/space.py +4 -6
  13. siibra/core/structure.py +2 -2
  14. siibra/explorer/url.py +2 -2
  15. siibra/features/anchor.py +3 -7
  16. siibra/features/connectivity/regional_connectivity.py +51 -40
  17. siibra/features/dataset/ebrains.py +1 -1
  18. siibra/features/feature.py +29 -20
  19. siibra/features/image/__init__.py +6 -3
  20. siibra/features/image/image.py +2 -4
  21. siibra/features/image/sections.py +81 -2
  22. siibra/features/image/volume_of_interest.py +8 -7
  23. siibra/features/tabular/__init__.py +1 -1
  24. siibra/features/tabular/bigbrain_intensity_profile.py +2 -1
  25. siibra/features/tabular/cell_density_profile.py +8 -9
  26. siibra/features/tabular/cortical_profile.py +6 -6
  27. siibra/features/tabular/gene_expression.py +34 -16
  28. siibra/features/tabular/layerwise_bigbrain_intensities.py +4 -3
  29. siibra/features/tabular/layerwise_cell_density.py +83 -24
  30. siibra/features/tabular/receptor_density_fingerprint.py +34 -9
  31. siibra/features/tabular/receptor_density_profile.py +1 -2
  32. siibra/features/tabular/regional_timeseries_activity.py +7 -7
  33. siibra/features/tabular/tabular.py +14 -7
  34. siibra/livequeries/allen.py +23 -22
  35. siibra/livequeries/bigbrain.py +239 -51
  36. siibra/livequeries/ebrains.py +13 -10
  37. siibra/livequeries/query.py +3 -3
  38. siibra/locations/__init__.py +17 -8
  39. siibra/locations/boundingbox.py +10 -8
  40. siibra/{experimental/plane3d.py → locations/experimental.py} +113 -13
  41. siibra/locations/location.py +17 -13
  42. siibra/locations/point.py +14 -19
  43. siibra/locations/pointcloud.py +57 -12
  44. siibra/retrieval/cache.py +1 -0
  45. siibra/retrieval/datasets.py +19 -13
  46. siibra/retrieval/repositories.py +10 -11
  47. siibra/retrieval/requests.py +26 -24
  48. siibra/vocabularies/__init__.py +1 -2
  49. siibra/volumes/__init__.py +4 -3
  50. siibra/volumes/parcellationmap.py +33 -17
  51. siibra/volumes/providers/freesurfer.py +4 -4
  52. siibra/volumes/providers/gifti.py +4 -4
  53. siibra/volumes/providers/neuroglancer.py +19 -22
  54. siibra/volumes/providers/nifti.py +6 -6
  55. siibra/volumes/providers/provider.py +3 -2
  56. siibra/volumes/sparsemap.py +19 -26
  57. siibra/volumes/volume.py +21 -28
  58. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/METADATA +37 -17
  59. siibra-1.0.1a5.dist-info/RECORD +80 -0
  60. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/WHEEL +1 -1
  61. siibra/experimental/__init__.py +0 -19
  62. siibra/experimental/contour.py +0 -61
  63. siibra/experimental/cortical_profile_sampler.py +0 -57
  64. siibra/experimental/patch.py +0 -98
  65. siibra-1.0.1a1.dist-info/RECORD +0 -84
  66. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info/licenses}/LICENSE +0 -0
  67. {siibra-1.0.1a1.dist-info → siibra-1.0.1a5.dist-info}/top_level.txt +0 -0
siibra/features/connectivity/regional_connectivity.py
@@ -14,11 +14,18 @@
 # limitations under the License.
 
 from zipfile import ZipFile
-from ..feature import Feature, Compoundable
-from ..tabular.tabular import Tabular
+from typing import Callable, Union, List, Tuple, Iterator
+try:
+    from typing import Literal
+except ImportError:  # support python 3.7
+    from typing_extensions import Literal
 
-from .. import anchor as _anchor
+import numpy as np
+import pandas as pd
 
+from .. import anchor as _anchor
+from ..feature import Feature, Compoundable
+from ..tabular.tabular import Tabular
 from ...commons import logger, QUIET, siibra_tqdm
 from ...core import region as _region
 from ...locations import pointcloud
@@ -26,16 +33,6 @@ from ...retrieval.repositories import RepositoryConnector
 from ...retrieval.requests import HttpRequest
 
 
-import pandas as pd
-import numpy as np
-from typing import Callable, Union, List, Tuple, Iterator
-
-try:
-    from typing import Literal
-except ImportError:  # support python 3.7
-    from typing_extensions import Literal
-
-
 class RegionalConnectivity(Feature, Compoundable):
     """
     Parcellation-averaged connectivity, providing one or more matrices of a
@@ -107,6 +104,7 @@ class RegionalConnectivity(Feature, Compoundable):
         self._matrix = None
         self._subject = subject
         self._feature = feature
+        self._matrix_std = None  # only used for compound feature
 
     @property
     def subject(self):
@@ -173,6 +171,9 @@
         merged._matrix = elements[0]._arraylike_to_dataframe(
             np.stack(all_arrays).mean(0)
         )
+        merged._matrix_std = elements[0]._arraylike_to_dataframe(
+            np.stack(all_arrays).std(0)
+        )
         return merged
 
     def _plot_matrix(
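The merged compound feature now keeps the across-element standard deviation next to the mean, both computed element-wise on the stacked connectivity arrays. The underlying numpy behaviour, as a standalone sketch with made-up 2x2 matrices:

    import numpy as np

    # three hypothetical 2x2 connectivity matrices, e.g. one per subject
    arrays = [
        np.array([[0., 1.], [1., 0.]]),
        np.array([[0., 3.], [3., 0.]]),
        np.array([[0., 2.], [2., 0.]]),
    ]

    stacked = np.stack(arrays)   # shape (3, 2, 2)
    mean = stacked.mean(0)       # element-wise mean across elements -> [[0., 2.], [2., 0.]]
    std = stacked.std(0)         # element-wise std                  -> [[0., 0.816...], [0.816..., 0.]]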
@@ -261,10 +262,14 @@
             non-symmetric matrices. ('column' or 'row')
         """
         matrix = self.data
+        assert isinstance(matrix, pd.DataFrame)
+        matrix_std = self._matrix_std
         if direction.lower() not in ['column', 'row']:
             raise ValueError("Direction can only be 'column' or 'row'")
         if direction.lower() == 'row':
             matrix = matrix.transpose()
+            if matrix_std is not None:
+                matrix_std = matrix_std.transpose()
 
         def matches(r1, r2):
             if isinstance(r1, tuple):
@@ -273,32 +278,38 @@
             assert isinstance(r1, _region.Region)
             return r1.matches(r2)
 
-        regions = [r for r in matrix.index if matches(r, region)]
-        if len(regions) == 0:
+        # decode region spec
+        region_candidates = [r for r in matrix.index if matches(r, region)]
+        if len(region_candidates) == 0:
             raise ValueError(f"Invalid region specification: {region}")
-        elif len(regions) > 1:
-            raise ValueError(f"Region specification {region} matched more than one profile: {regions}")
-        else:
-            name = self.modality
-            series = matrix[regions[0]]
-            last_index = len(series) - 1 if max_rows is None \
-                else min(max_rows, len(series) - 1)
-            return Tabular(
-                description=self.description,
-                modality=f"{self.modality} {self.cohort}",
-                anchor=_anchor.AnatomicalAnchor(
-                    species=list(self.anchor.species)[0],
-                    region=regions[0]
-                ),
-                data=(
-                    series[:last_index]
-                    .to_frame(name=name)
-                    .query(f'`{name}` > {min_connectivity}')
-                    .sort_values(by=name, ascending=False)
-                    .rename_axis('Target regions')
-                ),
-                datasets=self.datasets
-            )
+        if len(region_candidates) > 1:
+            raise ValueError(f"Region specification {region} matched more than one profile: {region_candidates}")
+        region = region_candidates[0]
+
+        # create DataFrame
+        data = matrix[region].to_frame('mean')
+        if matrix_std is not None:
+            data = pd.concat([data, matrix_std[region].rename('std')], axis=1)
+
+        last_index = len(data) if max_rows is None else min(max_rows, len(data))
+
+        data = (
+            data
+            .query(f'`mean` > {min_connectivity}')
+            .sort_values(by="mean", ascending=False)
+            .rename_axis('Target regions')
+        )[:last_index]
+
+        return Tabular(
+            description=self.description,
+            modality=f"{self.modality} {self.cohort}",
+            anchor=_anchor.AnatomicalAnchor(
+                species=list(self.anchor.species)[0],
+                region=region
+            ),
+            data=data,
+            datasets=self.datasets
+        )
 
     def plot(
         self,
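The rewritten get_profile returns a Tabular whose DataFrame always carries a 'mean' column and, when the feature was compounded across elements (so _matrix_std is set), an additional 'std' column. A hedged usage sketch; `conn` stands for a RegionalConnectivity (or compounded) feature obtained elsewhere, and the region spec is illustrative:

    # Sketch only: `conn` and the region name "V1" are assumed, not defined in this diff.
    profile = conn.get_profile("V1", min_connectivity=0.1, max_rows=20)

    df = profile.data                 # pandas DataFrame, index renamed to 'Target regions'
    print(df["mean"].head())          # connectivity values, sorted in descending order
    if "std" in df.columns:           # only present for compounded (averaged) features
        print(df["std"].head())       # across-element standard deviation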
@@ -339,8 +350,8 @@
         profile = self.get_profile(regions, min_connectivity, max_rows, direction)
         kwargs["kind"] = kwargs.get("kind", "barh")
         if backend == "matplotlib":
-            kwargs["logx"] = kwargs.get("logx", logscale)
-            return profile.data.plot(*args, backend=backend, **kwargs)
+            kwargs["logy"] = kwargs.get("logy", logscale)
+            return profile.plot(*args, backend=backend, **kwargs)
         elif backend == "plotly":
             kwargs.update({
                 "color": kwargs.get("color", profile.data.columns[0]),
siibra/features/dataset/ebrains.py
@@ -15,9 +15,9 @@
 """Non-preconfigured data features hosted at EBRAINS."""
 
 from zipfile import ZipFile
+
 from .. import anchor as _anchor
 from .. import feature
-
 from ...retrieval import datasets
 
 
siibra/features/feature.py
@@ -14,12 +14,6 @@
 # limitations under the License.
 """Handles multimodal data features and related queries."""
 
-from . import anchor as _anchor
-
-from ..commons import logger, InstanceTable, siibra_tqdm, __version__
-from ..core import concept, space, region, parcellation, structure
-from ..volumes import volume
-
 from typing import Union, TYPE_CHECKING, List, Dict, Type, Tuple, BinaryIO, Any, Iterator
 from hashlib import md5
 from collections import defaultdict
@@ -28,6 +22,11 @@ from abc import ABC, abstractmethod
 from re import sub
 from textwrap import wrap
 
+from . import anchor as _anchor
+from ..commons import logger, InstanceTable, siibra_tqdm, __version__
+from ..core import concept, space, region, parcellation, structure
+from ..volumes import volume
+
 if TYPE_CHECKING:
     from ..retrieval.datasets import EbrainsDataset
     TypeDataset = EbrainsDataset
@@ -130,23 +129,22 @@ class Feature:
         # allows subclasses to implement lazy loading of an anchor
         return self._anchor_cached
 
-    def __init_subclass__(cls, configuration_folder=None, category=None, do_not_index=False, **kwargs):
+    def __init_subclass__(cls, configuration_folder=None, category=None, **kwargs):
 
         # Feature.SUBCLASSES serves as an index where feature class inheritance is cached. When users
         # queries a branch on the hierarchy, all children will also be queried. There are usecases where
         # such behavior is not desired (e.g. ProxyFeature, which wraps livequery features id to capture the
         # query context).
-        # do_not_index flag allow the default index behavior to be toggled off.
-
-        if do_not_index is False:
+        if "ProxyFeature" in cls.__name__:
+            return
 
-            # extend the subclass lists
-            # Iterate over all mro, not just immediate base classes
-            for BaseCls in cls.__mro__:
-                # some base classes may not be sub class of feature, ignore these
-                if not issubclass(BaseCls, Feature):
-                    continue
-                cls._SUBCLASSES[BaseCls].append(cls)
+        # extend the subclass lists
+        # Iterate over all mro, not just immediate base classes
+        for BaseCls in cls.__mro__:
+            # some base classes may not be sub class of feature, ignore these
+            if not issubclass(BaseCls, Feature):
+                continue
+            cls._SUBCLASSES[BaseCls].append(cls)
 
         cls._live_queries = []
         cls._preconfigured_instances = None
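The registration loop walks cls.__mro__, so every new feature class is appended to the _SUBCLASSES list of each of its Feature-derived ancestors (including itself), and only wrapper classes whose name contains "ProxyFeature" are skipped, replacing the removed do_not_index flag. A standalone sketch of that indexing idea, with hypothetical class names unrelated to siibra:

    from collections import defaultdict

    SUBCLASS_INDEX = defaultdict(list)   # hypothetical stand-in for Feature._SUBCLASSES

    class Feature:
        def __init_subclass__(cls, **kwargs):
            if "ProxyFeature" in cls.__name__:   # skip wrapper classes by name
                return super().__init_subclass__(**kwargs)
            for base in cls.__mro__:             # register with every Feature-derived ancestor
                if issubclass(base, Feature):
                    SUBCLASS_INDEX[base].append(cls)
            return super().__init_subclass__(**kwargs)

    class TabularLike(Feature): ...
    class ProfileLike(TabularLike): ...

    # querying a parent class also reaches its children
    assert ProfileLike in SUBCLASS_INDEX[Feature]
    assert ProfileLike in SUBCLASS_INDEX[TabularLike]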
@@ -156,6 +154,9 @@
             cls._CATEGORIZED[category].add(cls.__name__, cls)
         return super().__init_subclass__(**kwargs)
 
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}(id='{self.id}', name='{self.name}')>"
+
     @classmethod
     def _get_subclasses(cls):
         return {Cls.__name__: Cls for Cls in cls._SUBCLASSES}
@@ -231,7 +232,10 @@
         from ..configuration.configuration import Configuration
         conf = Configuration()
         Configuration.register_cleanup(cls._clean_instances)
-        assert cls._configuration_folder in conf.folders
+        if cls._configuration_folder not in conf.folders:
+            logger.debug(f"{cls._configuration_folder} is not in current configuration")
+            return []
+
         cls._preconfigured_instances = [
             o for o in conf.build_objects(cls._configuration_folder)
             if isinstance(o, cls)
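Instead of asserting, the loader now returns an empty list when a feature class points at a configuration folder that the active configuration does not provide, so queries degrade gracefully rather than fail. The guard pattern in isolation, with hypothetical names:

    import logging
    logger = logging.getLogger("sketch")

    def load_preconfigured(folder: str, available_folders: set):
        # Hypothetical stand-in for the check added above: a missing folder
        # is logged and simply yields no preconfigured instances.
        if folder not in available_folders:
            logger.debug(f"{folder} is not in current configuration")
            return []
        return [f"object built from {folder}"]

    print(load_preconfigured("features/images/vois/morphometry", {"features/tabular"}))  # -> []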
@@ -579,7 +583,10 @@
         # with the query concept.
         live_instances = feature_type._livequery(concept, **kwargs)
 
-        results = list(dict.fromkeys(preconfigured_instances + live_instances))
+        results = sorted(
+            dict.fromkeys(preconfigured_instances + live_instances),  # to remove duplicates
+            key=lambda f: min(f.last_match_result) if f.last_match_result else False,  # to order according to assignmnent ranking
+        )
         return CompoundFeature._compound(results, concept)
 
     @classmethod
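Query results are now de-duplicated with dict.fromkeys (which keeps the first occurrence and preserves insertion order) and then sorted by a ranking key derived from each feature's last_match_result. The combined idiom, as a standalone sketch with hypothetical data:

    # Hypothetical match qualities; a lower rank means a better assignment.
    candidates = ["exact", "overlaps", "exact", "contained", "overlaps"]
    rank = {"exact": 0, "overlaps": 1, "contained": 2}

    results = sorted(
        dict.fromkeys(candidates),      # drops duplicates, keeps first-seen order
        key=lambda c: rank[c],          # orders by assignment quality
    )
    print(results)                      # ['exact', 'overlaps', 'contained']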
@@ -625,7 +632,9 @@
 
         See docstring of serialize_query_context for further context.
         """
-        class ProxyFeature(feature.__class__, do_not_index=True):
+
+        # if you change the name of this class, change the string in Feature.__init_subclass__
+        class ProxyFeature(feature.__class__):
 
             # override __class__ property
             # some instances of features accesses inst.__class__
siibra/features/image/__init__.py
@@ -21,7 +21,10 @@ from .volume_of_interest import (
     MRIVolumeOfInterest,
     XPCTVolumeOfInterest,
     LSFMVolumeOfInterest,
-    DTIVolumeOfInterest
-    # SegmentedVolumeOfInterest
+    DTIVolumeOfInterest,
+    MorphometryVolumeOfInterest,
+)
+from .sections import (
+    CellbodyStainedSection,
+    BigBrain1MicronPatch
 )
-from .sections import CellbodyStainedSection
siibra/features/image/image.py
@@ -14,15 +14,13 @@
 # limitations under the License.
 """Base type of features in volume format and related anatomical anchor."""
 
+from typing import List, TYPE_CHECKING
 from zipfile import ZipFile
-from .. import feature
 
+from .. import feature
 from .. import anchor as _anchor
-
 from ...volumes import volume as _volume
 
-from typing import List, TYPE_CHECKING
-
 if TYPE_CHECKING:
     from ...locations.boundingbox import BoundingBox
     from ...volumes.providers import provider
siibra/features/image/sections.py
@@ -14,13 +14,92 @@
 # limitations under the License.
 """Multimodal data features in 2D section."""
 
+from typing import TYPE_CHECKING
+
 from . import image
 
+if TYPE_CHECKING:
+    from ...locations import AxisAlignedPatch, Contour
+    from ...features.anchor import AnatomicalAnchor
+
 
 class CellbodyStainedSection(
     image.Image,
-    configuration_folder='features/images/sections/cellbody',
-    category="cellular"
+    configuration_folder="features/images/sections/cellbody",
+    category="cellular",
 ):
     def __init__(self, **kwargs):
         image.Image.__init__(self, **kwargs, modality="cell body staining")
+
+
+class BigBrain1MicronPatch(image.Image, category="cellular"):
+
+    _DESCRIPTION = """Sample approximately orthogonal cortical image patches
+    from BigBrain 1 micron sections, guided by an image volume
+    in a supported reference space providing. The image
+    volume is used as a weighted mask to extract patches
+    along the cortical midsurface with nonzero weights in the
+    input image.
+    An optional lower_threshold can be used to narrow down
+    the search. The weight is stored with the resulting features."""
+
+    def __init__(
+        self,
+        patch: "AxisAlignedPatch",
+        profile: "Contour",
+        section: CellbodyStainedSection,
+        vertex: int,
+        relevance: float,
+        anchor: "AnatomicalAnchor",
+    ):
+        self._patch = patch
+        self._profile = profile
+        self._section = section
+        self.vertex = vertex
+        self.relevance = relevance
+        image.Image.__init__(
+            self,
+            name=f"Cortical patch in {section.name}",
+            modality=section.modality,
+            space_spec=section._space_spec,
+            providers=list(section._providers.values()),
+            region=None,
+            datasets=section.datasets,
+            bbox=patch.boundingbox,
+            id=None,
+        )
+        self._anchor_cached = anchor
+        self._description_cached = self._DESCRIPTION
+
+    def __repr__(self):
+        return (
+            f"<{self.__class__.__name__}(space_spec={self._space_spec}, "
+            f"name='{self.name}', "
+            f"section='{self._section.get_boundingbox().minpoint.bigbrain_section()}', "
+            f"vertex='{self.vertex}', providers={self._providers})>"
+        )
+
+    @property
+    def section(self) -> CellbodyStainedSection:
+        return self._section
+
+    def get_boundingbox(self, **fetch_kwargs):
+        """Enforce that the bounding box spans the full section thickness."""
+        bbox_section = self._section.get_boundingbox(**fetch_kwargs)
+        bbox = self._patch.boundingbox
+        bbox.minpoint[1] = bbox_section.minpoint[1]
+        bbox.maxpoint[1] = bbox_section.maxpoint[1]
+        return bbox
+
+    @property
+    def profile(self) -> "Contour":
+        return self._profile
+
+    @property
+    def bigbrain_section(self):
+        return self.get_boundingbox().minpoint.bigbrain_section()
+
+    def fetch(self, flip: bool = False, resolution_mm: float = -1, **kwargs):
+        assert len(kwargs) == 0
+        p = self._patch.flip() if flip else self._patch
+        return p.extract_volume(self._section, resolution_mm=resolution_mm).fetch()
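Everything the new BigBrain1MicronPatch exposes is defined in the hunk above, so a usage sketch stays close to it; `patch_feature` is assumed to be one such feature obtained from a siibra query against BigBrain space (how that query is spelled is not part of this diff):

    # Sketch only: `patch_feature` is an assumed BigBrain1MicronPatch instance.
    print(patch_feature.bigbrain_section)   # section number of the parent 1 micron section
    print(patch_feature.relevance)          # weight taken from the guiding image volume
    print(patch_feature.section)            # the parent CellbodyStainedSection

    bbox = patch_feature.get_boundingbox()  # spans the full section thickness (see above)
    img = patch_feature.fetch(resolution_mm=0.02)                 # extract the patch volume
    flipped = patch_feature.fetch(flip=True, resolution_mm=0.02)  # same patch, flipped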
siibra/features/image/volume_of_interest.py
@@ -79,10 +79,11 @@ class LSFMVolumeOfInterest(
     def __init__(self, modality, **kwargs):
         image.Image.__init__(self, **kwargs, modality=modality)
 
-# class SegmentedVolumeOfInterest(
-#     image.Image,
-#     configuration_folder="features/images/vois/segmentation",
-#     category="segmentation"
-# ):
-#     def __init__(self, **kwargs):
-#         image.Image.__init__(self, **kwargs, modality="segmentation")
+
+class MorphometryVolumeOfInterest(
+    image.Image,
+    configuration_folder="features/images/vois/morphometry",
+    category="macrostructural"
+):
+    def __init__(self, modality, **kwargs):
+        image.Image.__init__(self, **kwargs, modality=modality)
siibra/features/tabular/__init__.py
@@ -15,7 +15,7 @@
 """Multimodal data features in tabular formats."""
 
 from .bigbrain_intensity_profile import BigBrainIntensityProfile
-from .cell_density_profile import CellDensityProfile
+from .cell_density_profile import CellDensityProfile, cell_reader, layer_reader
 from .gene_expression import GeneExpressions
 from .layerwise_bigbrain_intensities import LayerwiseBigBrainIntensities
 from .layerwise_cell_density import LayerwiseCellDensity
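cell_reader and layer_reader are now re-exported from the tabular subpackage next to CellDensityProfile, so the parsers can be called directly on raw bytes (the bytes signature of cell_reader appears in the cell_density_profile hunk below). A hedged sketch with a hypothetical local file in the expected format:

    # Sketch only: "segments.txt" is a hypothetical local copy of a cell
    # segmentation file in the format the CellDensityProfile loader consumes.
    from siibra.features.tabular import cell_reader

    with open("segments.txt", "rb") as f:
        cells = cell_reader(f.read())   # parsed cell table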
siibra/features/tabular/bigbrain_intensity_profile.py
@@ -13,9 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import List, TYPE_CHECKING
+
 from . import cortical_profile
 
-from typing import List, TYPE_CHECKING
 if TYPE_CHECKING:
     from ...features.anchor import AnatomicalAnchor
 
siibra/features/tabular/cell_density_profile.py
@@ -13,19 +13,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import cortical_profile
-
-from .. import anchor as _anchor
-from ...commons import logger
-from ...retrieval import requests
+from io import BytesIO
+from typing import Union, Tuple, Iterable
 
-from skimage.draw import polygon
-from skimage.transform import resize
 import numpy as np
 import pandas as pd
+from skimage.draw import polygon
+from skimage.transform import resize
 
-from io import BytesIO
-from typing import Union, Tuple, Iterable
+from . import cortical_profile
+from .. import anchor as _anchor
+from ...commons import logger
+from ...retrieval import requests
 
 
 def cell_reader(bytes_buffer: bytes):
siibra/features/tabular/cortical_profile.py
@@ -13,15 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import tabular
-from ..feature import Compoundable
-
-from .. import anchor as _anchor
-
-import pandas as pd
 from typing import Union, Dict, Tuple, List
+
 from textwrap import wrap
 import numpy as np
+import pandas as pd
+
+from . import tabular
+from .. import anchor as _anchor
+from ..feature import Compoundable
 
 
 class CorticalProfile(tabular.Tabular, Compoundable):
siibra/features/tabular/gene_expression.py
@@ -13,11 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .. import anchor as _anchor
-from . import tabular
-from ...retrieval.datasets import GenericDataset
-
-import pandas as pd
 from textwrap import wrap
 from typing import List
 try:
@@ -25,6 +20,12 @@ try:
 except ImportError:
     from typing_extensions import TypedDict
 
+import pandas as pd
+
+from . import tabular
+from .. import anchor as _anchor
+from ...retrieval.datasets import GenericDataset
+
 
 class GeneExpressions(
     tabular.Tabular,
@@ -224,6 +225,7 @@
             datasets=datasets
         )
         self.unit = "expression level"
+        self._genes = list(set(genes))
 
     def plot(self, *args, backend="matplotlib", **kwargs):
         """
@@ -238,18 +240,34 @@
         Keyword arguments are passed on to the plot command.
         """
         wrapwidth = kwargs.pop("textwrap") if "textwrap" in kwargs else 40
-        kwargs["title"] = kwargs.pop("title", None) \
-            or "\n".join(wrap(f"{self.modality} measured in {self.anchor._regionspec or self.anchor.location}", wrapwidth))
-        kwargs["kind"] = "box"
+        kwargs["title"] = kwargs.pop(
+            "title",
+            "\n".join(wrap(
+                f"{self.modality}\n{self.anchor._regionspec or self.anchor.location}",
+                wrapwidth
+            ))
+        )
+        kwargs["kind"] = kwargs.get("kind", "box")
         if backend == "matplotlib":
-            for arg in ['yerr', 'y', 'ylabel', 'xlabel', 'width']:
-                assert arg not in kwargs
-            default_kwargs = {
-                "grid": True, "legend": False, 'by': "gene",
-                'column': ['level'], 'showfliers': False, 'ax': None,
-                'ylabel': 'expression level'
-            }
-            return self.data.plot(*args, **{**default_kwargs, **kwargs}, backend=backend)
+            if kwargs["kind"] == "box":
+                from matplotlib.pyplot import tight_layout
+
+                title = kwargs.pop("title")
+                default_kwargs = {
+                    "grid": True,
+                    'by': "gene",
+                    'column': ['level'],
+                    'showfliers': False,
+                    'ylabel': 'expression level',
+                    'xlabel': 'gene',
+                    'color': 'dimgray',
+                    'rot': 90 if len(self._genes) > 1 else 0,
+                }
+                ax, *_ = self.data.plot(*args, backend=backend, **{**default_kwargs, **kwargs})
+                ax.set_title(title)
+                tight_layout()
+                return ax
+            return self.data.plot(*args, backend=backend, **kwargs)
         elif backend == "plotly":
             kwargs["title"] = kwargs["title"].replace('\n', "<br>")
             return self.data.plot(y='level', x='gene', backend=backend, **kwargs)
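With these defaults, a plain matplotlib call now yields a box plot grouped by gene, with dimgray boxes, rotated tick labels when several genes are present, and the title set on the returned axis; other plot kinds fall through to DataFrame.plot unchanged. A hedged usage sketch, where `gexp` stands for a GeneExpressions feature obtained from a siibra query:

    # Sketch only: `gexp` is an assumed GeneExpressions instance.
    ax = gexp.plot()                    # box plot of expression levels, grouped by gene
    ax.figure.savefig("gene_expression_boxplot.png")

    gexp.plot(kind="hist", y="level")   # non-box kinds go straight to DataFrame.plot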
siibra/features/tabular/layerwise_bigbrain_intensities.py
@@ -13,13 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import cortical_profile
-from . import tabular
+from typing import TYPE_CHECKING
 
 import pandas as pd
 import numpy as np
 
-from typing import TYPE_CHECKING
+from . import tabular
+from . import cortical_profile
+
 if TYPE_CHECKING:
     from ...features.anchor import AnatomicalAnchor
 