vdata 1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. vdata/IO/__init__.py +19 -0
  2. vdata/IO/errors.py +53 -0
  3. vdata/IO/logger.py +261 -0
  4. vdata/__init__.py +36 -0
  5. vdata/_meta.py +3 -0
  6. vdata/_typing.py +46 -0
  7. vdata/anndata_proxy/__init__.py +3 -0
  8. vdata/anndata_proxy/anndata.py +285 -0
  9. vdata/anndata_proxy/containers.py +209 -0
  10. vdata/anndata_proxy/dataframe.py +55 -0
  11. vdata/array_view.py +183 -0
  12. vdata/data/__init__.py +11 -0
  13. vdata/data/_file.py +20 -0
  14. vdata/data/_indexing.py +104 -0
  15. vdata/data/_parse/__init__.py +10 -0
  16. vdata/data/_parse/anndata.py +121 -0
  17. vdata/data/_parse/data.py +324 -0
  18. vdata/data/_parse/objects/__init__.py +7 -0
  19. vdata/data/_parse/objects/layers.py +86 -0
  20. vdata/data/_parse/objects/objects.py +39 -0
  21. vdata/data/_parse/objects/obs.py +138 -0
  22. vdata/data/_parse/objects/uns.py +17 -0
  23. vdata/data/_parse/objects/var.py +100 -0
  24. vdata/data/_parse/time.py +88 -0
  25. vdata/data/_parse/utils.py +14 -0
  26. vdata/data/arrays/__init__.py +25 -0
  27. vdata/data/arrays/base.py +340 -0
  28. vdata/data/arrays/layers.py +146 -0
  29. vdata/data/arrays/lazy.py +34 -0
  30. vdata/data/arrays/obs.py +343 -0
  31. vdata/data/arrays/var.py +325 -0
  32. vdata/data/arrays/view.py +177 -0
  33. vdata/data/concatenate.py +186 -0
  34. vdata/data/convert.py +311 -0
  35. vdata/data/hash.py +50 -0
  36. vdata/data/read.py +139 -0
  37. vdata/data/vdata.py +890 -0
  38. vdata/data/view.py +566 -0
  39. vdata/data/write.py +138 -0
  40. vdata/names.py +6 -0
  41. vdata/tdf/__init__.py +6 -0
  42. vdata/tdf/_parse.py +249 -0
  43. vdata/tdf/base.py +1322 -0
  44. vdata/tdf/dataframe.py +507 -0
  45. vdata/tdf/index.py +52 -0
  46. vdata/tdf/indexers.py +199 -0
  47. vdata/tdf/indexing.py +299 -0
  48. vdata/tdf/view.py +187 -0
  49. vdata/timedict.py +32 -0
  50. vdata/timepoint/__init__.py +11 -0
  51. vdata/timepoint/_functions.py +104 -0
  52. vdata/timepoint/_typing.py +3 -0
  53. vdata/timepoint/array.py +223 -0
  54. vdata/timepoint/range.py +82 -0
  55. vdata/timepoint/timepoint.py +126 -0
  56. vdata/update.py +181 -0
  57. vdata/utils.py +119 -0
  58. vdata-1.0rc1.dist-info/LICENSE +514 -0
  59. vdata-1.0rc1.dist-info/METADATA +578 -0
  60. vdata-1.0rc1.dist-info/RECORD +62 -0
  61. vdata-1.0rc1.dist-info/WHEEL +5 -0
  62. vdata-1.0rc1.dist-info/top_level.txt +1 -0
vdata/IO/__init__.py ADDED
@@ -0,0 +1,19 @@
1
# coding: utf-8
# Created on 11/6/20 6:07 PM
# Author : matteo

# ====================================================
# imports
from .errors import IncoherenceError, ShapeError, VClosedFileError, VLockError, VReadOnlyError
from .logger import generalLogger, getLoggingLevel, setLoggingLevel

# public API of the vdata.IO subpackage: the shared logger, its level
# accessors, and the package's custom error classes
__all__ = [
    'generalLogger',
    'setLoggingLevel',
    'getLoggingLevel',
    'ShapeError',
    'IncoherenceError',
    'VLockError',
    'VClosedFileError',
    'VReadOnlyError'
]
vdata/IO/errors.py ADDED
@@ -0,0 +1,53 @@
1
+ from . import logger
2
+
3
+
4
# Errors
class VBaseError(Exception):
    """Base class for vdata's custom errors.

    The error message is routed through the package logger when the error is
    rendered, instead of being printed directly.
    """

    def __init__(self, msg: str = ""):
        self.msg = msg

    def __str__(self) -> str:
        # emit through the general logger so level filtering / coloring applies
        message = self.msg
        logger.generalLogger.error(message)
        return message
15
+
16
+
17
class ShapeError(VBaseError):
    """Raised when a variable does not have the expected shape."""
22
+
23
+
24
class IncoherenceError(VBaseError):
    """Raised when data formats are incoherent with one another."""
29
+
30
+
31
class VLockError(VBaseError):
    """Raised on tdf lock errors."""
36
+
37
+
38
class VClosedFileError(VBaseError):
    """
    Custom error for operations on a closed backing file.
    """

    def __init__(self, msg: str = ""):
        """
        :param msg: optional custom message; defaults to a generic closed-file notice.
        """
        # fix: the original discarded a caller-supplied message entirely; honor a
        # non-empty msg, keep the historical default otherwise. Also fixes the
        # docstring, which was a copy-paste of VLockError's ("tdf lock errors").
        super().__init__(msg or "Closed backing file !")
44
+
45
+
46
class VReadOnlyError(VBaseError):
    """
    Custom error for modifications on read only data.
    """

    def __init__(self, msg: str = ""):
        """
        :param msg: optional custom message; defaults to a generic read-only notice.
        """
        # fix: the original discarded a caller-supplied message entirely; honor a
        # non-empty msg, keep the historical default otherwise.
        super().__init__(msg or "Read-only file !")
53
+
vdata/IO/logger.py ADDED
@@ -0,0 +1,261 @@
1
+ from __future__ import annotations
2
+
3
+ import logging.config
4
+ import os
5
+ import sys
6
+ import traceback
7
+ import warnings
8
+ from enum import Enum
9
+ from pathlib import Path
10
+ from types import TracebackType
11
+ from typing import Any, Callable, Optional, Type
12
+
13
+ from vdata.IO import errors
14
+
15
+ warnings.simplefilter(action="ignore", category=FutureWarning)
16
+
17
+
18
class LoggingLevel(Enum):
    """Available logging levels, numerically identical to the stdlib ``logging`` levels."""

    DEBUG = logging.DEBUG        # 10
    INFO = logging.INFO          # 20
    WARNING = logging.WARNING    # 30
    ERROR = logging.ERROR        # 40
    CRITICAL = logging.CRITICAL  # 50
24
+
25
+
26
class Color:
    # ANSI escape sequences used to colorize console log output.
    TCYAN = "\033[36m"    # cyan text (used by info())
    TORANGE = "\033[33m"  # yellow/orange text (used by warning())
    TRED = "\033[31m"     # red text (used by error() and critical())
    BBLACK = "\033[40m"   # black background (used by critical())
    BGREY = "\033[100m"   # grey background (used by debug())
    ENDC = "\033[0m"      # reset all terminal attributes
33
+
34
+
35
class Tb:
    # Module-level holder for the most recent traceback and exception type,
    # filled in by the custom sys.excepthook so that _VLogger.uncaught_error()
    # can walk and report them later.
    trace: Optional[TracebackType] = None
    exception: Type[BaseException] = BaseException
38
+
39
+
40
def callable_msg(level: LoggingLevel) -> Callable[[Callable[..., None]], Callable[..., Any]]:
    """
    Decorator factory for _VLogger methods: skip building the log message when
    <level> is disabled on the underlying logger.

    The decorated method accepts either a plain string or a zero-argument
    callable producing the string; the callable is only invoked when the logger
    is enabled for <level>, avoiding the cost of formatting discarded messages.
    """
    from functools import wraps  # local import: keeps module-level deps unchanged

    def inner(func: Callable[[_VLogger, str | Callable[[], str]], None]) -> Callable[..., Any]:
        @wraps(func)  # fix: preserve the wrapped method's __name__ / __doc__
        def wrapper(self: _VLogger, msg: str | Callable[[], str]) -> None:
            if not self.logger.isEnabledFor(level.value):
                return None

            if callable(msg):
                msg = msg()

            return func(self, msg)

        return wrapper

    return inner
54
+
55
+
56
# code
class _VLogger:
    """
    Custom logger for reporting messages to the console.
    Logging levels are :
        - DEBUG
        - INFO
        - WARNING
        - ERROR
        - CRITICAL

    The default minimal level for logging is <WARNING>.
    """

    def __init__(self, logger_level: LoggingLevel = LoggingLevel.WARNING):
        """
        :param logger_level: minimal log level for the logger. (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        """
        # load configuration from the logger.conf file shipped next to this module
        logging.config.fileConfig(
            Path(os.path.dirname(__file__)) / "logger.conf",
            defaults={"log_level": logger_level.name},
            disable_existing_loggers=False,
        )

        # get logger
        self.logger = logging.getLogger("vdata.vlogger")

    @property
    def level(self) -> LoggingLevel:
        """
        Get the logging level.
        :return: the logging level.
        """
        return LoggingLevel(self.logger.level)

    @level.setter
    def level(self, log_level: LoggingLevel | str) -> None:
        """
        Re-init the logger, for setting a new minimal logging level.
        :param log_level: minimal log level for the logger. (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        """
        log_level = _as_log_level(log_level)

        # keep the logger and all of its handlers in sync
        self.logger.setLevel(log_level.name)
        for handler in self.logger.handlers:
            handler.setLevel(log_level.name)

    @staticmethod
    def _getBaseMsg(msg: str) -> str:
        """
        Build the message to log.

        :param msg: the message to be logged
        :return: the formatted message
        """
        # NOTE(review): the caller-filename prefixing logic that used to live here
        # was disabled; messages are currently returned unchanged.
        return msg

    @callable_msg(LoggingLevel.DEBUG)
    def debug(self, msg: str) -> None:
        """
        Log a debug message (level 10)

        :param msg: the message to be logged
        """
        self.logger.debug(Color.BGREY + self._getBaseMsg(msg) + Color.ENDC)

    @callable_msg(LoggingLevel.INFO)
    def info(self, msg: str) -> None:
        """
        Log an info message (level 20)

        :param msg: the message to be logged
        """
        self.logger.info(Color.TCYAN + self._getBaseMsg(msg) + Color.ENDC)

    @callable_msg(LoggingLevel.WARNING)
    def warning(self, msg: str) -> None:
        """
        Log a warning message (level 30)

        :param msg: the message to be logged
        """
        self.logger.warning(Color.TORANGE + self._getBaseMsg(msg) + Color.ENDC)

    @callable_msg(LoggingLevel.ERROR)
    def error(self, msg: str) -> None:
        """
        Log an error message (level 40) and terminate the process.

        :param msg: the message to be logged
        """
        self.logger.error(Color.TRED + self._getBaseMsg(msg) + Color.ENDC)
        # fix: sys.exit() raises SystemExit just like quit() did, but quit() is
        # injected by the `site` module and may be absent (python -S, frozen apps).
        sys.exit()

    def uncaught_error(self, msg: str) -> None:
        """
        Log an uncaught (not originating from a custom error class) error message (level 40)

        :param msg: the message to be logged
        """
        traceback.print_tb(Tb.trace)

        # walk down to the innermost frame to report a meaningful module name
        last = None
        while Tb.trace is not None:
            last = Tb.trace.tb_frame
            Tb.trace = Tb.trace.tb_next

        self.logger.error(
            f"[{last.f_globals['__name__'] if last is not None else 'UNCAUGHT'} : {Tb.exception.__name__}] {msg}"
        )

    @callable_msg(LoggingLevel.CRITICAL)
    def critical(self, msg: str) -> None:
        """
        Log a critical message (level 50)

        :param msg: the message to be logged
        """
        self.logger.critical(Color.TRED + Color.BBLACK + self._getBaseMsg(msg) + Color.ENDC)
204
+
205
+
206
# module-level singleton logger shared across the vdata package
generalLogger = _VLogger()
207
+
208
+
209
def _as_log_level(log_level: LoggingLevel | str) -> LoggingLevel:
    """
    Coerce a LoggingLevel member or a level *name* into a LoggingLevel.

    :param log_level: a LoggingLevel member or its name (e.g. 'DEBUG').
    :return: the matching LoggingLevel member.
    :raises KeyError: if the name does not match any LoggingLevel member.
    """
    if not isinstance(log_level, LoggingLevel):
        try:
            # Enum subscription looks members up by NAME, e.g. LoggingLevel['DEBUG']
            return LoggingLevel[log_level]

        except KeyError as e:
            # fix: list member NAMES — the lookup above is by name, so the old
            # message (which listed numeric values like 10, 20, ...) was misleading
            raise KeyError(
                f"Incorrect logging level '{log_level}', " f"should be in {[ll.name for ll in LoggingLevel]}"
            ) from e

    return log_level
220
+
221
+
222
def setLoggingLevel(log_level: LoggingLevel | str) -> None:
    """
    Set the logging level for package vdata.
    :param log_level: a logging level to set, in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
    """
    # the `level` setter also propagates the new level to every handler
    generalLogger.level = _as_log_level(log_level)
228
+
229
+
230
def getLoggingLevel() -> LoggingLevel:
    """
    Get the logging level for package vdata.
    :return: the logging level for package vdata.
    """
    return generalLogger.level
236
+
237
+
238
# disable traceback messages, except if the logging level is set to DEBUG
def exception_handler(
    exception_type: Type[BaseException],
    exception: BaseException,
    traceback_: TracebackType,
    debug_hook: Callable[[type[BaseException], BaseException, TracebackType | None], Any] = sys.excepthook,
) -> None:
    """
    Custom sys.excepthook: route uncaught exceptions through the vdata logger.

    :param exception_type: class of the uncaught exception.
    :param exception: the uncaught exception instance.
    :param traceback_: the exception's traceback.
    :param debug_hook: original excepthook, captured at definition time, used in DEBUG mode.
    """
    # stash the traceback/exception so _VLogger.uncaught_error() can walk them
    Tb.trace = traceback_
    Tb.exception = exception_type

    if generalLogger.level == LoggingLevel.DEBUG:
        if not issubclass(exception_type, errors.VBaseError):
            generalLogger.uncaught_error(str(exception))
        # in DEBUG mode, fall through to the original hook (full traceback)
        debug_hook(exception_type, exception, traceback_)
    else:
        if not issubclass(exception_type, errors.VBaseError):
            generalLogger.uncaught_error(str(exception))
        else:
            # custom vdata errors log themselves through __str__
            print(exception)

    # NOTE(review): this prints the traceback unconditionally, which appears to
    # contradict the comment above about disabling tracebacks outside DEBUG —
    # confirm the intended indentation/placement of this call.
    traceback.print_tb(traceback_)


sys.excepthook = exception_handler  # type: ignore[assignment]
vdata/__init__.py ADDED
@@ -0,0 +1,36 @@
1
+ """Annotated, temporal and multivariate observation data."""
2
+
3
+ from ch5mpy import H5Mode
4
+
5
+ from vdata.data import VData, VDataView, concatenate, convert_anndata_to_vdata
6
+ from vdata.IO import (
7
+ IncoherenceError,
8
+ ShapeError,
9
+ VLockError,
10
+ getLoggingLevel,
11
+ setLoggingLevel,
12
+ )
13
+ from vdata.tdf import Index, TemporalDataFrame, TemporalDataFrameView
14
+ from vdata.timepoint import TimePoint
15
+
16
# convenience module-level aliases to the VData reader classmethods
read = VData.read
read_from_csv = VData.read_from_csv
read_from_anndata = VData.read_from_anndata

# re-export ch5mpy's H5Mode under a shorter name
mode = H5Mode

# public API of the vdata package
__all__ = [
    "VData",
    "TemporalDataFrame",
    "VDataView",
    "TemporalDataFrameView",
    "convert_anndata_to_vdata",
    "setLoggingLevel",
    "getLoggingLevel",
    "concatenate",
    "ShapeError",
    "IncoherenceError",
    "VLockError",
    "TimePoint",
    "Index",
]
vdata/_meta.py ADDED
@@ -0,0 +1,3 @@
1
class PrettyRepr(type):
    """Metaclass rendering classes as ``vdata.<ClassName>`` in their repr."""

    def __repr__(self) -> str:
        return "vdata.{}".format(self.__name__)
vdata/_typing.py ADDED
@@ -0,0 +1,46 @@
1
+ from types import EllipsisType
2
+ from typing import Collection, SupportsIndex, TypedDict, TypeVar, Union
3
+
4
+ import ch5mpy as ch
5
+ import numpy as np
6
+ import numpy.typing as npt
7
+
8
+ import vdata.timepoint as tp
9
+ from vdata.array_view import NDArrayView
10
+ from vdata.timedict import TimeDict
11
+
12
# generic type variable
_T = TypeVar("_T")
# numpy scalar type variable
_T_NP = TypeVar("_T_NP", bound=np.generic)

# numeric scalars (python or numpy)
IF = Union[int, float, np.int_, np.float_]
# numeric or string scalars (python or numpy)
IFS = Union[np.int_, int, np.float_, float, np.str_, str]

# any array-like: in-memory numpy array, HDF5-backed array, or array view
AnyNDArrayLike = Union[npt.NDArray[_T_NP], ch.H5Array[_T_NP], NDArrayView[_T_NP]]

# array-likes restricted to int/float elements
AnyNDArrayLike_IF = Union[
    npt.NDArray[np.int_ | np.float_], ch.H5Array[np.int_ | np.float_], NDArrayView[np.int_ | np.float_]
]

# numpy / HDF5-backed / view arrays of int, float or str elements
NDArray_IFS = npt.NDArray[np.int_ | np.float_ | np.str_]
NDArrayLike_IFS = Union[npt.NDArray[np.int_ | np.float_ | np.str_], ch.H5Array[np.int_ | np.float_ | np.str_]]
AnyNDArrayLike_IFS = Union[
    npt.NDArray[np.int_ | np.float_ | np.str_],
    ch.H5Array[np.int_ | np.float_ | np.str_],
    NDArrayView[np.int_ | np.float_ | np.str_],
]

# collection of numeric or string scalars
Collection_IFS = Collection[np.int_ | int | np.float_ | float | np.str_ | str]
# mapping-likes: plain dict or HDF5-backed dict; AnyDictLike additionally accepts TimeDict
DictLike = Union[dict[str, _T], ch.H5Dict[_T]]
AnyDictLike = Union[dict[str, _T], ch.H5Dict[_T], TimeDict]

# Slicer: values accepted for indexing along an axis;
# PreSlicer: same, before normalization (additionally allows boolean masks)
Slicer = Union[IFS, tp.TimePoint, Collection[Union[IFS, tp.TimePoint]], range, slice, EllipsisType]
PreSlicer = Union[IFS, tp.TimePoint, Collection[Union[IFS, bool, tp.TimePoint]], range, slice, EllipsisType]
Indexer = Union[SupportsIndex, slice, npt.NDArray[np.int_], npt.NDArray[np.bool_] | None]
39
+
40
+
41
class AttrDict(TypedDict):
    """Typed dictionary of attribute metadata for a temporal dataframe."""
    # NOTE(review): field meanings inferred from names — confirm against vdata.tdf.
    name: str
    timepoints_column_name: str | None
    locked_indices: bool
    locked_columns: bool
    repeating_index: bool
@@ -0,0 +1,3 @@
1
+ from vdata.anndata_proxy.anndata import AnnDataProxy
2
+
3
+ __all__ = ["AnnDataProxy"]
@@ -0,0 +1,285 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+ from typing import TYPE_CHECKING, Any, Literal, Sequence
5
+
6
+ import numpy.typing as npt
7
+ import pandas as pd
8
+ from anndata import AnnData
9
+ from h5dataframe import H5DataFrame
10
+ from scipy import sparse
11
+
12
+ from vdata._typing import AnyNDArrayLike, DictLike
13
+ from vdata.anndata_proxy.containers import H5DataFrameContainerProxy, TemporalDataFrameContainerProxy
14
+ from vdata.anndata_proxy.dataframe import DataFrameProxy_TDF
15
+ from vdata.data._file import NoData
16
+
17
+ if TYPE_CHECKING:
18
+ from vdata.data import VData, VDataView
19
+
20
+
21
def skip_time_axis(slicer: Any) -> tuple[Any, ...]:
    """
    Prepend a full slice over the time axis to <slicer>, turning an AnnData-style
    (obs, var) index into a VData-style (time, obs, var) index.

    :param slicer: a single index element or a tuple of index elements.
    :return: the index tuple with `slice(None)` prepended for the time axis.
    """
    # fix: the original tested `isinstance(slice, tuple)` — the builtin `slice`
    # TYPE, which is never a tuple — so tuple slicers were wrapped into a nested
    # tuple instead of being extended with the time axis.
    if isinstance(slicer, tuple):
        return (slice(None),) + slicer

    return (slice(None), slicer)
26
+
27
+
28
class AnnDataProxy(AnnData):  # type: ignore[misc]
    """
    Class faking to be an anndata.AnnData object but actually wrapping a VData.

    Attribute access (X, layers, obs, var, obsm, obsp, varm, varp, uns) is
    forwarded to proxy containers built around the wrapped VData. Mutation
    through the AnnData interface is not supported: setters and deleters
    raise NotImplementedError.
    """

    # fixed attribute set: the wrapped VData, the selected X layer name,
    # and one proxy per AnnData container
    __slots__ = "_vdata", "_X", "_layers", "_obs", "_obsm", "_obsp", "_var", "_varm", "_varp", "_uns"

    # region magic methods
    def __init__(self, vdata: VData | VDataView, X: str | None = None) -> None:
        """
        Args:
            vdata: a VData object to wrap.
            X: an optional layer name to use as X.
        """
        self._X = None if X is None else str(X)

        # fail early if the requested X layer does not exist
        if self._X is not None and self._X not in vdata.layers:
            raise ValueError(f"Could not find layer '{self._X}' in the given VData.")

        self._init_from_vdata(vdata)

    def _init_from_vdata(self, vdata: VData | VDataView) -> None:
        # (Re)build every proxy container around <vdata>; also used by the
        # _inplace_subset_* methods to re-point this proxy at a sliced view.
        self._vdata = vdata
        self._layers = TemporalDataFrameContainerProxy(vdata, name="layers")
        self._obs = DataFrameProxy_TDF(vdata.obs)
        self._obsm = TemporalDataFrameContainerProxy(vdata, name="obsm")
        # obsp matrices are square: indexed by obs on both axes
        self._obsp = H5DataFrameContainerProxy(vdata.obsp, name="Obsp", index=vdata.obs.index, columns=vdata.obs.index)
        self._var = vdata.var
        self._varm = H5DataFrameContainerProxy(vdata.varm, name="Varm", index=vdata.var.index)
        # varp matrices are square: indexed by var on both axes
        self._varp = H5DataFrameContainerProxy(vdata.varp, name="Varp", index=vdata.var.index, columns=vdata.var.index)
        self._uns = vdata.uns

    def __repr__(self) -> str:
        return f"AnnDataProxy from {self._vdata}"

    def __sizeof__(self, show_stratified: bool | None = None) -> int:
        del show_stratified
        raise NotImplementedError

    def __delitem__(self, index: Any) -> None:
        raise NotImplementedError

    def __getitem__(self, index: Any) -> AnnDataProxy:
        """Returns a sliced view of the object."""
        # delegate slicing to the VData, adding a full slice over the time axis
        return AnnDataProxy(self._vdata[skip_time_axis(index)], X=self._X)

    def __setitem__(self, index: Any, val: int | float | npt.NDArray[Any] | sparse.spmatrix) -> None:
        raise NotImplementedError

    # endregion

    # region attributes
    @property
    def _n_obs(self) -> int:
        # total number of observations across all time points
        return self._vdata.n_obs_total

    @property
    def _n_vars(self) -> int:
        return self._vdata.n_var

    @property
    def X(self) -> AnyNDArrayLike[Any] | None:
        """Values of the layer selected as X, or None if no X layer was chosen."""
        if self._X is None:
            return None
        return self._vdata.layers[self._X].values

    @X.setter
    def X(self, value: Any) -> None:
        raise NotImplementedError

    @X.deleter
    def X(self) -> None:
        # only forgets the layer selection; the underlying layer is untouched
        self._X = None

    @property
    def layers(self) -> TemporalDataFrameContainerProxy:
        """Proxy over the VData's layers."""
        return self._layers

    @layers.setter
    def layers(self, value: Any) -> None:
        del value
        raise NotImplementedError

    @layers.deleter
    def layers(self) -> None:
        raise NotImplementedError

    @property
    def raw(self) -> None:
        # the wrapped VData has no `raw` equivalent
        raise NotImplementedError

    @raw.setter
    def raw(self, value: AnnData) -> None:
        raise NotImplementedError

    @raw.deleter
    def raw(self) -> None:
        raise NotImplementedError

    @property
    def obs(self) -> DataFrameProxy_TDF:
        """DataFrame-like proxy over the VData's obs TemporalDataFrame."""
        return self._obs

    @obs.setter
    def obs(self, value: pd.DataFrame) -> None:
        raise NotImplementedError

    @obs.deleter
    def obs(self) -> None:
        raise NotImplementedError

    @property
    def obs_names(self) -> pd.Index:
        """Names of observations (alias for `.obs.index`)."""
        return self.obs.index

    @obs_names.setter
    def obs_names(self, names: Sequence[str]) -> None:
        raise NotImplementedError

    @property
    def var(self) -> H5DataFrame:
        """One-dimensional annotation of variables/ features (`pd.DataFrame`)."""
        return self._var

    @var.setter
    def var(self, value: pd.DataFrame) -> None:
        raise NotImplementedError

    @var.deleter
    def var(self) -> None:
        raise NotImplementedError

    @property
    def var_names(self) -> pd.Index:
        """Names of variables (alias for `.var.index`)."""
        return self.var.index

    @var_names.setter
    def var_names(self, names: Sequence[str]) -> None:
        raise NotImplementedError

    @property
    def uns(self) -> DictLike[Any]:
        """Unstructured annotation (ordered dictionary)."""
        return self._uns

    @uns.setter
    def uns(self, value: DictLike[Any]) -> None:
        raise NotImplementedError

    @uns.deleter
    def uns(self) -> None:
        raise NotImplementedError

    @property
    def obsm(self) -> TemporalDataFrameContainerProxy:
        """Proxy over the VData's obsm container."""
        return self._obsm

    @obsm.setter
    def obsm(self, value: Any) -> None:
        raise NotImplementedError

    @obsm.deleter
    def obsm(self) -> None:
        raise NotImplementedError

    @property
    def varm(self) -> H5DataFrameContainerProxy:
        """Proxy over the VData's varm container."""
        return self._varm

    @varm.setter
    def varm(self, value: Any) -> None:
        raise NotImplementedError

    @varm.deleter
    def varm(self) -> None:
        raise NotImplementedError

    @property
    def obsp(self) -> H5DataFrameContainerProxy:
        """Proxy over the VData's obsp container."""
        return self._obsp

    @obsp.setter
    def obsp(self, value: Any) -> None:
        raise NotImplementedError

    @obsp.deleter
    def obsp(self) -> None:
        raise NotImplementedError

    @property
    def varp(self) -> H5DataFrameContainerProxy:
        """Proxy over the VData's varp container."""
        return self._varp

    @varp.setter
    def varp(self, value: Any) -> None:
        raise NotImplementedError

    @varp.deleter
    def varp(self) -> None:
        raise NotImplementedError

    @property
    def filename(self) -> Path | None:
        """Path of the backing file, or None when the VData has no backing data."""
        if self._vdata.data is NoData._:
            return None
        return Path(self._vdata.data.filename)

    @filename.setter
    def filename(self, filename: Path | None) -> None:
        raise NotImplementedError

    # endregion

    # region predicates
    @property
    def isbacked(self) -> bool:
        """`True` if object is backed on disk, `False` otherwise."""
        return self._vdata.is_backed

    @property
    def is_view(self) -> bool:
        """`True` if object is view of another AnnData object, `False` otherwise."""
        return self._vdata.is_view

    # endregion

    # region methods
    def as_vdata(self) -> VData | VDataView:
        """Return the wrapped VData object."""
        return self._vdata

    def rename_categories(self, key: str, categories: Sequence[Any]) -> None:
        raise NotImplementedError

    def strings_to_categoricals(self, df: pd.DataFrame | None = None) -> None:
        raise NotImplementedError

    def _inplace_subset_var(self, index: Any) -> None:
        # re-point this proxy at a view sliced along the var axis
        self._init_from_vdata(self._vdata[skip_time_axis((slice(None), index))])

    def _inplace_subset_obs(self, index: Any) -> None:
        # re-point this proxy at a view sliced along the obs axis
        self._init_from_vdata(self._vdata[skip_time_axis(index)])

    def copy(self, filename: Path | None = None) -> None:
        """Full copy, optionally on disk."""
        raise NotImplementedError

    def write_h5ad(
        self,
        filename: Path | None = None,
        compression: Literal["gzip", "lzf"] | None = None,
        compression_opts: Any = None,
        as_dense: Sequence[str] = (),
    ) -> None:
        raise NotImplementedError

    # endregion
+ # endregion