lamindb 0.76.2__py3-none-any.whl → 0.76.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -25,7 +25,7 @@ Key functionality
 
  context
  connect
- Curate
+ Curator
  view
  save
 
@@ -44,7 +44,7 @@ Modules & settings
  """
 
  # denote a release candidate for 0.1.0 with 0.1rc1, 0.1a1, 0.1b1, etc.
- __version__ = "0.76.2"
+ __version__ = "0.76.3"
 
  import os as _os
 
@@ -94,7 +94,7 @@ if _check_instance_setup(from_lamindb=True):
  _ulabel,
  integrations,
  )
- from ._curate import Curate
+ from ._curate import Curator
  from ._save import save
  from ._view import view
  from .core._context import context
@@ -110,6 +110,7 @@ if _check_instance_setup(from_lamindb=True):
 
  track = context.track # backward compat
  finish = context.finish # backward compat
+ Curate = Curator # backward compat
  settings.__doc__ = """Global :class:`~lamindb.core.Settings`."""
  context.__doc__ = """Global :class:`~lamindb.core.Context`."""
  from django.db.models import Q
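The public entry point is renamed from `Curate` to `Curator`, with the old name kept as an alias (see `Curate = Curator # backward compat` above). A minimal sketch of what the alias means for user code, assuming a connected LaminDB instance so the lazy imports above actually run:

```python
import lamindb as ln

# the deprecated name still resolves to the very same class object
assert ln.Curate is ln.Curator

# new code should prefer the new spelling
curator_cls = ln.Curator
```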
lamindb/_artifact.py CHANGED
@@ -545,6 +545,7 @@ def __init__(artifact: Artifact, *args, **kwargs):
  skip_check_exists = (
  kwargs.pop("skip_check_exists") if "skip_check_exists" in kwargs else False
  )
+ _uid = kwargs.pop("_uid", None)
  if "default_storage" in kwargs:
  default_storage = kwargs.pop("default_storage")
  else:
@@ -560,6 +561,9 @@ def __init__(artifact: Artifact, *args, **kwargs):
  if "is_new_version_of" in kwargs:
  logger.warning("`is_new_version_of` will be removed soon, please use `revises`")
  revises = kwargs.pop("is_new_version_of")
+ assert not ( # noqa: S101
+ revises is not None and _uid is not None
+ ), "Can not init with both `revises` and `_uid`"
  if not len(kwargs) == 0:
  raise ValueError(
  "Only data, key, run, description, version, revises, visibility"
@@ -575,8 +579,10 @@ def __init__(artifact: Artifact, *args, **kwargs):
  raise ValueError(
  f"`key` is {key}, but `revises.key` is '{revises.key}'\n\n Either do *not* pass `key`.\n\n{note}"
  )
-
- provisional_uid, revises = create_uid(revises=revises, version=version)
+ if _uid is not None:
+ provisional_uid, revises = _uid, None
+ else:
+ provisional_uid, revises = create_uid(revises=revises, version=version)
  if revises is not None:
  if not isinstance(revises, Artifact):
  raise TypeError("`revises` has to be of type `Artifact`")
lamindb/_curate.py CHANGED
@@ -84,10 +84,34 @@ class CurateLookup:
  return colors.warning("No fields are found!")
 
 
- class DataFrameCurator:
+ class BaseCurator:
+ """Curate a dataset."""
+
+ def validate(self) -> bool:
+ """Validate dataset.
+
+ Returns:
+ Boolean indicating whether the dataset is validated.
+ """
+ pass
+
+ def save_artifact(self, description: str | None = None, **kwargs) -> Artifact:
+ """Save the dataset as artifact.
+
+ Args:
+ description: Description of the DataFrame object.
+ **kwargs: Object level metadata.
+
+ Returns:
+ A saved artifact record.
+ """
+ pass
+
+
+ class DataFrameCurator(BaseCurator):
  """Curation flow for a DataFrame object.
 
- See also :class:`~lamindb.Curate`.
+ See also :class:`~lamindb.Curator`.
 
  Args:
  df: The DataFrame object to curate.
@@ -101,7 +125,7 @@ class DataFrameCurator:
 
  Examples:
  >>> import bionty as bt
- >>> curate = ln.Curate.from_df(
+ >>> curate = ln.Curator.from_df(
  ... df,
  ... categoricals={
  ... "cell_type_ontology_id": bt.CellType.ontology_id,
@@ -184,6 +208,7 @@ class DataFrameCurator:
  using_key=self._using_key,
  validated_only=False,
  source=self._sources.get("columns"),
+ exclude=self._exclude.get("columns"),
  **kwargs,
  )
 
@@ -199,6 +224,7 @@ class DataFrameCurator:
  validated_only=validated_only,
  df=self._df, # Get the Feature type from df
  source=self._sources.get("columns"),
+ exclude=self._exclude.get("columns"),
  warning=False, # Do not warn about missing columns, just an info message
  **kwargs,
  )
@@ -251,6 +277,7 @@ class DataFrameCurator:
  using_key=self._using_key,
  validated_only=validated_only,
  source=self._sources.get(categorical),
+ exclude=self._exclude.get(categorical),
  **kwargs,
  )
 
@@ -330,9 +357,11 @@ class DataFrameCurator:
  class AnnDataCurator(DataFrameCurator):
  """Curation flow for ``AnnData``.
 
- See also :class:`~lamindb.Curate`.
+ See also :class:`~lamindb.Curator`.
+
+ Note that if genes are removed from the AnnData object, the object should be recreated using :meth:`~lamindb.Curator.from_anndata`.
 
- Note that if genes are removed from the AnnData object, the object should be recreated using :meth:`~lamindb.Curate.from_anndata`.
+ See :doc:`docs:cellxgene-curate` for instructions on how to curate against a specific cellxgene schema version.
 
  Args:
  data: The AnnData object or an AnnData-like path.
@@ -346,7 +375,7 @@ class AnnDataCurator(DataFrameCurator):
 
  Examples:
  >>> import bionty as bt
- >>> curate = ln.Curate.from_anndata(
+ >>> curate = ln.Curator.from_anndata(
  ... adata,
  ... var_index=bt.Gene.ensembl_gene_id,
  ... categoricals={
@@ -437,6 +466,7 @@ class AnnDataCurator(DataFrameCurator):
  validated_only=validated_only,
  organism=organism,
  source=self._sources.get("var_index"),
+ exclude=self._exclude.get("var_index"),
  )
 
  def _update_registry_all(self, validated_only: bool = True, **kwargs):
@@ -536,10 +566,10 @@ class AnnDataCurator(DataFrameCurator):
  class MuDataCurator:
  """Curation flow for a ``MuData`` object.
 
- See also :class:`~lamindb.Curate`.
+ See also :class:`~lamindb.Curator`.
 
  Note that if genes or other measurements are removed from the MuData object,
- the object should be recreated using :meth:`~lamindb.Curate.from_mudata`.
+ the object should be recreated using :meth:`~lamindb.Curator.from_mudata`.
 
  Args:
  mdata: The MuData object to curate.
@@ -556,7 +586,7 @@ class MuDataCurator:
 
  Examples:
  >>> import bionty as bt
- >>> curate = ln.Curate.from_mudata(
+ >>> curate = ln.Curator.from_mudata(
  ... mdata,
  ... var_index={
  ... "rna": bt.Gene.ensembl_gene_id,
@@ -641,6 +671,7 @@ class MuDataCurator:
  validated_only=validated_only,
  dtype="number",
  source=self._sources.get(modality, {}).get("var_index"),
+ exclude=self._exclude.get(modality, {}).get("var_index"),
  **kwargs,
  )
 
@@ -704,6 +735,7 @@ class MuDataCurator:
  validated_only=False,
  df=self._mdata[modality].obs,
  source=self._sources.get(modality, {}).get("columns"),
+ exclude=self._exclude.get(modality, {}).get("columns"),
  **self._kwargs, # type: ignore
  **kwargs,
  )
@@ -789,7 +821,8 @@ class MuDataCurator:
  field=var_field,
  key=f"{modality}_var_index",
  using_key=self._using_key,
- exclude=self._exclude.get(f"{modality}_var_index"),
+ source=self._sources.get(modality, {}).get("var_index"),
+ exclude=self._exclude.get(modality, {}).get("var_index"),
  **self._kwargs, # type: ignore
  )
  validated_var &= is_validated_var
@@ -846,19 +879,19 @@ class MuDataCurator:
  return self._artifact
 
 
- class Curate:
- """Curation flow.
+ class Curator(BaseCurator):
+ """Dataset curator.
 
  Data curation entails accurately labeling datasets with standardized metadata
  to facilitate data integration, interpretation and analysis.
 
  The curation flow has several steps:
 
- 1. Create a :class:`Curate` object corresponding to the object type that you want to curate:
+ 1. Instantiate `Curator` from one of the following dataset objects:
 
- - :meth:`~lamindb.Curate.from_df`
- - :meth:`~lamindb.Curate.from_anndata`
- - :meth:`~lamindb.Curate.from_mudata`
+ - :meth:`~lamindb.Curator.from_df`
+ - :meth:`~lamindb.Curator.from_anndata`
+ - :meth:`~lamindb.Curator.from_mudata`
 
  During object creation, any passed categoricals found in the object will be saved.
 
@@ -867,7 +900,7 @@ class Curate:
  - Values that can successfully validated and already exist in the registry.
  - Values which are new and not yet validated or potentially problematic values.
 
- 3. Determine how to handle validated and unvalidated values:
+ 3. Determine how to handle validated and non-validated values:
 
  - Validated values not yet in the registry can be automatically registered using :meth:`~lamindb.core.DataFrameCurator.add_validated_from`.
  - Valid and new values can be registered using :meth:`~lamindb.core.DataFrameCurator.add_new_from`.
@@ -982,10 +1015,22 @@ def standardize_and_inspect(
  field: FieldAttr,
  registry: type[Record],
  standardize: bool = False,
+ exclude: str | list | None = None,
  **kwargs,
  ):
  """Standardize and inspect values using a registry."""
- filter_kwargs = get_current_filter_kwargs(registry, kwargs)
+ # inspect exclude values in the default instance
+ values = list(values)
+ include_validated = []
+ if exclude is not None:
+ exclude = [exclude] if isinstance(exclude, str) else exclude
+ exclude = [i for i in exclude if i in values]
+ if len(exclude) > 0:
+ # exclude values are validated without source and organism
+ inspect_result_exclude = registry.inspect(exclude, field=field, mute=True)
+ # if exclude values are validated, remove them from the values
+ values = [i for i in values if i not in inspect_result_exclude.validated]
+ include_validated = inspect_result_exclude.validated
 
  if standardize:
  if hasattr(registry, "standardize") and hasattr(
@@ -993,11 +1038,17 @@ def standardize_and_inspect(
  "synonyms", # https://github.com/laminlabs/lamindb/issues/1685
  ):
  standardized_values = registry.standardize(
- values, field=field, mute=True, **filter_kwargs
+ values, field=field, mute=True, **kwargs
  )
  values = standardized_values
 
- return registry.inspect(values, field=field, mute=True, **filter_kwargs)
+ inspect_result = registry.inspect(values, field=field, mute=True, **kwargs)
+ inspect_result._validated += include_validated
+ inspect_result._non_validated = [
+ i for i in inspect_result.non_validated if i not in include_validated
+ ]
+
+ return inspect_result
 
 
  def check_registry_organism(registry: Record, organism: str | None = None) -> dict:
@@ -1049,35 +1100,32 @@ def validate_categories(
  logger.indent = " "
 
  registry = field.field.model
+
  kwargs = check_registry_organism(registry, organism)
  kwargs.update({"source": source} if source else {})
+ kwargs_current = get_current_filter_kwargs(registry, kwargs)
 
  # inspect the default instance
- if exclude is not None:
- exclude = [exclude] if isinstance(exclude, str) else exclude
- # exclude values are validated without source and organism
- inspect_result = registry.inspect(exclude, field=field, mute=True)
- # if exclude values are validated, remove them from the values
- values = [i for i in values if i not in inspect_result.validated]
-
  inspect_result = standardize_and_inspect(
  values=values,
  field=field,
  registry=registry,
  standardize=standardize,
- **kwargs,
+ exclude=exclude,
+ **kwargs_current,
  )
  non_validated = inspect_result.non_validated
 
+ # inspect the using instance
  values_validated = []
  if using_key is not None and using_key != "default" and non_validated:
  registry_using = get_registry_instance(registry, using_key)
- # inspect the using instance
  inspect_result = standardize_and_inspect(
  values=non_validated,
  field=field,
  registry=registry_using,
  standardize=standardize,
+ exclude=exclude,
  **kwargs,
  )
  non_validated = inspect_result.non_validated
@@ -1091,7 +1139,7 @@ def validate_categories(
  public_records = registry.from_values(
  non_validated,
  field=field,
- **get_current_filter_kwargs(registry, kwargs),
+ **kwargs_current,
  )
  values_validated += [getattr(r, field.field.name) for r in public_records]
  finally:
@@ -1111,9 +1159,13 @@ def validate_categories(
  non_validated = [i for i in non_validated if i not in values_validated]
  n_non_validated = len(non_validated)
  if n_non_validated == 0:
- logger.indent = ""
- logger.success(f"{key} is validated against {colors.italic(model_field)}")
- return True, []
+ if n_validated == 0:
+ logger.indent = ""
+ logger.success(f"{key} is validated against {colors.italic(model_field)}")
+ return True, []
+ else:
+ # validated values still need to be saved to the current instance
+ return False, []
  else:
  are = "are" if n_non_validated > 1 else "is"
  print_values = _print_values(non_validated)
@@ -1138,6 +1190,9 @@ def validate_categories_in_df(
  **kwargs,
  ) -> tuple[bool, dict]:
  """Validate categories in DataFrame columns using LaminDB registries."""
+ if not fields:
+ return True, {}
+
  if sources is None:
  sources = {}
  validated = True
@@ -1270,6 +1325,7 @@ def update_registry(
  source: Record | None = None,
  standardize: bool = True,
  warning: bool = True,
+ exclude: str | list | None = None,
  **kwargs,
  ) -> None:
  """Save features or labels records in the default instance from the using_key instance.
@@ -1329,7 +1385,8 @@ def update_registry(
  field=field,
  registry=registry,
  standardize=standardize,
- **filter_kwargs,
+ exclude=exclude,
+ **filter_kwargs_current,
  )
  if not inspect_result_current.non_validated:
  all_labels = registry.from_values(
@@ -1348,6 +1405,7 @@ def update_registry(
  inspect_result_current.non_validated,
  field=field,
  using_key=using_key,
+ exclude=exclude,
  **filter_kwargs,
  )
 
@@ -1467,6 +1525,7 @@ def update_registry_from_using_instance(
  field: FieldAttr,
  using_key: str | None = None,
  standardize: bool = False,
+ exclude: str | list | None = None,
  **kwargs,
  ) -> tuple[list[str], list[str]]:
  """Save features or labels records from the using_key instance.
@@ -1492,6 +1551,7 @@ def update_registry_from_using_instance(
  field=field,
  registry=registry_using,
  standardize=standardize,
+ exclude=exclude,
  **kwargs,
  )
  labels_using = registry_using.filter(
@@ -1519,3 +1579,6 @@ def _save_organism(name: str): # pragma: no cover
  )
  organism.save()
  return organism
+
+
+ Curate = Curator # backward compat
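Pulling the `_curate.py` changes together: the class formerly exposed as `Curate` is now `Curator` (with a module-level `Curate = Curator` alias), and per-key `exclude` values are threaded through `standardize_and_inspect` and `update_registry`. A hedged usage sketch based on the `from_df` docstring example shown above; the tiny DataFrame is an illustrative placeholder and the call assumes a connected instance with the bionty schema available:

```python
import bionty as bt
import lamindb as ln
import pandas as pd

df = pd.DataFrame({"cell_type_ontology_id": ["CL:0000084"]})  # toy example

# instantiate under the new name; ln.Curate still works via the backward-compat alias
curate = ln.Curator.from_df(
    df,
    categoricals={
        "cell_type_ontology_id": bt.CellType.ontology_id,
    },
)
curate.validate()  # returns a bool, see BaseCurator.validate above
artifact = curate.save_artifact(description="curated dataframe")
```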
lamindb/_filter.py CHANGED
@@ -1,11 +1,13 @@
  from __future__ import annotations
 
- from lnschema_core import Artifact, Collection, Record
- from lnschema_core.types import VisibilityChoice
+ from typing import TYPE_CHECKING
 
- from lamindb import settings
+ from lnschema_core import Artifact, Collection
 
- from ._query_set import QuerySet
+ from ._query_set import QuerySet, process_expressions
+
+ if TYPE_CHECKING:
+ from lnschema_core import Record
 
 
  def filter(registry: type[Record], **expressions) -> QuerySet:
@@ -13,23 +15,7 @@ def filter(registry: type[Record], **expressions) -> QuerySet:
  _using_key = None
  if "_using_key" in expressions:
  _using_key = expressions.pop("_using_key")
- if registry in {Artifact, Collection}:
- # visibility is set to 0 unless expressions contains id or uid equality
- if not (
- "id" in expressions
- or "uid" in expressions
- or "uid__startswith" in expressions
- ):
- visibility = "visibility"
- if not any(e.startswith(visibility) for e in expressions):
- expressions[visibility] = (
- VisibilityChoice.default.value
- ) # default visibility
- # if visibility is None, do not apply a filter
- # otherwise, it would mean filtering for NULL values, which doesn't make
- # sense for a non-NULLABLE column
- elif visibility in expressions and expressions[visibility] is None:
- expressions.pop(visibility)
+ expressions = process_expressions(registry, expressions)
  qs = QuerySet(model=registry, using=_using_key)
  if len(expressions) > 0:
  return qs.filter(**expressions)
lamindb/_finish.py CHANGED
@@ -52,7 +52,7 @@ def save_context_core(
  return None
  notebook_content = read_notebook(filepath) # type: ignore
  is_consecutive = check_consecutiveness(
- notebook_content, calling_statement="ln.finish()"
+ notebook_content, calling_statement=".finish()"
  )
  if not is_consecutive:
  msg = " Do you still want to proceed with finishing? (y/n) "
lamindb/_query_set.py CHANGED
@@ -6,6 +6,7 @@ from typing import TYPE_CHECKING, Iterable, NamedTuple
  import pandas as pd
  from django.db import models
  from django.db.models import F
+ from lamin_utils import logger
  from lamindb_setup.core._docs import doc_args
  from lnschema_core.models import (
  Artifact,
@@ -13,8 +14,10 @@ from lnschema_core.models import (
  Collection,
  IsVersioned,
  Record,
+ Registry,
  Run,
  Transform,
+ VisibilityChoice,
  )
 
  from lamindb.core.exceptions import DoesNotExist
@@ -64,6 +67,27 @@ def one_helper(self):
  return self[0]
 
 
+ def process_expressions(registry: Registry, expressions: dict) -> dict:
+ if registry in {Artifact, Collection}:
+ # visibility is set to 0 unless expressions contains id or uid equality
+ if not (
+ "id" in expressions
+ or "uid" in expressions
+ or "uid__startswith" in expressions
+ ):
+ visibility = "visibility"
+ if not any(e.startswith(visibility) for e in expressions):
+ expressions[visibility] = (
+ VisibilityChoice.default.value
+ ) # default visibility
+ # if visibility is None, do not apply a filter
+ # otherwise, it would mean filtering for NULL values, which doesn't make
+ # sense for a non-NULLABLE column
+ elif visibility in expressions and expressions[visibility] is None:
+ expressions.pop(visibility)
+ return expressions
+
+
  def get(
  registry_or_queryset: type[Record] | QuerySet,
  idlike: int | str | None = None,
@@ -88,7 +112,7 @@ def get(
  return qs.one()
  else:
  assert idlike is None # noqa: S101
- # below behaves exactly like `.one()`
+ expressions = process_expressions(registry, expressions)
  return registry.objects.get(**expressions)
 
 
@@ -221,6 +245,7 @@ class QuerySet(models.QuerySet, CanValidate):
  # both Transform & Run might reference artifacts
  if self.model in {Artifact, Collection, Transform, Run}:
  for record in self:
+ logger.important(f"deleting {record}")
  record.delete(*args, **kwargs)
  else:
  self._delete_base_class(*args, **kwargs)
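The visibility handling that used to live in `_filter.filter` is now the standalone `process_expressions` helper, which both `filter` and `get` call. In effect: queries on `Artifact` or `Collection` without an `id`/`uid` constraint get the default visibility filter added implicitly, and passing `visibility=None` removes it. A hedged sketch of the resulting query behavior, assuming a connected instance; the uid is a hypothetical placeholder:

```python
import lamindb as ln

# no id/uid constraint: only records with default visibility are returned
visible = ln.Artifact.filter(suffix=".h5ad")

# visibility=None drops the implicit filter, so hidden/trashed records are included
everything = ln.Artifact.filter(suffix=".h5ad", visibility=None)

# an explicit uid lookup bypasses the visibility defaulting entirely
one = ln.Artifact.filter(uid="0123456789abcdefghij")  # hypothetical uid
```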
lamindb/core/__init__.py CHANGED
@@ -30,6 +30,7 @@ Curators:
  .. autosummary::
  :toctree: .
 
+ BaseCurator
  DataFrameCurator
  AnnDataCurator
  MuDataCurator
@@ -80,6 +81,7 @@ from lnschema_core.models import (
 
  from lamindb._curate import (
  AnnDataCurator,
+ BaseCurator,
  CurateLookup,
  DataFrameCurator,
  MuDataCurator,
lamindb/core/_context.py CHANGED
@@ -18,7 +18,7 @@ from ._sync_git import get_transform_reference_from_git_repo
  from ._track_environment import track_environment
  from .exceptions import (
  MissingContext,
- NotebookNotSaved,
+ NotebookFileNotSavedToDisk,
  NotebookNotSavedError,
  NoTitleError,
  TrackNotCalled,
@@ -504,8 +504,8 @@ class Context:
  get_seconds_since_modified(context._path) > 3
  and os.getenv("LAMIN_TESTING") is None
  ):
- raise NotebookNotSaved(
- "Please save the notebook in your editor right before running `ln.finish()`"
+ raise NotebookFileNotSavedToDisk(
+ "Please save the notebook manually in your editor right before running `ln.finish()`"
  )
  save_context_core(
  run=context.run,
lamindb/core/_mapped_collection.py CHANGED
@@ -17,6 +17,7 @@ from .storage._anndata_accessor import (
  GroupTypes,
  StorageType,
  _safer_read_index,
+ get_spec,
  registry,
  )
 
@@ -153,13 +154,30 @@ class MappedCollection:
  self._make_connections(path_list, parallel)
 
  self.n_obs_list = []
- for storage in self.storages:
+ for i, storage in enumerate(self.storages):
  with _Connect(storage) as store:
  X = store["X"]
+ store_path = self.path_list[i]
+ self._check_csc_raise_error(X, "X", store_path)
  if isinstance(X, ArrayTypes): # type: ignore
  self.n_obs_list.append(X.shape[0])
  else:
  self.n_obs_list.append(X.attrs["shape"][0])
+ for layer_key in self.layers_keys:
+ if layer_key == "X":
+ continue
+ self._check_csc_raise_error(
+ store["layers"][layer_key],
+ f"layers/{layer_key}",
+ store_path,
+ )
+ if self.obsm_keys is not None:
+ for obsm_key in self.obsm_keys:
+ self._check_csc_raise_error(
+ store["obsm"][obsm_key],
+ f"obsm/{obsm_key}",
+ store_path,
+ )
  self.n_obs = sum(self.n_obs_list)
 
  self.indices = np.hstack([np.arange(n_obs) for n_obs in self.n_obs_list])
@@ -281,6 +299,18 @@ class MappedCollection:
  vars = pd.Index(vars)
  return [i for i, vrs in enumerate(self.var_list) if not vrs.equals(vars)]
 
+ def _check_csc_raise_error(
+ self, elem: GroupType | ArrayType, key: str, path: UPathStr
+ ):
+ if isinstance(elem, ArrayTypes): # type: ignore
+ return
+ if get_spec(elem).encoding_type == "csc_matrix":
+ if not self.parallel:
+ self.close()
+ raise ValueError(
+ f"{key} in {path} is a csc matrix, `MappedCollection` doesn't support this format yet."
+ )
+
  def __len__(self):
  return self.n_obs
 
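`MappedCollection` now checks `X`, every requested layer, and every requested `obsm` slot and raises early on CSC-encoded sparse matrices. The check only inspects the AnnData encoding spec; if a store trips it, one possible workaround (my assumption, not part of this diff) is to re-encode the matrix as CSR before writing the `.h5ad`; the file paths below are hypothetical:

```python
import anndata as ad
from scipy.sparse import issparse

# hypothetical workaround: convert CSC-encoded matrices to CSR before
# building a MappedCollection from the store
adata = ad.read_h5ad("example.h5ad")  # hypothetical path
if issparse(adata.X) and adata.X.format == "csc":
    adata.X = adata.X.tocsr()
adata.write_h5ad("example_csr.h5ad")
```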
lamindb/core/exceptions.py CHANGED
@@ -20,7 +20,7 @@ class TrackNotCalled(SystemExit):
  pass
 
 
- class NotebookNotSaved(SystemExit):
+ class NotebookFileNotSavedToDisk(SystemExit):
  pass
 
 
lamindb/core/storage/__init__.py CHANGED
@@ -19,7 +19,7 @@ Array accessors.
  from lamindb_setup.core.upath import LocalPathClasses, UPath, infer_filesystem
 
  from ._backed_access import AnnDataAccessor, BackedAccessor
- from ._tiledbsoma import register_for_tiledbsoma_store, write_tiledbsoma_store
+ from ._tiledbsoma import save_tiledbsoma_experiment
  from ._valid_suffixes import VALID_SUFFIXES
  from .objects import infer_suffix, write_to_disk
  from .paths import delete_storage, load_to_memory
lamindb/core/storage/_tiledbsoma.py CHANGED
@@ -4,16 +4,17 @@ from typing import TYPE_CHECKING, Literal
 
  from anndata import AnnData
  from lamin_utils import logger
+ from lamindb_setup import settings as setup_settings
  from lamindb_setup.core._settings_storage import get_storage_region
  from lamindb_setup.core.upath import create_path
- from lnschema_core import Artifact, Run, Storage
- from upath import UPath
+ from lnschema_core import Artifact, Run
 
  if TYPE_CHECKING:
  from lamindb_setup.core.types import UPathStr
  from tiledbsoma import Collection as SOMACollection
  from tiledbsoma import Experiment as SOMAExperiment
  from tiledbsoma.io import ExperimentAmbientLabelMapping
+ from upath import UPath
 
 
  def _read_adata_h5ad_zarr(objpath: UPath):
@@ -67,21 +68,44 @@ def _open_tiledbsoma(
  return SOMAType.open(storepath_str, mode=mode, context=ctx)
 
 
- def register_for_tiledbsoma_store(
- store: UPathStr | Artifact | None,
+ def save_tiledbsoma_experiment(
+ # Artifact args
  adatas: list[AnnData | UPathStr],
- measurement_name: str,
- obs_field_name: str,
- var_field_name: str,
- append_obsm_varm: bool = False,
+ key: str | None = None,
+ description: str | None = None,
  run: Run | None = None,
- ) -> tuple[ExperimentAmbientLabelMapping, list[AnnData]]:
- """Register `AnnData` objects to append to `tiledbsoma.Experiment`.
-
- Pass the returned registration mapping and `AnnData` objects to `write_tiledbsoma_store`.
+ revises: Artifact | None = None,
+ # tiledbsoma.io.from_anndata args
+ measurement_name: str = "RNA",
+ obs_id_name: str = "obs_id",
+ var_id_name: str = "var_id",
+ append_obsm_varm: bool = False,
+ # additional keyword args for tiledbsoma.io.from_anndata
+ **kwargs,
+ ) -> Artifact:
+ """Write `AnnData` to `tiledbsoma.Experiment`.
 
- See `tiledbsoma.io.from_h5ad
- <https://tiledbsoma.readthedocs.io/en/latest/_autosummary/tiledbsoma.io.from_h5ad.html>`__.
+ Reads `AnnData` objects, writes them to `tiledbsoma.Experiment`, creates `lamindb.Artifact`
+ and saves the artifact.
+ Note that this function adds `lamin_run_uid` column to `obs` of in-memory `AnnData` objects
+ when it writes to a new store or appends to a store that has this column in `obs`.
+
+ See also `tiledbsoma.io.from_anndata
+ <https://tiledbsoma.readthedocs.io/en/latest/_autosummary/tiledbsoma.io.from_anndata.html>`__.
+
+ Args:
+ adatas: `AnnData` objects to write, in-memory or on-disk.
+ key: A relative path within default storage.
+ description: A description.
+ run: The run that creates the artifact.
+ revises: `lamindb.Artifact` with `tiledbsoma.Experiment` to append to.
+ Triggers a revision (a new untagged version).
+ measurement_name: The name of the measurement to store data in `tiledbsoma.Experiment`.
+ obs_id_name: Which `AnnData` `obs` column to use for append mode.
+ var_id_name: Which `AnnData` `var` column to use for append mode.
+ append_obsm_varm: Whether to append `obsm` and `varm` in append mode .
+ **kwargs: Additional keyword arguments passed to `tiledbsoma.io.from_anndata` that
+ writes `adatas`.
  """
  try:
  import tiledbsoma as soma
@@ -89,27 +113,38 @@ def register_for_tiledbsoma_store(
  except ImportError as e:
  raise ImportError("Please install tiledbsoma: pip install tiledbsoma") from e
 
- if isinstance(store, Artifact):
- storepath = store.path
+ from lamindb.core._data import get_run
+ from lamindb.core.storage.paths import auto_storage_key_from_artifact_uid
+ from lamindb.core.versioning import create_uid
+
+ run = get_run(run)
+
+ appending = revises is not None
+
+ if appending:
+ _uid = None
+ storepath = revises.path
  else:
- storepath = None if store is None else create_path(store)
+ _uid, _ = create_uid(n_full_id=20)
+ storage_key = auto_storage_key_from_artifact_uid(
+ _uid, ".tiledbsoma", is_dir=True
+ )
+ storepath = setup_settings.storage.root / storage_key
+
+ if storepath.protocol == "s3":
+ ctx = soma.SOMATileDBContext(tiledb_config=_tiledb_config_s3(storepath))
+ else:
+ ctx = None
+
+ storepath = storepath.as_posix()
 
  add_run_uid = True
- ctx = None
- if storepath is not None:
- if storepath.protocol == "s3":
- ctx = soma.SOMATileDBContext(tiledb_config=_tiledb_config_s3(storepath))
- if storepath.exists():
- with soma.Experiment.open(
- storepath.as_posix(), mode="r", context=ctx
- ) as store:
- add_run_uid = "lamin_run_uid" in store["obs"].schema.names
- storepath = storepath.as_posix()
-
- if add_run_uid:
- from lamindb.core._data import get_run
-
- run = get_run(run)
+ if appending:
+ with soma.Experiment.open(storepath, mode="r", context=ctx) as store:
+ add_run_uid = "lamin_run_uid" in store["obs"].schema.names
+
+ if add_run_uid and run is None:
+ raise ValueError("Pass `run`")
 
  adata_objects = []
  for adata in adatas:
@@ -117,10 +152,9 @@ def register_for_tiledbsoma_store(
  if add_run_uid:
  if adata.is_view:
  raise ValueError(
- "Can not register an `AnnData` view, please do `adata.copy()` before passing."
+ "Can not write an `AnnData` view, please do `adata.copy()` before passing."
  )
  else:
- logger.warning("Mutating in-memory AnnData.")
  adata.obs["lamin_run_uid"] = run.uid
  else:
  adata = _read_adata_h5ad_zarr(create_path(adata))
@@ -128,102 +162,31 @@ def register_for_tiledbsoma_store(
  adata.obs["lamin_run_uid"] = run.uid
  adata_objects.append(adata)
 
- registration_mapping = soma_io.register_anndatas(
- experiment_uri=storepath,
- adatas=adata_objects,
- measurement_name=measurement_name,
- obs_field_name=obs_field_name,
- var_field_name=var_field_name,
- append_obsm_varm=append_obsm_varm,
- context=ctx,
- )
-
- return registration_mapping, adata_objects
-
-
- def write_tiledbsoma_store(
- store: Artifact | UPathStr,
- adata: AnnData | UPathStr,
- run: Run | None = None,
- artifact_kwargs: dict | None = None,
- **kwargs,
- ) -> Artifact:
- """Write `AnnData` to `tiledbsoma.Experiment`.
-
- Reads `AnnData`, writes it to `tiledbsoma.Experiment` and creates `lamindb.Artifact`.
-
- See `tiledbsoma.io.from_h5ad
- <https://tiledbsoma.readthedocs.io/en/latest/_autosummary/tiledbsoma.io.from_h5ad.html>`__.
- """
- try:
- import tiledbsoma as soma
- import tiledbsoma.io as soma_io
- except ImportError as e:
- raise ImportError("Please install tiledbsoma: pip install tiledbsoma") from e
-
- from lamindb.core._data import get_run
-
- if artifact_kwargs is None:
- artifact_kwargs = {}
-
- appending: bool = kwargs.get("registration_mapping", None) is not None
- store_is_artifact: bool = isinstance(store, Artifact)
- if store_is_artifact:
- if not appending:
- raise ValueError(
- "Trying to append to an existing store without `registration_mapping`."
- )
- storepath = store.path
- else:
- storepath = create_path(store)
- add_run_uid: bool = not appending
-
- if not isinstance(adata, AnnData):
- # create_path is used
- # in case adata is somewhere in our managed s3 bucket or just in s3
- adata = _read_adata_h5ad_zarr(create_path(adata))
- elif add_run_uid and adata.is_view:
- raise ValueError(
- "Can not write from an `AnnData` view, please do `adata.copy()` before passing."
+ if appending or len(adata_objects) > 1:
+ registration_mapping = soma_io.register_anndatas(
+ experiment_uri=storepath if appending else None,
+ adatas=adata_objects,
+ measurement_name=measurement_name,
+ obs_field_name=obs_id_name,
+ var_field_name=var_id_name,
+ append_obsm_varm=append_obsm_varm,
+ context=ctx,
  )
-
- run = get_run(run)
-
- if add_run_uid:
- adata.obs["lamin_run_uid"] = run.uid
-
- if storepath.protocol == "s3":
- ctx = soma.SOMATileDBContext(tiledb_config=_tiledb_config_s3(storepath))
  else:
- ctx = None
-
- soma_io.from_anndata(storepath.as_posix(), adata, context=ctx, **kwargs)
-
- if add_run_uid:
- del adata.obs["lamin_run_uid"]
+ registration_mapping = None
+
+ for adata_obj in adata_objects:
+ soma_io.from_anndata(
+ storepath,
+ adata_obj,
+ measurement_name,
+ context=ctx,
+ obs_id_name=obs_id_name,
+ var_id_name=var_id_name,
+ registration_mapping=registration_mapping,
+ **kwargs,
+ )
 
- revises = None
- if appending:
- if store_is_artifact:
- revises = store
- else:
- from lamindb._artifact import (
- check_path_in_existing_storage,
- get_relative_path_to_directory,
- )
-
- storage = check_path_in_existing_storage(storepath)
- if isinstance(storage, Storage):
- search_by_key = get_relative_path_to_directory(
- path=storepath, directory=UPath(storage.root)
- ).as_posix()
- revises = Artifact.filter(
- key=search_by_key, is_latest=True, _key_is_virtual=False
- ).one_or_none()
- if revises is not None:
- logger.info(f"Assuming it is a new version of {revises}.")
-
- if revises is None:
- return Artifact(storepath, run=run, **artifact_kwargs)
- else:
- return Artifact(storepath, run=run, revises=revises, **artifact_kwargs)
+ return Artifact(
+ storepath, key=key, description=description, run=run, revises=revises, _uid=_uid
+ ).save()
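The two-step `register_for_tiledbsoma_store` / `write_tiledbsoma_store` API is collapsed into a single `save_tiledbsoma_experiment` call that writes the `AnnData` objects and saves the resulting artifact. A hedged usage sketch based on the signature above; the toy `AnnData` objects are placeholders, and the calls assume a connected instance and a tracked run (the function raises "Pass `run`" when `lamin_run_uid` is to be added and no run is available):

```python
import anndata as ad
import numpy as np
import pandas as pd
import lamindb as ln

adata = ad.AnnData(
    X=np.random.rand(3, 2).astype(np.float32),
    obs=pd.DataFrame(index=["c1", "c2", "c3"]),
    var=pd.DataFrame(index=["g1", "g2"]),
)

# write the AnnData into a new tiledbsoma.Experiment and save it as an artifact
artifact = ln.integrations.save_tiledbsoma_experiment(
    [adata],
    description="tiledbsoma experiment",
    measurement_name="RNA",
    run=ln.context.run,  # requires a tracked run, e.g. after ln.track()
)

# append more data later by passing the previous artifact as `revises`
adata_new = ad.AnnData(
    X=np.random.rand(2, 2).astype(np.float32),
    obs=pd.DataFrame(index=["c4", "c5"]),
    var=pd.DataFrame(index=["g1", "g2"]),
)
artifact_v2 = ln.integrations.save_tiledbsoma_experiment(
    [adata_new],
    revises=artifact,
    run=ln.context.run,
)
```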
lamindb/integrations/__init__.py CHANGED
@@ -4,6 +4,9 @@
  :toctree: .
 
  save_vitessce_config
+ save_tiledbsoma_experiment
  """
 
+ from lamindb.core.storage import save_tiledbsoma_experiment
+
  from ._vitessce import save_vitessce_config
lamindb-0.76.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lamindb
- Version: 0.76.2
+ Version: 0.76.3
  Summary: A data framework for biology.
  Author-email: Lamin Labs <open-source@lamin.ai>
  Requires-Python: >=3.8
@@ -9,8 +9,8 @@ Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
- Requires-Dist: lnschema_core==0.73.2
- Requires-Dist: lamindb_setup==0.76.6
+ Requires-Dist: lnschema_core==0.73.3
+ Requires-Dist: lamindb_setup==0.76.7
  Requires-Dist: lamin_utils==0.13.4
  Requires-Dist: lamin_cli==0.16.2
  Requires-Dist: rapidfuzz
@@ -24,7 +24,7 @@ Requires-Dist: pandas
  Requires-Dist: graphviz
  Requires-Dist: psycopg2-binary
  Requires-Dist: lamindb_setup[aws] ; extra == "aws"
- Requires-Dist: bionty==0.48.3 ; extra == "bionty"
+ Requires-Dist: bionty==0.49.0 ; extra == "bionty"
  Requires-Dist: pre-commit ; extra == "dev"
  Requires-Dist: nox ; extra == "dev"
  Requires-Dist: laminci>=0.3 ; extra == "dev"
lamindb-0.76.3.dist-info/RECORD CHANGED
@@ -1,17 +1,17 @@
- lamindb/__init__.py,sha256=OCLxYHwhRyaf6Zx-AKHTSEFWM-J342YRxDP2hhCIu6c,2309
- lamindb/_artifact.py,sha256=RygNHePDo895KG61FctFRdHO4jVa9cmj-yBBsyRE50A,43840
+ lamindb/__init__.py,sha256=iZlJlG4hUmSRNZvau2bAEG2Z9B-qnWWNazYYw5m3ciM,2351
+ lamindb/_artifact.py,sha256=6iD2qzZxWUHPiItUdEJt9BDbYVv6HF3bgpW2ZDjMf7s,44093
  lamindb/_can_validate.py,sha256=ne8-9sAG9vXnMXZqso6mYMt-xDg16h-gq6yHJXKFpuI,17690
  lamindb/_collection.py,sha256=F_VgpLBprrzUQ-tPngWvO9vFd7jX66MVwIi031JOris,14871
- lamindb/_curate.py,sha256=EAikoFg2rL8kRsWEIXH33P1VplSGAACMuFHanicguWw,55964
+ lamindb/_curate.py,sha256=_VISRycaPaburnJKuFM5XpEWqNGozdUcwBGMj_knZpM,58041
  lamindb/_feature.py,sha256=nZhtrH0ssoNls-hV-dkwfK9sKypg2El59R9qfarxfUE,5340
  lamindb/_feature_set.py,sha256=DmAy96V_RyV0yiyvWOCHgustXPsCaMwn4TrWwh2qDd8,8104
- lamindb/_filter.py,sha256=Fs7_x3tA_KVz0lYt0bJkWfjnEKAnkVN20CfKhp9aKWg,1403
- lamindb/_finish.py,sha256=wCjPmTBmL_z2WcZq9v6TZroZ_J_Te9KC5GzFuYdrIRc,9413
+ lamindb/_filter.py,sha256=9QHa9J-_6QeYPQATZpTun2VGiFofwzB0Km-KnKajHcM,663
+ lamindb/_finish.py,sha256=Yytv26ruL7EFxKewOrgqJxDircZVCpHOkwUjqdh4oMY,9411
  lamindb/_from_values.py,sha256=8kYpR8Q85EOaTcsPGjVHeZh29fGVgum5OEQf4Hsz_80,13533
  lamindb/_is_versioned.py,sha256=5lAnhTboltFkZCKVRV1uxkm0OCjJz_HKi3yQq_vEuMs,1306
  lamindb/_parents.py,sha256=eMavdd6IO6STOVJSlR2TzdRtx6sKYDKsMOtlR3DZlgQ,15599
  lamindb/_query_manager.py,sha256=Ipe85HL31DDwMbC8CN_1Svbwk48a_DUh_INGQdZL08I,4222
- lamindb/_query_set.py,sha256=mwNfGWcZbI5a1VfuiQzkweU4VQchE8Va8HiCIa0GNu4,11604
+ lamindb/_query_set.py,sha256=rykST1bxbk2_dt8Rx1TGTEHzXUc1yyc0rZWHO6bRGBA,12667
  lamindb/_record.py,sha256=MDLvcsZ23lZkThnrYQtj_uW-1BnxCPpyJ5mKl9KPs4A,21199
  lamindb/_run.py,sha256=5M_r1zGDv9HlqbqRKTWCYCOtENovJ-8mQ4kY7XqcLaU,1888
  lamindb/_save.py,sha256=Fu7Z84btKOXfTfpunKLni21s5ER2zIllqg5e3nPq-0A,10910
@@ -20,16 +20,16 @@ lamindb/_transform.py,sha256=Ck674iuKIp9HDpHuyFFM2xKJcFAUvQDRTnEdnx00Rq0,4114
  lamindb/_ulabel.py,sha256=XDSdZBXX_ki5s1vOths3MjF2x5DPggBR_PV_KF4SGyg,1611
  lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
  lamindb/_view.py,sha256=4Ln2ItTb3857PAI-70O8eJYqoTJ_NNFc7E_wds6OGns,2412
- lamindb/core/__init__.py,sha256=QePKN3dkGuNEfIc2CKwBt3Kcl39bbghX6XwXKpK3BA0,1491
- lamindb/core/_context.py,sha256=O4ovdIfvIC9E4oaqpSjBqwHOKThmP8_aod0MiOWaEwI,19517
+ lamindb/core/__init__.py,sha256=Nq-YbQT8X0KXoGXgctr9lm8H7J6sZeBBJ3hixGq0iuQ,1523
+ lamindb/core/_context.py,sha256=FamAbmq3VZYrgwZkaRtkBY3f5YepLtlblh8wyv4a1OQ,19546
  lamindb/core/_data.py,sha256=eocOXsZGu62LPtz6yIlvHhPSJTf3yF2ITZTffyflWYI,16269
  lamindb/core/_feature_manager.py,sha256=94tX6gq_Rx7fkDARQBxB2z92qUDpHocFSAdKv5izMT4,32490
  lamindb/core/_label_manager.py,sha256=jSLvxAuTuOYosSh_QJhIz3bqnbmWKP43y5abVMb-hOQ,9240
- lamindb/core/_mapped_collection.py,sha256=ST-cTfokIGkRadjSHEyvIK2san8cGr7WZpgbgs5neLI,22025
+ lamindb/core/_mapped_collection.py,sha256=1XzratL2IvRleqioNhWo26Lsuqkev8-HEImmHQxw9Kw,23266
  lamindb/core/_settings.py,sha256=GGEB8BU5GinIfD4ktr1Smp6GPHGaInu46MhP4EecZDY,5950
  lamindb/core/_sync_git.py,sha256=qc0yfPyKeG4uuNT_3qsv-mkIMqhLFqfXNeNVO49vV00,4547
  lamindb/core/_track_environment.py,sha256=STzEVUzOeUEWdX7WDJUkKH4u08k7eupRX6AXQwoVt14,828
- lamindb/core/exceptions.py,sha256=qNFYN5Jc7Y6kw4re-jsW0AEIprsV2HB1wTcJiO-u-ks,1278
+ lamindb/core/exceptions.py,sha256=TKyt1JOUwWIHbkCQjir_LQadf8960eQ95RWhSpz5_Bk,1288
  lamindb/core/fields.py,sha256=47Jmh3efUr5ZscgimR_yckY-I3cNf8ScLutbwKCK3j4,162
  lamindb/core/schema.py,sha256=KiYQn_8fokSMztTNDe6qUocZzKXWxU32H-YChNJv51A,1877
  lamindb/core/types.py,sha256=uVBqSVLoQaTkqP9nqsJhwU6yYnx8H5e6-ZxrB6vpOOw,265
@@ -37,11 +37,11 @@ lamindb/core/versioning.py,sha256=__SOHhk5OjMJgAUjixzp0GFcQrpjm8sBUXC9Fouk2AE,51
  lamindb/core/datasets/__init__.py,sha256=zRP98oqUAaXhqWyKMiH0s_ImVIuNeziQQ2kQ_t0f-DI,1353
  lamindb/core/datasets/_core.py,sha256=CgVF_pXuBXLElzubDMsl1DbpYOnXCY0HleITVvBKih4,19873
  lamindb/core/datasets/_fake.py,sha256=BZF9R_1iF0HDnvtZNqL2FtsjSMuqDIfuFxnw_LJYIh4,953
- lamindb/core/storage/__init__.py,sha256=9B3JqHydQnclP4NUY2kEc99K1cJBQZA4jyy3EmDxsYk,541
+ lamindb/core/storage/__init__.py,sha256=x-Bpxv1rx6uGog-chI0fdpt0UhkXQkwoQqye0TNk0WQ,514
  lamindb/core/storage/_anndata_accessor.py,sha256=jmEZeeZlt8-qBXRkU0tTA-t6dVEb_dH86wc1ok0jSRY,24030
  lamindb/core/storage/_anndata_sizes.py,sha256=aXO3OB--tF5MChenSsigW6Q-RuE8YJJOUTVukkLrv9A,1029
  lamindb/core/storage/_backed_access.py,sha256=YcWCeT2eligJGsBdjJS_-4el_eC9J088jxUWG9lsleM,3231
- lamindb/core/storage/_tiledbsoma.py,sha256=GNPG8pDYmZLFBSujsQ8VqlfWaFmDO58kgBXw6JESQJs,7812
+ lamindb/core/storage/_tiledbsoma.py,sha256=_ObAT3pIx0OA5uq55v6aWnIeyTTKjetjV3Gkk4oDWR0,6851
  lamindb/core/storage/_valid_suffixes.py,sha256=vUSeQ4s01rdhD_vSd6wKmFBsgMJAKkBMnL_T9Y1znMg,501
  lamindb/core/storage/_zarr.py,sha256=5ceEz6YIvgvUnVVNWhK5Z4W0WfrvyvY82Yna5jSX1_E,3661
  lamindb/core/storage/objects.py,sha256=OzvBCS-Urz5mr-O95qYt6RGBDDX5HmjfRRKWPPDn1ZE,1797
@@ -49,11 +49,11 @@ lamindb/core/storage/paths.py,sha256=woOrjtBhNnzm8DjF262ipwyZaQ_A-7MT2ZPoiefAfYk
  lamindb/core/subsettings/__init__.py,sha256=KFHPzIE7f7Bj4RgMjGQF4CjTdHVG_VNFBrCndo49ixo,198
  lamindb/core/subsettings/_creation_settings.py,sha256=54mfMH_osC753hpxcl7Dq1rwBD2LHnWveXtQpkLBITE,1194
  lamindb/core/subsettings/_transform_settings.py,sha256=4YbCuZtJo6zdytl6UQR4GvdDkTtT6SRBqVzofGzNOt8,583
- lamindb/integrations/__init__.py,sha256=MoLRD_qqX5WHFUAqHL6zoY_cDkWH0zimaGT_1CyXKnk,124
+ lamindb/integrations/__init__.py,sha256=RWGMYYIzr8zvmNPyVB4m-p4gMDhxdRbjES2Ed23OItw,215
  lamindb/integrations/_vitessce.py,sha256=aDCyZTddpMfUzjEo0DXQ3XlD--ebSqnsGiMxJBunX90,5141
  lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
  lamindb/setup/core/__init__.py,sha256=SevlVrc2AZWL3uALbE5sopxBnIZPWZ1IB0NBDudiAL8,167
- lamindb-0.76.2.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- lamindb-0.76.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
- lamindb-0.76.2.dist-info/METADATA,sha256=99nkmjVubVfQahE9S4aO1e8Ot6G4iFaPZltNBE-oFPo,2381
- lamindb-0.76.2.dist-info/RECORD,,
+ lamindb-0.76.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ lamindb-0.76.3.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+ lamindb-0.76.3.dist-info/METADATA,sha256=DoLPrLaR-VmA_7wyT_AM-fLUqi-0qL9OhxKT4U-wvWU,2381
+ lamindb-0.76.3.dist-info/RECORD,,