lamindb 1.10.1__py3-none-any.whl → 1.10.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -108,7 +108,7 @@ Backwards compatibility.
 
  # ruff: noqa: I001
  # denote a release candidate for 0.1.0 with 0.1rc1, 0.1a1, 0.1b1, etc.
- __version__ = "1.10.1"
+ __version__ = "1.10.2"
 
  import warnings
 
lamindb/core/_settings.py CHANGED
@@ -10,6 +10,7 @@ from lamindb_setup import settings as setup_settings
  from lamindb_setup._set_managed_storage import set_managed_storage
  from lamindb_setup.core import deprecated
  from lamindb_setup.core._settings_instance import sanitize_git_repo_url
+ from lamindb_setup.core._settings_storage import StorageSettings
 
  from .subsettings._annotation_settings import AnnotationSettings, annotation_settings
  from .subsettings._creation_settings import CreationSettings, creation_settings
@@ -18,7 +19,6 @@ if TYPE_CHECKING:
  from collections.abc import Mapping
  from pathlib import Path
 
- from lamindb_setup.core._settings_storage import StorageSettings
  from upath import UPath
 
 
@@ -193,13 +193,39 @@ class Settings:
 
  @storage.setter
  def storage(self, path_kwargs: str | Path | UPath | tuple[str | UPath, Mapping]):
+ import lamindb as ln
+
  if isinstance(path_kwargs, tuple):
  path, kwargs = path_kwargs
+ # we should ultimately deprecate passing host here, I think
  if isinstance(kwargs, str):
  kwargs = {"host": kwargs}
  else:
  path, kwargs = path_kwargs, {}
- set_managed_storage(path, **kwargs)
+ ssettings = StorageSettings(root=path) # there is no need to pass kwargs here!
+ exists = ln.Storage.filter(root=ssettings.root_as_str).one_or_none()
+ if exists is None:
+ response = input(
+ f"Storage location {ssettings.root_as_str} does not yet exist. Do you want to continue with creating it? (y/n)"
+ )
+ # logger.warning(f"deprecated call because storage location does **not yet** exist; going forward, please create through ln.Storage(root={path}).save() going forward")
+ if response != "y":
+ return None
+ set_managed_storage(path, **kwargs)
+ else:
+ if exists.instance_uid != ln_setup.settings.instance.uid:
+ raise ValueError(
+ f"Storage {ssettings.root_as_str} exists in another instance ({exists.instance_uid}), cannot write to it from here."
+ )
+ ssettings = StorageSettings(
+ root=exists.root,
+ region=exists.region,
+ uid=exists.uid,
+ instance_id=ln_setup.settings.instance._id,
+ )
+ ln_setup.settings.instance._storage = ssettings
+ kwargs.pop("host", None) # host is not needed for existing storage
+ settings.storage._set_fs_kwargs(**kwargs)
 
  @property
  def instance_uid(self) -> str:
@@ -223,6 +249,23 @@ class Settings:
 
  @local_storage.setter
  def local_storage(self, local_root: Path):
+ import lamindb as ln
+
+ # note duplication with storage setter!
+ ssettings = StorageSettings(root=local_root)
+ exists = ln.Storage.filter(root=ssettings.root_as_str).one_or_none()
+ if exists is None:
+ response = input(
+ f"Storage location {ssettings.root_as_str} does not yet exist. Do you want to continue with creating it? (y/n)"
+ )
+ # logger.warning(f"deprecated call because storage location does **not yet** exist; going forward, please create through ln.Storage(root={path}).save() going forward")
+ if response != "y":
+ return None
+ else:
+ if exists.instance_uid != ln_setup.settings.instance.uid:
+ raise ValueError(
+ f"Storage {ssettings.root_as_str} exists in another instance ({exists.instance_uid}), cannot write to it from here."
+ )
  ln_setup.settings.instance.local_storage = local_root
 
  @property
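
A minimal usage sketch of the changed storage setter (the bucket path is hypothetical; based only on the hunks above):

    import lamindb as ln

    # prompts "(y/n)" before creating a storage location that is not yet registered,
    # and switches directly if the root already exists in this instance
    ln.settings.storage = "s3://my-bucket/prefix"

    # the tuple form still forwards filesystem kwargs for the storage location
    ln.settings.storage = ("s3://my-bucket/prefix", {"anon": False})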
lamindb/core/storage/_anndata_accessor.py CHANGED
@@ -13,12 +13,17 @@ from anndata import __version__ as anndata_version
  from anndata._core.index import _normalize_indices
  from anndata._core.views import _resolve_idx
  from anndata._io.h5ad import read_dataframe_legacy as read_dataframe_legacy_h5
- from anndata._io.specs.registry import get_spec, read_elem, read_elem_partial
+ from anndata._io.specs.registry import (
+ get_spec,
+ read_elem,
+ read_elem_partial,
+ write_elem,
+ )
  from anndata.compat import _read_attr
  from fsspec.implementations.local import LocalFileSystem
  from fsspec.utils import infer_compression
  from lamin_utils import logger
- from lamindb_setup.core.upath import infer_filesystem
+ from lamindb_setup.core.upath import S3FSMap, infer_filesystem
  from packaging import version
  from upath import UPath
 
@@ -28,6 +33,8 @@ if TYPE_CHECKING:
  from fsspec.core import OpenFile
  from lamindb_setup.types import UPathStr
 
+ from lamindb import Artifact
+
 
  anndata_version_parse = version.parse(anndata_version)
 
@@ -300,6 +307,11 @@ if ZARR_INSTALLED:
 
  store = get_zarr_store(filepath)
  storage = zarr.open(store, mode=mode)
+ # zarr v2 re-initializes the mapper
+ # we need to put back the correct one
+ # S3FSMap is returned from get_zarr_store only for zarr v2
+ if isinstance(store, S3FSMap):
+ storage.store.map = store
  conn = None
  return conn, storage
 
@@ -438,9 +450,15 @@ def _try_backed_full(elem):
  return read_elem(elem)
 
 
+ def _to_index(elem: np.ndarray):
+ if elem.dtype in (np.float64, np.int64):
+ elem = elem.astype(str)
+ return pd.Index(elem)
+
+
  def _safer_read_index(elem):
  if isinstance(elem, GroupTypes):
- return pd.Index(read_elem(elem[_read_attr(elem.attrs, "_index")]))
+ return _to_index(read_elem(elem[_read_attr(elem.attrs, "_index")]))
  elif isinstance(elem, ArrayTypes):
  indices = None
  for index_name in ("index", "_index"):
@@ -450,7 +468,7 @@ def _safer_read_index(elem):
  if indices is not None and len(indices) > 0:
  if isinstance(indices[0], bytes):
  indices = np.frompyfunc(lambda x: x.decode("utf-8"), 1, 1)(indices)
- return pd.Index(indices)
+ return _to_index(indices)
  else:
  raise ValueError("Indices not found.")
  else:
@@ -479,33 +497,40 @@ class _MapAccessor:
  return descr
 
 
+ def _safer_read_df(elem, indices=None):
+ if indices is not None:
+ obj = registry.safer_read_partial(elem, indices=indices)
+ df = _records_to_df(obj)
+ else:
+ df = registry.read_dataframe(elem)
+ if df.index.dtype in (np.float64, np.int64):
+ df.index = df.index.astype(str)
+ return df
+
+
  class _AnnDataAttrsMixin:
  storage: StorageType
  _attrs_keys: Mapping[str, list]
 
  @cached_property
- def obs(self) -> pd.DataFrame:
+ def obs(self) -> pd.DataFrame | None:
  if "obs" not in self._attrs_keys:
  return None
  indices = getattr(self, "indices", None)
- if indices is not None:
- indices = (indices[0], slice(None))
- obj = registry.safer_read_partial(self.storage["obs"], indices=indices) # type: ignore
- return _records_to_df(obj)
- else:
- return registry.read_dataframe(self.storage["obs"]) # type: ignore
+ return _safer_read_df(
+ self.storage["obs"], # type: ignore
+ indices=(indices[0], slice(None)) if indices is not None else None,
+ )
 
  @cached_property
- def var(self) -> pd.DataFrame:
+ def var(self) -> pd.DataFrame | None:
  if "var" not in self._attrs_keys:
  return None
  indices = getattr(self, "indices", None)
- if indices is not None:
- indices = (indices[1], slice(None))
- obj = registry.safer_read_partial(self.storage["var"], indices=indices) # type: ignore
- return _records_to_df(obj)
- else:
- return registry.read_dataframe(self.storage["var"]) # type: ignore
+ return _safer_read_df(
+ self.storage["var"], # type: ignore
+ indices=(indices[1], slice(None)) if indices is not None else None,
+ )
 
  @cached_property
  def uns(self):
@@ -702,6 +727,7 @@ class AnnDataAccessor(_AnnDataAttrsMixin):
  connection: OpenFile | None,
  storage: StorageType,
  filename: str,
+ artifact: Artifact | None = None,
  ):
  self._conn = connection
  self.storage = storage
@@ -713,6 +739,11 @@ class AnnDataAccessor(_AnnDataAttrsMixin):
  self._obs_names = _safer_read_index(self.storage["obs"]) # type: ignore
  self._var_names = _safer_read_index(self.storage["var"]) # type: ignore
 
+ self._artifact = artifact # save artifact to update in write mode
+
+ self._updated = False # track updates in r+ mode for zarr
+
+ self._entered = False # check that the context manager is used
  self._closed = False
 
  def close(self):
@@ -723,11 +754,23 @@ class AnnDataAccessor(_AnnDataAttrsMixin):
  self._conn.close()
  self._closed = True
 
+ if self._updated and (artifact := self._artifact) is not None:
+ from lamindb.models.artifact import Artifact
+ from lamindb.models.sqlrecord import init_self_from_db
+
+ new_version = Artifact(
+ artifact.path, revises=artifact, _is_internal_call=True
+ ).save()
+ # note: sets _state.db = "default"
+ init_self_from_db(artifact, new_version)
+
  @property
  def closed(self):
  return self._closed
 
  def __enter__(self):
+ self._entered = True
+
  return self
 
  def __exit__(self, exc_type, exc_val, exc_tb):
@@ -763,6 +806,35 @@ class AnnDataAccessor(_AnnDataAttrsMixin):
  self.storage["raw"], None, None, self._obs_names, None, self.shape[0]
  )
 
+ def add_column(
+ self,
+ where: Literal["obs", "var"],
+ col_name: str,
+ col: np.ndarray | pd.Categorical,
+ ):
+ """Add a new column to .obs or .var of the underlying AnnData object."""
+ df_store = self.storage[where] # type: ignore
+ if getattr(df_store, "read_only", True):
+ raise ValueError(
+ "You can use .add_column(...) only with zarr in a writable mode."
+ )
+ write_elem(df_store, col_name, col)
+ df_store.attrs["column-order"] = df_store.attrs["column-order"] + [col_name]
+ # remind only once if this wasn't updated before and not in the context manager
+ if not self._updated and not self._entered and self._artifact is not None:
+ logger.important(
+ "Do not forget to call .close() after you finish "
+ f"working with this accessor for {self._name} "
+ "to automatically update the corresponding artifact."
+ )
+
+ self._updated = True
+ # reset the cached property
+ # todo: maybe just append the column if the df was already loaded
+ self.__dict__.pop(where, None)
+ # update the cached columns
+ self._attrs_keys[where].append(col_name)
+
 
  # get the number of observations in an anndata object or file fast and safely
  def _anndata_n_observations(object: UPathStr | AnnData) -> int | None:
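
A minimal usage sketch of the writable zarr accessor introduced above (the artifact and column values are illustrative; based only on these hunks):

    import numpy as np

    # artifact is assumed to be an AnnData artifact stored as .zarr
    with artifact.open(mode="r+") as adata:
        adata.add_column("obs", "batch", np.array(["batch1"] * adata.shape[0]))
    # closing the accessor creates a new artifact version from the updated store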
lamindb/core/storage/_backed_access.py CHANGED
@@ -4,6 +4,7 @@ from dataclasses import dataclass
  from pathlib import Path
  from typing import TYPE_CHECKING, Any, Callable, Literal
 
+ import h5py
  from anndata._io.specs.registry import get_spec
 
  from ._anndata_accessor import AnnDataAccessor, StorageType, registry
@@ -92,10 +93,10 @@ def backed_access(
  from lamindb.models import Artifact
 
  if isinstance(artifact_or_filepath, Artifact):
- objectpath, _ = filepath_from_artifact(
- artifact_or_filepath, using_key=using_key
- )
+ artifact = artifact_or_filepath
+ objectpath, _ = filepath_from_artifact(artifact, using_key=using_key)
  else:
+ artifact = None
  objectpath = artifact_or_filepath
  name = objectpath.name
  # ignore .gz, only check the real suffix
@@ -111,9 +112,11 @@ def backed_access(
  elif suffix in {".h5", ".hdf5", ".h5ad"}:
  conn, storage = registry.open("h5py", objectpath, mode=mode, **kwargs)
  elif suffix == ".zarr":
+ if mode not in {"r", "r+"}:
+ raise ValueError("`mode` should be either 'r' or 'r+' for zarr.")
  conn, storage = registry.open("zarr", objectpath, mode=mode, **kwargs)
  if "spatialdata_attrs" in storage.attrs:
- return SpatialDataAccessor(storage, name)
+ return SpatialDataAccessor(storage, name, artifact)
  elif len(df_suffixes := _flat_suffixes(objectpath)) == 1 and (
  df_suffix := df_suffixes.pop()
  ) in set(PYARROW_SUFFIXES).union(POLARS_SUFFIXES):
@@ -127,9 +130,9 @@
 
  is_anndata = suffix == ".h5ad" or get_spec(storage).encoding_type == "anndata"
  if is_anndata:
- if mode != "r":
- raise ValueError("Can only access `AnnData` with mode='r'.")
- return AnnDataAccessor(conn, storage, name)
+ if mode != "r" and isinstance(storage, h5py.Group):
+ raise ValueError("Can only access `hdf5` `AnnData` with mode='r'.")
+ return AnnDataAccessor(conn, storage, name, artifact)
  else:
  return BackedAccessor(conn, storage)
 
lamindb/core/storage/_spatialdata_accessor.py CHANGED
@@ -8,13 +8,22 @@ from ._anndata_accessor import AnnDataAccessor
  if TYPE_CHECKING:
  from zarr import Group
 
+ from lamindb import Artifact
+
 
  class _TablesAccessor:
- def __init__(self, tables: Group):
+ def __init__(self, tables: Group, artifact: Artifact | None = None):
  self._tables = tables
 
+ self._artifact = artifact
+
  def __getitem__(self, key: str) -> AnnDataAccessor:
- return AnnDataAccessor(connection=None, storage=self._tables[key], filename=key)
+ return AnnDataAccessor(
+ connection=None,
+ storage=self._tables[key],
+ filename=key,
+ artifact=self._artifact,
+ )
 
  def keys(self) -> list[str]:
  return list(self._tables.keys())
@@ -33,14 +42,16 @@ class SpatialDataAccessor:
  For now only allows to access `tables`.
  """
 
- def __init__(self, storage: Group, name: str):
+ def __init__(self, storage: Group, name: str, artifact: Artifact | None = None):
  self.storage = storage
  self._name = name
 
+ self._artifact = artifact
+
  @cached_property
  def tables(self) -> _TablesAccessor:
  """tables of the underlying SpatialData object."""
- return _TablesAccessor(self.storage["tables"])
+ return _TablesAccessor(self.storage["tables"], self._artifact)
 
  def __repr__(self):
  """Description of the SpatialDataAccessor object."""
lamindb/core/storage/_zarr.py CHANGED
@@ -37,6 +37,9 @@ def get_zarr_store(
  if isinstance(storepath, LocalPathClasses):
  store = storepath_str
  elif IS_ZARR_V3:
+ # todo: also check how to treat non-asynchronous filesystems
+ # zarr has something for this, using fsspec async wrapper
+ # check FsspecStore code
  store = zarr.storage.FsspecStore.from_upath(UPath(storepath, asynchronous=True))
  else:
  store = create_mapper(storepath.fs, storepath_str, check=check, create=create)
lamindb/curators/core.py CHANGED
@@ -364,12 +364,13 @@ class SlotsCurator(Curator):
  )
  break
 
- self._artifact.schema = self._schema
- self._artifact.save()
  cat_vectors = {}
  for curator in self._slots.values():
  for key, cat_vector in curator.cat._cat_vectors.items():
  cat_vectors[key] = cat_vector
+
+ self._artifact.schema = self._schema
+ self._artifact.save()
  return annotate_artifact( # type: ignore
  self._artifact,
  curator=self,
@@ -685,10 +686,11 @@ class DataFrameCurator(Curator):
  description=description,
  revises=revises,
  run=run,
- format=".csv" if key.endswith(".csv") else None,
+ format=".csv" if key is not None and key.endswith(".csv") else None,
  )
- self._artifact.schema = self._schema
- self._artifact.save()
+
+ self._artifact.schema = self._schema
+ self._artifact.save()
  return annotate_artifact( # type: ignore
  self._artifact,
  cat_vectors=self.cat._cat_vectors,
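
A minimal sketch of the call path affected above (df and schema are assumed to be defined; based only on these hunks):

    import lamindb as ln

    curator = ln.curators.DataFrameCurator(df, schema)
    curator.validate()
    # saving without a key no longer fails on the `.csv` format check
    artifact = curator.save_artifact(description="curated dataframe")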
lamindb/migrations/0118_alter_recordproject_value_projectrecord.py ADDED
@@ -0,0 +1,99 @@
+ # Generated by Django 5.2 on 2025-08-07 15:28
+
+ import django.db.models.deletion
+ import django.db.models.functions.datetime
+ from django.db import migrations, models
+
+ import lamindb.base.fields
+ import lamindb.base.users
+ import lamindb.models.run
+ import lamindb.models.sqlrecord
+
+
+ class Migration(migrations.Migration):
+ dependencies = [
+ ("lamindb", "0117_fix_artifact_storage_hash_unique_constraints"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="recordproject",
+ name="value",
+ field=lamindb.base.fields.ForeignKey(
+ blank=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="links_in_record",
+ to="lamindb.project",
+ ),
+ ),
+ migrations.CreateModel(
+ name="ProjectRecord",
+ fields=[
+ (
+ "created_at",
+ lamindb.base.fields.DateTimeField(
+ blank=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ db_index=True,
+ editable=False,
+ ),
+ ),
+ ("id", models.BigAutoField(primary_key=True, serialize=False)),
+ (
+ "created_by",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ default=lamindb.base.users.current_user_id,
+ editable=False,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="+",
+ to="lamindb.user",
+ ),
+ ),
+ (
+ "feature",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ default=None,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="links_projectrecord",
+ to="lamindb.feature",
+ ),
+ ),
+ (
+ "project",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="links_record",
+ to="lamindb.project",
+ ),
+ ),
+ (
+ "record",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="links_project",
+ to="lamindb.record",
+ ),
+ ),
+ (
+ "run",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ default=lamindb.models.run.current_run,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="+",
+ to="lamindb.run",
+ ),
+ ),
+ ],
+ options={
+ "unique_together": {("record", "project", "feature")},
+ },
+ bases=(lamindb.models.sqlrecord.IsLink, models.Model),
+ ),
+ ]
lamindb/migrations/0119_rename_records_project_linked_in_records.py ADDED
@@ -0,0 +1,26 @@
+ # Generated by Django 5.2 on 2025-08-09 13:31
+
+ from django.db import migrations, models
+
+
+ class Migration(migrations.Migration):
+ dependencies = [
+ ("lamindb", "0118_alter_recordproject_value_projectrecord"),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name="project",
+ old_name="records",
+ new_name="linked_in_records",
+ ),
+ migrations.AddField(
+ model_name="project",
+ name="records",
+ field=models.ManyToManyField(
+ related_name="projects",
+ through="lamindb.ProjectRecord",
+ to="lamindb.record",
+ ),
+ ),
+ ]
lamindb/migrations/0117_squashed.py → 0119_squashed.py CHANGED
@@ -1,4 +1,4 @@
- # Generated by Django 5.2 on 2025-07-26 18:58
+ # Generated by Django 5.2 on 2025-08-09 13:33
 
  import django.core.validators
  import django.db.models.deletion
@@ -139,6 +139,8 @@ class Migration(migrations.Migration):
  ("lamindb", "0115_alter_space_uid"),
  ("lamindb", "0116_remove_artifact_unique_artifact_storage_key_hash_and_more"),
  ("lamindb", "0117_fix_artifact_storage_hash_unique_constraints"),
+ ("lamindb", "0118_alter_recordproject_value_projectrecord"),
+ ("lamindb", "0119_rename_records_project_linked_in_records"),
  ]
 
  dependencies = [] # type: ignore
@@ -1522,6 +1524,60 @@ class Migration(migrations.Migration):
  },
  bases=(lamindb.models.can_curate.CanCurate, models.Model),
  ),
+ migrations.CreateModel(
+ name="ProjectRecord",
+ fields=[
+ (
+ "created_at",
+ lamindb.base.fields.DateTimeField(
+ blank=True,
+ db_default=django.db.models.functions.datetime.Now(),
+ db_index=True,
+ editable=False,
+ ),
+ ),
+ ("id", models.BigAutoField(primary_key=True, serialize=False)),
+ (
+ "feature",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ default=None,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="links_projectrecord",
+ to="lamindb.feature",
+ ),
+ ),
+ (
+ "project",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="links_record",
+ to="lamindb.project",
+ ),
+ ),
+ (
+ "record",
+ lamindb.base.fields.ForeignKey(
+ blank=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="links_project",
+ to="lamindb.record",
+ ),
+ ),
+ ],
+ bases=(lamindb.models.sqlrecord.IsLink, models.Model),
+ ),
+ migrations.AddField(
+ model_name="project",
+ name="records",
+ field=models.ManyToManyField(
+ related_name="projects",
+ through="lamindb.ProjectRecord",
+ to="lamindb.record",
+ ),
+ ),
  migrations.AddField(
  model_name="artifactrecord",
  name="record",
@@ -1651,7 +1707,7 @@ class Migration(migrations.Migration):
  lamindb.base.fields.ForeignKey(
  blank=True,
  on_delete=django.db.models.deletion.PROTECT,
- related_name="links_record",
+ related_name="links_in_record",
  to="lamindb.project",
  ),
  ),
@@ -1663,7 +1719,7 @@
  ),
  migrations.AddField(
  model_name="project",
- name="records",
+ name="linked_in_records",
  field=models.ManyToManyField(
  related_name="linked_projects",
  through="lamindb.RecordProject",
@@ -2198,6 +2254,18 @@
  to="lamindb.run",
  ),
  ),
+ migrations.AddField(
+ model_name="projectrecord",
+ name="run",
+ field=lamindb.base.fields.ForeignKey(
+ blank=True,
+ default=lamindb.models.run.current_run,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="+",
+ to="lamindb.run",
+ ),
+ ),
  migrations.AddField(
  model_name="project",
  name="run",
@@ -4174,6 +4242,18 @@
  to="lamindb.user",
  ),
  ),
+ migrations.AddField(
+ model_name="projectrecord",
+ name="created_by",
+ field=lamindb.base.fields.ForeignKey(
+ blank=True,
+ default=lamindb.base.users.current_user_id,
+ editable=False,
+ on_delete=django.db.models.deletion.PROTECT,
+ related_name="+",
+ to="lamindb.user",
+ ),
+ ),
  migrations.AddField(
  model_name="project",
  name="created_by",
@@ -4427,6 +4507,10 @@
  name="runfeaturevalue",
  unique_together={("run", "featurevalue")},
  ),
+ migrations.AlterUniqueTogether(
+ name="projectrecord",
+ unique_together={("record", "project", "feature")},
+ ),
  migrations.AlterUniqueTogether(
  name="personproject",
  unique_together={("person", "project")},
lamindb/models/artifact.py CHANGED
@@ -542,6 +542,7 @@ def log_storage_hint(
  def data_is_scversedatastructure(
  data: ScverseDataStructures | UPathStr,
  structure_type: Literal["AnnData", "MuData", "SpatialData"] | None = None,
+ cloud_warning: bool = True,
  ) -> bool:
  """Determine whether a specific in-memory object or a UPathstr is any or a specific scverse data structure."""
  file_suffix = None
@@ -580,7 +581,7 @@
  )
  == data_type
  )
- else:
+ elif cloud_warning:
  logger.warning(
  f"we do not check whether cloud zarr is {structure_type}"
  )
@@ -600,6 +601,7 @@ def data_is_soma_experiment(data: SOMAExperiment | UPathStr) -> bool:
  def _check_otype_artifact(
  data: UPathStr | pd.DataFrame | ScverseDataStructures,
  otype: str | None = None,
+ cloud_warning: bool = True,
  ) -> str:
  if otype is None:
  if isinstance(data, pd.DataFrame):
@@ -608,15 +610,15 @@
  return otype
 
  data_is_path = isinstance(data, (str, Path))
- if data_is_scversedatastructure(data, "AnnData"):
+ if data_is_scversedatastructure(data, "AnnData", cloud_warning):
  if not data_is_path:
  logger.warning("data is an AnnData, please use .from_anndata()")
  otype = "AnnData"
- elif data_is_scversedatastructure(data, "MuData"):
+ elif data_is_scversedatastructure(data, "MuData", cloud_warning):
  if not data_is_path:
  logger.warning("data is a MuData, please use .from_mudata()")
  otype = "MuData"
- elif data_is_scversedatastructure(data, "SpatialData"):
+ elif data_is_scversedatastructure(data, "SpatialData", cloud_warning):
  if not data_is_path:
  logger.warning("data is a SpatialData, please use .from_spatialdata()")
  otype = "SpatialData"
@@ -1424,7 +1426,9 @@ class Artifact(SQLRecord, IsVersioned, TracksRun, TracksUpdates):
  # issue in Groovy / nf-lamin producing malformed S3 paths
  # https://laminlabs.slack.com/archives/C08J590666Q/p1751315027830849?thread_ts=1751039961.479259&cid=C08J590666Q
  data = data.replace("s3:///", "s3://")
- otype = _check_otype_artifact(data=data, otype=otype)
+ otype = _check_otype_artifact(
+ data=data, otype=otype, cloud_warning=not _is_internal_call
+ )
  if "type" in kwargs:
  logger.warning("`type` will be removed soon, please use `kind`")
  kind = kwargs.pop("type")
@@ -2285,17 +2289,19 @@
  ):
  """Open a dataset for streaming.
 
- Works for `AnnData` (`.h5ad` and `.zarr`), generic `hdf5` and `zarr`,
- `tiledbsoma` objects (`.tiledbsoma`), `pyarrow` or `polars` compatible formats
+ Works for `AnnData` (`.h5ad` and `.zarr`), `SpatialData` (`.zarr`),
+ generic `hdf5` and `zarr`, `tiledbsoma` objects (`.tiledbsoma`),
+ `pyarrow` or `polars` compatible formats
  (`.parquet`, `.csv`, `.ipc` etc. files or directories with such files).
 
  Args:
- mode: can only be `"w"` (write mode) for `tiledbsoma` stores,
+ mode: can be `"r"` or `"w"` (write mode) for `tiledbsoma` stores,
+ `"r"` or `"r+"` for `AnnData` or `SpatialData` `zarr` stores,
  otherwise should be always `"r"` (read-only mode).
  engine: Which module to use for lazy loading of a dataframe
  from `pyarrow` or `polars` compatible formats.
  This has no effect if the artifact is not a dataframe, i.e.
- if it is an `AnnData,` `hdf5`, `zarr` or `tiledbsoma` object.
+ if it is an `AnnData,` `hdf5`, `zarr`, `tiledbsoma` object etc.
  is_run_input: Whether to track this artifact as run input.
  **kwargs: Keyword arguments for the accessor, i.e. `h5py` or `zarr` connection,
  `pyarrow.dataset.dataset`, `polars.scan_*` function.
@@ -2339,7 +2345,8 @@
  s + ".gz" for s in PYARROW_SUFFIXES
  ) # this doesn't work for externally gzipped files, REMOVE LATER
  )
- if self.suffix not in suffixes:
+ suffix = self.suffix
+ if suffix not in suffixes:
  raise ValueError(
  "Artifact should have a zarr, h5, tiledbsoma object"
  " or a compatible `pyarrow.dataset.dataset` or `polars.scan_*` directory"
@@ -2348,23 +2355,28 @@
  f" Or no suffix for a folder with {', '.join(df_suffixes)} files"
  " (no mixing allowed)."
  )
- if self.suffix != ".tiledbsoma" and self.key != "soma" and mode != "r":
- raise ValueError(
- "Only a tiledbsoma store can be openened with `mode!='r'`."
- )
-
  using_key = settings._using_key
  filepath, cache_key = filepath_cache_key_from_artifact(
  self, using_key=using_key
  )
+
  is_tiledbsoma_w = (
- filepath.name == "soma" or self.suffix == ".tiledbsoma"
+ filepath.name == "soma" or suffix == ".tiledbsoma"
  ) and mode == "w"
+ is_zarr_w = suffix == ".zarr" and mode == "r+"
+
+ if mode != "r" and not (is_tiledbsoma_w or is_zarr_w):
+ raise ValueError(
+ f"It is not allowed to open a {suffix} object with mode='{mode}'. "
+ "You can open all supported formats with mode='r', "
+ "a tiledbsoma store with mode='w', "
+ "AnnData or SpatialData zarr store with mode='r+'."
+ )
  # consider the case where an object is already locally cached
  localpath = setup_settings.paths.cloud_to_local_no_update(
  filepath, cache_key=cache_key
  )
- if is_tiledbsoma_w:
+ if is_tiledbsoma_w or is_zarr_w:
  open_cache = False
  else:
  open_cache = not isinstance(
@@ -2395,9 +2407,7 @@
  else:
  localpath.unlink(missing_ok=True)
  else:
- access = backed_access(
- filepath, mode, engine, using_key=using_key, **kwargs
- )
+ access = backed_access(self, mode, engine, using_key=using_key, **kwargs)
  if is_tiledbsoma_w:
 
  def finalize():
@@ -2413,6 +2423,7 @@
  new_version = Artifact(
  filepath, revises=self, _is_internal_call=True
  ).save()
+ # note: sets _state.db = "default"
  init_self_from_db(self, new_version)
 
  if localpath != filepath and localpath.exists():
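
A minimal sketch of the mode rules enforced above (the artifact names are hypothetical; based only on these hunks):

    # read-only streaming works for all supported formats
    with h5ad_artifact.open(mode="r") as adata:
        adata.obs.head()

    # write modes are limited to these two cases
    soma_artifact.open(mode="w")     # tiledbsoma store
    zarr_artifact.open(mode="r+")    # AnnData or SpatialData zarr store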
lamindb/models/project.py CHANGED
@@ -286,10 +286,14 @@ class Project(SQLRecord, CanCurate, TracksRun, TracksUpdates, ValidateFields):
  Schema, through="SchemaProject", related_name="projects"
  )
  """Linked schemas."""
- records: Record = models.ManyToManyField(
+ linked_in_records: Record = models.ManyToManyField(
  Record, through="RecordProject", related_name="linked_projects"
  )
  """Linked records."""
+ records: Record = models.ManyToManyField(
+ Record, through="ProjectRecord", related_name="projects"
+ )
+ """Annotated record."""
  collections: Collection = models.ManyToManyField(
  Collection, through="CollectionProject", related_name="projects"
  )
@@ -440,11 +444,29 @@ class RecordReference(BaseSQLRecord, IsLink):
  unique_together = ("record", "feature", "value")
 
 
+ # for annotation of records with projects, RecordProject is for storing project values
+ class ProjectRecord(BaseSQLRecord, IsLink, TracksRun):
+ id: int = models.BigAutoField(primary_key=True)
+ record: Record = ForeignKey(Record, CASCADE, related_name="links_project")
+ project: Project = ForeignKey(Project, PROTECT, related_name="links_record")
+ feature: Feature | None = ForeignKey(
+ Feature,
+ PROTECT,
+ null=True,
+ default=None,
+ related_name="links_projectrecord",
+ )
+
+ class Meta:
+ # can have the same label linked to the same artifact if the feature is different
+ unique_together = ("record", "project", "feature")
+
+
  class RecordProject(BaseSQLRecord, IsLink):
  id: int = models.BigAutoField(primary_key=True)
  record: Record = ForeignKey(Record, CASCADE, related_name="values_project")
  feature: Feature = ForeignKey(Feature, PROTECT, related_name="links_recordproject")
- value: Project = ForeignKey(Project, PROTECT, related_name="links_record")
+ value: Project = ForeignKey(Project, PROTECT, related_name="links_in_record")
 
  class Meta:
  unique_together = ("record", "feature", "value")
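
A minimal sketch of the new Project.records relationship (the names are hypothetical; based only on these hunks):

    import lamindb as ln

    project = ln.Project(name="my-project").save()
    record = ln.Record(name="sample-1").save()
    # annotate the record with the project via the new ProjectRecord link table
    project.records.add(record)
    # reverse accessor from related_name="projects"
    record.projects.df()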
lamindb-1.10.1.dist-info/METADATA → lamindb-1.10.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: lamindb
- Version: 1.10.1
+ Version: 1.10.2
  Summary: A data framework for biology.
  Author-email: Lamin Labs <open-source@lamin.ai>
  Requires-Python: >=3.10,<3.14
@@ -9,6 +9,7 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
+ License-File: LICENSE
  Requires-Dist: lamin_utils==0.15.0
  Requires-Dist: lamin_cli==1.6.1
  Requires-Dist: lamindb_setup[aws]==1.9.1
lamindb-1.10.1.dist-info/RECORD → lamindb-1.10.2.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- lamindb/__init__.py,sha256=cWTeR7mqoNI5I8jjkRldYjJOqmaCsy7n8wCvbbWbeNI,2905
+ lamindb/__init__.py,sha256=pY7V83g2bBifDvpSqVwvAWDmkIBDXUh_fBMIG70o7Zo,2905
  lamindb/_finish.py,sha256=3HVKRw27rQs_S2ior-JX2IU0vUkeG5a6p8XgEgdT8-U,21219
  lamindb/_tracked.py,sha256=-wK7BJv30nf4v2_nH5qDCyxHvug7ih6duQNGxDrj3UE,4447
  lamindb/_view.py,sha256=cod1RnZoLyzMVJcjWjytg78Sf4qsR8IAdqpwzsi8FTw,4950
@@ -14,21 +14,21 @@ lamindb/core/__init__.py,sha256=I9F-GugBMZwFLpUPb1MXyLfccIVAj021Gb_00h_18MY,633
  lamindb/core/_compat.py,sha256=NLnKk1qk4xdgMV-QwFDnBnbio02ujjlF86icvhpdv4c,2029
  lamindb/core/_context.py,sha256=5edP0M5_m8zgNXu7-1XaRLOv20i5LaWHBkEvMHsSiB0,40376
  lamindb/core/_mapped_collection.py,sha256=osquwC6ee0wJ_I6O-8AZwnQUa_r9zqa0MN82Q-nBI3Y,25746
- lamindb/core/_settings.py,sha256=Dj44Xh5x3OEpTPvG0twREcIv6X6Er5NzDZO86i8Cq9g,8678
+ lamindb/core/_settings.py,sha256=urrw4PtH6XR5MXyziggkzdCHMG2qUI51ZF6ISFOtA7w,10990
  lamindb/core/_sync_git.py,sha256=Z7keuyS5X7CAj285sEbZIFExZF9mtjGH8DzKwz3xhHw,5881
  lamindb/core/_track_environment.py,sha256=fa0-qKEe0BpL79_nsDUDtbg1iA3VpJTh0RCOGdc2XOA,974
  lamindb/core/exceptions.py,sha256=FMEoSvT3FvtLkxQAt2oDXPeaPem8V5x5UBbTsPFYU5w,53
  lamindb/core/loaders.py,sha256=QH3r3Q_aPrbkgiWaV30TmxcLCs6zZFQLRW442DsTrNU,5456
  lamindb/core/types.py,sha256=_u8amXATAZN-nNGNWYGmyqYDcHbT_i0NZeLRhm_-ygI,378
  lamindb/core/storage/__init__.py,sha256=2gJyn9w6rzv3oPHjY756OfQYrLEXb37YuKMqh6ZjbF8,542
- lamindb/core/storage/_anndata_accessor.py,sha256=NOAqI-SHS8IwM0MwsZmRUtqG8r4tBTJ9T8888lhvQkw,26611
- lamindb/core/storage/_backed_access.py,sha256=5y3C_D1g89d831FAXr8-WashmmL3cdNfTR5s61vrMdA,7858
+ lamindb/core/storage/_anndata_accessor.py,sha256=0u6U7wNUc__yb2addm1Isp_ITEABuy-YHrGSF3oFz7g,29092
+ lamindb/core/storage/_backed_access.py,sha256=f5BOAEFSfiFQcZQiok-vTpSUqF6BbSyF6UKkYLC_chc,8077
  lamindb/core/storage/_polars_lazy_df.py,sha256=_JhnU8RmoTzU2kp7kyHRSUTAkFhpCJo6NNnHxvBXQv4,2929
  lamindb/core/storage/_pyarrow_dataset.py,sha256=lRYYt7edUtwauhxd7RwFud6YPDbz2PFvYYgqLhfapfk,1398
- lamindb/core/storage/_spatialdata_accessor.py,sha256=e5jqRMWAb2zA0srHOTpgmUdFZk54g6gVEbkEBqq6Ndk,1446
+ lamindb/core/storage/_spatialdata_accessor.py,sha256=mn3iiNIBixGpBJoMmkHCvcoUut7Ttwy9QxGK7P5ywNo,1718
  lamindb/core/storage/_tiledbsoma.py,sha256=EdzgGVTamB7Ef31doGFWwIOhTd9TNrBfNUplDWSp6wo,11386
  lamindb/core/storage/_valid_suffixes.py,sha256=vUSeQ4s01rdhD_vSd6wKmFBsgMJAKkBMnL_T9Y1znMg,501
- lamindb/core/storage/_zarr.py,sha256=PncgrnA3XX2D01AQsTatCImEpbJUriEo00LHmARCkIE,4265
+ lamindb/core/storage/_zarr.py,sha256=VHT01D_QktmZB4r92XSe8FYveoPkFtn_8XAd3hV1GSA,4433
  lamindb/core/storage/objects.py,sha256=ISVjBuXPQENZ2XVQDvfX-HZSyDjQi_OGnoJXQmI---Y,3282
  lamindb/core/storage/paths.py,sha256=C6qAFtWFvFA2RtA9M4KGxT87wmZWqB9b1V5jOCY0ALc,7141
  lamindb/core/subsettings/__init__.py,sha256=f_vOqZOjVGez8pLmtrUuc_ayDGXl07t_ZY-P2Cedxbo,201
@@ -36,7 +36,7 @@ lamindb/core/subsettings/_annotation_settings.py,sha256=o-yTYw-NmjFmtehbKU8qnf7t
  lamindb/core/subsettings/_creation_settings.py,sha256=NGHWKqCFSzVNBxAr2VnmdYguiFdW29XUK7T9wRsVshg,906
  lamindb/curators/__init__.py,sha256=WLnaVxrhQGZxGB3pjg-SM4oUu6DaKA78S_J3BfVKLEg,496
  lamindb/curators/_legacy.py,sha256=V2zF1J7jN2Ry7ascH06JytrAQzeOiirg7e4a9d0hpvM,54768
- lamindb/curators/core.py,sha256=OfCSLw6Ngu_HmCSjwRUZrsVwcdq-ge9JL1Wfl0djiyM,68832
+ lamindb/curators/core.py,sha256=2ZzZG8Xyg3N7UdKrQwhgFjtrd2MSz2fKJ0D3WsInD2s,68838
  lamindb/examples/__init__.py,sha256=f0pBxijIA26ULUBnsP2sa1e4CLqMTeUUEqNeINJIf9o,179
  lamindb/examples/cellxgene/__init__.py,sha256=Xzxfi_NQcWdK-RrbNFdlIFQFVPG8Qy18ekYx3sOQZeM,161
  lamindb/examples/cellxgene/_cellxgene.py,sha256=HP_skQJbNYDEYhiLUpaP9kfmtPivmgTuGyoWNLs07nw,8844
@@ -105,7 +105,9 @@ lamindb/migrations/0114_alter_run__status_code.py,sha256=KkGecSBJElA3LBnhSK5_rFp
  lamindb/migrations/0115_alter_space_uid.py,sha256=18fCP8d31Ox1KxSSmfzU-W3lSpS3xtiaBNbPeHQiuTM,1332
  lamindb/migrations/0116_remove_artifact_unique_artifact_storage_key_hash_and_more.py,sha256=vCAeIg8TdtVaC2kZIq3KTY5hcz_RiXh_44Sczv75RA4,1562
  lamindb/migrations/0117_fix_artifact_storage_hash_unique_constraints.py,sha256=OqgUmf9_TeNwTddwcwZdJYfpN6cpFYBaw5_KitYumNM,1033
- lamindb/migrations/0117_squashed.py,sha256=i9rdXIVWkgNaZTaaFs-Ozi_0CC-pKHOdF2ZM37M4fOY,163772
+ lamindb/migrations/0118_alter_recordproject_value_projectrecord.py,sha256=aNC_o3xfH9wk1BTbOWKbXeV4r_IPXgytYBRTFi_U-MM,3493
+ lamindb/migrations/0119_rename_records_project_linked_in_records.py,sha256=Feh4rCfoGD4kiGG-sk_IQx7cplDn-yVIlzI5FzE8utI,688
+ lamindb/migrations/0119_squashed.py,sha256=s4qKQa6EMR3fontNSSd18LsamK0HkfqPJtolwqWcAVI,166916
  lamindb/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lamindb/models/__init__.py,sha256=mtjZH0x31aV463YaUef8ZvdQHlGa6SZr_thsrlbdkCg,2419
  lamindb/models/_describe.py,sha256=OD-MokYrB6mfzkmFdTkRj0J93LyqxFOgJi_5LvOXr1A,10915
@@ -115,13 +117,13 @@ lamindb/models/_from_values.py,sha256=cCGPMDlAbBrdhFW-XrIQVZ10q1LNg4MycYPLOkF0fT
  lamindb/models/_is_versioned.py,sha256=Th2_cBf9UWh27E6ANxg6LGmjBOumXFy7AjH0GG4FoXA,7601
  lamindb/models/_label_manager.py,sha256=O3KaaTEVId5ky3h0aCGg1kDoFFfrovFYlV82YsZZyIs,12127
  lamindb/models/_relations.py,sha256=zHYLujy9vkuB9jVq5844TpzLSP__iYNCQcsl-FzK1Jw,3700
- lamindb/models/artifact.py,sha256=yBNgOJTokGKy5EUDzugzs1bXBObGKSqDLJ2nZLk6M3s,117610
+ lamindb/models/artifact.py,sha256=NZtF9x7z3O7pSaCGQwA9V-lnoSvaHfusEMkuWjxora4,118203
  lamindb/models/artifact_set.py,sha256=VOZEGDo3m_9Yg_ftx3I2fwdydjHN61X_qV18N6xG4kM,4117
  lamindb/models/can_curate.py,sha256=_6ymHhtc9zpU8O6ozqNDqn7jh87C9JisU_dl2gH2Hpo,29329
  lamindb/models/collection.py,sha256=ZMBaDqQtil9yWqDC_twKbavGkdAulcu9j2WlfHADxn0,28358
  lamindb/models/feature.py,sha256=eIskKsSBkwW6ftfOZ_ngeqD9Pi6Y70cY_m3WHTCXNU8,38031
  lamindb/models/has_parents.py,sha256=NRNshrWCX7G3nnM3lnnHQ3Ho216T3EJfgakY6KlTvt8,20301
- lamindb/models/project.py,sha256=4w2rMZMPrLmvirvJQanhKfqwGqEnpY42iaOWi99YPw0,17345
+ lamindb/models/project.py,sha256=AzfkZsMIUtTM2QGz_jhS4F5zt53V-j2l4Sz-b6NvW34,18192
  lamindb/models/query_manager.py,sha256=EzbyNA5zWUbLYH5yJ7dIC90j1teVoQHrXpRLjCfBEao,11036
  lamindb/models/query_set.py,sha256=d27m8UF8QZAzHZBEVE1QdJRtx9wCzYgkDdHGCATBM48,34815
  lamindb/models/record.py,sha256=WW6iuQT8M4KHcpZoYGSTjND10hvDUIff7DOkyD5d2Fc,12070
@@ -137,7 +139,7 @@ lamindb/setup/_switch.py,sha256=njZJN__JOhVrBFGClQG1wobdhJJp6l_XzPGKtKSCrfU,434
  lamindb/setup/core/__init__.py,sha256=SevlVrc2AZWL3uALbE5sopxBnIZPWZ1IB0NBDudiAL8,167
  lamindb/setup/errors/__init__.py,sha256=bAHTxOUJW1rm4zpF0Pvqkftn8W6iMGnQ-uyNBu13Nfg,171
  lamindb/setup/types/__init__.py,sha256=ATaosOi6q-cDWB52T69_sRmLMqj8cHfc-vljzZsrJNw,169
- lamindb-1.10.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- lamindb-1.10.1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
- lamindb-1.10.1.dist-info/METADATA,sha256=UYl8nK7xlSZg0L4GaKYNkMjXuNs36LvfgSJFq-StSho,5199
- lamindb-1.10.1.dist-info/RECORD,,
+ lamindb-1.10.2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ lamindb-1.10.2.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ lamindb-1.10.2.dist-info/METADATA,sha256=n3KlfwBGbuxGLNq-qpfSWxHaU7ZmEhjsHwfrvHrBchk,5221
+ lamindb-1.10.2.dist-info/RECORD,,
lamindb-1.10.1.dist-info/WHEEL → lamindb-1.10.2.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: flit 3.10.1
+ Generator: flit 3.12.0
  Root-Is-Purelib: true
  Tag: py3-none-any