lamindb 0.76.5__py3-none-any.whl → 0.76.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -41,7 +41,7 @@ Modules and settings.
41
41
  """
42
42
 
43
43
  # denote a release candidate for 0.1.0 with 0.1rc1, 0.1a1, 0.1b1, etc.
44
- __version__ = "0.76.5"
44
+ __version__ = "0.76.6"
45
45
 
46
46
  import os as _os
47
47
 
lamindb/_artifact.py CHANGED
@@ -28,7 +28,7 @@ from lnschema_core.types import (
28
28
  )
29
29
 
30
30
  from lamindb._utils import attach_func_to_class_method
31
- from lamindb.core._data import HasFeatures, _track_run_input
31
+ from lamindb.core._data import _track_run_input, describe, view_lineage
32
32
  from lamindb.core._settings import settings
33
33
  from lamindb.core.exceptions import IntegrityError
34
34
  from lamindb.core.storage import (
@@ -334,7 +334,7 @@ def get_artifact_kwargs_from_data(
334
334
  # save the information that this artifact was previously
335
335
  # produced by another run
336
336
  if artifact.run is not None:
337
- artifact.run.output_artifacts_with_later_updates.add(artifact)
337
+ artifact.run._output_artifacts_with_later_updates.add(artifact)
338
338
  # update the run of the artifact with the latest run
339
339
  stat_or_artifact.run = run
340
340
  stat_or_artifact.transform = run.transform
@@ -497,13 +497,6 @@ def _check_accessor_artifact(data: Any, accessor: str | None = None):
497
497
  return accessor
498
498
 
499
499
 
500
- def update_attributes(data: HasFeatures, attributes: Mapping[str, str]):
501
- for key, value in attributes.items():
502
- if getattr(data, key) != value:
503
- logger.warning(f"updated {key} from {getattr(data, key)} to {value}")
504
- setattr(data, key, value)
505
-
506
-
507
500
  def __init__(artifact: Artifact, *args, **kwargs):
508
501
  artifact.features = FeatureManager(artifact)
509
502
  artifact.params = ParamManager(artifact)
@@ -608,7 +601,7 @@ def __init__(artifact: Artifact, *args, **kwargs):
608
601
 
609
602
  # an object with the same hash already exists
610
603
  if isinstance(kwargs_or_artifact, Artifact):
611
- from ._record import init_self_from_db
604
+ from ._record import init_self_from_db, update_attributes
612
605
 
613
606
  init_self_from_db(artifact, kwargs_or_artifact)
614
607
  # adding "key" here is dangerous because key might be auto-populated
@@ -908,14 +901,6 @@ def replace(
908
901
  self._to_store = not check_path_in_storage
909
902
 
910
903
 
911
- # deprecated
912
- def backed(
913
- self, mode: str = "r", is_run_input: bool | None = None
914
- ) -> AnnDataAccessor | BackedAccessor | SOMACollection | SOMAExperiment:
915
- logger.warning("`.backed()` is deprecated, use `.open()`!'")
916
- return self.open(mode, is_run_input)
917
-
918
-
919
904
  # docstring handled through attach_func_to_class_method
920
905
  def open(
921
906
  self, mode: str = "r", is_run_input: bool | None = None
@@ -1185,5 +1170,5 @@ for name in METHOD_NAMES:
1185
1170
  Artifact._delete_skip_storage = _delete_skip_storage
1186
1171
  Artifact._save_skip_storage = _save_skip_storage
1187
1172
  Artifact.path = path
1188
- Artifact.backed = backed
1189
- Artifact.view_lineage = HasFeatures.view_lineage
1173
+ Artifact.describe = describe
1174
+ Artifact.view_lineage = view_lineage
lamindb/_collection.py CHANGED
@@ -17,19 +17,17 @@ from lamindb_setup.core.hashing import hash_set
17
17
  from lnschema_core.models import (
18
18
  Collection,
19
19
  CollectionArtifact,
20
- FeatureManager,
21
20
  FeatureSet,
22
21
  )
23
22
  from lnschema_core.types import VisibilityChoice
24
23
 
25
- from lamindb._artifact import update_attributes
26
24
  from lamindb._utils import attach_func_to_class_method
27
- from lamindb.core._data import _track_run_input
25
+ from lamindb.core._data import _track_run_input, describe, view_lineage
28
26
  from lamindb.core._mapped_collection import MappedCollection
29
27
  from lamindb.core.versioning import process_revises
30
28
 
31
29
  from . import Artifact, Run
32
- from ._record import init_self_from_db
30
+ from ._record import init_self_from_db, update_attributes
33
31
  from .core._data import (
34
32
  add_transform_to_kwargs,
35
33
  get_run,
@@ -44,12 +42,45 @@ if TYPE_CHECKING:
44
42
  from ._query_set import QuerySet
45
43
 
46
44
 
45
+ class CollectionFeatureManager:
46
+ """Query features of artifact in collection."""
47
+
48
+ def __init__(self, collection: Collection):
49
+ self._collection = collection
50
+
51
+ def get_feature_sets_union(self) -> dict[str, FeatureSet]:
52
+ links_feature_set_artifact = Artifact.feature_sets.through.objects.filter(
53
+ artifact_id__in=self._collection.artifacts.values_list("id", flat=True)
54
+ )
55
+ feature_sets_by_slots = defaultdict(list)
56
+ for link in links_feature_set_artifact:
57
+ feature_sets_by_slots[link.slot].append(link.featureset_id)
58
+ feature_sets_union = {}
59
+ for slot, feature_set_ids_slot in feature_sets_by_slots.items():
60
+ feature_set_1 = FeatureSet.get(id=feature_set_ids_slot[0])
61
+ related_name = feature_set_1._get_related_name()
62
+ features_registry = getattr(FeatureSet, related_name).field.model
63
+ # this way of writing the __in statement turned out to be the fastest
64
+ # evaluated on a link table with 16M entries connecting 500 feature sets with
65
+ # 60k genes
66
+ feature_ids = (
67
+ features_registry.feature_sets.through.objects.filter(
68
+ featureset_id__in=feature_set_ids_slot
69
+ )
70
+ .values(f"{features_registry.__name__.lower()}_id")
71
+ .distinct()
72
+ )
73
+ features = features_registry.filter(id__in=feature_ids)
74
+ feature_sets_union[slot] = FeatureSet(features, dtype=feature_set_1.dtype)
75
+ return feature_sets_union
76
+
77
+
47
78
  def __init__(
48
79
  collection: Collection,
49
80
  *args,
50
81
  **kwargs,
51
82
  ):
52
- collection.features = FeatureManager(collection)
83
+ collection.features = CollectionFeatureManager(collection)
53
84
  if len(args) == len(collection._meta.concrete_fields):
54
85
  super(Collection, collection).__init__(*args, **kwargs)
55
86
  return None
@@ -78,9 +109,6 @@ def __init__(
78
109
  if "visibility" in kwargs
79
110
  else VisibilityChoice.default.value
80
111
  )
81
- feature_sets: dict[str, FeatureSet] = (
82
- kwargs.pop("feature_sets") if "feature_sets" in kwargs else {}
83
- )
84
112
  if "is_new_version_of" in kwargs:
85
113
  logger.warning("`is_new_version_of` will be removed soon, please use `revises`")
86
114
  revises = kwargs.pop("is_new_version_of")
@@ -98,7 +126,7 @@ def __init__(
98
126
  if not hasattr(artifacts, "__getitem__"):
99
127
  raise ValueError("Artifact or List[Artifact] is allowed.")
100
128
  assert isinstance(artifacts[0], Artifact) # type: ignore # noqa: S101
101
- hash, feature_sets = from_artifacts(artifacts) # type: ignore
129
+ hash = from_artifacts(artifacts) # type: ignore
102
130
  if meta_artifact is not None:
103
131
  if not isinstance(meta_artifact, Artifact):
104
132
  raise ValueError("meta_artifact has to be an Artifact")
@@ -107,11 +135,6 @@ def __init__(
107
135
  raise ValueError(
108
136
  "Save meta_artifact artifact before creating collection!"
109
137
  )
110
- if not feature_sets:
111
- feature_sets = meta_artifact.features._feature_set_by_slot
112
- else:
113
- if len(meta_artifact.features._feature_set_by_slot) > 0:
114
- logger.info("overwriting feature sets linked to artifact")
115
138
  # we ignore collections in trash containing the same hash
116
139
  if hash is not None:
117
140
  existing_collection = Collection.filter(hash=hash).one_or_none()
@@ -126,7 +149,7 @@ def __init__(
126
149
  # save the information that this artifact was previously
127
150
  # produced by another run
128
151
  if existing_collection.run is not None:
129
- existing_collection.run.output_collections_with_later_updates.add(
152
+ existing_collection.run._output_collections_with_later_updates.add(
130
153
  existing_collection
131
154
  )
132
155
  # update the run of the artifact with the latest run
@@ -134,11 +157,6 @@ def __init__(
134
157
  existing_collection.transform = run.transform
135
158
  init_self_from_db(collection, existing_collection)
136
159
  update_attributes(collection, {"description": description, "name": name})
137
- for slot, feature_set in collection.features._feature_set_by_slot.items():
138
- if slot in feature_sets:
139
- if not feature_sets[slot] == feature_set:
140
- collection.feature_sets.remove(feature_set)
141
- logger.warning(f"removing feature set: {feature_set}")
142
160
  else:
143
161
  kwargs = {}
144
162
  add_transform_to_kwargs(kwargs, run)
@@ -161,7 +179,6 @@ def __init__(
161
179
  )
162
180
  settings.creation.search_names = search_names_setting
163
181
  collection._artifacts = artifacts
164
- collection._feature_sets = feature_sets
165
182
  # register provenance
166
183
  if revises is not None:
167
184
  _track_run_input(revises, run=run)
@@ -171,61 +188,21 @@ def __init__(
171
188
  # internal function, not exposed to user
172
189
  def from_artifacts(artifacts: Iterable[Artifact]) -> tuple[str, dict[str, str]]:
173
190
  # assert all artifacts are already saved
174
- logger.debug("check not saved")
175
191
  saved = not any(artifact._state.adding for artifact in artifacts)
176
192
  if not saved:
177
193
  raise ValueError("Not all artifacts are yet saved, please save them")
178
- # query all feature sets of artifacts
179
- logger.debug("artifact ids")
180
- artifact_ids = [artifact.id for artifact in artifacts]
181
- # query all feature sets at the same time rather
182
- # than making a single query per artifact
183
- logger.debug("links_feature_set_artifact")
184
- links_feature_set_artifact = Artifact.feature_sets.through.objects.filter(
185
- artifact_id__in=artifact_ids
186
- )
187
- feature_sets_by_slots = defaultdict(list)
188
- logger.debug("slots")
189
- for link in links_feature_set_artifact:
190
- feature_sets_by_slots[link.slot].append(link.featureset_id)
191
- feature_sets_union = {}
192
- logger.debug("union")
193
- for slot, feature_set_ids_slot in feature_sets_by_slots.items():
194
- feature_set_1 = FeatureSet.get(id=feature_set_ids_slot[0])
195
- related_name = feature_set_1._get_related_name()
196
- features_registry = getattr(FeatureSet, related_name).field.model
197
- start_time = logger.debug("run filter")
198
- # this way of writing the __in statement turned out to be the fastest
199
- # evaluated on a link table with 16M entries connecting 500 feature sets with
200
- # 60k genes
201
- feature_ids = (
202
- features_registry.feature_sets.through.objects.filter(
203
- featureset_id__in=feature_set_ids_slot
204
- )
205
- .values(f"{features_registry.__name__.lower()}_id")
206
- .distinct()
207
- )
208
- start_time = logger.debug("done, start evaluate", time=start_time)
209
- features = features_registry.filter(id__in=feature_ids)
210
- feature_sets_union[slot] = FeatureSet(features, dtype=feature_set_1.dtype)
211
- start_time = logger.debug("done", time=start_time)
212
- # validate consistency of hashes
213
- # we do not allow duplicate hashes
214
- logger.debug("hashes")
215
- # artifact.hash is None for zarr
216
- # todo: more careful handling of such cases
194
+ # validate consistency of hashes - we do not allow duplicate hashes
217
195
  hashes = [artifact.hash for artifact in artifacts if artifact.hash is not None]
218
- if len(hashes) != len(set(hashes)):
196
+ hashes_set = set(hashes)
197
+ if len(hashes) != len(hashes_set):
219
198
  seen = set()
220
199
  non_unique = [x for x in hashes if x in seen or seen.add(x)] # type: ignore
221
200
  raise ValueError(
222
201
  "Please pass artifacts with distinct hashes: these ones are non-unique"
223
202
  f" {non_unique}"
224
203
  )
225
- time = logger.debug("hash")
226
- hash = hash_set(set(hashes))
227
- logger.debug("done", time=time)
228
- return hash, feature_sets_union
204
+ hash = hash_set(hashes_set)
205
+ return hash
229
206
 
230
207
 
231
208
  # docstring handled through attach_func_to_class_method
@@ -401,3 +378,5 @@ for name in METHOD_NAMES:
401
378
 
402
379
  Collection.ordered_artifacts = ordered_artifacts
403
380
  Collection.data_artifact = data_artifact
381
+ Collection.describe = describe
382
+ Collection.view_lineage = view_lineage
lamindb/_curate.py CHANGED
@@ -334,9 +334,9 @@ class DataFrameCurator(BaseCurator):
334
334
  from lamindb.core._settings import settings
335
335
 
336
336
  if not self._validated:
337
- raise ValidationError(
338
- f"Data object is not validated, please run {colors.yellow('validate()')}!"
339
- )
337
+ self.validate()
338
+ if not self._validated:
339
+ raise ValidationError("Dataset does not validate. Please curate.")
340
340
 
341
341
  # Make sure all labels are saved in the current instance
342
342
  verbosity = settings.verbosity
@@ -442,7 +442,7 @@ class AnnDataCurator(DataFrameCurator):
442
442
  exclude=exclude,
443
443
  check_valid_keys=False,
444
444
  )
445
- self._obs_fields = categoricals
445
+ self._obs_fields = categoricals or {}
446
446
  self._check_valid_keys(extra={"var_index"})
447
447
 
448
448
  @property
@@ -563,9 +563,9 @@ class AnnDataCurator(DataFrameCurator):
563
563
  A saved artifact record.
564
564
  """
565
565
  if not self._validated:
566
- raise ValidationError(
567
- f"Data object is not validated, please run {colors.yellow('validate()')}!"
568
- )
566
+ self.validate()
567
+ if not self._validated:
568
+ raise ValidationError("Dataset does not validate. Please curate.")
569
569
 
570
570
  self._artifact = save_artifact(
571
571
  self._data,
@@ -1498,14 +1498,14 @@ def log_saved_labels(
1498
1498
 
1499
1499
  if k == "without reference" and validated_only:
1500
1500
  msg = colors.yellow(
1501
- f"{len(labels)} non-validated categories are not saved in {model_field}: {labels}!"
1501
+ f"{len(labels)} non-validated values are not saved in {model_field}: {labels}!"
1502
1502
  )
1503
1503
  lookup_print = (
1504
1504
  f"lookup().{key}" if key.isidentifier() else f".lookup()['{key}']"
1505
1505
  )
1506
1506
 
1507
1507
  hint = f".add_new_from('{key}')"
1508
- msg += f"\n → to lookup categories, use {lookup_print}"
1508
+ msg += f"\n → to lookup values, use {lookup_print}"
1509
1509
  msg += (
1510
1510
  f"\n → to save, run {colors.yellow(hint)}"
1511
1511
  if save_function == "add_new_from"
lamindb/_filter.py CHANGED
@@ -10,7 +10,7 @@ if TYPE_CHECKING:
10
10
  from lnschema_core import Record
11
11
 
12
12
 
13
- def filter(registry: type[Record], **expressions) -> QuerySet:
13
+ def filter(registry: type[Record], *queries, **expressions) -> QuerySet:
14
14
  """See :meth:`~lamindb.core.Record.filter`."""
15
15
  _using_key = None
16
16
  if "_using_key" in expressions:
@@ -18,6 +18,6 @@ def filter(registry: type[Record], **expressions) -> QuerySet:
18
18
  expressions = process_expressions(registry, expressions)
19
19
  qs = QuerySet(model=registry, using=_using_key)
20
20
  if len(expressions) > 0:
21
- return qs.filter(**expressions)
21
+ return qs.filter(*queries, **expressions)
22
22
  else:
23
23
  return qs
lamindb/_record.py CHANGED
@@ -12,7 +12,7 @@ from lamin_utils._lookup import Lookup
12
12
  from lamindb_setup._connect_instance import get_owner_name_from_identifier
13
13
  from lamindb_setup.core._docs import doc_args
14
14
  from lamindb_setup.core._hub_core import connect_instance
15
- from lnschema_core.models import Collection, IsVersioned, Record
15
+ from lnschema_core.models import IsVersioned, Record
16
16
 
17
17
  from lamindb._utils import attach_func_to_class_method
18
18
  from lamindb.core._settings import settings
@@ -36,6 +36,13 @@ def init_self_from_db(self: Record, existing_record: Record):
36
36
  self._state.db = "default"
37
37
 
38
38
 
39
+ def update_attributes(record: Record, attributes: dict[str, str]):
40
+ for key, value in attributes.items():
41
+ if getattr(record, key) != value:
42
+ logger.warning(f"updated {key} from {getattr(record, key)} to {value}")
43
+ setattr(record, key, value)
44
+
45
+
39
46
  def validate_required_fields(record: Record, kwargs):
40
47
  required_fields = {
41
48
  k.name for k in record._meta.fields if not k.null and k.default is None
@@ -123,11 +130,11 @@ def __init__(record: Record, *args, **kwargs):
123
130
 
124
131
  @classmethod # type:ignore
125
132
  @doc_args(Record.filter.__doc__)
126
- def filter(cls, **expressions) -> QuerySet:
133
+ def filter(cls, *queries, **expressions) -> QuerySet:
127
134
  """{}""" # noqa: D415
128
135
  from lamindb._filter import filter
129
136
 
130
- return filter(cls, **expressions)
137
+ return filter(cls, *queries, **expressions)
131
138
 
132
139
 
133
140
  @classmethod # type:ignore
@@ -430,6 +437,7 @@ def update_fk_to_default_db(
430
437
  records: Record | list[Record] | QuerySet,
431
438
  fk: str,
432
439
  using_key: str | None,
440
+ transfer_logs: dict,
433
441
  ):
434
442
  record = records[0] if isinstance(records, (List, QuerySet)) else records
435
443
  if hasattr(record, f"{fk}_id") and getattr(record, f"{fk}_id") is not None:
@@ -442,7 +450,9 @@ def update_fk_to_default_db(
442
450
  from copy import copy
443
451
 
444
452
  fk_record_default = copy(fk_record)
445
- transfer_to_default_db(fk_record_default, using_key, save=True)
453
+ transfer_to_default_db(
454
+ fk_record_default, using_key, save=True, transfer_logs=transfer_logs
455
+ )
446
456
  if isinstance(records, (List, QuerySet)):
447
457
  for r in records:
448
458
  setattr(r, f"{fk}", None)
@@ -460,66 +470,66 @@ FKBULK = [
460
470
  ]
461
471
 
462
472
 
463
- def transfer_fk_to_default_db_bulk(records: list | QuerySet, using_key: str | None):
473
+ def transfer_fk_to_default_db_bulk(
474
+ records: list | QuerySet, using_key: str | None, transfer_logs: dict
475
+ ):
464
476
  for fk in FKBULK:
465
- update_fk_to_default_db(records, fk, using_key)
477
+ update_fk_to_default_db(records, fk, using_key, transfer_logs=transfer_logs)
466
478
 
467
479
 
468
480
  def transfer_to_default_db(
469
481
  record: Record,
470
482
  using_key: str | None,
483
+ *,
484
+ transfer_logs: dict,
471
485
  save: bool = False,
472
- mute: bool = False,
473
486
  transfer_fk: bool = True,
474
487
  ) -> Record | None:
475
- db = record._state.db
476
- if db is not None and db != "default" and using_key is None:
477
- registry = record.__class__
478
- record_on_default = registry.objects.filter(uid=record.uid).one_or_none()
479
- if record_on_default is not None:
480
- logger.important(
481
- f"returning existing {record.__class__.__name__}(uid='{record.uid}') on default database"
482
- )
483
- return record_on_default
484
- if not mute:
485
- logger.hint(f"saving from instance {db} to default instance: {record}")
486
- from lamindb.core._context import context
487
- from lamindb.core._data import WARNING_RUN_TRANSFORM
488
-
489
- if hasattr(record, "created_by_id"):
490
- # this line is needed to point created_by to default db
491
- record.created_by = None
492
- record.created_by_id = ln_setup.settings.user.id
493
- if hasattr(record, "run_id"):
494
- record.run = None
495
- if context.run is not None:
496
- record.run_id = context.run.id
497
- else:
498
- if not settings.creation.artifact_silence_missing_run_warning:
499
- logger.warning(WARNING_RUN_TRANSFORM)
500
- record.run_id = None
501
- if hasattr(record, "transform_id") and record._meta.model_name != "run":
502
- record.transform = None
503
- if context.run is not None:
504
- record.transform_id = context.run.transform_id
505
- else:
506
- record.transform_id = None
507
- # transfer other foreign key fields
508
- fk_fields = [
509
- i.name
510
- for i in record._meta.fields
511
- if i.get_internal_type() == "ForeignKey"
512
- if i.name not in {"created_by", "run", "transform"}
513
- ]
514
- if not transfer_fk:
515
- # don't transfer fk fields that are already bulk transferred
516
- fk_fields = [fk for fk in fk_fields if fk not in FKBULK]
517
- for fk in fk_fields:
518
- update_fk_to_default_db(record, fk, using_key)
519
- record.id = None
520
- record._state.db = "default"
521
- if save:
522
- record.save()
488
+ from lamindb.core._context import context
489
+ from lamindb.core._data import WARNING_RUN_TRANSFORM
490
+
491
+ registry = record.__class__
492
+ record_on_default = registry.objects.filter(uid=record.uid).one_or_none()
493
+ record_str = f"{record.__class__.__name__}(uid='{record.uid}')"
494
+ if record_on_default is not None:
495
+ transfer_logs["mapped"].append(record_str)
496
+ return record_on_default
497
+ else:
498
+ transfer_logs["transferred"].append(record_str)
499
+
500
+ if hasattr(record, "created_by_id"):
501
+ record.created_by = None
502
+ record.created_by_id = ln_setup.settings.user.id
503
+ if hasattr(record, "run_id"):
504
+ record.run = None
505
+ if context.run is not None:
506
+ record.run_id = context.run.id
507
+ else:
508
+ if not settings.creation.artifact_silence_missing_run_warning:
509
+ logger.warning(WARNING_RUN_TRANSFORM)
510
+ record.run_id = None
511
+ if hasattr(record, "transform_id") and record._meta.model_name != "run":
512
+ record.transform = None
513
+ if context.run is not None:
514
+ record.transform_id = context.run.transform_id
515
+ else:
516
+ record.transform_id = None
517
+ # transfer other foreign key fields
518
+ fk_fields = [
519
+ i.name
520
+ for i in record._meta.fields
521
+ if i.get_internal_type() == "ForeignKey"
522
+ if i.name not in {"created_by", "run", "transform"}
523
+ ]
524
+ if not transfer_fk:
525
+ # don't transfer fk fields that are already bulk transferred
526
+ fk_fields = [fk for fk in fk_fields if fk not in FKBULK]
527
+ for fk in fk_fields:
528
+ update_fk_to_default_db(record, fk, using_key, transfer_logs=transfer_logs)
529
+ record.id = None
530
+ record._state.db = "default"
531
+ if save:
532
+ record.save()
523
533
  return None
524
534
 
525
535
 
@@ -534,10 +544,20 @@ def save(self, *args, **kwargs) -> Record:
534
544
  if self.__class__.__name__ == "Collection" and self.id is not None:
535
545
  # when creating a new collection without being able to access artifacts
536
546
  artifacts = self.ordered_artifacts.list()
537
- # transfer of the record to the default db with fk fields
538
- result = transfer_to_default_db(self, using_key)
539
- if result is not None:
540
- init_self_from_db(self, result)
547
+ pre_existing_record = None
548
+ # consider records that are being transferred from other databases
549
+ transfer_logs: dict[str, list[str]] = {"mapped": [], "transferred": []}
550
+ if db is not None and db != "default" and using_key is None:
551
+ if isinstance(self, IsVersioned):
552
+ if not self.is_latest:
553
+ raise NotImplementedError(
554
+ "You are attempting to transfer a record that's not the latest in its version history. This is currently not supported."
555
+ )
556
+ pre_existing_record = transfer_to_default_db(
557
+ self, using_key, transfer_logs=transfer_logs
558
+ )
559
+ if pre_existing_record is not None:
560
+ init_self_from_db(self, pre_existing_record)
541
561
  else:
542
562
  # save versioned record
543
563
  if isinstance(self, IsVersioned) and self._revises is not None:
@@ -571,8 +591,10 @@ def save(self, *args, **kwargs) -> Record:
571
591
  self_on_db._state.db = db
572
592
  self_on_db.pk = pk_on_db # manually set the primary key
573
593
  self_on_db.features = FeatureManager(self_on_db)
574
- self.features._add_from(self_on_db)
575
- self.labels.add_from(self_on_db)
594
+ self.features._add_from(self_on_db, transfer_logs=transfer_logs)
595
+ self.labels.add_from(self_on_db, transfer_logs=transfer_logs)
596
+ for k, v in transfer_logs.items():
597
+ logger.important(f"{k} records: {', '.join(v)}")
576
598
  return self
577
599
 
578
600
 
lamindb/_transform.py CHANGED
@@ -39,9 +39,23 @@ def __init__(transform: Transform, *args, **kwargs):
39
39
  )
40
40
  if revises is None:
41
41
  if key is not None:
42
- revises = Transform.filter(key=key).order_by("-created_at").first()
43
- elif uid is not None and not uid.endswith("0000"):
44
- revises = Transform.filter(uid__startswith=uid[:-4]).one_or_none()
42
+ revises = (
43
+ Transform.filter(key=key, is_latest=True)
44
+ .order_by("-created_at")
45
+ .first()
46
+ )
47
+ elif uid is not None:
48
+ revises = (
49
+ Transform.filter(uid__startswith=uid[:-4], is_latest=True)
50
+ .order_by("-created_at")
51
+ .first()
52
+ )
53
+ if revises is not None and uid is not None and uid == revises.uid:
54
+ from ._record import init_self_from_db, update_attributes
55
+
56
+ init_self_from_db(transform, revises)
57
+ update_attributes(transform, {"name": name})
58
+ return None
45
59
  if revises is not None and key is not None and revises.key != key:
46
60
  note = message_update_key_in_version_family(
47
61
  suid=revises.stem_uid,
lamindb/core/__init__.py CHANGED
@@ -66,8 +66,6 @@ from lamin_utils._inspect import InspectResult
66
66
  from lnschema_core.models import (
67
67
  CanValidate,
68
68
  FeatureValue,
69
- HasFeatures,
70
- HasParams,
71
69
  HasParents,
72
70
  IsVersioned,
73
71
  ParamValue,
lamindb/core/_context.py CHANGED
@@ -2,7 +2,6 @@ from __future__ import annotations
2
2
 
3
3
  import builtins
4
4
  import hashlib
5
- import os
6
5
  from datetime import datetime, timezone
7
6
  from pathlib import Path, PurePath
8
7
  from typing import TYPE_CHECKING
@@ -12,6 +11,7 @@ from lamin_utils import logger
12
11
  from lamindb_setup.core.hashing import hash_file
13
12
  from lnschema_core import Run, Transform, ids
14
13
  from lnschema_core.ids import base62_12
14
+ from lnschema_core.models import format_field_value
15
15
  from lnschema_core.users import current_user_id
16
16
 
17
17
  from ._settings import settings
@@ -242,7 +242,7 @@ class Context:
242
242
  ):
243
243
  better_version = bump_version_function(self.version)
244
244
  raise SystemExit(
245
- f"Version '{self.version}' is already taken by Transform('{transform.uid}'); please set another version, e.g., ln.context.version = '{better_version}'"
245
+ f"Version '{self.version}' is already taken by Transform(uid='{transform.uid}'); please set another version, e.g., ln.context.version = '{better_version}'"
246
246
  )
247
247
  elif transform_settings_are_set:
248
248
  stem_uid, self.version = (
@@ -293,10 +293,10 @@ class Context:
293
293
  transform_exists = Transform.filter(id=transform.id).first()
294
294
  if transform_exists is None:
295
295
  transform.save()
296
- self._logging_message += f"created Transform('{transform.uid}')"
296
+ self._logging_message += f"created Transform(uid='{transform.uid}')"
297
297
  transform_exists = transform
298
298
  else:
299
- self._logging_message += f"loaded Transform('{transform.uid}')"
299
+ self._logging_message += f"loaded Transform(uid='{transform.uid}')"
300
300
  self._transform = transform_exists
301
301
 
302
302
  if new_run is None: # for notebooks, default to loading latest runs
@@ -311,7 +311,9 @@ class Context:
311
311
  )
312
312
  if run is not None: # loaded latest run
313
313
  run.started_at = datetime.now(timezone.utc) # update run time
314
- self._logging_message += f" & loaded Run('{run.started_at}')"
314
+ self._logging_message += (
315
+ f" & loaded Run(started_at={format_field_value(run.started_at)})"
316
+ )
315
317
 
316
318
  if run is None: # create new run
317
319
  run = Run(
@@ -319,7 +321,9 @@ class Context:
319
321
  params=params,
320
322
  )
321
323
  run.started_at = datetime.now(timezone.utc)
322
- self._logging_message += f" & created Run('{run.started_at}')"
324
+ self._logging_message += (
325
+ f" & created Run(started_at={format_field_value(run.started_at)})"
326
+ )
323
327
  # can only determine at ln.finish() if run was consecutive in
324
328
  # interactive session, otherwise, is consecutive
325
329
  run.is_consecutive = True if is_run_from_ipython else None
@@ -432,7 +436,7 @@ class Context:
432
436
  reference_type=transform_ref_type,
433
437
  type=transform_type,
434
438
  ).save()
435
- self._logging_message += f"created Transform('{transform.uid}')"
439
+ self._logging_message += f"created Transform(uid='{transform.uid}')"
436
440
  else:
437
441
  uid = transform.uid
438
442
  # check whether the transform file has been renamed
@@ -473,7 +477,9 @@ class Context:
473
477
  if condition:
474
478
  bump_revision = True
475
479
  else:
476
- self._logging_message += f"loaded Transform('{transform.uid}')"
480
+ self._logging_message += (
481
+ f"loaded Transform(uid='{transform.uid}')"
482
+ )
477
483
  if bump_revision:
478
484
  change_type = (
479
485
  "Re-running saved notebook"
@@ -490,7 +496,7 @@ class Context:
490
496
  f'ln.context.uid = "{suid}{new_vuid}"'
491
497
  )
492
498
  else:
493
- self._logging_message += f"loaded Transform('{transform.uid}')"
499
+ self._logging_message += f"loaded Transform(uid='{transform.uid}')"
494
500
  self._transform = transform
495
501
 
496
502
  def finish(self, ignore_non_consecutive: None | bool = None) -> None:
lamindb/core/_data.py CHANGED
@@ -10,7 +10,6 @@ from lnschema_core.models import (
10
10
  Collection,
11
11
  Feature,
12
12
  FeatureSet,
13
- HasFeatures,
14
13
  Record,
15
14
  Run,
16
15
  ULabel,
@@ -96,9 +95,14 @@ def save_feature_set_links(self: Artifact | Collection) -> None:
96
95
  bulk_create(links, ignore_conflicts=True)
97
96
 
98
97
 
99
- @doc_args(HasFeatures.describe.__doc__)
100
- def describe(self: HasFeatures, print_types: bool = False):
98
+ @doc_args(Artifact.describe.__doc__)
99
+ def describe(self: Artifact, print_types: bool = False):
101
100
  """{}""" # noqa: D415
101
+ model_name = self.__class__.__name__
102
+ msg = f"{colors.green(model_name)}{record_repr(self, include_foreign_keys=False).lstrip(model_name)}\n"
103
+ if self._state.db is not None and self._state.db != "default":
104
+ msg += f" {colors.italic('Database instance')}\n"
105
+ msg += f" slug: {self._state.db}\n"
102
106
  # prefetch all many-to-many relationships
103
107
  # doesn't work for describing using artifact
104
108
  # self = (
@@ -109,10 +113,7 @@ def describe(self: HasFeatures, print_types: bool = False):
109
113
  # .get(id=self.id)
110
114
  # )
111
115
 
112
- model_name = self.__class__.__name__
113
- msg = f"{colors.green(model_name)}{record_repr(self, include_foreign_keys=False).lstrip(model_name)}\n"
114
116
  prov_msg = ""
115
-
116
117
  fields = self._meta.fields
117
118
  direct_fields = []
118
119
  foreign_key_fields = []
@@ -129,9 +130,14 @@ def describe(self: HasFeatures, print_types: bool = False):
129
130
  .get(id=self.id)
130
131
  )
131
132
  # prefetch m-2-m relationships
133
+ many_to_many_fields = []
134
+ if isinstance(self, (Collection, Artifact)):
135
+ many_to_many_fields.append("input_of_runs")
136
+ if isinstance(self, Artifact):
137
+ many_to_many_fields.append("feature_sets")
132
138
  self = (
133
139
  self.__class__.objects.using(self._state.db)
134
- .prefetch_related("feature_sets", "input_of_runs")
140
+ .prefetch_related(*many_to_many_fields)
135
141
  .get(id=self.id)
136
142
  )
137
143
 
@@ -149,20 +155,32 @@ def describe(self: HasFeatures, print_types: bool = False):
149
155
  ]
150
156
  )
151
157
  prov_msg += related_msg
152
- # input of
153
- if self.id is not None and self.input_of_runs.exists():
154
- values = [format_field_value(i.started_at) for i in self.input_of_runs.all()]
155
- type_str = ": Run" if print_types else "" # type: ignore
156
- prov_msg += f" .input_of_runs{type_str} = {values}\n"
157
158
  if prov_msg:
158
159
  msg += f" {colors.italic('Provenance')}\n"
159
160
  msg += prov_msg
161
+
162
+ # input of runs
163
+ input_of_message = ""
164
+ if self.id is not None and self.input_of_runs.exists():
165
+ values = [format_field_value(i.started_at) for i in self.input_of_runs.all()]
166
+ type_str = ": Run" if print_types else "" # type: ignore
167
+ input_of_message += f" .input_of_runs{type_str} = {', '.join(values)}\n"
168
+ if input_of_message:
169
+ msg += f" {colors.italic('Usage')}\n"
170
+ msg += input_of_message
171
+
172
+ # labels
160
173
  msg += print_labels(self, print_types=print_types)
161
- msg += print_features( # type: ignore
162
- self,
163
- print_types=print_types,
164
- print_params=hasattr(self, "type") and self.type == "model",
165
- )
174
+
175
+ # features
176
+ if isinstance(self, Artifact):
177
+ msg += print_features( # type: ignore
178
+ self,
179
+ print_types=print_types,
180
+ print_params=hasattr(self, "type") and self.type == "model",
181
+ )
182
+
183
+ # print entire message
166
184
  logger.print(msg)
167
185
 
168
186
 
@@ -328,7 +346,7 @@ def add_labels(
328
346
 
329
347
 
330
348
  def _track_run_input(
331
- data: HasFeatures | Iterable[HasFeatures],
349
+ data: Artifact | Collection | Iterable[Artifact] | Iterable[Collection],
332
350
  is_run_input: bool | None = None,
333
351
  run: Run | None = None,
334
352
  ):
@@ -340,12 +358,14 @@ def _track_run_input(
340
358
  elif run is None:
341
359
  run = context.run
342
360
  # consider that data is an iterable of Data
343
- data_iter: Iterable[HasFeatures] = [data] if isinstance(data, HasFeatures) else data
361
+ data_iter: Iterable[Artifact] | Iterable[Collection] = (
362
+ [data] if isinstance(data, (Artifact, Collection)) else data
363
+ )
344
364
  track_run_input = False
345
365
  input_data = []
346
366
  if run is not None:
347
367
  # avoid cycles: data can't be both input and output
348
- def is_valid_input(data: HasFeatures):
368
+ def is_valid_input(data: Artifact | Collection):
349
369
  return (
350
370
  data.run_id != run.id
351
371
  and not data._state.adding
@@ -416,7 +436,3 @@ def _track_run_input(
416
436
  if len(input_data) == 1:
417
437
  if input_data[0].transform is not None:
418
438
  run.transform.predecessors.add(input_data[0].transform)
419
-
420
-
421
- HasFeatures.describe = describe
422
- HasFeatures.view_lineage = view_lineage
@@ -19,11 +19,7 @@ from lnschema_core.models import (
19
19
  Collection,
20
20
  Feature,
21
21
  FeatureManager,
22
- FeatureManagerArtifact,
23
- FeatureManagerCollection,
24
22
  FeatureValue,
25
- HasFeatures,
26
- HasParams,
27
23
  LinkORM,
28
24
  Param,
29
25
  ParamManager,
@@ -116,7 +112,7 @@ def get_feature_set_links(host: Artifact | Collection) -> QuerySet:
116
112
  return links_feature_set
117
113
 
118
114
 
119
- def get_link_attr(link: LinkORM | type[LinkORM], data: HasFeatures) -> str:
115
+ def get_link_attr(link: LinkORM | type[LinkORM], data: Artifact | Collection) -> str:
120
116
  link_model_name = link.__class__.__name__
121
117
  if link_model_name in {"Registry", "ModelBase"}: # we passed the type of the link
122
118
  link_model_name = link.__name__
@@ -137,7 +133,7 @@ def custom_aggregate(field, using: str):
137
133
 
138
134
 
139
135
  def print_features(
140
- self: HasFeatures | HasParams,
136
+ self: Artifact | Collection,
141
137
  print_types: bool = False,
142
138
  to_dict: bool = False,
143
139
  print_params: bool = False,
@@ -362,7 +358,7 @@ def __getitem__(self, slot) -> QuerySet:
362
358
 
363
359
 
364
360
  def filter_base(cls, **expression):
365
- if cls in {FeatureManagerArtifact, FeatureManagerCollection}:
361
+ if cls is FeatureManager:
366
362
  model = Feature
367
363
  value_model = FeatureValue
368
364
  else:
@@ -394,10 +390,11 @@ def filter_base(cls, **expression):
394
390
  new_expression["ulabels"] = label
395
391
  else:
396
392
  raise NotImplementedError
397
- if cls == FeatureManagerArtifact or cls == ParamManagerArtifact:
393
+ if cls == FeatureManager or cls == ParamManagerArtifact:
398
394
  return Artifact.filter(**new_expression)
399
- elif cls == FeatureManagerCollection:
400
- return Collection.filter(**new_expression)
395
+ # might renable something similar in the future
396
+ # elif cls == FeatureManagerCollection:
397
+ # return Collection.filter(**new_expression)
401
398
  elif cls == ParamManagerRun:
402
399
  return Run.filter(**new_expression)
403
400
 
@@ -791,9 +788,11 @@ def _add_set_from_mudata(
791
788
  self._host.save()
792
789
 
793
790
 
794
- def _add_from(self, data: HasFeatures):
791
+ def _add_from(self, data: Artifact | Collection, transfer_logs: dict = None):
795
792
  """Transfer features from a artifact or collection."""
796
- # This only covers feature sets, though.
793
+ # This only covers feature sets
794
+ if transfer_logs is None:
795
+ transfer_logs = {"mapped": [], "transferred": []}
797
796
  using_key = settings._using_key
798
797
  for slot, feature_set in data.features._feature_set_by_slot.items():
799
798
  members = feature_set.members
@@ -815,15 +814,18 @@ def _add_from(self, data: HasFeatures):
815
814
  new_members = members.filter(**{f"{field}__in": new_members_uids}).all()
816
815
  n_new_members = len(new_members)
817
816
  if n_new_members > 0:
818
- mute = True if n_new_members > 10 else False
819
817
  # transfer foreign keys needs to be run before transfer to default db
820
- transfer_fk_to_default_db_bulk(new_members, using_key)
818
+ transfer_fk_to_default_db_bulk(
819
+ new_members, using_key, transfer_logs=transfer_logs
820
+ )
821
821
  for feature in new_members:
822
822
  # not calling save=True here as in labels, because want to
823
823
  # bulk save below
824
824
  # transfer_fk is set to False because they are already transferred
825
825
  # in the previous step transfer_fk_to_default_db_bulk
826
- transfer_to_default_db(feature, using_key, mute=mute, transfer_fk=False)
826
+ transfer_to_default_db(
827
+ feature, using_key, transfer_fk=False, transfer_logs=transfer_logs
828
+ )
827
829
  logger.info(f"saving {n_new_members} new {registry.__name__} records")
828
830
  save(new_members)
829
831
 
@@ -20,12 +20,12 @@ from ._settings import settings
20
20
  from .schema import dict_related_model_to_related_name
21
21
 
22
22
  if TYPE_CHECKING:
23
- from lnschema_core.models import Artifact, Collection, HasFeatures, Record
23
+ from lnschema_core.models import Artifact, Collection, Record
24
24
 
25
25
  from lamindb._query_set import QuerySet
26
26
 
27
27
 
28
- def get_labels_as_dict(self: HasFeatures, links: bool = False):
28
+ def get_labels_as_dict(self: Artifact | Collection, links: bool = False):
29
29
  exclude_set = {
30
30
  "feature_sets",
31
31
  "artifacts",
@@ -57,7 +57,9 @@ def get_labels_as_dict(self: HasFeatures, links: bool = False):
57
57
  return labels
58
58
 
59
59
 
60
- def print_labels(self: HasFeatures, field: str = "name", print_types: bool = False):
60
+ def print_labels(
61
+ self: Artifact | Collection, field: str = "name", print_types: bool = False
62
+ ):
61
63
  labels_msg = ""
62
64
  for related_name, (related_model, labels) in get_labels_as_dict(self).items():
63
65
  # there is a try except block here to deal with schema inconsistencies
@@ -167,22 +169,22 @@ class LabelManager:
167
169
 
168
170
  return get_labels(self._host, feature=feature, mute=mute, flat_names=flat_names)
169
171
 
170
- def add_from(self, data: HasFeatures) -> None:
172
+ def add_from(self, data: Artifact | Collection, transfer_logs: dict = None) -> None:
171
173
  """Add labels from an artifact or collection to another artifact or collection.
172
174
 
173
175
  Examples:
174
- >>> file1 = ln.Artifact(pd.DataFrame(index=[0, 1]))
175
- >>> file1.save()
176
- >>> file2 = ln.Artifact(pd.DataFrame(index=[2, 3]))
177
- >>> file2.save()
176
+ >>> artifact1 = ln.Artifact(pd.DataFrame(index=[0, 1])).save()
177
+ >>> artifact2 = ln.Artifact(pd.DataFrame(index=[2, 3])).save()
178
178
  >>> ulabels = ln.ULabel.from_values(["Label1", "Label2"], field="name")
179
179
  >>> ln.save(ulabels)
180
180
  >>> labels = ln.ULabel.filter(name__icontains = "label").all()
181
- >>> file1.ulabels.set(labels)
182
- >>> file2.labels.add_from(file1)
181
+ >>> artifact1.ulabels.set(labels)
182
+ >>> artifact2.labels.add_from(artifact1)
183
183
  """
184
184
  from django.db.utils import ProgrammingError
185
185
 
186
+ if transfer_logs is None:
187
+ transfer_logs = {"mapped": [], "transferred": []}
186
188
  using_key = settings._using_key
187
189
  for related_name, (_, labels) in get_labels_as_dict(data).items():
188
190
  labels = labels.all()
@@ -195,7 +197,9 @@ class LabelManager:
195
197
  features = set()
196
198
  _, new_labels = validate_labels(labels)
197
199
  if len(new_labels) > 0:
198
- transfer_fk_to_default_db_bulk(new_labels, using_key)
200
+ transfer_fk_to_default_db_bulk(
201
+ new_labels, using_key, transfer_logs=transfer_logs
202
+ )
199
203
  for label in labels:
200
204
  # if the link table doesn't follow this convention, we'll ignore it
201
205
  if not hasattr(label, f"links_{data_name_lower}"):
@@ -212,7 +216,7 @@ class LabelManager:
212
216
  label_returned = transfer_to_default_db(
213
217
  label,
214
218
  using_key,
215
- mute=True,
219
+ transfer_logs=transfer_logs,
216
220
  transfer_fk=False,
217
221
  save=True,
218
222
  )
@@ -223,10 +227,15 @@ class LabelManager:
223
227
  # treat features
224
228
  _, new_features = validate_labels(list(features))
225
229
  if len(new_features) > 0:
226
- transfer_fk_to_default_db_bulk(new_features, using_key)
230
+ transfer_fk_to_default_db_bulk(
231
+ new_features, using_key, transfer_logs=transfer_logs
232
+ )
227
233
  for feature in new_features:
228
234
  transfer_to_default_db(
229
- feature, using_key, mute=True, transfer_fk=False
235
+ feature,
236
+ using_key,
237
+ transfer_logs=transfer_logs,
238
+ transfer_fk=False,
230
239
  )
231
240
  save(new_features)
232
241
  if hasattr(self._host, related_name):
lamindb/core/_settings.py CHANGED
@@ -52,7 +52,7 @@ class Settings:
52
52
  return creation_settings
53
53
 
54
54
  track_run_inputs: bool = True
55
- """Track files as input upon `.load()`, `.cache()` and `.backed()`.
55
+ """Track files as input upon `.load()`, `.cache()` and `.open()`.
56
56
 
57
57
  Requires a global run context with :func:`~lamindb.core.Context.track` was created!
58
58
 
@@ -80,8 +80,7 @@ def save_vitessce_config(
80
80
  name="save_vitessce_config",
81
81
  type="function",
82
82
  version="2",
83
- )
84
- transform.save()
83
+ ).save()
85
84
  run = Run(transform=transform).save()
86
85
  if len(dataset_artifacts) > 1:
87
86
  # if we have more datasets, we should create a collection
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: lamindb
3
- Version: 0.76.5
3
+ Version: 0.76.6
4
4
  Summary: A data framework for biology.
5
5
  Author-email: Lamin Labs <open-source@lamin.ai>
6
6
  Requires-Python: >=3.8
@@ -9,10 +9,10 @@ Classifier: Programming Language :: Python :: 3.8
9
9
  Classifier: Programming Language :: Python :: 3.9
10
10
  Classifier: Programming Language :: Python :: 3.10
11
11
  Classifier: Programming Language :: Python :: 3.11
12
- Requires-Dist: lnschema_core==0.73.5
12
+ Requires-Dist: lnschema_core==0.74.1
13
13
  Requires-Dist: lamindb_setup==0.77.1
14
14
  Requires-Dist: lamin_utils==0.13.4
15
- Requires-Dist: lamin_cli==0.17.1
15
+ Requires-Dist: lamin_cli==0.17.2
16
16
  Requires-Dist: rapidfuzz
17
17
  Requires-Dist: pyarrow
18
18
  Requires-Dist: typing_extensions!=4.6.0
@@ -24,7 +24,7 @@ Requires-Dist: pandas
24
24
  Requires-Dist: graphviz
25
25
  Requires-Dist: psycopg2-binary
26
26
  Requires-Dist: lamindb_setup[aws] ; extra == "aws"
27
- Requires-Dist: bionty==0.49.1 ; extra == "bionty"
27
+ Requires-Dist: bionty==0.50.1 ; extra == "bionty"
28
28
  Requires-Dist: pre-commit ; extra == "dev"
29
29
  Requires-Dist: nox ; extra == "dev"
30
30
  Requires-Dist: laminci>=0.3 ; extra == "dev"
@@ -1,32 +1,32 @@
1
- lamindb/__init__.py,sha256=jllkTTXtX6UHCCnv7BvPAywnywCNWlWm0l5UbCnK9nM,2344
2
- lamindb/_artifact.py,sha256=QjCge5kaAcfhGv84s299OT99LmHTSYDxgzw5kN-x3-8,44416
1
+ lamindb/__init__.py,sha256=e0nF_wRocm1Y_i1yIPgyzx6_ugrJWP6unIW-7apS_rA,2344
2
+ lamindb/_artifact.py,sha256=k3fMfo5hQJ-AfM6OFMVeafGaKdV9DqQ5yPhxEUG5vW0,43896
3
3
  lamindb/_can_validate.py,sha256=9di9FLmC2m3dpT42sceF34UEFzQITi2e_hjVMa8DIc4,18261
4
- lamindb/_collection.py,sha256=F_VgpLBprrzUQ-tPngWvO9vFd7jX66MVwIi031JOris,14871
5
- lamindb/_curate.py,sha256=gCbDiqhsJzVZZ6BuEoFXUpsNOffpUNDlrX1dJiOqJJo,58753
4
+ lamindb/_collection.py,sha256=t-Eh0Trr8twxic0-7mRusAsFZcrdtdyeioSYNI2ixT8,13832
5
+ lamindb/_curate.py,sha256=Y-vK1JXCL3YXz97pyX_cVG06s7pokZcnN3zXomdXR58,58765
6
6
  lamindb/_feature.py,sha256=nZhtrH0ssoNls-hV-dkwfK9sKypg2El59R9qfarxfUE,5340
7
7
  lamindb/_feature_set.py,sha256=DmAy96V_RyV0yiyvWOCHgustXPsCaMwn4TrWwh2qDd8,8104
8
- lamindb/_filter.py,sha256=9QHa9J-_6QeYPQATZpTun2VGiFofwzB0Km-KnKajHcM,663
8
+ lamindb/_filter.py,sha256=NMxIX67gYqN7n3lA4XL6hRgAaAFhpwnwxm3vBA5XGlU,683
9
9
  lamindb/_finish.py,sha256=riwm7mA-RXej_L0iz_svt6j5Z6faQb3NmQGKjAwhx8g,9282
10
10
  lamindb/_from_values.py,sha256=8kYpR8Q85EOaTcsPGjVHeZh29fGVgum5OEQf4Hsz_80,13533
11
11
  lamindb/_is_versioned.py,sha256=5lAnhTboltFkZCKVRV1uxkm0OCjJz_HKi3yQq_vEuMs,1306
12
12
  lamindb/_parents.py,sha256=eMavdd6IO6STOVJSlR2TzdRtx6sKYDKsMOtlR3DZlgQ,15599
13
13
  lamindb/_query_manager.py,sha256=Ipe85HL31DDwMbC8CN_1Svbwk48a_DUh_INGQdZL08I,4222
14
14
  lamindb/_query_set.py,sha256=BiGvEiaBSd9aV28EAy83Q8h6RLsYMDjfxLOljAcyMaM,12692
15
- lamindb/_record.py,sha256=53_0oU6v45V5gIDJgkAUSX7iIV5Si_4cuOWUHJa8JVo,21241
15
+ lamindb/_record.py,sha256=d-tBYj_EgNBD2Nl9pBC5H_OYGdRmBKAVvRnE91_23e8,22035
16
16
  lamindb/_run.py,sha256=5M_r1zGDv9HlqbqRKTWCYCOtENovJ-8mQ4kY7XqcLaU,1888
17
17
  lamindb/_save.py,sha256=Fu7Z84btKOXfTfpunKLni21s5ER2zIllqg5e3nPq-0A,10910
18
18
  lamindb/_storage.py,sha256=GBVChv-DHVMNEBJL5l_JT6B4RDhZ6NnwgzmUICphYKk,413
19
- lamindb/_transform.py,sha256=ekwHQc4fv8PV1cffCYtTPfxL1RJtENd9_Y3v9CwxqYc,4213
19
+ lamindb/_transform.py,sha256=tRO7Uq-8fkq6Tm4U5qQ1lBOaNUehH8IkiDDPnYPgQH8,4623
20
20
  lamindb/_ulabel.py,sha256=XDSdZBXX_ki5s1vOths3MjF2x5DPggBR_PV_KF4SGyg,1611
21
21
  lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
22
22
  lamindb/_view.py,sha256=4Ln2ItTb3857PAI-70O8eJYqoTJ_NNFc7E_wds6OGns,2412
23
- lamindb/core/__init__.py,sha256=hxPWM_Jnrllx0G_6itEGU2meXwptkkgiL9zsBvlhHM4,1495
24
- lamindb/core/_context.py,sha256=zt4aJz_IxPu3ujENyPjwJCebh_3w5Vu6QkDh0dspZFA,20719
25
- lamindb/core/_data.py,sha256=eocOXsZGu62LPtz6yIlvHhPSJTf3yF2ITZTffyflWYI,16269
26
- lamindb/core/_feature_manager.py,sha256=94tX6gq_Rx7fkDARQBxB2z92qUDpHocFSAdKv5izMT4,32490
27
- lamindb/core/_label_manager.py,sha256=zCE-PS1Y5ALpzoSOx1P6ZTFVPgFNRAmmyTQF0e8QBXA,9131
23
+ lamindb/core/__init__.py,sha256=DrTH8wUuQarZ0zvYDcOphkgfmfWniC1Tyte25lrD_CE,1463
24
+ lamindb/core/_context.py,sha256=Y-18enP_KRUB9MwKuEoXcYJSWheLtwX63WmYcmfhmKA,20971
25
+ lamindb/core/_data.py,sha256=nGswaJUUWKdrRsKCWvcVlTn2Lyt71J14L28rzivKdhk,16924
26
+ lamindb/core/_feature_manager.py,sha256=Bzh6x-azq9llFiqgK3GUvL6nLcFGOVpIUnacYVsGSEo,32595
27
+ lamindb/core/_label_manager.py,sha256=VRNQq3HmSuQULbjrZ_y2mTiJryGzqE7AHvK1rw2xdMQ,9525
28
28
  lamindb/core/_mapped_collection.py,sha256=1XzratL2IvRleqioNhWo26Lsuqkev8-HEImmHQxw9Kw,23266
29
- lamindb/core/_settings.py,sha256=GGEB8BU5GinIfD4ktr1Smp6GPHGaInu46MhP4EecZDY,5950
29
+ lamindb/core/_settings.py,sha256=73SV-vTDzSKX9E5rSvj9kdPV4jHSovRM3x7bgT1OBh8,5948
30
30
  lamindb/core/_sync_git.py,sha256=qc0yfPyKeG4uuNT_3qsv-mkIMqhLFqfXNeNVO49vV00,4547
31
31
  lamindb/core/_track_environment.py,sha256=STzEVUzOeUEWdX7WDJUkKH4u08k7eupRX6AXQwoVt14,828
32
32
  lamindb/core/exceptions.py,sha256=TKyt1JOUwWIHbkCQjir_LQadf8960eQ95RWhSpz5_Bk,1288
@@ -50,10 +50,10 @@ lamindb/core/subsettings/__init__.py,sha256=KFHPzIE7f7Bj4RgMjGQF4CjTdHVG_VNFBrCn
50
50
  lamindb/core/subsettings/_creation_settings.py,sha256=54mfMH_osC753hpxcl7Dq1rwBD2LHnWveXtQpkLBITE,1194
51
51
  lamindb/core/subsettings/_transform_settings.py,sha256=4YbCuZtJo6zdytl6UQR4GvdDkTtT6SRBqVzofGzNOt8,583
52
52
  lamindb/integrations/__init__.py,sha256=RWGMYYIzr8zvmNPyVB4m-p4gMDhxdRbjES2Ed23OItw,215
53
- lamindb/integrations/_vitessce.py,sha256=671jHIF8LgUjcOgRvJNhP0aK1Xty9pHkQ8ukx1U2gLY,4578
53
+ lamindb/integrations/_vitessce.py,sha256=S51wl7iF2QvQmrNcZ9yDdqTtcn_AAzuh0i5axKwQ2sM,4560
54
54
  lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
55
55
  lamindb/setup/core/__init__.py,sha256=SevlVrc2AZWL3uALbE5sopxBnIZPWZ1IB0NBDudiAL8,167
56
- lamindb-0.76.5.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
57
- lamindb-0.76.5.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
58
- lamindb-0.76.5.dist-info/METADATA,sha256=lmB6jhR6ZnklsigeYZFeCiNWMyGHqStcg_teeo1JmNk,2372
59
- lamindb-0.76.5.dist-info/RECORD,,
56
+ lamindb-0.76.6.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
57
+ lamindb-0.76.6.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
58
+ lamindb-0.76.6.dist-info/METADATA,sha256=5MHI_pJ-0Ep5tCTtng3sXm9_9RwV3LIN9rYXf4V0GgQ,2372
59
+ lamindb-0.76.6.dist-info/RECORD,,