lamindb 0.73.1__py3-none-any.whl → 0.74.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -14,6 +14,7 @@ Registries:
     ULabel
     Feature
     FeatureSet
+    Param

  Key functionality:

@@ -41,12 +42,13 @@ Modules & settings:
  """

  # denote a release candidate for 0.1.0 with 0.1rc1, 0.1a1, 0.1b1, etc.
- __version__ = "0.73.1"
+ __version__ = "0.74.0"

  import os as _os

  import lamindb_setup as _lamindb_setup
- from lamindb_setup._check_setup import InstanceNotSetupError, _check_instance_setup
+ from lamindb_setup._check_setup import InstanceNotSetupError as _InstanceNotSetupError
+ from lamindb_setup._check_setup import _check_instance_setup
  from lamindb_setup._connect_instance import connect
  from lamindb_setup.core.upath import UPath

@@ -54,17 +56,18 @@ from . import setup


  def __getattr__(name):
-     raise InstanceNotSetupError()
+     raise _InstanceNotSetupError()


  if _check_instance_setup(from_lamindb=True):
-     del InstanceNotSetupError
+     del _InstanceNotSetupError
      del __getattr__ # delete so that imports work out
      from lnschema_core.models import (
          Artifact,
          Collection,
          Feature,
          FeatureSet,
+         Param,
          Run,
          Storage,
          Transform,
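
The new `Param` registry is now exported at the top level next to `Feature` and `FeatureSet`. A minimal sketch of registering a param record, assuming a connected instance; the name and dtype are only illustrative:

import lamindb as ln

# Param mirrors Feature but describes run/model parameters rather than dataset features;
# the dtype string must match what lamindb infers for the values you later attach.
ln.Param(name="learning_rate", dtype="float").save()
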
lamindb/_artifact.py CHANGED
@@ -23,7 +23,7 @@ from lamindb_setup.core.upath import (
      get_stat_dir_cloud,
      get_stat_file_cloud,
  )
- from lnschema_core.models import Artifact, FeatureManager, Run, Storage
+ from lnschema_core.models import Artifact, FeatureManager, ParamManager, Run, Storage
  from lnschema_core.types import (
      VisibilityChoice,
  )
@@ -84,7 +84,7 @@ def process_pathlike(
              pass
      if isinstance(filepath, LocalPathClasses):
          filepath = filepath.resolve()
-     if check_path_is_child_of_root(filepath, default_storage.root_as_path()):
+     if check_path_is_child_of_root(filepath, default_storage.root):
          use_existing_storage_key = True
          return default_storage, use_existing_storage_key
      else:
@@ -272,28 +272,17 @@ def check_path_in_existing_storage(
  ) -> Storage | bool:
      for storage in Storage.objects.using(using_key).filter().all():
          # if path is part of storage, return it
-         if check_path_is_child_of_root(path, root=create_path(storage.root)):
+         if check_path_is_child_of_root(path, root=storage.root):
              return storage
      return False


  def check_path_is_child_of_root(path: Path | UPath, root: Path | UPath | None) -> bool:
-     path = UPath(str(path)) if not isinstance(path, UPath) else path
-     root = UPath(str(root)) if not isinstance(root, UPath) else root
-
-     # the following comparisons can fail if types aren't comparable
-     if not isinstance(path, LocalPathClasses) and not isinstance(
-         root, LocalPathClasses
-     ):
-         # the following tests equivalency of two UPath objects
-         # via string representations; otherwise
-         # S3Path('s3://lndb-storage/') and S3Path('s3://lamindb-ci/')
-         # test as equivalent
-         return list(path.parents)[-1].as_posix() == root.as_posix()
-     elif isinstance(path, LocalPathClasses) and isinstance(root, LocalPathClasses):
-         return root.resolve() in path.resolve().parents
-     else:
-         return False
+     # str is needed to eliminate UPath storage_options
+     # from the equality checks below
+     path = UPath(str(path))
+     root = UPath(str(root))
+     return root.resolve() in path.resolve().parents


  def get_relative_path_to_directory(
@@ -358,7 +347,7 @@ def get_artifact_kwargs_from_data(
      check_path_in_storage = False
      if use_existing_storage_key:
          inferred_key = get_relative_path_to_directory(
-             path=path, directory=storage.root_as_path()
+             path=path, directory=UPath(storage.root)
          ).as_posix()
          if key is None:
              key = inferred_key
@@ -516,6 +505,7 @@ def update_attributes(data: HasFeatures, attributes: Mapping[str, str]):

  def __init__(artifact: Artifact, *args, **kwargs):
      artifact.features = FeatureManager(artifact)
+     artifact.params = ParamManager(artifact)
      # Below checks for the Django-internal call in from_db()
      # it'd be better if we could avoid this, but not being able to create a Artifact
      # from data with the default constructor renders the central class of the API
@@ -532,6 +522,7 @@ def __init__(artifact: Artifact, *args, **kwargs):
          raise ValueError("Only one non-keyword arg allowed: data")

      data: str | Path = kwargs.pop("data") if len(args) == 0 else args[0]
+     type: str = kwargs.pop("type") if "type" in kwargs else "dataset"
      key: str | None = kwargs.pop("key") if "key" in kwargs else None
      run: Run | None = kwargs.pop("run") if "run" in kwargs else None
      description: str | None = (
@@ -616,6 +607,7 @@ def __init__(artifact: Artifact, *args, **kwargs):
              "to _retain_ the old state by duplicating the entire folder, do _not_ pass `is_new_version_of`"
          )

+     kwargs["type"] = type
      kwargs["uid"] = provisional_uid
      kwargs["version"] = version
      kwargs["description"] = description
@@ -662,6 +654,7 @@ def from_df(
          version=version,
          is_new_version_of=is_new_version_of,
          accessor="DataFrame",
+         type="dataset",
          **kwargs,
      )
      return artifact
@@ -690,6 +683,7 @@ def from_anndata(
          version=version,
          is_new_version_of=is_new_version_of,
          accessor="AnnData",
+         type="dataset",
          **kwargs,
      )
      return artifact
@@ -716,6 +710,7 @@ def from_mudata(
          version=version,
          is_new_version_of=is_new_version_of,
          accessor="MuData",
+         type="dataset",
          **kwargs,
      )
      return artifact
@@ -752,7 +747,7 @@ def from_dir(
          else:
              # maintain the hierachy within an existing storage location
              folder_key_path = get_relative_path_to_directory(
-                 folderpath, storage.root_as_path()
+                 folderpath, UPath(storage.root)
              )
      else:
          folder_key_path = Path(key)
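
`check_path_is_child_of_root` now simply strips `UPath` storage options via `str()` and tests whether the resolved root is among the resolved path's parents. A minimal local-filesystem illustration of that containment test using plain `pathlib` (an illustration only, not the lamindb function itself):

from pathlib import Path

def is_child_of_root(path: str, root: str) -> bool:
    # a path belongs to a storage location iff the resolved root
    # appears among the resolved path's parents
    return Path(root).resolve() in Path(path).resolve().parents

print(is_child_of_root("/data/storage/subdir/file.parquet", "/data/storage"))  # True
print(is_child_of_root("/tmp/file.parquet", "/data/storage"))                  # False
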
lamindb/_feature.py CHANGED
@@ -27,12 +27,12 @@ FEATURE_TYPES = {
  }


- def convert_numpy_dtype_to_lamin_feature_type(dtype) -> str:
+ def convert_numpy_dtype_to_lamin_feature_type(dtype, str_as_cat: bool = True) -> str:
      orig_type = dtype.name
      # strip precision qualifiers
      type = "".join(i for i in orig_type if not i.isdigit())
      if type == "object" or type == "str":
-         type = "cat"
+         type = "cat" if str_as_cat else "str"
      return type

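
`convert_numpy_dtype_to_lamin_feature_type` gains a `str_as_cat` flag so object/string dtypes can map to plain `str` instead of `cat`. A standalone sketch mirroring the function shown above:

import numpy as np

def convert_numpy_dtype_to_lamin_feature_type(dtype, str_as_cat: bool = True) -> str:
    orig_type = dtype.name
    # strip precision qualifiers, e.g. float64 -> float
    type_ = "".join(i for i in orig_type if not i.isdigit())
    if type_ == "object" or type_ == "str":
        type_ = "cat" if str_as_cat else "str"
    return type_

print(convert_numpy_dtype_to_lamin_feature_type(np.dtype("float64")))                   # float
print(convert_numpy_dtype_to_lamin_feature_type(np.dtype("object")))                    # cat
print(convert_numpy_dtype_to_lamin_feature_type(np.dtype("object"), str_as_cat=False))  # str
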
lamindb/_finish.py CHANGED
@@ -33,13 +33,19 @@ def get_seconds_since_modified(filepath) -> float:
      return datetime.now().timestamp() - filepath.stat().st_mtime


- def finish():
+ def finish() -> None:
      """Mark a tracked run as finished.

      Saves source code and, for notebooks, a run report to your default storage location.
      """
-     if run_context.path is None:
+     if run_context.run is None:
          raise TrackNotCalled("Please run `ln.track()` before `ln.finish()`")
+     if run_context.path is None:
+         assert run_context.transform.type not in {"script", "notebook"}
+         run_context.run.finished_at = datetime.now(timezone.utc)
+         run_context.run.save()
+         # nothing else to do
+         return None
      if is_run_from_ipython: # notebooks
          if (
              get_seconds_since_modified(run_context.path) > 3
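
`finish()` now distinguishes "track was never called" (no run) from "the transform has no source path" (neither script nor notebook): in the latter case it only stamps `finished_at` and returns. A minimal usage sketch, assuming a connected instance and a session that `ln.track()` can register:

import lamindb as ln

ln.track()   # registers a transform & run for this session
# ... do the tracked work ...
ln.finish()  # without a script/notebook path this only sets run.finished_at and returns
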
lamindb/_from_values.py CHANGED
@@ -278,14 +278,26 @@ def index_iterable(iterable: Iterable) -> pd.Index:


  def _print_values(names: Iterable, n: int = 20, quotes: bool = True) -> str:
-     names = (name for name in names if name != "None")
-     unique_names = list(dict.fromkeys(names))[:n]
-     if quotes:
-         print_values = ", ".join(f"'{name}'" for name in unique_names)
+     if isinstance(names, dict):
+         items = {
+             f"{key}: {value}": None
+             for key, value in names.items()
+             if key != "None" and value != "None"
+         }
      else:
-         print_values = ", ".join(f"{name}" for name in unique_names)
-     if len(unique_names) > n:
+         # Use a dictionary instead of a list to have unique values and preserve order
+         items = {str(name): None for name in names if name != "None"}
+
+     unique_items = list(items.keys())
+
+     if quotes:
+         unique_items = [f"'{item}'" for item in unique_items]
+
+     print_values = ", ".join(unique_items[:n])
+
+     if len(unique_items) > n:
          print_values += ", ..."
+
      return print_values
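
`_print_values` now also accepts a dict (rendered as `key: value` pairs) and deduplicates while preserving order. A short sketch of the resulting formatting, assuming a configured lamindb installation for the private import:

from lamindb._from_values import _print_values

print(_print_values(["a", "b", "a", "None"]))         # 'a', 'b'
print(_print_values({"x": 1, "y": 2}, quotes=False))  # x: 1, y: 2
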
lamindb/_registry.py CHANGED
@@ -54,7 +54,7 @@ def suggest_records_with_similar_names(record: Registry, kwargs) -> bool:
      """
      if kwargs.get("name") is None:
          return False
-     queryset = _search(record.__class__, kwargs["name"], truncate_words=True, limit=5)
+     queryset = _search(record.__class__, kwargs["name"], truncate_words=True, limit=20)
      if not queryset.exists(): # empty queryset
          return False
      for alternative_record in queryset:
lamindb/_run.py CHANGED
@@ -1,9 +1,10 @@
  from __future__ import annotations

- from lnschema_core.models import Run, Transform
+ from lnschema_core.models import ParamManager, Run, Transform


  def __init__(run: Run, *args, **kwargs):
+     run.params = ParamManager(run)
      if len(args) == len(run._meta.concrete_fields):
          super(Run, run).__init__(*args, **kwargs)
          return None
lamindb/_storage.py CHANGED
@@ -3,11 +3,6 @@ from lamindb_setup.core.upath import UPath, create_path
  from lnschema_core import Storage


- def root_as_path(self) -> UPath:
-     access_token = self._access_token if hasattr(self, "_access_token") else None
-     return create_path(self.root, access_token=access_token)
-
-
  @property # type: ignore
  @doc_args(Storage.path.__doc__)
  def path(self) -> UPath:
@@ -16,5 +11,4 @@ def path(self) -> UPath:
      return create_path(self.root, access_token=access_token)


- Storage.root_as_path = root_as_path
  Storage.path = path
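
With `Storage.root_as_path()` removed, call sites build the path directly from `storage.root` or use the `storage.path` property, which attaches the instance's access token. A minimal migration sketch; `storage` stands for any `Storage` record:

from lamindb_setup.core.upath import UPath

def storage_root(storage) -> UPath:
    # replacement for the removed storage.root_as_path(); prefer storage.path
    # when the instance's access token should be attached to the path
    return UPath(storage.root)
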
lamindb/_view.py CHANGED
@@ -1,14 +1,16 @@
  from __future__ import annotations

+ import builtins
  import importlib
  import inspect

- from IPython.display import display
  from lamin_utils import colors, logger
  from lamindb_setup import settings
  from lamindb_setup._init_instance import get_schema_module_name
  from lnschema_core import Registry

+ is_run_from_ipython = getattr(builtins, "__IPYTHON__", False)
+

  def view(
      n: int = 7, schema: str | None = None, registries: list[str] | None = None
@@ -25,6 +27,11 @@ def view(
      Examples:
          >>> ln.view()
      """
+     if is_run_from_ipython:
+         from IPython.display import display as show
+     else:
+         show = logger.print
+
      if schema is not None:
          schema_names = [schema]
      else:
@@ -60,4 +67,4 @@ def view(
          df = orm.df().iloc[-n:]
          if df.shape[0] > 0:
              logger.print(colors.blue(colors.bold(orm.__name__)))
-             display(df)
+             show(df)
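
`view()` no longer hard-depends on IPython: inside IPython it renders DataFrames via `IPython.display.display`, otherwise it falls back to `logger.print`. A minimal sketch, assuming a connected instance:

import lamindb as ln

# works the same in a notebook (rich display) and in a plain terminal (logger.print)
ln.view(n=5)
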
lamindb/core/__init__.py CHANGED
@@ -10,7 +10,9 @@ Registries:
     QueryManager
     RecordsList
     HasFeatures
+    HasParams
     FeatureManager
+    ParamManager
     LabelManager
     IsVersioned
     CanValidate
@@ -56,6 +58,7 @@ from lamin_utils._inspect import InspectResult
  from lnschema_core.models import (
      CanValidate,
      HasFeatures,
+     HasParams,
      HasParents,
      IsVersioned,
      Registry,
@@ -71,7 +74,7 @@ from lamindb._annotate import (
  )
  from lamindb._query_manager import QueryManager
  from lamindb._query_set import QuerySet, RecordsList
- from lamindb.core._feature_manager import FeatureManager
+ from lamindb.core._feature_manager import FeatureManager, ParamManager
  from lamindb.core._label_manager import LabelManager

  from . import _data, datasets, exceptions, fields, types
lamindb/core/_data.py CHANGED
@@ -24,13 +24,12 @@ from lamindb._registry import get_default_str_field
  from lamindb.core._settings import settings

  from ._feature_manager import (
-     FeatureManager,
      get_feature_set_links,
      get_host_id_field,
      get_label_links,
      print_features,
  )
- from ._label_manager import LabelManager, print_labels
+ from ._label_manager import print_labels
  from ._run_context import run_context
  from .exceptions import ValidationError
  from .schema import (
@@ -159,7 +158,11 @@ def describe(self: HasFeatures, print_types: bool = False):
      msg += f" {colors.italic('Provenance')}\n"
      msg += prov_msg
      msg += print_labels(self, print_types=print_types)
-     msg += print_features(self, print_types=print_types) # type: ignore
+     msg += print_features( # type: ignore
+         self,
+         print_types=print_types,
+         print_params=hasattr(self, "type") and self.type == "model",
+     )
      logger.print(msg)
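
`describe()` now decides between feature and param display based on the artifact's `type`. A minimal sketch, assuming a connected instance; the description used for the lookup is hypothetical:

import lamindb as ln

artifact = ln.Artifact.filter(description="my dataset").first()  # any saved dataset-like artifact
artifact.describe()                    # prints Provenance, Labels, Features, Feature sets
print(artifact.features.get_values())  # the same feature values as a plain dict
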
lamindb/core/_feature_manager.py CHANGED
@@ -10,9 +10,8 @@ import numpy as np
  import pandas as pd
  from anndata import AnnData
  from django.contrib.postgres.aggregates import ArrayAgg
- from django.db import connections, models
- from django.db.models import Aggregate, CharField, F, Value
- from django.db.models.functions import Concat
+ from django.db import connections
+ from django.db.models import Aggregate
  from lamin_utils import colors, logger
  from lamindb_setup.core.upath import create_path
  from lnschema_core.models import (
@@ -24,8 +23,15 @@ from lnschema_core.models import (
      FeatureManagerCollection,
      FeatureValue,
      HasFeatures,
+     HasParams,
      LinkORM,
+     Param,
+     ParamManager,
+     ParamManagerArtifact,
+     ParamManagerRun,
+     ParamValue,
      Registry,
+     Run,
      ULabel,
  )

@@ -43,6 +49,9 @@ from lamindb.core.storage import LocalPathClasses

  from ._label_manager import get_labels_as_dict
  from ._settings import settings
+ from .schema import (
+     dict_related_model_to_related_name,
+ )

  if TYPE_CHECKING:
      from lnschema_core.types import FieldAttr
@@ -107,8 +116,10 @@ def get_feature_set_links(host: Artifact | Collection) -> QuerySet:
      return feature_set_links


- def get_link_attr(link: LinkORM, data: HasFeatures) -> str:
+ def get_link_attr(link: LinkORM | type[LinkORM], data: HasFeatures) -> str:
      link_model_name = link.__class__.__name__
+     if link_model_name == "ModelBase": # we passed the type of the link
+         link_model_name = link.__name__
      link_attr = link_model_name.replace(data.__class__.__name__, "")
      if link_attr == "ExperimentalFactor":
          link_attr = "experimental_factor"
@@ -131,42 +142,51 @@ def custom_aggregate(field, using: str):


  def print_features(
-     self: HasFeatures, print_types: bool = False, to_dict: bool = False
+     self: HasFeatures | HasParams,
+     print_types: bool = False,
+     to_dict: bool = False,
+     print_params: bool = False,
  ) -> str | dict[str, Any]:
      from lamindb._from_values import _print_values

      msg = ""
      dictionary = {}
      # categorical feature values
-     labels_msg = ""
-     labels_by_feature = defaultdict(list)
-     for _, (_, links) in get_labels_as_dict(self, links=True).items():
-         for link in links:
-             if link.feature_id is not None:
-                 link_attr = get_link_attr(link, self)
-                 labels_by_feature[link.feature_id].append(getattr(link, link_attr).name)
-     for feature_id, labels_list in labels_by_feature.items():
-         feature = Feature.objects.using(self._state.db).get(id=feature_id)
-         print_values = _print_values(labels_list, n=10)
-         type_str = f": {feature.dtype}" if print_types else ""
-         if to_dict:
-             dictionary[feature.name] = (
-                 labels_list if len(labels_list) > 1 else labels_list[0]
-             )
-         labels_msg += f" '{feature.name}'{type_str} = {print_values}\n"
-     if labels_msg:
-         msg += labels_msg
+     if not print_params:
+         labels_msg = ""
+         labels_by_feature = defaultdict(list)
+         for _, (_, links) in get_labels_as_dict(self, links=True).items():
+             for link in links:
+                 if link.feature_id is not None:
+                     link_attr = get_link_attr(link, self)
+                     labels_by_feature[link.feature_id].append(
+                         getattr(link, link_attr).name
+                     )
+         for feature_id, labels_list in labels_by_feature.items():
+             feature = Feature.objects.using(self._state.db).get(id=feature_id)
+             print_values = _print_values(labels_list, n=10)
+             type_str = f": {feature.dtype}" if print_types else ""
+             if to_dict:
+                 dictionary[feature.name] = (
+                     labels_list if len(labels_list) > 1 else labels_list[0]
+                 )
+             labels_msg += f" '{feature.name}'{type_str} = {print_values}\n"
+         if labels_msg:
+             msg += labels_msg

      # non-categorical feature values
      non_labels_msg = ""
-     if self.id is not None and self.__class__ == Artifact:
-         feature_values = self.feature_values.values(
-             "feature__name", "feature__dtype"
-         ).annotate(values=custom_aggregate("value", self._state.db))
+     if self.id is not None and self.__class__ == Artifact or self.__class__ == Run:
+         attr_name = "param" if print_params else "feature"
+         feature_values = (
+             getattr(self, f"{attr_name}_values")
+             .values(f"{attr_name}__name", f"{attr_name}__dtype")
+             .annotate(values=custom_aggregate("value", self._state.db))
+         )
          if len(feature_values) > 0:
              for fv in feature_values:
-                 feature_name = fv["feature__name"]
-                 feature_dtype = fv["feature__dtype"]
+                 feature_name = fv[f"{attr_name}__name"]
+                 feature_dtype = fv[f"{attr_name}__dtype"]
                  values = fv["values"]
                  # TODO: understand why the below is necessary
                  if not isinstance(values, list):
@@ -174,24 +194,33 @@ def print_features(
                  if to_dict:
                      dictionary[feature_name] = values if len(values) > 1 else values[0]
                  type_str = f": {feature_dtype}" if print_types else ""
-                 non_labels_msg += f" '{feature_name}'{type_str} = {_print_values(values, n=10, quotes=False)}\n"
+                 printed_values = (
+                     _print_values(values, n=10, quotes=False)
+                     if not feature_dtype.startswith("list")
+                     else values
+                 )
+                 non_labels_msg += f" '{feature_name}'{type_str} = {printed_values}\n"
          msg += non_labels_msg

      if msg != "":
-         msg = f" {colors.italic('Features')}\n" + msg
+         header = "Features" if not print_params else "Params"
+         msg = f" {colors.italic(header)}\n" + msg

      # feature sets
-     feature_set_msg = ""
-     for slot, feature_set in get_feature_set_by_slot_(self).items():
-         features = feature_set.members
-         # features.first() is a lot slower than features[0] here
-         name_field = get_default_str_field(features[0])
-         feature_names = list(features.values_list(name_field, flat=True)[:20])
-         type_str = f": {feature_set.registry}" if print_types else ""
-         feature_set_msg += f" '{slot}'{type_str} = {_print_values(feature_names)}\n"
-     if feature_set_msg:
-         msg += f" {colors.italic('Feature sets')}\n"
-         msg += feature_set_msg
+     if not print_params:
+         feature_set_msg = ""
+         for slot, feature_set in get_feature_set_by_slot_(self).items():
+             features = feature_set.members
+             # features.first() is a lot slower than features[0] here
+             name_field = get_default_str_field(features[0])
+             feature_names = list(features.values_list(name_field, flat=True)[:20])
+             type_str = f": {feature_set.registry}" if print_types else ""
+             feature_set_msg += (
+                 f" '{slot}'{type_str} = {_print_values(feature_names)}\n"
+             )
+         if feature_set_msg:
+             msg += f" {colors.italic('Feature sets')}\n"
+             msg += feature_set_msg
      if to_dict:
          return dictionary
      else:
@@ -258,7 +287,9 @@ def parse_feature_sets_from_anndata(
      return feature_sets


- def infer_feature_type_convert_json(value: Any, mute: bool = False) -> tuple[str, Any]:
+ def infer_feature_type_convert_json(
+     value: Any, mute: bool = False, str_as_ulabel: bool = True
+ ) -> tuple[str, Any]:
      if isinstance(value, bool):
          return FEATURE_TYPES["bool"], value
      elif isinstance(value, int):
@@ -266,39 +297,53 @@ def infer_feature_type_convert_json(value: Any, mute: bool = False) -> tuple[str
      elif isinstance(value, float):
          return FEATURE_TYPES["float"], value
      elif isinstance(value, str):
-         return FEATURE_TYPES["str"] + "[ULabel]", value
+         if str_as_ulabel:
+             return FEATURE_TYPES["str"] + "[ULabel]", value
+         else:
+             return "str", value
      elif isinstance(value, Iterable) and not isinstance(value, (str, bytes)):
          if isinstance(value, (pd.Series, np.ndarray)):
-             return convert_numpy_dtype_to_lamin_feature_type(value.dtype), list(value)
+             return convert_numpy_dtype_to_lamin_feature_type(
+                 value.dtype, str_as_cat=str_as_ulabel
+             ), list(value)
+         if isinstance(value, dict):
+             return "dict", value
          if len(value) > 0: # type: ignore
              first_element_type = type(next(iter(value)))
              if all(isinstance(elem, first_element_type) for elem in value):
                  if first_element_type == bool:
-                     return FEATURE_TYPES["bool"], value
+                     return f"list[{FEATURE_TYPES['bool']}]", value
                  elif first_element_type == int:
-                     return FEATURE_TYPES["int"], value
+                     return f"list[{FEATURE_TYPES['int']}]", value
                  elif first_element_type == float:
-                     return FEATURE_TYPES["float"], value
+                     return f"list[{FEATURE_TYPES['float']}]", value
                  elif first_element_type == str:
-                     return FEATURE_TYPES["str"] + "[ULabel]", value
+                     if str_as_ulabel:
+                         return FEATURE_TYPES["str"] + "[ULabel]", value
+                     else:
+                         return "list[str]", value
+     elif isinstance(value, Registry):
+         return (f"cat[{value.__class__.__get_name_with_schema__()}]", value)
      if not mute:
          logger.warning(f"cannot infer feature type of: {value}, returning '?")
      return ("?", value)


- def __init__(self, host: Artifact | Collection):
+ def __init__(self, host: Artifact | Collection | Run):
      self._host = host
      self._feature_set_by_slot_ = None
      self._accessor_by_registry_ = None


  def __repr__(self) -> str:
-     return print_features(self._host) # type: ignore
+     return print_features(self._host, print_params=(self.__class__ == ParamManager)) # type: ignore


  def get_values(self) -> dict[str, Any]:
      """Get feature values as a dictionary."""
-     return print_features(self._host, to_dict=True) # type: ignore
+     return print_features(
+         self._host, to_dict=True, print_params=(self.__class__ == ParamManager)
+     ) # type: ignore


  def __getitem__(self, slot) -> QuerySet:
@@ -316,19 +361,25 @@ def __getitem__(self, slot) -> QuerySet:
  @classmethod # type: ignore
  def filter(cls, **expression) -> QuerySet:
      """Filter features."""
+     if cls in {FeatureManagerArtifact, FeatureManagerCollection}:
+         model = Feature
+         value_model = FeatureValue
+     else:
+         model = Param
+         value_model = ParamValue
      keys_normalized = [key.split("__")[0] for key in expression]
-     validated = Feature.validate(keys_normalized, field="name", mute=True)
+     validated = model.validate(keys_normalized, field="name", mute=True)
      if sum(validated) != len(keys_normalized):
          raise ValidationError(
              f"Some keys in the filter expression are not registered as features: {np.array(keys_normalized)[~validated]}"
          )
      new_expression = {}
-     features = Feature.filter(name__in=keys_normalized).all().distinct()
+     features = model.filter(name__in=keys_normalized).all().distinct()
      for key, value in expression.items():
          normalized_key = key.split("__")[0]
          feature = features.get(name=normalized_key)
          if not feature.dtype.startswith("cat"):
-             feature_value = FeatureValue.filter(feature=feature, value=value).one()
+             feature_value = value_model.filter(feature=feature, value=value).one()
              new_expression["feature_values"] = feature_value
          else:
              if isinstance(value, str):
@@ -336,10 +387,12 @@ def filter(cls, **expression) -> QuerySet:
                  new_expression["ulabels"] = label
              else:
                  raise NotImplementedError
-     if cls == FeatureManagerArtifact:
+     if cls == FeatureManagerArtifact or cls == ParamManagerArtifact:
          return Artifact.filter(**new_expression)
-     else:
+     elif cls == FeatureManagerCollection:
          return Collection.filter(**new_expression)
+     elif cls == ParamManagerRun:
+         return Run.filter(**new_expression)


  @property # type: ignore
@@ -358,16 +411,17 @@ def _accessor_by_registry(self):
      return self._accessor_by_registry_


- def add_values(
+ def _add_values(
      self,
      values: dict[str, str | int | float | bool],
-     feature_field: FieldAttr = Feature.name,
+     feature_param_field: FieldAttr,
+     str_as_ulabel: bool = True,
  ) -> None:
      """Annotate artifact with features & values.

      Args:
          values: A dictionary of keys (features) & values (labels, numbers, booleans).
-         feature_field: The field of a reference registry to map keys of the
+         feature_param_field: The field of a reference registry to map keys of the
              dictionary.
      """
      # rename to distinguish from the values inside the dict
@@ -377,62 +431,83 @@ def add_values(
      keys = list(keys) # type: ignore
      # deal with other cases later
      assert all(isinstance(key, str) for key in keys)
-     registry = feature_field.field.model
-     validated = registry.validate(keys, field=feature_field, mute=True)
+     registry = feature_param_field.field.model
+     is_param = registry == Param
+     model = Param if is_param else Feature
+     value_model = ParamValue if is_param else FeatureValue
+     model_name = "Param" if is_param else "Feature"
+     if is_param:
+         if self._host.__class__ == Artifact:
+             if self._host.type != "model":
+                 raise ValidationError("Can only set params for model-like artifacts.")
+     else:
+         if self._host.__class__ == Artifact:
+             if self._host.type != "dataset" and self._host.type is not None:
+                 raise ValidationError(
+                     "Can only set features for dataset-like artifacts."
+                 )
+     validated = registry.validate(keys, field=feature_param_field, mute=True)
      keys_array = np.array(keys)
      validated_keys = keys_array[validated]
      if validated.sum() != len(keys):
          not_validated_keys = keys_array[~validated]
          hint = "\n".join(
              [
-                 f" ln.Feature(name='{key}', dtype='{infer_feature_type_convert_json(features_values[key])[0]}').save()"
+                 f" ln.{model_name}(name='{key}', dtype='{infer_feature_type_convert_json(features_values[key], str_as_ulabel=str_as_ulabel)[0]}').save()"
                  for key in not_validated_keys
              ]
          )
          msg = (
              f"These keys could not be validated: {not_validated_keys.tolist()}\n"
-             f"If there are no typos, create features for them:\n\n{hint}"
+             f"Here is how to create a {model_name.lower()}:\n\n{hint}"
          )
          raise ValidationError(msg)
      registry.from_values(
          validated_keys,
-         field=feature_field,
+         field=feature_param_field,
      )
      # figure out which of the values go where
-     features_labels = []
+     features_labels = defaultdict(list)
      feature_values = []
      not_validated_values = []
      for key, value in features_values.items():
-         feature = Feature.filter(name=key).one()
+         feature = model.filter(name=key).one()
          inferred_type, converted_value = infer_feature_type_convert_json(
-             value, mute=True
+             value,
+             mute=True,
+             str_as_ulabel=str_as_ulabel,
          )
          if feature.dtype == "number":
              if inferred_type not in {"int", "float"}:
                  raise TypeError(
                      f"Value for feature '{key}' with type {feature.dtype} must be a number"
                  )
-         elif feature.dtype == "cat":
+         elif feature.dtype.startswith("cat"):
              if not (inferred_type.startswith("cat") or isinstance(value, Registry)):
                  raise TypeError(
                      f"Value for feature '{key}' with type '{feature.dtype}' must be a string or record."
                  )
-         elif feature.dtype == "bool":
-             assert isinstance(value, bool)
+         elif not inferred_type == feature.dtype:
+             raise ValidationError(
+                 f"Expected dtype for '{key}' is '{feature.dtype}', got '{inferred_type}'"
+             )
          if not feature.dtype.startswith("cat"):
              # can remove the query once we have the unique constraint
-             feature_value = FeatureValue.filter(
-                 feature=feature, value=converted_value
-             ).one_or_none()
+             filter_kwargs = {model_name.lower(): feature, "value": converted_value}
+             feature_value = value_model.filter(**filter_kwargs).one_or_none()
              if feature_value is None:
-                 feature_value = FeatureValue(feature=feature, value=converted_value)
+                 feature_value = value_model(**filter_kwargs)
              feature_values.append(feature_value)
          else:
              if isinstance(value, Registry):
-                 assert not value._state.adding
+                 if value._state.adding:
+                     raise ValidationError(
+                         "Please save your label record before annotation."
+                     )
                  label_record = value
-                 assert isinstance(label_record, ULabel)
-                 features_labels.append((feature, label_record))
+                 features_labels[
+                     label_record.__class__.__get_name_with_schema__()
+                 ].append((feature, label_record))
              else:
                  if isinstance(value, str):
                      values = [value] # type: ignore
@@ -447,7 +522,7 @@
                  if validated.sum() != len(values):
                      not_validated_values += values_array[~validated].tolist()
                  label_records = ULabel.from_values(validated_values, field="name")
-                 features_labels += [
+                 features_labels["ULabel"] += [
                      (feature, label_record) for label_record in label_records
                  ]
      if not_validated_values:
@@ -457,40 +532,67 @@
          )
          msg = (
              f"These values could not be validated: {not_validated_values}\n"
-             f"If there are no typos, create ulabels for them:\n\n{hint}"
+             f"Here is how to create ulabels for them:\n\n{hint}"
          )
          raise ValidationError(msg)
      # bulk add all links to ArtifactULabel
      if features_labels:
-         LinkORM = self._host.ulabels.through
-         links = [
-             LinkORM(
-                 artifact_id=self._host.id, feature_id=feature.id, ulabel_id=label.id
-             )
-             for (feature, label) in features_labels
-         ]
-         # a link might already exist
-         try:
-             save(links, ignore_conflicts=False)
-         except Exception:
-             save(links, ignore_conflicts=True)
-         # now deal with links that were previously saved without a feature_id
-         saved_links = LinkORM.filter(
-             artifact_id=self._host.id,
-             ulabel_id__in=[l.id for _, l in features_labels],
-         )
-         for link in saved_links.all():
-             # TODO: also check for inconsistent features
-             if link.feature_id is None:
-                 link.feature_id = [
-                     f.id for f, l in features_labels if l.id == link.ulabel_id
-                 ][0]
-                 link.save()
+         if list(features_labels.keys()) != ["ULabel"]:
+             related_names = dict_related_model_to_related_name(self._host.__class__)
+         else:
+             related_names = {"ULabel": "ulabels"}
+         for class_name, registry_features_labels in features_labels.items():
+             related_name = related_names[class_name] # e.g., "ulabels"
+             LinkORM = getattr(self._host, related_name).through
+             field_name = f"{get_link_attr(LinkORM, self._host)}_id" # e.g., ulabel_id
+             links = [
+                 LinkORM(
+                     **{
+                         "artifact_id": self._host.id,
+                         "feature_id": feature.id,
+                         field_name: label.id,
+                     }
+                 )
+                 for (feature, label) in registry_features_labels
+             ]
+             # a link might already exist
+             try:
+                 save(links, ignore_conflicts=False)
+             except Exception:
+                 save(links, ignore_conflicts=True)
+             # now deal with links that were previously saved without a feature_id
+             saved_links = LinkORM.filter(
+                 **{
+                     "artifact_id": self._host.id,
+                     f"{field_name}__in": [
+                         l.id for _, l in registry_features_labels
+                     ],
+                 }
+             )
+             for link in saved_links.all():
+                 # TODO: also check for inconsistent features
+                 if link.feature_id is None:
+                     link.feature_id = [
+                         f.id
+                         for f, l in registry_features_labels
+                         if l.id == getattr(link, field_name)
+                     ][0]
+                     link.save()
      if feature_values:
          save(feature_values)
-         LinkORM = self._host.feature_values.through
+         if is_param:
+             LinkORM = self._host.param_values.through
+             valuefield_id = "paramvalue_id"
+         else:
+             LinkORM = self._host.feature_values.through
+             valuefield_id = "featurevalue_id"
          links = [
-             LinkORM(artifact_id=self._host.id, featurevalue_id=feature_value.id)
+             LinkORM(
+                 **{
+                     f"{self._host.__get_name_with_schema__().lower()}_id": self._host.id,
+                     valuefield_id: feature_value.id,
+                 }
+             )
              for feature_value in feature_values
          ]
          # a link might already exist, to avoid raising a unique constraint
@@ -498,6 +600,35 @@
          save(links, ignore_conflicts=True)


+ def add_values_features(
+     self,
+     values: dict[str, str | int | float | bool],
+     feature_field: FieldAttr = Feature.name,
+     str_as_ulabel: bool = True,
+ ) -> None:
+     """Annotate artifact with features & values.
+
+     Args:
+         values: A dictionary of keys (features) & values (labels, numbers, booleans).
+         feature_field: The field of a reference registry to map keys of the
+             dictionary.
+         str_as_ulabel: Whether to interpret string values as ulabels.
+     """
+     _add_values(self, values, feature_field, str_as_ulabel=str_as_ulabel)
+
+
+ def add_values_params(
+     self,
+     values: dict[str, str | int | float | bool],
+ ) -> None:
+     """Annotate artifact with features & values.
+
+     Args:
+         values: A dictionary of keys (features) & values (labels, numbers, booleans).
+     """
+     _add_values(self, values, Param.name, str_as_ulabel=False)
+
+
  def add_feature_set(self, feature_set: FeatureSet, slot: str) -> None:
      """Annotate artifact with a feature set.

@@ -686,15 +817,19 @@ def _add_from(self, data: HasFeatures, parents: bool = True):


  FeatureManager.__init__ = __init__
+ ParamManager.__init__ = __init__
  FeatureManager.__repr__ = __repr__
+ ParamManager.__repr__ = __repr__
  FeatureManager.__getitem__ = __getitem__
  FeatureManager.get_values = get_values
  FeatureManager._feature_set_by_slot = _feature_set_by_slot
  FeatureManager._accessor_by_registry = _accessor_by_registry
- FeatureManager.add_values = add_values
+ FeatureManager.add_values = add_values_features
  FeatureManager.add_feature_set = add_feature_set
  FeatureManager._add_set_from_df = _add_set_from_df
  FeatureManager._add_set_from_anndata = _add_set_from_anndata
  FeatureManager._add_set_from_mudata = _add_set_from_mudata
  FeatureManager._add_from = _add_from
  FeatureManager.filter = filter
+ ParamManager.add_values = add_values_params
+ ParamManager.get_values = get_values
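
`add_values` is split into a feature flavor (strings become `ULabel`s by default) and a param flavor (`str_as_ulabel=False`); params are only allowed on model-like artifacts and runs, features on dataset-like artifacts. A minimal sketch, assuming a connected instance; the param names, dtypes, and the `type="model"` constructor argument follow the changes above but are illustrative:

import lamindb as ln

# params must exist as Param records whose dtype matches the attached values
ln.Param(name="learning_rate", dtype="float").save()
ln.Param(name="quantization", dtype="str").save()

artifact = ln.Artifact("model.pt", type="model", description="fitted model")
artifact.save()
artifact.params.add_values({"learning_rate": 1e-3, "quantization": "int8"})
print(artifact.params.get_values())  # {'learning_rate': 0.001, 'quantization': 'int8'}
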
lamindb/core/_run_context.py CHANGED
@@ -211,26 +211,6 @@ def pretty_pypackages(dependencies: dict) -> str:
      return " ".join(deps_list)


- def parse_and_link_params(run: Run, params: dict) -> None:
-     param_values = []
-     for key, value in params.items():
-         param = Param.filter(name=key).one_or_none()
-         if param is None:
-             dtype = type(value).__name__
-             logger.warning(
-                 f"param '{key}' does not yet exist, creating it with dtype '{dtype}'"
-             )
-             param = Param(name=key, dtype=dtype).save()
-         param_value, _ = ParamValue.objects.get_or_create(param=param, value=value)
-         param_values.append(param_value)
-     if param_values:
-         links = [
-             RunParamValue(run_id=run.id, paramvalue_id=param_value.id)
-             for param_value in param_values
-         ]
-         RunParamValue.objects.bulk_create(links)
-
-
  class run_context:
      """Global run context."""

@@ -370,7 +350,7 @@ class run_context:
          # need to save in all cases
          run.save()
          if params is not None:
-             parse_and_link_params(run, params)
+             run.params.add_values(params)
          cls.run = run

          from ._track_environment import track_environment
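
Run-level params now go through the same `ParamManager`: `run_context` calls `run.params.add_values(params)` instead of the removed `parse_and_link_params`. A minimal sketch, assuming `ln.track()` accepts a `params` dict in this release (the keyword is inferred from the change above) and that matching `Param` records exist:

import lamindb as ln

ln.Param(name="downsample", dtype="bool").save()
ln.Param(name="learning_rate", dtype="float").save()

ln.track(params={"downsample": True, "learning_rate": 1e-3})
# ... run the tracked code; the values are linked to the current run via run.params ...
ln.finish()
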
lamindb-0.74.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lamindb
- Version: 0.73.1
+ Version: 0.74.0
  Summary: A data framework for biology.
  Author-email: Lamin Labs <open-source@lamin.ai>
  Requires-Python: >=3.8
@@ -9,8 +9,8 @@ Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
- Requires-Dist: lnschema_core==0.68.1
- Requires-Dist: lamindb_setup==0.73.1
+ Requires-Dist: lnschema_core==0.70.0
+ Requires-Dist: lamindb_setup==0.73.2
  Requires-Dist: lamin_utils==0.13.2
  Requires-Dist: lamin_cli==0.14.0
  Requires-Dist: rapidfuzz
@@ -25,7 +25,7 @@ Requires-Dist: graphviz
  Requires-Dist: psycopg2-binary
  Requires-Dist: psutil
  Requires-Dist: lamindb_setup[aws] ; extra == "aws"
- Requires-Dist: bionty==0.43.1 ; extra == "bionty"
+ Requires-Dist: bionty==0.44.0 ; extra == "bionty"
  Requires-Dist: pandas<2 ; extra == "dev"
  Requires-Dist: pre-commit ; extra == "dev"
  Requires-Dist: nox ; extra == "dev"
lamindb-0.74.0.dist-info/RECORD CHANGED
@@ -1,31 +1,31 @@
- lamindb/__init__.py,sha256=gdITn70-3HoUMrn3iMBej3cwEYPot50lGkLAkm4WgY4,2207
+ lamindb/__init__.py,sha256=GHl_dP7bu9sy6o-6q5GDDm41RPzoXQ9_CTwpOU4HOLU,2297
  lamindb/_annotate.py,sha256=CScyKVB3k_x5p0pihxO1UrTFBCvPcSSjBXvGY3YgTLs,44381
- lamindb/_artifact.py,sha256=uMUlXCGU1JYSnOYMxCwEe_1MslbcnZACP9SYL-a4zRk,40816
+ lamindb/_artifact.py,sha256=OpHmffSsmUMjF3P_1qjtf8m0QDuLWsAcp5pJSC7scdA,40454
  lamindb/_can_validate.py,sha256=s1q0lxplqnhytrVgArBTm05XKMMmpreK0ZlVCsd2jjk,14849
  lamindb/_collection.py,sha256=AGiztgM_OIet617aF86muL6lIv6GEDcmXorize6V62U,14657
- lamindb/_feature.py,sha256=Aawz4jCgjhuneZnmoty7JkZI3zjA_50vr_0dnFjSN3U,7348
+ lamindb/_feature.py,sha256=Z_Awtsj183SA1egZgNmJI_dH04rqXAj72od3H6n0uBk,7398
  lamindb/_feature_set.py,sha256=ZAFLyIiWC6yPOFTF7K03olE2Rl3KthsV-1ttqJySzqQ,8106
  lamindb/_filter.py,sha256=jEA1n1Hi6lEPaD0JXiTSh9K4joGzWU6Yxy0LCLTiOdY,1422
- lamindb/_finish.py,sha256=CP52X5Wr-96DdXVGqz_Ls4uum3OErk1Twt9e74Q4grk,9945
- lamindb/_from_values.py,sha256=qQTeIjA8RrW5Dpv6ZdzTeLU3WS1NEsftP0igesLJUqw,13649
+ lamindb/_finish.py,sha256=CqilMKpmkocX5jVnLDpqbQ1SwkEuV-RFdd1CtnopUBk,10202
+ lamindb/_from_values.py,sha256=p0DFK7oovz5DkzutUMQNybjCe-uFIxqtP91TuZ_Myu0,13901
  lamindb/_is_versioned.py,sha256=0PgRCmxEmYDcAjllLSOYZm132B1lW6QgmBBERhRyFt0,1341
  lamindb/_parents.py,sha256=kb5AHkntpTP5g2lk1aPL0FmIilYZYVZvj6stddFOI40,15075
  lamindb/_query_manager.py,sha256=qxwrBM8UmNQnUiNOfe84YN6NpfJBg2wQ2JqUfH6HHhc,4225
  lamindb/_query_set.py,sha256=JgdJY2pACCqjC4AHjFiMAdq_qjeuhEo2NUX68f6ODwk,11597
- lamindb/_registry.py,sha256=VRyWgDEYtuaV_krMzLDfg2uBwSCMCBPwelISGLOHuQw,18755
- lamindb/_run.py,sha256=b7A52M1On3QzFgIYyfQoz5Kk7V3wcu9p_Prq5bzd8v8,1838
+ lamindb/_registry.py,sha256=hoBkVl8D4yLQKMoOhVIDTNkO_401yCoVHI4MlNHDZgI,18756
+ lamindb/_run.py,sha256=xj3ER4F_yWvuNw1mr0XU-QuIPi5hBO7Ue0ygBgJQ6mc,1887
  lamindb/_save.py,sha256=It4XO448D8NG2cReo9Xy0lQBQdkMm_rCx_TGD1qZWWc,11864
- lamindb/_storage.py,sha256=VW8xq3VRv58-ciholvOdlcgvp_OIlLxx5GxLt-e2Irs,614
+ lamindb/_storage.py,sha256=8wRefV-Klu6VBVtwcwppvTojeXnxRThaBdFniA0AEIw,400
  lamindb/_transform.py,sha256=E9C7psuOnsNrUQpWRuGgEUM8_pc7YhDn7n4ieHzB4X0,3169
  lamindb/_ulabel.py,sha256=XDSdZBXX_ki5s1vOths3MjF2x5DPggBR_PV_KF4SGyg,1611
  lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
- lamindb/_view.py,sha256=GV1FrqIMmdooEkA-5zvcTWgV1nqx1sehi6WdWEaFpxM,2171
- lamindb/core/__init__.py,sha256=RkE5BrdOWNHQIz60z1npYoRim-Ot24-L7JTmLXd2WTU,1363
- lamindb/core/_data.py,sha256=xkjEvAhWeEEYRIZIi9Qbb0aEWiUPttq-qnCLx_2xy48,16172
- lamindb/core/_feature_manager.py,sha256=h7puH83LjaiJrBlmw3wSkUVO5c_LVfubCJKklPryP84,26205
+ lamindb/_view.py,sha256=NdWWF75rmkCHefqBViUnsHLu3OViJ1z_bEvfcedydsw,2330
+ lamindb/core/__init__.py,sha256=948oRi2FsLY1H4quaDHGSApwkM218I8scciUovJwsfs,1421
+ lamindb/core/_data.py,sha256=olA2-_CueezxmFB2757lGv5g3P8ystHkhGwJECloWcY,16230
+ lamindb/core/_feature_manager.py,sha256=jFp6Sd9SEX_bQEWmAf957U3T7UrGdsHg5qfQ5_odHp4,31367
  lamindb/core/_label_manager.py,sha256=d9r3tiNDFbpZQmxE1jmgdgnMYPOfTPpAKGKtRRLmVj8,9640
  lamindb/core/_mapped_collection.py,sha256=_OwFZh5SePDUD70XIK5kngv3we_Z5-YdGHNfpUSatSQ,19469
- lamindb/core/_run_context.py,sha256=aGdZv4IeYo7QpXl9H9U7RNwpgzqGeCDKPuoSdtP-CAE,18988
+ lamindb/core/_run_context.py,sha256=A-hJmj56p8ggw8ADYjJiN_hLftG_aAtE8Y5leaUW6Tk,18220
  lamindb/core/_settings.py,sha256=rW1KfEXfT56XErwcnSuQxaCytpOy1kJ-u7tVmkmNmxY,6131
  lamindb/core/_sync_git.py,sha256=5Fb82eG1WYyBlfMRCMlcUZNTVk70UU_SPnfvYEBL-A8,4124
  lamindb/core/_track_environment.py,sha256=xLZ6kgzxWS6MWZ5LQ_wkbJX99vmYOT8iQ-Fz4OHCgWw,754
@@ -49,7 +49,7 @@ lamindb/integrations/__init__.py,sha256=aH2PmO2m4-vwIifMYTB0Fyyr_gZWtVnV71jT0tVW
  lamindb/integrations/_vitessce.py,sha256=Qd39OuNsL0GXU7nMvEWLRRESFQ0mwGf_ePMJE_FDYm8,2639
  lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
  lamindb/setup/core/__init__.py,sha256=SevlVrc2AZWL3uALbE5sopxBnIZPWZ1IB0NBDudiAL8,167
- lamindb-0.73.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- lamindb-0.73.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
- lamindb-0.73.1.dist-info/METADATA,sha256=wJj1zRW8leb93Gj0ouXl-ujvE3UzjfNZ4i_oXQEZEvE,2735
- lamindb-0.73.1.dist-info/RECORD,,
+ lamindb-0.74.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ lamindb-0.74.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+ lamindb-0.74.0.dist-info/METADATA,sha256=2r0FMsKG8RAIaPZLLMO8FIUj13IeW87jDSIVHzOizIs,2735
+ lamindb-0.74.0.dist-info/RECORD,,