lamindb 0.64.1__py3-none-any.whl → 0.64.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -54,7 +54,7 @@ Modules & settings:
 
 """
 
-__version__ = "0.64.1"  # denote a release candidate for 0.1.0 with 0.1rc1
+__version__ = "0.64.2"  # denote a release candidate for 0.1.0 with 0.1rc1
 
 import os as _os
 
@@ -72,7 +72,7 @@ _py_version_warning("3.8", "3.11")
 _TESTING = _lamindb_setup._TESTING
 _INSTANCE_SETUP = _check_instance_setup(from_lamindb=True)
 # allow the user to call setup
-from . import setup  # noqa
+from . import setup
 
 
 class InstanceNotSetupError(Exception):
@@ -90,7 +90,7 @@ def __getattr__(name):
 if _INSTANCE_SETUP:
     del InstanceNotSetupError
     del __getattr__  # delete so that imports work out
-    from lnschema_core import (  # noqa
+    from lnschema_core import (
         Artifact,
         Dataset,
         Feature,
@@ -104,25 +104,25 @@ if _INSTANCE_SETUP:
 
     File = Artifact  # backward compat
     from . import _artifact  # noqa
-    from . import _dataset  # noqa
-    from . import _feature  # noqa
-    from . import _feature_set  # noqa
-    from . import _parents  # noqa
-    from . import _registry  # noqa
-    from . import _run  # noqa
-    from . import _storage  # noqa
-    from . import _transform  # noqa
-    from . import _ulabel  # noqa
-    from . import _validate  # noqa
-    from . import dev  # noqa
-    from ._delete import delete  # noqa
-    from ._save import save  # noqa
-    from ._view import view  # noqa
-    from .dev._run_context import run_context  # noqa
+    from . import _dataset
+    from . import _feature
+    from . import _feature_set
+    from . import _parents
+    from . import _registry
+    from . import _run
+    from . import _storage
+    from . import _transform
+    from . import _ulabel
+    from . import _validate
+    from . import dev
+    from ._delete import delete
+    from ._save import save
+    from ._view import view
+    from .dev._run_context import run_context
     from .dev._settings import settings
 
     # schema modules
     _reload_schema_modules(_lamindb_setup.settings.instance)
 
-    track = run_context._track  # noqa
+    track = run_context._track
     settings.__doc__ = """Global :class:`~lamindb.dev.Settings`."""
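
The context lines above sketch lamindb's guarded-import setup: a module-level `__getattr__` raises `InstanceNotSetupError` until an instance is configured, and once `_INSTANCE_SETUP` is true the hook is deleted and the registries are imported for real. A minimal, self-contained sketch of that pattern (simplified and hypothetical, not lamindb's actual code; the `registries` submodule and the error message are stand-ins):

    # mypkg/__init__.py -- PEP 562 module-level __getattr__ as an import guard
    _INSTANCE_SETUP = False  # stand-in for _check_instance_setup()


    class InstanceNotSetupError(Exception):
        pass


    def __getattr__(name):
        # only called for names not found in the module namespace
        raise InstanceNotSetupError(f"cannot access {name}: no instance is set up")


    if _INSTANCE_SETUP:
        del InstanceNotSetupError
        del __getattr__  # delete so that normal attribute lookup works again
        from .registries import Artifact, Dataset  # noqa: F401
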
lamindb/_artifact.py CHANGED
@@ -1,5 +1,5 @@
 from pathlib import Path, PurePath, PurePosixPath
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
 
 import anndata as ad
 import fsspec
@@ -36,7 +36,6 @@ from lamindb.dev.storage import (
     size_adata,
     write_to_file,
 )
-from lamindb.dev.storage._backed_access import AnnDataAccessor, BackedAccessor
 from lamindb.dev.storage.file import (
     auto_storage_key_from_artifact,
     auto_storage_key_from_artifact_uid,
@@ -54,6 +53,9 @@ from .dev._data import (
 )
 from .dev.storage.file import AUTO_KEY_PREFIX
 
+if TYPE_CHECKING:
+    from lamindb.dev.storage._backed_access import AnnDataAccessor, BackedAccessor
+
 
 def process_pathlike(
     filepath: UPath, skip_existence_check: bool = False
@@ -405,34 +407,34 @@ def get_artifact_kwargs_from_data(
     if check_path_in_storage:
         key_is_virtual = False
 
-    kwargs = dict(
-        suffix=suffix,
-        hash=hash,
-        hash_type=hash_type,
-        key=key,
-        size=size,
-        storage_id=storage.id,
+    kwargs = {
+        "suffix": suffix,
+        "hash": hash,
+        "hash_type": hash_type,
+        "key": key,
+        "size": size,
+        "storage_id": storage.id,
         # passing both the id and the object
         # to make them both available immediately
         # after object creation
-        n_objects=n_objects,
-        n_observations=None,  # to implement
-        run_id=run.id if run is not None else None,
-        run=run,
-        key_is_virtual=key_is_virtual,
-    )
+        "n_objects": n_objects,
+        "n_observations": None,  # to implement
+        "run_id": run.id if run is not None else None,
+        "run": run,
+        "key_is_virtual": key_is_virtual,
+    }
     if not isinstance(path, LocalPathClasses):
         local_filepath = None
         cloud_filepath = path
     else:
         local_filepath = path
         cloud_filepath = None
-    privates = dict(
-        local_filepath=local_filepath,
-        cloud_filepath=cloud_filepath,
-        memory_rep=memory_rep,
-        check_path_in_storage=check_path_in_storage,
-    )
+    privates = {
+        "local_filepath": local_filepath,
+        "cloud_filepath": cloud_filepath,
+        "memory_rep": memory_rep,
+        "check_path_in_storage": check_path_in_storage,
+    }
     return kwargs, privates
 
 
@@ -623,7 +625,7 @@ def from_df(
     is_new_version_of: Optional["Artifact"] = None,
     **kwargs,
 ) -> "Artifact":
-    """{}"""
+    """{}."""
     artifact = Artifact(
         data=df,
         key=key,
@@ -698,7 +700,7 @@ def from_anndata(
     is_new_version_of: Optional["Artifact"] = None,
     **kwargs,
 ) -> "Artifact":
-    """{}"""
+    """{}."""
     artifact = Artifact(
         data=adata,
         key=key,
@@ -721,7 +723,7 @@ def from_dir(
     *,
     run: Optional[Run] = None,
 ) -> List["Artifact"]:
-    """{}"""
+    """{}."""
     logger.warning(
         "this creates one artifact per file in the directory - you might simply call"
         " ln.Artifact(dir) to get one artifact for the entire directory"
@@ -788,7 +790,7 @@ def from_dir(
     non_unique_artifacts = {
         hash: artifact
         for hash, artifact in artifacts_dict.items()
-        if artifact.hash in seen_hashes or seen_hashes.add(artifact.hash)  # type: ignore # noqa
+        if artifact.hash in seen_hashes or seen_hashes.add(artifact.hash)  # type: ignore
     }
     display_non_unique = "\n ".join(
         f"{artifact}" for artifact in non_unique_artifacts
@@ -995,7 +997,7 @@ def _save_skip_storage(file, *args, **kwargs) -> None:
 @property  # type: ignore
 @doc_args(Artifact.path.__doc__)
 def path(self) -> Union[Path, UPath]:
-    """{}"""
+    """{}."""
     return filepath_from_artifact(self)
 
 
@@ -1008,7 +1010,7 @@ def view_tree(
     length_limit: int = 1000,
     max_files_per_dir_per_type: int = 7,
 ) -> None:
-    """{}"""
+    """{}."""
     from lamindb.dev._view_tree import view_tree as _view_tree
 
     _view_tree(
@@ -1056,6 +1058,6 @@ for name in METHOD_NAMES:
 # privates currently dealt with separately
 Artifact._delete_skip_storage = _delete_skip_storage
 Artifact._save_skip_storage = _save_skip_storage
-setattr(Artifact, "path", path)
+Artifact.path = path
 # this seems a Django-generated function
 delattr(Artifact, "get_visibility_display")
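
Both `_artifact.py` above and `_dataset.py` below move imports that are only needed for type annotations under a `typing.TYPE_CHECKING` guard, so the backed-access module is no longer imported at runtime. A minimal sketch of the pattern (the `heavy_backend` module and `load_backed` function are illustrative, not lamindb APIs):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # evaluated by static type checkers only, never executed at runtime
        from heavy_backend import BackedObject


    def load_backed(path: str) -> "BackedObject":
        # defer the costly import to call time; the string annotation
        # stays resolvable for type checkers via the guarded import above
        from heavy_backend import BackedObject

        return BackedObject(path)
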
lamindb/_dataset.py CHANGED
@@ -1,5 +1,5 @@
 from collections import defaultdict
-from typing import Dict, Iterable, List, Literal, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Dict, Iterable, List, Literal, Optional, Tuple, Union
 
 import anndata as ad
 import pandas as pd
@@ -11,7 +11,6 @@ from lnschema_core.types import AnnDataLike, DataLike, FieldAttr, VisibilityChoi
 from lamindb._utils import attach_func_to_class_method
 from lamindb.dev._data import _track_run_input
 from lamindb.dev._mapped_dataset import MappedDataset
-from lamindb.dev.storage._backed_access import AnnDataAccessor, BackedAccessor
 from lamindb.dev.versioning import get_ids_from_old_version, init_uid
 
 from . import _TESTING, Artifact, Run
@@ -25,6 +24,9 @@ from .dev._data import (
 )
 from .dev.hashing import hash_set
 
+if TYPE_CHECKING:
+    from lamindb.dev.storage._backed_access import AnnDataAccessor, BackedAccessor
+
 
 def __init__(
     dataset: Dataset,
@@ -69,7 +71,7 @@ def __init__(
         )
     if not len(kwargs) == 0:
         raise ValueError(
-            f"Only data, name, run, description, reference, reference_type, visibility can be passed, you passed: {kwargs}"  # noqa
+            f"Only data, name, run, description, reference, reference_type, visibility can be passed, you passed: {kwargs}"
         )
 
     if is_new_version_of is None:
@@ -198,7 +200,7 @@ def from_df(
     is_new_version_of: Optional["Artifact"] = None,
     **kwargs,
 ) -> "Dataset":
-    """{}"""
+    """{}."""
     feature_set = FeatureSet.from_df(df, field=field, **kwargs)
     if feature_set is not None:
         feature_sets = {"columns": feature_set}
@@ -233,7 +235,7 @@ def from_anndata(
     is_new_version_of: Optional["Artifact"] = None,
     **kwargs,
 ) -> "Dataset":
-    """{}"""
+    """{}."""
     if isinstance(adata, Artifact):
         assert not adata._state.adding
         assert adata.accessor == "AnnData"
@@ -259,7 +261,7 @@ def from_anndata(
 def from_artifacts(artifacts: Iterable[Artifact]) -> Tuple[str, Dict[str, str]]:
     # assert all artifacts are already saved
     logger.debug("check not saved")
-    saved = not any([artifact._state.adding for artifact in artifacts])
+    saved = not any(artifact._state.adding for artifact in artifacts)
     if not saved:
         raise ValueError("Not all artifacts are yet saved, please save them")
     # query all feature sets of artifacts
lamindb/_delete.py CHANGED
@@ -1,4 +1,4 @@
-from typing import List, Union, overload  # noqa
+from typing import List, Union, overload
 
 from lamin_utils import colors, logger
 from lnschema_core import Registry
lamindb/_feature.py CHANGED
@@ -36,7 +36,7 @@ def __init__(self, *args, **kwargs):
     # now we proceed with the user-facing constructor
     if len(args) != 0:
         raise ValueError("Only non-keyword args allowed")
-    type: Optional[Union[type, str]] = kwargs.pop("type") if "type" in kwargs else None
+    type: Optional[Union[type, str]] = kwargs.pop("type") if "type" in kwargs else None  # noqa: F823
     registries: Optional[List[Registry]] = (
         kwargs.pop("registries") if "registries" in kwargs else None
     )
@@ -87,7 +87,7 @@ def categoricals_from_df(df: "pd.DataFrame") -> Dict:
 @classmethod  # type:ignore
 @doc_args(Feature.from_df.__doc__)
 def from_df(cls, df: "pd.DataFrame") -> List["Feature"]:
-    """{}"""
+    """{}."""
     categoricals = categoricals_from_df(df)
 
     types = {}
@@ -146,7 +146,7 @@ def from_df(cls, df: "pd.DataFrame") -> List["Feature"]:
 
 @doc_args(Feature.save.__doc__)
 def save(self, *args, **kwargs) -> None:
-    """{}"""
+    """{}."""
     super(Feature, self).save(*args, **kwargs)
 
 
lamindb/_feature_set.py CHANGED
@@ -1,7 +1,6 @@
-from typing import Dict, Iterable, List, Optional, Type, Union
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Type, Union
 
 import numpy as np
-import pandas as pd
 from lamin_utils import logger
 from lamindb_setup.dev._docs import doc_args
 from lnschema_core import Feature, FeatureSet, Registry, ids
@@ -12,9 +11,13 @@ from lamindb.dev.hashing import hash_set
 
 from . import _TESTING
 from ._feature import convert_numpy_dtype_to_lamin_feature_type
-from ._query_set import QuerySet
 from ._registry import init_self_from_db
 
+if TYPE_CHECKING:
+    import pandas as pd
+
+    from ._query_set import QuerySet
+
 NUMBER_TYPE = "number"
 
 
@@ -75,14 +78,16 @@ def validate_features(features: List[Registry]) -> Registry:
         if len(features) == 0:
             raise ValueError("Provide list of features with at least one element")
     except TypeError:
-        raise ValueError("Please pass a ListLike of features, not a single feature")
+        raise ValueError(
+            "Please pass a ListLike of features, not a single feature"
+        ) from None
     if not hasattr(features, "__getitem__"):
         raise TypeError("features has to be list-like")
     if not isinstance(features[0], Registry):
         raise TypeError(
             "features has to store feature records! use .from_values() otherwise"
         )
-    feature_types = set([feature.__class__ for feature in features])
+    feature_types = {feature.__class__ for feature in features}
     if len(feature_types) > 1:
         raise TypeError("feature_set can only contain a single type")
     for feature in features:
@@ -130,7 +135,7 @@ def __init__(self, *args, **kwargs):
 
 @doc_args(FeatureSet.save.__doc__)
 def save(self, *args, **kwargs) -> None:
-    """{}"""
+    """{}."""
     super(FeatureSet, self).save(*args, **kwargs)
     if hasattr(self, "_features"):
         related_name, records = self._features
@@ -157,7 +162,7 @@ def from_values(
     name: Optional[str] = None,
     **kwargs,
 ) -> Optional["FeatureSet"]:
-    """{}"""
+    """{}."""
     if not isinstance(field, FieldAttr):
         raise TypeError(
             "Argument `field` must be a Registry field, e.g., `Feature.name`"
@@ -191,7 +196,7 @@ def from_df(
     name: Optional[str] = None,
     **kwargs,
 ) -> Optional["FeatureSet"]:
-    """{}"""
+    """{}."""
     registry = field.field.model
     validated = registry.validate(df.columns, field=field, **kwargs)
     if validated.sum() == 0:
@@ -219,7 +224,7 @@ def from_df(
 @property  # type: ignore
 @doc_args(FeatureSet.members.__doc__)
 def members(self) -> "QuerySet":
-    """{}"""
+    """{}."""
     if self._state.adding:
         # this should return a queryset and not a list...
         # need to fix this
@@ -255,5 +260,5 @@ if _TESTING:
 for name in METHOD_NAMES:
     attach_func_to_class_method(name, FeatureSet, globals())
 
-setattr(FeatureSet, "members", members)
-setattr(FeatureSet, "_get_related_name", _get_related_name)
+FeatureSet.members = members
+FeatureSet._get_related_name = _get_related_name
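
The `validate_features` hunk above reformats the re-raised `ValueError` and appends `from None`, which suppresses the original `TypeError` in the traceback so only the actionable message reaches the user. A small sketch of the effect (the `first_item` function is illustrative, not lamindb's API):

    def first_item(items):
        try:
            count = len(items)
        except TypeError:
            # `from None` hides the chained TypeError ("During handling of the
            # above exception ...") from the traceback shown to the caller
            raise ValueError("pass a list-like of items, not a single item") from None
        if count == 0:
            raise ValueError("pass at least one item")
        return items[0]
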
lamindb/_filter.py CHANGED
@@ -12,7 +12,7 @@ def filter(Registry: Type[Registry], **expressions) -> QuerySet:
     # visibility is set to 0 unless expressions contains id or uid equality
     if not ("id" in expressions or "uid" in expressions):
         visibility = "visibility"
-        if not any([e.startswith(visibility) for e in expressions]):
+        if not any(e.startswith(visibility) for e in expressions):
            expressions[
                visibility
            ] = VisibilityChoice.default.value  # default visibility
lamindb/_from_values.py CHANGED
@@ -53,7 +53,7 @@ def get_or_create_records(
         n_nonval = colors.yellow(f"{len(unmapped_values)} non-validated")
         logger.warning(
             f"{colors.red('did not create')} {name} record{s} for "
-            f"{n_nonval} {colors.italic(f'{field.field.name}{s}')}: {print_values}"  # noqa
+            f"{n_nonval} {colors.italic(f'{field.field.name}{s}')}: {print_values}"
         )
     if Registry.__module__.startswith("lnschema_bionty.") or Registry == ULabel:
         if isinstance(iterable, pd.Series):
@@ -74,8 +74,10 @@ def get_or_create_records(
 def get_existing_records(
     iterable_idx: pd.Index,
     field: StrField,
-    kwargs: Dict = {},
+    kwargs: Dict = None,
 ):
+    if kwargs is None:
+        kwargs = {}
     model = field.field.model
     condition: Dict = {} if len(kwargs) == 0 else kwargs.copy()
     # existing records matching is agnostic to the bionty source
@@ -194,7 +196,7 @@ def create_records_from_bionty(
         print_values = colors.purple(_print_values(names))
         msg_syn = (
             "created"
-            f" {colors.purple(f'{len(syn_mapper)} {model.__name__} record{s} from Bionty')}"  # noqa
+            f" {colors.purple(f'{len(syn_mapper)} {model.__name__} record{s} from Bionty')}"
             f" matching {colors.italic('synonyms')}: {print_values}"
         )
 
@@ -221,11 +223,9 @@ def create_records_from_bionty(
     if len(msg) > 0:
         logger.success(msg)
     logger.success(
-        (
-            "created"
-            f" {colors.purple(f'{len(validated)} {model.__name__} record{s} from Bionty')}"  # noqa
-            f" matching {colors.italic(f'{field.field.name}')}: {print_values}"  # noqa
-        )
+        "created"
+        f" {colors.purple(f'{len(validated)} {model.__name__} record{s} from Bionty')}"
+        f" matching {colors.italic(f'{field.field.name}')}: {print_values}"
     )
 
     # make sure that synonyms logging appears after the field logging
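
The `get_existing_records` hunk above replaces the mutable default `kwargs: Dict = {}` with `None` plus an explicit check, because a dict default is created once and shared across all calls. A minimal sketch of the bug and the fix (function names are illustrative):

    from typing import Dict, Optional


    def buggy(key: str, cache: Dict = {}) -> Dict:
        cache[key] = True  # mutates the single dict shared by every call
        return cache


    def fixed(key: str, cache: Optional[Dict] = None) -> Dict:
        if cache is None:
            cache = {}  # a fresh dict per call
        cache[key] = True
        return cache


    assert buggy("a") is buggy("b")      # same shared object, now {"a": True, "b": True}
    assert fixed("a") is not fixed("b")  # independent single-entry dicts
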
lamindb/_parents.py CHANGED
@@ -61,15 +61,15 @@ def view_parents(
     )
 
 
-def view_flow(data: Union[Artifact, Dataset], with_children: bool = True) -> None:
+def view_lineage(data: Union[Artifact, Dataset], with_children: bool = True) -> None:
     """Graph of data flow.
 
     Notes:
         For more info, see use cases: :doc:`docs:data-flow`.
 
     Examples:
-        >>> dataset.view_flow()
-        >>> artifact.view_flow()
+        >>> dataset.view_lineage()
+        >>> artifact.view_lineage()
     """
     import graphviz
 
@@ -276,16 +276,16 @@ def _record_label(record: Registry, field: Optional[str] = None):
     elif isinstance(record, Run):
         name = f'{record.transform.name.replace("&", "&amp;")}'
         return (
-            rf'<{TRANSFORM_EMOJIS.get(str(record.transform.type), "💫")} {name}<BR/><FONT COLOR="GREY" POINT-SIZE="10"'  # noqa
-            rf' FACE="Monospace">uid={record.transform.uid}<BR/>type={record.transform.type},'  # noqa
-            rf" user={record.created_by.name}<BR/>run_at={format_field_value(record.run_at)}</FONT>>"  # noqa
+            rf'<{TRANSFORM_EMOJIS.get(str(record.transform.type), "💫")} {name}<BR/><FONT COLOR="GREY" POINT-SIZE="10"'
+            rf' FACE="Monospace">uid={record.transform.uid}<BR/>type={record.transform.type},'
+            rf" user={record.created_by.name}<BR/>run_at={format_field_value(record.run_at)}</FONT>>"
         )
     elif isinstance(record, Transform):
         name = f'{record.name.replace("&", "&amp;")}'
         return (
-            rf'<{TRANSFORM_EMOJIS.get(str(record.type), "💫")} {name}<BR/><FONT COLOR="GREY" POINT-SIZE="10"'  # noqa
+            rf'<{TRANSFORM_EMOJIS.get(str(record.type), "💫")} {name}<BR/><FONT COLOR="GREY" POINT-SIZE="10"'
             rf' FACE="Monospace">uid={record.uid}<BR/>type={record.type},'
-            rf" user={record.created_by.name}<BR/>updated_at={format_field_value(record.updated_at)}</FONT>>"  # noqa
+            rf" user={record.created_by.name}<BR/>updated_at={format_field_value(record.updated_at)}</FONT>>"
         )
     else:
         name = record.__getattribute__(field)
lamindb/_query_manager.py CHANGED
@@ -59,9 +59,9 @@ class QueryManager(models.Manager):
         """
         self._track_run_input_manager()
         if field is None:
-            return [item for item in self.all()]
+            return list(self.all())
         else:
-            return [item for item in self.values_list(field, flat=True)]
+            return list(self.values_list(field, flat=True))
 
     def df(self, **kwargs):
         """Convert to DataFrame.
@@ -80,14 +80,14 @@ class QueryManager(models.Manager):
 
     @doc_args(Registry.search.__doc__)
     def search(self, string: str, **kwargs):
-        """{}"""
+        """{}."""
         from ._registry import _search
 
         return _search(cls=self.all(), string=string, **kwargs)
 
     @doc_args(Registry.lookup.__doc__)
     def lookup(self, field: Optional[StrField] = None, **kwargs) -> NamedTuple:
-        """{}"""
+        """{}."""
         from ._registry import _lookup
 
         return _lookup(cls=self.all(), field=field, **kwargs)
@@ -107,15 +107,13 @@ class QueryManager(models.Manager):
             return
 
 
-setattr(models.Manager, "list", QueryManager.list)
-setattr(models.Manager, "df", QueryManager.df)
-setattr(models.Manager, "search", QueryManager.search)
-setattr(models.Manager, "lookup", QueryManager.lookup)
-setattr(models.Manager, "__getitem__", QueryManager.__getitem__)
-setattr(
-    models.Manager, "_track_run_input_manager", QueryManager._track_run_input_manager
-)
+models.Manager.list = QueryManager.list
+models.Manager.df = QueryManager.df
+models.Manager.search = QueryManager.search
+models.Manager.lookup = QueryManager.lookup
+models.Manager.__getitem__ = QueryManager.__getitem__
+models.Manager._track_run_input_manager = QueryManager._track_run_input_manager
 # the two lines below would be easy if we could actually inherit; like this,
 # they're suboptimal
-setattr(models.Manager, "all_base_class", models.Manager.all)
-setattr(models.Manager, "all", QueryManager.all)
+models.Manager.all_base_class = models.Manager.all
+models.Manager.all = QueryManager.all
lamindb/_query_set.py CHANGED
@@ -52,7 +52,7 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
     Examples:
 
         >>> ln.save(ln.ULabel.from_values(["ULabel1", "ULabel2", "ULabel3"], field="name"))  # noqa
-        >>> ln.ULabel.filter().df()
+        >>> ln.ULabel.df()
         >>> label = ln.ULabel.filter(name="ULabel1").one()
         >>> label = ln.ULabel.filter(name="benchmark").one()
         >>> label.parents.add(label)
@@ -144,9 +144,9 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
         ['ULabel1', 'ULabel2', 'ULabel3']
         """
         if field is None:
-            return [item for item in self]
+            return list(self)
         else:
-            return [item for item in self.values_list(field, flat=True)]
+            return list(self.values_list(field, flat=True))
 
     def first(self) -> Optional[Registry]:
         """If non-empty, the first result in the query set, otherwise None.
@@ -195,14 +195,14 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
 
     @doc_args(Registry.search.__doc__)
     def search(self, string: str, **kwargs):
-        """{}"""
+        """{}."""
         from ._registry import _search
 
         return _search(cls=self, string=string, **kwargs)
 
     @doc_args(Registry.lookup.__doc__)
     def lookup(self, field: Optional[StrField] = None, **kwargs) -> NamedTuple:
-        """{}"""
+        """{}."""
         from ._registry import _lookup
 
         return _lookup(cls=self, field=field, **kwargs)
@@ -211,7 +211,7 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
     def validate(
         self, values: ListLike, field: Optional[Union[str, StrField]] = None, **kwargs
     ):
-        """{}"""
+        """{}."""
         from ._validate import _validate
 
         return _validate(cls=self, values=values, field=field, **kwargs)
@@ -220,7 +220,7 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
     def inspect(
         self, values: ListLike, field: Optional[Union[str, StrField]] = None, **kwargs
    ):
-        """{}"""
+        """{}."""
         from ._validate import _inspect
 
         return _inspect(cls=self, values=values, field=field, **kwargs)
@@ -229,7 +229,7 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
     def standardize(
         self, values: Iterable, field: Optional[Union[str, StrField]] = None, **kwargs
     ):
-        """{}"""
+        """{}."""
         from ._validate import _standardize
 
         return _standardize(cls=self, values=values, field=field, **kwargs)
@@ -242,7 +242,7 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
         length_limit: int = 1000,
         max_files_per_dir_per_type: int = 7,
     ) -> None:
-        """{}"""
+        """{}."""
         from .dev._view_tree import view_tree as _view_tree
 
         _view_tree(
@@ -254,13 +254,13 @@ class QuerySet(models.QuerySet, CanValidate, IsTree):
         )
 
 
-setattr(models.QuerySet, "df", QuerySet.df)
-setattr(models.QuerySet, "list", QuerySet.list)
-setattr(models.QuerySet, "first", QuerySet.first)
-setattr(models.QuerySet, "one", QuerySet.one)
-setattr(models.QuerySet, "one_or_none", QuerySet.one_or_none)
-setattr(models.QuerySet, "search", QuerySet.search)
-setattr(models.QuerySet, "lookup", QuerySet.lookup)
-setattr(models.QuerySet, "validate", QuerySet.validate)
-setattr(models.QuerySet, "inspect", QuerySet.inspect)
-setattr(models.QuerySet, "standardize", QuerySet.standardize)
+models.QuerySet.df = QuerySet.df
+models.QuerySet.list = QuerySet.list
+models.QuerySet.first = QuerySet.first
+models.QuerySet.one = QuerySet.one
+models.QuerySet.one_or_none = QuerySet.one_or_none
+models.QuerySet.search = QuerySet.search
+models.QuerySet.lookup = QuerySet.lookup
+models.QuerySet.validate = QuerySet.validate
+models.QuerySet.inspect = QuerySet.inspect
+models.QuerySet.standardize = QuerySet.standardize
+ models.QuerySet.standardize = QuerySet.standardize