lamindb 0.68.0__py3-none-any.whl → 0.68.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lamindb/__init__.py CHANGED
@@ -39,6 +39,7 @@ Functions:
  .. autosummary::
  :toctree: .

+ connect
  track
  view
  save
@@ -54,41 +55,23 @@ Modules & settings:

  """

- __version__ = "0.68.0" # denote a release candidate for 0.1.0 with 0.1rc1
+ __version__ = "0.68.2" # denote a release candidate for 0.1.0 with 0.1rc1

  import os as _os

  import lamindb_setup as _lamindb_setup
+ from lamindb_setup._check_setup import InstanceNotSetupError, _check_instance_setup
+ from lamindb_setup._connect_instance import connect
+ from lamindb_setup.core.upath import UPath

- # prints warning of python versions
- from lamin_utils import py_version_warning as _py_version_warning
- from lamindb_setup import _check_instance_setup
- from lamindb_setup._check_instance_setup import _INSTANCE_NOT_SETUP_WARNING
- from lamindb_setup._init_instance import reload_schema_modules as _reload_schema_modules
- from lamindb_setup.dev.upath import UPath
-
- _py_version_warning("3.8", "3.11")
-
- _TESTING = _lamindb_setup._TESTING
- _INSTANCE_SETUP = _check_instance_setup(from_lamindb=True)
-
- # allow the user to call setup
  from . import setup


- class InstanceNotSetupError(Exception):
- pass
-
-
  def __getattr__(name):
- raise InstanceNotSetupError(
- f"{_INSTANCE_NOT_SETUP_WARNING}If you used the CLI to init or load an instance,"
- " please RESTART the python session (in a notebook, restart kernel)"
- )
+ raise InstanceNotSetupError()


- # only import all other functionality if setup was successful
- if _INSTANCE_SETUP:
+ if _check_instance_setup(from_lamindb=True):
  del InstanceNotSetupError
  del __getattr__ # delete so that imports work out
  from lnschema_core.models import (
@@ -128,6 +111,10 @@ if _INSTANCE_SETUP:

  # schema modules
  if not _os.environ.get("LAMINDB_MULTI_INSTANCE") == "true":
+ from lamindb_setup._init_instance import (
+ reload_schema_modules as _reload_schema_modules,
+ )
+
  _reload_schema_modules(_lamindb_setup.settings.instance)

  track = run_context._track
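
The user-facing change in `__init__.py` is that `connect` from `lamindb_setup._connect_instance` is now re-exported at the top level, replacing the old pattern of initializing or loading an instance via the CLI and then restarting the Python session (which the removed `InstanceNotSetupError` message used to demand). A minimal usage sketch; the instance slug is hypothetical:

```python
import lamindb as ln

# connect to an existing LaminDB instance from within Python
# (replace with your own "account/instance" slug)
ln.connect("my-account/my-instance")
```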
lamindb/_artifact.py CHANGED
@@ -2,26 +2,33 @@ from pathlib import Path, PurePath, PurePosixPath
  from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union

  import fsspec
+ import lamindb_setup as ln_setup
  import pandas as pd
  from anndata import AnnData
  from lamin_utils import colors, logger
  from lamindb_setup import settings as setup_settings
  from lamindb_setup._init_instance import register_storage
- from lamindb_setup.dev import StorageSettings
- from lamindb_setup.dev._docs import doc_args
- from lamindb_setup.dev.upath import create_path, extract_suffix_from_path
+ from lamindb_setup.core import StorageSettings
+ from lamindb_setup.core._docs import doc_args
+ from lamindb_setup.core.hashing import b16_to_b64, hash_file, hash_md5s_from_dir
+ from lamindb_setup.core.types import UPathStr
+ from lamindb_setup.core.upath import (
+ create_path,
+ extract_suffix_from_path,
+ get_stat_dir_gs,
+ get_stat_dir_s3,
+ get_stat_file_cloud,
+ )
  from lnschema_core import Artifact, Run, Storage
  from lnschema_core.models import IsTree
  from lnschema_core.types import (
  DataLike,
- PathLike,
  VisibilityChoice,
  )

  from lamindb._utils import attach_func_to_class_method
  from lamindb.core._data import _track_run_input
  from lamindb.core._settings import settings
- from lamindb.core.hashing import b16_to_b64, hash_file, hash_md5s_from_dir
  from lamindb.core.storage import (
  LocalPathClasses,
  UPath,
@@ -38,7 +45,6 @@ from lamindb.core.storage.file import (
  )
  from lamindb.core.versioning import get_uid_from_old_version, init_uid

- from . import _TESTING
  from .core._data import (
  add_transform_to_kwargs,
  get_run,
@@ -106,7 +112,7 @@ def process_pathlike(

  def process_data(
  provisional_uid: str,
- data: Union[PathLike, DataLike],
+ data: Union[UPathStr, DataLike],
  format: Optional[str],
  key: Optional[str],
  default_storage: Storage,
@@ -115,7 +121,7 @@ def process_data(
  ) -> Tuple[Any, Union[Path, UPath], str, Storage, bool]:
  """Serialize a data object that's provided as file or in memory."""
  # if not overwritten, data gets stored in default storage
- if isinstance(data, (str, Path, UPath)): # PathLike, spelled out
+ if isinstance(data, (str, Path, UPath)): # UPathStr, spelled out
  access_token = (
  default_storage._access_token
  if hasattr(default_storage, "_access_token")
@@ -162,69 +168,6 @@ def process_data(
  return memory_rep, path, suffix, storage, use_existing_storage_key


- def get_stat_file_cloud(stat: Dict) -> Tuple[int, str, str]:
- size = stat["size"]
- # small files
- if "-" not in stat["ETag"]:
- # only store hash for non-multipart uploads
- # we can't rapidly validate multi-part uploaded files client-side
- # we can add more logic later down-the-road
- hash = b16_to_b64(stat["ETag"])
- hash_type = "md5"
- else:
- stripped_etag, suffix = stat["ETag"].split("-")
- suffix = suffix.strip('"')
- hash = f"{b16_to_b64(stripped_etag)}-{suffix}"
- hash_type = "md5-n" # this is the S3 chunk-hashing strategy
- return size, hash, hash_type
-
-
- def get_stat_dir_s3(path: UPath) -> Tuple[int, str, str, int]:
- import boto3
- from lamindb_setup.dev.upath import AWS_CREDENTIALS_PRESENT
-
- if not AWS_CREDENTIALS_PRESENT:
- # passing the following param directly to Session() doesn't
- # work, unfortunately: botocore_session=path.fs.session
- from botocore import UNSIGNED
- from botocore.config import Config
-
- config = Config(signature_version=UNSIGNED)
- s3 = boto3.session.Session().resource("s3", config=config)
- else:
- s3 = boto3.session.Session().resource("s3")
- bucket, key, _ = path.fs.split_path(path.as_posix())
- # assuming this here is the fastest way of querying for many objects
- objects = s3.Bucket(bucket).objects.filter(Prefix=key)
- size = sum([object.size for object in objects])
- md5s = [
- # skip leading and trailing quotes
- object.e_tag[1:-1]
- for object in objects
- ]
- n_objects = len(md5s)
- hash, hash_type = hash_md5s_from_dir(md5s)
- return size, hash, hash_type, n_objects
-
-
- def get_stat_dir_gs(path: UPath) -> Tuple[int, str, str, int]:
- import google.cloud.storage as gc_storage
-
- bucket, key, _ = path.fs.split_path(path.as_posix())
- # assuming this here is the fastest way of querying for many objects
- client = gc_storage.Client(
- credentials=path.fs.credentials.credentials, project=path.fs.project
- )
- objects = client.Bucket(bucket).list_blobs(prefix=key)
- sizes, md5s = [], []
- for object in objects:
- sizes.append(object.size)
- md5s.append(object.md5_hash)
- n_objects = len(md5s)
- hash, hash_type = hash_md5s_from_dir(md5s)
- return sum(sizes), hash, hash_type, n_objects
-
-

  def get_stat_or_artifact(
  path: UPath,
  suffix: str,
@@ -690,7 +633,7 @@ def from_anndata(
  @doc_args(Artifact.from_dir.__doc__)
  def from_dir(
  cls,
- path: PathLike,
+ path: UPathStr,
  key: Optional[str] = None,
  *,
  run: Optional[Run] = None,
@@ -791,7 +734,7 @@ def from_dir(
  # docstring handled through attach_func_to_class_method
  def replace(
  self,
- data: Union[PathLike, DataLike],
+ data: Union[UPathStr, DataLike],
  run: Optional[Run] = None,
  format: Optional[str] = None,
  ) -> None:
@@ -906,19 +849,21 @@ def delete(
  storage: Optional[bool] = None,
  using_key: Optional[str] = None,
  ) -> None:
- # by default, we only move artifacts into the trash
- if self.visibility > -1 and permanent is not True:
+ # by default, we only move artifacts into the trash (visibility = -1)
+ trash_visibility = VisibilityChoice.trash.value
+ if self.visibility > trash_visibility and permanent is not True:
  if storage is not None:
  logger.warning("moving artifact to trash, storage arg is ignored")
  # move to trash
- self.visibility = -1
+ self.visibility = trash_visibility
  self.save()
- logger.warning("moved artifact to trash (visibility = -1)")
+ logger.warning(f"moved artifact to trash (visibility = {trash_visibility})")
  return

  # if the artifact is already in the trash
  # permanent delete skips the trash
  if permanent is None:
+ # ask for confirmation of permanent delete
  response = input(
  "Artifact record is already in trash! Are you sure you want to permanently"
  " delete it? (y/n) You can't undo this action."
@@ -928,13 +873,14 @@ def delete(
  # this second option doesn't feel very intuitive
  delete_record = permanent

- if delete_record:
+ if delete_record is True:
  # need to grab file path before deletion
  filepath = filepath_from_artifact(self, using_key)
  # only delete in storage if DB delete is successful
  # DB delete might error because of a foreign key constraint violated etc.
  self._delete_skip_storage()
  if self.key is None or self.key_is_virtual:
+ # always delete in storage if the key is virtual
  delete_in_storage = True
  if storage is not None:
  logger.warning("storage arg is ignored if storage key is non-semantic")
@@ -951,9 +897,10 @@ def delete(
  delete_in_storage = storage
  # we don't yet have logic to bring back the deleted metadata record
  # in case storage deletion fails - this is important for ACID down the road
- if delete_in_storage:
- delete_storage(filepath)
- logger.success(f"deleted {colors.yellow(f'{filepath}')}")
+ if delete_in_storage is True:
+ delete_msg = delete_storage(filepath)
+ if delete_msg != "did-not-delete":
+ logger.success(f"deleted {colors.yellow(f'{filepath}')}")


  def _delete_skip_storage(artifact, *args, **kwargs) -> None:
@@ -1036,7 +983,7 @@ METHOD_NAMES = [
  "view_tree",
  ]

- if _TESTING:
+ if ln_setup._TESTING:
  from inspect import signature

  SIGS = {
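
The `delete()` hunks replace the hard-coded `-1` with `VisibilityChoice.trash.value` and only log a storage deletion when `delete_storage` did not return `"did-not-delete"`. A sketch of the two deletion paths this code implements; the artifact lookup is hypothetical:

```python
import lamindb as ln

artifact = ln.Artifact.filter(key="iris.csv").first()  # hypothetical key
if artifact is not None:
    # default: only flag the record as trashed (visibility = VisibilityChoice.trash)
    artifact.delete()
    # permanent: remove the DB record and, for virtual keys, the file in storage
    artifact.delete(permanent=True)
```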
lamindb/_collection.py CHANGED
@@ -12,10 +12,12 @@ from typing import (
  )

  import anndata as ad
+ import lamindb_setup as ln_setup
  import pandas as pd
  from anndata import AnnData
  from lamin_utils import logger
- from lamindb_setup.dev._docs import doc_args
+ from lamindb_setup.core._docs import doc_args
+ from lamindb_setup.core.hashing import hash_set
  from lnschema_core.models import Collection, CollectionArtifact, FeatureSet
  from lnschema_core.types import DataLike, VisibilityChoice

@@ -24,7 +26,7 @@ from lamindb.core._data import _track_run_input
  from lamindb.core._mapped_collection import MappedCollection
  from lamindb.core.versioning import get_uid_from_old_version, init_uid

- from . import _TESTING, Artifact, Run
+ from . import Artifact, Run
  from ._artifact import data_is_anndata
  from ._query_set import QuerySet
  from ._registry import init_self_from_db
@@ -34,7 +36,6 @@ from .core._data import (
  save_feature_set_links,
  save_feature_sets,
  )
- from .core.hashing import hash_set

  if TYPE_CHECKING:
  from lamindb.core.storage._backed_access import AnnDataAccessor, BackedAccessor
@@ -408,10 +409,11 @@ def load(
  # docstring handled through attach_func_to_class_method
  def delete(self, permanent: Optional[bool] = None) -> None:
  # change visibility to trash
- if self.visibility > VisibilityChoice.trash.value and permanent is not True:
- self.visibility = VisibilityChoice.trash.value
+ trash_visibility = VisibilityChoice.trash.value
+ if self.visibility > trash_visibility and permanent is not True:
+ self.visibility = trash_visibility
  self.save()
- logger.warning("moved collection to trash.")
+ logger.warning(f"moved collection to trash (visibility = {trash_visibility})")
  return

  # permanent delete
@@ -481,7 +483,7 @@ METHOD_NAMES = [
  "restore",
  ]

- if _TESTING:
+ if ln_setup._TESTING:
  from inspect import signature

  SIGS = {
lamindb/_feature.py CHANGED
@@ -1,7 +1,8 @@
  from typing import Dict, List, Optional, Union

+ import lamindb_setup as ln_setup
  import pandas as pd
- from lamindb_setup.dev._docs import doc_args
+ from lamindb_setup.core._docs import doc_args
  from lnschema_core.models import Feature, Registry
  from lnschema_core.types import FieldAttr
  from pandas.api.types import CategoricalDtype, is_string_dtype
@@ -9,7 +10,6 @@ from pandas.api.types import CategoricalDtype, is_string_dtype
  from lamindb._utils import attach_func_to_class_method
  from lamindb.core._settings import settings

- from . import _TESTING
  from ._query_set import RecordsList

  FEATURE_TYPES = {
@@ -181,7 +181,7 @@ METHOD_NAMES = [
  "save",
  ]

- if _TESTING:
+ if ln_setup._TESTING:
  from inspect import signature

  SIGS = {
lamindb/_feature_set.py CHANGED
@@ -1,15 +1,15 @@
  from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Type, Union

+ import lamindb_setup as ln_setup
  import numpy as np
  from lamin_utils import logger
- from lamindb_setup.dev._docs import doc_args
+ from lamindb_setup.core._docs import doc_args
+ from lamindb_setup.core.hashing import hash_set
  from lnschema_core import Feature, FeatureSet, Registry, ids
  from lnschema_core.types import FieldAttr, ListLike

  from lamindb._utils import attach_func_to_class_method
- from lamindb.core.hashing import hash_set

- from . import _TESTING
  from ._feature import convert_numpy_dtype_to_lamin_feature_type
  from ._registry import init_self_from_db

@@ -250,7 +250,7 @@ METHOD_NAMES = [
  "save",
  ]

- if _TESTING:
+ if ln_setup._TESTING:
  from inspect import signature

  SIGS = {
lamindb/_is_versioned.py CHANGED
@@ -1,12 +1,12 @@
  from typing import Optional

+ import lamindb_setup as ln_setup
  from lamin_utils import logger
- from lamindb_setup.dev.upath import UPath
+ from lamindb_setup.core.upath import UPath
  from lnschema_core.models import IsVersioned

  from lamindb._utils import attach_func_to_class_method

- from . import _TESTING
  from .core.versioning import get_new_path_from_uid, get_uid_from_old_version


@@ -33,7 +33,7 @@ METHOD_NAMES = [
  "add_to_version_family",
  ]

- if _TESTING: # type: ignore
+ if ln_setup._TESTING: # type: ignore
  from inspect import signature

  SIGS = {name: signature(getattr(IsVersioned, name)) for name in METHOD_NAMES}
lamindb/_parents.py CHANGED
@@ -1,13 +1,13 @@
  import builtins
  from typing import List, Optional, Set, Union

+ import lamindb_setup as ln_setup
  from lamin_utils import logger
  from lnschema_core import Artifact, Collection, Registry, Run, Transform
  from lnschema_core.models import HasParents, format_field_value

  from lamindb._utils import attach_func_to_class_method

- from . import _TESTING
  from ._registry import StrField, get_default_str_field

  LAMIN_GREEN_LIGHTER = "#10b981"
@@ -427,7 +427,7 @@ METHOD_NAMES = [
  "view_parents",
  ]

- if _TESTING: # type: ignore
+ if ln_setup._TESTING: # type: ignore
  from inspect import signature

  SIGS = {
lamindb/_query_manager.py CHANGED
@@ -2,7 +2,7 @@ from typing import NamedTuple, Optional

  from django.db import models
  from lamin_utils import logger
- from lamindb_setup.dev._docs import doc_args
+ from lamindb_setup.core._docs import doc_args
  from lnschema_core.models import Registry
  from lnschema_core.types import StrField

lamindb/_query_set.py CHANGED
@@ -3,7 +3,7 @@ from typing import Dict, Iterable, List, NamedTuple, Optional, Union

  import pandas as pd
  from django.db import models
- from lamindb_setup.dev._docs import doc_args
+ from lamindb_setup.core._docs import doc_args
  from lnschema_core.models import (
  Artifact,
  CanValidate,
lamindb/_registry.py CHANGED
@@ -11,18 +11,17 @@ from django.db.models import Manager, QuerySet
  from lamin_utils import logger
  from lamin_utils._lookup import Lookup
  from lamin_utils._search import search as base_search
+ from lamindb_setup._connect_instance import get_owner_name_from_identifier
  from lamindb_setup._init_instance import InstanceSettings
- from lamindb_setup._load_instance import get_owner_name_from_identifier
- from lamindb_setup.dev._docs import doc_args
- from lamindb_setup.dev._hub_core import load_instance
- from lamindb_setup.dev._settings_storage import StorageSettings
+ from lamindb_setup.core._docs import doc_args
+ from lamindb_setup.core._hub_core import connect_instance
+ from lamindb_setup.core._settings_storage import StorageSettings
  from lnschema_core import Registry
  from lnschema_core.types import ListLike, StrField

  from lamindb._utils import attach_func_to_class_method
  from lamindb.core._settings import settings

- from . import _TESTING
  from ._from_values import get_or_create_records

  IPYTHON = getattr(builtins, "__IPYTHON__", False)
@@ -356,16 +355,16 @@ def using(
  instance: str,
  ) -> "QuerySet":
  """{}."""
- from lamindb_setup._load_instance import (
+ from lamindb_setup._connect_instance import (
  load_instance_settings,
  update_db_using_local,
  )
- from lamindb_setup.dev._settings_store import instance_settings_file
+ from lamindb_setup.core._settings_store import instance_settings_file

  owner, name = get_owner_name_from_identifier(instance)
  settings_file = instance_settings_file(name, owner)
  if not settings_file.exists():
- load_result = load_instance(owner=owner, name=name)
+ load_result = connect_instance(owner=owner, name=name)
  if isinstance(load_result, str):
  raise RuntimeError(
  f"Failed to load instance {instance}, please check your permission!"
@@ -413,18 +412,21 @@ def update_fk_to_default_db(
  setattr(records, f"{fk}_id", fk_record_default.id)


+ FKBULK = [
+ "organism",
+ "public_source",
+ "initial_version",
+ "latest_report", # Transform
+ "source_code", # Transform
+ "report", # Run
+ "file", # Collection
+ ]
+
+
  def transfer_fk_to_default_db_bulk(
  records: Union[List, QuerySet], using_key: Optional[str]
  ):
- for fk in [
- "organism",
- "public_source",
- "initial_version",
- "latest_report", # Transform
- "source_code", # Transform
- "report", # Run
- "file", # Collection
- ]:
+ for fk in FKBULK:
  update_fk_to_default_db(records, fk, using_key)


@@ -433,6 +435,7 @@ def transfer_to_default_db(
  using_key: Optional[str],
  save: bool = False,
  mute: bool = False,
+ transfer_fk: bool = True,
  ) -> Optional[Registry]:
  db = record._state.db
  if db is not None and db != "default" and using_key is None:
@@ -459,14 +462,24 @@ def transfer_to_default_db(
  else:
  logger.warning(WARNING_RUN_TRANSFORM)
  record.run_id = None
- if hasattr(record, "transform_id"):
+ if hasattr(record, "transform_id") and record._meta.model_name != "run":
  record.transform = None
  if run_context.transform is not None:
  record.transform_id = run_context.transform.id
  else:
  record.transform_id = None
- update_fk_to_default_db(record, "storage", using_key)
- update_fk_to_default_db(record, "artifact", using_key)
+ # transfer other foreign key fields
+ fk_fields = [
+ i.name
+ for i in record._meta.fields
+ if i.get_internal_type() == "ForeignKey"
+ if i.name not in {"created_by", "run", "transform"}
+ ]
+ if not transfer_fk:
+ # don't transfer fk fields that are already bulk transferred
+ fk_fields = [fk for fk in fk_fields if fk not in FKBULK]
+ for fk in fk_fields:
+ update_fk_to_default_db(record, fk, using_key)
  record.id = None
  record._state.db = "default"
  if save:
@@ -485,15 +498,19 @@ def save(self, *args, **kwargs) -> None:
  if self.__class__.__name__ == "Collection" and self.id is not None:
  # when creating a new collection without being able to access artifacts
  artifacts = self.artifacts.list()
+ # transfer of the record to the default db with fk fields
  result = transfer_to_default_db(self, using_key)
  if result is not None:
  init_self_from_db(self, result)
  else:
  # here, we can't use the parents argument
+ # parents are not saved for the self record
  save_kwargs = kwargs.copy()
  if "parents" in save_kwargs:
  save_kwargs.pop("parents")
  super(Registry, self).save(*args, **save_kwargs)
+ # perform transfer of many-to-many fields
+ # only supported for Artifact and Collection records
  if db is not None and db != "default" and using_key is None:
  if self.__class__.__name__ == "Collection":
  if len(artifacts) > 0:
@@ -507,6 +524,7 @@ def save(self, *args, **kwargs) -> None:
  self_on_db = copy(self)
  self_on_db._state.db = db
  self_on_db.pk = pk_on_db
+ # by default, transfer parents of the labels to maintain ontological hierarchy
  add_from_kwargs = {"parents": kwargs.get("parents", True)}
  logger.info("transfer features")
  self.features._add_from(self_on_db, **add_from_kwargs)
@@ -523,7 +541,7 @@ METHOD_NAMES = [
  "using",
  ]

- if _TESTING: # type: ignore
+ if ln_setup._TESTING: # type: ignore
  from inspect import signature

  SIGS = {
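
The `_registry.py` changes track lamindb_setup's rename of `load_instance` to `connect_instance` and generalize cross-instance transfer: instead of copying only the `storage` and `artifact` foreign keys, `transfer_to_default_db` now walks `record._meta.fields` for all `ForeignKey` fields (excluding `created_by`, `run`, `transform`, and, when `transfer_fk=False`, the bulk-handled `FKBULK` set). From the user side this is exercised via `.using()` followed by `.save()`; a sketch with a hypothetical instance slug:

```python
import lamindb as ln

# query a registry in another instance without switching the session
artifacts = ln.Artifact.using("some-account/some-instance").filter(suffix=".h5ad")

# saving a record fetched from another instance transfers it, its foreign keys,
# and its features/labels into the default instance
artifact = artifacts.first()
if artifact is not None:
    artifact.save()
```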
lamindb/_save.py CHANGED
@@ -10,7 +10,7 @@ import lamindb_setup
  from django.db import transaction
  from django.utils.functional import partition
  from lamin_utils import logger
- from lamindb_setup.dev.upath import print_hook
+ from lamindb_setup.core.upath import print_hook
  from lnschema_core.models import Artifact, Registry

  from lamindb.core._settings import settings
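
Several of these hunks, here and in the surrounding files, are the same mechanical change: the `lamindb_setup.dev` subpackage is now `lamindb_setup.core`, and the hashing helpers moved from `lamindb.core.hashing` to `lamindb_setup.core.hashing`. For downstream code that imported from the old paths, the adjustment looks like this (a sketch, mirroring the imports in this diff):

```python
# before (as used by lamindb 0.68.0)
# from lamindb_setup.dev.upath import UPath
# from lamindb_setup.dev._docs import doc_args
# from lamindb.core.hashing import hash_set

# after (as used by lamindb 0.68.2)
from lamindb_setup.core.upath import UPath
from lamindb_setup.core._docs import doc_args
from lamindb_setup.core.hashing import hash_set
```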
lamindb/_storage.py CHANGED
@@ -1,5 +1,5 @@
- from lamindb_setup.dev._docs import doc_args
- from lamindb_setup.dev.upath import UPath, create_path
+ from lamindb_setup.core._docs import doc_args
+ from lamindb_setup.core.upath import UPath, create_path
  from lnschema_core import Storage

lamindb/_transform.py CHANGED
@@ -1,13 +1,11 @@
- from typing import TYPE_CHECKING, Optional
+ from typing import Optional

- from lnschema_core.models import TRANSFORM_TYPE_DEFAULT, Artifact, Run, Transform
+ from lnschema_core.models import Artifact, Run, Transform
+ from lnschema_core.types import TransformType

  from ._run import delete_run_artifacts
  from .core.versioning import get_uid_from_old_version, init_uid

- if TYPE_CHECKING:
- from lnschema_core.types import TransformType
-

  def __init__(transform: Transform, *args, **kwargs):
  if len(args) == len(transform._meta.concrete_fields):
@@ -23,7 +21,7 @@ def __init__(transform: Transform, *args, **kwargs):
  (kwargs.pop("initial_version_id") if "initial_version_id" in kwargs else None)
  version: Optional[str] = kwargs.pop("version") if "version" in kwargs else None
  type: Optional[TransformType] = (
- kwargs.pop("type") if "type" in kwargs else TRANSFORM_TYPE_DEFAULT
+ kwargs.pop("type") if "type" in kwargs else TransformType.pipeline
  )
  reference: Optional[str] = (
  kwargs.pop("reference") if "reference" in kwargs else None
@@ -85,5 +83,11 @@ def delete(self) -> None:
  super(Transform, self).delete()


+ @property # type: ignore
+ def latest_run(self) -> Run:
+ return self.runs.order_by("-run_at").first()
+
+
  Transform.__init__ = __init__
  Transform.delete = delete
+ Transform.latest_run = latest_run
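
Besides switching the default type from `TRANSFORM_TYPE_DEFAULT` to `TransformType.pipeline`, this file adds a `latest_run` property that returns the most recent run, ordered by `run_at`. A small usage sketch; the transform name is hypothetical:

```python
import lamindb as ln

transform = ln.Transform.filter(name="my-pipeline").first()  # hypothetical name
if transform is not None:
    run = transform.latest_run  # newest Run, i.e. transform.runs ordered by -run_at
```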
lamindb/_ulabel.py CHANGED
@@ -1,12 +1,12 @@
  from typing import List, Optional

- from lamindb_setup.dev._docs import doc_args
+ import lamindb_setup as ln_setup
+ from lamindb_setup.core._docs import doc_args
  from lnschema_core import ULabel
  from lnschema_core.types import ListLike

  from lamindb._utils import attach_func_to_class_method

- from . import _TESTING
  from ._from_values import get_or_create_records


@@ -55,7 +55,7 @@ METHOD_NAMES = [
  "from_values",
  ]

- if _TESTING:
+ if ln_setup._TESTING:
  from inspect import signature

  SIGS = {