lamindb 0.50.0__py3-none-any.whl → 0.50.1__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- lamindb/__init__.py +1 -1
- lamindb/_context.py +16 -16
- lamindb/_delete.py +2 -2
- lamindb/_feature_set.py +4 -4
- lamindb/_file.py +10 -9
- lamindb/_registry.py +14 -7
- lamindb/_save.py +7 -11
- lamindb/_synonym.py +3 -3
- lamindb/_validate.py +5 -10
- lamindb/dev/__init__.py +2 -0
- lamindb/dev/_data.py +13 -12
- lamindb/dev/_view_parents.py +19 -5
- lamindb/dev/datasets/__init__.py +2 -0
- lamindb/dev/datasets/_core.py +22 -0
- {lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/METADATA +5 -5
- {lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/RECORD +19 -19
- {lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/LICENSE +0 -0
- {lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/WHEEL +0 -0
- {lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/entry_points.txt +0 -0
lamindb/__init__.py
CHANGED
lamindb/_context.py
CHANGED
@@ -15,16 +15,16 @@ from lnschema_core.types import TransformType
 is_run_from_ipython = getattr(builtins, "__IPYTHON__", False)
 
 msg_path_failed = (
-    "
+    "failed to infer notebook path.\nfix: either track manually via"
     " `ln.track(ln.Transform(name='My notebook'))` or pass"
-    " `notebook_path` to ln.track()
+    " `notebook_path` to ln.track()"
 )
 
 msg_manual_init = (
-    "\n(1)
-    "\n(2)
+    "\n(1) save your notebook!"
+    "\n(2) attach metadata to the notebook by running the CLI:\n"
     "lamin track {notebook_path}"
-    "\n(3)
+    "\n(3) reload or re-open your notebook"
 )
 
 
@@ -204,20 +204,20 @@ class run_context:
         except Exception as e:
             if isinstance(e, ImportError):
                 logger.info(
-                    "
-                    "notebook!\
+                    "it looks like you are running ln.track() from a "
+                    "notebook!\nplease install nbproject: pip install nbproject"
                 )
             elif isinstance(e, UpdateNbWithNonInteractiveEditorError):
                 raise e
             elif isinstance(e, (NotebookNotSavedError, NoTitleError)):
                 raise e
             else:
-                logger.warning(f"
+                logger.warning(f"automatic tracking of notebook failed: {e}")
                 is_tracked_notebook = False
 
         if not is_tracked_notebook:
             logger.warning(
-                "
+                "no automatic metadata detection, consider passing transform"
             )
             return None
         else:
@@ -227,7 +227,7 @@ class run_context:
         transform_exists = Transform.filter(id=transform.id).first()
         if transform_exists is None:
             transform.save()
-            logger.
+            logger.save(f"saved: {transform}")
            transform_exists = transform
         else:
             logger.success(f"loaded: {transform_exists}")
@@ -253,17 +253,17 @@ class run_context:
         if run is None:  # create new run
             run = ln.Run(transform=cls.transform)
             run.save()
-            logger.
+            logger.save(f"saved: {run}")
         cls.run = run
 
         # at this point, we have a transform can display its parents if there are any
         parents = cls.transform.parents.all() if cls.transform is not None else []
         if len(parents) > 0:
             if len(parents) == 1:
-                logger.info(f"parent transform: {parents[0]}")
+                logger.info(f" parent transform: {parents[0]}")
             else:
                 parents_formatted = "\n - ".join([f"{parent}" for parent in parents])
-                logger.info(f"parent transforms:\n - {parents_formatted}")
+                logger.info(f" parent transforms:\n - {parents_formatted}")
 
         # only for newly intialized notebooks
         if hasattr(cls, "_notebook_meta"):
@@ -405,9 +405,9 @@ class run_context:
                 type=TransformType.notebook,
             )
             transform.save()
-            logger.
+            logger.save(f"saved: {transform}")
         else:
-            logger.
+            logger.success(f"loaded: {transform}")
             if transform.name != title or transform.short_name != filestem:
                 response = input(
                     "Updated notebook name and/or title: Do you want to assign a"
@@ -427,7 +427,7 @@ class run_context:
                 transform.short_name = filestem
                 transform.save()
                 if response == "y":
-                    logger.
+                    logger.save(f"saved: {transform}")
                 else:
                     logger.success(f"updated: {transform}")
 
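Beyond completing the truncated log strings, the substantive change in this file is that several `logger.success`/`logger.info` calls move to the new `logger.save` level. A minimal sketch of the tracking flow these messages belong to, assuming a lamindb 0.50.x environment (the message strings are taken from the diff)::

    import lamindb as ln

    # inside a saved notebook: logs "saved: <Transform>" via logger.save()
    # on the first run and "loaded: <Transform>" on subsequent runs
    ln.track()

    # outside a notebook, track manually, as msg_path_failed now suggests:
    ln.track(ln.Transform(name='My notebook'))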
lamindb/_delete.py
CHANGED
@@ -55,11 +55,11 @@ def delete( # type: ignore
     Label(id=CcFPLmpq, name=Label1, updated_at=2023-07-19 18:28:16, created_by_id=kmvZDIX9)] # noqa
     >>> queryset.delete()
     """
-    logger.warning("
+    logger.warning("for efficient bulk delete, use `queryset.delete` instead")
     if isinstance(records, list):
         records = records
     elif isinstance(records, Registry):
         records = [records]
     for record in records:
         record.delete()
-        logger.success(f"
+        logger.success(f"deleted {colors.yellow(f'{record}')}")
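The completed warning now names the faster alternative explicitly. A hedged sketch of both paths, mirroring the docstring above::

    import lamindb as ln

    # record-by-record delete; emits the new warning
    # "for efficient bulk delete, use `queryset.delete` instead"
    ln.delete(ln.Label.filter(name="Label1").first())

    # the recommended bulk path:
    queryset = ln.Label.filter(name__icontains="Label")
    queryset.delete()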
lamindb/_feature_set.py
CHANGED
@@ -62,7 +62,7 @@ def get_validated_features(features: List[Registry], field: Field) -> List[Regis
     if non_validated_features:
         non_validated_features_display = ",".join(non_validated_features)
         logger.warning(
-            f"ignoring
+            f"ignoring non-validated features: {non_validated_features_display}"
         )
     return validated_features
 
@@ -96,7 +96,7 @@ def __init__(self, *args, **kwargs):
     features_hash = hash_set({feature.id for feature in features})
     feature_set = FeatureSet.filter(hash=features_hash).one_or_none()
     if feature_set is not None:
-        logger.
+        logger.success(f"loaded: {feature_set}")
         init_self_from_db(self, feature_set)
         return None
     else:
@@ -181,7 +181,7 @@ def from_values(
     features_hash = hash_set(set(validated_feature_ids))
     feature_set = FeatureSet.filter(hash=features_hash).one_or_none()
     if feature_set is not None:
-        logger.
+        logger.success(f"loaded {feature_set}")
     else:
         if type is not None:
             type_str = type.__name__ if not isinstance(type, str) else type
@@ -213,7 +213,7 @@ def from_df(
     if validated_features:
         feature_set = FeatureSet(validated_features, name=name)
     else:
-        logger.warning("no validated features,
+        logger.warning("no validated features, skip creating feature set")
         feature_set = None
         # raise ValidationError("Dataframe columns contain no validated feature names")
     return feature_set
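As the last hunk shows, `FeatureSet.from_df` logs "no validated features, skip creating feature set" and returns `None` rather than raising. A sketch, assuming `FeatureSet` is exposed at the top level as in lamindb 0.50.x (the column name is hypothetical)::

    import pandas as pd
    import lamindb as ln

    df = pd.DataFrame({"unregistered_column": [1, 2, 3]})  # hypothetical column

    # none of the columns validate against Feature, so this warns and returns None
    feature_set = ln.FeatureSet.from_df(df)
    assert feature_set is None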
lamindb/_file.py
CHANGED
@@ -171,9 +171,9 @@ def get_hash(
     result = File.filter(hash=hash).list()
     if len(result) > 0:
         if settings.upon_file_create_if_hash_exists == "error":
-            msg = f"
+            msg = f"file with same hash exists: {result[0]}"
             hint = (
-                "💡
+                "💡 you can make this error a warning:\n"
                 " ln.settings.upon_file_create_if_hash_exists"
             )
             raise RuntimeError(f"{msg}\n{hint}")
@@ -381,9 +381,9 @@ def log_storage_hint(
     else:
         hint += "file will be copied to default storage upon `save()`"
         if key is None:
-            hint += f" with key
+            hint += f" with key '{id}{suffix}'"
         else:
-            hint += f" with key
+            hint += f" with key '{key}'"
     logger.hint(hint)
 
 
@@ -557,24 +557,25 @@ def from_anndata(
     else:
         type = convert_numpy_dtype_to_lamin_feature_type(adata.X.dtype)
     feature_sets = {}
-    logger.info("parsing feature names of X
+    logger.info("parsing feature names of X stored in slot 'var'")
     logger.indent = " "
     feature_set_var = FeatureSet.from_values(
         data_parse.var.index,
         var_ref,
         type=type,
     )
+
     if feature_set_var is not None:
-        logger.info(f"linking: {feature_set_var}")
         feature_sets["var"] = feature_set_var
+        logger.save(f"linked: {feature_set_var}")
     logger.indent = ""
     if len(data_parse.obs.columns) > 0:
         logger.info("parsing feature names of slot 'obs'")
         logger.indent = " "
         feature_set_obs = FeatureSet.from_df(data_parse.obs)
         if feature_set_obs is not None:
-            logger.info(f"linking: {feature_set_obs}")
             feature_sets["obs"] = feature_set_obs
+            logger.save(f"linked: {feature_set_obs}")
         logger.indent = ""
     file._feature_sets = feature_sets
     return file
@@ -663,7 +664,7 @@ def replace(
         self._clear_storagekey = self.key
         self.key = str(key_path.with_name(new_filename))
         logger.warning(
-            f"
+            f"replacing the file will replace key '{key_path}' with '{self.key}'"
             f" and delete '{key_path}' upon `save()`"
         )
     else:
@@ -812,7 +813,7 @@ def _save_skip_storage(file, *args, **kwargs) -> None:
     for feature_set in file._feature_sets.values():
         feature_set.save()
     s = "s" if len(file._feature_sets) > 1 else ""
-    logger.
+    logger.save(
         f"saved {len(file._feature_sets)} feature set{s} for slot{s}:"
         f" {list(file._feature_sets.keys())}"
     )
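The duplicate-hash error now prints the offending record plus a completed hint naming the relevant setting. A sketch of what the hint refers to; only the setting name appears in the diff, so the value "warn_return_existing" and the local path are assumptions::

    import lamindb as ln

    # with the default "error", creating a File whose content hash already
    # exists raises:
    #   RuntimeError: file with same hash exists: <File>
    #   💡 you can make this error a warning:
    #       ln.settings.upon_file_create_if_hash_exists
    ln.settings.upon_file_create_if_hash_exists = "warn_return_existing"  # assumed value

    file = ln.File("data.h5ad")  # hypothetical path; now warns instead of raising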
lamindb/_registry.py
CHANGED
@@ -19,6 +19,7 @@ from . import _TESTING
 from ._from_values import get_or_create_records
 from .dev._feature_manager import create_features_df
 from .dev._settings import settings
+from .dev._view_parents import _transform_emoji
 
 IPYTHON = getattr(builtins, "__IPYTHON__", False)
 
@@ -65,9 +66,10 @@ def suggest_objects_with_same_name(orm: Registry, kwargs) -> Optional[str]:
     if results.index[0] == kwargs["name"]:
         return "object-with-same-name-exists"
     else:
+        s = "" if results.shape[0] == 1 else "s"
+        it = "it" if results.shape[0] == 1 else "one of them"
         msg = (
-            "
-            " them?"
+            f"record{s} with similar name{s} exist! did you mean to load {it}?"
         )
         if IPYTHON:
             from IPython.display import display
@@ -97,13 +99,13 @@ def __init__(orm: Registry, *args, **kwargs):
         existing_record = orm.filter(name=kwargs["name"]).one()
         if existing_record is not None:
             logger.success(
-                f"
+                f"loaded record with exact same name{version_comment}"
             )
             init_self_from_db(orm, existing_record)
             return None
         super(Registry, orm).__init__(**kwargs)
     elif len(args) != len(orm._meta.concrete_fields):
-        raise ValueError("
+        raise ValueError("please provide keyword arguments, not plain arguments")
     else:
         # object is loaded from DB (**kwargs could be omitted below, I believe)
         super(Registry, orm).__init__(*args, **kwargs)
@@ -317,7 +319,7 @@ def describe(self):
 
     # Display Provenance
     # display line by line the foreign key fields
-    emojis = {
+    emojis = {
+        "storage": "🗃️",
+        "created_by": "👤",
+        "transform": _transform_emoji(self.transform),
+        "run": "🚗",
+    }
     if len(foreign_key_fields) > 0:
         record_msg = f"{model_name}({''.join([f'{i}={self.__getattribute__(i)}, ' for i in direct_fields])})" # noqa
         msg += f"{record_msg.rstrip(', )')})\n\n"
@@ -352,7 +359,7 @@ def describe(self):
     key_split = feature_set.registry.split(".")
     if len(key_split) == 3:
         logger.warning(
-            "
+            "you have a legacy entry in feature_set.field, should be just"
             " 'bionty.Gene'"
         )
     orm_name_with_schema = f"{key_split[0]}.{key_split[1]}"
@@ -474,7 +481,7 @@ def __get_name_with_schema__(cls) -> str:
 
 
 def select_backward(cls, **expressions):
-    logger.warning("select() is deprecated!
+    logger.warning("select() is deprecated! please use: Registry.filter()")
     return cls.filter(**expressions)
 
 
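The similar-name suggestion is now pluralized via two small helpers instead of a fixed string. Extracted as a standalone function for illustration (the function name is ours; the message template is verbatim from the diff)::

    def similar_name_message(n_results: int) -> str:
        # pluralization pattern from suggest_objects_with_same_name()
        s = "" if n_results == 1 else "s"
        it = "it" if n_results == 1 else "one of them"
        return f"record{s} with similar name{s} exist! did you mean to load {it}?"

    print(similar_name_message(3))
    # records with similar names exist! did you mean to load one of them?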
lamindb/_save.py
CHANGED
@@ -86,11 +86,11 @@ def save(records: Iterable[Registry], **kwargs) -> None: # type: ignore
     ):
         # save the record with parents one by one
         logger.warning(
-            "
+            "now recursing through parents: "
             "this only happens once, but is much slower than bulk saving"
         )
         logger.hint(
-            "
+            "you can switch this off via: lb.settings.auto_save_parents = False"
         )
         for record in non_files_with_parents:
             record._save_ontology_parents()
@@ -123,7 +123,7 @@ def check_and_attempt_upload(file: File) -> Optional[Exception]:
     try:
         upload_data_object(file)
     except Exception as exception:
-        logger.warning(f"
+        logger.warning(f"could not upload file: {file}")
         return exception
     # copies (if ob-disk) or moves the temporary file (if in-memory) to the cache
     copy_or_move_to_cache(file)
@@ -173,7 +173,7 @@ def check_and_attempt_clearing(file: File) -> Optional[Exception]:
         if file._clear_storagekey is not None:
             delete_storage_using_key(file, file._clear_storagekey)
             logger.success(
-                f"
+                f"deleted stale object at storage key {file._clear_storagekey}"
             )
             file._clear_storagekey = None
     except Exception as exception:
@@ -238,20 +238,16 @@ def upload_data_object(file) -> None:
     """Store and add file and its linked entries."""
     # do NOT hand-craft the storage key!
     file_storage_key = auto_storage_key_from_file(file)
+    msg = f"storing file '{file.id}' with key '{file_storage_key}'"
     if hasattr(file, "_to_store") and file._to_store and file.suffix != ".zarr":
-
-            f"`{file.key}` ('{file_storage_key}')"
-            if file.key is None
-            else f"'{file_storage_key}'"
-        )
-        logger.hint(f"storing file '{file.id}' with key {display_key}")
+        logger.save(msg)
         store_object(file._local_filepath, file_storage_key)
     elif (
         file.suffix in {".zarr", ".zrad"}
         and hasattr(file, "_memory_rep")
         and file._memory_rep is not None
     ):
-        logger.
+        logger.save(msg)
         storagepath = lamindb_setup.settings.storage.key_to_filepath(file_storage_key)
         print_progress = partial(
             print_hook, filepath=file_storage_key, action="uploading"
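The recursion warning and hint are now complete, and the hint names the opt-out switch verbatim. A sketch of a bulk save that would trigger it, assuming the bionty schema package is installed as `lb` (the record values are hypothetical)::

    import lamindb as ln
    import lnschema_bionty as lb

    # saving ontology records with parents logs:
    #   "now recursing through parents: this only happens once,
    #    but is much slower than bulk saving"
    # and the hint points at the switch used verbatim in the diff:
    lb.settings.auto_save_parents = False

    cell_types = lb.CellType.from_values(["T cell", "B cell"], lb.CellType.name)
    ln.save(cell_types)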
lamindb/_synonym.py
CHANGED
@@ -94,8 +94,8 @@ def _add_or_remove_synonyms(
     if matches_df.shape[0] > 0:
         records_df = pd.DataFrame(syns_all.filter(id__in=matches_df["id"]).values())
         logger.error(
-            f"
-            " with the following records:\n
+            f"input synonyms {matches_df['synonyms'].unique()} already associated"
+            " with the following records:\n"
         )
         display(records_df)
         raise SystemExit(AssertionError)
@@ -110,7 +110,7 @@ def _add_or_remove_synonyms(
         return
     # because we use | as the separator
     if any(["|" in i for i in syn_new_set]):
-        raise AssertionError("
+        raise AssertionError("a synonym can't contain '|'!")
 
     # existing synonyms
     syns_exist = record.synonyms
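Because `|` separates stored synonyms, it is now rejected with a readable message. A sketch, assuming a synonyms-aware registry such as `lb.CellType` with an existing record::

    import lnschema_bionty as lb

    record = lb.CellType.filter(name="T cell").one()

    # "|" is the synonym separator, so this raises
    # AssertionError("a synonym can't contain '|'!")
    record.add_synonym("T cell|T lymphocyte")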
lamindb/_validate.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Dict, List, Optional, Union
 import numpy as np
 import pandas as pd
 from django.db.models import QuerySet
+from lamin_utils._inspect import InspectResult
 from lamindb_setup.dev._docs import doc_args
 from lnschema_core import Registry, ValidationAware
 from lnschema_core.types import ListLike, StrField
@@ -20,16 +21,14 @@ def inspect(
     values: ListLike,
     field: StrField,
     *,
-    return_df: bool = False,
     mute: bool = False,
     **kwargs,
-) ->
+) -> InspectResult:
     """{}"""
     return _inspect(
         cls=cls,
         values=values,
         field=field,
-        return_df=return_df,
         mute=mute,
         **kwargs,
     )
@@ -47,7 +46,6 @@ def _inspect(
     values: ListLike,
     field: StrField,
     *,
-    return_df: bool = False,
     mute: bool = False,
     **kwargs,
 ) -> Union["pd.DataFrame", Dict[str, List[str]]]:
@@ -66,8 +64,8 @@ def _inspect(
         identifiers=values,
         field=str(field),
         inspect_synonyms=True,
-
-
+        mute=mute,
+        **kwargs,
     )
 
 
@@ -87,10 +85,7 @@ def _validate(cls, values: ListLike, field: StrField, **kwargs) -> np.ndarray[bo
         )
     )
     return validate(
-        identifiers=values,
-        field_values=field_values,
-        case_sensitive=True,
-        return_df=False,
+        identifiers=values, field_values=field_values, case_sensitive=True, **kwargs
     )
 
 
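`inspect` drops the `return_df` flag and now always returns `lamin_utils`' `InspectResult` (re-exported from `lamindb.dev` in the next file). A sketch; the `validated`/`non_validated` attribute names are assumptions about `InspectResult`, which isn't spelled out in this diff::

    import lamindb as ln

    result = ln.Feature.inspect(["perturbation", "not_a_feature"], ln.Feature.name)

    # instead of choosing between a DataFrame and a dict via return_df,
    # both views now hang off the result object (attribute names assumed):
    print(result.validated)
    print(result.non_validated)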
lamindb/dev/__init__.py
CHANGED
@@ -10,6 +10,7 @@
    FeatureManager
    ValidationAware
    SynonymsAware
+   InspectResult
    datasets
    hashing
    storage
@@ -18,6 +19,7 @@
    exc.ValidationError
 """
 
+from lamin_utils._inspect import InspectResult
 from lnschema_core.models import Data, Registry, SynonymsAware, ValidationAware
 
 from lamindb._query_manager import QueryManager
lamindb/dev/_data.py
CHANGED
@@ -43,12 +43,12 @@ def get_labels(
         feature_name = feature
         feature = Feature.filter(name=feature_name).one_or_none()
         if feature is None:
-            raise ValueError("
+            raise ValueError("feature doesn't exist")
     if feature.registries is None:
-        raise ValueError("
+        raise ValueError("feature does not have linked labels")
     registries_to_check = feature.registries.split("|")
     if len(registries_to_check) > 1 and not mute:
-        logger.warning("
+        logger.warning("labels come from multiple registries!")
     qs_by_registry = {}
     for registry in registries_to_check:
         # currently need to distinguish between Label and non-Label, because
@@ -147,17 +147,18 @@ def add_labels(
             for record in records
         ]
     )
-    msg += f"
-    if msg != "":
-        msg += ", "
+    msg += f"linked labels {records_display} to feature '{feature.name}'"
     if feature.registries is None or orm_name not in feature.registries:
-        msg
+        if len(msg) > 0:
+            msg += ", "
+        msg += f"linked feature '{feature.name}' to registry '{orm_name}'"
         if feature.registries is None:
             feature.registries = orm_name
         elif orm_name not in feature.registries:
             feature.registries += f"|{orm_name}"
         feature.save()
-
+    if len(msg) > 0:
+        logger.save(msg)
     # check whether we have to update the feature set that manages labels
     # (Feature) to account for a new feature
     found_feature = False
@@ -166,18 +167,18 @@ def add_labels(
             found_feature = True
     if not found_feature:
         if "external" not in linked_features_by_slot:
-            logger.success("creating feature set for slot 'external'")
             feature_set = FeatureSet([feature], modality="meta")
             feature_set.save()
             self.features.add_feature_set(feature_set, slot="external")
+            logger.save("created feature set for slot 'external'")
         else:
             feature_set = self.features._feature_set_by_slot["external"]
-            logger.success(
-                f"linking feature {feature.name} to feature set {feature_set}"
-            )
             feature_set.features.add(feature)
             feature_set.n += 1
             feature_set.save()
+            logger.save(
+                f"linked feature {feature.name} to feature set {feature_set}"
+            )
 
 
 @property # type: ignore
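The message assembly in `add_labels` was reordered so the ", " separator is only inserted when a first clause already exists, and the combined message goes out through a single `logger.save` after the fact. The joining pattern, rendered standalone (the clause texts here are hypothetical fillers)::

    # standalone rendering of the new msg-joining logic in add_labels()
    msg = "linked labels ['Label1'] to feature 'species'"  # hypothetical first clause

    feature_needs_registry_link = True  # stands in for the registries check
    if feature_needs_registry_link:
        if len(msg) > 0:
            msg += ", "
        msg += "linked feature 'species' to registry 'core.Label'"

    if len(msg) > 0:
        print(msg)  # the real code calls logger.save(msg)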
lamindb/dev/_view_parents.py
CHANGED
@@ -1,11 +1,12 @@
 from typing import List, Set, Union
 
-from lnschema_core import File, Registry, Run
+from lnschema_core import File, Registry, Run, Transform
 from lnschema_core.models import format_field_value
 
 LAMIN_GREEN_LIGHTER = "#10b981"
 LAMIN_GREEN_DARKER = "#065f46"
 GREEN_FILL = "honeydew"
+TRANSFORM_EMOJIS = {"notebook": "📔", "app": "🖥️", "pipeline": "🧩"}
 
 
 def view_lineage(file: File, with_children: bool = True):
@@ -120,11 +121,11 @@ def view_parents(
     )
     u.node(
         record_label.replace(":", "_"),
-        label=record_label,
+        label=_add_emoji(record, record_label),
         fillcolor=LAMIN_GREEN_LIGHTER,
     )
     for _, row in df_edges.iterrows():
-        u.node(row["source"], label=row["source_label"])
+        u.node(row["source"], label=_add_emoji(record, row["source_label"]))
         u.edge(row["source"], row["target"], color="dimgrey")
 
     return u
@@ -182,6 +183,16 @@ def _df_edges_from_parents(
     return df_edges
 
 
+def _add_emoji(record: Registry, label: str):
+    if record.__class__.__name__ == "Transform":
+        emoji = TRANSFORM_EMOJIS.get(record.type, "💫")
+    elif record.__class__.__name__ == "Run":
+        emoji = TRANSFORM_EMOJIS.get(record.transform.type, "💫")
+    else:
+        emoji = ""
+    return f"{emoji} {label}"
+
+
 def _get_all_parent_runs(file: File):
     """Get all input file runs recursively."""
     all_runs = {file.run}
@@ -224,10 +235,9 @@ def _label_file_run(record: Union[File, Run]):
         rf' FACE="Monospace">id={record.id}<BR/>suffix={record.suffix}</FONT>>'
     )
 elif isinstance(record, Run):
-    emojis = {"notebook": "📔", "app": "🖥️"}
     name = f'{record.transform.name.replace("&", "&amp;")}'
     return (
-        rf'<{
+        rf'<{TRANSFORM_EMOJIS.get(str(record.transform.type), "💫")} {name}<BR/><FONT COLOR="GREY" POINT-SIZE="10"' # noqa
         rf' FACE="Monospace">id={record.id}<BR/>type={record.transform.type},'
         rf" user={record.created_by.name}<BR/>run_at={format_field_value(record.run_at)}</FONT>>" # noqa
     )
@@ -253,3 +263,7 @@ def _df_edges_from_runs(all_runs: List[Run]):
     df["source_label"] = df["source_record"].apply(_label_file_run)
     df["target_label"] = df["target_record"].apply(_label_file_run)
     return df
+
+
+def _transform_emoji(transform: Transform):
+    return TRANSFORM_EMOJIS.get(transform.type, "💫")
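The emoji lookup that `_label_file_run` used to build inline is now the module-level `TRANSFORM_EMOJIS` dict, shared by `_add_emoji` here and by `describe()` in `_registry.py` via `_transform_emoji`. Its behavior in isolation::

    TRANSFORM_EMOJIS = {"notebook": "📔", "app": "🖥️", "pipeline": "🧩"}

    # unknown transform types fall back to "💫", exactly as in the diff
    assert TRANSFORM_EMOJIS.get("notebook", "💫") == "📔"
    assert TRANSFORM_EMOJIS.get("api", "💫") == "💫"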
lamindb/dev/datasets/__init__.py
CHANGED
@@ -19,6 +19,7 @@
    anndata_mouse_sc_lymph_node
    anndata_human_immune_cells
    anndata_pbmc68k_reduced
+   anndata_file_pbmc68k_test
    anndata_pbmc3k_processed
    anndata_with_obs
    anndata_suo22_Visium10X
@@ -29,6 +30,7 @@
 """
 
 from ._core import (
+    anndata_file_pbmc68k_test,
     anndata_human_immune_cells,
     anndata_mouse_sc_lymph_node,
     anndata_pbmc3k_processed,
lamindb/dev/datasets/_core.py
CHANGED
@@ -132,6 +132,28 @@ def anndata_pbmc68k_reduced() -> ad.AnnData:
     return ad.read(filepath)
 
 
+def anndata_file_pbmc68k_test() -> Path:
+    """Modified from scanpy.datasets.pbmc68k_reduced().
+
+    Additional slots were added for testing purposes. Returns the filepath.
+
+    To reproduce::
+
+        pbmc68k = ln.dev.datasets.anndata_pbmc68k_reduced()
+        pbmc68k_test = pbmc68k[:30, :200].copy()
+        pbmc68k_test.raw = pbmc68k_test[:, :100]
+        pbmc68k_test.obsp["test"] = sparse.eye(pbmc68k_test.shape[0], format="csr")
+        pbmc68k_test.varp["test"] = sparse.eye(pbmc68k_test.shape[1], format="csr")
+        pbmc68k_test.layers["test"] = sparse.csr_matrix(pbmc68k_test.shape)
+        pbmc68k_test.layers["test"][0] = 1.
+        pbmc68k_test.write("pbmc68k_test.h5ad")
+    """
+    filepath, _ = urlretrieve(
+        "https://lamindb-test.s3.amazonaws.com/pbmc68k_test.h5ad", "pbmc68k_test.h5ad"
+    )
+    return Path(filepath)
+
+
 def anndata_pbmc3k_processed() -> ad.AnnData:
     """Modified from scanpy.pbmc3k_processed()."""
     filepath, _ = urlretrieve(
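Unlike its sibling loaders, the new `anndata_file_pbmc68k_test` returns a path to the downloaded `.h5ad` rather than an in-memory `AnnData`. Loading it, per the reproduction recipe in its docstring::

    import anndata as ad
    import lamindb as ln

    filepath = ln.dev.datasets.anndata_file_pbmc68k_test()  # Path to pbmc68k_test.h5ad
    adata = ad.read_h5ad(filepath)
    assert adata.shape == (30, 200)  # the docstring slices pbmc68k[:30, :200]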
{lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lamindb
-Version: 0.50.0
+Version: 0.50.1
 Summary: Open-source data platform for biology.
 Author-email: Lamin Labs <laminlabs@gmail.com>
 Requires-Python: >=3.8
@@ -8,9 +8,9 @@ Description-Content-Type: text/markdown
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
-Requires-Dist: lnschema_core==0.44.
-Requires-Dist: lamindb_setup==0.49.
-Requires-Dist: lamin_utils==0.9.
+Requires-Dist: lnschema_core==0.44.3
+Requires-Dist: lamindb_setup==0.49.6
+Requires-Dist: lamin_utils==0.9.6
 Requires-Dist: erdiagram>=0.1.2
 Requires-Dist: rapidfuzz
 Requires-Dist: pydantic[dotenv]
@@ -24,7 +24,7 @@ Requires-Dist: botocore==1.29.76 ; extra == "aws"
 Requires-Dist: urllib3<2 ; extra == "aws"
 Requires-Dist: boto3==1.26.76 ; extra == "aws"
 Requires-Dist: fsspec[s3]==2023.5.0 ; extra == "aws"
-Requires-Dist: lnschema_bionty==0.29.
+Requires-Dist: lnschema_bionty==0.29.2 ; extra == "bionty"
 Requires-Dist: pre-commit ; extra == "dev"
 Requires-Dist: nox ; extra == "dev"
 Requires-Dist: laminci>=0.3 ; extra == "dev"
{lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/RECORD
CHANGED
@@ -1,34 +1,34 @@
-lamindb/__init__.py,sha256=
-lamindb/_context.py,sha256=
+lamindb/__init__.py,sha256=oE18o68Zgr88aED7EbueNHqZ2JQApmGBUwiEcfqeXl0,3807
+lamindb/_context.py,sha256=bc-OpnQ5g_7ltx_qdj0Z1YYuNgeqm87T2jInjZX3iOY,17837
 lamindb/_dataset.py,sha256=w5Byx-9IwVsi2rZx1lKy-HuxTHq3f_Q-MF7t3zb0Msw,4884
-lamindb/_delete.py,sha256=
+lamindb/_delete.py,sha256=jvzDM_hbraBvOMhulibRziyDhoM1mqIX_MSLeC42c1M,1917
 lamindb/_feature.py,sha256=B3U9q-v_Bu9o3xrl2ScYefMEkNnDrlWpLzL4sdN4izQ,5479
-lamindb/_feature_set.py,sha256=
-lamindb/_file.py,sha256=
+lamindb/_feature_set.py,sha256=XBIGI2vuqECG9VegEzaiQm7iBNfqvpCq_WwJnL7h4ig,8492
+lamindb/_file.py,sha256=0W8mSiYhjl7Epvb87W3SJVFAgpEBlxZnS5r7zNgMyhY,35984
 lamindb/_filter.py,sha256=Rf5RSkglIhJhSdk3AIPfc83F5NIfZrdCZDpgcYNj5KY,351
 lamindb/_from_values.py,sha256=mezvC4d_b8ndRF8NNPl1vEj3O70qLv92b9ZjII5Qa3E,10532
 lamindb/_label.py,sha256=-gLa8WvD1mycruDEB67Zx_oewhO85KvOwVZ3SsKvSvs,1544
 lamindb/_logger.py,sha256=Q9ugUnZE4EwH0N6qFMG2HlILmu-SArnEEy-nafTPjRg,47
 lamindb/_query_manager.py,sha256=RSdPsANk4WBlKbu1DIoDto8HdSqCKbo8Q4CGyRqLd2U,1983
 lamindb/_query_set.py,sha256=yyzrU0ACqG0iebME5bah6ycj4HkvZ8_JigtyATHW3PA,9953
-lamindb/_registry.py,sha256=
-lamindb/_save.py,sha256=
+lamindb/_registry.py,sha256=Jd0qb9sK1CO8ONMSN8MnF9UYofd-vvM1XszJQvNETNA,16486
+lamindb/_save.py,sha256=jC2a9JzQBZYVHHuq6NyIovFQI1Y6WT1q2C1dfiXKv_E,9216
 lamindb/_storage.py,sha256=a0ofo106NHNY2RBF3MB1PeeONZMc28aHFG2fbndn_80,246
-lamindb/_synonym.py,sha256=
+lamindb/_synonym.py,sha256=I7qL57F5a_Z3a_5hVIQKz5w55HGKEpgpBY9s-35Bg80,5870
 lamindb/_transform.py,sha256=K-Y37UqQPu06lsA3Jfnkyt5dssRpuMnpCDDHDbT40Z4,1251
-lamindb/_validate.py,sha256=
+lamindb/_validate.py,sha256=W374jmKFvAy1wIfibz-C4sftxxtlWVwBHliIBy_YD1w,3606
 lamindb/_view.py,sha256=dxZ7BPrMAf0rJ8aKSprkQiO1O2OJsjtSaHBJfpKvVT4,2187
 lamindb/types.py,sha256=svg5S_aynuGfbEOsbmqkR_gF9d9YMzfOkcvGN37Rzvg,232
-lamindb/dev/__init__.py,sha256=
-lamindb/dev/_data.py,sha256
+lamindb/dev/__init__.py,sha256=K8cgi474J2uIxbnQZeeeMrsDcWIPdsRXwjIrWcJ_qaQ,656
+lamindb/dev/_data.py,sha256=M5AGGCAvw7e_cDooAWA8_hUQFjxAC62QPs-vEQFqpD4,7806
 lamindb/dev/_feature_manager.py,sha256=laJRqKiL8Qlm5Jq60RoXXX6xYwgyL2MPd6OgS8eXs6A,3526
 lamindb/dev/_settings.py,sha256=WLweWZNAvBquybQYUlYyIehIlv3t5llgYhyNpL0obdg,2926
-lamindb/dev/_view_parents.py,sha256=
+lamindb/dev/_view_parents.py,sha256=pF0qTZirLwzgWFtqzRe-KksqASHjpFWLRkNK-0GsyGU,8590
 lamindb/dev/exc.py,sha256=xJ0QCeineZccjYKq-W-RXhU6fJH6JyFLtDyHKZTOoyU,96
 lamindb/dev/hashing.py,sha256=oYHDSEjINsmDYgCLasnhh_VHbB1dLag27ufVUwDgLyQ,1385
 lamindb/dev/utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
-lamindb/dev/datasets/__init__.py,sha256=
-lamindb/dev/datasets/_core.py,sha256=
+lamindb/dev/datasets/__init__.py,sha256=0ObE_PmGsxOaCvMYZYkLQLo7zYD6Hr17Ixh53A-jqNs,1310
+lamindb/dev/datasets/_core.py,sha256=6GsgLPavWQPI0K7EIKJ5xq9JXKIxiF1ZNf_1GOABvD0,12426
 lamindb/dev/datasets/_fake.py,sha256=S8mNho-oSh1M9x9oOSsUBLLHmBAegsOLlFk6LnF81EA,942
 lamindb/dev/storage/__init__.py,sha256=S9BtWiqOj54Fiv213ThKqiyTVgEJpo91wVOkpzE1kEk,368
 lamindb/dev/storage/_anndata_sizes.py,sha256=0XVzA6AQeVGPaGPrhGusKyxFgFjeo3qSN29hxb8D5E8,993
@@ -40,8 +40,8 @@ lamindb/schema/__init__.py,sha256=PznznlFvbeNSZKpn1RS6Gv0JMXFkLmU2_ej_1hVLSTs,79
 lamindb/schema/_core.py,sha256=nWR3X_rNd1AbWw3naMiBi8ppAEpqIDyEYqM54feRB_s,766
 lamindb/setup/__init__.py,sha256=8-0F2C4Glx23-b8-D_1CBGgRBM5PppVhazhoXZYOLsg,275
 lamindb/setup/dev/__init__.py,sha256=iD0f2lx_Hgp-udkiPGal7si5waJSOgvnG6Id-g1mMOY,213
-lamindb-0.50.
-lamindb-0.50.
-lamindb-0.50.
-lamindb-0.50.
-lamindb-0.50.
+lamindb-0.50.1.dist-info/entry_points.txt,sha256=MioM8vSpKwXxY3geNBwjo1wnwy1l15WjJYlI3lpKuZI,53
+lamindb-0.50.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+lamindb-0.50.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+lamindb-0.50.1.dist-info/METADATA,sha256=It6LNQR9QYgxitisJK1KcWhRfDhOq5CkcLpqpDhjLuY,12570
+lamindb-0.50.1.dist-info/RECORD,,
{lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/LICENSE
File without changes

{lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/WHEEL
File without changes

{lamindb-0.50.0.dist-info → lamindb-0.50.1.dist-info}/entry_points.txt
File without changes