lamindb 0.71.2__py3-none-any.whl → 0.71.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lamindb/__init__.py +1 -1
- lamindb/_artifact.py +19 -9
- lamindb/_can_validate.py +9 -3
- lamindb/_collection.py +3 -3
- lamindb/_from_values.py +30 -8
- lamindb/_registry.py +52 -94
- lamindb/_save.py +28 -11
- lamindb/core/_data.py +26 -3
- lamindb/core/_feature_manager.py +74 -51
- lamindb/core/_label_manager.py +41 -29
- {lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/METADATA +7 -6
- {lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/RECORD +14 -14
- {lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/LICENSE +0 -0
- {lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/WHEEL +0 -0
lamindb/__init__.py
CHANGED
lamindb/_artifact.py
CHANGED
@@ -1,12 +1,14 @@
 from __future__ import annotations

 import shutil
+from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path, PurePath, PurePosixPath
 from typing import TYPE_CHECKING, Any, Mapping

 import fsspec
 import lamindb_setup as ln_setup
 import pandas as pd
+import psutil
 from anndata import AnnData
 from lamin_utils import colors, logger
 from lamindb_setup import settings as setup_settings
@@ -204,15 +206,23 @@ def get_stat_or_artifact(
         return size, hash, hash_type, n_objects
     else:
         if path.is_dir():
[… 9 removed lines (previous directory-hashing code) not captured in this view]
+            files = (subpath for subpath in path.rglob("*") if subpath.is_file())
+
+            def hash_size(file):
+                file_size = file.stat().st_size
+                return hash_file(file, file_size)[0], file_size
+
+            n_workers = len(psutil.Process().cpu_affinity())
+            if n_workers > 1:
+                with ThreadPoolExecutor(n_workers) as pool:
+                    hashes_sizes = pool.map(hash_size, files)
+            else:
+                hashes_sizes = map(hash_size, files)
+            hashes, sizes = zip(*hashes_sizes)
+
+            hash, hash_type = hash_md5s_from_dir(hashes)
+            n_objects = len(hashes)
+            size = sum(sizes)
         else:
             hash, hash_type = hash_file(path)
             size = stat.st_size
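The main change in `_artifact.py` parallelizes per-file hashing when an uploaded path is a directory. Below is a minimal, self-contained sketch of that pattern using only the standard library; it is an illustration, not the package's implementation: `hashlib.md5` stands in for lamindb's `hash_file`, the combined digest stands in for `hash_md5s_from_dir`, and `os.cpu_count()` stands in for `psutil.Process().cpu_affinity()`.

```python
from __future__ import annotations

import hashlib
import os
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path


def _md5_and_size(file: Path) -> tuple[str, int]:
    # hash a single file and report its size
    size = file.stat().st_size
    return hashlib.md5(file.read_bytes()).hexdigest(), size


def hash_directory(path: Path) -> tuple[str, int, int]:
    """Return (combined_hash, total_size, n_files) for a directory."""
    files = [p for p in path.rglob("*") if p.is_file()]
    # os.cpu_count() stands in for psutil.Process().cpu_affinity() used in the diff
    n_workers = os.cpu_count() or 1
    if n_workers > 1 and files:
        with ThreadPoolExecutor(n_workers) as pool:
            hashes_sizes = list(pool.map(_md5_and_size, files))
    else:
        hashes_sizes = [_md5_and_size(f) for f in files]
    hashes = [h for h, _ in hashes_sizes]
    sizes = [s for _, s in hashes_sizes]
    # order-independent digest over the per-file digests (assumption about what a
    # helper like hash_md5s_from_dir does)
    combined = hashlib.md5("".join(sorted(hashes)).encode()).hexdigest()
    return combined, sum(sizes), len(hashes)
```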
lamindb/_can_validate.py
CHANGED
@@ -80,7 +80,9 @@ def _inspect(

     # inspect in the DB
     result_db = inspect(
-        df=_filter_query_based_on_organism( …
+        df=_filter_query_based_on_organism(
+            queryset=queryset, field=field, organism=organism
+        ),
         identifiers=values,
         field=field,
         mute=mute,
@@ -161,6 +163,7 @@ def _validate(
     field_values = pd.Series(
         _filter_query_based_on_organism(
             queryset=queryset,
+            field=field,
             organism=organism,
             values_list_field=field,
         ),
@@ -284,7 +287,9 @@ def _standardize(

     try:
         orm._meta.get_field(synonyms_field)
-        df = _filter_query_based_on_organism( …
+        df = _filter_query_based_on_organism(
+            queryset=queryset, field=field, organism=organism
+        )
     except FieldDoesNotExist:
         df = pd.DataFrame()

@@ -439,6 +444,7 @@ def _check_synonyms_field_exist(record: Registry):

 def _filter_query_based_on_organism(
     queryset: QuerySet,
+    field: str,
     organism: str | Registry | None = None,
     values_list_field: str | None = None,
 ):
@@ -447,7 +453,7 @@ def _filter_query_based_on_organism(

     orm = queryset.model

-    if _has_organism_field(orm):
+    if _has_organism_field(orm) and not field.endswith("id"):
         # here, we can safely import lnschema_bionty
         from lnschema_bionty._bionty import create_or_get_organism_record

lamindb/_collection.py
CHANGED
@@ -103,9 +103,9 @@ def __init__(
     if meta._state.adding:
         raise ValueError("Save meta artifact before creating collection!")
     if not feature_sets:
-        feature_sets = meta.features. …
+        feature_sets = meta.features.feature_set_by_slot
     else:
-        if len(meta.features. …
+        if len(meta.features.feature_set_by_slot) > 0:
             logger.info("overwriting feature sets linked to artifact")
     # we ignore collections in trash containing the same hash
     if hash is not None:
@@ -129,7 +129,7 @@ def __init__(
         existing_collection.transform = run.transform
         init_self_from_db(collection, existing_collection)
         update_attributes(collection, {"description": description, "name": name})
-        for slot, feature_set in collection.features. …
+        for slot, feature_set in collection.features.feature_set_by_slot.items():
             if slot in feature_sets:
                 if not feature_sets[slot] == feature_set:
                     collection.feature_sets.remove(feature_set)
lamindb/_from_values.py
CHANGED
@@ -102,6 +102,9 @@ def get_existing_records(

     # standardize based on the DB reference
     # log synonyms mapped terms
+    print("field", field)
+    print("organism", kwargs.get("organism"))
+    print("public_source", kwargs.get("public_source"))
     result = model.inspect(
         iterable_idx,
         field=field,
@@ -185,8 +188,15 @@ def create_records_from_public(

     # create the corresponding bionty object from model
     try:
+        # TODO: more generic
+        organism = kwargs.get("organism")
+        if field.field.name == "ensembl_gene_id":
+            if iterable_idx[0].startswith("ENSG"):
+                organism = "human"
+            elif iterable_idx[0].startswith("ENSMUSG"):
+                organism = "mouse"
         public_ontology = model.public(
-            organism= …
+            organism=organism, public_source=kwargs.get("public_source")
         )
     except Exception:
         # for custom records that are not created from public sources
@@ -223,8 +233,15 @@ def create_records_from_public(
     bionty_kwargs, multi_msg = _bulk_create_dicts_from_df(
         keys=mapped_values, column_name=field.field.name, df=bionty_df
     )
+    organism_kwargs = {}
+    if "organism" not in kwargs:
+        organism_record = _get_organism_record(
+            field, public_ontology.organism, force=True
+        )
+        if organism_record is not None:
+            organism_kwargs["organism"] = organism_record
     for bk in bionty_kwargs:
-        records.append(model(**bk, **kwargs))
+        records.append(model(**bk, **kwargs, **organism_kwargs))

     # number of records that matches field (not synonyms)
     validated = result.validated
@@ -260,10 +277,11 @@ def index_iterable(iterable: Iterable) -> pd.Index:
     return idx[(idx != "") & (~idx.isnull())]


-def _print_values(names: …
-    names = …
[… 2 removed lines not captured in this view]
+def _print_values(names: Iterable, n: int = 20) -> str:
+    names = (name for name in names if name != "None")
+    unique_names = list(dict.fromkeys(names))[:n]
+    print_values = ", ".join(f"'{name}'" for name in unique_names)
+    if len(unique_names) > n:
         print_values += ", ..."
     return print_values

@@ -334,9 +352,13 @@ def _has_organism_field(orm: Registry) -> bool:
     return False


-def _get_organism_record( …
+def _get_organism_record(
+    field: StrField, organism: str | Registry, force: bool = False
+) -> Registry:
     model = field.field.model
-    if …
+    check = True if force else field.field.name != "ensembl_gene_id"
+
+    if _has_organism_field(model) and check:
         from lnschema_bionty._bionty import create_or_get_organism_record

         organism_record = create_or_get_organism_record(organism=organism, orm=model)
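Part of the `_from_values.py` change infers the organism from Ensembl gene ID prefixes when the caller did not pass one. A standalone sketch of that inference follows; the function name is illustrative, and only the two prefixes visible in the diff are handled, with anything else falling back to a default.

```python
from __future__ import annotations


def infer_organism_from_ensembl_id(ensembl_gene_id: str, default: str | None = None) -> str | None:
    # only the two prefixes that appear in the diff are handled here
    if ensembl_gene_id.startswith("ENSG"):
        return "human"
    if ensembl_gene_id.startswith("ENSMUSG"):
        return "mouse"
    return default


assert infer_organism_from_ensembl_id("ENSG00000000001") == "human"
assert infer_organism_from_ensembl_id("ENSMUSG00000000001") == "mouse"
assert infer_organism_from_ensembl_id("FBgn0000008") is None  # unknown prefix falls back
```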
lamindb/_registry.py
CHANGED
@@ -2,14 +2,12 @@ from __future__ import annotations

 import builtins
 from typing import TYPE_CHECKING, Iterable, List, NamedTuple
-from uuid import UUID

 import dj_database_url
 import lamindb_setup as ln_setup
-import pandas as pd
 from django.core.exceptions import FieldDoesNotExist
 from django.db import connections
-from django.db.models import Manager, QuerySet
+from django.db.models import Manager, Q, QuerySet
 from lamin_utils import logger
 from lamin_utils._lookup import Lookup
 from lamin_utils._search import search as base_search
@@ -26,6 +24,7 @@ from lamindb.core._settings import settings
 from ._from_values import get_or_create_records

 if TYPE_CHECKING:
+    import pandas as pd
     from lnschema_core.types import ListLike, StrField

 IPYTHON = getattr(builtins, "__IPYTHON__", False)
@@ -61,20 +60,15 @@ def suggest_objects_with_same_name(orm: Registry, kwargs) -> str | None:
     if kwargs.get("name") is None:
         return None
     else:
[… 1 removed line not captured in this view]
-        if …
+        queryset = orm.search(kwargs["name"])
+        if not queryset.exists():  # empty queryset
             return None
[… 4 removed lines not captured in this view]
-        # test for exact match
-        if len(results) > 0:
-            if results.index[0] == kwargs["name"]:
-                return "object-with-same-name-exists"
+        else:
+            for record in queryset:
+                if record.name == kwargs["name"]:
+                    return "object-with-same-name-exists"
         else:
-            s = "" if …
-            it = "it" if results.shape[0] == 1 else "one of them"
+            s, it = ("", "it") if len(queryset) == 1 else ("s", "one of them")
             msg = (
                 f"record{s} with similar name{s} exist! did you mean to load {it}?"
             )
@@ -83,9 +77,9 @@ def suggest_objects_with_same_name(orm: Registry, kwargs) -> str | None:

             logger.warning(f"{msg}")
             if settings._verbosity_int >= 1:
-                display( …
+                display(queryset.df())
             else:
-                logger.warning(f"{msg}\n{ …
+                logger.warning(f"{msg}\n{queryset}")
     return None


@@ -162,80 +156,42 @@ def _search(
     string: str,
     *,
     field: StrField | list[StrField] | None = None,
-    limit: int | None = …
-    return_queryset: bool = False,
+    limit: int | None = 20,
     case_sensitive: bool = False,
-    synonyms_field: StrField | None = "synonyms",
     using_key: str | None = None,
-) -> …
-
-    orm = …
[… 6 removed lines not captured in this view]
-    field = get_default_str_field(orm=orm, field=field)
-
-    try:
-        orm._meta.get_field(synonyms_field)
-        synonyms_field_exists = True
-    except FieldDoesNotExist:
-        synonyms_field_exists = False
-
-    if synonyms_field is not None and synonyms_field_exists:
-        df = pd.DataFrame(queryset.values("uid", field, synonyms_field))
-    else:
-        df = pd.DataFrame(queryset.values("uid", field))
-
-    return base_search(
-        df=df,
-        string=string,
-        field=field,
-        limit=limit,
-        synonyms_field=str(synonyms_field),
-        case_sensitive=case_sensitive,
-    )
-
-    # search in both key and description fields for Artifact
-    if orm._meta.model.__name__ == "Artifact" and field is None:
-        field = ["key", "description"]
-
-    if not isinstance(field, List):
-        field = [field]
-
-    results = []
-    for fd in field:
-        result_field = _search_single_field(
-            string=string, field=fd, synonyms_field=synonyms_field
-        )
-        results.append(result_field)
-        # turn off synonyms search after the 1st field
-        synonyms_field = None
-
-    if len(results) > 1:
-        result = (
-            pd.concat([r.reset_index() for r in results], join="outer")
-            .drop(columns=["index"], errors="ignore")
-            .set_index("uid")
-        )
-    else:
-        result = results[0]
-
-    # remove results that have __ratio__ 0
-    if "__ratio__" in result.columns:
-        result = result[result["__ratio__"] > 0].sort_values(
-            "__ratio__", ascending=False
-        )
-    # restrict to 1 decimal
-    # move the score to be the last column
-    result["score"] = result.pop("__ratio__").round(1)
-
-    if return_queryset:
-        return _order_queryset_by_ids(queryset, result.reset_index()["uid"])
+) -> QuerySet:
+    input_queryset = _queryset(cls, using_key=using_key)
+    orm = input_queryset.model
+    if field is None:
+        fields = [
+            field.name
+            for field in orm._meta.fields
+            if field.get_internal_type() in {"CharField", "TextField"}
+        ]
     else:
[… 1 removed line not captured in this view]
+        if not isinstance(field, list):
+            fields_input = [field]
+        else:
+            fields_input = field
+        fields = []
+        for field in fields_input:
+            if not isinstance(field, str):
+                try:
+                    fields.append(field.field.name)
+                except AttributeError as error:
+                    raise TypeError(
+                        "Please pass a Registry string field, e.g., `CellType.name`!"
+                    ) from error
+            else:
+                fields.append(field)
+    expression = Q()
+    case_sensitive_i = "" if case_sensitive else "i"
+    for field in fields:
+        # Construct the keyword for the Q object dynamically
+        query = {f"{field}__{case_sensitive_i}contains": string}
+        expression |= Q(**query)  # Unpack the dictionary into Q()
+    output_queryset = input_queryset.filter(expression)[:limit]
+    return output_queryset


 @classmethod  # type: ignore
@@ -246,19 +202,15 @@ def search(
     *,
     field: StrField | None = None,
     limit: int | None = 20,
-    return_queryset: bool = False,
     case_sensitive: bool = False,
[… 1 removed line not captured in this view]
-) -> pd.DataFrame | QuerySet:
+) -> QuerySet:
     """{}."""
     return _search(
         cls=cls,
         string=string,
         field=field,
-        return_queryset=return_queryset,
         limit=limit,
         case_sensitive=case_sensitive,
-        synonyms_field=synonyms_field,
     )


@@ -535,7 +487,13 @@ def save(self, *args, **kwargs) -> Registry:
     self_on_db._state.db = db
     self_on_db.pk = pk_on_db
     # by default, transfer parents of the labels to maintain ontological hierarchy
[… 1 removed line not captured in this view]
+    try:
+        import bionty as bt
+
+        parents = kwargs.get("parents", bt.settings.auto_save_parents)
+    except ImportError:
+        parents = kwargs.get("parents", True)
+    add_from_kwargs = {"parents": parents}
     logger.info("transfer features")
     self.features._add_from(self_on_db, **add_from_kwargs)
     logger.info("transfer labels")
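The old DataFrame-based fuzzy search in `_search` is replaced by a Django query that ORs an `icontains` (or `contains`) lookup over the searchable char/text fields and returns a `QuerySet`. Below is a hedged, standalone sketch of how such an expression can be built; `CellType` in the usage comment is a placeholder model and assumes a configured Django project.

```python
from django.db.models import Q


def build_search_expression(fields: list[str], string: str, case_sensitive: bool = False) -> Q:
    # OR together one (i)contains lookup per searchable field
    lookup = "contains" if case_sensitive else "icontains"
    expression = Q()
    for field in fields:
        expression |= Q(**{f"{field}__{lookup}": string})
    return expression


# hypothetical usage against some registry model in a configured Django project:
# results = CellType.objects.filter(build_search_expression(["name", "synonyms"], "T cell"))[:20]
```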
lamindb/_save.py
CHANGED
@@ -9,10 +9,10 @@ from functools import partial
 from typing import TYPE_CHECKING, Iterable, overload

 import lamindb_setup
-from django.db import transaction
+from django.db import IntegrityError, transaction
 from django.utils.functional import partition
 from lamin_utils import logger
-from lamindb_setup.core.upath import …
+from lamindb_setup.core.upath import LocalPathClasses
 from lnschema_core.models import Artifact, Registry

 from lamindb.core._settings import settings
@@ -78,14 +78,15 @@ def save(
     # for artifacts, we want to bulk-upload rather than upload one-by-one
     non_artifacts, artifacts = partition(lambda r: isinstance(r, Artifact), records)
     if non_artifacts:
[… 3 removed lines not captured in this view]
-        bulk_create( …
+        non_artifacts_old, non_artifacts_new = partition(
+            lambda r: r._state.adding or r.pk is None, non_artifacts
+        )
+        bulk_create(non_artifacts_new, ignore_conflicts=ignore_conflicts)
+        if non_artifacts_old:
+            bulk_update(non_artifacts_old)
         non_artifacts_with_parents = [
-            r for r in …
+            r for r in non_artifacts_new if hasattr(r, "_parents")
         ]
-
         if len(non_artifacts_with_parents) > 0 and kwargs.get("parents") is not False:
             # this can only happen within lnschema_bionty right now!!
             # we might extend to core lamindb later
@@ -129,6 +130,19 @@ def bulk_create(records: Iterable[Registry], ignore_conflicts: bool | None = Fal
         orm.objects.bulk_create(records, ignore_conflicts=ignore_conflicts)


+def bulk_update(records: Iterable[Registry], ignore_conflicts: bool | None = False):
+    records_by_orm = defaultdict(list)
+    for record in records:
+        records_by_orm[record.__class__].append(record)
+    for orm, records in records_by_orm.items():
+        field_names = [
+            field.name
+            for field in orm._meta.fields
+            if (field.name != "created_at" and field.name != "id")
+        ]
+        orm.objects.bulk_update(records, field_names)
+
+
 # This is also used within Artifact.save()
 def check_and_attempt_upload(
     artifact: Artifact,
@@ -166,9 +180,12 @@ def copy_or_move_to_cache(artifact: Artifact, storage_path: UPath):
     is_dir = local_path.is_dir()
     cache_dir = settings._storage_settings.cache_dir

-    # just delete from the cache dir if …
-    if …
-    if …
+    # just delete from the cache dir if storage_path is local
+    if isinstance(storage_path, LocalPathClasses):
+        if (
+            local_path.as_posix() != storage_path.as_posix()
+            and cache_dir in local_path.parents
+        ):
             if is_dir:
                 shutil.rmtree(local_path)
             else:
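The `save()` change partitions non-artifact records into new and already-persisted ones, bulk-creating the former and bulk-updating the latter. The sketch below shows only the partitioning idea with a toy `Record` dataclass; it is not the lamindb `Registry` model, and the final comment just names the calls the diff makes afterwards.

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Record:
    # toy stand-in for a registry record; pk=None means "not yet saved"
    name: str
    pk: int | None = None


def split_new_and_existing(records: list[Record]) -> tuple[list[Record], list[Record]]:
    new = [r for r in records if r.pk is None]
    existing = [r for r in records if r.pk is not None]
    return new, existing


new, existing = split_new_and_existing([Record("a"), Record("b", pk=7)])
assert [r.name for r in new] == ["a"]
assert [r.name for r in existing] == ["b"]
# in the diff: bulk_create(new, ignore_conflicts=...) and bulk_update(existing, field_names)
```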
lamindb/core/_data.py
CHANGED
@@ -114,6 +114,16 @@ def format_repr(value: Registry, exclude: list[str] | str | None = None) -> str:
 @doc_args(Data.describe.__doc__)
 def describe(self: Data):
     """{}."""
+    # prefetch all many-to-many relationships
+    # doesn't work for describing using artifact
+    # self = (
+    #     self.__class__.objects.using(self._state.db)
+    #     .prefetch_related(
+    #         *[f.name for f in self.__class__._meta.get_fields() if f.many_to_many]
+    #     )
+    #     .get(id=self.id)
+    # )
+
     model_name = self.__class__.__name__
     msg = ""

@@ -125,6 +135,19 @@ def describe(self: Data):
             foreign_key_fields.append(f.name)
         else:
             direct_fields.append(f.name)
+    if not self._state.adding:
+        # prefetch foreign key relationships
+        self = (
+            self.__class__.objects.using(self._state.db)
+            .select_related(*foreign_key_fields)
+            .get(id=self.id)
+        )
+        # prefetch m-2-m relationships
+        self = (
+            self.__class__.objects.using(self._state.db)
+            .prefetch_related("feature_sets", "input_of")
+            .get(id=self.id)
+        )

     # provenance
     if len(foreign_key_fields) > 0:  # always True for Artifact and Collection
@@ -194,7 +217,7 @@ def get_labels(
             )
         else:
             qs_by_registry[registry] = getattr(
-                self, self.features. …
+                self, self.features.accessor_by_orm[registry]
             ).all()
     if flat_names:
         # returns a flat list of names
@@ -282,7 +305,7 @@ def add_labels(
         )
         for registry_name, records in records_by_registry.items():
             labels_accessor = getattr(
-                self, self.features. …
+                self, self.features.accessor_by_orm[registry_name]
             )
             # remove labels that are already linked as add doesn't perform update
             linked_labels = [r for r in records if r in labels_accessor.filter()]
@@ -321,7 +344,7 @@ def add_labels(
                 found_feature = True
         if not found_feature:
             if "external" in linked_features_by_slot:
-                feature_set = self.features. …
+                feature_set = self.features.feature_set_by_slot["external"]
                 features_list = feature_set.features.list()
             else:
                 features_list = []
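`describe()` now re-fetches the instance once with `select_related` over its foreign-key fields and once with `prefetch_related("feature_sets", "input_of")`, so the subsequent formatting does not trigger one query per relationship. A minimal sketch of that refetch step follows; it assumes a Django model instance with the attributes used in the diff and needs a configured lamindb instance to actually run.

```python
def refetch_with_relations(instance, foreign_key_fields: list[str]):
    # re-query the instance once with its foreign keys joined ...
    manager = instance.__class__.objects.using(instance._state.db)
    instance = manager.select_related(*foreign_key_fields).get(id=instance.id)
    # ... and once more with the two many-to-many relations prefetched
    return manager.prefetch_related("feature_sets", "input_of").get(id=instance.id)
```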
lamindb/core/_feature_manager.py
CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from itertools import compress
-from typing import TYPE_CHECKING, Iterable
+from typing import TYPE_CHECKING, Iterable

 import anndata as ad
 from anndata import AnnData
@@ -57,15 +57,13 @@ def get_feature_set_by_slot(host) -> dict:
     host_id_field = get_host_id_field(host)
     kwargs = {host_id_field: host.id}
     # otherwise, we need a query
-    feature_set_links = …
[… 1 removed line not captured in this view]
+    feature_set_links = (
+        host.feature_sets.through.objects.using(host_db)
+        .filter(**kwargs)
+        .select_related("feature_set")
     )
[… 2 removed lines not captured in this view]
-            id=feature_set_link.feature_set_id
-        )
-        for feature_set_link in feature_set_links
-    }
+
+    return {fsl.slot: fsl.feature_set for fsl in feature_set_links}


 def get_label_links(
@@ -74,7 +72,7 @@ def get_label_links(
     host_id_field = get_host_id_field(host)
     kwargs = {host_id_field: host.id, "feature_id": feature.id}
     link_records = (
-        getattr(host, host.features. …
+        getattr(host, host.features.accessor_by_orm[registry])
         .through.objects.using(host._state.db)
         .filter(**kwargs)
     )
@@ -93,48 +91,50 @@ def print_features(self: Data) -> str:

     from ._data import format_repr

[… 2 removed lines not captured in this view]
-    for slot, feature_set in self.features._feature_set_by_slot.items():
+    messages = []
+    for slot, feature_set in get_feature_set_by_slot(self).items():
         if feature_set.registry != "core.Feature":
             features = feature_set.members
+            # features.first() is a lot slower than features[0] here
             name_field = get_default_str_field(features[0])
-            feature_names = …
[… 1 removed line not captured in this view]
+            feature_names = list(features.values_list(name_field, flat=True)[:30])
+            messages.append(
                 f"    {colors.bold(slot)}: {format_repr(feature_set, exclude='hash')}\n"
             )
             print_values = _print_values(feature_names, n=20)
[… 1 removed line not captured in this view]
+            messages.append(f"      {print_values}\n")
         else:
[… 2 removed lines not captured in this view]
+            features_lookup = {
+                f.name: f for f in Feature.objects.using(self._state.db).filter().all()
+            }
+            messages.append(
                 f"    {colors.bold(slot)}: {format_repr(feature_set, exclude='hash')}\n"
             )
-            for …
[… 3 removed lines not captured in this view]
-            )
+            for name, row_type, registries in feature_set.features.values_list(
+                "name", "type", "registries"
+            ):
+                if row_type == "category" and registries is not None:
+                    labels = self.labels.get(features_lookup.get(name), mute=True)
                    indent = ""
                    if isinstance(labels, dict):
[… 1 removed line not captured in this view]
+                        messages.append(f"      🔗 {name} ({registries})\n")
                        indent = "    "
                    else:
-                        labels = { …
-                        for registry, …
[… 3 removed lines not captured in this view]
+                        labels = {registries: labels}
+                    for registry, registry_labels in labels.items():
+                        field = get_default_str_field(registry_labels)
+                        values_list = registry_labels.values_list(field, flat=True)
+                        count_str = f"{feature_set.n}, {colors.italic(f'{registry}')}"
+                        print_values = _print_values(values_list[:20], n=10)
                        msg_objects = (
-                            f"{indent}    🔗 { …
-                            f"    {print_values}\n"
+                            f"{indent}    🔗 {name} ({count_str}):" f" {print_values}\n"
                        )
[… 1 removed line not captured in this view]
+                        messages.append(msg_objects)
                else:
[… 1 removed line not captured in this view]
-                    if …
[… 1 removed line not captured in this view]
-                    return …
+                    messages.append(f"    {name} ({row_type})\n")
+    if messages:
+        messages.insert(0, f"{colors.green('Features')}:\n")
+    return "".join(messages)


 def parse_feature_sets_from_anndata(
@@ -204,30 +204,44 @@ class FeatureManager:

     def __init__(self, host: Artifact | Collection):
         self._host = host
-        self._feature_set_by_slot = …
-        self._accessor_by_orm = …
+        self._feature_set_by_slot = None
+        self._accessor_by_orm = None

     def __repr__(self) -> str:
-        if len(self. …
+        if len(self.feature_set_by_slot) > 0:
             return print_features(self._host)
         else:
             return "no linked features"

     def __getitem__(self, slot) -> QuerySet:
-        if slot not in self. …
+        if slot not in self.feature_set_by_slot:
             raise ValueError(
                 f"No linked feature set for slot: {slot}\nDid you get validation"
                 " warnings? Only features that match registered features get validated"
                 " and linked."
             )
-        feature_set = self. …
+        feature_set = self.feature_set_by_slot[slot]
         orm_name = feature_set.registry
         if hasattr(feature_set, "_features"):
             # feature set is not yet saved
             # need to think about turning this into a queryset
             return feature_set._features
         else:
-            return getattr(feature_set, self. …
+            return getattr(feature_set, self.accessor_by_orm[orm_name]).all()
+
+    @property
+    def feature_set_by_slot(self):
+        """Feature sets by slot."""
+        if self._feature_set_by_slot is None:
+            self._feature_set_by_slot = get_feature_set_by_slot(self._host)
+        return self._feature_set_by_slot
+
+    @property
+    def accessor_by_orm(self):
+        """Accessor by ORM."""
+        if self._accessor_by_orm is None:
+            self._accessor_by_orm = get_accessor_by_orm(self._host)
+        return self._accessor_by_orm

     def add(self, features: Iterable[Registry], slot: str | None = None):
         """Add features stratified by slot."""
@@ -351,26 +365,36 @@ class FeatureManager:
             )
             if link_record is None:
                 self._host.feature_sets.through(**kwargs).save(using=host_db)
[… 1 removed line not captured in this view]
+        if slot in self.feature_set_by_slot:
+            logger.warning(f"replaced existing {slot} featureset")
+        # this _feature_set_by_slot here is private
+        self._feature_set_by_slot[slot] = feature_set  # type: ignore

     def _add_from(self, data: Data, parents: bool = True):
         """Transfer features from a artifact or collection."""
         using_key = settings._using_key
-        for slot, feature_set in data.features. …
+        for slot, feature_set in data.features.feature_set_by_slot.items():
+            print(slot)
             members = feature_set.members
-            if members …
+            if len(members) == 0:
                 continue
             registry = members[0].__class__
             # note here the features are transferred based on an unique field
             field = REGISTRY_UNIQUE_FIELD.get(registry.__name__.lower(), "uid")
+            # TODO: get a default ID field for the registry
             if hasattr(registry, "ontology_id") and parents:
                 field = "ontology_id"
+            elif hasattr(registry, "ensembl_gene_id"):
+                field = "ensembl_gene_id"
+            elif hasattr(registry, "uniprotkb_id"):
+                field = "uniprotkb_id"
+
             if registry.__get_name_with_schema__() == "bionty.Organism":
                 parents = False
             # this will be e.g. be a list of ontology_ids or uids
             member_uids = list(members.values_list(field, flat=True))
             # create records from ontology_id in order to populate parents
-            if field == "ontology_id" and len(member_uids) > 0:
+            if field == "ontology_id" and len(member_uids) > 0 and parents:
                 # create from bionty
                 records = registry.from_values(member_uids, field=field)
                 if len(records) > 0:
@@ -378,8 +402,9 @@ class FeatureManager:
             validated = registry.validate(member_uids, field=field, mute=True)
             new_members_uids = list(compress(member_uids, ~validated))
             new_members = members.filter(**{f"{field}__in": new_members_uids}).all()
[… 2 removed lines not captured in this view]
+            n_new_members = len(new_members)
+            if n_new_members > 0:
+                mute = True if n_new_members > 10 else False
                 # transfer foreign keys needs to be run before transfer to default db
                 transfer_fk_to_default_db_bulk(new_members, using_key)
                 for feature in new_members:
@@ -390,9 +415,7 @@ class FeatureManager:
                     transfer_to_default_db(
                         feature, using_key, mute=mute, transfer_fk=False
                     )
-                logger.info(
-                    f"saving {new_members.count()} new {registry.__name__} records"
-                )
+                logger.info(f"saving {n_new_members} new {registry.__name__} records")
                 save(new_members, parents=parents)

         # create a new feature set from feature values using the same uid
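`FeatureManager` now builds its slot-to-feature-set and accessor mappings lazily: `__init__` stores `None`, and a property computes and caches the mapping on first access. A self-contained sketch of that caching pattern follows; the class and the loader callable are placeholders, not the lamindb classes.

```python
class LazySlotMapping:
    """Toy illustration of the lazy-caching pattern; not the lamindb FeatureManager."""

    def __init__(self, host, loader):
        self._host = host
        self._loader = loader  # e.g. get_feature_set_by_slot in the diff
        self._feature_set_by_slot = None

    @property
    def feature_set_by_slot(self) -> dict:
        # computed on first access, then reused
        if self._feature_set_by_slot is None:
            self._feature_set_by_slot = self._loader(self._host)
        return self._feature_set_by_slot


mapping = LazySlotMapping(host="artifact", loader=lambda host: {"var": f"feature set of {host}"})
assert mapping.feature_set_by_slot == {"var": "feature set of artifact"}
assert mapping.feature_set_by_slot is mapping.feature_set_by_slot  # cached dict is reused
```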
lamindb/core/_label_manager.py
CHANGED
@@ -42,14 +42,17 @@ def get_labels_as_dict(self: Data):
     return labels


-def print_labels(self: Data):
+def print_labels(self: Data, field: str = "name"):
     labels_msg = ""
     for related_name, (related_model, labels) in get_labels_as_dict(self).items():
[… 5 removed lines not captured in this view]
+        try:
+            labels_list = list(labels.values_list(field, flat=True))
+            if len(labels_list) > 0:
+                get_default_str_field(labels)
+                print_values = _print_values(labels_list[:20], n=10)
+                labels_msg += f"    📎 {related_name} ({len(labels_list)}, {colors.italic(related_model)}): {print_values}\n"
+        except Exception:
+            continue
     if len(labels_msg) > 0:
         return f"{colors.green('Labels')}:\n{labels_msg}"
     else:
@@ -72,7 +75,7 @@ def transfer_add_labels(labels, features_lookup_self, self, row, parents: bool =
     # link labels records from self db
     self._host.labels.add(
         validated_labels + new_labels,
-        feature= …
+        feature=features_lookup_self.get(row["name"]),
     )

     # validate labels on the default db
@@ -94,6 +97,10 @@ def validate_labels(labels: QuerySet | list | dict, parents: bool = True):
         field = REGISTRY_UNIQUE_FIELD.get(registry.__name__.lower(), "uid")
         if hasattr(registry, "ontology_id") and parents:
             field = "ontology_id"
+        elif hasattr(registry, "ensembl_gene_id"):
+            field = "ensembl_gene_id"
+        elif hasattr(registry, "uniprotkb_id"):
+            field = "uniprotkb_id"
         if registry.__get_name_with_schema__() == "bionty.Organism":
             parents = False
         # if the field value is None, use uid field
@@ -195,9 +202,13 @@ class LabelManager:
         >>> file1.ulabels.set(labels)
         >>> file2.labels.add_from(file1)
         """
[… 2 removed lines not captured in this view]
-        for …
+        from django.db.utils import ProgrammingError
+
+        features_lookup_self = {f.name: f for f in Feature.objects.filter().all()}
+        features_lookup_data = {
+            f.name: f for f in Feature.objects.using(data._state.db).filter().all()
+        }
+        for _, feature_set in data.features.feature_set_by_slot.items():
             # add labels stratified by feature
             if feature_set.registry == "core.Feature":
                 # df_slot is the Feature table with type and registries
@@ -207,30 +218,31 @@ class LabelManager:
                     logger.info(f"transferring {row['name']}")
                     # labels records from data db
                     labels = data.labels.get(
[… 1 removed line not captured in this view]
+                        features_lookup_data.get(row["name"]), mute=True
                     )
                     transfer_add_labels(
                         labels, features_lookup_self, self, row, parents=parents
                     )
[… 1 removed line not captured in this view]
-        # for now, have this be duplicated, need to disentangle above
+        # TODO: for now, has to be duplicated
         using_key = settings._using_key
         for related_name, (_, labels) in get_labels_as_dict(data).items():
             labels = labels.all()
[… 1 removed line not captured in this view]
+            try:
+                if len(labels) == 0:
+                    continue
+                validated_labels, new_labels = validate_labels(labels, parents=parents)
+                if len(new_labels) > 0:
+                    transfer_fk_to_default_db_bulk(new_labels, using_key)
+                    for label in new_labels:
+                        transfer_to_default_db(
+                            label, using_key, mute=True, transfer_fk=False
+                        )
+                    save(new_labels, parents=parents)
+                # this should not occur as file and collection should have the same attributes
+                # but this might not be true for custom schema
+                labels_list = validated_labels + new_labels
+                if hasattr(self._host, related_name):
+                    getattr(self._host, related_name).add(*labels_list)
+            # ProgrammingError is raised when schemas don't match between source and target instances
+            except ProgrammingError:
                 continue
-            validated_labels, new_labels = validate_labels(
-                labels.all(), parents=parents
-            )
-            if len(new_labels) > 0:
-                transfer_fk_to_default_db_bulk(new_labels, using_key)
-                for label in new_labels:
-                    transfer_to_default_db(
-                        label, using_key, mute=True, transfer_fk=False
-                    )
-                save(new_labels, parents=parents)
-            # this should not occur as file and collection should have the same attributes
-            # but this might not be true for custom schema
-            labels_list = validated_labels + new_labels
-            if hasattr(self._host, related_name):
-                getattr(self._host, related_name).add(*labels_list)
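`add_from()` now wraps each registry's label transfer in a try/except so that a schema mismatch between the source and target instances, surfacing as `django.db.utils.ProgrammingError`, skips that registry instead of aborting the whole transfer. A hedged sketch of that control flow follows; it assumes Django is installed, and the transfer step is left as a placeholder callable.

```python
from django.db.utils import ProgrammingError


def transfer_all_labels(labels_by_related_name: dict, transfer_one) -> list[str]:
    # transfer each registry's labels independently; a schema mismatch on one
    # registry (ProgrammingError) skips it instead of aborting the whole loop
    transferred = []
    for related_name, labels in labels_by_related_name.items():
        try:
            if not labels:
                continue
            transfer_one(related_name, labels)
            transferred.append(related_name)
        except ProgrammingError:
            continue
    return transferred
```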
{lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lamindb
-Version: 0.71.2
+Version: 0.71.3
 Summary: A data framework for biology.
 Author-email: Lamin Labs <open-source@lamin.ai>
 Requires-Python: >=3.8
@@ -9,10 +9,10 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
-Requires-Dist: lnschema_core==0.66. …
-Requires-Dist: lamindb_setup==0.71. …
+Requires-Dist: lnschema_core==0.66.5
+Requires-Dist: lamindb_setup==0.71.4
 Requires-Dist: lamin_utils==0.13.2
-Requires-Dist: lamin_cli==0.13. …
+Requires-Dist: lamin_cli==0.13.2
 Requires-Dist: rapidfuzz
 Requires-Dist: pyarrow
 Requires-Dist: typing_extensions!=4.6.0
@@ -23,8 +23,9 @@ Requires-Dist: fsspec
 Requires-Dist: pandas
 Requires-Dist: graphviz
 Requires-Dist: psycopg2-binary
+Requires-Dist: psutil
 Requires-Dist: lamindb_setup[aws] ; extra == "aws"
-Requires-Dist: bionty==0.42. …
+Requires-Dist: bionty==0.42.11 ; extra == "bionty"
 Requires-Dist: pandas<2 ; extra == "dev"
 Requires-Dist: pre-commit ; extra == "dev"
 Requires-Dist: nox ; extra == "dev"
@@ -37,7 +38,7 @@ Requires-Dist: faker-biology ; extra == "dev"
 Requires-Dist: django-schema-graph ; extra == "erdiagram"
 Requires-Dist: readfcs>=1.1.8 ; extra == "fcs"
 Requires-Dist: lamindb_setup[gcp] ; extra == "gcp"
-Requires-Dist: nbproject==0.10. …
+Requires-Dist: nbproject==0.10.3 ; extra == "jupyter"
 Requires-Dist: nbstripout==0.6.1 ; extra == "jupyter"
 Requires-Dist: nbconvert ; extra == "jupyter"
 Requires-Dist: zarr>=2.16.0 ; extra == "zarr"
{lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/RECORD
CHANGED
@@ -1,29 +1,29 @@
-lamindb/__init__.py,sha256= …
+lamindb/__init__.py,sha256=qomw1l2wiK8BNFYinT6ejwDpCRMTtD9EHY3678R253k,2182
 lamindb/_annotate.py,sha256=kgbilILfgzoS-GEpjxzVwRMs7CoSa9BNEcIWXFBW69I,43915
-lamindb/_artifact.py,sha256= …
-lamindb/_can_validate.py,sha256= …
-lamindb/_collection.py,sha256= …
+lamindb/_artifact.py,sha256=Xm3Q0yLQxKHkYHUVR7Tlg6SIRVyg6hf7DSXhoilsEQM,40509
+lamindb/_can_validate.py,sha256=s1q0lxplqnhytrVgArBTm05XKMMmpreK0ZlVCsd2jjk,14849
+lamindb/_collection.py,sha256=T6_jVVdYp4ewAlTu40NoH7NhgwOWJYM4jNsK51JE_Wg,14624
 lamindb/_feature.py,sha256=srAKchY7gqD-h-cWlEiAWuHlpFKFwv0PWIA-JX0Go8c,6758
 lamindb/_feature_set.py,sha256=AzjOcHzQajpeikPOAic-aj0z_C5b7VpHVegg3ThRSLw,9045
 lamindb/_filter.py,sha256=xnjJzjF3Zj4dK_Kfymvhgczk27MhhXz5ZYc7XINbgHY,1331
 lamindb/_finish.py,sha256=6GwhqrC-x-JdFd16i7-uyhCWeQgGKxr25aSsSXPZt4g,8598
-lamindb/_from_values.py,sha256= …
+lamindb/_from_values.py,sha256=L9RBI9G7TZCOnbT_DcLodhFccG4E9zbmWrhM1P3eKrc,13544
 lamindb/_is_versioned.py,sha256=0PgRCmxEmYDcAjllLSOYZm132B1lW6QgmBBERhRyFt0,1341
 lamindb/_parents.py,sha256=N9T8jbd3eaoHDLE9TD1y1QgGcO81E6Brapy8LILzRCQ,14790
 lamindb/_query_manager.py,sha256=3zokXqxgj9vTJBnN2sbYKS-q69fyDDPF_aGq_rFHzXU,4066
 lamindb/_query_set.py,sha256=n0owd74cTzGz6-mIv8SlDz0wcyRz7Xw3Ke1LhE8UlIg,10784
-lamindb/_registry.py,sha256= …
+lamindb/_registry.py,sha256=xgHyw49yRcqxaUdzCZddFgqURBSHHX_kjIHGteLFnP4,18173
 lamindb/_run.py,sha256=We50MUeGH778begutDGoNFM-n5_81_BfMCnZS1bdkt0,1937
-lamindb/_save.py,sha256= …
+lamindb/_save.py,sha256=pksthZrL3SMjLoFGmRNnCO92iXHHHhRk9mpOlW7lXTU,11514
 lamindb/_storage.py,sha256=VW8xq3VRv58-ciholvOdlcgvp_OIlLxx5GxLt-e2Irs,614
 lamindb/_transform.py,sha256=E9C7psuOnsNrUQpWRuGgEUM8_pc7YhDn7n4ieHzB4X0,3169
 lamindb/_ulabel.py,sha256=e5dw9h1tR0_u-DMn7Gzx0WhUhV5w7j4v3QbnLWQV7eI,1941
 lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
 lamindb/_view.py,sha256=GV1FrqIMmdooEkA-5zvcTWgV1nqx1sehi6WdWEaFpxM,2171
 lamindb/core/__init__.py,sha256=TI9_1Jtpwae_cUPQ3-U0RRPH5c3GBA-gLhHvlAk_Nlo,1213
-lamindb/core/_data.py,sha256= …
-lamindb/core/_feature_manager.py,sha256= …
-lamindb/core/_label_manager.py,sha256= …
+lamindb/core/_data.py,sha256=ujwl2fA0gScz610DN50bAdAi_XCF-USw_yhwXkhWcFY,18445
+lamindb/core/_feature_manager.py,sha256=WvHY1zhB7vg8Dg7JW9ouqi_qAKJOfJ4gklwnHSiEX7s,16833
+lamindb/core/_label_manager.py,sha256=HXWYYg6k6vfsTGgXcfjMbxQTsIRV1a5m-WWsC4s-daU,9699
 lamindb/core/_mapped_collection.py,sha256=_OwFZh5SePDUD70XIK5kngv3we_Z5-YdGHNfpUSatSQ,19469
 lamindb/core/_run_context.py,sha256=7iCCOB2z154puBI7ZKzcaEZ5l6_9S8aSYBOBJI65lyc,17117
 lamindb/core/_settings.py,sha256=rW1KfEXfT56XErwcnSuQxaCytpOy1kJ-u7tVmkmNmxY,6131
@@ -48,7 +48,7 @@ lamindb/integrations/__init__.py,sha256=aH2PmO2m4-vwIifMYTB0Fyyr_gZWtVnV71jT0tVW
 lamindb/integrations/_vitessce.py,sha256=b0FqTBsP-M6Q7xCYXVwFwM8DOIeeOBZEhYbryhtq4gk,2535
 lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
 lamindb/setup/core/__init__.py,sha256=SevlVrc2AZWL3uALbE5sopxBnIZPWZ1IB0NBDudiAL8,167
-lamindb-0.71. …
-lamindb-0.71. …
-lamindb-0.71. …
-lamindb-0.71. …
+lamindb-0.71.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+lamindb-0.71.3.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+lamindb-0.71.3.dist-info/METADATA,sha256=ITpf4eh_xlvgR5d0Z_hn8FSJ8s7JUsZRX4fMNoQ1bDA,2697
+lamindb-0.71.3.dist-info/RECORD,,
{lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/LICENSE
File without changes
{lamindb-0.71.2.dist-info → lamindb-0.71.3.dist-info}/WHEEL
File without changes