lamindb 0.76.8__py3-none-any.whl → 0.76.9__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (61)
  1. lamindb/__init__.py +113 -113
  2. lamindb/_artifact.py +1205 -1205
  3. lamindb/_can_validate.py +579 -579
  4. lamindb/_collection.py +389 -387
  5. lamindb/_curate.py +1601 -1601
  6. lamindb/_feature.py +155 -155
  7. lamindb/_feature_set.py +242 -242
  8. lamindb/_filter.py +23 -23
  9. lamindb/_finish.py +256 -256
  10. lamindb/_from_values.py +382 -382
  11. lamindb/_is_versioned.py +40 -40
  12. lamindb/_parents.py +476 -476
  13. lamindb/_query_manager.py +125 -125
  14. lamindb/_query_set.py +362 -362
  15. lamindb/_record.py +649 -649
  16. lamindb/_run.py +57 -57
  17. lamindb/_save.py +308 -308
  18. lamindb/_storage.py +14 -14
  19. lamindb/_transform.py +127 -127
  20. lamindb/_ulabel.py +56 -56
  21. lamindb/_utils.py +9 -9
  22. lamindb/_view.py +72 -72
  23. lamindb/core/__init__.py +94 -94
  24. lamindb/core/_context.py +574 -574
  25. lamindb/core/_data.py +438 -438
  26. lamindb/core/_feature_manager.py +867 -867
  27. lamindb/core/_label_manager.py +253 -253
  28. lamindb/core/_mapped_collection.py +631 -597
  29. lamindb/core/_settings.py +187 -187
  30. lamindb/core/_sync_git.py +138 -138
  31. lamindb/core/_track_environment.py +27 -27
  32. lamindb/core/datasets/__init__.py +59 -59
  33. lamindb/core/datasets/_core.py +581 -571
  34. lamindb/core/datasets/_fake.py +36 -36
  35. lamindb/core/exceptions.py +90 -90
  36. lamindb/core/fields.py +12 -12
  37. lamindb/core/loaders.py +164 -164
  38. lamindb/core/schema.py +56 -56
  39. lamindb/core/storage/__init__.py +25 -25
  40. lamindb/core/storage/_anndata_accessor.py +740 -740
  41. lamindb/core/storage/_anndata_sizes.py +41 -41
  42. lamindb/core/storage/_backed_access.py +98 -98
  43. lamindb/core/storage/_tiledbsoma.py +204 -204
  44. lamindb/core/storage/_valid_suffixes.py +21 -21
  45. lamindb/core/storage/_zarr.py +110 -110
  46. lamindb/core/storage/objects.py +62 -62
  47. lamindb/core/storage/paths.py +172 -172
  48. lamindb/core/subsettings/__init__.py +12 -12
  49. lamindb/core/subsettings/_creation_settings.py +38 -38
  50. lamindb/core/subsettings/_transform_settings.py +21 -21
  51. lamindb/core/types.py +19 -19
  52. lamindb/core/versioning.py +158 -158
  53. lamindb/integrations/__init__.py +12 -12
  54. lamindb/integrations/_vitessce.py +107 -107
  55. lamindb/setup/__init__.py +14 -14
  56. lamindb/setup/core/__init__.py +4 -4
  57. {lamindb-0.76.8.dist-info → lamindb-0.76.9.dist-info}/LICENSE +201 -201
  58. {lamindb-0.76.8.dist-info → lamindb-0.76.9.dist-info}/METADATA +4 -4
  59. lamindb-0.76.9.dist-info/RECORD +60 -0
  60. {lamindb-0.76.8.dist-info → lamindb-0.76.9.dist-info}/WHEEL +1 -1
  61. lamindb-0.76.8.dist-info/RECORD +0 -60
lamindb/_record.py CHANGED
@@ -1,649 +1,649 @@
from __future__ import annotations

import builtins
from typing import TYPE_CHECKING, List, NamedTuple

import dj_database_url
import lamindb_setup as ln_setup
from django.db import connections, transaction
from django.db.models import IntegerField, Manager, Q, QuerySet, Value
from lamin_utils import logger
from lamin_utils._lookup import Lookup
from lamindb_setup._connect_instance import get_owner_name_from_identifier
from lamindb_setup.core._docs import doc_args
from lamindb_setup.core._hub_core import connect_instance
from lnschema_core.models import IsVersioned, Record

from lamindb._utils import attach_func_to_class_method
from lamindb.core._settings import settings

from ._from_values import get_or_create_records

if TYPE_CHECKING:
    import pandas as pd
    from lnschema_core.types import ListLike, StrField


IPYTHON = getattr(builtins, "__IPYTHON__", False)

def init_self_from_db(self: Record, existing_record: Record):
    new_args = [
        getattr(existing_record, field.attname) for field in self._meta.concrete_fields
    ]
    super(self.__class__, self).__init__(*new_args)
    self._state.adding = False  # mimic from_db
    self._state.db = "default"


def update_attributes(record: Record, attributes: dict[str, str]):
    for key, value in attributes.items():
        if getattr(record, key) != value:
            logger.warning(f"updated {key} from {getattr(record, key)} to {value}")
            setattr(record, key, value)


def validate_required_fields(record: Record, kwargs):
    required_fields = {
        k.name for k in record._meta.fields if not k.null and k.default is None
    }
    required_fields_not_passed = {k: None for k in required_fields if k not in kwargs}
    kwargs.update(required_fields_not_passed)
    missing_fields = [
        k for k, v in kwargs.items() if v is None and k in required_fields
    ]
    if missing_fields:
        raise TypeError(f"{missing_fields} are required.")

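# --- Example (sketch): required-field validation on construction ---
# A minimal illustration of `validate_required_fields`, assuming a registry
# such as `ln.ULabel` whose `name` column is non-nullable and has no default.
import lamindb as ln  # hypothetical session with an initialized instance

try:
    ln.ULabel()  # no `name` passed
except TypeError as error:
    print(error)  # roughly: "['name'] are required."
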
def suggest_records_with_similar_names(record: Record, kwargs) -> bool:
    """Returns True if found exact match, otherwise False.

    Logs similar matches if found.
    """
    if kwargs.get("name") is None:
        return False
    queryset = _search(
        record.__class__, kwargs["name"], field="name", truncate_words=True, limit=3
    )
    if not queryset.exists():  # empty queryset
        return False
    for alternative_record in queryset:
        if alternative_record.name == kwargs["name"]:
            return True
    s, it, nots = ("", "it", "s") if len(queryset) == 1 else ("s", "one of them", "")
    msg = f"record{s} with similar name{s} exist{nots}! did you mean to load {it}?"
    if IPYTHON:
        from IPython.display import display

        logger.warning(f"{msg}")
        if settings._verbosity_int >= 1:
            display(queryset.df())
    else:
        logger.warning(f"{msg}\n{queryset}")
    return False

def __init__(record: Record, *args, **kwargs):
    if not args:
        validate_required_fields(record, kwargs)

        # do not search for names if an id is passed; this is important
        # e.g. when synching ids from the notebook store to lamindb
        has_consciously_provided_uid = False
        if "_has_consciously_provided_uid" in kwargs:
            has_consciously_provided_uid = kwargs.pop("_has_consciously_provided_uid")
        if settings.creation.search_names and not has_consciously_provided_uid:
            match = suggest_records_with_similar_names(record, kwargs)
            if match:
                if "version" in kwargs:
                    if kwargs["version"] is not None:
                        version_comment = " and version"
                        existing_record = record.__class__.filter(
                            name=kwargs["name"], version=kwargs["version"]
                        ).one_or_none()
                    else:
                        # for a versioned record, an exact name match is not a
                        # criterion for retrieving a record in case `version`
                        # isn't passed - we'd always pull out many records with exactly the
                        # same name
                        existing_record = None
                else:
                    version_comment = ""
                    existing_record = record.__class__.filter(
                        name=kwargs["name"]
                    ).one_or_none()
                if existing_record is not None:
                    logger.important(
                        f"returning existing {record.__class__.__name__} record with same"
                        f" name{version_comment}: '{kwargs['name']}'"
                    )
                    init_self_from_db(record, existing_record)
                    return None
        super(Record, record).__init__(**kwargs)
    elif len(args) != len(record._meta.concrete_fields):
        raise ValueError("please provide keyword arguments, not plain arguments")
    else:
        # object is loaded from DB (**kwargs could be omitted below, I believe)
        super(Record, record).__init__(*args, **kwargs)

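# --- Example (sketch): name-based idempotency in `__init__` ---
# With `settings.creation.search_names` enabled (the default), constructing a
# record whose name already exists returns the existing row instead of
# creating a duplicate. Registry and names are illustrative.
import lamindb as ln

label = ln.ULabel(name="Project A")
label.save()
same_label = ln.ULabel(name="Project A")  # logs "returning existing ULabel record ..."
assert same_label.uid == label.uid
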
@classmethod  # type:ignore
@doc_args(Record.filter.__doc__)
def filter(cls, *queries, **expressions) -> QuerySet:
    """{}"""  # noqa: D415
    from lamindb._filter import filter

    return filter(cls, *queries, **expressions)


@classmethod  # type:ignore
@doc_args(Record.get.__doc__)
def get(
    cls,
    idlike: int | str | None = None,
    **expressions,
) -> Record:
    """{}"""  # noqa: D415
    # this is the only place in which we need the lamindb queryset
    # in this file; everywhere else it should be Django's
    from lamindb._query_set import QuerySet

    return QuerySet(model=cls).get(idlike, **expressions)


@classmethod  # type:ignore
@doc_args(Record.df.__doc__)
def df(
    cls,
    include: str | list[str] | None = None,
    join: str = "inner",
    limit: int = 100,
) -> pd.DataFrame:
    """{}"""  # noqa: D415
    from lamindb._filter import filter

    query_set = filter(cls)
    if hasattr(cls, "updated_at"):
        query_set = query_set.order_by("-updated_at")
    return query_set[:limit].df(include=include, join=join)

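# --- Example (sketch): the query classmethods attached above ---
# Hypothetical calls against the ULabel registry; values are placeholders.
import lamindb as ln

ln.ULabel.df(limit=10)                        # the 10 most recently updated rows
ln.ULabel.filter(name__startswith="Project")  # lazy QuerySet of matching records
ln.ULabel.get("<uid>")                        # one record by uid or by field lookups
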
# from_values doesn't apply for QuerySet or Manager
@classmethod  # type:ignore
@doc_args(Record.from_values.__doc__)
def from_values(
    cls,
    values: ListLike,
    field: StrField | None = None,
    create: bool = False,
    organism: Record | str | None = None,
    source: Record | None = None,
    mute: bool = False,
) -> list[Record]:
    """{}"""  # noqa: D415
    from_source = True if cls.__module__.startswith("bionty.") else False

    field_str = get_name_field(cls, field=field)
    return get_or_create_records(
        iterable=values,
        field=getattr(cls, field_str),
        create=create,
        from_source=from_source,
        organism=organism,
        source=source,
        mute=mute,
    )

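# --- Example (sketch): bulk-creating records with `from_values` ---
# For a bionty registry, `from_source` is inferred from the module name and
# values are validated against the public source; plain registries match or
# create by the name field. Assumes the bionty plugin is installed; names are
# illustrative.
import bionty as bt
import lamindb as ln

cell_types = bt.CellType.from_values(["T cell", "B cell"], field=bt.CellType.name)
ln.save(cell_types)

labels = ln.ULabel.from_values(["batch1", "batch2"], create=True)  # create if missing
ln.save(labels)
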
def _search(
    cls,
    string: str,
    *,
    field: StrField | list[StrField] | None = None,
    limit: int | None = 20,
    case_sensitive: bool = False,
    using_key: str | None = None,
    truncate_words: bool = False,
) -> QuerySet:
    input_queryset = _queryset(cls, using_key=using_key)
    registry = input_queryset.model
    if field is None:
        fields = [
            field.name
            for field in registry._meta.fields
            if field.get_internal_type() in {"CharField", "TextField"}
        ]
    else:
        if not isinstance(field, list):
            fields_input = [field]
        else:
            fields_input = field
        fields = []
        for field in fields_input:
            if not isinstance(field, str):
                try:
                    fields.append(field.field.name)
                except AttributeError as error:
                    raise TypeError(
                        "Please pass a Record string field, e.g., `CellType.name`!"
                    ) from error
            else:
                fields.append(field)

    # decompose search string
    def truncate_word(word) -> str:
        if len(word) > 5:
            n_80_pct = int(len(word) * 0.8)
            return word[:n_80_pct]
        elif len(word) > 3:
            return word[:3]
        else:
            return word

    decomposed_string = str(string).split()
    # add the entire string back
    decomposed_string += [string]
    for word in decomposed_string:
        # will not search against words with 3 or fewer characters
        if len(word) <= 3:
            decomposed_string.remove(word)
    if truncate_words:
        decomposed_string = [truncate_word(word) for word in decomposed_string]
    # construct the query
    expression = Q()
    case_sensitive_i = "" if case_sensitive else "i"
    for field in fields:
        for word in decomposed_string:
            query = {f"{field}__{case_sensitive_i}contains": word}
            expression |= Q(**query)
    output_queryset = input_queryset.filter(expression)
    # ensure exact matches are at the top
    narrow_expression = Q()
    for field in fields:
        query = {f"{field}__{case_sensitive_i}contains": string}
        narrow_expression |= Q(**query)
    refined_output_queryset = output_queryset.filter(narrow_expression).annotate(
        ordering=Value(1, output_field=IntegerField())
    )
    remaining_output_queryset = output_queryset.exclude(narrow_expression).annotate(
        ordering=Value(2, output_field=IntegerField())
    )
    combined_queryset = refined_output_queryset.union(
        remaining_output_queryset
    ).order_by("ordering")[:limit]
    return combined_queryset

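# --- Worked example (sketch): query decomposition in `_search` ---
# For string = "T helper cell" with truncate_words=True (the parameters used by
# `suggest_records_with_similar_names`):
#   str.split() plus the full string -> ["T", "helper", "cell", "T helper cell"]
#   drop tokens with <= 3 characters -> ["helper", "cell", "T helper cell"]
#   truncate_word()                  -> ["help", "cel", "T helper c"]
#     ("helper": 6 chars -> first int(6 * 0.8) = 4; "cell": 4 chars -> first 3;
#      "T helper cell": 13 chars -> first int(13 * 0.8) = 10)
# Each token is OR-combined into case-insensitive `icontains` filters over all
# char/text fields, and rows containing the full string are ranked first.
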
@classmethod  # type: ignore
@doc_args(Record.search.__doc__)
def search(
    cls,
    string: str,
    *,
    field: StrField | None = None,
    limit: int | None = 20,
    case_sensitive: bool = False,
) -> QuerySet:
    """{}"""  # noqa: D415
    return _search(
        cls=cls,
        string=string,
        field=field,
        limit=limit,
        case_sensitive=case_sensitive,
    )

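# --- Example (sketch): calling `search` on a registry ---
# Hypothetical query against the ULabel registry; returns a QuerySet in which
# exact substring matches are ordered first.
import lamindb as ln

hits = ln.ULabel.search("project", limit=5)
hits.df()  # inspect the ranked matches as a DataFrame
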
def _lookup(
    cls,
    field: StrField | None = None,
    return_field: StrField | None = None,
    using_key: str | None = None,
) -> NamedTuple:
    """{}"""  # noqa: D415
    queryset = _queryset(cls, using_key=using_key)
    field = get_name_field(registry=queryset.model, field=field)

    return Lookup(
        records=queryset,
        values=[i.get(field) for i in queryset.values()],
        tuple_name=cls.__class__.__name__,
        prefix="ln",
    ).lookup(
        return_field=(
            get_name_field(registry=queryset.model, field=return_field)
            if return_field is not None
            else None
        )
    )


@classmethod  # type: ignore
@doc_args(Record.lookup.__doc__)
def lookup(
    cls,
    field: StrField | None = None,
    return_field: StrField | None = None,
) -> NamedTuple:
    """{}"""  # noqa: D415
    return _lookup(cls=cls, field=field, return_field=return_field)

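# --- Example (sketch): auto-complete via `lookup` ---
# `lookup` returns a NamedTuple keyed by the registry's name field, which is
# convenient for tab completion in notebooks. Names are illustrative.
import lamindb as ln

labels = ln.ULabel.lookup()           # keyed by `name` by default
labels.batch1                         # the ULabel named "batch1", if it exists
ln.ULabel.lookup(return_field="uid")  # hedged: map names to uids instead of records
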
def get_name_field(
    registry: type[Record] | QuerySet | Manager,
    *,
    field: str | StrField | None = None,
) -> str:
    """Get the 1st char or text field from the registry."""
    if isinstance(registry, (QuerySet, Manager)):
        registry = registry.model
    model_field_names = [i.name for i in registry._meta.fields]

    # set to default name field
    if field is None:
        if hasattr(registry, "_name_field"):
            field = registry._meta.get_field(registry._name_field)
        elif "name" in model_field_names:
            field = registry._meta.get_field("name")
        else:
            # first char or text field that doesn't contain "id"
            for i in registry._meta.fields:
                if "id" in i.name:
                    continue
                if i.get_internal_type() in {"CharField", "TextField"}:
                    field = i
                    break

        # no default name field can be found
        if field is None:
            raise ValueError(
                "please pass a Record string field, e.g., `CellType.name`!"
            )
        else:
            field = field.name  # type:ignore
    if not isinstance(field, str):
        try:
            field = field.field.name
        except AttributeError:
            raise TypeError(
                "please pass a Record string field, e.g., `CellType.name`!"
            ) from None

    return field


def _queryset(cls: Record | QuerySet | Manager, using_key: str) -> QuerySet:
    if isinstance(cls, (QuerySet, Manager)):
        return cls.all()
    elif using_key is None or using_key == "default":
        return cls.objects.all()
    else:
        # using must be called on cls, otherwise the connection isn't found
        return cls.using(using_key).all()

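# --- Example (sketch): resolving the name field of a registry ---
# `get_name_field` is what lets `from_values` and `lookup` work without an
# explicit field argument. Assuming the core registries shipped with lamindb:
import lnschema_core.models as models

from lamindb._record import get_name_field

get_name_field(models.ULabel)                 # "name"
get_name_field(models.ULabel.objects.all())   # QuerySets/Managers are unwrapped first
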
def add_db_connection(db: str, using: str):
    db_config = dj_database_url.config(
        default=db, conn_max_age=600, conn_health_checks=True
    )
    db_config["TIME_ZONE"] = "UTC"
    db_config["OPTIONS"] = {}
    db_config["AUTOCOMMIT"] = True
    connections.settings[using] = db_config


@classmethod  # type: ignore
@doc_args(Record.using.__doc__)
def using(
    cls,
    instance: str | None,
) -> QuerySet:
    """{}"""  # noqa: D415
    if instance is None:
        return QuerySet(model=cls, using=None)
    from lamindb_setup._connect_instance import (
        load_instance_settings,
        update_db_using_local,
    )
    from lamindb_setup.core._settings_store import instance_settings_file

    owner, name = get_owner_name_from_identifier(instance)
    settings_file = instance_settings_file(name, owner)
    if not settings_file.exists():
        load_result = connect_instance(owner=owner, name=name)
        if isinstance(load_result, str):
            raise RuntimeError(
                f"Failed to load instance {instance}, please check your permission!"
            )
        instance_result, _ = load_result
        settings_file = instance_settings_file(name, owner)
        db = update_db_using_local(instance_result, settings_file)
    else:
        isettings = load_instance_settings(settings_file)
        db = isettings.db
    add_db_connection(db, instance)
    return QuerySet(model=cls, using=instance)

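# --- Example (sketch): querying another instance with `using` ---
# The identifier is an "owner/name" handle resolved through the hub; the
# connection is registered via `add_db_connection` on first use. The handle
# below is a placeholder.
import lamindb as ln

remote_artifacts = ln.Artifact.using("account-handle/instance-name")
first_parquet = remote_artifacts.filter(suffix=".parquet").first()
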
REGISTRY_UNIQUE_FIELD = {
    "storage": "root",
    "feature": "name",
    "ulabel": "name",
}


def update_fk_to_default_db(
    records: Record | list[Record] | QuerySet,
    fk: str,
    using_key: str | None,
    transfer_logs: dict,
):
    record = records[0] if isinstance(records, (List, QuerySet)) else records
    if hasattr(record, f"{fk}_id") and getattr(record, f"{fk}_id") is not None:
        fk_record = getattr(record, fk)
        field = REGISTRY_UNIQUE_FIELD.get(fk, "uid")
        fk_record_default = fk_record.__class__.filter(
            **{field: getattr(fk_record, field)}
        ).one_or_none()
        if fk_record_default is None:
            from copy import copy

            fk_record_default = copy(fk_record)
            transfer_to_default_db(
                fk_record_default, using_key, save=True, transfer_logs=transfer_logs
            )
        if isinstance(records, (List, QuerySet)):
            for r in records:
                setattr(r, f"{fk}", None)
                setattr(r, f"{fk}_id", fk_record_default.id)
        else:
            setattr(records, f"{fk}", None)
            setattr(records, f"{fk}_id", fk_record_default.id)


FKBULK = [
    "organism",
    "source",
    "_source_code_artifact",  # Transform
    "report",  # Run
]


def transfer_fk_to_default_db_bulk(
    records: list | QuerySet, using_key: str | None, transfer_logs: dict
):
    for fk in FKBULK:
        update_fk_to_default_db(records, fk, using_key, transfer_logs=transfer_logs)

def transfer_to_default_db(
    record: Record,
    using_key: str | None,
    *,
    transfer_logs: dict,
    save: bool = False,
    transfer_fk: bool = True,
) -> Record | None:
    from lamindb.core._context import context
    from lamindb.core._data import WARNING_RUN_TRANSFORM

    registry = record.__class__
    record_on_default = registry.objects.filter(uid=record.uid).one_or_none()
    record_str = f"{record.__class__.__name__}(uid='{record.uid}')"
    if record_on_default is not None:
        transfer_logs["mapped"].append(record_str)
        return record_on_default
    else:
        transfer_logs["transferred"].append(record_str)

    if hasattr(record, "created_by_id"):
        record.created_by = None
        record.created_by_id = ln_setup.settings.user.id
    if hasattr(record, "run_id"):
        record.run = None
        if context.run is not None:
            record.run_id = context.run.id
        else:
            if not settings.creation.artifact_silence_missing_run_warning:
                logger.warning(WARNING_RUN_TRANSFORM)
            record.run_id = None
    if hasattr(record, "transform_id") and record._meta.model_name != "run":
        record.transform = None
        if context.run is not None:
            record.transform_id = context.run.transform_id
        else:
            record.transform_id = None
    # transfer other foreign key fields
    fk_fields = [
        i.name
        for i in record._meta.fields
        if i.get_internal_type() == "ForeignKey"
        if i.name not in {"created_by", "run", "transform"}
    ]
    if not transfer_fk:
        # don't transfer fk fields that are already bulk transferred
        fk_fields = [fk for fk in fk_fields if fk not in FKBULK]
    for fk in fk_fields:
        update_fk_to_default_db(record, fk, using_key, transfer_logs=transfer_logs)
    record.id = None
    record._state.db = "default"
    if save:
        record.save()
    return None

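# --- Sketch: bookkeeping during a cross-instance transfer ---
# `transfer_to_default_db` either maps a record onto an existing local row with
# the same uid or re-creates it in the default database, resetting
# `created_by`/`run`/`transform` and remapping the remaining foreign keys via
# `update_fk_to_default_db` (matching on `uid`, or on the fields listed in
# REGISTRY_UNIQUE_FIELD). The caller passes a log dict of this shape:
transfer_logs: dict[str, list[str]] = {"mapped": [], "transferred": []}
# after the transfer it holds reprs such as "Artifact(uid='...')" per category
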
# docstring handled through attach_func_to_class_method
def save(self, *args, **kwargs) -> Record:
    using_key = None
    if "using" in kwargs:
        using_key = kwargs["using"]
    db = self._state.db
    pk_on_db = self.pk
    artifacts: list = []
    if self.__class__.__name__ == "Collection" and self.id is not None:
        # when creating a new collection without being able to access artifacts
        artifacts = self.ordered_artifacts.list()
    pre_existing_record = None
    # consider records that are being transferred from other databases
    transfer_logs: dict[str, list[str]] = {"mapped": [], "transferred": []}
    if db is not None and db != "default" and using_key is None:
        if isinstance(self, IsVersioned):
            if not self.is_latest:
                raise NotImplementedError(
                    "You are attempting to transfer a record that's not the latest in its version history. This is currently not supported."
                )
        pre_existing_record = transfer_to_default_db(
            self, using_key, transfer_logs=transfer_logs
        )
    if pre_existing_record is not None:
        init_self_from_db(self, pre_existing_record)
    else:
        # save versioned record
        if isinstance(self, IsVersioned) and self._revises is not None:
            assert self._revises.is_latest  # noqa: S101
            revises = self._revises
            revises.is_latest = False
            with transaction.atomic():
                revises._revises = None  # ensure we don't start a recursion
                revises.save()
                super(Record, self).save(*args, **kwargs)
            self._revises = None
        # save unversioned record
        else:
            super(Record, self).save(*args, **kwargs)
    # perform transfer of many-to-many fields
    # only supported for Artifact and Collection records
    if db is not None and db != "default" and using_key is None:
        if self.__class__.__name__ == "Collection":
            if len(artifacts) > 0:
                logger.info("transfer artifacts")
                for artifact in artifacts:
                    artifact.save()
                self.artifacts.add(*artifacts)
        if hasattr(self, "labels"):
            from copy import copy

            from lnschema_core.models import FeatureManager

            # here we go back to original record on the source database
            self_on_db = copy(self)
            self_on_db._state.db = db
            self_on_db.pk = pk_on_db  # manually set the primary key
            self_on_db.features = FeatureManager(self_on_db)
            self.features._add_from(self_on_db, transfer_logs=transfer_logs)
            self.labels.add_from(self_on_db, transfer_logs=transfer_logs)
        for k, v in transfer_logs.items():
            logger.important(f"{k} records: {', '.join(v)}")
    return self

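# --- Example (sketch): saving a new version of a record ---
# When a record is created with `revises=<earlier version>`, `save` flips the
# predecessor's `is_latest` flag and writes both rows in one atomic
# transaction. The Artifact constructor call and filenames are illustrative;
# saving a record queried via `.using()` from another instance instead routes
# through `transfer_to_default_db` above.
import lamindb as ln

v1 = ln.Artifact("data.parquet", description="my dataset").save()
v2 = ln.Artifact("data_v2.parquet", revises=v1).save()
# v2.is_latest is True, v1.is_latest was set to False,
# and both share the same stem_uid (one version family)
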
def delete(self) -> None:
    """Delete the record."""
    # note that the logic below does not fire if a record is moved to the trash
    # the idea is that moving a record to the trash should move its entire version family
    # to the trash, whereas permanently deleting should default to only deleting a single record
    # of a version family
    # we can consider making it easy to permanently delete entire version families as well,
    # but that's for another time
    if isinstance(self, IsVersioned) and self.is_latest:
        new_latest = (
            self.__class__.objects.using(self._state.db)
            .filter(is_latest=False, uid__startswith=self.stem_uid)
            .order_by("-created_at")
            .first()
        )
        if new_latest is not None:
            new_latest.is_latest = True
            with transaction.atomic():
                new_latest.save()
                super(Record, self).delete()
            logger.warning(f"new latest version is {new_latest}")
            return None
    super(Record, self).delete()

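# --- Sketch: version bookkeeping in `delete` ---
# Permanently deleting the latest record of a version family promotes the most
# recently created earlier version (found via `uid__startswith=stem_uid`) and
# logs "new latest version is ...". Moving a record to the trash bypasses this
# function by design, so a whole version family can be trashed together.
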
METHOD_NAMES = [
    "__init__",
    "filter",
    "get",
    "df",
    "search",
    "lookup",
    "save",
    "delete",
    "from_values",
    "using",
]

if ln_setup._TESTING:  # type: ignore
    from inspect import signature

    SIGS = {
        name: signature(getattr(Record, name))
        for name in METHOD_NAMES
        if not name.startswith("__")
    }

for name in METHOD_NAMES:
    attach_func_to_class_method(name, Record, globals())
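
# --- Sketch: the attachment pattern used above ---
# Each name in METHOD_NAMES exists twice: as a documented stub on the Record
# base class (in lnschema_core) and as the implementation in this module.
# `attach_func_to_class_method` swaps the implementation in; the comment above
# `save` notes that docstrings are carried over from the stubs. A minimal
# version of the idea, not lamindb's actual helper:
def attach(name: str, cls: type, namespace: dict) -> None:
    func = namespace[name]
    func.__doc__ = getattr(cls, name).__doc__  # reuse the stub's docstring
    setattr(cls, name, func)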