lamindb 0.69.3__py3-none-any.whl → 0.69.5__py3-none-any.whl
This diff compares publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- lamindb/__init__.py +5 -3
- lamindb/_annotate.py +253 -182
- lamindb/_collection.py +37 -158
- lamindb/_finish.py +4 -3
- lamindb/core/_data.py +1 -3
- lamindb/core/_feature_manager.py +1 -2
- lamindb/integrations/__init__.py +8 -0
- lamindb/integrations/_vitessce.py +39 -0
- {lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/METADATA +4 -4
- {lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/RECORD +12 -10
- {lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/LICENSE +0 -0
- {lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/WHEEL +0 -0
lamindb/_collection.py
CHANGED
@@ -64,7 +64,7 @@ def __init__(
     # now we proceed with the user-facing constructor
     if len(args) > 1:
         raise ValueError("Only one non-keyword arg allowed: data")
-    data: Union[
+    data: Union[Artifact, Iterable[Artifact]] = (
         kwargs.pop("data") if len(args) == 0 else args[0]
     )
     meta: Optional[str] = kwargs.pop("meta") if "meta" in kwargs else None
@@ -108,57 +108,24 @@ def __init__(
         if name is None:
             name = is_new_version_of.name
     run = get_run(run)
-
-
-
-
+    if isinstance(data, Artifact):
+        data = [data]
+    else:
+        if not hasattr(data, "__getitem__"):
+            raise ValueError("Artifact or List[Artifact] is allowed.")
+        assert isinstance(data[0], Artifact)  # type: ignore
+    hash, feature_sets = from_artifacts(data)  # type: ignore
     if meta is not None:
-        if not isinstance(meta,
-            raise ValueError(
-
-
-
-    # init artifact - is either data or metadata
-    if isinstance(data, (pd.DataFrame, ad.AnnData, Artifact)):
-        if isinstance(data, Artifact):
-            artifact = data
-            if artifact._state.adding:
-                raise ValueError("Save artifact before creating collection!")
+        if not isinstance(meta, Artifact):
+            raise ValueError("meta has to be an Artifact")
+        if isinstance(meta, Artifact):
+            if meta._state.adding:
+                raise ValueError("Save meta artifact before creating collection!")
             if not feature_sets:
-                feature_sets =
+                feature_sets = meta.features._feature_set_by_slot
             else:
-                if len(
+                if len(meta.features._feature_set_by_slot) > 0:
                     logger.info("overwriting feature sets linked to artifact")
-        else:
-            artifact_is_new_version_of = (
-                is_new_version_of.artifact if is_new_version_of is not None else None
-            )
-            artifact = Artifact(
-                data,
-                run=run,
-                description="tmp",
-                version=version,
-                is_new_version_of=artifact_is_new_version_of,
-                accessor=accessor,
-            )
-            # do we really want to update the artifact here?
-            if feature_sets:
-                artifact._feature_sets = feature_sets
-        hash = artifact.hash  # type: ignore
-        provisional_uid = artifact.uid  # type: ignore
-        if artifact.description is None or artifact.description == "tmp":
-            artifact.description = f"See collection {provisional_uid}"  # type: ignore
-        data_init_complete = True
-    if not data_init_complete:
-        if hasattr(data, "__getitem__"):
-            assert isinstance(data[0], Artifact)  # type: ignore
-            artifacts = data
-            hash, feature_sets = from_artifacts(artifacts)  # type: ignore
-            data_init_complete = True
-        else:
-            raise ValueError(
-                "Only DataFrame, AnnData, Artifact or list of artifacts is allowed."
-            )
     # we ignore collections in trash containing the same hash
     if hash is not None:
         existing_collection = Collection.filter(hash=hash).one_or_none()
@@ -183,88 +150,19 @@ def __init__(
             description=description,
             reference=reference,
             reference_type=reference_type,
-            artifact=
+            artifact=meta,
             hash=hash,
             run=run,
            version=version,
            visibility=visibility,
            **kwargs,
        )
-    collection._artifacts =
+    collection._artifacts = data
     collection._feature_sets = feature_sets
     # register provenance
     if is_new_version_of is not None:
         _track_run_input(is_new_version_of, run=run)
-
-        _track_run_input(artifact, run=run)
-    elif artifacts is not None:
-        _track_run_input(artifacts, run=run)
-
-
-@classmethod  # type: ignore
-@doc_args(Collection.from_df.__doc__)
-def from_df(
-    cls,
-    df: "pd.DataFrame",
-    name: Optional[str] = None,
-    description: Optional[str] = None,
-    run: Optional[Run] = None,
-    reference: Optional[str] = None,
-    reference_type: Optional[str] = None,
-    version: Optional[str] = None,
-    is_new_version_of: Optional["Artifact"] = None,
-    **kwargs,
-) -> "Collection":
-    """{}."""
-    if isinstance(df, Artifact):
-        assert not df._state.adding
-        assert df.accessor == "DataFrame"
-    collection = Collection(
-        data=df,
-        name=name,
-        run=run,
-        description=description,
-        reference=reference,
-        reference_type=reference_type,
-        version=version,
-        is_new_version_of=is_new_version_of,
-        accessor="DataFrame",
-        **kwargs,
-    )
-    return collection
-
-
-@classmethod  # type: ignore
-@doc_args(Collection.from_anndata.__doc__)
-def from_anndata(
-    cls,
-    adata: "AnnData",
-    name: Optional[str] = None,
-    description: Optional[str] = None,
-    run: Optional[Run] = None,
-    reference: Optional[str] = None,
-    reference_type: Optional[str] = None,
-    version: Optional[str] = None,
-    is_new_version_of: Optional["Artifact"] = None,
-    **kwargs,
-) -> "Collection":
-    """{}."""
-    if isinstance(adata, Artifact):
-        assert not adata._state.adding
-        assert adata.accessor == "AnnData"
-    collection = Collection(
-        data=adata,
-        run=run,
-        name=name,
-        description=description,
-        reference=reference,
-        reference_type=reference_type,
-        version=version,
-        is_new_version_of=is_new_version_of,
-        accessor="AnnData",
-        **kwargs,
-    )
-    return collection
+        _track_run_input(data, run=run)


 # internal function, not exposed to user
@@ -373,18 +271,6 @@ def stage(self, is_run_input: Optional[bool] = None) -> List[UPath]:
     return path_list


-# docstring handled through attach_func_to_class_method
-def backed(
-    self, is_run_input: Optional[bool] = None
-) -> Union["AnnDataAccessor", "BackedAccessor"]:
-    _track_run_input(self, is_run_input)
-    if self.artifact is None:
-        raise RuntimeError(
-            "Can only call backed() for collections with a single artifact"
-        )
-    return self.artifact.backed()
-
-
 # docstring handled through attach_func_to_class_method
 def load(
     self,
@@ -393,29 +279,25 @@ def load(
     **kwargs,
 ) -> DataLike:
     # cannot call _track_run_input here, see comment further down
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        )
-    # only call it here because there might be errors during concat
-    _track_run_input(self, is_run_input)
-    return concat_object
+    all_artifacts = self.artifacts.all()
+    suffixes = [artifact.suffix for artifact in all_artifacts]
+    if len(set(suffixes)) != 1:
+        raise RuntimeError(
+            "Can only load collections where all artifacts have the same suffix"
+        )
+    # because we're tracking data flow on the collection-level, here, we don't
+    # want to track it on the artifact-level
+    objects = [artifact.load(is_run_input=False) for artifact in all_artifacts]
+    artifact_uids = [artifact.uid for artifact in all_artifacts]
+    if isinstance(objects[0], pd.DataFrame):
+        concat_object = pd.concat(objects, join=join)
+    elif isinstance(objects[0], ad.AnnData):
+        concat_object = ad.concat(
+            objects, join=join, label="artifact_uid", keys=artifact_uids
+        )
+    # only call it here because there might be errors during concat
+    _track_run_input(self, is_run_input)
+    return concat_object


 # docstring handled through attach_func_to_class_method
@@ -484,11 +366,8 @@ def artifacts(self) -> QuerySet:

 METHOD_NAMES = [
     "__init__",
-    "from_anndata",
-    "from_df",
     "mapped",
     "stage",
-    "backed",
     "load",
     "delete",
     "save",
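Taken together, the constructor, load(), and METHOD_NAMES changes above mean a 0.69.5 collection is built from already-saved artifacts (DataFrames/AnnData objects are no longer accepted directly, and from_df, from_anndata, and backed are removed), while load() concatenates the artifacts' objects. A minimal usage sketch against this API; the artifact lookups and names below are hypothetical:

import lamindb as ln

# two previously saved artifacts sharing the same suffix (hypothetical lookups)
artifact_1 = ln.Artifact.filter(description="batch 1").one()
artifact_2 = ln.Artifact.filter(description="batch 2").one()

# the constructor now accepts only an Artifact or an iterable of Artifacts
collection = ln.Collection([artifact_1, artifact_2], name="all-batches")
collection.save()

# load() concatenates the underlying objects via pd.concat / ad.concat and
# raises if the artifacts do not all share the same suffix
adata = collection.load(join="outer")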
lamindb/_finish.py
CHANGED
@@ -8,6 +8,7 @@ from typing import Optional
 import lamindb_setup as ln_setup
 from lamin_utils import logger
 from lnschema_core import Run, Transform
+from lnschema_core.types import TransformType

 from ._query_set import QuerySet
 from .core._run_context import is_run_from_ipython, run_context
@@ -69,7 +70,7 @@ def save_run_context_core(

     ln.settings.verbosity = "success"

-    if transform.type ==
+    if transform.type == TransformType.notebook:
         try:
             import nbstripout
             from nbproject.dev import (
@@ -187,7 +188,7 @@ def save_run_context_core(
         run.environment = artifact
         logger.success(f"saved run.environment: {run.environment}")
     # save report file
-    if not transform.type ==
+    if not transform.type == TransformType.notebook:
         run.save()
     else:
         if run.report_id is not None:
@@ -212,7 +213,7 @@ def save_run_context_core(
         run.save()
         transform.latest_report = run.report
         transform.save()
-    if transform.type ==
+    if transform.type == TransformType.notebook:
         logger.success(f"saved transform.latest_report: {transform.latest_report}")
     identifier = ln_setup.settings.instance.slug
     logger.success(f"go to: https://lamin.ai/{identifier}/transform/{transform.uid}")
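The three changes above replace comparisons whose right-hand side is truncated in this view with explicit TransformType.notebook checks. A small sketch of the enum comparison; the helper name is hypothetical:

from lnschema_core.types import TransformType

def is_notebook(transform) -> bool:
    # compare the transform's type against the enum member
    return transform.type == TransformType.notebook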
lamindb/core/_data.py
CHANGED
@@ -36,9 +36,7 @@ from ._label_manager import LabelManager, print_labels
 from ._run_context import run_context
 from .exceptions import ValidationError

-WARNING_RUN_TRANSFORM = (
-    "no run & transform get linked, consider passing a `run` or calling ln.track()"
-)
+WARNING_RUN_TRANSFORM = "no run & transform get linked, consider calling ln.track()"


 def get_run(run: Optional[Run]) -> Optional[Run]:
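The shortened warning string points users at ln.track(). A minimal sketch of the call it refers to; the file name is hypothetical:

import lamindb as ln

# link a Transform and Run to the current notebook or script so that
# artifacts created afterwards get provenance attached
ln.track()

artifact = ln.Artifact("results.parquet", description="tracked output")
artifact.save()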
lamindb/core/_feature_manager.py
CHANGED
@@ -253,8 +253,7 @@ class FeatureManager:
         if isinstance(self._host, Artifact):
             assert self._host.accessor == "AnnData"
         else:
-
-            assert self._host.artifact.accessor == "AnnData"
+            raise NotImplementedError()

         # parse and register features
         adata = self._host.load()
lamindb/integrations/_vitessce.py
ADDED
@@ -0,0 +1,39 @@
+import json
+from datetime import datetime, timezone
+
+import lamindb_setup as ln_setup
+from lamin_utils import logger
+
+from lamindb._artifact import Artifact
+
+
+# tested in lamin-spatial
+# can't type vitessce_config because can't assume it's installed
+def save_vitessce_config(vitessce_config, description: str) -> Artifact:
+    """Takes a ``VitessceConfig`` object and saves it as an artifact.
+
+    Args:
+        vitessce_config (``VitessceConfig``): A VitessceConfig object.
+        description: A description for the artifact.
+    """
+    from vitessce import VitessceConfig
+
+    assert isinstance(vitessce_config, VitessceConfig)
+    timestamp = datetime.now(timezone.utc).isoformat().split(".")[0]
+    vitesse_export = f"./vitessce_export_{timestamp}.vitessce"
+    vitessce_config.export(to="files", base_url="", out_dir=vitesse_export)
+    logger.important(f"local export: {vitesse_export}")
+    artifact = Artifact(vitesse_export, description=description)
+    artifact.save()
+    config_dict = vitessce_config.to_dict(base_url=artifact.path.to_url())
+    logger.important(f"base url: {artifact.path.to_url()}")
+    config_filename = "vitessce_config.json"
+    config_file_local_path = f"{vitesse_export}/{config_filename}"
+    with open(config_file_local_path, "w") as file:
+        json.dump(config_dict, file)
+    config_file_path = artifact.path / config_filename
+    config_file_path.upload_from(config_file_local_path)
+    logger.important(f"config url: {config_file_path.to_url()}")
+    slug = ln_setup.settings.instance.slug
+    logger.important(f"go to: https://lamin.ai/{slug}/artifact/{artifact.uid}")
+    return artifact
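A usage sketch for the new integration. The VitessceConfig construction is illustrative only, and it assumes save_vitessce_config is re-exported from lamindb.integrations (the new integrations/__init__.py is not shown in this view):

from vitessce import VitessceConfig

from lamindb.integrations import save_vitessce_config

vc = VitessceConfig(schema_version="1.0.15", name="my dataset")
# ... add datasets and views to vc here ...

# exports the config files, saves them as an Artifact, and writes a
# vitessce_config.json next to the exported files
artifact = save_vitessce_config(vc, description="Vitessce config for my dataset")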
{lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lamindb
-Version: 0.69.3
+Version: 0.69.5
 Summary: A data framework for biology.
 Author-email: Lamin Labs <open-source@lamin.ai>
 Requires-Python: >=3.8
@@ -9,10 +9,10 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
-Requires-Dist: lnschema_core==0.64.
-Requires-Dist: lamindb_setup==0.68.
+Requires-Dist: lnschema_core==0.64.4
+Requires-Dist: lamindb_setup==0.68.3
 Requires-Dist: lamin_utils==0.13.1
-Requires-Dist: lamin_cli==0.
+Requires-Dist: lamin_cli==0.11.0
 Requires-Dist: rapidfuzz
 Requires-Dist: pyarrow
 Requires-Dist: typing_extensions!=4.6.0
{lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/RECORD
CHANGED
@@ -1,12 +1,12 @@
-lamindb/__init__.py,sha256=
-lamindb/_annotate.py,sha256=
+lamindb/__init__.py,sha256=cWnU0vX4YUkRJTNOMNyjJHRQzTAWQDAj5R7Vp_6EB4M,2163
+lamindb/_annotate.py,sha256=mM-GCej7i9eUH0cU5AcxWZ916k8NRI41WF84dfjJfu4,29955
 lamindb/_artifact.py,sha256=RV36tcHMZ6wH6u65jOAQ_H4rfmFiIzZmAr8IY7kFhm0,35817
 lamindb/_can_validate.py,sha256=w7lrUGTWldpvwaRiXBRrjfU_ZRidA7CooOu_r5MbocY,14569
-lamindb/_collection.py,sha256=
+lamindb/_collection.py,sha256=SdNNhhMh2O4q0hG4Hf_y1bcwcbkMF_sqk6MIYc-hLZo,14525
 lamindb/_feature.py,sha256=ahRv87q1tcRLQ0UM5FA3KtcMQvIjW__fZq1yAdRAV7s,6728
 lamindb/_feature_set.py,sha256=G_Ss6mKh4D0Eji-xSfLRbKVFXwgUE82YOqIUmkV0CAA,8767
 lamindb/_filter.py,sha256=_PjyQWQBR3ohDAvJbR3hMvZ-2p2GvzFxLfKGC-gPnHI,1320
-lamindb/_finish.py,sha256=
+lamindb/_finish.py,sha256=8lfJzRedTDCA_XXBUf4ECOevpPhVxKqMMj9qgVkmF8M,8672
 lamindb/_from_values.py,sha256=Ei11ml77Q1xubVekt2C4-mbox2-qnC7kP18B-LhCdSc,11886
 lamindb/_is_versioned.py,sha256=DXp5t-1DwErpqqMc9eb08kpQPCHOC2fNzaozMoBunR4,1337
 lamindb/_parents.py,sha256=pTDsW8HjQ_txFbPKrBU0WjjtCNH6sx2LASUuGWpJuYE,14742
@@ -21,8 +21,8 @@ lamindb/_ulabel.py,sha256=euXsDPD7wC99oopLXVkT-vq7f3E6-zP4Z4akI-yh0aM,1913
 lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
 lamindb/_view.py,sha256=yFMu4vnt0YqvN1q11boAkwigxCH1gdliDUSbzh3IuDw,2175
 lamindb/core/__init__.py,sha256=Mw4sI-xgnMXNsu84oYFQBZOF8mxxxhp6-e3BjTQqjlA,1131
-lamindb/core/_data.py,sha256=
-lamindb/core/_feature_manager.py,sha256=
+lamindb/core/_data.py,sha256=SCyUjS9bL7MMqyZTJl8PxnNtLKL7eNiUcLvmwFrqP-k,17260
+lamindb/core/_feature_manager.py,sha256=_Bicjal2DQbpl6tR7p5o7Alb9rq0XYzAxrF_bV9sTjE,13894
 lamindb/core/_label_manager.py,sha256=zrWDSd2AkR6fKsGDxLSWqHC9fz9BcGlavPZEh92Wzjg,9063
 lamindb/core/_mapped_collection.py,sha256=e4P3AoykIMjD4_88BWbISWvKyWWTklwHl-_WLa72ZG4,16841
 lamindb/core/_run_context.py,sha256=EK0lFJWx32NY2FdqFR1YozR9zioC-BjA394nPu-KwLQ,17510
@@ -44,9 +44,11 @@ lamindb/core/storage/_backed_access.py,sha256=DUJIDjkGkemjmKLD05blndP_rO5DpUD0EZ
 lamindb/core/storage/_zarr.py,sha256=bMQSCsTOCtQy4Yo3KwCVpbUkKdWRApN9FM1rM-d2_G0,2839
 lamindb/core/storage/file.py,sha256=WTeC4ENn_O6HEoinmTviB89W81UrJT3bSGtnpqPpIyE,7242
 lamindb/core/storage/object.py,sha256=MPUb2M8Fleq2j9x1Ryqr3BETmvsDKyf11Ifvbxd3NpA,1097
+lamindb/integrations/__init__.py,sha256=aH2PmO2m4-vwIifMYTB0Fyyr_gZWtVnV71jT0tVWSw0,123
+lamindb/integrations/_vitessce.py,sha256=n85g8YRP8Y2sfU5DPJdbU84BGPrTfU3Dg2jStdmBBRI,1637
 lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
 lamindb/setup/core/__init__.py,sha256=LqIIvJNcONxkqjbnP6CUaP4d45Lbd6TSMAcXFp4C7_8,231
-lamindb-0.69.
-lamindb-0.69.
-lamindb-0.69.
-lamindb-0.69.
+lamindb-0.69.5.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+lamindb-0.69.5.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+lamindb-0.69.5.dist-info/METADATA,sha256=meDN6DOhRAnUt2jbDcykj9Hclhos_uyO_UbtSPNukG4,2856
+lamindb-0.69.5.dist-info/RECORD,,
{lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/LICENSE
File without changes
{lamindb-0.69.3.dist-info → lamindb-0.69.5.dist-info}/WHEEL
File without changes