lamindb 0.69.3__py3-none-any.whl → 0.69.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lamindb/__init__.py +5 -3
- lamindb/_annotate.py +18 -18
- lamindb/_collection.py +37 -158
- lamindb/core/_data.py +1 -3
- lamindb/core/_feature_manager.py +1 -2
- lamindb/integrations/__init__.py +8 -0
- lamindb/integrations/_vitessce.py +36 -0
- {lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/METADATA +2 -2
- {lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/RECORD +11 -9
- {lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/LICENSE +0 -0
- {lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/WHEEL +0 -0
lamindb/__init__.py
CHANGED
@@ -15,7 +15,7 @@ Registries:
    Feature
    FeatureSet
 
-
+Key functionality:
 
 .. autosummary::
    :toctree: .
@@ -23,6 +23,7 @@ Functions:
    connect
    track
    finish
+   Annotate
    view
    save
 
@@ -31,7 +32,7 @@ Modules & settings:
 .. autosummary::
    :toctree: .
 
-
+   integrations
    settings
    setup
    UPath
@@ -39,7 +40,7 @@ Modules & settings:
 
 """
 
-__version__ = "0.69.3"  # denote a release candidate for 0.1.0 with 0.1rc1
+__version__ = "0.69.4"  # denote a release candidate for 0.1.0 with 0.1rc1
 
 import os as _os
 
@@ -88,6 +89,7 @@ if _check_instance_setup(from_lamindb=True):
     )
 
     dev = core  # backward compat
+    from . import integrations
    from ._annotate import Annotate
    from ._finish import finish
    from ._save import save
lamindb/_annotate.py
CHANGED
@@ -81,7 +81,7 @@ class DataFrameAnnotator:
         self._collection = None
         self._validated = False
         self._kwargs: Dict = kwargs
-        self.
+        self.save_features()
 
    @property
    def fields(self) -> Dict:
@@ -99,7 +99,7 @@ class DataFrameAnnotator:
         fields = {**{"feature": self._feature_field}, **self.fields}
         return AnnotateLookup(fields=fields, using=using or self._using)
 
-    def
+    def save_features(self, validated_only: bool = True) -> None:
         """Register features records."""
         missing_columns = set(self.fields.keys()) - set(self._df.columns)
         if missing_columns:
@@ -141,7 +141,7 @@ class DataFrameAnnotator:
         if feature == "all":
             self._update_registry_all(validated_only=validated_only, **kwargs)
         elif feature == "feature":
-            self.
+            self.save_features(validated_only=validated_only)
         else:
             if feature not in self.fields:
                 raise ValueError(f"Feature {feature} is not part of the fields!")
@@ -175,7 +175,7 @@ class DataFrameAnnotator:
         )
         return self._validated
 
-    def
+    def save_artifact(self, description: str, **kwargs) -> Artifact:
         """Register the validated DataFrame and metadata.
 
         Args:
@@ -199,7 +199,7 @@ class DataFrameAnnotator:
         settings.verbosity = "warning"
         self.update_registry("all")
 
-        self._artifact =
+        self._artifact = save_artifact(
             self._df,
             description=description,
             fields=self.fields,
@@ -211,9 +211,9 @@ class DataFrameAnnotator:
 
         return self._artifact
 
-    def
+    def save_collection(
         self,
-        artifact: Artifact
+        artifact: Union[Artifact, Iterable[Artifact]],
         name: str,
         description: Optional[str] = None,
         reference: Optional[str] = None,
@@ -288,7 +288,7 @@ class AnnDataAnnotator(DataFrameAnnotator):
             **kwargs,
         )
         self._obs_fields = obs_fields
-        self.
+        self._save_variables()
 
    @property
    def var_field(self) -> FieldAttr:
@@ -308,7 +308,7 @@ class AnnDataAnnotator(DataFrameAnnotator):
         }
         return AnnotateLookup(fields=fields, using=using or self._using)
 
-    def
+    def _save_variables(self, validated_only: bool = True, **kwargs):
         """Register variable records."""
         self._kwargs.update(kwargs)
         update_registry(
@@ -334,11 +334,11 @@ class AnnDataAnnotator(DataFrameAnnotator):
    def update_registry(self, feature: str, validated_only: bool = True, **kwargs):
         """Register labels for a feature."""
         if feature == "variables":
-            self.
+            self._save_variables(validated_only=validated_only, **kwargs)
         else:
             super().update_registry(feature, validated_only, **kwargs)
 
-    def
+    def save_artifact(self, description: str, **kwargs) -> Artifact:
         """Register the validated AnnData and metadata.
 
         Args:
@@ -352,7 +352,7 @@ class AnnDataAnnotator(DataFrameAnnotator):
         if not self._validated:
             raise ValidationError("Please run `validate()` first!")
 
-        self._artifact =
+        self._artifact = save_artifact(
             self._adata,
             description=description,
             feature_field=self.var_field,
@@ -536,7 +536,7 @@ def validate_anndata(
     return validated_var and validated_obs
 
 
-def
+def save_artifact(
     data: Union[pd.DataFrame, ad.AnnData],
     description: str,
     fields: Dict[str, FieldAttr],
@@ -669,7 +669,7 @@ def update_registry(
             non_validated_records = Feature.from_df(df)
         else:
             if "organism" in filter_kwargs:
-                filter_kwargs["organism"] =
+                filter_kwargs["organism"] = _save_organism(name=organism)
             for value in labels_registered["without reference"]:
                 filter_kwargs[field.field.name] = value
                 if registry == Feature:
@@ -678,7 +678,7 @@ def update_registry(
         ln_save(non_validated_records)
 
         if registry == ULabel and field.field.name == "name":
-
+            save_ulabels_with_parent(values, field=field, feature_name=feature_name)
    finally:
         settings.verbosity = verbosity
 
@@ -710,7 +710,7 @@ def log_registered_labels(
             lookup_print = f".lookup().['{feature_name}']"
             msg += f"\n → to lookup categories, use {lookup_print}"
             msg += (
-                f"\n → to register, run {colors.yellow('
+                f"\n → to register, run {colors.yellow('save_features(validated_only=False)')}"
                 if labels_type == "features"
                 else f"\n → to register, set {colors.yellow('validated_only=False')}"
             )
@@ -722,7 +722,7 @@ def log_registered_labels(
    )
 
 
-def
+def save_ulabels_with_parent(
    values: List[str], field: FieldAttr, feature_name: str
 ) -> None:
    """Register a parent label for the given labels."""
@@ -774,7 +774,7 @@ def update_registry_from_using_instance(
    return labels_registered, not_registered
 
 
-def
+def _save_organism(name: str):
    """Register an organism record."""
    import bionty as bt
 
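
For orientation, a minimal sketch of how the renamed `save_*` methods chain together in 0.69.4. Only the `save_features`, `validate`, `save_artifact`, and `save_collection` names and signatures come from the diff above; the `ln.Annotate.from_df` construction, the example columns, and the field mapping are illustrative assumptions, not part of this diff.

import bionty as bt
import lamindb as ln
import pandas as pd

# hypothetical input table and field mapping, for illustration only
df = pd.DataFrame({"cell_type": ["T cell", "B cell"], "donor": ["D0001", "D0002"]})
annotate = ln.Annotate.from_df(  # assumed constructor, not shown in this diff
    df,
    fields={"cell_type": bt.CellType.name, "donor": ln.ULabel.name},
)

# renamed helper; validated_only=False registers new feature records,
# matching the hint now logged by log_registered_labels
annotate.save_features(validated_only=False)
if annotate.validate():  # returns self._validated
    artifact = annotate.save_artifact(description="validated demo table")
    collection = annotate.save_collection(artifact, name="demo collection")
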
lamindb/_collection.py
CHANGED
@@ -64,7 +64,7 @@ def __init__(
     # now we proceed with the user-facing constructor
     if len(args) > 1:
         raise ValueError("Only one non-keyword arg allowed: data")
-    data: Union[
+    data: Union[Artifact, Iterable[Artifact]] = (
         kwargs.pop("data") if len(args) == 0 else args[0]
     )
     meta: Optional[str] = kwargs.pop("meta") if "meta" in kwargs else None
@@ -108,57 +108,24 @@ def __init__(
     if name is None:
         name = is_new_version_of.name
     run = get_run(run)
-
-
-
-
+    if isinstance(data, Artifact):
+        data = [data]
+    else:
+        if not hasattr(data, "__getitem__"):
+            raise ValueError("Artifact or List[Artifact] is allowed.")
+        assert isinstance(data[0], Artifact)  # type: ignore
+    hash, feature_sets = from_artifacts(data)  # type: ignore
     if meta is not None:
-        if not isinstance(meta,
-            raise ValueError(
-
-
-
-    # init artifact - is either data or metadata
-    if isinstance(data, (pd.DataFrame, ad.AnnData, Artifact)):
-        if isinstance(data, Artifact):
-            artifact = data
-            if artifact._state.adding:
-                raise ValueError("Save artifact before creating collection!")
+        if not isinstance(meta, Artifact):
+            raise ValueError("meta has to be an Artifact")
+        if isinstance(meta, Artifact):
+            if meta._state.adding:
+                raise ValueError("Save meta artifact before creating collection!")
             if not feature_sets:
-                feature_sets =
+                feature_sets = meta.features._feature_set_by_slot
             else:
-                if len(
+                if len(meta.features._feature_set_by_slot) > 0:
                     logger.info("overwriting feature sets linked to artifact")
-        else:
-            artifact_is_new_version_of = (
-                is_new_version_of.artifact if is_new_version_of is not None else None
-            )
-            artifact = Artifact(
-                data,
-                run=run,
-                description="tmp",
-                version=version,
-                is_new_version_of=artifact_is_new_version_of,
-                accessor=accessor,
-            )
-            # do we really want to update the artifact here?
-            if feature_sets:
-                artifact._feature_sets = feature_sets
-            hash = artifact.hash  # type: ignore
-            provisional_uid = artifact.uid  # type: ignore
-            if artifact.description is None or artifact.description == "tmp":
-                artifact.description = f"See collection {provisional_uid}"  # type: ignore
-            data_init_complete = True
-    if not data_init_complete:
-        if hasattr(data, "__getitem__"):
-            assert isinstance(data[0], Artifact)  # type: ignore
-            artifacts = data
-            hash, feature_sets = from_artifacts(artifacts)  # type: ignore
-            data_init_complete = True
-        else:
-            raise ValueError(
-                "Only DataFrame, AnnData, Artifact or list of artifacts is allowed."
-            )
     # we ignore collections in trash containing the same hash
     if hash is not None:
         existing_collection = Collection.filter(hash=hash).one_or_none()
@@ -183,88 +150,19 @@ def __init__(
         description=description,
         reference=reference,
         reference_type=reference_type,
-        artifact=
+        artifact=meta,
         hash=hash,
         run=run,
         version=version,
         visibility=visibility,
         **kwargs,
     )
-    collection._artifacts =
+    collection._artifacts = data
     collection._feature_sets = feature_sets
     # register provenance
     if is_new_version_of is not None:
         _track_run_input(is_new_version_of, run=run)
-
-        _track_run_input(artifact, run=run)
-    elif artifacts is not None:
-        _track_run_input(artifacts, run=run)
-
-
-@classmethod  # type: ignore
-@doc_args(Collection.from_df.__doc__)
-def from_df(
-    cls,
-    df: "pd.DataFrame",
-    name: Optional[str] = None,
-    description: Optional[str] = None,
-    run: Optional[Run] = None,
-    reference: Optional[str] = None,
-    reference_type: Optional[str] = None,
-    version: Optional[str] = None,
-    is_new_version_of: Optional["Artifact"] = None,
-    **kwargs,
-) -> "Collection":
-    """{}."""
-    if isinstance(df, Artifact):
-        assert not df._state.adding
-        assert df.accessor == "DataFrame"
-    collection = Collection(
-        data=df,
-        name=name,
-        run=run,
-        description=description,
-        reference=reference,
-        reference_type=reference_type,
-        version=version,
-        is_new_version_of=is_new_version_of,
-        accessor="DataFrame",
-        **kwargs,
-    )
-    return collection
-
-
-@classmethod  # type: ignore
-@doc_args(Collection.from_anndata.__doc__)
-def from_anndata(
-    cls,
-    adata: "AnnData",
-    name: Optional[str] = None,
-    description: Optional[str] = None,
-    run: Optional[Run] = None,
-    reference: Optional[str] = None,
-    reference_type: Optional[str] = None,
-    version: Optional[str] = None,
-    is_new_version_of: Optional["Artifact"] = None,
-    **kwargs,
-) -> "Collection":
-    """{}."""
-    if isinstance(adata, Artifact):
-        assert not adata._state.adding
-        assert adata.accessor == "AnnData"
-    collection = Collection(
-        data=adata,
-        run=run,
-        name=name,
-        description=description,
-        reference=reference,
-        reference_type=reference_type,
-        version=version,
-        is_new_version_of=is_new_version_of,
-        accessor="AnnData",
-        **kwargs,
-    )
-    return collection
+    _track_run_input(data, run=run)
 
 
 # internal function, not exposed to user
@@ -373,18 +271,6 @@ def stage(self, is_run_input: Optional[bool] = None) -> List[UPath]:
     return path_list
 
 
-# docstring handled through attach_func_to_class_method
-def backed(
-    self, is_run_input: Optional[bool] = None
-) -> Union["AnnDataAccessor", "BackedAccessor"]:
-    _track_run_input(self, is_run_input)
-    if self.artifact is None:
-        raise RuntimeError(
-            "Can only call backed() for collections with a single artifact"
-        )
-    return self.artifact.backed()
-
-
 # docstring handled through attach_func_to_class_method
 def load(
     self,
@@ -393,29 +279,25 @@ def load(
     **kwargs,
 ) -> DataLike:
     # cannot call _track_run_input here, see comment further down
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    )
-    # only call it here because there might be errors during concat
-    _track_run_input(self, is_run_input)
-    return concat_object
+    all_artifacts = self.artifacts.all()
+    suffixes = [artifact.suffix for artifact in all_artifacts]
+    if len(set(suffixes)) != 1:
+        raise RuntimeError(
+            "Can only load collections where all artifacts have the same suffix"
+        )
+    # because we're tracking data flow on the collection-level, here, we don't
+    # want to track it on the artifact-level
+    objects = [artifact.load(is_run_input=False) for artifact in all_artifacts]
+    artifact_uids = [artifact.uid for artifact in all_artifacts]
+    if isinstance(objects[0], pd.DataFrame):
+        concat_object = pd.concat(objects, join=join)
+    elif isinstance(objects[0], ad.AnnData):
+        concat_object = ad.concat(
+            objects, join=join, label="artifact_uid", keys=artifact_uids
+        )
+    # only call it here because there might be errors during concat
+    _track_run_input(self, is_run_input)
+    return concat_object
 
 
 # docstring handled through attach_func_to_class_method
@@ -484,11 +366,8 @@ def artifacts(self) -> QuerySet:
 
 METHOD_NAMES = [
    "__init__",
-    "from_anndata",
-    "from_df",
    "mapped",
    "stage",
-    "backed",
    "load",
    "delete",
    "save",
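
With `from_df`, `from_anndata`, and `backed` dropped from `Collection`, the 0.69.4 constructor only accepts already-saved artifacts, and `load()` concatenates them when they share a suffix. A minimal sketch under those assumptions; the two query lines that fetch artifacts are illustrative, only the constructor input type and the `load(join=...)` behavior come from the diff above.

import lamindb as ln

# illustrative lookups: any two saved artifacts with the same suffix work
artifact_1 = ln.Artifact.filter(description="batch 1").one()
artifact_2 = ln.Artifact.filter(description="batch 2").one()

# data must now be an Artifact or an iterable of Artifacts
collection = ln.Collection([artifact_1, artifact_2], name="all batches")
collection.save()

# raises RuntimeError if the artifacts' suffixes differ; AnnData objects are
# concatenated with an "artifact_uid" label keyed by the artifact uids
joined = collection.load(join="outer")
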
lamindb/core/_data.py
CHANGED
@@ -36,9 +36,7 @@ from ._label_manager import LabelManager, print_labels
 from ._run_context import run_context
 from .exceptions import ValidationError
 
-WARNING_RUN_TRANSFORM = (
-    "no run & transform get linked, consider passing a `run` or calling ln.track()"
-)
+WARNING_RUN_TRANSFORM = "no run & transform get linked, consider calling ln.track()"
 
 
 def get_run(run: Optional[Run]) -> Optional[Run]:
lamindb/core/_feature_manager.py
CHANGED
@@ -253,8 +253,7 @@ class FeatureManager:
         if isinstance(self._host, Artifact):
             assert self._host.accessor == "AnnData"
         else:
-
-            assert self._host.artifact.accessor == "AnnData"
+            raise NotImplementedError()
 
         # parse and register features
         adata = self._host.load()
lamindb/integrations/_vitessce.py
ADDED
@@ -0,0 +1,36 @@
+import json
+from datetime import datetime, timezone
+
+import lamindb_setup as ln_setup
+from lamin_utils import logger
+
+from lamindb._artifact import Artifact
+
+
+# tested in lamin-spatial
+# can't type vitessce_config because can't assume it's installed
+def save_vitessce_config(vitessce_config, description: str) -> Artifact:
+    """Takes a ``VitessceConfig`` object and saves it as an artifact.
+
+    Args:
+        vitessce_config (``VitessceConfig``): A VitessceConfig object.
+        description: A description for the artifact.
+    """
+    from vitessce import VitessceConfig
+
+    assert isinstance(vitessce_config, VitessceConfig)
+    timestamp = datetime.now(timezone.utc).isoformat().split(".")[0]
+    vitesse_export = f"./vitessce_export_{timestamp}.vitessce"
+    vitessce_config.export(to="files", base_url="", out_dir=vitesse_export)
+    logger.important(f"local export: {vitesse_export}")
+    artifact = Artifact(vitesse_export, description=description)
+    artifact.save()
+    config_dict = vitessce_config.to_dict(base_url=artifact.path.to_url())
+    config_filename = "vitessce_config.json"
+    config_file_local_path = f"{vitesse_export}/{config_filename}"
+    with open(config_file_local_path, "w") as file:
+        json.dump(config_dict, file)
+    (artifact.path / config_filename).upload_from(config_file_local_path)
+    slug = ln_setup.settings.instance.slug
+    logger.important(f"go to: https://lamin.ai/{slug}/artifact/{artifact.uid}")
+    return artifact
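
A minimal usage sketch for this new integration: `save_vitessce_config(vitessce_config, description)` is exactly the function added above, while the `VitessceConfig` construction is an assumption about the vitessce package's API (dataset and view wiring omitted), and importing from `lamindb.integrations` assumes the new 8-line `__init__.py` re-exports the function.

from vitessce import VitessceConfig

from lamindb.integrations import save_vitessce_config  # assumed re-export

# assumption: vc is a fully configured VitessceConfig; datasets/views omitted here
vc = VitessceConfig(schema_version="1.0.15", name="demo")

# exports to ./vitessce_export_<timestamp>.vitessce, saves that folder as an
# artifact, uploads vitessce_config.json next to it, and logs the lamin.ai URL
artifact = save_vitessce_config(vc, description="demo vitessce export")
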
{lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lamindb
-Version: 0.69.3
+Version: 0.69.4
 Summary: A data framework for biology.
 Author-email: Lamin Labs <open-source@lamin.ai>
 Requires-Python: >=3.8
@@ -10,7 +10,7 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Requires-Dist: lnschema_core==0.64.1
-Requires-Dist: lamindb_setup==0.68.
+Requires-Dist: lamindb_setup==0.68.2
 Requires-Dist: lamin_utils==0.13.1
 Requires-Dist: lamin_cli==0.10.2
 Requires-Dist: rapidfuzz
{lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/RECORD
CHANGED
@@ -1,8 +1,8 @@
-lamindb/__init__.py,sha256=
-lamindb/_annotate.py,sha256=
+lamindb/__init__.py,sha256=3kVakyBIty4I2C-69S114uW3z9lnEOgpnOz3LsZb2ro,2163
+lamindb/_annotate.py,sha256=BopvJvzUlNKiTzOELg6MXeXQNUVMUTk-FcRoV9mfZdo,27682
 lamindb/_artifact.py,sha256=RV36tcHMZ6wH6u65jOAQ_H4rfmFiIzZmAr8IY7kFhm0,35817
 lamindb/_can_validate.py,sha256=w7lrUGTWldpvwaRiXBRrjfU_ZRidA7CooOu_r5MbocY,14569
-lamindb/_collection.py,sha256=
+lamindb/_collection.py,sha256=SdNNhhMh2O4q0hG4Hf_y1bcwcbkMF_sqk6MIYc-hLZo,14525
 lamindb/_feature.py,sha256=ahRv87q1tcRLQ0UM5FA3KtcMQvIjW__fZq1yAdRAV7s,6728
 lamindb/_feature_set.py,sha256=G_Ss6mKh4D0Eji-xSfLRbKVFXwgUE82YOqIUmkV0CAA,8767
 lamindb/_filter.py,sha256=_PjyQWQBR3ohDAvJbR3hMvZ-2p2GvzFxLfKGC-gPnHI,1320
@@ -21,8 +21,8 @@ lamindb/_ulabel.py,sha256=euXsDPD7wC99oopLXVkT-vq7f3E6-zP4Z4akI-yh0aM,1913
 lamindb/_utils.py,sha256=LGdiW4k3GClLz65vKAVRkL6Tw-Gkx9DWAdez1jyA5bE,428
 lamindb/_view.py,sha256=yFMu4vnt0YqvN1q11boAkwigxCH1gdliDUSbzh3IuDw,2175
 lamindb/core/__init__.py,sha256=Mw4sI-xgnMXNsu84oYFQBZOF8mxxxhp6-e3BjTQqjlA,1131
-lamindb/core/_data.py,sha256=
-lamindb/core/_feature_manager.py,sha256=
+lamindb/core/_data.py,sha256=SCyUjS9bL7MMqyZTJl8PxnNtLKL7eNiUcLvmwFrqP-k,17260
+lamindb/core/_feature_manager.py,sha256=_Bicjal2DQbpl6tR7p5o7Alb9rq0XYzAxrF_bV9sTjE,13894
 lamindb/core/_label_manager.py,sha256=zrWDSd2AkR6fKsGDxLSWqHC9fz9BcGlavPZEh92Wzjg,9063
 lamindb/core/_mapped_collection.py,sha256=e4P3AoykIMjD4_88BWbISWvKyWWTklwHl-_WLa72ZG4,16841
 lamindb/core/_run_context.py,sha256=EK0lFJWx32NY2FdqFR1YozR9zioC-BjA394nPu-KwLQ,17510
@@ -44,9 +44,11 @@ lamindb/core/storage/_backed_access.py,sha256=DUJIDjkGkemjmKLD05blndP_rO5DpUD0EZ
 lamindb/core/storage/_zarr.py,sha256=bMQSCsTOCtQy4Yo3KwCVpbUkKdWRApN9FM1rM-d2_G0,2839
 lamindb/core/storage/file.py,sha256=WTeC4ENn_O6HEoinmTviB89W81UrJT3bSGtnpqPpIyE,7242
 lamindb/core/storage/object.py,sha256=MPUb2M8Fleq2j9x1Ryqr3BETmvsDKyf11Ifvbxd3NpA,1097
+lamindb/integrations/__init__.py,sha256=aH2PmO2m4-vwIifMYTB0Fyyr_gZWtVnV71jT0tVWSw0,123
+lamindb/integrations/_vitessce.py,sha256=NHOiDJzn2QtzfkThhHEuDJuTTcSkem9x5d_wrQ-8nPI,1474
 lamindb/setup/__init__.py,sha256=OwZpZzPDv5lPPGXZP7-zK6UdO4FHvvuBh439yZvIp3A,410
 lamindb/setup/core/__init__.py,sha256=LqIIvJNcONxkqjbnP6CUaP4d45Lbd6TSMAcXFp4C7_8,231
-lamindb-0.69.
-lamindb-0.69.
-lamindb-0.69.
-lamindb-0.69.
+lamindb-0.69.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+lamindb-0.69.4.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+lamindb-0.69.4.dist-info/METADATA,sha256=lGw_OTGbviwuQbwwav9IVSHfblR8bwUNC1weeEx6Eok,2856
+lamindb-0.69.4.dist-info/RECORD,,
{lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/LICENSE
File without changes
{lamindb-0.69.3.dist-info → lamindb-0.69.4.dist-info}/WHEEL
File without changes